From 9021cf2d70f1908645d1a35113ed7936a98a31f7 Mon Sep 17 00:00:00 2001
From: Benjamin Levesque <14175665+benjlevesque@users.noreply.github.com>
Date: Wed, 25 Oct 2023 18:51:59 +0200
Subject: [PATCH] feat!: drop legacy storage (#1117)

---
 .circleci/config.yml | 60 +-
 README.md | 11 +-
 packages/advanced-logic/package.json | 4 +-
 packages/data-access/package.json | 4 -
 .../data-access/src/combined-data-access.ts | 2 -
 packages/data-access/src/data-access.ts | 624 ------
 packages/data-access/src/data-read.ts | 194 ++
 packages/data-access/src/data-write.ts | 54 +
 packages/data-access/src/ignored-location.ts | 104 -
 packages/data-access/src/in-memory-indexer.ts | 104 +
 packages/data-access/src/index.ts | 7 +-
 packages/data-access/src/interval-timer.ts | 116 --
 packages/data-access/src/mock-data-access.ts | 36 +
 packages/data-access/src/pending-store.ts | 32 +
 .../src/transaction-index/index.ts | 6 -
 .../transaction-index/location-by-topic.ts | 119 --
 .../timestamp-by-location.ts | 112 --
 .../transaction-index/transaction-index.ts | 239 ---
 packages/data-access/src/types.ts | 7 +
 packages/data-access/test/data-access.test.ts | 855 --------
 .../data-access/test/ignored-location.test.ts | 48 -
 .../data-access/test/interval-timer.test.ts | 270 ---
 .../test/location-by-topic.test.ts | 190 --
 .../data-access/test/pending-store.test.ts | 49 +
 .../test/timestamp-by-location.test.ts | 100 -
 .../test/transaction-index.test.ts | 347 ----
 packages/ethereum-storage/README.md | 36 +-
 packages/ethereum-storage/package.json | 10 -
 packages/ethereum-storage/src/config.ts | 55 +-
 .../ethereum-storage/src/ethereum-blocks.ts | 297 ---
 .../src/ethereum-entries-to-ipfs-content.ts | 227 ---
 .../src/ethereum-metadata-cache.ts | 93 -
 .../src/ethereum-storage-ethers.ts | 117 --
 .../ethereum-storage/src/ethereum-storage.ts | 438 +----
 .../src/ethereum-tx-submitter.ts | 14 +-
 .../ethereum-storage/src/ethereum-utils.ts | 17 +-
 .../ethereum-storage/src/gas-fee-definer.ts | 7 +-
 .../ethereum-storage/src/gas-price-definer.ts | 7 +-
 .../ethereum-storage/src/ignored-dataIds.ts | 193 --
 packages/ethereum-storage/src/index.ts | 3 +-
 .../src/ipfs-connection-error.ts | 6 -
 packages/ethereum-storage/src/ipfs-manager.ts | 28 +-
 packages/ethereum-storage/src/ipfs-storage.ts | 55 +-
 .../src/smart-contract-manager.ts | 682 -------
 .../test/dataids-ignored.test.ts | 153 --
 .../test/ethereum-blocks.test.ts | 194 --
 .../ethereum-entries-to-ipfs-content.test.ts | 380 ----
 .../test/ethereum-metadata-cache.test.ts | 63 -
 .../test/ethereum-storage.test.ts | 776 --------
 .../test/ethereum-tx-submitter.test.ts | 26 +
 .../test/ipfs-manager.test.ts | 22 +-
 .../test/ipfs-storage.test.ts | 61 +-
 .../test/smartcontract-manager.test.ts | 596 ------
 packages/integration-test/package.json | 6 +-
 packages/integration-test/test/layers.test.ts | 117 +-
 packages/integration-test/tsconfig.build.json | 1 +
 .../src/thegraph/info-retriever.ts | 2 +-
 .../src/payment/erc20-escrow-payment.ts | 2 +-
 .../request-client.js/src/http-data-access.ts | 5 +-
 .../src/http-metamask-data-access.ts | 54 +-
 .../src/http-request-network.ts | 4 +-
 .../request-client.js/src/mock-data-access.ts | 14 -
 .../request-client.js/src/mock-storage.ts | 78 +-
 .../test/declarative-payments.test.ts | 1 -
 .../test/http-data-access.test.ts | 2 +-
 packages/request-client.js/test/index.test.ts | 24 +-
 .../test/mock-storage.test.ts | 2 +-
 packages/request-node/README.md | 20 +-
 packages/request-node/package.json | 11 +-
 packages/request-node/src/bin.ts | 17 +
 packages/request-node/src/config.ts | 184 +-
 packages/request-node/src/dataAccess.ts | 50 +
 packages/request-node/src/dataStorage.ts | 12 +
 .../src/request/confirmedTransactionStore.ts | 6 +-
 .../request-node/src/request/getStatus.ts | 13 +-
 packages/request-node/src/requestNode.ts | 204 +-
 packages/request-node/src/requestNodeBase.ts | 200 --
 packages/request-node/src/server.ts | 63 +-
 packages/request-node/src/storageUtils.ts | 76 -
 packages/request-node/src/thegraph-node.ts | 56 -
 .../test/getChannelsByTopic.test.ts | 67 +-
 .../test/getConfirmedTransaction.test.ts | 13 +-
 .../test/getTransactionsByChannelId.test.ts | 6 +-
 packages/request-node/test/ipfsAdd.test.ts | 6 +-
 .../test/persistTransaction.test.ts | 36 +-
 .../request-node/test/requestNode.test.ts | 25 +-
 .../test/thegraph/persistTransaction.test.ts | 61 -
 packages/smart-contracts/package.json | 1 -
 .../scripts/conversion-proxy.ts | 2 +-
 .../scripts/deploy-payments.ts | 4 +-
 .../scripts/erc20-swap-to-conversion.ts | 2 +-
 .../BatchConversionPayments/index.ts | 2 +-
 .../BatchNoConversionPayments/index.ts | 2 +-
 .../src/lib/artifacts/BatchPayments/index.ts | 2 +-
 .../ChainlinkConversionPath/index.ts | 2 +-
 .../lib/artifacts/ERC20EscrowToPay/index.ts | 2 +-
 .../src/lib/artifacts/ERC20FeeProxy/index.ts | 2 +-
 .../src/lib/artifacts/ERC20Proxy/index.ts | 2 +-
 .../src/lib/artifacts/ERC20SwapToPay/index.ts | 2 +-
 .../ERC20TransferableReceivable/index.ts | 2 +-
 .../artifacts/Erc20ConversionProxy/index.ts | 2 +-
 .../artifacts/Erc20SwapConversion/index.ts | 2 +-
 .../lib/artifacts/EthConversionProxy/index.ts | 2 +-
 .../lib/artifacts/EthereumFeeProxy/index.ts | 2 +-
 .../src/lib/artifacts/EthereumProxy/index.ts | 2 +-
 .../lib/artifacts/RequestDeployer/index.ts | 2 +-
 .../lib/artifacts/RequestHashStorage/index.ts | 2 +-
 .../artifacts/RequestHashSubmitter/index.ts | 2 +-
 packages/thegraph-data-access/package.json | 3 +-
 .../thegraph-data-access/src/NoopDataWrite.ts | 17 +
 .../thegraph-data-access/src/data-access.ts | 402 +---
 packages/thegraph-data-access/src/index.ts | 8 +-
 .../thegraph-data-access/src/pending-store.ts | 30 -
 packages/thegraph-data-access/src/queries.ts | 2 +-
 .../src/subgraph-client.ts | 83 +-
 packages/thegraph-data-access/src/types.ts | 9 +
 packages/toolbox/package.json | 1 +
 packages/toolbox/src/commands/hash/submit.ts | 8 +-
 packages/toolbox/src/create-request.ts | 4 +-
 .../src/transaction-manager.ts | 2 +-
 .../transaction-manager/test/index.test.ts | 23 +-
 packages/types/package.json | 5 +-
 packages/types/src/data-access-types.ts | 58 +-
 packages/types/src/events.ts | 6 +
 packages/types/src/storage-types.ts | 64 +-
 .../usage-examples/src/mock/mock-storage.ts | 9 +-
 .../src/request-client-js-add-stakeholders.ts | 98 +-
 .../src/request-logic-add-stakeholder.ts | 2 +-
 .../src/request-logic-clear-request.ts | 2 +-
 .../src/request-logic-encrypted-request.ts | 2 +-
 .../src/web3-signature-provider.ts | 2 +-
 tsconfig.json | 2 +-
 yarn.lock | 1734 +----------------
 133 files changed, 1600 insertions(+), 10707 deletions(-)
 delete mode 100644 packages/data-access/src/data-access.ts
 create mode 100644 packages/data-access/src/data-read.ts
 create mode 100644 packages/data-access/src/data-write.ts
 delete mode 100644 packages/data-access/src/ignored-location.ts
 create mode 100644 packages/data-access/src/in-memory-indexer.ts
 delete mode 100644 packages/data-access/src/interval-timer.ts
 create mode 100644 packages/data-access/src/mock-data-access.ts
 create mode 100644 packages/data-access/src/pending-store.ts
 delete mode 100644
packages/data-access/src/transaction-index/index.ts delete mode 100644 packages/data-access/src/transaction-index/location-by-topic.ts delete mode 100644 packages/data-access/src/transaction-index/timestamp-by-location.ts delete mode 100644 packages/data-access/src/transaction-index/transaction-index.ts create mode 100644 packages/data-access/src/types.ts delete mode 100644 packages/data-access/test/data-access.test.ts delete mode 100644 packages/data-access/test/ignored-location.test.ts delete mode 100644 packages/data-access/test/interval-timer.test.ts delete mode 100644 packages/data-access/test/location-by-topic.test.ts create mode 100644 packages/data-access/test/pending-store.test.ts delete mode 100644 packages/data-access/test/timestamp-by-location.test.ts delete mode 100644 packages/data-access/test/transaction-index.test.ts delete mode 100644 packages/ethereum-storage/src/ethereum-blocks.ts delete mode 100644 packages/ethereum-storage/src/ethereum-entries-to-ipfs-content.ts delete mode 100644 packages/ethereum-storage/src/ethereum-metadata-cache.ts delete mode 100644 packages/ethereum-storage/src/ethereum-storage-ethers.ts delete mode 100644 packages/ethereum-storage/src/ignored-dataIds.ts delete mode 100644 packages/ethereum-storage/src/ipfs-connection-error.ts delete mode 100644 packages/ethereum-storage/src/smart-contract-manager.ts delete mode 100644 packages/ethereum-storage/test/dataids-ignored.test.ts delete mode 100644 packages/ethereum-storage/test/ethereum-blocks.test.ts delete mode 100644 packages/ethereum-storage/test/ethereum-entries-to-ipfs-content.test.ts delete mode 100644 packages/ethereum-storage/test/ethereum-metadata-cache.test.ts delete mode 100644 packages/ethereum-storage/test/ethereum-storage.test.ts create mode 100644 packages/ethereum-storage/test/ethereum-tx-submitter.test.ts delete mode 100644 packages/ethereum-storage/test/smartcontract-manager.test.ts delete mode 100644 packages/request-client.js/src/mock-data-access.ts create mode 100644 packages/request-node/src/bin.ts create mode 100644 packages/request-node/src/dataAccess.ts create mode 100644 packages/request-node/src/dataStorage.ts delete mode 100644 packages/request-node/src/requestNodeBase.ts delete mode 100644 packages/request-node/src/storageUtils.ts delete mode 100644 packages/request-node/src/thegraph-node.ts delete mode 100644 packages/request-node/test/thegraph/persistTransaction.test.ts create mode 100644 packages/thegraph-data-access/src/NoopDataWrite.ts delete mode 100644 packages/thegraph-data-access/src/pending-store.ts create mode 100644 packages/thegraph-data-access/src/types.ts create mode 100644 packages/types/src/events.ts diff --git a/.circleci/config.yml b/.circleci/config.yml index 3e6bd1a70f..3122fd5550 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -96,7 +96,6 @@ jobs: - run: name: Generate Payment Detection queries command: yarn workspace @requestnetwork/payment-detection run codegen - - run: name: Build all packages (tsc) command: yarn build:tsc @@ -372,6 +371,8 @@ jobs: - *node_image - *ipfs_image - *ganache_image + - *postgres_image + - *graph_image working_directory: *working_directory steps: - attach_workspace: @@ -382,6 +383,7 @@ jobs: - run: name: 'Deploy test contract from smart-contracts' command: 'yarn workspace @requestnetwork/smart-contracts run deploy' + - run: *step_graph_deploy - run: name: 'Start request-node' command: 'yarn workspace @requestnetwork/request-node run start' @@ -393,44 +395,6 @@ jobs: - run: name: 'Test integration-test 
(layers)' command: 'yarn workspace @requestnetwork/integration-test run test:layers --ci --maxWorkers=1' - # This test runs the node-client tests against a node backed by TheGraph data access - test-integration-graph-node: - docker: - - *node_image - - *ipfs_image - - *ganache_image - - *postgres_image - - *graph_image - working_directory: *working_directory - steps: - - attach_workspace: - at: *working_directory - - run: - name: 'Build integration-test' - command: 'yarn workspace @requestnetwork/integration-test run build' - - run: - name: 'Deploy test contract from smart-contracts' - command: 'yarn workspace @requestnetwork/smart-contracts run deploy' - - run: *step_graph_deploy - - run: - name: 'Start request-node' - command: 'yarn workspace @requestnetwork/request-node run start' - background: true - environment: - GRAPH_NODE_URL: http://localhost:8000/subgraphs/name/RequestNetwork/request-storage - - run: *step_wait_for_node - - run: - name: 'Confirm TheGraph mode' - command: | - NODE_ENDPOINT=$(wget -O- http://localhost:3000/status | jq .dataAccess.endpoint -r) - if [ "$NODE_ENDPOINT" != "http://localhost:8000/subgraphs/name/RequestNetwork/request-storage" ] ; then - print "Graph node URL mismatch" - print $NODE_ENDPOINT - exit 1 - fi - - run: - name: 'Test integration-test' - command: 'yarn workspace @requestnetwork/integration-test run test:node' test-usage-examples: docker: - *node_image @@ -528,6 +492,8 @@ jobs: - *node_image - *ipfs_image - *ganache_image + - *postgres_image + - *graph_image working_directory: *working_directory steps: - attach_workspace: @@ -538,20 +504,12 @@ jobs: - run: name: 'Deploy test contract from smart-contracts' command: 'yarn workspace @requestnetwork/smart-contracts run deploy' + - run: *step_graph_deploy - run: name: 'Start request-node' command: 'yarn workspace @requestnetwork/request-node run start' background: true - - run: - name: Waiting for Request Node to be ready - command: | - for i in `seq 1 10`; - do - curl -s -o /dev/null -f http://localhost:3000/readyz > /dev/null && echo Success && exit 0 - echo -n . - sleep 1 - done - echo Failed waiting for Node initialization && exit 1 + - run: *step_wait_for_node - run: name: 'Test integration-test (erc20)' command: 'yarn workspace @requestnetwork/integration-test run test:erc20 --ci --maxWorkers=1' @@ -659,9 +617,6 @@ workflows: - test-integration-test: requires: - build - - test-integration-graph-node: - requires: - - build - test-payment-detection: requires: - build @@ -699,7 +654,6 @@ workflows: - test-epk-signature - test-ethereum-storage - test-integration-test - - test-integration-graph-node - test-smart-contracts - test-multi-format - test-request-client diff --git a/README.md b/README.md index a7a75fdbaa..a43678d72f 100644 --- a/README.md +++ b/README.md @@ -41,12 +41,11 @@ Join the [Request Discord][request-discord-url] to get in touch with us. 
### Private Packages -| Package | Description | -| ---------------------------------------------------------------------- | ----------------------------------------------------------- | -| [`@requestnetwork/integration-test`](/packages/integration-test) | Integration test for the Request system | -| [`@requestnetwork/prototype-estimator`](/packages/prototype-estimator) | Give estimates of size and throughput of the Request system | -| [`@requestnetwork/toolbox`](/packages/toolbox) | Toolbox for Request Network | -| [`@requestnetwork/usage-example`](/packages/usage-example) | Usage examples of Request Network | +| Package | Description | +| ---------------------------------------------------------------- | --------------------------------------- | +| [`@requestnetwork/integration-test`](/packages/integration-test) | Integration test for the Request system | +| [`@requestnetwork/toolbox`](/packages/toolbox) | Toolbox for Request Network | +| [`@requestnetwork/usage-example`](/packages/usage-example) | Usage examples of Request Network | ## Contributing diff --git a/packages/advanced-logic/package.json b/packages/advanced-logic/package.json index ea5750f591..8d0d230403 100644 --- a/packages/advanced-logic/package.json +++ b/packages/advanced-logic/package.json @@ -42,13 +42,11 @@ "@requestnetwork/currency": "0.10.0", "@requestnetwork/types": "0.37.0", "@requestnetwork/utils": "0.37.0", - "@types/node": "16.11.7", - "lodash": "4.17.21", "tslib": "2.5.0" }, "devDependencies": { "@types/jest": "26.0.13", - "@types/lodash": "4.14.161", + "@types/node": "16.11.7", "jest": "26.4.2", "nyc": "15.1.0", "shx": "0.3.2", diff --git a/packages/data-access/package.json b/packages/data-access/package.json index 4787e0cacf..9fb89ba194 100644 --- a/packages/data-access/package.json +++ b/packages/data-access/package.json @@ -42,14 +42,10 @@ "@requestnetwork/multi-format": "0.15.11", "@requestnetwork/types": "0.37.0", "@requestnetwork/utils": "0.37.0", - "bluebird": "3.7.2", - "keyv": "4.0.3", "tslib": "2.5.0" }, "devDependencies": { - "@types/bluebird": "3.5.33", "@types/jest": "26.0.13", - "@types/keyv": "3.1.1", "@types/node": "16.11.7", "jest": "26.4.2", "nyc": "15.1.0", diff --git a/packages/data-access/src/combined-data-access.ts b/packages/data-access/src/combined-data-access.ts index 30af0385ba..bbee7cd0a6 100644 --- a/packages/data-access/src/combined-data-access.ts +++ b/packages/data-access/src/combined-data-access.ts @@ -39,6 +39,4 @@ export abstract class CombinedDataAccess implements DataAccessTypes.IDataAccess channelId: string, topics?: string[] | undefined, ) => Promise; - - abstract _getStatus(): Promise; } diff --git a/packages/data-access/src/data-access.ts b/packages/data-access/src/data-access.ts deleted file mode 100644 index 47a961b898..0000000000 --- a/packages/data-access/src/data-access.ts +++ /dev/null @@ -1,624 +0,0 @@ -import MultiFormat from '@requestnetwork/multi-format'; -import { DataAccessTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; -import { deepCopy, getCurrentTimestampInSecond, SimpleLogger, unique } from '@requestnetwork/utils'; - -import * as Bluebird from 'bluebird'; -import { EventEmitter } from 'events'; - -import Block from './block'; -import IgnoredLocationIndex from './ignored-location'; -import IntervalTimer from './interval-timer'; -import TransactionIndex from './transaction-index'; - -// Default interval time for auto synchronization -const DEFAULT_INTERVAL_TIME = 10000; - -/** - * Options for the DataAccess initialization - */ -export 
interface IDataAccessOptions { - /** - * Logger instance - */ - logger: LogTypes.ILogger; - - /** - * the transaction index, defaults to TransactionIndex if not set. - */ - transactionIndex: DataAccessTypes.ITransactionIndex; - - /** - * synchronizationIntervalTime Interval time between each synchronization - * Defaults to DEFAULT_INTERVAL_TIME. - */ - synchronizationIntervalTime: number; - - /** - * Index of the ignored location with the reason - */ - ignoredLocationIndex: IgnoredLocationIndex; - - /** - * Specifies whether to start the synchronization on initialization - */ - autoStartSynchronization: boolean; -} - -const emptyChannelsWithTopics: DataAccessTypes.IReturnGetChannelsByTopic = { - meta: { - storageMeta: {}, - transactionsStorageLocation: {}, - }, - result: { transactions: {} }, -}; - -/** - * Implementation of Data-Access layer without encryption - */ -export default class DataAccess implements DataAccessTypes.IDataAccess { - // Transaction index, that allows storing and retrieving transactions by channel or topic, with time boundaries. - private transactionIndex: DataAccessTypes.ITransactionIndex; - - // boolean to store the initialization state - protected isInitialized = false; - // Storage layer - private storage: StorageTypes.IStorage; - - private ignoredLocationIndex: IgnoredLocationIndex; - - // The function used to synchronize with the storage should be called periodically - // This object allows to handle the periodical call of the function - private synchronizationTimer: IntervalTimer; - - /** - * Specifies whether to start the synchronization on initialization - */ - private autoStartSynchronization: boolean; - - // Timestamp of the last synchronization - // - // Are you debugging and this value is not changing as much as you think it should? Read bellow. - // ATTENTION: This value should be updated with the lastTimestamp returned by the storage - // and never with `now` timestamp. For example, if storage is using ethereum, it may add new blocks - // between the most recent block and the current timestamp. This may lead to blocks being skipped. 
- private lastSyncStorageTimestamp: number; - - /** - * Logger instance - */ - private logger: LogTypes.ILogger; - - /** - * Constructor DataAccess interface - * - * @param IStorage storage storage object - */ - public constructor(storage: StorageTypes.IStorage, options?: Partial) { - const defaultOptions: IDataAccessOptions = { - ignoredLocationIndex: new IgnoredLocationIndex(), - logger: new SimpleLogger(), - synchronizationIntervalTime: DEFAULT_INTERVAL_TIME, - transactionIndex: new TransactionIndex(), - autoStartSynchronization: false, - }; - const { - ignoredLocationIndex, - logger, - synchronizationIntervalTime, - transactionIndex, - autoStartSynchronization, - } = { - ...defaultOptions, - ...options, - }; - this.storage = storage; - this.lastSyncStorageTimestamp = 0; - this.autoStartSynchronization = autoStartSynchronization; - this.synchronizationTimer = new IntervalTimer( - () => this.synchronizeNewDataIds(), - synchronizationIntervalTime, - logger, - 5, - ); - this.transactionIndex = transactionIndex; - this.ignoredLocationIndex = ignoredLocationIndex; - - this.logger = logger; - } - - /** - * Function to initialize the dataId topic with the previous block - */ - public async initialize(): Promise { - if (this.isInitialized) { - throw new Error('already initialized'); - } - await this.transactionIndex.initialize(); - - // initialize storage - await this.storage.initialize(); - - // if transaction index already has data, then sync from the last available timestamp - const lastSynced = await this.transactionIndex.getLastTransactionTimestamp(); - const now = getCurrentTimestampInSecond(); - - // initialize the dataId topic with the previous block - const allDataWithMeta = await this.storage.getData( - lastSynced - ? { - from: lastSynced, - to: now, - } - : undefined, - ); - - // The last synced timestamp is the latest one returned by storage - this.lastSyncStorageTimestamp = allDataWithMeta.lastTimestamp; - - // check if the data returned by getData are correct - // if yes, the dataIds are indexed with LocationByTopic - await this.pushLocationsWithTopics(allDataWithMeta.entries); - - this.isInitialized = true; - - if (this.autoStartSynchronization) { - this.startAutoSynchronization(); - } - } - - public async close(): Promise { - await this.stopAutoSynchronization(); - } - - /** - * Function to persist transaction and topic in storage - * For now, we create a block for each transaction - * - * @param transaction transaction to persist - * @param channelId string to identify a bunch of transaction - * @param topics list of string to topic the transaction - * - * @returns string dataId where the transaction is stored - */ - public async persistTransaction( - transaction: DataAccessTypes.ITransaction, - channelId: string, - topics: string[] = [], - ): Promise { - this.checkInitialized(); - - // get all the topics not well formatted - const notFormattedTopics: string[] = topics.filter( - (topic) => !MultiFormat.hashFormat.isDeserializableString(topic), - ); - - if (notFormattedTopics.length !== 0) { - throw new Error( - `The following topics are not well formatted: ${JSON.stringify(notFormattedTopics)}`, - ); - } - - // create a block and add the transaction in it - const updatedBlock = Block.pushTransaction( - Block.createEmptyBlock(), - transaction, - channelId, - topics, - ); - - // get the topic of the data in storage - const resultAppend = await this.storage.append(JSON.stringify(updatedBlock)); - - const result: DataAccessTypes.IReturnPersistTransaction = Object.assign(new 
EventEmitter(), { - meta: { - storageMeta: resultAppend.meta, - topics, - transactionStorageLocation: resultAppend.id, - }, - result: {}, - }); - - // Store the data to the real storage - resultAppend - .on('confirmed', async (resultAppendConfirmed: StorageTypes.IAppendResult) => { - // update the timestamp with the confirmed one - await this.transactionIndex.updateTimestamp( - resultAppendConfirmed.id, - resultAppendConfirmed.meta.timestamp, - ); - - const resultAfterConfirmation: DataAccessTypes.IReturnPersistTransactionRaw = { - meta: { - storageMeta: resultAppendConfirmed.meta, - topics, - transactionStorageLocation: resultAppendConfirmed.id, - }, - result: {}, - }; - - result.emit('confirmed', resultAfterConfirmation); - }) - .on('error', async (error) => { - result.emit('error', error); - }); - - // adds this transaction to the index, to enable retrieving it later. - await this.transactionIndex.addTransaction( - resultAppend.id, - updatedBlock.header, - resultAppend.meta.timestamp, - ); - - return result; - } - - /** - * Function to get a list of transactions indexed by channel id - * if timestampBoundaries is given, the search will be restrict from timestamp 'from' to the timestamp 'to'. - * if timestampBoundaries.from is not given, the search will be start from the very start - * if timestampBoundaries.to is not given, the search will be stop at the current timestamp - * - * @param channelId channel id to retrieve the transaction from - * @param timestampBoundaries timestamp boundaries of the transactions search - * - * @returns list of transactions in the channel - */ - public async getTransactionsByChannelId( - channelId: string, - timestampBoundaries?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - this.checkInitialized(); - // Gets the list of locationStorage indexed by the channel id that are within the boundaries - const storageLocationList = await this.transactionIndex.getStorageLocationList( - channelId, - timestampBoundaries, - ); - - // Gets the block and meta from the storage location - const blockWithMetaList = await this.getBlockAndMetaFromStorageLocation(storageLocationList); - - // Get the transactions (and the meta) indexed by channelIds in the blocks found - const transactionsAndMetaPerBlocks: Array<{ - transactions: DataAccessTypes.ITimestampedTransaction[]; - transactionsStorageLocation: string[]; - storageMeta: StorageTypes.IEntryMetadata[]; - }> = - // for all the blocks found - blockWithMetaList.map((blockAndMeta) => { - // Gets the list of positions of the transaction needed from the block - const transactionPositions: number[] = Block.getTransactionPositionFromChannelId( - blockAndMeta.block, - channelId, - ); - - return this.getTransactionAndMetaFromPosition( - transactionPositions, - blockAndMeta.block, - blockAndMeta.location, - blockAndMeta.meta, - ); - }); - - // Creates the result by concatenating the transactions and meta of every blocks - return transactionsAndMetaPerBlocks.reduce( - (accumulator: DataAccessTypes.IReturnGetTransactions, elem) => ({ - meta: { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - storageMeta: accumulator.meta.storageMeta!.concat(elem.storageMeta), - transactionsStorageLocation: accumulator.meta.transactionsStorageLocation.concat( - elem.transactionsStorageLocation, - ), - }, - result: { - transactions: accumulator.result.transactions.concat(elem.transactions), - }, - }), - // initial value is full of empty arrays - { - meta: { storageMeta: [], transactionsStorageLocation: [] }, - result: { 
transactions: [] }, - }, - ); - } - - /** - * Function to get a list of channels indexed by topic - * - * @param topic topic to retrieve the channels from - * @param updatedBetween filter the channels that have received new data within the time boundaries - * - * @returns list of channels indexed by topic - */ - public async getChannelsByTopic( - topic: string, - updatedBetween?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - this.checkInitialized(); - - // check if the topic is well formatted - if (!MultiFormat.hashFormat.isDeserializableString(topic)) { - throw new Error(`The topic is not well formatted: ${topic}`); - } - - const channelIds = await this.transactionIndex.getChannelIdsForTopic(topic, updatedBetween); - - // Gets the transactions per channel id - const transactionsAndMeta = Bluebird.map(channelIds, (channelId) => - this.getTransactionsByChannelId(channelId).then((transactionsWithMeta) => ({ - channelId, - transactionsWithMeta, - })), - ); - - // Gather all the transactions in one object - return transactionsAndMeta.reduce((finalResult, channelIdAndTransactions) => { - const id = channelIdAndTransactions.channelId; - - // Adds the storage location of the channel's data - finalResult.meta.transactionsStorageLocation[id] = - channelIdAndTransactions.transactionsWithMeta.meta.transactionsStorageLocation; - - // Adds the meta of the channel - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - finalResult.meta.storageMeta![id] = - channelIdAndTransactions.transactionsWithMeta.meta.storageMeta; - - // Adds the transaction of the channel - finalResult.result.transactions[id] = - channelIdAndTransactions.transactionsWithMeta.result.transactions; - - return finalResult; - }, deepCopy(emptyChannelsWithTopics)); - } - - /** - * Function to get a list of channels indexed by multiple topics - * - * @param topics topics to retrieve the channels from - * @param updatedBetween filter the channels that have received new data within the time boundaries - * - * @returns list of channels indexed by topics - */ - public async getChannelsByMultipleTopics( - topics: string[], - updatedBetween?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - this.checkInitialized(); - - if (topics.some((topic) => !MultiFormat.hashFormat.isDeserializableString(topic))) { - throw new Error(`The topics are not well formatted`); - } - - const channelIds: string[] = await this.transactionIndex.getChannelIdsForMultipleTopics( - topics, - updatedBetween, - ); - - // Gets the transactions per channel id - const transactionsAndMeta = Bluebird.map(channelIds, (channelId) => - this.getTransactionsByChannelId(channelId).then((transactionsWithMeta) => ({ - channelId, - transactionsWithMeta, - })), - ); - - // Gather all the transactions in one object - return transactionsAndMeta.reduce((finalResult, channelIdAndTransactions) => { - const id = channelIdAndTransactions.channelId; - - // Adds the storage location of the channel's data - finalResult.meta.transactionsStorageLocation[id] = - channelIdAndTransactions.transactionsWithMeta.meta.transactionsStorageLocation; - - // Adds the meta of the channel - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - finalResult.meta.storageMeta![id] = - channelIdAndTransactions.transactionsWithMeta.meta.storageMeta; - - // Adds the transaction of the channel - finalResult.result.transactions[id] = - channelIdAndTransactions.transactionsWithMeta.result.transactions; - - return finalResult; - }, deepCopy(emptyChannelsWithTopics)); - } - - 
/** - * Function to synchronize with the new dataIds on the storage - */ - public async synchronizeNewDataIds(): Promise { - this.checkInitialized(); - const synchronizationTo = getCurrentTimestampInSecond(); - - // We increment lastSyncStorageTimestamp because the data located at lastSyncStorageTimestamp - // 0 means it's the first synchronization - let synchronizationFrom = 0; - if (this.lastSyncStorageTimestamp > 0) { - synchronizationFrom = this.lastSyncStorageTimestamp + 1; - } - - // Read new data from storage - const newDataWithMeta = await this.storage.getData({ - from: synchronizationFrom, - to: synchronizationTo, - }); - - // Try to get some data previously ignored - const oldEntriesWithMeta = await this.storage.getIgnoredData(); - - // check if the data returned by getNewDataId are correct - // if yes, the dataIds are indexed with LocationByTopic - await this.pushLocationsWithTopics(newDataWithMeta.entries.concat(oldEntriesWithMeta)); - - // The last synced timestamp is the latest one returned by storage - this.lastSyncStorageTimestamp = newDataWithMeta.lastTimestamp; - } - - /** - * Start to synchronize with the storage automatically - * Once called, synchronizeNewDataId function is called periodically - */ - public startAutoSynchronization(): void { - this.checkInitialized(); - this.synchronizationTimer.start(); - } - - /** - * Stop to synchronize with the storage automatically - */ - public async stopAutoSynchronization(): Promise { - if (this.synchronizationTimer.isStarted) { - await this.synchronizationTimer.stop(); - } - } - - /** - * Gets information of the data indexed - * - * @param detailed if true get the list of the files hashes - */ - public async _getStatus(detailed = false): Promise { - this.checkInitialized(); - - // last transaction timestamp retrieved - const lastLocationTimestamp = await this.transactionIndex.getLastTransactionTimestamp(); - const listIndexedLocation = await this.transactionIndex.getIndexedLocations(); - const listIgnoredLocationIndex = await this.ignoredLocationIndex.getIgnoredLocations(); - - const synchronizationConfig = this.synchronizationTimer.getConfig(); - - return { - filesIgnored: { - count: Object.keys(listIgnoredLocationIndex).length, - list: detailed ? listIgnoredLocationIndex : undefined, - }, - filesRetrieved: { - count: listIndexedLocation.length, - lastTimestamp: lastLocationTimestamp, - list: detailed ? listIndexedLocation : undefined, - }, - lastSynchronizationTimestamp: this.lastSyncStorageTimestamp, - storage: await this.storage._getStatus(detailed), - synchronizationConfig, - }; - } - - /** - * Check the format of the data, extract the topics from it and push location indexed with the topics - * - * @private - * @param entries data with meta from storage functions - * @param locationByTopic LocationByTopic object to push location - */ - private async pushLocationsWithTopics(entries: StorageTypes.IEntry[]): Promise { - if (!entries) { - throw Error(`data from storage do not follow the standard`); - } - let parsingErrorCount = 0; - let proceedCount = 0; - await Bluebird.each(entries, async (entry) => { - if (!entry.content || !entry.id) { - throw Error(`data from storage do not follow the standard`); - } - - let block; - const blockString = entry.content; - - try { - block = Block.parseBlock(blockString); - proceedCount++; - // adds this transaction to the index, to enable retrieving it later. 
- await this.transactionIndex.addTransaction(entry.id, block.header, entry.meta.timestamp); - } catch (e) { - parsingErrorCount++; - // Index ignored Location - await this.ignoredLocationIndex.pushReasonByLocation(entry.id, e.message); - this.logger.debug(`Error: can't parse content of the dataId (${entry.id}): ${e}`, [ - 'synchronization', - ]); - } - }); - - this.logger.info( - `Synchronization: ${proceedCount} blocks synchronized, ${parsingErrorCount} ignored from parsing error`, - ['synchronization'], - ); - } - - /** - * Gets the blocks and their metadata from an array of storage location - * - * @param storageLocationList array of storage location - * @returns the blocks and their metadata - */ - private async getBlockAndMetaFromStorageLocation( - storageLocationList: string[], - ): Promise< - Array<{ block: DataAccessTypes.IBlock; meta: StorageTypes.IEntryMetadata; location: string }> - > { - // Gets blocks indexed by topic - return Promise.all( - storageLocationList.map(async (location) => { - const resultRead = await this.storage.read(location); - - return { - block: Block.parseBlock(resultRead.content), - location, - meta: resultRead.meta, - }; - }), - ); - } - - /** - * Gets the transactions and their metadata from a block and an array of transaction positions - * - * @param transactionPositions transaction positions to retrieve - * @param block the block - * @param location location of the block - * @param meta metadata of the block - * @returns the transactions and their metadata - */ - private getTransactionAndMetaFromPosition( - transactionPositions: number[], - block: DataAccessTypes.IBlock, - location: string, - meta: StorageTypes.IEntryMetadata, - ): { - transactions: DataAccessTypes.ITimestampedTransaction[]; - transactionsStorageLocation: string[]; - storageMeta: StorageTypes.IEntryMetadata[]; - } { - // Gets the transaction from the positions - const transactions: DataAccessTypes.ITimestampedTransaction[] = - // first remove de duplicates - unique(transactionPositions).uniqueItems.map( - // Get the transaction from their position and add the timestamp - (position: number) => ({ - state: - meta.state === StorageTypes.ContentState.CONFIRMED - ? 
DataAccessTypes.TransactionState.CONFIRMED - : DataAccessTypes.TransactionState.PENDING, - timestamp: meta.timestamp, - transaction: block.transactions[position], - }), - ); - - // Gets the list of storage location of the transactions found - const transactionsStorageLocation = Array(transactions.length).fill(location); - - // Gets the list of storage meta of the transactions found - const storageMeta = transactions.map(() => meta); - - return { transactions, transactionsStorageLocation, storageMeta }; - } - - /** - * Throws an error if the data access isn't initialized - */ - private checkInitialized(): void { - if (!this.isInitialized) { - throw new Error('DataAccess must be initialized'); - } - } -} diff --git a/packages/data-access/src/data-read.ts b/packages/data-access/src/data-read.ts new file mode 100644 index 0000000000..34d0698b76 --- /dev/null +++ b/packages/data-access/src/data-read.ts @@ -0,0 +1,194 @@ +import { DataAccessTypes, StorageTypes } from '@requestnetwork/types'; +import { DataAccessBaseOptions } from './types'; + +export class DataAccessRead implements DataAccessTypes.IDataRead { + private network: string; + + private pendingStore?: DataAccessTypes.IPendingStore; + + constructor( + private readonly storage: StorageTypes.IIndexer, + { network, pendingStore }: DataAccessBaseOptions, + ) { + this.network = network; + this.pendingStore = pendingStore; + } + + async initialize(): Promise { + await this.storage.initialize(); + } + + close(): Promise { + return Promise.resolve(); + } + + async getTransactionsByChannelId( + channelId: string, + updatedBetween?: DataAccessTypes.ITimestampBoundaries, + ): Promise { + const pending = await this.getPending(channelId); + const result = await this.storage.getTransactionsByChannelId(channelId, updatedBetween); + + return { + meta: { + transactionsStorageLocation: result.transactions + .map((x) => x.hash) + .concat(pending.meta.transactionsStorageLocation), + storageMeta: result.transactions.map((tx) => + this.toStorageMeta(tx, result.blockNumber, this.network), + ), + }, + result: { + transactions: result.transactions + .map(this.toTimestampedTransaction) + .concat(pending.result.transactions), + }, + }; + } + + async getChannelsByTopic( + topic: string, + updatedBetween?: DataAccessTypes.ITimestampBoundaries | undefined, + ): Promise { + return this.getChannelsByMultipleTopics([topic], updatedBetween); + } + + async getChannelsByMultipleTopics( + topics: string[], + updatedBetween?: DataAccessTypes.ITimestampBoundaries, + ): Promise { + const result = await this.storage.getTransactionsByTopics(topics); + const pending = this.pendingStore?.findByTopics(topics) || []; + + const pendingItems = pending.map((item) => ({ + hash: item.storageResult.id, + channelId: item.channelId, + ...item.transaction, + + blockNumber: item.storageResult.meta.ethereum?.blockNumber || -1, + blockTimestamp: item.storageResult.meta.ethereum?.blockTimestamp || -1, + transactionHash: item.storageResult.meta.ethereum?.transactionHash || '', + smartContractAddress: item.storageResult.meta.ethereum?.smartContractAddress || '', + size: String(item.storageResult.meta.ipfs?.size || 0), + topics: item.topics || [], + })); + + const transactions = result.transactions.concat(...pendingItems); + + // list of channels having at least one tx updated during the updatedBetween boundaries + const channels = ( + updatedBetween + ? 
transactions.filter( + (tx) => + tx.blockTimestamp >= (updatedBetween.from || 0) && + tx.blockTimestamp <= (updatedBetween.to || Number.MAX_SAFE_INTEGER), + ) + : transactions + ).map((x) => x.channelId); + + const filteredTxs = transactions.filter((tx) => channels.includes(tx.channelId)); + return { + meta: { + storageMeta: filteredTxs.reduce((acc, tx) => { + acc[tx.channelId] = [this.toStorageMeta(tx, result.blockNumber, this.network)]; + return acc; + }, {} as Record), + transactionsStorageLocation: filteredTxs.reduce((prev, curr) => { + if (!prev[curr.channelId]) { + prev[curr.channelId] = []; + } + prev[curr.channelId].push(curr.hash); + return prev; + }, {} as Record), + }, + result: { + transactions: filteredTxs.reduce((prev, curr) => { + if (!prev[curr.channelId]) { + prev[curr.channelId] = []; + } + prev[curr.channelId].push(this.toTimestampedTransaction(curr)); + return prev; + }, {} as DataAccessTypes.ITransactionsByChannelIds), + }, + }; + } + + private async getPending(channelId: string): Promise { + const emptyResult = { + meta: { + transactionsStorageLocation: [], + storageMeta: [], + }, + result: { + transactions: [], + }, + }; + const pending = this.pendingStore?.get(channelId); + if (!pending) { + return emptyResult; + } + const { storageResult, transaction } = pending; + + const { transactions } = await this.storage.getTransactionsByStorageLocation(storageResult.id); + + // if the pending tx is found, remove its state and fetch the real data + if (transactions.length > 0) { + this.pendingStore?.remove(channelId); + return emptyResult; + } + + return { + meta: { + transactionsStorageLocation: [storageResult.id], + storageMeta: [storageResult.meta], + }, + result: { + transactions: [ + { + state: DataAccessTypes.TransactionState.PENDING, + timestamp: storageResult.meta.timestamp, + transaction, + }, + ], + }, + }; + } + + protected toStorageMeta( + result: StorageTypes.IIndexedTransaction, + lastBlockNumber: number, + network: string, + ): StorageTypes.IEntryMetadata { + return { + ethereum: { + blockConfirmation: lastBlockNumber - result.blockNumber, + blockNumber: result.blockNumber, + blockTimestamp: result.blockTimestamp, + networkName: network, + smartContractAddress: result.smartContractAddress, + transactionHash: result.transactionHash, + }, + ipfs: { + size: parseInt(result.size), + }, + state: StorageTypes.ContentState.CONFIRMED, + storageType: StorageTypes.StorageSystemType.ETHEREUM_IPFS, + timestamp: result.blockTimestamp, + }; + } + + protected toTimestampedTransaction( + transaction: StorageTypes.IIndexedTransaction, + ): DataAccessTypes.ITimestampedTransaction { + return { + state: DataAccessTypes.TransactionState.CONFIRMED, + timestamp: transaction.blockTimestamp, + transaction: { + data: transaction.data || undefined, + encryptedData: transaction.encryptedData || undefined, + encryptionMethod: transaction.encryptionMethod || undefined, + keys: transaction.keys || undefined, + }, + }; + } +} diff --git a/packages/data-access/src/data-write.ts b/packages/data-access/src/data-write.ts new file mode 100644 index 0000000000..b22edf73b7 --- /dev/null +++ b/packages/data-access/src/data-write.ts @@ -0,0 +1,54 @@ +import { EventEmitter } from 'events'; +import Block from './block'; +import { DataAccessTypes, StorageTypes } from '@requestnetwork/types'; + +export class DataAccessWrite implements DataAccessTypes.IDataWrite { + constructor( + protected readonly storage: StorageTypes.IStorageWrite, + private readonly pendingStore?: DataAccessTypes.IPendingStore, + ) { 
+ this.pendingStore = pendingStore; + } + + async initialize(): Promise { + return; + } + + async close(): Promise { + return; + } + + async persistTransaction( + transaction: DataAccessTypes.ITransaction, + channelId: string, + topics?: string[] | undefined, + ): Promise { + const updatedBlock = Block.pushTransaction( + Block.createEmptyBlock(), + transaction, + channelId, + topics, + ); + + const storageResult = await this.storage.append(JSON.stringify(updatedBlock)); + + const eventEmitter = new EventEmitter() as DataAccessTypes.PersistTransactionEmitter; + this.pendingStore?.add(channelId, { transaction, storageResult, topics: topics || [] }); + + const result: DataAccessTypes.IReturnPersistTransactionRaw = { + meta: { + transactionStorageLocation: storageResult.id, + storageMeta: storageResult.meta, + topics: topics || [], + }, + result: {}, + }; + storageResult.on('confirmed', (r) => { + result.meta.storageMeta = r.meta; + eventEmitter.emit('confirmed', result); + }); + storageResult.on('error', (err) => eventEmitter.emit('error', err)); + + return Object.assign(eventEmitter, result); + } +} diff --git a/packages/data-access/src/ignored-location.ts b/packages/data-access/src/ignored-location.ts deleted file mode 100644 index 416f6d5c18..0000000000 --- a/packages/data-access/src/ignored-location.ts +++ /dev/null @@ -1,104 +0,0 @@ -import * as Keyv from 'keyv'; - -/** - * Interface for reason from location - */ -export interface IReasonByIgnoredLocation { - [location: string]: string; -} -/** - * Class used to store the block's reason indexed by location of blocks - */ -export default class ReasonsByIgnoredLocationIndex { - /** - * reason by location - * maps dataId => reason - */ - private reasonsByIgnoredLocation: Keyv; - - private listIgnoredLocation: Keyv; - - /** - * reasonByLocationTransactionIndex constructor - * @param store a Keyv store to persist the index to - */ - public constructor(store?: Keyv.Store) { - this.reasonsByIgnoredLocation = new Keyv({ - namespace: 'reasonsByIgnoredLocation', - store, - }); - - this.listIgnoredLocation = new Keyv({ - namespace: 'listIgnoredLocation', - store, - }); - } - - /** - * Function to push reason indexed by location - * - * @param dataId dataId of the block - * @param reason reason to be ignored - */ - public async pushReasonByLocation(dataId: string, reason: string): Promise { - if (!(await this.reasonsByIgnoredLocation.get(dataId))) { - await this.reasonsByIgnoredLocation.set(dataId, reason); - await this.updateDataId(dataId); - } - } - - /** - * Function to update reason indexed by location - * - * @param dataId dataId of the block - * @param reason reason to be ignored - */ - public async removeReasonByLocation(dataId: string): Promise { - await this.reasonsByIgnoredLocation.delete(dataId); - } - - /** - * Function to get reason from location - * - * @param dataId location to get the reason from - * @returns reason of the location, null if not found - */ - public async getReasonFromLocation(dataId: string): Promise { - const reason: string | undefined = await this.reasonsByIgnoredLocation.get(dataId); - return reason ? 
reason : null; - } - - /** - * Get the list of data ids stored - * - * @returns the list of data ids stored - */ - public async getIgnoredLocations(): Promise { - const listDataId: string[] | undefined = await this.listIgnoredLocation.get('list'); - - if (!listDataId) { - return {}; - } - const result: any = {}; - for (const dataId of Array.from(listDataId)) { - result[dataId] = await this.reasonsByIgnoredLocation.get(dataId); - } - - return result; - } - - /** - * Update the list of data ids stored - * - * @param dataId data id to add to the list - * @returns - */ - private async updateDataId(dataId: string): Promise { - let listDataIds: string[] | undefined = await this.listIgnoredLocation.get('list'); - if (!listDataIds) { - listDataIds = []; - } - listDataIds.push(dataId); - await this.listIgnoredLocation.set('list', listDataIds); - } -} diff --git a/packages/data-access/src/in-memory-indexer.ts b/packages/data-access/src/in-memory-indexer.ts new file mode 100644 index 0000000000..de438647aa --- /dev/null +++ b/packages/data-access/src/in-memory-indexer.ts @@ -0,0 +1,104 @@ +import { StorageTypes } from '@requestnetwork/types'; +import Block from './block'; + +/** Helper class to manage a map with arrays as values */ +class ArrayMap extends Map { + add(key: string, value: T) { + return this.set(key, this.get(key).concat(value)); + } + get(key: string) { + return super.get(key) || []; + } +} + +/** + * InMemory implementation to index Request storage transactions, for testing and development. + * The data itself is not indexed, only references to its location + */ +export class InMemoryIndexer implements StorageTypes.IIndexer { + // these fields must be private (#) or jest's matcher won't work. + readonly #channelToLocationsIndex = new ArrayMap(); + readonly #topicToChannelsIndex = new ArrayMap(); + + constructor(private readonly storageRead: StorageTypes.IStorageRead) {} + + /** Adds the indexed data for easy retrieval */ + public addIndex(channelId: string, topics: string[], location: string): void { + this.#channelToLocationsIndex.add(channelId, location); + for (const topic of topics || []) { + this.#topicToChannelsIndex.add(topic, channelId); + } + } + + async initialize(): Promise { + return; + } + + async getTransactionsByStorageLocation( + hash: string, + ): Promise { + const transactions = await this.parseDocuments([hash]); + return { + blockNumber: 0, + transactions, + }; + } + + async getTransactionsByChannelId( + channelId: string, + ): Promise { + const locations = this.#channelToLocationsIndex.get(channelId); + const transactions = await this.parseDocuments(locations); + return { + blockNumber: 0, + transactions: transactions.filter((x) => x.channelId === channelId), + }; + } + + async getTransactionsByTopics(topics: string[]): Promise { + const channelIds = topics.map((topic) => this.#topicToChannelsIndex.get(topic)).flat(); + const locations = channelIds + .map((channel) => this.#channelToLocationsIndex.get(channel)) + .flat(); + + const transactions = await this.parseDocuments(locations); + + return { + blockNumber: 0, + transactions, + }; + } + + private async parseDocuments(locations: string[]) { + const entries = await this.storageRead.readMany(locations); + return entries + .filter((x) => x.meta.state === StorageTypes.ContentState.CONFIRMED) + .map((curr) => { + const { id, meta, content } = curr; + const block = Block.parseBlock(content); + return Object.entries(block.header.channelIds).map(([channelId, [index]]) => ({ + locationId: id, + channelId, + meta, + 
transaction: block.transactions[index], + })); + }) + .flat() + .map( + (item): StorageTypes.IIndexedTransaction => ({ + blockNumber: 0, + blockTimestamp: 0, + channelId: item.channelId, + hash: item.locationId, + size: '0', + smartContractAddress: '', + topics: [], + transactionHash: '', + data: item.transaction.data, + encryptedData: item.transaction.encryptedData, + encryptionMethod: item.transaction.encryptionMethod, + keys: item.transaction.keys, + }), + ); + } +} diff --git a/packages/data-access/src/index.ts b/packages/data-access/src/index.ts index c3240bb96e..f4fc148b26 100644 --- a/packages/data-access/src/index.ts +++ b/packages/data-access/src/index.ts @@ -1,4 +1,7 @@ -export { default as DataAccess } from './data-access'; -export { default as TransactionIndex } from './transaction-index'; export { default as Block } from './block'; export { CombinedDataAccess } from './combined-data-access'; +export { DataAccessWrite } from './data-write'; +export { DataAccessRead } from './data-read'; +export { PendingStore } from './pending-store'; +export { DataAccessBaseOptions } from './types'; +export { MockDataAccess } from './mock-data-access'; diff --git a/packages/data-access/src/interval-timer.ts b/packages/data-access/src/interval-timer.ts deleted file mode 100644 index b24eec61ec..0000000000 --- a/packages/data-access/src/interval-timer.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { LogTypes } from '@requestnetwork/types'; - -/** - * Module to start and stop a periodical function - */ -export default class IntervalTimer { - // Count the successive number of failure from the interval function - // This value is used as we may not want to directly log an error if the interval function fails once - public intervalFunctionSuccessiveFailureCount = 0; - - private readonly intervalFunction: () => Promise; - private readonly intervalTime: number; - private readonly logger: LogTypes.ILogger; - private readonly successiveFailureThreshold: number; - private timeoutObject: NodeJS.Timeout | null = null; - private lastRecursion: Promise = Promise.resolve(); - - /** - * Constructor IntervalTimer - * - * @param intervalFunction function to call periodically when timer is started - * @param intervalTime Interval time between interval function call - * @param logger Logger instance - * @param successiveFailureThreshold Required number of successive failure from interval function before logging an error - */ - public constructor( - intervalFunction: () => Promise, - intervalTime: number, - logger: LogTypes.ILogger, - successiveFailureThreshold = 1, - ) { - this.intervalFunction = intervalFunction; - this.intervalTime = intervalTime; - this.logger = logger; - this.successiveFailureThreshold = successiveFailureThreshold; - } - - /** - * Start the interval timer - */ - public start(): void { - // Timer can't be restarted - if (this.timeoutObject) { - throw Error('IntervalTimer already started'); - } - - // Function to be called periodically - // This function calls and waits for the intervalFunction and sets a timeout - // to call itself recursively - const recursiveTimeoutFunction = async (): Promise => { - try { - // We wait for the internal function to reset the timeout - await this.intervalFunction(); - - // Reset intervalFunctionSuccessiveFailureCount - this.intervalFunctionSuccessiveFailureCount = 0; - } catch (e) { - // An error in the interval function should not stop the interval timer - - // An isolated error from the interval function is considered as a warning - 
this.logger.warn(`${this.intervalFunction.name || 'intervalFunction'} error: ${e}`); - - this.intervalFunctionSuccessiveFailureCount++; - - // If the interval function failed several times in a row, it can be caused by a bigger problem therefore we display an error - if (this.intervalFunctionSuccessiveFailureCount >= this.successiveFailureThreshold) { - this.logger.error( - `${this.intervalFunction.name || 'intervalFunction'} failed ${ - this.intervalFunctionSuccessiveFailureCount - } times in a row, last error: ${e}`, - ); - } - } - - createRecursion(); - }; - - const createRecursion = () => { - this.timeoutObject = setTimeout(() => { - this.lastRecursion = recursiveTimeoutFunction(); - }, this.intervalTime); - }; - - // First call to the recursive timeout function - createRecursion(); - } - - /** - * Stop the interval timer - */ - public async stop(): Promise { - if (!this.timeoutObject) { - return Promise.reject("Can't stop IntervalTimer if it has not been started"); - } - - clearTimeout(this.timeoutObject); - this.timeoutObject = null; - return this.lastRecursion; - } - - public get isStarted(): boolean { - return Boolean(this.timeoutObject); - } - - /** - * Gets current configuration - * - * @return the current configuration attributes - */ - public getConfig(): { intervalTime: number; successiveFailureThreshold: number } { - return { - intervalTime: this.intervalTime, - successiveFailureThreshold: this.successiveFailureThreshold, - }; - } -} diff --git a/packages/data-access/src/mock-data-access.ts b/packages/data-access/src/mock-data-access.ts new file mode 100644 index 0000000000..55b6e0d4fe --- /dev/null +++ b/packages/data-access/src/mock-data-access.ts @@ -0,0 +1,36 @@ +import { DataAccessTypes, StorageTypes } from '@requestnetwork/types'; +import { DataAccessWrite } from './data-write'; +import { DataAccessRead } from './data-read'; +import { PendingStore } from './pending-store'; +import { InMemoryIndexer } from './in-memory-indexer'; +import { CombinedDataAccess } from './combined-data-access'; + +/** + * Mock Data access that bypasses the initialization. + * This class is meant to be used with HttpRequestNetwork and useMockStorage=true. + * Data-access initialization is asynchronous and this class is a hack to avoid having an asynchronous operation in the HttpRequestNetwork constructor. 
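+ *
+ * A rough usage sketch (illustrative only; `MockStorage` stands for any StorageTypes.IStorage
+ * implementation, and `channelId` / `topic` are placeholder values, not part of this patch):
+ *   const dataAccess = new MockDataAccess(new MockStorage());
+ *   // no initialize() call needed: this mock bypasses initialization
+ *   await dataAccess.persistTransaction({ data: '{"foo":"bar"}' }, channelId, [topic]);
+ *   const txs = await dataAccess.getTransactionsByChannelId(channelId);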
+ */ +export class MockDataAccess extends CombinedDataAccess { + private readonly dataIndex: InMemoryIndexer; + + constructor(storage: StorageTypes.IStorage) { + const dataIndex = new InMemoryIndexer(storage); + const pendingStore = new PendingStore(); + + super( + new DataAccessRead(dataIndex, { network: 'mock', pendingStore }), + new DataAccessWrite(storage, pendingStore), + ); + this.dataIndex = dataIndex; + } + + persistTransaction = async ( + transactionData: DataAccessTypes.ITransaction, + channelId: string, + topics?: string[] | undefined, + ): Promise => { + const result = await this.writer.persistTransaction(transactionData, channelId, topics); + this.dataIndex.addIndex(channelId, topics || [], result.meta.transactionStorageLocation); + return result; + }; +} diff --git a/packages/data-access/src/pending-store.ts b/packages/data-access/src/pending-store.ts new file mode 100644 index 0000000000..d08c8a61a3 --- /dev/null +++ b/packages/data-access/src/pending-store.ts @@ -0,0 +1,32 @@ +import { DataAccessTypes } from '@requestnetwork/types'; + +/** + * A simple in-memory store to share state between DataReader and DataWriter + * Useful to retrieve a transaction that was just emitted but is not confirmed yet + **/ +export class PendingStore implements DataAccessTypes.IPendingStore { + private pending = new Map(); + + /** Gets a pending tx */ + public get(channelId: string): DataAccessTypes.PendingItem | undefined { + return this.pending.get(channelId); + } + + public findByTopics(topics: string[]): (DataAccessTypes.PendingItem & { channelId: string })[] { + const results: (DataAccessTypes.PendingItem & { channelId: string })[] = []; + for (const [channelId, pendingItem] of this.pending.entries()) { + if (topics.find((topic) => pendingItem.topics.includes(topic))) { + results.push({ channelId, ...pendingItem }); + } + } + return results; + } + + public add(channelId: string, pendingItem: DataAccessTypes.PendingItem): void { + this.pending.set(channelId, pendingItem); + } + + public remove(channelId: string): void { + this.pending.delete(channelId); + } +} diff --git a/packages/data-access/src/transaction-index/index.ts b/packages/data-access/src/transaction-index/index.ts deleted file mode 100644 index f2a99778cf..0000000000 --- a/packages/data-access/src/transaction-index/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -import TransactionIndex from './transaction-index'; - -export * from './location-by-topic'; -export * from './timestamp-by-location'; - -export default TransactionIndex; diff --git a/packages/data-access/src/transaction-index/location-by-topic.ts b/packages/data-access/src/transaction-index/location-by-topic.ts deleted file mode 100644 index c84cc6d84d..0000000000 --- a/packages/data-access/src/transaction-index/location-by-topic.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { DataAccessTypes } from '@requestnetwork/types'; - -import * as Keyv from 'keyv'; -import { flatten2DimensionsArray, unique } from '@requestnetwork/utils'; - -// Serialize function used for keyv to serialize a Set data structure into a string -// There is no way to directly stringify a Set, we need to convert it to an array before -// Typescript error from keyv: deserialized's type should be { value: Set, expires: number } -// but it's inferred as a Set -const serializeSet = (deserialized: any): string => JSON.stringify(Array.from(deserialized.value)); - -// Deserialize function used for keyv to deserialize a string into a Set data structure -// Typescript error from keyv: function return's type should be { 
value: Set, expires: number } -// but it's inferred as a Set -const deserializeSet = (serialized: string): any => { - // We use JSON.parse to convert the string into an array before converting it to a Set - const set: Set = new Set(JSON.parse(serialized)); - return { value: set }; -}; - -/** - * Helps store and retrieve channelIds by topic, and transaction location per channelId - */ -export default class LocationByTopicTransactionIndex { - /** - * Storage location by channel id - * maps channelId => [storageLocation] - * This attribute is public for mocking purpose - */ - public storageLocationByChannelId: Keyv>; - - /** - * Channel Ids by topic - * maps topic => [channelId] - * This attribute is public for mocking purpose - */ - public channelIdByTopics: Keyv>; - - /** - * Constructor for LocationByTopicIndex - * @param store a Keyv store to persist the index to. - */ - constructor(store?: Keyv.Store) { - this.storageLocationByChannelId = new Keyv>({ - deserialize: deserializeSet, - namespace: 'storageLocationByChannelId', - serialize: serializeSet, - store, - }); - this.channelIdByTopics = new Keyv>({ - deserialize: deserializeSet, - namespace: 'channelIdByTopics', - serialize: serializeSet, - store, - }); - } - - /** - * Function to push location indexed with block topics - * - * @param storageLocation storage location to index - * @param blockHeader header of the block - */ - public async pushStorageLocationIndexedWithBlockTopics( - storageLocation: string, - blockHeader: DataAccessTypes.IBlockHeader, - ): Promise { - // index the new block with the channel ids - for (const id of Object.keys(blockHeader.channelIds)) { - const existingLocationIds: Set = - (await this.storageLocationByChannelId.get(id)) || new Set([]); - await this.storageLocationByChannelId.set(id, existingLocationIds.add(storageLocation)); - } - - // index channel ids by the topics - for (const id of Object.keys(blockHeader.topics)) { - for (const topic of blockHeader.topics[id]) { - const existingChannelIds: Set = - (await this.channelIdByTopics.get(topic)) || new Set([]); - await this.channelIdByTopics.set(topic, existingChannelIds.add(id)); - } - } - } - - /** - * Function to get the channel ids from a topic - * - * @param topic topic to retrieve the dataId - * @returns list of the channel ids connected to the topic - */ - public async getChannelIdsFromTopic(topic: string): Promise { - return Array.from((await this.channelIdByTopics.get(topic)) || []); - } - - /** - * Function to get the channel ids from multiple topics - * - * @param topics topics to retrieve the dataIds - * @returns list of the channel ids linked to the topics - */ - public async getChannelIdsFromMultipleTopics(topics: string[]): Promise { - const channelIdsPromises = topics.map(async (topic) => { - return Array.from((await this.channelIdByTopics.get(topic)) || []); - }); - const channelIds = await Promise.all(channelIdsPromises); - - // flatten the array of array and remove the duplicates - return unique(flatten2DimensionsArray(channelIds)).uniqueItems; - } - /** - * Function to get storage locations from a channel id - * - * @param channelId channel id to retrieve the storage location - * @returns list of the location connected to the channel id - */ - public async getStorageLocationsFromChannelId(channelId: string): Promise { - return Array.from((await this.storageLocationByChannelId.get(channelId)) || []); - } -} diff --git a/packages/data-access/src/transaction-index/timestamp-by-location.ts 
b/packages/data-access/src/transaction-index/timestamp-by-location.ts deleted file mode 100644 index 47a991cf8d..0000000000 --- a/packages/data-access/src/transaction-index/timestamp-by-location.ts +++ /dev/null @@ -1,112 +0,0 @@ -import { DataAccessTypes } from '@requestnetwork/types'; - -import * as Keyv from 'keyv'; - -/** - * Class used to store the block's timestamp indexed by location - */ -export default class TimestampByLocationTransactionIndex { - /** - * timestamp by location - * maps dataId => timestamp - */ - private timestampByLocation: Keyv; - - /** - * TimestampByLocationTransactionIndex constructor - * @param store a Keyv store to persist the index to - */ - public constructor(store?: Keyv.Store) { - this.timestampByLocation = new Keyv({ - namespace: 'timestampByLocation', - store, - }); - } - - /** - * Function to push timestamp indexed by location - * - * @param dataId dataId of the block - * @param timestamp timestamp of the block - */ - public async pushTimestampByLocation(dataId: string, timestamp: number): Promise { - if (!(await this.timestampByLocation.get(dataId))) { - await this.timestampByLocation.set(dataId, timestamp); - } - const lastTransactionTimestamp = await this.getLastTransactionTimestamp(); - if (!lastTransactionTimestamp || timestamp > lastTransactionTimestamp) { - await this.setLastTransactionTimestamp(timestamp); - } - } - - /** - * Removes timestamp indexed by location - * - * @param dataId dataId of the block - */ - public async removeIndexedDataId(dataId: string): Promise { - await this.timestampByLocation.delete(dataId); - } - - /** - * Function to update timestamp indexed by location - * - * @param dataId dataId of the block - * @param timestamp timestamp of the block - */ - public async updateTimestampByLocation(dataId: string, timestamp: number): Promise { - await this.timestampByLocation.set(dataId, timestamp); - - const lastTransactionTimestamp = await this.getLastTransactionTimestamp(); - if (!lastTransactionTimestamp || timestamp > lastTransactionTimestamp) { - await this.setLastTransactionTimestamp(timestamp); - } - } - - /** - * Function to get timestamp from location - * - * @param dataId location to get the timestamp from - * @returns timestamp of the location, null if not found - */ - public async getTimestampFromLocation(dataId: string): Promise { - const timestamp = await this.timestampByLocation.get(dataId); - return timestamp !== undefined ? 
timestamp : null; - } - - /** - * Function to get timestamp from location - * - * @param dataId location to get the timestamp from - * @returns timestamp of the location, null if not found - */ - public async isDataInBoundaries( - dataId: string, - boundaries?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - const timestamp = await this.timestampByLocation.get(dataId); - if (!timestamp) { - throw Error(`Unknown timestamp for the dataId ${dataId}`); - } - - return ( - !boundaries || - ((boundaries.from === undefined || boundaries.from <= timestamp) && - (boundaries.to === undefined || boundaries.to >= timestamp)) - ); - } - - /** - * the timestamp of the latest transaction - */ - public async getLastTransactionTimestamp(): Promise { - return (await this.timestampByLocation.get('last_transaction_timestamp')) || null; - } - - /** - * the timestamp of the latest transaction - */ - private async setLastTransactionTimestamp(value: number): Promise { - await this.timestampByLocation.set('last_transaction_timestamp', value); - } -} diff --git a/packages/data-access/src/transaction-index/transaction-index.ts b/packages/data-access/src/transaction-index/transaction-index.ts deleted file mode 100644 index c23b01a8c3..0000000000 --- a/packages/data-access/src/transaction-index/transaction-index.ts +++ /dev/null @@ -1,239 +0,0 @@ -import { DataAccessTypes } from '@requestnetwork/types'; - -import * as Bluebird from 'bluebird'; -import * as Keyv from 'keyv'; - -import LocationByTopic from './location-by-topic'; -import TimestampByLocation from './timestamp-by-location'; - -/** - * An in-memory implementation of the transaction index. - */ - -export default class TransactionIndex implements DataAccessTypes.ITransactionIndex { - // DataIds (Id of data on storage layer) indexed by transaction topic - // Will be used to get the data from storage with the transaction topic - private locationByTopic?: LocationByTopic; - - // Timestamp of the dataIds - // Will be used to get the data from timestamp boundaries - private timestampByLocation: TimestampByLocation; - - private indexedLocation: Keyv; - - /** - * Constructor of TransactionIndex - * @param store a Keyv store to persist the index - */ - constructor(store?: Keyv.Store) { - this.timestampByLocation = new TimestampByLocation(store); - this.locationByTopic = new LocationByTopic(store); - - this.indexedLocation = new Keyv({ - namespace: 'indexedLocation', - store, - }); - } - - // eslint-disable-next-line no-empty, @typescript-eslint/no-empty-function - public async initialize(): Promise {} - - /** - * Get the last indexed timestamp - */ - public getLastTransactionTimestamp(): Promise { - return this.timestampByLocation.getLastTransactionTimestamp(); - } - - /** - * Adds a transaction to the index, for indexing by channel, topic and timestamp - * - * @param dataId the dataId to index - * @param header the headers of the block (containing channels and topics) - * @param timestamp the timestamp of the transaction - */ - public async addTransaction( - dataId: string, - header: DataAccessTypes.IBlockHeader, - timestamp: number, - ): Promise { - if (!this.locationByTopic) { - throw new Error('TransactionIndex must be initialized'); - } - // topic the dataId with block topic - await this.locationByTopic.pushStorageLocationIndexedWithBlockTopics(dataId, header); - - // add the timestamp in the index - await this.timestampByLocation.pushTimestampByLocation(dataId, timestamp); - - await this.updateIndexedLocation(dataId); - } - - /** - * Removes a transaction 
from the index - * - * @param dataId the dataId to remove - */ - public async removeTransaction(dataId: string): Promise { - if (!this.locationByTopic) { - throw new Error('TransactionIndex must be initialized'); - } - - // remove the timestamp in the index - await this.timestampByLocation.removeIndexedDataId(dataId); - } - - /** - * Update timestamp for a dataId - * - * @param dataId the dataId to index - * @param timestamp the timestamp of the transaction - */ - public async updateTimestamp(dataId: string, timestamp: number): Promise { - if (!this.timestampByLocation) { - throw new Error('TransactionIndex must be initialized'); - } - // update the timestamp in the index - await this.timestampByLocation.updateTimestampByLocation(dataId, timestamp); - } - - /** - * Get a list of transactions indexed by channel id - * @param channelId channel id to retrieve the transaction from - * @param timestampBoundaries timestamp boundaries of the transactions search - * @returns list of location of a channel - */ - public async getStorageLocationList( - channelId: string, - timestampBoundaries?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - if (!this.locationByTopic) { - throw new Error('TransactionIndex must be initialized'); - } - - // get transaction locations for the given channel - let storageLocationList = await this.locationByTopic.getStorageLocationsFromChannelId( - channelId, - ); - - storageLocationList = await Bluebird.filter(storageLocationList, async (dataId: string) => { - // if the dataId has not been suppressed - const exist: boolean = - (await this.timestampByLocation.getTimestampFromLocation(dataId)) !== null; - // if boundaries are passed, only return locations of transaction within these boundaries - const inTimeBoundaries: boolean = - !timestampBoundaries || - (await this.timestampByLocation.isDataInBoundaries(dataId, timestampBoundaries)); - return exist && inTimeBoundaries; - }); - - return storageLocationList; - } - - /** - * Get a list of channels indexed by topic - * @param topic topic to retrieve the channelIds from - * @param timestampBoundaries timestamp boundaries of the transactions search - * @returns list of location of the channels indexed by the topic - */ - public async getChannelIdsForTopic( - topic: string, - timestampBoundaries?: DataAccessTypes.ITimestampBoundaries | undefined, - ): Promise { - if (!this.locationByTopic) { - throw new Error('TransactionIndex must be initialized'); - } - - // get channels for given topic - const channelIds = await this.locationByTopic.getChannelIdsFromTopic(topic); - - // if boundaries are passed, only return channelIds with transactions within these boundaries - if (timestampBoundaries) { - const result = []; - // check for each channel if one of its transactions occurred during the given boundaries - for (const channelId of channelIds) { - const txLocations = await this.locationByTopic.getStorageLocationsFromChannelId(channelId); - for (const txLocation of txLocations) { - const isInBoundaries = await this.timestampByLocation.isDataInBoundaries( - txLocation, - timestampBoundaries, - ); - if (isInBoundaries) { - result.push(channelId); - break; - } - } - } - return result; - } else { - return channelIds; - } - } - - /** - * Get a list of channels indexed by topics - * @param topics topics to retrieve the channelIds from - * @param timestampBoundaries timestamp boundaries of the transactions search - * @returns list of location of the channels indexed by the topics - */ - public async getChannelIdsForMultipleTopics( - 
topics: string[], - timestampBoundaries?: DataAccessTypes.ITimestampBoundaries | undefined, - ): Promise { - if (!this.locationByTopic) { - throw new Error('TransactionIndex must be initialized'); - } - - // get channels for given topics - const channelIds = await this.locationByTopic.getChannelIdsFromMultipleTopics(topics); - - // if boundaries are passed, only return channelIds with transactions within these boundaries - if (timestampBoundaries) { - const result = []; - // check for each channel if one of its transactions occurred inside the given boundaries - for (const channelId of channelIds) { - const txLocations = await this.locationByTopic.getStorageLocationsFromChannelId(channelId); - for (const txLocation of txLocations) { - const isInBoundaries = await this.timestampByLocation.isDataInBoundaries( - txLocation, - timestampBoundaries, - ); - if (isInBoundaries) { - result.push(channelId); - break; - } - } - } - return result; - } else { - return channelIds; - } - } - - /** - * the list of indexed locations - */ - public async getIndexedLocations(): Promise { - const listDataIds: string[] | undefined = await this.indexedLocation.get('list'); - return listDataIds || []; - } - - /** - * Update the list of data ids stored - * - * @param dataId data id to add to the list - * @returns - */ - private async updateIndexedLocation(dataId: string): Promise { - let listDataIds: string[] | undefined = await this.indexedLocation.get('list'); - if (!listDataIds) { - listDataIds = []; - } - - // push it if not already done - if (!listDataIds.includes(dataId)) { - listDataIds.push(dataId); - await this.indexedLocation.set('list', listDataIds); - } - } -} diff --git a/packages/data-access/src/types.ts b/packages/data-access/src/types.ts new file mode 100644 index 0000000000..2bef486e18 --- /dev/null +++ b/packages/data-access/src/types.ts @@ -0,0 +1,7 @@ +import { DataAccessTypes, LogTypes } from '@requestnetwork/types'; + +export type DataAccessBaseOptions = { + network: string; + logger?: LogTypes.ILogger; + pendingStore?: DataAccessTypes.IPendingStore; +}; diff --git a/packages/data-access/test/data-access.test.ts b/packages/data-access/test/data-access.test.ts deleted file mode 100644 index 9f58ed0309..0000000000 --- a/packages/data-access/test/data-access.test.ts +++ /dev/null @@ -1,855 +0,0 @@ -import { EventEmitter } from 'events'; - -import { DataAccessTypes, StorageTypes } from '@requestnetwork/types'; - -import RequestDataAccessBlock from '../src/block'; -import DataAccess from '../src/data-access'; -import TransactionIndex from '../src/transaction-index'; - -// We use this function to flush the call stack -// If we don't use this function, the fake timer will be increased before the interval function being called -const flushCallStack = (): Promise => { - return new Promise((resolve): any => { - setTimeout(resolve, 0); - jest.advanceTimersByTime(1); - }); -}; - -const transactionDataMock1String = JSON.stringify({ - attribut1: 'plop', - attribut2: 'value', -}); -const transactionDataMock2String = JSON.stringify({ - attribut1: 'foo', - attribut2: 'bar', -}); -const transactionDataMock3String = JSON.stringify({ - attribut1: 'jean', - attribut2: 'bon', -}); - -const transactionMock1: DataAccessTypes.ITransaction = { - data: transactionDataMock1String, -}; -const transactionMock2: DataAccessTypes.ITransaction = { - data: transactionDataMock2String, -}; -const transactionMock3: DataAccessTypes.ITransaction = { - data: transactionDataMock3String, -}; - -const arbitraryId1 = 
'011111111111111111111111111111111111111111111111111111111111111111'; -const arbitraryId2 = '012222222222222222222222222222222222222222222222222222222222222222'; - -const arbitraryTopic1 = '01aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; -const arbitraryTopic2 = '01cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'; - -const emptyblock = RequestDataAccessBlock.createEmptyBlock(); -const blockWith1tx = RequestDataAccessBlock.pushTransaction( - emptyblock, - transactionMock1, - arbitraryId1, - [arbitraryTopic1, arbitraryTopic2], -); -const blockWith2tx = RequestDataAccessBlock.pushTransaction( - blockWith1tx, - transactionMock2, - arbitraryId2, - [arbitraryTopic2], -); - -const blockWith1txBis = RequestDataAccessBlock.pushTransaction( - emptyblock, - transactionMock3, - arbitraryId1, - [arbitraryTopic1], -); - -const dataIdBlock2tx = 'dataIdBlock2tx'; - -const getDataResult: StorageTypes.IEntriesWithLastTimestamp = { - entries: [ - { - content: JSON.stringify(blockWith2tx), - id: dataIdBlock2tx, - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 10 }, - }, - ], - lastTimestamp: 0, -}; - -const dataIdBlock1txBis = 'dataIdBlock1txBis'; -const getIgnoredDataResult: StorageTypes.IEntry[] = [ - { - content: JSON.stringify(blockWith1txBis), - id: dataIdBlock1txBis, - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 1 }, - }, -]; - -const appendResult: any = { - content: '', - id: dataIdBlock2tx, - meta: { - state: StorageTypes.ContentState.PENDING, - timestamp: 1, - }, -}; - -const appendResultConfirmed = { - content: '', - id: dataIdBlock2tx, - meta: { - state: StorageTypes.ContentState.CONFIRMED, - timestamp: 1, - }, -}; - -const emptyDataResult: StorageTypes.IEntriesWithLastTimestamp = { - entries: [], - lastTimestamp: 0, -}; - -const defaultTestData: Promise = - Promise.resolve(getDataResult); - -const defaultFakeStorage: StorageTypes.IStorage = { - _getStatus: jest.fn((): any => ({ - fake: 'status', - })), - append: jest.fn((): any => { - const appendResultWithEvent = Object.assign(new EventEmitter(), appendResult); - setTimeout( - () => { - appendResultWithEvent.emit('confirmed', appendResultConfirmed); - }, - // eslint-disable-next-line no-magic-numbers - 10, - ); - return appendResultWithEvent; - }), - getData: (): Promise => defaultTestData, - getIgnoredData: async (): Promise => [], - initialize: jest.fn(), - read: (param: string): any => { - const dataIdBlock2txFake: any = { - meta: {}, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return result[param]; - }, - readMany(params: string[]): Promise { - return Promise.all(params.map(this.read)); - }, -}; - -/* eslint-disable no-magic-numbers */ -/* eslint-disable @typescript-eslint/no-unused-expressions */ -describe('data-access', () => { - let testContext: any; - - beforeEach(() => { - testContext = {}; - }); - - beforeEach(async () => { - jest.useFakeTimers('modern'); - }); - - // afterEach(async () => { - // sinon.restore(); - // }); - - describe('constructor', () => { - it('cannot initialize with getData without result', async () => { - const customFakeStorage = { - ...defaultFakeStorage, - getData: (): Promise => - Promise.resolve({ - lastTimestamp: 0, - } as any), - }; - - const dataAccess = new DataAccess(customFakeStorage); - - await expect(dataAccess.initialize()).rejects.toThrowError( - 'data from storage do not follow the standard', - ); - }); - - it('cannot initialize twice', async () => { - const dataAccess = new 
DataAccess(defaultFakeStorage); - await dataAccess.initialize(); - - await expect(dataAccess.initialize()).rejects.toThrowError('already initialized'); - }); - - it('cannot getChannelsByTopic if not initialized', async () => { - const fakeStorage = { - ...defaultFakeStorage, - read: (param: string): any => { - const dataIdBlock2txFake: StorageTypes.IEntry = { - content: JSON.stringify(blockWith2tx), - id: '1', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 1 }, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return result[param]; - }, - }; - - const dataAccess = new DataAccess(fakeStorage); - - await expect(dataAccess.getChannelsByTopic(arbitraryTopic1)).rejects.toThrowError( - 'DataAccess must be initialized', - ); - }); - }); - - describe('getTransactionsByChannelId', () => { - let dataAccess: any; - - beforeEach(async () => { - const fakeStorage = { - ...defaultFakeStorage, - read: (param: string): any => { - const dataIdBlock2txFake: StorageTypes.IEntry = { - content: JSON.stringify(blockWith2tx), - id: '1', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 10 }, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return result[param]; - }, - }; - - dataAccess = new DataAccess(fakeStorage); - await dataAccess.initialize(); - }); - - it('can getTransactionsByChannelId() with boundaries', async () => { - expect( - await dataAccess.getTransactionsByChannelId(arbitraryId1, { from: 9, to: 100 }), - ).toMatchObject({ - meta: { - storageMeta: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - }, - ], - transactionsStorageLocation: [dataIdBlock2tx], - }, - result: { - transactions: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - transaction: transactionMock1, - }, - ], - }, - }); - }); - - it('can getTransactionsByChannelId() with boundaries too restrictive', async () => { - expect( - await dataAccess.getTransactionsByChannelId(arbitraryId1, { from: 11, to: 100 }), - ).toMatchObject({ - meta: { - storageMeta: [], - transactionsStorageLocation: [], - }, - result: { transactions: [] }, - }); - }); - }); - - describe('getChannelByTopic', () => { - let dataAccess: any; - - beforeEach(async () => { - const fakeStorage = { - ...defaultFakeStorage, - read: (param: string): any => { - const dataIdBlock2txFake: StorageTypes.IEntry = { - content: JSON.stringify(blockWith2tx), - id: '1', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 10 }, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return result[param]; - }, - }; - - dataAccess = new DataAccess(fakeStorage); - await dataAccess.initialize(); - }); - - it('can getChannelByTopic() with boundaries', async () => { - expect( - await dataAccess.getChannelsByTopic(arbitraryTopic1, { from: 9, to: 100 }), - ).toMatchObject({ - meta: { - storageMeta: { - [arbitraryId1]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - }, - ], - }, - transactionsStorageLocation: { [arbitraryId1]: [dataIdBlock2tx] }, - }, - result: { - transactions: { - [arbitraryId1]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - transaction: transactionMock1, - }, - ], - }, - }, - }); - }); - - it('can getChannelByTopic() with boundaries too restrictive', async () => { - expect( - await dataAccess.getChannelsByTopic(arbitraryTopic1, { from: 11, to: 100 }), - ).toMatchObject({ - meta: { - storageMeta: {}, - transactionsStorageLocation: {}, - }, - result: { 
transactions: {} }, - }); - }); - }); - - describe('getChannelsByMultipleTopics', () => { - let dataAccess: any; - - beforeEach(async () => { - const fakeStorage = { - ...defaultFakeStorage, - read: (param: string): any => { - const dataIdBlock2txFake: StorageTypes.IEntry = { - content: JSON.stringify(blockWith2tx), - id: '1', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 10 }, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return result[param]; - }, - }; - - dataAccess = new DataAccess(fakeStorage); - await dataAccess.initialize(); - }); - - it('can getChannelsByMultipleTopics() with boundaries', async () => { - const ret = await dataAccess.getChannelsByMultipleTopics([arbitraryTopic1, arbitraryTopic2], { - from: 9, - to: 100, - }); - - expect(ret.meta).toMatchObject({ - storageMeta: { - [arbitraryId1]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - }, - ], - [arbitraryId2]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - }, - ], - }, - transactionsStorageLocation: { - [arbitraryId1]: [dataIdBlock2tx], - [arbitraryId2]: [dataIdBlock2tx], - }, - }); - expect(ret.result).toMatchObject({ - transactions: { - [arbitraryId1]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - transaction: transactionMock1, - }, - ], - [arbitraryId2]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 10, - transaction: transactionMock2, - }, - ], - }, - }); - }); - - it('can getChannelByTopic() with boundaries too restrictive', async () => { - expect( - await dataAccess.getChannelsByMultipleTopics([arbitraryTopic1, arbitraryTopic2], { - from: 11, - to: 100, - }), - ).toMatchObject({ - meta: { - storageMeta: {}, - transactionsStorageLocation: {}, - }, - result: { transactions: {} }, - }); - }); - }); - - describe('persistTransaction', () => { - it('can persistTransaction()', async () => { - const dataAccess = new DataAccess(defaultFakeStorage); - await dataAccess.initialize(); - - const errFunction = jest.fn(); - const result = await dataAccess.persistTransaction(transactionMock1, arbitraryId1, [ - arbitraryTopic1, - ]); - result.on('error', errFunction).on('confirmed', (resultConfirmed1) => { - expect(resultConfirmed1).toMatchObject({ - meta: { - storageMeta: { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: 1, - }, - topics: [arbitraryTopic1], - transactionStorageLocation: dataIdBlock2tx, - }, - result: {}, - }); - }); - - jest.advanceTimersByTime(11); - - expect(errFunction).not.toHaveBeenCalled(); - /* eslint-disable */ - /* eslint-disable quote-props */ - expect(defaultFakeStorage.append).toHaveBeenCalledWith( - JSON.stringify({ - header: { - channelIds: { - [arbitraryId1]: [0], - }, - topics: { - [arbitraryId1]: [arbitraryTopic1], - }, - version: '0.1.0', - }, - transactions: [ - { - data: '{"attribut1":"plop","attribut2":"value"}', - }, - ], - }), - ); - expect(result.meta).toMatchObject({ - storageMeta: { - state: DataAccessTypes.TransactionState.PENDING, - timestamp: 1, - }, - topics: [arbitraryTopic1], - transactionStorageLocation: dataIdBlock2tx, - }); - }); - - it('cannot persistTransaction() if not initialized', async () => { - const dataAccess = new DataAccess(defaultFakeStorage); - - await expect( - dataAccess.persistTransaction(transactionMock1, arbitraryId1, [arbitraryTopic1]), - ).rejects.toThrowError('DataAccess must be initialized'); - }); - - it('cannot persistTransaction() if a topic is not well formatted', async () => { 
- const dataAccess = new DataAccess(defaultFakeStorage); - const notFormattedTopic = 'This topic is not formatted'; - await dataAccess.initialize(); - - await expect( - dataAccess.persistTransaction(transactionMock1, arbitraryId1, [ - notFormattedTopic, - arbitraryTopic2, - ]), - ).rejects.toThrowError( - `The following topics are not well formatted: ["This topic is not formatted"]`, - ); - }); - - it('cannot persistTransaction() and emit error if confirmation failed', async () => { - const mockStorageEmittingError: StorageTypes.IStorage = { - _getStatus: jest.fn(), - append: jest.fn((): any => { - const appendResultWithEvent = Object.assign(new EventEmitter(), appendResult); - setTimeout( - () => { - appendResultWithEvent.emit('error', 'error for test purpose'); - }, - // eslint-disable-next-line no-magic-numbers - 10, - ); - return appendResultWithEvent; - }), - getData: (): Promise => defaultTestData, - getIgnoredData: async (): Promise => [], - initialize: jest.fn(), - read: (param: string): any => { - const dataIdBlock2txFake: any = { - meta: {}, - }; - const resultRead: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return resultRead[param]; - }, - readMany(params: string[]): Promise { - return Promise.all(params.map(testContext.read)); - }, - }; - - const dataAccess = new DataAccess(mockStorageEmittingError); - await dataAccess.initialize(); - - const result = await dataAccess.persistTransaction(transactionMock1, arbitraryId1, [ - arbitraryTopic1, - ]); - result.on('error', (error) => { - // result Confirmed wrong - expect(error).toEqual('error for test purpose'); - }); - jest.advanceTimersByTime(11); - - // result wrong - expect(result.meta).toMatchObject({ - storageMeta: { - state: DataAccessTypes.TransactionState.PENDING, - timestamp: 1, - }, - topics: [arbitraryTopic1], - transactionStorageLocation: dataIdBlock2tx, - }); - }); - }); - - describe('_getStatus', () => { - let dataAccess: any; - - beforeEach(async () => { - const fakeStorage = { - ...defaultFakeStorage, - read: (param: string): any => { - const dataIdBlock2txFake: StorageTypes.IEntry = { - content: JSON.stringify(blockWith2tx), - id: '1', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 10 }, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return result[param]; - }, - }; - - dataAccess = new DataAccess(fakeStorage); - await dataAccess.initialize(); - }); - - it('can _getStatus()', async () => { - expect(await dataAccess._getStatus()).toMatchObject({ - filesIgnored: { count: 0, list: undefined }, - filesRetrieved: { count: 1, lastTimestamp: 10, list: undefined }, - lastSynchronizationTimestamp: 0, - storage: { fake: 'status' }, - synchronizationConfig: { - intervalTime: 10000, - successiveFailureThreshold: 5, - }, - }); - }); - it('can _getStatus() with details', async () => { - expect(await dataAccess._getStatus(true)).toMatchObject({ - filesIgnored: { count: 0, list: {} }, - filesRetrieved: { count: 1, lastTimestamp: 10, list: ['dataIdBlock2tx'] }, - lastSynchronizationTimestamp: 0, - storage: { fake: 'status' }, - synchronizationConfig: { - intervalTime: 10000, - successiveFailureThreshold: 5, - }, - }); - }); - }); - - it('synchronizeNewDataId() should throw an error if not initialized', async () => { - const dataAccess = new DataAccess(defaultFakeStorage); - - await expect(dataAccess.synchronizeNewDataIds()).rejects.toThrowError( - 'DataAccess must be initialized', - ); - }); - - it('synchronizeNewDataId() should ignore data not following the block standard', async 
() => { - const blockWithoutHeader = { - transactions: [{ data: '' }], - }; - - const testDataNotJsonData: Promise = Promise.resolve({ - entries: [ - { - content: JSON.stringify(blockWithoutHeader), - id: 'whatever', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 10 }, - }, - ], - lastTimestamp: 0, - }); - - const fakeStorageWithNotJsonData: StorageTypes.IStorage = { - append: jest.fn(), - getData: (): Promise => testDataNotJsonData, - getIgnoredData: async (): Promise => [], - _getStatus: jest.fn(), - initialize: jest.fn(), - read: jest.fn(), - readMany: jest.fn(), - }; - - const transactionIndex = new TransactionIndex(); - const dataAccess = new DataAccess(fakeStorageWithNotJsonData, { transactionIndex }); - const spy = jest.spyOn(transactionIndex, 'addTransaction').mockImplementation(); - await dataAccess.initialize(); - await dataAccess.synchronizeNewDataIds(); - - expect(spy).not.toHaveBeenCalled(); - }); - - it('allows to get new transactions after synchronizeNewDataId() call', async () => { - // We create a fakeStorage where getData() called at initialization returns empty structure - const fakeStorage = { - ...defaultFakeStorage, - getData: async (options: any): Promise => { - if (!options) { - return Promise.resolve(emptyDataResult); - } - return getDataResult; - }, - getIgnoredData: async (): Promise => getIgnoredDataResult, - read: (param: string): any => { - const dataIdBlock2txFake: StorageTypes.IEntry = { - content: JSON.stringify(blockWith2tx), - id: '1', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 1 }, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - dataIdBlock1txBis: getIgnoredDataResult[0], - }; - return result[param]; - }, - }; - - const dataAccess = new DataAccess(fakeStorage); - await dataAccess.initialize(); - - expect(await dataAccess.getChannelsByTopic(arbitraryTopic1)).toMatchObject({ - meta: { - storageMeta: {}, - transactionsStorageLocation: {}, - }, - result: { transactions: {} }, - }); - - // Transactions should be available after synchronization - await dataAccess.synchronizeNewDataIds(); - - // result with arbitraryTopic1 wrong - await expect(dataAccess.getChannelsByTopic(arbitraryTopic1)).resolves.toMatchObject({ - meta: { - storageMeta: { - [arbitraryId1]: [ - { state: StorageTypes.ContentState.CONFIRMED, timestamp: 1 }, - { state: StorageTypes.ContentState.CONFIRMED, timestamp: 1 }, - ], - }, - transactionsStorageLocation: { [arbitraryId1]: [dataIdBlock2tx, dataIdBlock1txBis] }, - }, - result: { - transactions: { - [arbitraryId1]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - transaction: transactionMock1, - timestamp: 1, - }, - { - state: DataAccessTypes.TransactionState.CONFIRMED, - transaction: transactionMock3, - timestamp: 1, - }, - ], - }, - }, - }); - // result with arbitraryTopic2 wrong - await expect(dataAccess.getChannelsByTopic(arbitraryTopic2)).resolves.toMatchObject({ - meta: { - storageMeta: { - [arbitraryId1]: [ - { state: DataAccessTypes.TransactionState.CONFIRMED, timestamp: 1 }, - { state: DataAccessTypes.TransactionState.CONFIRMED, timestamp: 1 }, - ], - [arbitraryId2]: [{ state: DataAccessTypes.TransactionState.CONFIRMED, timestamp: 1 }], - }, - transactionsStorageLocation: { - [arbitraryId1]: [dataIdBlock2tx, dataIdBlock1txBis], - [arbitraryId2]: [dataIdBlock2tx], - }, - }, - result: { - transactions: { - [arbitraryId1]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - transaction: transactionMock1, - timestamp: 1, - }, - { - state: 
DataAccessTypes.TransactionState.CONFIRMED, - transaction: transactionMock3, - timestamp: 1, - }, - ], - [arbitraryId2]: [ - { - state: DataAccessTypes.TransactionState.CONFIRMED, - transaction: transactionMock2, - timestamp: 1, - }, - ], - }, - }, - }); - }); - - it('startSynchronizationTimer() should throw an error if not initialized', async () => { - const fakeStorageSpied: StorageTypes.IStorage = { - append: jest.fn().mockReturnValue(appendResult), - getData: jest.fn(() => Promise.resolve({} as any)), - getIgnoredData: async (): Promise => [], - _getStatus: jest.fn(), - initialize: jest.fn(), - read: jest.fn(), - readMany: jest.fn(), - }; - const dataAccess = new DataAccess(fakeStorageSpied); - - expect(() => dataAccess.startAutoSynchronization()).toThrowError( - 'DataAccess must be initialized', - ); - }); - - it('allows to get new transactions automatically if startSynchronizationTimer() is called', async () => { - const fakeStorage = { - ...defaultFakeStorage, - read: (param: string): any => { - const dataIdBlock2txFake: StorageTypes.IEntry = { - content: JSON.stringify(blockWith2tx), - id: '1', - meta: { state: StorageTypes.ContentState.CONFIRMED, timestamp: 1 }, - }; - const result: any = { - dataIdBlock2tx: dataIdBlock2txFake, - }; - return result[param]; - }, - }; - - const dataAccess = new DataAccess(fakeStorage, { - synchronizationIntervalTime: 1000, - }); - dataAccess.synchronizeNewDataIds = jest.fn(); - await dataAccess.initialize(); - - expect(dataAccess.synchronizeNewDataIds).not.toHaveBeenCalled(); - - dataAccess.startAutoSynchronization(); - jest.advanceTimersByTime(1100); - await flushCallStack(); - - // Should have been called once after 1100ms - expect(dataAccess.synchronizeNewDataIds).toHaveBeenCalledTimes(1); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - - // Should have been called once after 2100ms - expect(dataAccess.synchronizeNewDataIds).toHaveBeenCalledTimes(2); - - await dataAccess.stopAutoSynchronization(); - jest.advanceTimersByTime(1000); - await flushCallStack(); - - // Not called anymore after stopAutoSynchronization() - expect(dataAccess.synchronizeNewDataIds).toHaveBeenCalledTimes(2); - }); - - it(`should not get twice the same data during synchronization`, async () => { - let lastTimestampReturnedByGetData: number = 0; - - const fakeStorageSpied: StorageTypes.IStorage = { - ...defaultFakeStorage, - getData: jest.fn( - (): Promise => - Promise.resolve({ - entries: [], - lastTimestamp: lastTimestampReturnedByGetData, - }), - ), - }; - - lastTimestampReturnedByGetData = 500; - const dataAccess = new DataAccess(fakeStorageSpied, { - synchronizationIntervalTime: 1000, - }); - await dataAccess.initialize(); - - // At initialization, getData is called with no time boundaries - expect(fakeStorageSpied.getData).toHaveBeenNthCalledWith(1, undefined); - - dataAccess.startAutoSynchronization(); - - // Mock Date.now to parse the value "to" of the time boundaries - Date.now = (): number => 1000000; - lastTimestampReturnedByGetData = 800; - jest.advanceTimersByTime(1100); - await flushCallStack(); - - expect(fakeStorageSpied.getData).toHaveBeenNthCalledWith(2, { from: 501, to: 1000 }); - - await dataAccess.stopAutoSynchronization(); - }); -}); diff --git a/packages/data-access/test/ignored-location.test.ts b/packages/data-access/test/ignored-location.test.ts deleted file mode 100644 index 394bd1bcee..0000000000 --- a/packages/data-access/test/ignored-location.test.ts +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint-disable 
@typescript-eslint/await-thenable */ -/* eslint-disable no-magic-numbers */ - -import IgnoredLocation from '../src/ignored-location'; - -const arbitraryDataId1 = 'dataId1'; -const arbitraryDataId2 = 'dataId2'; -const arbitraryReason = 'reason1'; -const arbitraryReason2 = 'reason2'; - -/* eslint-disable @typescript-eslint/no-unused-expressions */ -describe('IgnoredLocation', () => { - describe('pushReasonByLocation', () => { - it('can pushReasonByLocation()', async () => { - const ignoredLocation = new IgnoredLocation(); - await ignoredLocation.pushReasonByLocation(arbitraryDataId1, arbitraryReason); - - expect(await ignoredLocation.getReasonFromLocation(arbitraryDataId1)).toBe(arbitraryReason); - }); - }); - describe('removeReasonByLocation', () => { - it('can removeReasonByLocation()', async () => { - const ignoredLocation = new IgnoredLocation(); - await ignoredLocation.pushReasonByLocation(arbitraryDataId1, arbitraryReason); - await ignoredLocation.removeReasonByLocation(arbitraryDataId1); - - expect(await ignoredLocation.getReasonFromLocation(arbitraryDataId1)).toBeNull(); - }); - }); - - describe('getIgnoredLocations', () => { - it('can getIgnoredLocations()', async () => { - const ignoredLocation = new IgnoredLocation(); - await ignoredLocation.pushReasonByLocation(arbitraryDataId1, arbitraryReason); - await ignoredLocation.pushReasonByLocation(arbitraryDataId2, arbitraryReason2); - - expect(await ignoredLocation.getIgnoredLocations()).toEqual({ - [arbitraryDataId1]: arbitraryReason, - [arbitraryDataId2]: arbitraryReason2, - }); - }); - it('can getIgnoredLocations() if empty', async () => { - const ignoredLocation = new IgnoredLocation(); - - expect(await ignoredLocation.getIgnoredLocations()).toEqual({}); - }); - }); -}); diff --git a/packages/data-access/test/interval-timer.test.ts b/packages/data-access/test/interval-timer.test.ts deleted file mode 100644 index 6471e240bd..0000000000 --- a/packages/data-access/test/interval-timer.test.ts +++ /dev/null @@ -1,270 +0,0 @@ -import { LogTypes } from '@requestnetwork/types'; - -import IntervalTimer from '../src/interval-timer'; - -let intervalTimer: IntervalTimer; - -// We use this function to flush the call stack -// If we don't use this function, the fake timer will be increased before the interval function being called -const flushCallStack = (): Promise => { - return new Promise((resolve): any => { - setTimeout(resolve, 0); - jest.advanceTimersByTime(1); - }); -}; - -const emptyLogger = { - debug: (_string: string): void => {}, - error: (_string: string): void => {}, - info: (_string: string): void => {}, - warn: (_string: string): void => {}, -} as LogTypes.ILogger; - -let intervalFunctionWithErrorCount: number; - -// Mock to simulate cases where several interval function fail in a row -const intervalFunctionWithErrorMock = async (): Promise => { - intervalFunctionWithErrorCount++; - - switch (intervalFunctionWithErrorCount) { - case 1: - return; - case 2: - throw Error('Error 1'); - case 3: - throw Error('Error 2'); - case 4: - throw Error('Error 3'); - case 5: - throw Error('Error 4'); - case 6: - throw Error('Error 5'); - case 7: - return; - default: - return; - } -}; - -/* eslint-disable no-magic-numbers */ -/* eslint-disable no-empty,@typescript-eslint/no-empty-function */ -describe('interval-timer', () => { - beforeEach(async () => { - intervalTimer = new IntervalTimer(async (): Promise => {}, 1000, emptyLogger); - jest.useFakeTimers('modern'); - intervalFunctionWithErrorCount = 0; - }); - afterEach(() => { - 
jest.clearAllMocks(); - jest.clearAllTimers(); - }); - - it('should throw an error if started twice without stop() being called', async () => { - intervalTimer.start(); - expect(() => intervalTimer.start()).toThrowError('IntervalTimer already started'); - await intervalTimer.stop(); - }); - - it('should throw an error if stopped without start() being called', async () => { - await expect(async () => await intervalTimer.stop()).rejects.toEqual( - "Can't stop IntervalTimer if it has not been started", - ); - }); - - it('should periodically call the interval function provided when start() is called', async () => { - const callback = jest.fn(async () => {}); - - intervalTimer = new IntervalTimer(callback, 1000, emptyLogger); - intervalTimer.start(); - - expect(callback).toHaveBeenCalledTimes(0); - - jest.advanceTimersByTime(500); - expect(callback).toHaveBeenCalledTimes(0); - - jest.advanceTimersByTime(600); // 1100 - expect(callback).toHaveBeenCalledTimes(1); - - await flushCallStack(); - - jest.advanceTimersByTime(1000); // 2100 - expect(callback).toHaveBeenCalledTimes(2); - - await flushCallStack(); - - jest.advanceTimersByTime(1000); // 3100 - expect(callback).toHaveBeenCalledTimes(3); - - await flushCallStack(); - - jest.advanceTimersByTime(1000); // 4100 - expect(callback).toHaveBeenCalledTimes(4); - - await flushCallStack(); - - jest.advanceTimersByTime(1000); // 5100 - expect(callback).toHaveBeenCalledTimes(5); - }); - - it('should stop calling the interval function when stop() is called', async () => { - const callback = jest.fn(); - - intervalTimer = new IntervalTimer(callback, 1000, emptyLogger); - intervalTimer.start(); - expect(callback).toHaveBeenCalledTimes(0); - - // for fake timers to work with native Promise, we have to wrap the timer in a Promise, see: - // https://github.com/facebook/jest/issues/7151#issuecomment-622134853 - await Promise.resolve().then(() => jest.advanceTimersByTime(1100)); - expect(callback).toHaveBeenCalledTimes(1); - - await intervalTimer.stop(); - await Promise.resolve().then(() => jest.advanceTimersByTime(1000)); // 2100 - expect(callback).toHaveBeenCalledTimes(1); - }); - - it('allows to restart the periodical call of the interval function', async () => { - const callback = jest.fn(); - - intervalTimer = new IntervalTimer(callback, 1000, emptyLogger); - intervalTimer.start(); - expect(callback).toHaveBeenCalledTimes(0); - - // for fake timers to work with native Promise, we have to wrap the timer in a Promise, see: - // https://github.com/facebook/jest/issues/7151#issuecomment-622134853 - await Promise.resolve().then(() => jest.advanceTimersByTime(1100)); - expect(callback).toHaveBeenCalledTimes(1); - - await intervalTimer.stop(); - await Promise.resolve().then(() => jest.advanceTimersByTime(1000)); // 2100 - expect(callback).toHaveBeenCalledTimes(1); - - intervalTimer.start(); - await Promise.resolve().then(() => jest.advanceTimersByTime(1100)); - expect(callback).toHaveBeenCalledTimes(2); - }); - - it('should not stop if the interval function fail', async () => { - // Trigger the rejection of the interval function - let makeReject = false; - - // This value is used to check if the interval function has been rejected - let hasBeenRejected = false; - - const callback = jest.fn(async () => { - if (makeReject) { - hasBeenRejected = true; - throw Error('makeReject set'); - } - return; - }); - - intervalTimer = new IntervalTimer(callback, 1000, emptyLogger); - intervalTimer.start(); - - expect(callback).toHaveBeenCalledTimes(0); - - 
jest.advanceTimersByTime(1100); - expect(callback).toHaveBeenCalledTimes(1); - - // Force the rejection of the interval function for the next call - makeReject = true; - await flushCallStack(); - - jest.advanceTimersByTime(1000); - expect(callback).toHaveBeenCalledTimes(2); - - makeReject = false; - await flushCallStack(); - - // The interval function should have been rejected - expect(hasBeenRejected).toBeTruthy(); - - // The interval function should continue to be called - jest.advanceTimersByTime(1000); - expect(callback).toHaveBeenCalledTimes(3); - }); - - it('intervalFunctionSuccessiveFailureCount should be incremented when the interval function fails', async () => { - intervalTimer = new IntervalTimer(intervalFunctionWithErrorMock, 1000, emptyLogger, 5); - intervalTimer.start(); - - // Simulate clock to call interval function - jest.advanceTimersByTime(1001); - await flushCallStack(); - expect(intervalTimer.intervalFunctionSuccessiveFailureCount).toEqual(0); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(intervalTimer.intervalFunctionSuccessiveFailureCount).toEqual(1); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(intervalTimer.intervalFunctionSuccessiveFailureCount).toEqual(2); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(intervalTimer.intervalFunctionSuccessiveFailureCount).toEqual(3); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(intervalTimer.intervalFunctionSuccessiveFailureCount).toEqual(4); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(intervalTimer.intervalFunctionSuccessiveFailureCount).toEqual(5); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(intervalTimer.intervalFunctionSuccessiveFailureCount).toEqual(0); - }); - - it('should display log messages when interval function fails', async () => { - // Mock to test logger message - const mockLogger = { - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - warn: jest.fn(), - }; - - intervalTimer = new IntervalTimer(intervalFunctionWithErrorMock, 1000, mockLogger, 5); - intervalTimer.start(); - - // Simulate clock to call interval function - jest.advanceTimersByTime(1001); - await flushCallStack(); - expect(mockLogger.warn).toHaveBeenCalledTimes(0); - expect(mockLogger.error).toHaveBeenCalledTimes(0); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(mockLogger.warn).toHaveBeenCalledTimes(1); - expect(mockLogger.error).toHaveBeenCalledTimes(0); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(mockLogger.warn).toHaveBeenCalledTimes(2); - expect(mockLogger.error).toHaveBeenCalledTimes(0); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(mockLogger.warn).toHaveBeenCalledTimes(3); - expect(mockLogger.error).toHaveBeenCalledTimes(0); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(mockLogger.warn).toHaveBeenCalledTimes(4); - expect(mockLogger.error).toHaveBeenCalledTimes(0); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(mockLogger.warn).toHaveBeenCalledTimes(5); - expect(mockLogger.error).toHaveBeenCalledTimes(1); - - jest.advanceTimersByTime(1000); - await flushCallStack(); - expect(mockLogger.warn).toHaveBeenCalledTimes(5); - expect(mockLogger.error).toHaveBeenCalledTimes(1); - }); -}); diff --git a/packages/data-access/test/location-by-topic.test.ts b/packages/data-access/test/location-by-topic.test.ts deleted file mode 100644 index 436e3402c7..0000000000 --- 
a/packages/data-access/test/location-by-topic.test.ts +++ /dev/null @@ -1,190 +0,0 @@ -import LocationByTopic from '../src/transaction-index/location-by-topic'; - -const arbitraryId1 = 'id1'; -const arbitraryId2 = 'id2'; -const arbitraryId3 = 'id3'; - -const arbitraryDataId1 = 'dataid1'; -const arbitraryDataId2 = 'dataid2'; - -const arbitraryTxTopic1 = 'topic1'; -const arbitraryTxTopic2 = 'topic2'; -const arbitraryTxTopic3 = 'topic3'; - -const arbitraryBlockHeader1 = { - channelIds: { id1: [0, 2], id2: [1] }, - topics: { - id1: [arbitraryTxTopic1], - id2: [arbitraryTxTopic1, arbitraryTxTopic2], - }, - version: '0.1.0', -}; -const arbitraryBlockHeader2 = { - channelIds: { id1: [0], id3: [1, 2] }, - topics: { - id3: [arbitraryTxTopic3], - }, - version: '0.1.0', -}; - -/* eslint-disable @typescript-eslint/no-unused-expressions */ -describe('LocationByTopic', () => { - describe('pushStorageLocationIndexedWithBlockTopics', () => { - it('can index new block', async () => { - const localIndex = new LocationByTopic(); - - const spy = jest.fn((_returns: any): Promise => Promise.resolve(true as true)); - // Mock the set function to parse the calls - localIndex.storageLocationByChannelId.set = spy; - - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId1, - arbitraryBlockHeader1, - ); - - expect(spy).toHaveBeenCalled(); - }); - - it('can index channel ids', async () => { - const localIndex = new LocationByTopic(); - - const spy = jest.fn((_returns: any): Promise => Promise.resolve(true as true)); - // Mock the set function to parse the calls - localIndex.channelIdByTopics.set = spy; - - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId1, - arbitraryBlockHeader1, - ); - - expect(spy).toHaveBeenCalled(); - }); - }); - - describe('getStorageLocationsFromChannelId', () => { - it('can getStorageLocationsFromChannelId with one block', async () => { - const localIndex = new LocationByTopic(); - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId1, - arbitraryBlockHeader1, - ); - - // getStorageLocationsFromChannelId is wrong - await expect( - localIndex.getStorageLocationsFromChannelId(arbitraryId1), - ).resolves.toMatchObject([arbitraryDataId1]); - await expect( - localIndex.getStorageLocationsFromChannelId(arbitraryId2), - ).resolves.toMatchObject([arbitraryDataId1]); - await expect( - localIndex.getStorageLocationsFromChannelId(arbitraryId3), - ).resolves.toMatchObject([]); - }); - it('can getStorageLocationsFromChannelId with two blocks', async () => { - const localIndex = new LocationByTopic(); - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId1, - arbitraryBlockHeader1, - ); - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId2, - arbitraryBlockHeader2, - ); - - // getStorageLocationsFromChannelId is wrong - await expect( - localIndex.getStorageLocationsFromChannelId(arbitraryId1), - ).resolves.toMatchObject([arbitraryDataId1, arbitraryDataId2]); - await expect( - localIndex.getStorageLocationsFromChannelId(arbitraryId2), - ).resolves.toMatchObject([arbitraryDataId1]); - await expect( - localIndex.getStorageLocationsFromChannelId(arbitraryId3), - ).resolves.toMatchObject([arbitraryDataId2]); - }); - }); - - describe('getChannelIdsFromTopic', () => { - it('can get ChannelIds From one Topic', async () => { - const localIndex = new LocationByTopic(); - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId1, - arbitraryBlockHeader1, - ); - - await 
localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId2, - arbitraryBlockHeader2, - ); - - expect(await localIndex.getChannelIdsFromTopic(arbitraryTxTopic1)).toMatchObject([ - arbitraryId1, - arbitraryId2, - ]); - expect(await localIndex.getChannelIdsFromTopic(arbitraryTxTopic2)).toMatchObject([ - arbitraryId2, - ]); - expect(await localIndex.getChannelIdsFromTopic(arbitraryTxTopic3)).toMatchObject([ - arbitraryId3, - ]); - expect(await localIndex.getChannelIdsFromTopic('topic not used')).toMatchObject([]); - }); - }); - - describe('getChannelIdsFromMultipleTopics', () => { - it('can get ChannelIds From Multiple Topic giving only one topic', async () => { - const localIndex = new LocationByTopic(); - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId1, - arbitraryBlockHeader1, - ); - - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId2, - arbitraryBlockHeader2, - ); - - expect( - await localIndex.getChannelIdsFromMultipleTopics([arbitraryTxTopic1, arbitraryTxTopic2]), - ).toMatchObject([arbitraryId1, arbitraryId2]); - expect( - await localIndex.getChannelIdsFromMultipleTopics([ - arbitraryTxTopic1, - arbitraryTxTopic2, - arbitraryTxTopic3, - ]), - ).toMatchObject([arbitraryId1, arbitraryId2, arbitraryId3]); - expect( - await localIndex.getChannelIdsFromMultipleTopics(['topic not used', arbitraryTxTopic3]), - ).toMatchObject([arbitraryId3]); - expect(await localIndex.getChannelIdsFromMultipleTopics([])).toMatchObject([]); - }); - - it('can get ChannelIds From Multiple Topic giving only one topic', async () => { - const localIndex = new LocationByTopic(); - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId1, - arbitraryBlockHeader1, - ); - - await localIndex.pushStorageLocationIndexedWithBlockTopics( - arbitraryDataId2, - arbitraryBlockHeader2, - ); - - expect(await localIndex.getChannelIdsFromMultipleTopics([arbitraryTxTopic1])).toMatchObject([ - arbitraryId1, - arbitraryId2, - ]); - expect(await localIndex.getChannelIdsFromMultipleTopics([arbitraryTxTopic2])).toMatchObject([ - arbitraryId2, - ]); - expect(await localIndex.getChannelIdsFromMultipleTopics([arbitraryTxTopic3])).toMatchObject([ - arbitraryId3, - ]); - expect(await localIndex.getChannelIdsFromMultipleTopics(['topic not used'])).toMatchObject( - [], - ); - }); - }); -}); diff --git a/packages/data-access/test/pending-store.test.ts b/packages/data-access/test/pending-store.test.ts new file mode 100644 index 0000000000..3ee4600b7e --- /dev/null +++ b/packages/data-access/test/pending-store.test.ts @@ -0,0 +1,49 @@ +import { StorageTypes } from '@requestnetwork/types'; +import { PendingStore } from '../src/pending-store'; + +describe(PendingStore, () => { + const pendingStore = new PendingStore(); + it('can add an item', () => { + pendingStore.add('abcd', { + storageResult: { + content: 'foobar', + id: 'contentId', + meta: { state: StorageTypes.ContentState.PENDING, timestamp: 1 }, + }, + topics: ['topic1', 'topic2'], + transaction: { data: 'xxx' }, + }); + }); + + it('can retrieve an item by channelId', () => { + expect(pendingStore.get('abcd')).toMatchObject({ + storageResult: expect.objectContaining({ id: 'contentId' }), + topics: ['topic1', 'topic2'], + transaction: { data: 'xxx' }, + }); + }); + + it('can retrieve items by a single topic', () => { + expect(pendingStore.findByTopics(['topic1'])).toMatchObject([ + { + storageResult: expect.objectContaining({ id: 'contentId' }), + topics: ['topic1', 'topic2'], + transaction: { 
data: 'xxx' }, + }, + ]); + }); + + it('can retrieve items by a multiple topic', () => { + expect(pendingStore.findByTopics(['topic1'])).toMatchObject([ + { + storageResult: expect.objectContaining({ id: 'contentId' }), + topics: ['topic1', 'topic2'], + transaction: { data: 'xxx' }, + }, + ]); + }); + + it("doesn't retrieve items with a wrong topic", () => { + expect(pendingStore.findByTopics(['topic3'])).toMatchObject([]); + }); +}); diff --git a/packages/data-access/test/timestamp-by-location.test.ts b/packages/data-access/test/timestamp-by-location.test.ts deleted file mode 100644 index 2bc7f84781..0000000000 --- a/packages/data-access/test/timestamp-by-location.test.ts +++ /dev/null @@ -1,100 +0,0 @@ -/* eslint-disable @typescript-eslint/await-thenable */ -/* eslint-disable no-magic-numbers */ - -import TimestampByLocationTransactionIndex from '../src/transaction-index/timestamp-by-location'; - -const arbitraryDataId1 = 'dataId1'; - -const arbitraryTimestamp = 10; - -/* eslint-disable @typescript-eslint/no-unused-expressions */ -describe('LocationTimestamp', () => { - it('can pushTimestampByLocation() and getTimestampFromLocation()', async () => { - const timestampLocation = new TimestampByLocationTransactionIndex(); - await timestampLocation.pushTimestampByLocation(arbitraryDataId1, arbitraryTimestamp); - - const result = await timestampLocation.isDataInBoundaries(arbitraryDataId1, { - from: arbitraryTimestamp, - to: arbitraryTimestamp, - }); - // 'timestampLocation is wrong' - expect(result).toBe(true); - }); - - describe('isDataInBoundaries', () => { - it('can isDataInBoundaries()', async () => { - const timestampLocation = new TimestampByLocationTransactionIndex(); - await timestampLocation.pushTimestampByLocation(arbitraryDataId1, arbitraryTimestamp); - - await expect( - timestampLocation.isDataInBoundaries(arbitraryDataId1, { from: 1, to: 100 }), - ).resolves.toBe(true); - await expect( - timestampLocation.isDataInBoundaries(arbitraryDataId1, { from: 1 }), - ).resolves.toBe(true); - await expect( - timestampLocation.isDataInBoundaries(arbitraryDataId1, { to: 100 }), - ).resolves.toBe(true); - await expect(timestampLocation.isDataInBoundaries(arbitraryDataId1)).resolves.toBe(true); - - await expect( - timestampLocation.isDataInBoundaries(arbitraryDataId1, { from: 1, to: 9 }), - ).resolves.toBe(false); - await expect( - timestampLocation.isDataInBoundaries(arbitraryDataId1, { from: 11, to: 100 }), - ).resolves.toBe(false); - await expect( - timestampLocation.isDataInBoundaries(arbitraryDataId1, { from: 11 }), - ).resolves.toBe(false); - await expect(timestampLocation.isDataInBoundaries(arbitraryDataId1, { to: 9 })).resolves.toBe( - false, - ); - }); - - it('cannot isDataInBoundaries() on dataId not pushed', async () => { - const timestampLocation = new TimestampByLocationTransactionIndex(); - - await expect( - timestampLocation.isDataInBoundaries(arbitraryDataId1, { from: 1, to: 100 }), - ).rejects.toThrowError(`Unknown timestamp for the dataId ${arbitraryDataId1}`); - }); - }); - - describe('getLatestTimestamp', () => { - it('return null if empty', async () => { - const timestampLocation = new TimestampByLocationTransactionIndex(); - const latest = await timestampLocation.getLastTransactionTimestamp(); - expect(latest).toBeNull(); - }); - - it('return correct data', async () => { - const timestampLocation = new TimestampByLocationTransactionIndex(); - await timestampLocation.pushTimestampByLocation('a', 2); - await timestampLocation.pushTimestampByLocation('b', 3); - await 
timestampLocation.pushTimestampByLocation('c', 1); - - const latest = await timestampLocation.getLastTransactionTimestamp(); - expect(latest).toBe(3); - }); - }); - - describe('getTimestampFromLocation', () => { - it('can get getTimestamp From Location', async () => { - const timestampLocation = new TimestampByLocationTransactionIndex(); - await timestampLocation.pushTimestampByLocation('a', 2); - await timestampLocation.pushTimestampByLocation('b', 3); - await timestampLocation.pushTimestampByLocation('c', 1); - - expect(await timestampLocation.getTimestampFromLocation('b')).toBe(3); - expect(await timestampLocation.getTimestampFromLocation('c')).toBe(1); - expect(await timestampLocation.getTimestampFromLocation('a')).toBe(2); - }); - it('cannot get getTimestamp From Location not existing', async () => { - const timestampLocation = new TimestampByLocationTransactionIndex(); - await timestampLocation.pushTimestampByLocation('a', 2); - - expect(await timestampLocation.getTimestampFromLocation('a')).toBe(2); - expect(await timestampLocation.getTimestampFromLocation('b')).toBe(null); - }); - }); -}); diff --git a/packages/data-access/test/transaction-index.test.ts b/packages/data-access/test/transaction-index.test.ts deleted file mode 100644 index 2fddea0663..0000000000 --- a/packages/data-access/test/transaction-index.test.ts +++ /dev/null @@ -1,347 +0,0 @@ -/* eslint-disable @typescript-eslint/await-thenable */ -/* eslint-disable no-magic-numbers */ - -import { DataAccessTypes } from '@requestnetwork/types'; - -import TransactionIndex from '../src/transaction-index/index'; -import TimestampByLocation from '../src/transaction-index/timestamp-by-location'; - -const testBlock: DataAccessTypes.IBlockHeader = { - channelIds: { 'request-1': [1] }, - topics: { 'request-1': ['topic-1'] }, - version: '2.0', -}; - -describe('TransactionIndex', () => { - let transactionIndex: TransactionIndex; - beforeEach(async () => { - transactionIndex = new TransactionIndex(); - }); - - describe('addTransaction', () => { - it('addTransaction() should be fullfilled', async () => { - await transactionIndex.addTransaction('', testBlock, 0); - }); - - it('calls locationByTopic and timestampByLocation', async () => { - const pushStorageLocationIndexedWithBlockTopicsMock = jest.fn(); - (transactionIndex as any).locationByTopic.pushStorageLocationIndexedWithBlockTopics = - pushStorageLocationIndexedWithBlockTopicsMock; - - const pushTimestampByLocationMock = jest.fn(); - (transactionIndex as any).timestampByLocation.pushTimestampByLocation = - pushTimestampByLocationMock; - - await transactionIndex.addTransaction('abcd', testBlock, 2); - - expect(pushStorageLocationIndexedWithBlockTopicsMock).toHaveBeenCalledWith('abcd', testBlock); - expect(pushTimestampByLocationMock).toHaveBeenCalledWith('abcd', 2); - }); - }); - - describe('getStorageLocationList', () => { - beforeEach(async () => { - // mock location by topic - (transactionIndex as any).locationByTopic.getStorageLocationsFromChannelId = jest.fn(() => [ - 'a', - 'b', - 'c', - ]); - - const timestampByLocation: TimestampByLocation = (transactionIndex as any) - .timestampByLocation; - await timestampByLocation.pushTimestampByLocation('a', 9); - await timestampByLocation.pushTimestampByLocation('b', 10); - await timestampByLocation.pushTimestampByLocation('c', 11); - }); - - it('getStorageLocationList() should be fullfilled', async () => { - await expect(transactionIndex.getStorageLocationList('')); - }); - - it('should return all if timestamp not specified', async () 
=> { - const storageLocationList = await transactionIndex.getStorageLocationList(''); - expect(storageLocationList).toEqual(['a', 'b', 'c']); - }); - - it('should filter data if timestamp specified', async () => { - const storageLocationList1 = await transactionIndex.getStorageLocationList('', { - to: 10, - }); - expect(storageLocationList1).toEqual(['a', 'b']); - - const storageLocationList2 = await transactionIndex.getStorageLocationList('', { - from: 10, - to: 11, - }); - expect(storageLocationList2).toEqual(['b', 'c']); - - const storageLocationList3 = await transactionIndex.getStorageLocationList('', { - from: 11, - to: 12, - }); - expect(storageLocationList3).toEqual(['c']); - - const storageLocationList4 = await transactionIndex.getStorageLocationList('', { - from: 12, - to: 13, - }); - expect(storageLocationList4).toEqual([]); - }); - }); - - describe('getChannelIdsForTopic', () => { - it('getChannelIdsForTopic() should be fullfilled', async () => { - await expect(transactionIndex.getChannelIdsForTopic('')); - }); - - it('getChannelIdsForTopic() should support multiple channel ids for topic', async () => { - await transactionIndex.addTransaction( - 'dataId1', - { - channelIds: { - 'channel-1': [1], - 'channel-2': [2], - }, - topics: { - 'channel-1': ['topic-a'], - 'channel-2': ['topic-a'], - }, - version: '2.0', - }, - 1, - ); - const channels = await transactionIndex.getChannelIdsForTopic('topic-a'); - expect(channels).toEqual(['channel-1', 'channel-2']); - }); - - it('getChannelIdsForTopic() should support multiple channel ids for topic with time boundaries', async () => { - await transactionIndex.addTransaction( - 'dataId1', - { - channelIds: { - 'channel-1': [1], - 'channel-2': [2], - }, - topics: { - 'channel-1': ['topic-a'], - 'channel-2': ['topic-a'], - }, - version: '2.0', - }, - 2, - ); - - await transactionIndex.addTransaction( - 'dataId2', - { - channelIds: { - 'channel-2': [1], - 'channel-3': [2], - }, - topics: {}, - version: '2.0', - }, - 10, - ); - - await transactionIndex.addTransaction( - 'dataId3', - { - channelIds: { - 'channel-3': [0], - }, - topics: { - 'channel-3': ['topic-b'], - }, - version: '2.0', - }, - 20, - ); - - expect(await transactionIndex.getChannelIdsForTopic('topic-a', { from: 3 })).toEqual([ - 'channel-2', - ]); - expect(await transactionIndex.getChannelIdsForTopic('topic-a', { to: 3 })).toEqual([ - 'channel-1', - 'channel-2', - ]); - - expect(await transactionIndex.getChannelIdsForTopic('topic-a', { from: 11 })).toEqual([]); - expect(await transactionIndex.getChannelIdsForTopic('topic-a', { to: 1 })).toEqual([]); - - expect(await transactionIndex.getChannelIdsForTopic('topic-b', { to: 11 })).toEqual([ - 'channel-3', - ]); - expect(await transactionIndex.getChannelIdsForTopic('topic-b', { from: 11 })).toEqual([ - 'channel-3', - ]); - }); - }); - - describe('getChannelIdsForMultipleTopics', () => { - it('getChannelIdsForMultipleTopics() should be full filled', async () => { - await expect(transactionIndex.getChannelIdsForMultipleTopics([])); - }); - - it('getChannelIdsForMultipleTopics() should support multiple channel ids for multiple topics', async () => { - await transactionIndex.addTransaction( - 'dataId1', - { - channelIds: { - 'channel-1': [1], - 'channel-2': [2], - 'channel-3': [3], - }, - topics: { - 'channel-1': ['topic-a'], - 'channel-2': ['topic-a'], - 'channel-3': ['topic-b'], - 'channel-4': ['topic-b', 'topic-c'], - }, - version: '2.0', - }, - 1, - ); - expect(await 
transactionIndex.getChannelIdsForMultipleTopics(['topic-a'])).toEqual([ - 'channel-1', - 'channel-2', - ]); - expect(await transactionIndex.getChannelIdsForMultipleTopics(['topic-b'])).toEqual([ - 'channel-3', - 'channel-4', - ]); - expect(await transactionIndex.getChannelIdsForMultipleTopics(['topic-c'])).toEqual([ - 'channel-4', - ]); - }); - - it('getChannelIdsForMultipleTopics() should support multiple channel ids for multiple topics with boundaries', async () => { - await transactionIndex.addTransaction( - 'dataId1', - { - channelIds: { - 'channel-1': [1], - 'channel-2': [2], - }, - topics: { - 'channel-1': ['topic-a'], - 'channel-2': ['topic-b'], - }, - version: '2.0', - }, - 1, - ); - await transactionIndex.addTransaction( - 'dataId2', - { - channelIds: { - 'channel-1': [1], - 'channel-3': [2], - }, - topics: { - 'channel-3': ['topic-c', 'topic-b'], - }, - version: '2.0', - }, - 3, - ); - await transactionIndex.addTransaction( - 'dataId3', - { - channelIds: { - 'channel-4': [1], - 'channel-5': [2], - }, - topics: { - 'channel-4': ['topic-c'], - 'channel-5': ['topic-d', 'topic-a'], - }, - version: '2.0', - }, - 10, - ); - - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-a', 'topic-b'], { from: 2 }), - ).toEqual(['channel-1', 'channel-5', 'channel-3']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-a', 'topic-b'], { from: 4 }), - ).toEqual(['channel-5']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-a', 'topic-b'], { to: 2 }), - ).toEqual(['channel-1', 'channel-2']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-a', 'topic-b'], { - from: 2, - to: 4, - }), - ).toEqual(['channel-1', 'channel-3']); - - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-b', 'topic-c'], { from: 2 }), - ).toEqual(['channel-3', 'channel-4']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-b', 'topic-c'], { from: 4 }), - ).toEqual(['channel-4']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-b', 'topic-c'], { to: 2 }), - ).toEqual(['channel-2']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-b', 'topic-c'], { - from: 2, - to: 4, - }), - ).toEqual(['channel-3']); - - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-d', 'topic-c'], { from: 2 }), - ).toEqual(['channel-5', 'channel-3', 'channel-4']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-d', 'topic-c'], { from: 4 }), - ).toEqual(['channel-5', 'channel-4']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-d', 'topic-c'], { to: 2 }), - ).toEqual([]); - expect( - await transactionIndex.getChannelIdsForMultipleTopics(['topic-d', 'topic-c'], { - from: 2, - to: 4, - }), - ).toEqual(['channel-3']); - - expect( - await transactionIndex.getChannelIdsForMultipleTopics([ - 'topic-a', - 'topic-b', - 'topic-c', - 'topic-d', - ]), - ).toEqual(['channel-1', 'channel-5', 'channel-2', 'channel-3', 'channel-4']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics( - ['topic-a', 'topic-b', 'topic-c', 'topic-d'], - { from: 2 }, - ), - ).toEqual(['channel-1', 'channel-5', 'channel-3', 'channel-4']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics( - ['topic-a', 'topic-b', 'topic-c', 'topic-d'], - { from: 4 }, - ), - ).toEqual(['channel-5', 'channel-4']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics( - ['topic-a', 'topic-b', 'topic-c', 
'topic-d'], - { to: 2 }, - ), - ).toEqual(['channel-1', 'channel-2']); - expect( - await transactionIndex.getChannelIdsForMultipleTopics( - ['topic-a', 'topic-b', 'topic-c', 'topic-d'], - { from: 2, to: 4 }, - ), - ).toEqual(['channel-1', 'channel-3']); - }); - }); -}); diff --git a/packages/ethereum-storage/README.md b/packages/ethereum-storage/README.md index f31a80c9b9..e843d98c6b 100644 --- a/packages/ethereum-storage/README.md +++ b/packages/ethereum-storage/README.md @@ -15,32 +15,16 @@ npm install @requestnetwork/ethereum-storage ## Usage ```js -import { EthereumStorage } from '@requestnetwork/ethereum-storage'; -import { StorageTypes } from '@requestnetwork/types'; - -const web3HttpProvider = require('web3-providers-http'); - -const provider = new web3HttpProvider('http://localhost:8545'); - -const web3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - timeout: 1000, - web3Provider: provider, -}; - -const ipfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'localhost', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 1000, -}; - -const ethereumStorage = new EthereumStorage( - // Give an external url of the storage (use to indicate where the buffer data are stored) - 'url.buffer.ethereum.storage', - ipfsGatewayConnection, - web3Connection, -); +import { + EthereumStorage, + EthereumTransactionSubmitter, + IpfsStorage, +} from '@requestnetwork/ethereum-storage'; + +const ethereumStorage = new EthereumStorage({ + ipfsStorage: new IpfsStorage(), + txSubmitter: new EthereumTransactionSubmitter(), +}); const data = 'Some data'; diff --git a/packages/ethereum-storage/package.json b/packages/ethereum-storage/package.json index 439184317c..1c082446e6 100644 --- a/packages/ethereum-storage/package.json +++ b/packages/ethereum-storage/package.json @@ -41,30 +41,20 @@ }, "dependencies": { "@rainbow-me/fee-suggestions": "2.1.0", - "@requestnetwork/currency": "0.10.0", "@requestnetwork/smart-contracts": "0.30.0", "@requestnetwork/types": "0.37.0", "@requestnetwork/utils": "0.37.0", "axios": "0.27.2", - "bluebird": "3.7.2", "ethers": "5.5.1", "form-data": "3.0.0", "ipfs-unixfs": "6.0.7", - "keyv": "4.0.3", "qs": "6.10.3", "shelljs": "0.8.5", "tslib": "2.5.0", - "typed-emitter": "1.4.0", - "web3-eth": "1.3.6", - "web3-utils": "1.3.6", "yargs": "17.6.2" }, "devDependencies": { - "@openzeppelin/test-helpers": "0.5.6", - "@truffle/hdwallet-provider": "1.2.3", - "@types/bluebird": "3.5.33", "@types/jest": "26.0.13", - "@types/keyv": "3.1.1", "@types/node": "16.11.7", "axios-mock-adapter": "1.19.0", "jest": "26.4.2", diff --git a/packages/ethereum-storage/src/config.ts b/packages/ethereum-storage/src/config.ts index f7e0fc3b76..81dc1b006d 100644 --- a/packages/ethereum-storage/src/config.ts +++ b/packages/ethereum-storage/src/config.ts @@ -14,18 +14,13 @@ const config = { url: 'http://localhost:8545', }, }, - retryDelay: 0, safeGasPriceLimit: '500000000000', transactionPollingTimeout: 300, blockConfirmations: 2, }, ipfs: { - defaultNode: { - host: 'localhost', - port: 5001, - protocol: 'http' as StorageTypes.IpfsGatewayProtocol, - timeout: 30000, - }, + url: 'http://localhost:5001', + timeout: 30000, errorHandling: { delayBetweenRetries: 500, maxRetries: 3, @@ -37,21 +32,15 @@ const config = { 'ipfs-survival.request.network': 'Qmb6a5DH45k8JwLdLVZUhRhv1rnANpsbXjtsH41esGhNCh', }, maxIpfsReadRetry: 1, - pinRequest: { - delayBetweenCalls: 1000, - maxSize: 500, - timeout: 30000, - }, }, - maxConcurrency: 5, }; -/** 
- * Retrieve from config the default information to connect to ipfs - * @returns IIpfsGatewayConnection the host, port, protocol and timeout threshold to connect to the gateway - */ -export function getDefaultIpfs(): StorageTypes.IIpfsGatewayConnection { - return config.ipfs.defaultNode; +export function getDefaultIpfsUrl(): string { + return config.ipfs.url; +} + +export function getDefaultIpfsTimeout(): number { + return config.ipfs.timeout; } /** @@ -94,14 +83,6 @@ export function getDefaultEthereumBlockConfirmations(): number { return config.ethereum.blockConfirmations; } -/** - * Retrieve from config the time to wait between query retries - * @returns the query retry delay - */ -export function getEthereumRetryDelay(): number { - return config.ethereum.retryDelay; -} - /** * Retrieve from config the maximum number of query retries * @returns the maximum amount of query retries @@ -120,26 +101,6 @@ export function getSafeGasPriceLimit(): string { return process?.env?.SAFE_GAS_PRICE_LIMIT || config.ethereum.safeGasPriceLimit; } -/** - * Retrieve from config the maximum number of concurrent calls made from the ethereum-storage - * @returns the maximum amount concurrent calls - */ -export function getMaxConcurrency(): number { - return config.maxConcurrency; -} - -/** - * Retrieve from config the default pin request maximum size, timeout and wait time between calls - * @returns array of the swarm addresses - */ -export function getPinRequestConfig(): StorageTypes.IPinRequestConfiguration { - return { - delayBetweenCalls: config.ipfs.pinRequest.delayBetweenCalls, - maxSize: config.ipfs.pinRequest.maxSize, - timeout: config.ipfs.pinRequest.timeout, - }; -} - /** * Retrieve from config the maximum number of retries on failed IPFS calls * @returns array of the swarm addresses diff --git a/packages/ethereum-storage/src/ethereum-blocks.ts b/packages/ethereum-storage/src/ethereum-blocks.ts deleted file mode 100644 index 01623fe035..0000000000 --- a/packages/ethereum-storage/src/ethereum-blocks.ts +++ /dev/null @@ -1,297 +0,0 @@ -import { LogTypes, StorageTypes } from '@requestnetwork/types'; -import { cachedThrottle, retry, SimpleLogger } from '@requestnetwork/utils'; - -/** - * Manages every info linked to the ethereum blocks (blockNumber, blockTimestamp, confirmations ... 
) - */ -export default class EthereumBlocks { - // 'web3-eth' object - public eth: any; - - /** - * Gets last block number - * The return value of this function will be cached for `lastBlockNumberDelay` milliseconds - * - * @return blockNumber of the last block - */ - public getLastBlockNumber: () => Promise; - - // The time to wait between query retries - public retryDelay: number; - - // Maximum number of retries for a query - public maxRetries: number; - - /** - * Cache of the blockTimestamp indexed by blockNumber - * to ask only once the timestamp of a block from a node - * - */ - protected blockTimestamp: number[] = []; - - // All the block before this one are ignored - // Basically, the block where the contract has been created - private firstSignificantBlockNumber: number; - - // The minimum amount of time to wait between fetches of lastBlockNumber - private getLastBlockNumberMinDelay: number; - - /** - * Logger instance - */ - private logger: LogTypes.ILogger; - - /** - * Constructor - * @param eth eth object from web3 - * @param firstSignificantBlockNumber all the block before this one will be ignored - * @param getLastBlockNumberMinDelay the minimum delay to wait between fetches of lastBlockNumber - */ - public constructor( - eth: any, - firstSignificantBlockNumber: number, - retryDelay: number, - maxRetries: number, - getLastBlockNumberMinDelay = 0, - logger?: LogTypes.ILogger, - ) { - this.eth = eth; - - this.firstSignificantBlockNumber = firstSignificantBlockNumber; - - this.getLastBlockNumberMinDelay = getLastBlockNumberMinDelay; - - this.logger = logger || new SimpleLogger(); - - // Get retry parameter values from config - this.retryDelay = retryDelay; - this.maxRetries = maxRetries; - - // Setup the throttled and retriable getLastBlockNumber function - this.getLastBlockNumber = cachedThrottle( - () => - retry( - () => { - this.logger.debug(`Getting last block number`, ['ethereum', 'ethereum-blocks']); - return this.eth.getBlockNumber(); - }, - { - maxRetries: this.maxRetries, - retryDelay: this.retryDelay, - }, - )(), - this.getLastBlockNumberMinDelay, - ); - } - - /** - * Gets timestamp of a block - * @param blockNumber number of the block - * @return timestamp of a block - */ - public async getBlockTimestamp(blockNumber: number): Promise { - // If we already have it, give it - if (this.blockTimestamp[blockNumber]) { - return this.blockTimestamp[blockNumber]; - } - - // if we don't know the information, let's get it - // Use retry to rerun if getBlock fails - const block = await retry((bn: number) => this.eth.getBlock(bn), { - maxRetries: this.maxRetries, - retryDelay: this.retryDelay, - })(blockNumber); - if (!block) { - throw Error(`block ${blockNumber} not found`); - } - this.blockTimestamp[blockNumber] = block.timestamp; - - return this.blockTimestamp[blockNumber]; - } - - /** - * Gets the two block numbers surrounding a timestamp - * if the timestamp match exactly a blockTimestamp, returns twice this block number - * If the timestamp is before than the significant block return, returns twice this significant block number - * - * @param timestamp timestamp to search from - * @return {blockBefore, blockAfter} or null if the timestamp is after the last ethereum block - */ - public async getBlockNumbersFromTimestamp( - timestamp: number, - ): Promise { - // check if we have the blockTimestamp of the first significant block number - if (!this.blockTimestamp[this.firstSignificantBlockNumber]) { - // update the blockTimestamp cache with the first significant block - await 
this.getBlockTimestamp(this.firstSignificantBlockNumber); - } - - // update the second last block number in memory - // we get the number of the second last block instead of the last block - // because the information of the last block may not be retrieved by the web3 provider - const secondLastBlockNumber: number = await this.getSecondLastBlockNumber(); - - // check if we have the blockTimestamp of the number of the second last block - if (!this.blockTimestamp[secondLastBlockNumber]) { - // update the blockTimestamp cache with the second last block - await this.getBlockTimestamp(secondLastBlockNumber); - } - - // if timestamp before first significant block, return the significant block - if (timestamp <= this.blockTimestamp[this.firstSignificantBlockNumber]) { - return { - blockAfter: this.firstSignificantBlockNumber, - blockBefore: this.firstSignificantBlockNumber, - }; - } - - // if timestamp after second last block, return secondLastBlockNumber - if (timestamp > this.blockTimestamp[secondLastBlockNumber]) { - return { - blockAfter: secondLastBlockNumber, - blockBefore: secondLastBlockNumber, - }; - } - - // Before doing the dichotomic search, we restrict the search to the two closest block we already know - // the boundaries start with the first significant block and the last block - const { result, lowBlockNumber, highBlockNumber } = this.getKnownBlockNumbersFromTimestamp( - timestamp, - secondLastBlockNumber, - ); - - // if the result is not found on the known blocks, we search by dichotomy between the two closest known blocks - return ( - result || - this.getBlockNumbersFromTimestampByDichotomy(timestamp, lowBlockNumber, highBlockNumber) - ); - } - - /** - * Gets second last block number - * @return blockNumber of the second last block - */ - public async getSecondLastBlockNumber(): Promise { - return (await this.getLastBlockNumber()) - 1; - } - - /** - * Gets the number of confirmation from a blockNumber - * @return blockNumber of the last block - */ - public async getConfirmationNumber(blockNumber: number): Promise { - try { - return (await this.getLastBlockNumber()) - blockNumber; - } catch (e) { - throw Error(`Error getting the confirmation number: ${e}`); - } - } - - /** - * Get a block from ethereum - * - * @param blockNumber The block number - * @returns An Ethereum block - */ - public async getBlock(blockNumber: number | string): Promise { - return retry(this.eth.getBlock, { - context: this.eth, - maxRetries: this.maxRetries, - retryDelay: this.retryDelay, - })(blockNumber); - } - - /** - * Gets the two known block numbers surrounding a timestamp - * - * @param timestamp timestamp to search from - * @param lastBlockNumber last block number known - * @returns - */ - private getKnownBlockNumbersFromTimestamp( - timestamp: number, - lastBlockNumber: number, - ): { - result: StorageTypes.IBlockNumbersInterval | null; - lowBlockNumber: number; - highBlockNumber: number; - } { - let lowBlockNumber = this.firstSignificantBlockNumber; - let highBlockNumber = lastBlockNumber; - - let currentBlockNumber = this.firstSignificantBlockNumber; - let currentBlockTimestamp; - let result: StorageTypes.IBlockNumbersInterval | null = null; - - let foundKnownBoundaries = false; - // We iterate on the known blocks from the first significant block until we found a blockTimestamp bigger than the timestamp - while (!foundKnownBoundaries) { - currentBlockTimestamp = this.blockTimestamp[currentBlockNumber]; - // if the block is unknown yet, we skip it - if (currentBlockTimestamp) { - // if we are 
lucky a block we know has the exact same timestamp - if (currentBlockTimestamp === timestamp) { - result = { blockBefore: currentBlockNumber, blockAfter: currentBlockNumber }; - foundKnownBoundaries = true; - } else { - // otherwise we restrict the boundaries - lowBlockNumber = highBlockNumber; - highBlockNumber = currentBlockNumber; - - // If the current timestamp is bigger than the one we research, - // it means we have the best boundaries from what we already know - if (currentBlockTimestamp > timestamp) { - foundKnownBoundaries = true; - } - } - } - currentBlockNumber++; - } - - return { result, lowBlockNumber, highBlockNumber }; - } - - /** - * Gets the two block numbers surrounding a timestamp - * This is done by a dichotomic search between two blocks - * - * @param timestamp timestamp to search from - * @param lowBlockNumber low boundary - * @param highBlockNumber high boundary - * @returns - */ - private async getBlockNumbersFromTimestampByDichotomy( - timestamp: number, - lowBlockNumber: number, - highBlockNumber: number, - ): Promise { - let result: StorageTypes.IBlockNumbersInterval | null = null; - - // if blocks not found yet, we do a dichotomic search between the two closest known blocks - while (!result) { - // Picks the block in the middle of the two closest known blocks - const currentBlockNumber = - lowBlockNumber + Math.floor((highBlockNumber - lowBlockNumber) / 2); - // Gets the timestamp of the block and stores it - const currentBlockTimestamp = await this.getBlockTimestamp(currentBlockNumber); - - // Restricts the boundaries - if (currentBlockTimestamp < timestamp) { - lowBlockNumber = currentBlockNumber; - } else if (currentBlockTimestamp > timestamp) { - highBlockNumber = currentBlockNumber; - } else { - // If we are lucky, the timestamp is equal to the block timestamp - result = { blockBefore: currentBlockNumber, blockAfter: currentBlockNumber }; - break; - } - - // If we are not lucky, we wait to have the two block surrounding the timestamp - if (highBlockNumber === lowBlockNumber + 1) { - result = { blockBefore: lowBlockNumber, blockAfter: highBlockNumber }; - } - } - - return result; - } -} diff --git a/packages/ethereum-storage/src/ethereum-entries-to-ipfs-content.ts b/packages/ethereum-storage/src/ethereum-entries-to-ipfs-content.ts deleted file mode 100644 index 3704c75c3f..0000000000 --- a/packages/ethereum-storage/src/ethereum-entries-to-ipfs-content.ts +++ /dev/null @@ -1,227 +0,0 @@ -import * as Bluebird from 'bluebird'; - -import { LogTypes, StorageTypes } from '@requestnetwork/types'; -import { getMaxIpfsReadRetry } from './config'; - -import IgnoredDataIds from './ignored-dataIds'; -import IpfsConnectionError from './ipfs-connection-error'; - -/** - * Verify the hashes are present on IPFS for the corresponding ethereum entry - * Filtered incorrect hashes - * @param ethereumEntries Ethereum entries from the smart contract - * @returns Filtered list of dataId with metadata - */ -export default async function EthereumEntriesToIpfsContent( - ethereumEntries: StorageTypes.IEthereumEntry[], - ipfsStorage: StorageTypes.IIpfsStorage, - ignoredDataIdsIndex: IgnoredDataIds, - logger: LogTypes.ILogger, - maxConcurrency: number, -): Promise { - const totalCount: number = ethereumEntries.length; - let successCount = 0; - let successCountOnFirstTry = 0; - let ipfsConnectionErrorCount = 0; - let wrongFeesCount = 0; - let incorrectFileCount = 0; - - // Contains results from readHashOnIPFS function - // We store hashAndSize in this array in order to know which hashes 
have not been found on IPFS - let allIpfsContentOrErrors: Array<{ - ipfsContent: StorageTypes.IEntry | null; - entryWithError: StorageTypes.IEthereumEntry | null; - }>; - - // Final array of dataIds, content and meta - const finalIpfsContents: StorageTypes.IEntry[] = []; - let ethereumEntriesToProcess: StorageTypes.IEthereumEntry[] = ethereumEntries.slice(); - - // Try to read the hashes on IPFS - // The operation is done at least once and retried depending on the readOnIPFSRetry config - for (let tryIndex = 0; tryIndex < 1 + getMaxIpfsReadRetry(); tryIndex++) { - // Reset for each retry - ipfsConnectionErrorCount = 0; - - if (tryIndex > 0) { - logger.debug(`Retrying to read hashes on IPFS`, ['ipfs']); - } - - allIpfsContentOrErrors = await Bluebird.map( - ethereumEntriesToProcess, - // Read hash on IPFS and retrieve content corresponding to the hash - // Reject on error when no file is found on IPFS - // or when the declared size doesn't correspond to the size of the content stored on ipfs - async (ethereumEntry: StorageTypes.IEthereumEntry) => { - return getIpfsContent(ethereumEntry, tryIndex + 1, ipfsStorage, logger); - }, - { - concurrency: maxConcurrency, - }, - ); - - // flush the list of entries to process - ethereumEntriesToProcess = []; - - // Store found hashes in entries - // The hashes to retry to read are the hashes where readHashOnIPFS returned null - for (const { ipfsContent, entryWithError } of allIpfsContentOrErrors) { - if (ipfsContent) { - // content found and not error - finalIpfsContents.push(ipfsContent); - } else if (entryWithError) { - const errorType = entryWithError.error?.type; - if (errorType === StorageTypes.ErrorEntries.INCORRECT_FILE) { - incorrectFileCount++; - // no retry needed, just store it - await ignoredDataIdsIndex.save(entryWithError); - } else if (errorType === StorageTypes.ErrorEntries.WRONG_FEES) { - wrongFeesCount++; - // no retry needed, just store it - await ignoredDataIdsIndex.save(entryWithError); - } else if (errorType === StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR) { - ipfsConnectionErrorCount++; - // push it for a retry - ethereumEntriesToProcess.push(entryWithError); - } else { - throw new Error( - `Unexpected Error for the hash: ${entryWithError.hash}, ${errorType}, ${entryWithError.error?.message}`, - ); - } - } - } - - successCount = finalIpfsContents.length; - - logger.debug(`${successCount}/${totalCount} retrieved dataIds after try ${tryIndex + 1}`, [ - 'ipfs', - ]); - - if (tryIndex === 0) { - successCountOnFirstTry = successCount; - } - } - - // Save the entries not successfully retrieved after the retries - for (const remainingEntry of ethereumEntriesToProcess) { - // store the ipfs ignored after the retried - await ignoredDataIdsIndex.save(remainingEntry); - } - - // Clean the ignored dataIds - for (const ipfsContent of finalIpfsContents) { - // store the id successfully retrieved from the ignored ones - await ignoredDataIdsIndex.delete(ipfsContent.id); - } - - logger.info( - `getData on ${totalCount} events, ${successCount} retrieved (${ - successCount - successCountOnFirstTry - } after retries), ${ipfsConnectionErrorCount} not found, ${incorrectFileCount} incorrect files, ${wrongFeesCount} with wrong fees`, - ['metric', 'successfullyRetrieved'], - ); - - return finalIpfsContents; -} - -/** - * Tries to get the ipfs content or return the error - * @param ethereumEntry entry information to get the ipfs from - * @returns the ipfsContent and meta or the entry with the error - */ -async function getIpfsContent( - ethereumEntry: 
StorageTypes.IEthereumEntry, - tryIndex: number, - ipfsStorage: StorageTypes.IIpfsStorage, - logger: LogTypes.ILogger, -): Promise<{ - ipfsContent: StorageTypes.IEntry | null; - entryWithError: StorageTypes.IEthereumEntry | null; -}> { - // Check if the event log is incorrect - if ( - typeof ethereumEntry.hash === 'undefined' || - typeof ethereumEntry.feesParameters === 'undefined' - ) { - throw Error('The event log has no hash or feesParameters'); - } - if (typeof ethereumEntry.meta === 'undefined') { - throw Error('The event log has no metadata'); - } - - // Get content from ipfs and verify provided size is correct - let ipfsObject; - - try { - const startTime = Date.now(); - // Send ipfs request - ipfsObject = await ipfsStorage.read( - ethereumEntry.hash, - ethereumEntry.feesParameters.contentSize, - ); - logger.debug( - `read ${ethereumEntry.hash}, try; ${tryIndex}. Took ${Date.now() - startTime} ms`, - ['ipfs'], - ); - } catch (error) { - const errorMessage = error.message || error; - - // Check the type of the error - if (error instanceof IpfsConnectionError) { - logger.info(`IPFS connection error when trying to fetch: ${ethereumEntry.hash}`, ['ipfs']); - logger.debug(`IPFS connection error : ${errorMessage}`, ['ipfs']); - // An ipfs connection error occurred (for example a timeout), therefore we would eventually retry to find the has - return { - entryWithError: { - ...ethereumEntry, - error: { message: errorMessage, type: StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR }, - }, - ipfsContent: null, - }; - } else { - logger.info(`Incorrect file for hash: ${ethereumEntry.hash}`, ['ipfs']); - - // No need to retry to find this hash - return { - entryWithError: { - ...ethereumEntry, - error: { message: errorMessage, type: StorageTypes.ErrorEntries.INCORRECT_FILE }, - }, - ipfsContent: null, - }; - } - } - - const contentSizeDeclared = ethereumEntry.feesParameters.contentSize; - - // Check if the declared size is higher or equal to the size of the actual file - // If the declared size is higher, it's not considered as a problem since it means the hash submitter has paid a bigger fee than he had to - if (!ipfsObject || ipfsObject.ipfsSize > contentSizeDeclared) { - logger.info(`Incorrect declared size for hash: ${ethereumEntry.hash}`, ['ipfs']); - - // No need to retry to find this hash - return { - entryWithError: { - ...ethereumEntry, - error: { message: `Incorrect declared size`, type: StorageTypes.ErrorEntries.WRONG_FEES }, - }, - ipfsContent: null, - }; - } - - // Get meta data from ethereum - const ethereumMetadata = ethereumEntry.meta; - - const ipfsContent = { - content: ipfsObject.content, - id: ethereumEntry.hash, - meta: { - ethereum: ethereumMetadata, - ipfs: { size: ipfsObject.ipfsSize }, - state: StorageTypes.ContentState.CONFIRMED, - storageType: StorageTypes.StorageSystemType.ETHEREUM_IPFS, - timestamp: ethereumMetadata.blockTimestamp, - }, - }; - return { ipfsContent, entryWithError: null }; -} diff --git a/packages/ethereum-storage/src/ethereum-metadata-cache.ts b/packages/ethereum-storage/src/ethereum-metadata-cache.ts deleted file mode 100644 index 53071fe501..0000000000 --- a/packages/ethereum-storage/src/ethereum-metadata-cache.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { StorageTypes } from '@requestnetwork/types'; - -import * as Keyv from 'keyv'; - -/** - * Allows to save and retrieve ethereum metadata associated to a data id - * Metadata represents general information about the Ethereum network used like network name and smart contract address - * and specific 
information of the data id like number and timestamp of the block of the transaction of the data id - * This module has been created to avoid multiple call of getPastEvents web3 function - */ -export default class EthereumMetadataCache { - /** - * Store the ethereum metadata for a data id in a dictionary - * This attribute is left public for mocking purpose to facilitate tests on the module - */ - public metadataCache: Keyv; - - public listDataIds: Keyv; - - /** - * Constructor - * @param smartContractManager Instance of SmartContractManager used to get metadata in case they're not registered yet - * @param store a Keyv store to persist the metadata - */ - public constructor(store?: Keyv.Store) { - this.metadataCache = new Keyv({ - namespace: 'ethereumMetadata', - store, - }); - - this.listDataIds = new Keyv({ - namespace: 'listDataIds', - store, - }); - } - - /** - * Saves in the cache the Ethereum metadata related to a dataId - * @param dataId dataId to index ethereum metadata - * @param meta Ethereum metadata related to the dataId - */ - public async saveDataIdMeta(dataId: string, meta: StorageTypes.IEthereumMetadata): Promise { - // We save the metadata only if it doesn't exist yet - // A user can add the same dataId into the smart contract indefinitely - // Therefore, only the first occurrence of the dataId has valid metadata - // Finding several occurrences of the same dataId is not abnormal and we don't throw an error in this case - // PROT-503: We should ensure the corresponding metadata is the metadata of the first occurrence of the dataId - if (!(await this.metadataCache.get(dataId))) { - await this.metadataCache.set(dataId, meta); - await this.updateDataId(dataId); - } - } - - /** - * Retrieve Ethereum metadata from cache - * If metadata of the specified dataId are not found in the cache - * we get them and save them in the cache - * @param dataId dataId to get Ethereum metadata from - * @returns Ethereum metadata of the dataId - */ - public async getDataIdMeta(dataId: string): Promise { - return this.metadataCache.get(dataId); - } - /** - * Get the list of data ids stored - * - * @returns the list of data ids stored - */ - public async getDataIds(): Promise { - const listDataIds: string[] | undefined = await this.listDataIds.get('list'); - if (!listDataIds) { - return []; - } - return listDataIds; - } - - /** - * Update the list of data ids stored - * - * @param dataId data id to add to the list - * @returns - */ - private async updateDataId(dataId: string): Promise { - let listDataIds: string[] | undefined = await this.listDataIds.get('list'); - if (!listDataIds) { - listDataIds = []; - } - if (!listDataIds.includes(dataId)) { - listDataIds.push(dataId); - await this.listDataIds.set('list', listDataIds); - } - } -} diff --git a/packages/ethereum-storage/src/ethereum-storage-ethers.ts b/packages/ethereum-storage/src/ethereum-storage-ethers.ts deleted file mode 100644 index 7c1ef5e9ee..0000000000 --- a/packages/ethereum-storage/src/ethereum-storage-ethers.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { EventEmitter } from 'events'; -import { BigNumber, ContractReceipt, providers, Signer } from 'ethers'; -import TypedEmitter from 'typed-emitter'; -import { CurrencyTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; -import { requestHashSubmitterArtifact } from '@requestnetwork/smart-contracts'; -import { EthereumTransactionSubmitter } from './ethereum-tx-submitter'; -import { getCurrentTimestampInSecond, SimpleLogger } from '@requestnetwork/utils'; -import { 
getDefaultEthereumBlockConfirmations } from './config'; - -export type GasDefinerProps = { - gasPriceMin?: BigNumber; -}; - -export type SubmitterProps = GasDefinerProps & { - network: CurrencyTypes.EvmChainName; - signer: Signer; - logger?: LogTypes.ILogger; -}; - -type StorageProps = SubmitterProps & { - ipfsStorage: StorageTypes.IIpfsStorage; - blockConfirmations?: number; -}; - -export type StorageEventEmitter = TypedEmitter<{ - confirmed: (receipt: ContractReceipt) => void; - error: (error: unknown) => void; -}>; - -export class EthereumStorageEthers implements StorageTypes.IStorageWrite { - private readonly logger: LogTypes.ILogger; - private readonly ipfsStorage: StorageTypes.IIpfsStorage; - - private readonly network: CurrencyTypes.EvmChainName; - private readonly txSubmitter: EthereumTransactionSubmitter; - private readonly blockConfirmations: number | undefined; - - constructor({ - network, - signer, - ipfsStorage, - logger, - gasPriceMin, - blockConfirmations, - }: StorageProps) { - this.logger = logger || new SimpleLogger(); - this.ipfsStorage = ipfsStorage; - this.network = network; - this.txSubmitter = new EthereumTransactionSubmitter({ network, signer, logger, gasPriceMin }); - this.blockConfirmations = blockConfirmations; - } - - async initialize(): Promise { - await this.ipfsStorage.initialize(); - await this.txSubmitter.initialize(); - this.logger.debug(`${EthereumStorageEthers.name} storage initialized`); - } - - async append(content: string): Promise { - const { ipfsHash, ipfsSize } = await this.ipfsStorage.ipfsAdd(content); - - const tx = await this.txSubmitter.submit(ipfsHash, ipfsSize); - - const eventEmitter = new EventEmitter() as StorageEventEmitter; - const result: StorageTypes.IEntry = { - id: ipfsHash, - content, - meta: { - ipfs: { size: ipfsSize }, - local: { location: ipfsHash }, - ethereum: { - nonce: tx.nonce, - transactionHash: tx.hash, - blockConfirmation: tx.confirmations, - blockNumber: Number(tx.blockNumber), - // wrong value, but this metadata will not be used, as it's in Pending state - blockTimestamp: -1, - networkName: this.network, - smartContractAddress: this.txSubmitter.hashSubmitterAddress, - }, - state: StorageTypes.ContentState.PENDING, - storageType: StorageTypes.StorageSystemType.LOCAL, - timestamp: getCurrentTimestampInSecond(), - }, - }; - - this.logger.debug(`TX ${tx.hash} submitted, waiting for confirmation...`); - - void tx - .wait(this.blockConfirmations || getDefaultEthereumBlockConfirmations()) - .then((receipt: providers.TransactionReceipt) => { - this.logger.debug( - `TX ${receipt.transactionHash} confirmed at block ${receipt.blockNumber}`, - ); - eventEmitter.emit('confirmed', receipt); - }) - .catch((e: Error) => eventEmitter.emit('error', e)); - - return Object.assign(eventEmitter, result); - } - - public async _getStatus(): Promise { - const ipfs = await this.ipfsStorage.getConfig(); - const { address, creationBlockNumber } = requestHashSubmitterArtifact.getDeploymentInformation( - this.network, - ); - - return { - ethereum: { - networkName: this.network, - hashSubmitterAddress: address, - creationBlockNumberHashStorage: creationBlockNumber, - }, - ipfs, - }; - } -} diff --git a/packages/ethereum-storage/src/ethereum-storage.ts b/packages/ethereum-storage/src/ethereum-storage.ts index 52180f5bf5..8044c02acd 100644 --- a/packages/ethereum-storage/src/ethereum-storage.ts +++ b/packages/ethereum-storage/src/ethereum-storage.ts @@ -1,419 +1,87 @@ -import { LogTypes, StorageTypes } from '@requestnetwork/types'; -import * as 
Bluebird from 'bluebird'; import { EventEmitter } from 'events'; -import { getMaxConcurrency } from './config'; - -import ethereumEntriesToIpfsContent from './ethereum-entries-to-ipfs-content'; -import EthereumMetadataCache from './ethereum-metadata-cache'; -import IgnoredDataIds from './ignored-dataIds'; -import SmartContractManager from './smart-contract-manager'; - -import * as Keyv from 'keyv'; -import { BigNumber } from 'ethers'; +import { providers } from 'ethers'; +import { LogTypes, StorageTypes } from '@requestnetwork/types'; import { getCurrentTimestampInSecond, SimpleLogger } from '@requestnetwork/utils'; +import { getDefaultEthereumBlockConfirmations } from './config'; -// time to wait before considering the web3 provider is not reachable -const WEB3_PROVIDER_TIMEOUT = 10000; - -/** - * EthereumStorage - * @notice Manages storage layer of the Request Network Protocol v2 - */ -export class EthereumStorage implements StorageTypes.IStorage { - /** - * Manager for the storage smart contract - * This attribute is left public for mocking purpose to facilitate tests on the module - */ - public smartContractManager: SmartContractManager; - - /** - * Storage for IPFS - */ - private ipfsStorage: StorageTypes.IIpfsStorage; - - /** - * Cache to store Ethereum metadata - */ - public ethereumMetadataCache: EthereumMetadataCache; - - /** Data ids ignored by the node */ - public ignoredDataIds: IgnoredDataIds; +type StorageProps = { + ipfsStorage: StorageTypes.IIpfsStorage; + blockConfirmations?: number; + txSubmitter: StorageTypes.ITransactionSubmitter; + logger?: LogTypes.ILogger; +}; - /** - * Maximum number of concurrent calls - */ - public maxConcurrency: number; +export class EthereumStorage implements StorageTypes.IStorageWrite { + private readonly logger: LogTypes.ILogger; + private readonly ipfsStorage: StorageTypes.IIpfsStorage; - /** - * Timestamp of the dataId not mined on ethereum yet - */ - private buffer: { [id: string]: number | undefined }; + private readonly txSubmitter: StorageTypes.ITransactionSubmitter; + private readonly blockConfirmations: number | undefined; - /** - * Url where can be reached the data buffered by this storage - */ - private externalBufferUrl: string; - - /** - * Logger instance - */ - private logger: LogTypes.ILogger; - - private isInitialized = false; - - /** - * Constructor - * @param ipfsGatewayConnection Information structure to connect to the ipfs gateway - * @param web3Connection Information structure to connect to the Ethereum network - * @param [options.getLastBlockNumberDelay] the minimum delay to wait between fetches of lastBlockNumber - * @param metadataStore a Keyv store to persist the metadata in ethereumMetadataCache - */ - public constructor( - externalBufferUrl: string, - ipfsStorage: StorageTypes.IIpfsStorage, - web3Connection?: StorageTypes.IWeb3Connection, - { - getLastBlockNumberDelay, - logger, - maxConcurrency, - maxRetries, - retryDelay, - gasPriceMin, - }: { - getLastBlockNumberDelay?: number; - logger?: LogTypes.ILogger; - maxConcurrency?: number; - maxRetries?: number; - retryDelay?: number; - gasPriceMin?: BigNumber; - } = {}, - metadataStore?: Keyv.Store, - ) { - this.maxConcurrency = maxConcurrency || getMaxConcurrency(); + constructor({ ipfsStorage, logger, blockConfirmations, txSubmitter }: StorageProps) { this.logger = logger || new SimpleLogger(); this.ipfsStorage = ipfsStorage; - this.smartContractManager = new SmartContractManager(web3Connection, { - getLastBlockNumberDelay, - logger: this.logger, - maxConcurrency: 
this.maxConcurrency, - maxRetries, - retryDelay, - gasPriceMin, - }); - this.ethereumMetadataCache = new EthereumMetadataCache(metadataStore); - this.ignoredDataIds = new IgnoredDataIds(metadataStore); - this.buffer = {}; - this.externalBufferUrl = externalBufferUrl; + this.txSubmitter = txSubmitter; + this.blockConfirmations = blockConfirmations; } - /** - * Function to initialize the storage - * Checks the connection with ipfs - * Checks the connection with Ethereum - * Adds the known IPFS node (ipfs swarm connect) - */ - public async initialize(): Promise { - if (this.isInitialized) { - throw new Error('ethereum-storage is already initialized'); - } - - // check ethereum node connection - will throw if the ethereum node is not reachable - this.logger.info('Checking ethereum node connection', ['ethereum', 'sanity']); - try { - await this.smartContractManager.checkWeb3ProviderConnection(WEB3_PROVIDER_TIMEOUT); - } catch (error) { - throw Error(`Ethereum node is not accessible: ${error}`); - } - - // check if contracts are deployed on ethereum - this.logger.info('Checking ethereum node contract deployment', ['ethereum', 'sanity']); - try { - await this.smartContractManager.checkContracts(); - } catch (error) { - throw Error(error); - } - - // Check IPFS node state - will throw in case of error + async initialize(): Promise { await this.ipfsStorage.initialize(); - - this.isInitialized = true; + await this.txSubmitter.initialize(); + this.logger.debug(`${EthereumStorage.name} storage initialized`); } - /** - * Update Ethereum network connection information and reconnect - * Missing value are filled with default config value - * @param web3Connection Information structure to connect to the Ethereum network - */ - public async updateEthereumNetwork(web3Connection: StorageTypes.IWeb3Connection): Promise { - this.smartContractManager = new SmartContractManager(web3Connection); - // check ethereum node connection - will throw if the ethereum node is not reachable - - try { - await this.smartContractManager.checkWeb3ProviderConnection(WEB3_PROVIDER_TIMEOUT); - } catch (error) { - throw Error(`Ethereum node is not accessible: ${error}`); - } - } - - /** - * Append content into the storage: add the content to ipfs and the hash on Ethereum - * @param content Content to add into the storage - * @returns Promise resolving id used to retrieve the content - */ - public async append(content: string): Promise { - if (!this.isInitialized) { - throw new Error('Ethereum storage must be initialized'); - } - + async append(content: string): Promise { const { ipfsHash, ipfsSize } = await this.ipfsStorage.ipfsAdd(content); - const timestamp = getCurrentTimestampInSecond(); - const result: StorageTypes.IAppendResult = Object.assign(new EventEmitter(), { - content, + const tx = await this.txSubmitter.submit(ipfsHash, ipfsSize); + + const eventEmitter = new EventEmitter() as StorageTypes.AppendResultEmitter; + const result: StorageTypes.IEntry = { id: ipfsHash, + content, meta: { ipfs: { size: ipfsSize }, - local: { location: this.externalBufferUrl }, + local: { location: ipfsHash }, + ethereum: { + nonce: tx.nonce, + transactionHash: tx.hash, + blockConfirmation: tx.confirmations, + blockNumber: Number(tx.blockNumber), + // wrong value, but this metadata will not be used, as it's in Pending state + blockTimestamp: -1, + networkName: this.txSubmitter.network || '', + smartContractAddress: this.txSubmitter.hashSubmitterAddress || '', + }, state: StorageTypes.ContentState.PENDING, storageType: 
StorageTypes.StorageSystemType.LOCAL, - timestamp, + timestamp: getCurrentTimestampInSecond(), }, - }); - // store in the buffer the timestamp - this.buffer[ipfsHash] = timestamp; - - const feesParameters: StorageTypes.IFeesParameters = { contentSize: ipfsSize }; - - this.smartContractManager - .addHashAndSizeToEthereum(ipfsHash, feesParameters) - .then(async (ethereumMetadata: StorageTypes.IEthereumMetadata) => { - const resultAfterBroadcast: StorageTypes.IEntry = { - content, - id: ipfsHash, - meta: { - ethereum: ethereumMetadata, - ipfs: { size: ipfsSize }, - state: StorageTypes.ContentState.CONFIRMED, - storageType: StorageTypes.StorageSystemType.ETHEREUM_IPFS, - timestamp: ethereumMetadata.blockTimestamp, - }, - }; - // Save the metadata of the new ipfsHash into the Ethereum metadata cache - await this.ethereumMetadataCache.saveDataIdMeta(ipfsHash, ethereumMetadata); - - result.emit('confirmed', resultAfterBroadcast); - }) - .catch((error) => { - result.emit('error', error); - }); - - return result; - } - - /** - * Read content from the storage - * @param Id Id used to retrieve content - * @returns Promise resolving content from id - */ - public async read(id: string): Promise { - if (!this.isInitialized) { - throw new Error('Ethereum storage must be initialized'); - } - if (!id) { - throw Error('No id provided'); - } - - // Get Ethereum metadata - let bufferTimestamp: number | undefined; - - // Check if the data as been added on ethereum - const ethereumMetadata = await this.ethereumMetadataCache.getDataIdMeta(id); - - // Clear buffer if needed - if (!ethereumMetadata) { - bufferTimestamp = this.buffer[id]; - if (!bufferTimestamp) { - throw Error('No content found from this id'); - } - } else { - delete this.buffer[id]; - } - - const ipfsObject = await this.ipfsStorage.read(id); - - const meta = ethereumMetadata - ? { - ethereum: ethereumMetadata, - ipfs: { size: ipfsObject.ipfsSize }, - state: StorageTypes.ContentState.CONFIRMED, - storageType: StorageTypes.StorageSystemType.ETHEREUM_IPFS, - timestamp: ethereumMetadata.blockTimestamp, - } - : { - ipfs: { size: ipfsObject.ipfsSize }, - local: { location: this.externalBufferUrl }, - state: StorageTypes.ContentState.PENDING, - storageType: StorageTypes.StorageSystemType.LOCAL, - timestamp: bufferTimestamp || 0, - }; - - return { - content: ipfsObject.content, - id, - meta, }; - } - /** - * Read a list of content from the storage - * - * @param dataIds A list of dataIds used to retrieve the content - * @returns Promise resolving the list of contents - */ - public async readMany(dataIds: string[]): Promise { - const totalCount = dataIds.length; - // Concurrently get all the content from the id's in the parameters - return Bluebird.map( - dataIds, - async (dataId, currentIndex) => { - const startTime = Date.now(); - const data = await this.read(dataId); + this.logger.debug(`TX ${tx.hash} submitted, waiting for confirmation...`); + + void tx + .wait(this.blockConfirmations || getDefaultEthereumBlockConfirmations()) + .then((receipt: providers.TransactionReceipt) => { this.logger.debug( - `[${currentIndex + 1}/${totalCount}] read ${dataId}. 
Took ${Date.now() - startTime} ms`, - ['read'], + `TX ${receipt.transactionHash} confirmed at block ${receipt.blockNumber}`, ); - return data; - }, - { - concurrency: this.maxConcurrency, - }, - ); - } - - /** - * Get all data stored on the storage - * - * @param options timestamp boundaries for the data retrieval - * @returns Promise resolving stored data - */ - public async getData( - options?: StorageTypes.ITimestampBoundaries, - ): Promise { - if (!this.isInitialized) { - throw new Error('Ethereum storage must be initialized'); - } - this.logger.info('Fetching dataIds from Ethereum', ['ethereum']); - const { ethereumEntries, lastTimestamp } = - await this.smartContractManager.getEntriesFromEthereum(options); - - // If no hash was found on ethereum, we return an empty list - if (!ethereumEntries.length) { - this.logger.info('No new data found.', ['ethereum']); - return { - entries: [], - lastTimestamp, - }; - } - - this.logger.debug('Fetching data from IPFS and checking correctness', ['ipfs']); - - const entries = await ethereumEntriesToIpfsContent( - ethereumEntries, - this.ipfsStorage, - this.ignoredDataIds, - this.logger, - this.maxConcurrency, - ); - - const ids = entries.map((entry) => entry.id) || []; - // Pin data asynchronously - // eslint-disable-next-line @typescript-eslint/no-floating-promises - this.ipfsStorage.pinDataToIPFS(ids); - - // Save existing ethereum metadata to the ethereum metadata cache - for (const entry of entries) { - const ethereumMetadata = entry.meta.ethereum; - if (ethereumMetadata) { - // PROT-504: The saving of dataId's metadata should be encapsulated when retrieving dataId inside smart contract (getPastEvents) - await this.ethereumMetadataCache.saveDataIdMeta(entry.id, ethereumMetadata); - } - } - - return { - entries, - lastTimestamp, - }; - } - - /** - * Try to get some previous ignored data - * - * @param options timestamp boundaries for the data retrieval - * @returns Promise resolving stored data - */ - public async getIgnoredData(): Promise { - if (!this.isInitialized) { - throw new Error('Ethereum storage must be initialized'); - } - this.logger.info('Getting some previous ignored dataIds', ['ethereum']); - - const ethereumEntries: StorageTypes.IEthereumEntry[] = - await this.ignoredDataIds.getDataIdsToRetry(); - - // If no hash was found on ethereum, we return an empty list - if (!ethereumEntries.length) { - this.logger.info('No new data found.', ['ethereum']); - return []; - } - - this.logger.debug('Fetching data from IPFS and checking correctness', ['ipfs']); - - const entries = await ethereumEntriesToIpfsContent( - ethereumEntries, - this.ipfsStorage, - this.ignoredDataIds, - this.logger, - this.maxConcurrency, - ); - - const ids = entries.map((entry) => entry.id) || []; - // Pin data asynchronously - void this.ipfsStorage.pinDataToIPFS(ids); - - // Save existing ethereum metadata to the ethereum metadata cache - for (const entry of entries) { - const ethereumMetadata = entry.meta.ethereum; - if (ethereumMetadata) { - // PROT-504: The saving of dataId's metadata should be encapsulated when retrieving dataId inside smart contract (getPastEvents) - await this.ethereumMetadataCache.saveDataIdMeta(entry.id, ethereumMetadata); - } - } + eventEmitter.emit('confirmed', result); + }) + .catch((e: Error) => eventEmitter.emit('error', e)); - return entries; + return Object.assign(eventEmitter, result); } - /** - * Get Information on the dataIds retrieved and ignored by the ethereum storage - * - * @param detailed if true get the list of the files hash 
- * @returns Promise resolving object with dataIds retrieved and ignored - */ - public async _getStatus(detailed = false): Promise { - const dataIds = await this.ethereumMetadataCache.getDataIds(); - const dataIdsWithReason = await this.ignoredDataIds.getDataIdsWithReasons(); - - const ethereum = this.smartContractManager.getConfig(); + public async _getStatus(): Promise { const ipfs = await this.ipfsStorage.getConfig(); return { - dataIds: { - count: dataIds.length, - values: detailed ? dataIds : undefined, - }, - ethereum, - ignoredDataIds: { - count: Object.keys(dataIdsWithReason).length, - values: detailed ? dataIdsWithReason : undefined, + ethereum: { + networkName: this.txSubmitter.network, + hashSubmitterAddress: this.txSubmitter.hashSubmitterAddress, + creationBlockNumberHashStorage: this.txSubmitter.creationBlockNumber, }, ipfs, }; diff --git a/packages/ethereum-storage/src/ethereum-tx-submitter.ts b/packages/ethereum-storage/src/ethereum-tx-submitter.ts index 0ea970dfdb..43de10bb82 100644 --- a/packages/ethereum-storage/src/ethereum-tx-submitter.ts +++ b/packages/ethereum-storage/src/ethereum-tx-submitter.ts @@ -1,15 +1,21 @@ -import { ContractTransaction, providers, utils } from 'ethers'; -import { LogTypes } from '@requestnetwork/types'; +import { BigNumber, ContractTransaction, providers, Signer, utils } from 'ethers'; +import { CurrencyTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; import { requestHashSubmitterArtifact } from '@requestnetwork/smart-contracts'; import { RequestOpenHashSubmitter } from '@requestnetwork/smart-contracts/types'; -import { SubmitterProps } from './ethereum-storage-ethers'; import { GasFeeDefiner } from './gas-fee-definer'; import { SimpleLogger, isEip1559Supported } from '@requestnetwork/utils'; +export type SubmitterProps = { + signer: Signer; + gasPriceMin?: BigNumber; + network: CurrencyTypes.EvmChainName; + logger?: LogTypes.ILogger; +}; + /** * Handles the submission of a hash on the request HashSubmitter contract */ -export class EthereumTransactionSubmitter { +export class EthereumTransactionSubmitter implements StorageTypes.ITransactionSubmitter { private readonly logger: LogTypes.ILogger; private enableEip1559 = true; private readonly hashSubmitter: RequestOpenHashSubmitter; diff --git a/packages/ethereum-storage/src/ethereum-utils.ts b/packages/ethereum-storage/src/ethereum-utils.ts index fae9c21aab..b82ee26c11 100644 --- a/packages/ethereum-storage/src/ethereum-utils.ts +++ b/packages/ethereum-storage/src/ethereum-utils.ts @@ -1,8 +1,7 @@ -import { CurrencyTypes, StorageTypes } from '@requestnetwork/types'; +import { StorageTypes } from '@requestnetwork/types'; import * as config from './config'; import { BigNumber } from 'ethers'; -import { EvmChains } from '@requestnetwork/currency'; /** * Collection of utils functions related to Ethereum Storage @@ -16,20 +15,18 @@ import { EvmChains } from '@requestnetwork/currency'; */ export const getEthereumStorageNetworkNameFromId = ( networkId: StorageTypes.EthereumNetwork, -): CurrencyTypes.EvmChainName => { - const chainName = EvmChains.getChainName(networkId); +): string => { + const chainName = StorageTypes.EthereumNetwork[networkId]; if (!chainName) { // this should never happen throw new Error(`Unsupported storage chain: ${networkId}`); } - return chainName; + return chainName.toLowerCase(); }; -export const getEthereumStorageNetworkIdFromName = ( - name: CurrencyTypes.EvmChainName, -): number | undefined => { - const networkId = EvmChains.getChainId(name); - return 
Object.values(StorageTypes.EthereumNetwork).includes(networkId) ? networkId : undefined; +export const getEthereumStorageNetworkIdFromName = (name: string): number | undefined => { + const networkName = name.toUpperCase() as keyof typeof StorageTypes.EthereumNetwork; + return StorageTypes.EthereumNetwork[networkName]; }; /** diff --git a/packages/ethereum-storage/src/gas-fee-definer.ts b/packages/ethereum-storage/src/gas-fee-definer.ts index ae275e65b7..7cdf857b4f 100644 --- a/packages/ethereum-storage/src/gas-fee-definer.ts +++ b/packages/ethereum-storage/src/gas-fee-definer.ts @@ -2,7 +2,6 @@ import { suggestFees } from '@rainbow-me/fee-suggestions'; import { BigNumber, providers, constants } from 'ethers'; import { normalizeGasFees } from '@requestnetwork/utils'; import { FeeTypes, LogTypes } from '@requestnetwork/types'; -import { GasDefinerProps } from './ethereum-storage-ethers'; export class GasFeeDefiner { private readonly logger: LogTypes.ILogger; @@ -13,7 +12,11 @@ export class GasFeeDefiner { logger, provider, gasPriceMin, - }: GasDefinerProps & { logger: LogTypes.ILogger; provider: providers.JsonRpcProvider }) { + }: { + logger: LogTypes.ILogger; + gasPriceMin?: BigNumber; + provider: providers.JsonRpcProvider; + }) { this.logger = logger; this.provider = provider; this.gasPriceMin = gasPriceMin || constants.Zero; diff --git a/packages/ethereum-storage/src/gas-price-definer.ts b/packages/ethereum-storage/src/gas-price-definer.ts index bf6c1209a1..2a9c20ae5b 100644 --- a/packages/ethereum-storage/src/gas-price-definer.ts +++ b/packages/ethereum-storage/src/gas-price-definer.ts @@ -3,11 +3,10 @@ import EtherchainProvider from './gas-price-providers/etherchain-provider'; import EtherscanProvider from './gas-price-providers/etherscan-provider'; import EthGasStationProvider from './gas-price-providers/ethgasstation-provider'; -import { CurrencyTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; +import { LogTypes, StorageTypes } from '@requestnetwork/types'; import { BigNumber } from 'ethers'; import XDaiFixedProvider from './gas-price-providers/xdai-fixed-provider'; -import { GasDefinerProps } from './ethereum-storage-ethers'; import { SimpleLogger } from '@requestnetwork/utils'; import { getEthereumStorageNetworkIdFromName } from './ethereum-utils'; @@ -47,7 +46,7 @@ export class GasPriceDefiner { public constructor({ logger, gasPriceMin, - }: GasDefinerProps & { logger?: LogTypes.ILogger } = {}) { + }: { gasPriceMin?: BigNumber; logger?: LogTypes.ILogger } = {}) { this.logger = logger || new SimpleLogger(); this.gasPriceMin = gasPriceMin; } @@ -61,7 +60,7 @@ export class GasPriceDefiner { */ public async getGasPrice( type: StorageTypes.GasPriceType, - networkName: CurrencyTypes.EvmChainName, + networkName: string, ): Promise { const network = getEthereumStorageNetworkIdFromName(networkName); if (network) { diff --git a/packages/ethereum-storage/src/ignored-dataIds.ts b/packages/ethereum-storage/src/ignored-dataIds.ts deleted file mode 100644 index aa37f1216b..0000000000 --- a/packages/ethereum-storage/src/ignored-dataIds.ts +++ /dev/null @@ -1,193 +0,0 @@ -import * as Keyv from 'keyv'; - -import { StorageTypes } from '@requestnetwork/types'; - -/** - * Interval time between iteration for the retry - */ -const INTERVAL_RETRY_MS = 60000; // every minute - -/** - * Allows to save and retrieve the dataIds ignored with the reason - */ -export default class IgnoredDataIds { - /** - * Store the reason we ignored data ids in a dictionary - */ - public ignoredDataIds: Keyv; - - 
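Editor's note on the reworked network helpers above: `StorageTypes.EthereumNetwork` is a numeric TypeScript enum, so indexing it by chain id yields the member name (reverse mapping) and indexing by the upper-cased name yields the id. A self-contained sketch with an illustrative enum (not the real `StorageTypes` definition):

```ts
// Illustrative stand-in for StorageTypes.EthereumNetwork (numeric enums get a reverse mapping).
enum EthereumNetwork {
  PRIVATE = 0,
  MAINNET = 1,
  XDAI = 100,
}

// id -> lower-cased name, mirroring getEthereumStorageNetworkNameFromId.
function networkNameFromId(networkId: EthereumNetwork): string {
  const chainName = EthereumNetwork[networkId];
  if (!chainName) {
    throw new Error(`Unsupported storage chain: ${networkId}`);
  }
  return chainName.toLowerCase();
}

// name -> id, mirroring getEthereumStorageNetworkIdFromName.
function networkIdFromName(name: string): number | undefined {
  const key = name.toUpperCase() as keyof typeof EthereumNetwork;
  return EthereumNetwork[key];
}

console.log(networkNameFromId(EthereumNetwork.XDAI)); // "xdai"
console.log(networkIdFromName('mainnet')); // 1
console.log(networkIdFromName('arbitrum')); // undefined
```

As before, the name lookup returns `undefined` for unknown chains rather than throwing, while the id lookup throws on an unsupported id.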
/** - * as KeyV don't allow to get the list of the keys, we need to store it manually - * TODO (PROT-1189): replace KeyV by a database service - */ - public listIgnoredDataIds: Keyv; - - /** - * Constructor - * @param store a Keyv store to persist the metadata - */ - public constructor(store?: Keyv.Store) { - this.ignoredDataIds = new Keyv({ - namespace: 'dataIdIgnored', - store, - }); - - this.listIgnoredDataIds = new Keyv({ - namespace: 'listIgnoredDataIds', - store, - }); - } - - /** - * Saves in the cache the reason to ignore the dataId - * @param dataId dataId - * @param reason reason we ignored the dataId - * @param toRetry will be retry later if true - */ - public async save(entry: StorageTypes.IEthereumEntry): Promise { - const previous = await this.ignoredDataIds.get(entry.hash); - - if (!previous) { - // add the dataId id if new in the store - await this.ignoredDataIds.set(entry.hash, { - entry, - iteration: 1, - lastTryTimestamp: Date.now(), - toRetry: entry.error?.type === StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }); - // update the list - await this.addToDataIdsList(entry.hash); - } else { - // if already in the store - if (previous.toRetry) { - // update it only if it was mean to be retry - await this.ignoredDataIds.set(entry.hash, { - entry, - iteration: (previous.iteration as number) + 1, - lastTryTimestamp: Date.now(), - toRetry: entry.error?.type === StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }); - } - } - } - - /** - * Removes the ignored dataId from the cache - * @param dataId dataId - */ - public async delete(dataId: string): Promise { - await this.ignoredDataIds.delete(dataId); - // update the list - await this.deleteFromDataIdsList(dataId); - } - - /** - * Retrieve reason from cache - * @param dataId dataId to get Ethereum metadata from - * @returns the reason or null - */ - public async getReason(dataId: string): Promise { - return (await this.ignoredDataIds.get(dataId))?.entry.error?.message; - } - - /** - * Get the list of data ids stored - * - * @returns the list of data ids stored - */ - public async getDataIds(): Promise { - const listDataId: string[] | undefined = await this.listIgnoredDataIds.get('list'); - return listDataId || []; - } - - /* - * Get the list of data ids that should be retry - * - * @returns the list of data ids - */ - public async getDataIdsToRetry(): Promise { - const listDataId: string[] | undefined = await this.listIgnoredDataIds.get('list'); - - const result: StorageTypes.IEthereumEntry[] = []; - - if (listDataId) { - for (const dataId of Array.from(listDataId)) { - const data: StorageTypes.IIgnoredDataId | undefined = await this.ignoredDataIds.get(dataId); - if (data && this.shouldRetry(data)) { - result.push(data.entry); - } - } - } - - return result; - } - - /** - * Get the list of data ids stored with reason - * - * @returns the list of data ids stored with reason - */ - public async getDataIdsWithReasons(): Promise { - const listDataId: string[] | undefined = await this.listIgnoredDataIds.get('list'); - - if (!listDataId) { - return {}; - } - const result: any = {}; - - for (const dataId of Array.from(listDataId)) { - result[dataId] = await this.ignoredDataIds.get(dataId); - } - - return result; - } - - /** - * Check if it is the time to retry the entry - * @param entry to check - * @returns true if it is time to retry - */ - private shouldRetry(entry: StorageTypes.IIgnoredDataId): boolean { - // The entry should be retry periodically in an exponential interval of time - // Every time we retry to exponentially 
increase the time of the next try - return ( - entry.toRetry && - (entry.lastTryTimestamp as number) + - Math.floor(Math.exp(entry.iteration)) * INTERVAL_RETRY_MS <= - Date.now() - ); - } - - /** - * Update the list of data ids stored with reason - * - * @param dataId data id to add to the list - * @returns - */ - private async addToDataIdsList(dataId: string): Promise { - let listDataIds: string[] | undefined = await this.listIgnoredDataIds.get('list'); - if (!listDataIds) { - listDataIds = []; - } - // update the list only if the dataId is not already stored - if (!listDataIds.includes(dataId)) { - listDataIds.push(dataId); - await this.listIgnoredDataIds.set('list', listDataIds); - } - } - - /** - * Update the list of data ids stored with reason - * - * @param dataId data id to add to the list - * @returns - */ - private async deleteFromDataIdsList(dataId: string): Promise { - let listDataIds: string[] | undefined = await this.listIgnoredDataIds.get('list'); - if (!listDataIds) { - return; - } - listDataIds = listDataIds.filter((e) => e !== dataId); - await this.listIgnoredDataIds.set('list', listDataIds); - } -} diff --git a/packages/ethereum-storage/src/index.ts b/packages/ethereum-storage/src/index.ts index a80514931c..12e3633945 100644 --- a/packages/ethereum-storage/src/index.ts +++ b/packages/ethereum-storage/src/index.ts @@ -1,9 +1,8 @@ -export { EthereumStorage } from './ethereum-storage'; export { getEthereumStorageNetworkNameFromId, getEthereumStorageNetworkIdFromName, } from './ethereum-utils'; -export { EthereumStorageEthers } from './ethereum-storage-ethers'; +export { EthereumStorage } from './ethereum-storage'; export { EthereumTransactionSubmitter } from './ethereum-tx-submitter'; export { GasPriceDefiner } from './gas-price-definer'; export { GasFeeDefiner } from './gas-fee-definer'; diff --git a/packages/ethereum-storage/src/ipfs-connection-error.ts b/packages/ethereum-storage/src/ipfs-connection-error.ts deleted file mode 100644 index ae6de7ecc9..0000000000 --- a/packages/ethereum-storage/src/ipfs-connection-error.ts +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Error class used to differentiate error on which we want to retry or not to read ipfs hashes - * We should retry to read an ipfs hash if the error come from a connection issue - */ -// eslint-disable-next-line max-classes-per-file -export default class IpfsConnectionError extends Error {} diff --git a/packages/ethereum-storage/src/ipfs-manager.ts b/packages/ethereum-storage/src/ipfs-manager.ts index a4d2cd8484..8bb7a9a41a 100644 --- a/packages/ethereum-storage/src/ipfs-manager.ts +++ b/packages/ethereum-storage/src/ipfs-manager.ts @@ -3,7 +3,7 @@ import * as qs from 'qs'; import axios, { AxiosError, AxiosInstance, AxiosRequestConfig } from 'axios'; import { LogTypes, StorageTypes } from '@requestnetwork/types'; -import { getDefaultIpfs, getIpfsErrorHandlingConfig } from './config'; +import { getDefaultIpfsTimeout, getDefaultIpfsUrl, getIpfsErrorHandlingConfig } from './config'; import * as FormData from 'form-data'; import { retry, SimpleLogger } from '@requestnetwork/utils'; @@ -19,13 +19,18 @@ type IpfsPaths = { 'bootstrap/list': { Peers: string[] }; }; +export type IpfsOptions = { + ipfsUrl?: string; + ipfsTimeout?: number; + ipfsErrorHandling?: StorageTypes.IIpfsErrorHandlingConfiguration; + logger?: LogTypes.ILogger; +}; /** * Manages Ipfs communication used as storage */ export default class IpfsManager { private readonly logger: LogTypes.ILogger; private readonly axiosInstance: AxiosInstance; - private 
readonly ipfsGatewayConnection: StorageTypes.IIpfsGatewayConnection; private readonly ipfsErrorHandling: StorageTypes.IIpfsErrorHandlingConfiguration; public readonly BASE_PATH: string = 'api/v0'; @@ -36,18 +41,15 @@ export default class IpfsManager { * If no values are provided default values from config are used * Private network is used for default values */ - public constructor(options?: { - ipfsGatewayConnection?: StorageTypes.IIpfsGatewayConnection; - ipfsErrorHandling?: StorageTypes.IIpfsErrorHandlingConfiguration; - logger?: LogTypes.ILogger; - }) { - this.ipfsGatewayConnection = options?.ipfsGatewayConnection || getDefaultIpfs(); + public constructor(options?: IpfsOptions) { + const ipfsUrl = options?.ipfsUrl || getDefaultIpfsUrl(); + const ipfsTimeout = options?.ipfsTimeout || getDefaultIpfsTimeout(); this.ipfsErrorHandling = options?.ipfsErrorHandling || getIpfsErrorHandlingConfig(); this.logger = options?.logger || new SimpleLogger(); this.axiosInstance = axios.create({ - baseURL: `${this.ipfsGatewayConnection.protocol}://${this.ipfsGatewayConnection.host}:${this.ipfsGatewayConnection.port}/${this.BASE_PATH}/`, - timeout: this.ipfsGatewayConnection.timeout, + baseURL: `${ipfsUrl}/${this.BASE_PATH}/`, + timeout: ipfsTimeout, paramsSerializer: function (params) { return qs.stringify(params, { arrayFormat: 'repeat' }); }, @@ -219,12 +221,10 @@ export default class IpfsManager { public async getConfig(): Promise { return { delayBetweenRetries: this.ipfsErrorHandling.delayBetweenRetries, - host: this.ipfsGatewayConnection.host, + url: this.axiosInstance.defaults.baseURL || '', + timeout: this.axiosInstance.defaults.timeout, id: await this.getIpfsNodeId(), maxRetries: this.ipfsErrorHandling.maxRetries, - port: this.ipfsGatewayConnection.port, - protocol: this.ipfsGatewayConnection.protocol, - timeout: this.ipfsGatewayConnection.timeout, }; } diff --git a/packages/ethereum-storage/src/ipfs-storage.ts b/packages/ethereum-storage/src/ipfs-storage.ts index 6df4a11b1a..8ad9c7a64b 100644 --- a/packages/ethereum-storage/src/ipfs-storage.ts +++ b/packages/ethereum-storage/src/ipfs-storage.ts @@ -1,21 +1,16 @@ import { LogTypes, StorageTypes } from '@requestnetwork/types'; - -import { getIpfsExpectedBootstrapNodes, getPinRequestConfig } from './config'; -import IpfsManager from './ipfs-manager'; import { SimpleLogger } from '@requestnetwork/utils'; -export type IpfsStorageProps = { - logger?: LogTypes.ILogger; - ipfsGatewayConnection?: StorageTypes.IIpfsGatewayConnection; -}; +import { getIpfsExpectedBootstrapNodes } from './config'; +import IpfsManager, { IpfsOptions } from './ipfs-manager'; export class IpfsStorage implements StorageTypes.IIpfsStorage { private ipfsManager: IpfsManager; private logger: LogTypes.ILogger; - constructor({ ipfsGatewayConnection, logger }: IpfsStorageProps) { - this.ipfsManager = new IpfsManager({ ipfsGatewayConnection, logger }); - this.logger = logger || new SimpleLogger(); + constructor(options: IpfsOptions = {}) { + this.ipfsManager = new IpfsManager(options); + this.logger = options.logger || new SimpleLogger(); } public async initialize(): Promise { @@ -35,11 +30,16 @@ export class IpfsStorage implements StorageTypes.IIpfsStorage { } // Add content to IPFS and get the hash back - let ipfsHash; + let ipfsHash: string; try { ipfsHash = await this.ipfsManager.add(data); } catch (error) { - throw Error(`Ipfs add request error: ${error}`); + throw new Error(`Ipfs add request error: ${error}`); + } + try { + await this.ipfsManager.pin([ipfsHash]); + } catch 
(error) { + throw new Error(`Ipfs pin request error: ${error}`); } const ipfsSize = await this.getSize(ipfsHash); @@ -65,35 +65,6 @@ export class IpfsStorage implements StorageTypes.IIpfsStorage { return ipfsSize; } - /** - * Pin an array of IPFS hashes - * - * @param hashes An array of IPFS hashes to pin - */ - public async pinDataToIPFS( - hashes: string[], - { - delayBetweenCalls, - maxSize, - timeout, - }: StorageTypes.IPinRequestConfiguration = getPinRequestConfig(), - ): Promise { - // How many slices we need from the total list of hashes to be under pinRequestMaxSize - const slices = Math.ceil(hashes.length / maxSize); - - // Iterate over the hashes list, slicing it at pinRequestMaxSize sizes and pinning it - for (let i = 0; i < slices; i++) { - await new Promise((res): NodeJS.Timeout => setTimeout(() => res(), delayBetweenCalls)); - const slice = hashes.slice(i * maxSize, (i + 1) * maxSize); - try { - await this.ipfsManager.pin(slice, timeout); - this.logger.debug(`Pinned ${slice.length} hashes to IPFS node.`); - } catch (error) { - this.logger.warn(`Failed pinning some hashes the IPFS node: ${error}`, ['ipfs']); - } - } - } - /** * Retrieve content from ipfs from its hash * @param hash Hash of the content @@ -111,7 +82,7 @@ export class IpfsStorage implements StorageTypes.IIpfsStorage { /** * Gets current configuration */ - public async getConfig(): Promise { + public async getConfig(): Promise { return this.ipfsManager.getConfig(); } diff --git a/packages/ethereum-storage/src/smart-contract-manager.ts b/packages/ethereum-storage/src/smart-contract-manager.ts deleted file mode 100644 index 10765f588f..0000000000 --- a/packages/ethereum-storage/src/smart-contract-manager.ts +++ /dev/null @@ -1,682 +0,0 @@ -import * as SmartContracts from '@requestnetwork/smart-contracts'; -import { CurrencyTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; -import * as Bluebird from 'bluebird'; -import * as config from './config'; -import EthereumBlocks from './ethereum-blocks'; -import { GasPriceDefiner } from './gas-price-definer'; -import { BigNumber } from 'ethers'; -import { - flatten2DimensionsArray, - retry, - SimpleLogger, - timeoutPromise, -} from '@requestnetwork/utils'; -import { getEthereumStorageNetworkNameFromId } from './ethereum-utils'; - -// eslint-disable-next-line @typescript-eslint/no-var-requires -const web3Eth = require('web3-eth'); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const web3Utils = require('web3-utils'); - -// Maximum number of attempt to create ethereum metadata when transaction to add hash and size to Ethereum is confirmed -// 23 is the number of call of the transaction's confirmation event function -// if higher the promise may block since the confirmation event function will not be called anymore -const CREATING_ETHEREUM_METADATA_MAX_ATTEMPTS = 23; - -// Regular expression to detect if the Web3 API returns "query returned more than XXX results" error -const MORE_THAN_XXX_RESULTS_REGEX = new RegExp('query returned more than [1-9][0-9]* results'); - -// String to match if the Web3 API throws "Transaction was not mined within XXX seconds" error -const TRANSACTION_POLLING_TIMEOUT = 'Transaction was not mined within'; - -const LENGTH_BYTES32_STRING = 64; - -/** - * Manages the smart contract used by the storage layer - * to store the hashes of the data on Ethereum - */ -export default class SmartContractManager { - public eth: any; - public requestHashStorage: any; - public requestHashSubmitter: any; - - /** - * Handles the block 
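Editor's note on the `IpfsStorage.ipfsAdd` change above: the batched background `pinDataToIPFS` helper is removed and content is now pinned immediately after it is added, with each step wrapped in its own error. A rough sketch of the resulting flow, against a hypothetical minimal `IpfsManager` surface (the real class exposes more):

```ts
// Hypothetical minimal surface of IpfsManager used by this sketch.
interface MinimalIpfsManager {
  add(data: string): Promise<string>; // returns the IPFS hash
  pin(hashes: string[]): Promise<void>; // pins the given hashes
  getContentLength(hash: string): Promise<number>;
}

// Add then pin in one call, wrapping failures so callers see which step broke.
async function addAndPin(
  ipfs: MinimalIpfsManager,
  data: string,
): Promise<{ hash: string; size: number }> {
  let hash: string;
  try {
    hash = await ipfs.add(data);
  } catch (error) {
    throw new Error(`Ipfs add request error: ${error}`);
  }
  try {
    await ipfs.pin([hash]);
  } catch (error) {
    throw new Error(`Ipfs pin request error: ${error}`);
  }
  const size = await ipfs.getContentLength(hash);
  return { hash, size };
}
```

The trade-off is that pinning failures now surface to the writer at submission time instead of being logged later by a background batch.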
numbers and blockTimestamp - */ - public ethereumBlocks: EthereumBlocks; - - /** - * Maximum number of concurrent calls - */ - public maxConcurrency: number; - - protected networkName: CurrencyTypes.EvmChainName = 'private'; - protected hashStorageAddress: string; - protected hashSubmitterAddress: string; - - // Block where the contract has been created - // This value is stored in config file for each network - // This value is used to optimize past event retrieval - private creationBlockNumberHashStorage: number; - - // Timeout threshold when connecting to Web3 provider - private timeout: number; - - /** - * Logger instance - */ - private logger: LogTypes.ILogger; - - /** - * Maximum number of retries to attempt for web3 API calls - */ - private maxRetries: number | undefined; - - /** - * Delay between retries for web3 API calls - */ - private retryDelay: number | undefined; - - /** - * Handler to get gas price - */ - private readonly gasPriceDefiner: GasPriceDefiner; - - /** - * Constructor - * @param web3Connection Object to connect to the Ethereum network - * @param [options.getLastBlockNumberDelay] the minimum delay to wait between fetches of lastBlockNumber - * If values are missing, private network is used as http://localhost:8545 - */ - public constructor( - web3Connection?: StorageTypes.IWeb3Connection, - { - maxConcurrency, - getLastBlockNumberDelay, - logger, - maxRetries, - retryDelay, - gasPriceMin, - }: { - maxConcurrency: number; - logger?: LogTypes.ILogger; - getLastBlockNumberDelay?: number; - maxRetries?: number; - retryDelay?: number; - gasPriceMin?: BigNumber; - } = { - maxConcurrency: Number.MAX_SAFE_INTEGER, - }, - ) { - this.maxConcurrency = maxConcurrency; - this.logger = logger || new SimpleLogger(); - - this.maxRetries = maxRetries; - this.retryDelay = retryDelay; - - web3Connection = web3Connection || {}; - - try { - this.eth = new web3Eth( - web3Connection.web3Provider || - new web3Eth.providers.HttpProvider(config.getDefaultEthereumProvider()), - ); - } catch (error) { - throw Error(`Can't initialize web3-eth ${error}`); - } - - // Set the default transaction polling timeout to the value in our config - this.eth.transactionPollingTimeout = config.getTransactionPollingTimeout(); - - // Checks if networkId is defined - // If not defined we use default value from config - this.networkName = - typeof web3Connection.networkId === 'undefined' - ? 
config.getDefaultEthereumNetwork() - : getEthereumStorageNetworkNameFromId(web3Connection.networkId); - - this.hashStorageAddress = SmartContracts.requestHashStorageArtifact.getAddress( - this.networkName, - ); - - this.hashSubmitterAddress = SmartContracts.requestHashSubmitterArtifact.getAddress( - this.networkName, - ); - - // Initialize smart contract instance - this.requestHashStorage = new this.eth.Contract( - SmartContracts.requestHashStorageArtifact.getContractAbi(), - this.hashStorageAddress, - ); - this.requestHashSubmitter = new this.eth.Contract( - SmartContracts.requestHashSubmitterArtifact.getContractAbi(), - this.hashSubmitterAddress, - ); - - this.timeout = web3Connection.timeout || config.getDefaultEthereumProviderTimeout(); - - this.creationBlockNumberHashStorage = - SmartContracts.requestHashStorageArtifact.getCreationBlockNumber(this.networkName) || 0; - - this.ethereumBlocks = new EthereumBlocks( - this.eth, - this.creationBlockNumberHashStorage, - this.retryDelay || config.getEthereumRetryDelay(), - this.maxRetries || config.getEthereumMaxRetries(), - getLastBlockNumberDelay, - this.logger, - ); - - this.gasPriceDefiner = new GasPriceDefiner({ logger, gasPriceMin }); - } - - /** - * Check if the web3 provider is accessible - * @param timeout Time to wait before considering the provider is not reachable - * @return Promise resolving if the web3 provider is accessible, throw otherwise - */ - public async checkWeb3ProviderConnection(timeout: number): Promise { - return new Promise((resolve, reject): void => { - const connectionTimer: any = setTimeout(() => { - reject( - Error( - 'The Web3 provider is not reachable, did you use the correct protocol (http/https)?', - ), - ); - }, timeout); - - this.eth.net - .isListening() - .then((isListening: boolean) => { - // The timeout must be disabled - clearTimeout(connectionTimer); - - if (isListening) { - resolve(); - } else { - reject(Error('The Web3 provider is not listening')); - } - }) - .catch((error: Error) => { - // The timeout must be disabled - clearTimeout(connectionTimer); - - reject(Error(`Error when trying to reach Web3 provider: ${error}`)); - }); - }); - } - - /** - * Check if the contracts are deployed and configured on ethereum - * @return Promise resolving if the contracts are deployed and configured, throws otherwise - */ - public async checkContracts(): Promise { - try { - const isSubmitterWhitelisted = await this.requestHashStorage.methods - .isWhitelisted(this.hashSubmitterAddress) - .call(); - - if (!isSubmitterWhitelisted) { - throw Error('The hash submitter not whitelisted in request Hash Storage contract'); - } - - // throw if requestHashSubmitter is not deployed - await this.requestHashSubmitter.methods.getFeesAmount(0).call(); - } catch (error) { - throw Error(`Contracts are not deployed or not well configured: ${error}`); - } - } - - /** - * Get the account used for transaction (account[0] of the wallet) - * @return Promise resolving the default account - */ - public async getMainAccount(): Promise { - // Get the accounts on the provider - // Throws an error if timeout is reached - const accounts = await timeoutPromise( - this.eth.getAccounts(), - this.timeout, - 'Web3 getAccounts connection timeout', - ); - - if (!accounts || !accounts[0]) { - throw Error('No account found'); - } - return accounts[0]; - } - - /** - * Adds hash to smart contract from content hash and content feesParameters - * @param contentHash Hash of the content to store, this hash should be used to retrieve the content - * @param 
feesParameters parameters used to compute storage fee - * @param gasPrice Replace the default gas price - * @returns Promise resolved when transaction is confirmed on Ethereum - */ - public async addHashAndSizeToEthereum( - contentHash: string, - feesParameters: StorageTypes.IFeesParameters, - gasPrice?: BigNumber, - nonce?: number, - ): Promise { - // Get the account for the transaction - const account = await this.getMainAccount(); - - // Get the fee from the size of the content - // Throws an error if timeout is reached - const fee = await timeoutPromise( - this.requestHashSubmitter.methods.getFeesAmount(feesParameters.contentSize).call(), - this.timeout, - 'Web3 getFeesAmount connection timeout', - ); - - // Determines the gas price to use - // If the gas price is provided as a parameter, we use this value - // If the gas price is not provided and we use mainnet, we determine it from gas price api providers - // We use the fast value provided by the api providers - // Otherwise, we use default value from config - const gasPriceToUse = - gasPrice || - (await this.gasPriceDefiner.getGasPrice(StorageTypes.GasPriceType.FAST, this.networkName)); - - // parse the fees parameters to hex bytes - const feesParametersAsBytes = web3Utils.padLeft( - web3Utils.toHex(feesParameters.contentSize), - LENGTH_BYTES32_STRING, - ); - - // Send transaction to contract - // TODO(PROT-181): Implement a log manager for the library - // use it for the different events (error, transactionHash, receipt and confirmation) - return new Promise((resolve, reject) => { - // This boolean is set to true once the ethereum metadata has been created and the promise has been resolved - // When set to true, we use it to ignore next confirmation event function call - let ethereumMetadataCreated = false; - - // Keep the transaction hash for future needs - let transactionHash = ''; - const transactionParameters = { - from: account, - gas: '100000', - gasPrice: gasPriceToUse, - nonce, - value: fee, - }; - this.requestHashSubmitter.methods - .submitHash(contentHash, feesParametersAsBytes) - .send(transactionParameters) - .on('transactionHash', (hash: any) => { - // Store the transaction hash in case we need it in the future - transactionHash = hash; - this.logger.debug( - `Ethereum SubmitHash transaction: ${JSON.stringify({ - hash, - ...transactionParameters, - })}`, - ); - }) - .on('error', async (transactionError: string) => { - // If failed because of polling timeout, try to resubmit the transaction with more gas - if ( - transactionError.toString().includes(TRANSACTION_POLLING_TIMEOUT) && - transactionHash - ) { - // If we didn't set the nonce, find the current transaction nonce - if (!nonce) { - const tx = await this.eth.getTransaction(transactionHash); - nonce = tx.nonce; - } - - // Get the new gas price for the transaction - const newGasPrice = await this.gasPriceDefiner.getGasPrice( - StorageTypes.GasPriceType.FAST, - this.networkName, - ); - - // If the new gas price is higher than the previous, resubmit the transaction - if (newGasPrice.gt(gasPriceToUse)) { - // Retry transaction with the new gas price and propagate back the result - try { - resolve( - await this.addHashAndSizeToEthereum( - contentHash, - feesParameters, - newGasPrice, - nonce, - ), - ); - } catch (error) { - reject(error); - } - } else { - // The transaction is stuck, but it doesn't seem to be a gas issue. Nothing better to do than to wait... 
- this.logger.warn( - `Transaction ${transactionHash} hasn't been mined for more than ${config.getTransactionPollingTimeout()} seconds. It may be stuck.`, - ); - } - } else { - const logObject = JSON.stringify({ - contentHash, - fee, - feesParametersAsBytes, - from: account, - gasPrice: gasPriceToUse, - nonce, - }); - this.logger.error(`Failed transaction: ${logObject}`); - reject(Error(`Ethereum transaction error: ${transactionError}`)); - } - }) - .on('confirmation', (confirmationNumber: number, receiptAfterConfirmation: any) => { - if (!ethereumMetadataCreated) { - this.logger.debug( - `Confirmation nb ${confirmationNumber} for transaction: ${receiptAfterConfirmation.transactionHash}`, - ); - - const gasFee = BigNumber.from(receiptAfterConfirmation.gasUsed).mul(gasPriceToUse); - const cost = gasFee.add(BigNumber.from(fee)); - - // Try to create ethereum metadata - // If the promise rejects, which is likely to happen because the last block is not fetchable - // we retry the next event function call - this.createEthereumMetaData( - receiptAfterConfirmation.blockNumber, - receiptAfterConfirmation.transactionHash, - cost.toString(), - fee, - gasFee.toString(), - confirmationNumber, - ) - .then((ethereumMetadata: StorageTypes.IEthereumMetadata) => { - ethereumMetadataCreated = true; - resolve(ethereumMetadata); - }) - .catch((e) => { - this.logger.debug( - `Could not create ethereum metadata for transaction ${ - receiptAfterConfirmation.transactionHash - }: ${e.message || e}`, - ); - if (confirmationNumber >= CREATING_ETHEREUM_METADATA_MAX_ATTEMPTS) { - reject(Error(`Maximum number of confirmation reached: ${e}`)); - } - }); - } - }); - }); - } - - /** - * Get all entries from storage smart contract past events - * - * @param options timestamp boundaries for the hash retrieval - * @return hashes with with metadata - */ - public async getEntriesFromEthereum( - options?: StorageTypes.ITimestampBoundaries, - ): Promise { - let fromBlock = this.creationBlockNumberHashStorage; - let toBlock: number | undefined; - - // get fromBlock from the timestamp given in options - if (options && options.from) { - const optionFromBlockNumbers = await this.ethereumBlocks.getBlockNumbersFromTimestamp( - options.from, - ); - fromBlock = optionFromBlockNumbers.blockAfter; - } - - // get toBlock from the timestamp given in options or use the latest block - if (options && options.to) { - const optionToBlockNumbers = await this.ethereumBlocks.getBlockNumbersFromTimestamp( - options.to, - ); - toBlock = optionToBlockNumbers.blockBefore; - } else { - toBlock = await this.ethereumBlocks.getLastBlockNumber(); - } - - if (toBlock < fromBlock) { - throw Error( - `toBlock must be larger than fromBlock: fromBlock:${fromBlock} toBlock:${toBlock}`, - ); - } - - // Get the toBlock timestamp and returns it with the data - // This is important because the the upper layers using this function shouldn't - // know what a block is and they (probably) will use timestamps as abstractions. - // We need to return this value, so the upper layers can use as "last sync time". - // Using now as "last sync time" will lead to issues, because new blocks can be - // added between the last block created and now. 
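Editor's note on the deleted `getEntriesFromEthereum` above: timestamp boundaries are translated into a block range through `EthereumBlocks`, which performs a dichotomic search over block timestamps (blocks, not timestamps, are what the chain can actually be queried by). A simplified sketch of that idea, assuming a monotonic timestamp-per-block lookup; `firstBlockAtOrAfter` is illustrative, not the deleted implementation:

```ts
// Hypothetical block-timestamp lookup used by this sketch.
type GetBlockTimestamp = (blockNumber: number) => Promise<number>;

// Find the first block in [fromBlock, toBlock] whose timestamp is >= target,
// by dichotomy over block numbers (block timestamps increase monotonically).
async function firstBlockAtOrAfter(
  getTimestamp: GetBlockTimestamp,
  target: number,
  fromBlock: number,
  toBlock: number,
): Promise<number> {
  let low = fromBlock;
  let high = toBlock;
  while (low < high) {
    const mid = Math.floor((low + high) / 2);
    if ((await getTimestamp(mid)) < target) {
      low = mid + 1;
    } else {
      high = mid;
    }
  }
  return low;
}
```

Returning the `toBlock` timestamp as `lastTimestamp` (rather than `Date.now()`) then gives callers a safe "last sync time", since new blocks may be mined between the last fetched block and now.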
- const lastTimestamp = await this.ethereumBlocks.getBlockTimestamp(toBlock); - - return { - ethereumEntries: await this.getEthereumEntriesFromEvents(fromBlock, toBlock), - lastTimestamp, - }; - } - - /** - * Get hashes and sizes with metadata inside storage smart contract past events - * - * @param fromBlock number of the block to start to get events - * @param toBlock number of the block to stop to get events - * @return Hashes and sizes with metadata - */ - public async getEthereumEntriesFromEvents( - fromBlock: number, - toBlock?: number | string, - ): Promise { - fromBlock = - fromBlock < this.creationBlockNumberHashStorage - ? this.creationBlockNumberHashStorage - : fromBlock; - toBlock = toBlock || 'latest'; - - // Read all event logs - let events = await this.recursiveGetPastEvents(fromBlock, toBlock); - - this.logger.debug(`${events.length} events fetched in getEthereumEntriesFromEvents`, [ - 'ethereum', - ]); - - // TODO PROT-235: getPastEvents returns all events, not just NewHash - events = events.filter((eventItem: any) => eventItem.event === 'NewHash'); - - const eventsWithMetaData = await Bluebird.map( - events, - (eventItem: any) => this.checkAndAddMetaDataToEvent(eventItem), - { - concurrency: this.maxConcurrency, - }, - ); - - return eventsWithMetaData; - } - - /** - * Gets current configuration - * - * @return the current configuration attributes - */ - public getConfig(): any { - return { - creationBlockNumberHashStorage: this.creationBlockNumberHashStorage, - currentProvider: this.eth.currentProvider.host, - hashStorageAddress: this.hashStorageAddress, - hashSubmitterAddress: this.hashSubmitterAddress, - maxConcurrency: this.maxConcurrency, - maxRetries: this.maxRetries, - networkName: this.networkName, - retryDelay: this.retryDelay, - }; - } - - /** - * Get events inside storage smart contract for a specified block range - * Some web3 providers, including Infura, send error if the past event number for a specific range is over 1000 - * In this case we divide the range and call the function recursively - * - * @param fromBlock number of the block to start to get events - * @param toBlock number of the block to stop to get events - * @return Past events of requestHashStorage of the specified range - */ - private async recursiveGetPastEvents( - fromBlock: number, - toBlock: number | string, - ): Promise { - const toBlockNumber: number = await this.getBlockNumberFromNumberOrString(toBlock); - - // Reading event logs - // If getPastEvents doesn't throw, we can return the returned events from the function - let events: any; - try { - events = await retry( - (args) => - timeoutPromise( - this.requestHashStorage.getPastEvents(args), - this.timeout, - 'Web3 getPastEvents connection timeout', - ), - { - maxRetries: this.maxRetries || config.getEthereumMaxRetries(), - retryDelay: this.retryDelay || config.getEthereumRetryDelay(), - }, - )({ - event: 'NewHash', - fromBlock, - toBlock: toBlockNumber, - }); - - this.logger.debug(`Events from ${fromBlock} to ${toBlock} fetched`, ['ethereum']); - - return events; - } catch (e) { - // Checks if the API returns "query returned more than XXX results" error - // In this case we perform a dichotomy in order to fetch past events with a smaller range - if (e.toString().match(MORE_THAN_XXX_RESULTS_REGEX)) { - const intervalHalf = Math.floor((fromBlock + toBlockNumber) / 2); - const eventsFirstHalfPromise = this.recursiveGetPastEvents(fromBlock, intervalHalf); - const eventsSecondHalfPromise = this.recursiveGetPastEvents( - intervalHalf + 1, - 
toBlockNumber, - ); - - return Promise.all([eventsFirstHalfPromise, eventsSecondHalfPromise]) - .then((halves) => flatten2DimensionsArray(halves)) - .catch((err) => { - throw err; - }); - } else { - throw e; - } - } - } - - /** - * Throws an error if the event is not correctly formatted (missing field) - * Attaches to the event the corresponding metadata - * @param event event of type NewHash - * @returns processed event - */ - private async checkAndAddMetaDataToEvent(event: any): Promise { - // Check if the event object is correct - // We check "typeof field === 'undefined'"" instead of "!field" - // because you can add empty string as hash or 0 as size in the storage smart contract - if ( - typeof event.returnValues === 'undefined' || - typeof event.returnValues.hash === 'undefined' || - typeof event.returnValues.feesParameters === 'undefined' - ) { - throw Error(`event is incorrect: doesn't have a hash or feesParameters`); - } - - const contentSize = web3Utils.hexToNumber(event.returnValues.feesParameters); - const meta = await this.createEthereumMetaData(event.blockNumber, event.transactionHash); - - return { - feesParameters: { contentSize }, - hash: event.returnValues.hash, - meta, - }; - } - - /** - * Create the ethereum metadata - * @param blockNumber block number of the ethereum transaction - * @param transactionHash transactionHash of the ethereum transaction - * @param cost total cost of the transaction (gas fee + request network fee) - * @param fee Request network fee - * @param gasFee gas fee of the ethereum transaction - * @return IEthereumMetadata the metadata formatted - */ - private async createEthereumMetaData( - blockNumber: number, - transactionHash: string, - cost?: string, - fee?: string, - gasFee?: string, - blockConfirmation?: number, - ): Promise { - if (!blockConfirmation) { - // Get the number confirmations of the block hosting the transaction - try { - blockConfirmation = await this.ethereumBlocks.getConfirmationNumber(blockNumber); - } catch (error) { - throw Error(`Error getting block confirmation number: ${error}`); - } - } - - // Get timestamp of the block hosting the transaction - let blockTimestamp; - try { - blockTimestamp = await this.ethereumBlocks.getBlockTimestamp(blockNumber); - } catch (error) { - throw Error(`Error getting block ${blockNumber} timestamp: ${error}`); - } - - return { - blockConfirmation, - blockNumber, - blockTimestamp, - cost, - fee, - gasFee, - networkName: this.networkName, - smartContractAddress: this.hashStorageAddress, - transactionHash, - }; - } - - /** - * Get the number of a block given its number or string describing it - * We need this function because recursive calls of getPastEvents need to use variable of type number - * - * @param block block number or string describing the block (latest, genesis, pending) - * @return number of the block - */ - private async getBlockNumberFromNumberOrString(block: number | string): Promise { - if (typeof block === 'number') { - // If the block number is already of type number, we return it - return block; - } else { - let blockObject; - try { - // Otherwise, we get the number of the block with getBlock web3 function - // Use Utils.retry to rerun if getBlock fails - blockObject = await this.ethereumBlocks.getBlock(block); - } catch (e) { - // getBlock can throw in certain case - // For example, if the block describer is "pending", we're not able to get the number of the block - // Therefore, this function should throw - throw Error(`Cannot get the number of the block: ${e}`); - } - - 
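Editor's note on `recursiveGetPastEvents` above: when the provider rejects a range with "query returned more than N results", the range is halved and both halves are fetched recursively. A generic sketch of the pattern, with a hypothetical `fetch` callback standing in for `getPastEvents`:

```ts
// Hypothetical fetcher: returns events for an inclusive block range, or throws
// when the provider considers the range too large.
type FetchEvents<T> = (fromBlock: number, toBlock: number) => Promise<T[]>;

const TOO_MANY_RESULTS = /query returned more than [1-9][0-9]* results/;

async function getEventsByDichotomy<T>(
  fetch: FetchEvents<T>,
  fromBlock: number,
  toBlock: number,
): Promise<T[]> {
  try {
    return await fetch(fromBlock, toBlock);
  } catch (error) {
    // Only split when this is the provider's "too many results" complaint
    // and the range can still be divided.
    if (!TOO_MANY_RESULTS.test(String(error)) || fromBlock >= toBlock) {
      throw error;
    }
    const middle = Math.floor((fromBlock + toBlock) / 2);
    const [firstHalf, secondHalf] = await Promise.all([
      getEventsByDichotomy(fetch, fromBlock, middle),
      getEventsByDichotomy(fetch, middle + 1, toBlock),
    ]);
    return [...firstHalf, ...secondHalf];
  }
}
```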
if (!blockObject || !blockObject.number) { - throw Error(`Block ${block} has no number`); - } - - return blockObject.number; - } - } -} diff --git a/packages/ethereum-storage/test/dataids-ignored.test.ts b/packages/ethereum-storage/test/dataids-ignored.test.ts deleted file mode 100644 index 4eb1f8e526..0000000000 --- a/packages/ethereum-storage/test/dataids-ignored.test.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { StorageTypes } from '@requestnetwork/types'; -import IgnoredDataIds from '../src/ignored-dataIds'; - -const entry: StorageTypes.IEthereumEntry = { - error: { - message: 'this is a little test !', - type: StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }, - feesParameters: { contentSize: 3 }, - hash: 'QmNXA5DyFZkdf4XkUT81nmJSo3nS2bL25x7YepxeoDa6tY', - meta: {} as any, -}; -const entry2: StorageTypes.IEthereumEntry = { - error: { message: 'this is a second test !', type: StorageTypes.ErrorEntries.INCORRECT_FILE }, - feesParameters: { contentSize: 3 }, - hash: 'hash2', - meta: {} as any, -}; - -let ignoredDataIds: IgnoredDataIds; - -/* eslint-disable no-magic-numbers */ -/* eslint-disable @typescript-eslint/no-unused-expressions */ -describe('Ignored DataIds', () => { - beforeEach(() => { - ignoredDataIds = new IgnoredDataIds(); - }); - - describe('save', () => { - it('can save()', async () => { - await ignoredDataIds.save(entry); - expect(await ignoredDataIds.getReason(entry.hash)).toBe(entry.error!.message); - }); - it('can save() something already saved that can be retried', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - await ignoredDataIds.save(entry); - expect(await ignoredDataIds.getDataIdsWithReasons()).toEqual({ - [entry.hash]: { - entry, - iteration: 1, - lastTryTimestamp: 0, - toRetry: true, - }, - }); - - jest.advanceTimersByTime(10); - await ignoredDataIds.save(entry); - expect(await ignoredDataIds.getDataIdsWithReasons()).toEqual({ - [entry.hash]: { - entry, - iteration: 2, - lastTryTimestamp: 10, - toRetry: true, - }, - }); - }); - it('can save() something already saved that cannot be retried', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - await ignoredDataIds.save(entry2); - expect(await ignoredDataIds.getDataIdsWithReasons()).toEqual({ - [entry2.hash]: { - entry: entry2, - iteration: 1, - lastTryTimestamp: 0, - toRetry: false, - }, - }); - - jest.advanceTimersByTime(10); - - await ignoredDataIds.save(entry2); - expect(await ignoredDataIds.getDataIdsWithReasons()).toEqual({ - [entry2.hash]: { - entry: entry2, - iteration: 1, - lastTryTimestamp: 0, - toRetry: false, - }, - }); - jest.useRealTimers(); - }); - }); - - describe('getDataIdsWithReasons', () => { - it('can getDataIdsWithReasons()', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - await ignoredDataIds.save(entry); - await ignoredDataIds.save(entry2); - - expect(await ignoredDataIds.getDataIdsWithReasons()).toEqual({ - [entry.hash]: { - entry, - iteration: 1, - lastTryTimestamp: 0, - toRetry: true, - }, - [entry2.hash]: { - entry: entry2, - iteration: 1, - lastTryTimestamp: 0, - toRetry: false, - }, - }); - jest.useRealTimers(); - }); - it('can getDataIdsWithReasons() if empty', async () => { - expect(await ignoredDataIds.getDataIdsWithReasons()).toEqual({}); - }); - }); - - describe('getDataIdsToRetry', () => { - it('can getDataIdsToRetry()', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - await ignoredDataIds.save(entry); - expect(await ignoredDataIds.getDataIdsToRetry()).toEqual([]); - - 
jest.advanceTimersByTime(120001); - expect(await ignoredDataIds.getDataIdsToRetry()).toEqual([entry]); - - jest.useRealTimers(); - }); - }); - - describe('delete', () => { - it('can delete()', async () => { - await ignoredDataIds.save(entry); - expect(await ignoredDataIds.getReason(entry.hash)).toBe(entry.error!.message); - expect(await ignoredDataIds.getDataIds()).toEqual([entry.hash]); - - await ignoredDataIds.delete(entry.hash); - expect(await ignoredDataIds.getReason(entry.hash)).toBeUndefined(); - expect(await ignoredDataIds.getDataIds()).toEqual([]); - }); - }); - - describe('getDataIds', () => { - it('can getDataIds()', async () => { - await ignoredDataIds.save(entry); - await ignoredDataIds.save(entry2); - - expect(await ignoredDataIds.getDataIds()).toEqual([entry.hash, entry2.hash]); - }); - it('can getDataIds() if empty', async () => { - expect(await ignoredDataIds.getDataIds()).toEqual([]); - }); - }); -}); diff --git a/packages/ethereum-storage/test/ethereum-blocks.test.ts b/packages/ethereum-storage/test/ethereum-blocks.test.ts deleted file mode 100644 index 375f8168a1..0000000000 --- a/packages/ethereum-storage/test/ethereum-blocks.test.ts +++ /dev/null @@ -1,194 +0,0 @@ -import EthereumBlocks from '../src/ethereum-blocks'; - -/* eslint-disable no-magic-numbers */ -const mockBlocksEthereum = [ - 7, 100, 209, 306, 401, 509, 606, 703, 803, 907, 1004, 1109, 1202, 1302, 1403, 1504, 1604, 1708, - 1802, 1906, 2002, 2103, 2203, 2308, 2402, 2501, 2600, 2700, 2806, 2908, 3009, 3100, 3206, 3303, - 3400, 3500, 3606, 3702, 3805, 3900, 4008, 4104, 4206, 4305, 4401, 4504, 4609, 4707, 4809, 4909, - 5000, 5109, 5205, 5305, 5407, 5509, 5604, 5704, 5805, 5903, 6003, 6101, 6207, 6309, 6402, 6501, - 6601, 6702, 6808, 6902, 7009, 7106, 7207, 7306, 7401, 7502, 7605, 7701, 7800, 7908, 8009, 8104, - 8202, 8302, 8407, 8502, 8602, 8706, 8802, 8903, 9006, 9104, 9205, 9300, 9406, 9503, 9603, 9700, - 9807, 9906, -]; - -const mockEth = { - getBlock: jest.fn((i: number): any => { - return mockBlocksEthereum[i] ? 
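Editor's note on the retry timing exercised by the deleted `getDataIdsToRetry` test above: an ignored entry flagged `toRetry` (only IPFS connection errors) becomes eligible again after an exponentially growing delay of `floor(e^iteration)` minutes since the last attempt. A small sketch of that check:

```ts
const INTERVAL_RETRY_MS = 60_000; // one minute, as in the deleted ignored-dataIds module

// An entry is due for another attempt once floor(e^iteration) minutes have
// elapsed since the last try.
function shouldRetry(lastTryTimestamp: number, iteration: number, now = Date.now()): boolean {
  return lastTryTimestamp + Math.floor(Math.exp(iteration)) * INTERVAL_RETRY_MS <= now;
}

// First attempt (iteration 1): floor(e^1) = 2, i.e. retryable after 120 000 ms,
// which is why the test advances the fake timers by 120 001 ms.
console.log(shouldRetry(0, 1, 60_000)); // false - only one minute elapsed
console.log(shouldRetry(0, 1, 120_001)); // true
```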
{ timestamp: mockBlocksEthereum[i] } : undefined; - }), - // eslint-disable-next-line - getBlockNumber: jest.fn(() => Promise.resolve(99)), -}; - -/* eslint-disable @typescript-eslint/no-unused-expressions */ -/* eslint-disable */ -describe('EthereumBlocks', () => { - beforeEach(() => { - jest.clearAllMocks(); - }); - describe('getLastBlockNumber', () => { - it('getLastBlockNumber', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - expect(await ethereumBlocks.getLastBlockNumber()).toEqual(99); - }); - - // TODO - it('respects the delay', async () => { - // Generates a random block number - const randEth = { - getBlockNumber: (): number => Math.floor(Math.random() * 10e7), - }; - const ethereumBlocks = new EthereumBlocks(randEth, 10, 0, 0, 10000); - - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - const block1 = await ethereumBlocks.getLastBlockNumber(); - const block2 = await ethereumBlocks.getLastBlockNumber(); - expect(block1).toEqual(block2); - - jest.advanceTimersByTime(10000); - - const block3 = await ethereumBlocks.getLastBlockNumber(); - expect(block3).not.toEqual(block1); - jest.useRealTimers(); - }); - - it('always fetches new with 0 as delay', async () => { - // Generates a random block number - const randEth = { - getBlockNumber: (): number => Math.floor(Math.random() * 10e7), - }; - const ethereumBlocks = new EthereumBlocks(randEth, 10, 0, 0, 0); - - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - const block1 = await ethereumBlocks.getLastBlockNumber(); - const block2 = await ethereumBlocks.getLastBlockNumber(); - expect(block1).not.toEqual(block2); - jest.advanceTimersByTime(10000); - - const block3 = await ethereumBlocks.getLastBlockNumber(); - expect(block3).not.toEqual(block1); - jest.useRealTimers(); - }); - }); - - describe('getSecondLastBlockNumber', () => { - it('getSecondLastBlockNumber', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - - await expect(ethereumBlocks.getSecondLastBlockNumber()).resolves.toEqual(98); - }); - }); - - describe('getBlockTimestamp', () => { - it('can getBlockTimestamp', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - expect(await ethereumBlocks.getBlockTimestamp(50)).toEqual(mockBlocksEthereum[50]); - }); - - it('can getBlockTimestamp without asking twice the same block number', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - expect(await ethereumBlocks.getBlockTimestamp(50)).toEqual(mockBlocksEthereum[50]); - expect(await ethereumBlocks.getBlockTimestamp(50)).toEqual(mockBlocksEthereum[50]); - expect(mockEth.getBlock).toHaveBeenCalledTimes(1); - }); - - it('cannot getBlockTimestamp of a block that doest not exist', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - await expect(ethereumBlocks.getBlockTimestamp(101)).rejects.toThrowError( - `block 101 not found`, - ); - }); - }); - - describe('getConfirmationNumber', () => { - it('can getConfirmationNumber', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - expect(await ethereumBlocks.getConfirmationNumber(30)).toEqual(69); - }); - - it('must throw an error when eth crash', async () => { - const mockEthThrower = { - // eslint-disable-next-line - getBlockNumber: () => { - throw Error('Exception to be caught'); - }, - }; - - const ethereumBlocks = new EthereumBlocks(mockEthThrower, 10, 0, 0); - await expect(ethereumBlocks.getConfirmationNumber(11)).rejects.toThrowError( - `Error getting 
the confirmation number: Error: Exception to be caught`, - ); - }); - }); - - describe('getBlockNumbersFromTimestamp', () => { - it('getBlockNumbersFromTimestamp', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(3190)).toMatchObject({ - blockBefore: 31, - blockAfter: 32, - }); - }); - - it('getBlockNumbersFromTimestamp some already known block', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - await ethereumBlocks.getBlockTimestamp(15); - await ethereumBlocks.getBlockTimestamp(20); - await ethereumBlocks.getBlockTimestamp(60); - await ethereumBlocks.getBlockTimestamp(65); - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(3190)).toMatchObject({ - blockBefore: 31, - blockAfter: 32, - }); - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(3009)).toMatchObject({ - blockAfter: 30, - blockBefore: 30, - }); - }); - - it('getBlockNumbersFromTimestamp of edge case', async () => { - const ethereumBlocks = new EthereumBlocks(mockEth, 10, 0, 0); - - // first dichotomy research - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(4401)).toMatchObject({ - blockAfter: 44, - blockBefore: 44, - }); - - // before anything - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(-1)).toMatchObject({ - blockAfter: 10, - blockBefore: 10, - }); - - // before first block - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(1)).toMatchObject({ - blockAfter: 10, - blockBefore: 10, - }); - - // at first block - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(1004)).toMatchObject({ - blockAfter: 10, - blockBefore: 10, - }); - - // at last block - // getBlockNumbersFromTimestamp should return the second last block number - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(9906)).toMatchObject({ - blockAfter: 98, - blockBefore: 98, - }); - - // with timestamp over last block - // getBlockNumbersFromTimestamp should return the second last block number - expect(await ethereumBlocks.getBlockNumbersFromTimestamp(99999)).toMatchObject({ - blockAfter: 98, - blockBefore: 98, - }); - }); - }); -}); diff --git a/packages/ethereum-storage/test/ethereum-entries-to-ipfs-content.test.ts b/packages/ethereum-storage/test/ethereum-entries-to-ipfs-content.test.ts deleted file mode 100644 index 2ba90d6a04..0000000000 --- a/packages/ethereum-storage/test/ethereum-entries-to-ipfs-content.test.ts +++ /dev/null @@ -1,380 +0,0 @@ -import { StorageTypes } from '@requestnetwork/types'; -import { SimpleLogger } from '@requestnetwork/utils'; - -import ethereumEntriesToIpfsContent from '../src/ethereum-entries-to-ipfs-content'; -import IgnoredDataIndex from '../src/ignored-dataIds'; -import IpfsConnectionError from '../src/ipfs-connection-error'; - -/* eslint-disable no-magic-numbers */ - -let ignoredDataIndex: IgnoredDataIndex; -let ipfsManager: any; - -/* eslint-disable @typescript-eslint/no-unused-expressions */ -describe('ethereum-entries-to-ipfs-content', () => { - beforeEach(async () => { - ignoredDataIndex = new IgnoredDataIndex(); - ipfsManager = {}; - }); - - it('can retry the right hashes', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - const connectionErrorSpy = jest.fn(() => { - throw new IpfsConnectionError(`Ipfs read request response error: test purpose`); - }); - const incorrectErrorSpy = jest.fn(() => { - throw new Error('Incorrect file test'); - }); - const biggerErrorSpy = jest.fn(() => ({ - content: 'bigger', - ipfsLinks: [], - 
ipfsSize: 5, - })); - const okSpy = jest.fn(() => ({ - content: 'ok', - ipfsLinks: [], - ipfsSize: 2, - })); - - ipfsManager.read = jest.fn(async (hash: string): Promise => { - if (hash === 'hConnectionError') { - return connectionErrorSpy(); - } else if (hash === 'hIncorrectFile') { - return incorrectErrorSpy(); - } else if (hash === 'hBiggerFile') { - return biggerErrorSpy(); - } else { - return okSpy(); - } - }); - - const ethereumEntriesToProcess: StorageTypes.IEthereumEntry[] = [ - { hash: 'hConnectionError', feesParameters: { contentSize: 3 }, meta: {} as any }, - { hash: 'hIncorrectFile', feesParameters: { contentSize: 3 }, meta: {} as any }, - { hash: 'hBiggerFile', feesParameters: { contentSize: 3 }, meta: {} as any }, - { hash: 'hOk', feesParameters: { contentSize: 3 }, meta: {} as any }, - ]; - const result = await ethereumEntriesToIpfsContent( - ethereumEntriesToProcess, - ipfsManager, - ignoredDataIndex, - new SimpleLogger(), - 5, - ); - - expect(result.length).toBe(1); - expect(result[0]!.content).toBe('ok'); - expect(result[0]!.id).toBe('hOk'); - - const ignoredData = await ignoredDataIndex.getDataIdsWithReasons(); - - expect(ignoredData).toEqual({ - hBiggerFile: { - entry: { - error: { - message: 'Incorrect declared size', - type: StorageTypes.ErrorEntries.WRONG_FEES, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hBiggerFile', - meta: {}, - }, - iteration: 1, - lastTryTimestamp: 0, - toRetry: false, - }, - hConnectionError: { - entry: { - error: { - message: 'Ipfs read request response error: test purpose', - type: StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hConnectionError', - meta: {}, - }, - iteration: 1, - lastTryTimestamp: 0, - toRetry: true, - }, - hIncorrectFile: { - entry: { - error: { - message: 'Incorrect file test', - type: StorageTypes.ErrorEntries.INCORRECT_FILE, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hIncorrectFile', - meta: {}, - }, - iteration: 1, - lastTryTimestamp: 0, - toRetry: false, - }, - }); - - expect(ipfsManager.read).toHaveBeenCalledTimes(5); - expect(connectionErrorSpy).toHaveBeenCalledTimes(2); - expect(incorrectErrorSpy).toHaveBeenCalledTimes(1); - expect(biggerErrorSpy).toHaveBeenCalledTimes(1); - expect(okSpy).toHaveBeenCalledTimes(1); - - jest.useRealTimers(); - }); - - it('can retry right hashes but find it after the retry', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - const connectionErrorSpy = jest.fn(() => { - throw new IpfsConnectionError(`Ipfs read request response error: test purpose`); - }); - const incorrectErrorSpy = jest.fn(() => { - throw new Error('Incorrect file test'); - }); - const biggerErrorSpy = jest.fn(() => ({ - content: 'bigger', - ipfsLinks: [], - ipfsSize: 5, - })); - const okSpy = jest.fn(() => ({ - content: 'ok', - ipfsLinks: [], - ipfsSize: 2, - })); - - let tryCount = 0; - ipfsManager.read = jest.fn(async (hash: string): Promise => { - if (hash === 'hConnectionError' && tryCount === 0) { - tryCount++; - return connectionErrorSpy(); - } else if (hash === 'hIncorrectFile') { - return incorrectErrorSpy(); - } else if (hash === 'hBiggerFile') { - return biggerErrorSpy(); - } else { - return okSpy(); - } - }); - - const ethereumEntriesToProcess: StorageTypes.IEthereumEntry[] = [ - { hash: 'hConnectionError', feesParameters: { contentSize: 3 }, meta: {} as any }, - { hash: 'hIncorrectFile', feesParameters: { contentSize: 3 }, meta: {} as any }, - { hash: 'hBiggerFile', feesParameters: { contentSize: 3 
}, meta: {} as any }, - { hash: 'hOk', feesParameters: { contentSize: 3 }, meta: {} as any }, - ]; - const result = await ethereumEntriesToIpfsContent( - ethereumEntriesToProcess, - ipfsManager, - ignoredDataIndex, - new SimpleLogger(), - 5, - ); - - expect(result.length).toBe(2); - expect(result[0]!.content).toBe('ok'); - expect(result[0]!.id).toBe('hOk'); - expect(result[1]!.content).toBe('ok'); - expect(result[1]!.id).toBe('hConnectionError'); - - const ignoredData = await ignoredDataIndex.getDataIdsWithReasons(); - - expect(ignoredData).toEqual({ - hBiggerFile: { - entry: { - error: { - message: 'Incorrect declared size', - type: StorageTypes.ErrorEntries.WRONG_FEES, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hBiggerFile', - meta: {}, - }, - iteration: 1, - lastTryTimestamp: 0, - toRetry: false, - }, - hIncorrectFile: { - entry: { - error: { - message: 'Incorrect file test', - type: StorageTypes.ErrorEntries.INCORRECT_FILE, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hIncorrectFile', - meta: {}, - }, - iteration: 1, - lastTryTimestamp: 0, - toRetry: false, - }, - }); - - expect(ipfsManager.read).toHaveBeenCalledTimes(5); - expect(connectionErrorSpy).toHaveBeenCalledTimes(1); - expect(incorrectErrorSpy).toHaveBeenCalledTimes(1); - expect(biggerErrorSpy).toHaveBeenCalledTimes(1); - expect(okSpy).toHaveBeenCalledTimes(2); - - jest.useRealTimers(); - }); - - it('can store hash as ignored then remove it', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - ipfsManager.read = jest.fn(() => { - throw new IpfsConnectionError(`Ipfs read request response error: test purpose`); - }); - - const ethereumEntriesToProcess: StorageTypes.IEthereumEntry[] = [ - { hash: 'hConnectionError', feesParameters: { contentSize: 3 }, meta: {} as any }, - ]; - let result = await ethereumEntriesToIpfsContent( - ethereumEntriesToProcess, - ipfsManager, - ignoredDataIndex, - new SimpleLogger(), - 5, - ); - - expect(result.length).toBe(0); - - let ignoredData = await ignoredDataIndex.getDataIdsWithReasons(); - - expect(ignoredData).toEqual({ - hConnectionError: { - entry: { - error: { - message: 'Ipfs read request response error: test purpose', - type: StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hConnectionError', - meta: {}, - }, - iteration: 1, - lastTryTimestamp: 0, - toRetry: true, - }, - }); - - expect(ipfsManager.read).toHaveBeenCalledTimes(2); - - // Then we find it: - ipfsManager.read = jest.fn( - async (_hash: string): Promise => ({ - content: 'ok', - ipfsLinks: [], - ipfsSize: 2, - }), - ); - result = await ethereumEntriesToIpfsContent( - ethereumEntriesToProcess, - ipfsManager, - ignoredDataIndex, - new SimpleLogger(), - 5, - ); - expect(result.length).toBe(1); - expect(result[0]!.content).toBe('ok'); - expect(result[0]!.id).toBe('hConnectionError'); - - ignoredData = await ignoredDataIndex.getDataIdsWithReasons(); - - expect(ignoredData).toEqual({}); - - jest.useRealTimers(); - }); - - it('can store hash as ignored it twice', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - - ipfsManager.read = jest.fn(() => { - throw new IpfsConnectionError(`Ipfs read request response error: test purpose`); - }); - - const ethereumEntriesToProcess: StorageTypes.IEthereumEntry[] = [ - { hash: 'hConnectionError', feesParameters: { contentSize: 3 }, meta: {} as any }, - ]; - let result = await ethereumEntriesToIpfsContent( - ethereumEntriesToProcess, - ipfsManager, - ignoredDataIndex, - new 
SimpleLogger(), - 5, - ); - expect(result.length).toBe(0); - - let ignoredData = await ignoredDataIndex.getDataIdsWithReasons(); - - expect(ignoredData).toEqual({ - hConnectionError: { - entry: { - error: { - message: 'Ipfs read request response error: test purpose', - type: StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hConnectionError', - meta: {}, - }, - iteration: 1, - lastTryTimestamp: 0, - toRetry: true, - }, - }); - - expect(ipfsManager.read).toHaveBeenCalledTimes(2); - - jest.advanceTimersByTime(100); - result = await ethereumEntriesToIpfsContent( - ethereumEntriesToProcess, - ipfsManager, - ignoredDataIndex, - new SimpleLogger(), - 5, - ); - expect(result.length).toBe(0); - - ignoredData = await ignoredDataIndex.getDataIdsWithReasons(); - - expect(ignoredData).toEqual({ - hConnectionError: { - entry: { - error: { - message: 'Ipfs read request response error: test purpose', - type: StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hConnectionError', - meta: {}, - }, - iteration: 2, - lastTryTimestamp: 100, - toRetry: true, - }, - }); - - jest.useRealTimers(); - }); -}); diff --git a/packages/ethereum-storage/test/ethereum-metadata-cache.test.ts b/packages/ethereum-storage/test/ethereum-metadata-cache.test.ts deleted file mode 100644 index 713818bbe7..0000000000 --- a/packages/ethereum-storage/test/ethereum-metadata-cache.test.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { StorageTypes } from '@requestnetwork/types'; -import EthereumMetadataCache from '../src/ethereum-metadata-cache'; - -let metadataCache: EthereumMetadataCache; - -const metadataExample1: StorageTypes.IEthereumMetadata = { - blockConfirmation: 3, - blockNumber: 0, - blockTimestamp: 1000, - networkName: 'ropsten', - smartContractAddress: '0xaaa', - transactionHash: '0xbbb', -}; - -const metadataExample2: StorageTypes.IEthereumMetadata = { - blockConfirmation: 5, - blockNumber: 0, - blockTimestamp: 4000, - networkName: 'rinkeby', - smartContractAddress: '0xccc', - transactionHash: '0xddd', -}; - -const metadataExample3: StorageTypes.IEthereumMetadata = { - blockConfirmation: 3, - blockNumber: 0, - blockTimestamp: 300, - networkName: 'kovan', - smartContractAddress: '0xeee', - transactionHash: '0xfff', -}; - -const hashExample1 = '0xabc'; -const hashExample2 = '0xefg'; -const hashExample3 = '0xhij'; - -describe('EthereumMetadataCache', () => { - beforeEach(() => { - jest.restoreAllMocks(); - metadataCache = new EthereumMetadataCache(); - }); - - it('allows to save metadata', async () => { - await expect(metadataCache.metadataCache.get(hashExample1)).resolves.toBeUndefined(); - await expect(metadataCache.metadataCache.get(hashExample2)).resolves.toBeUndefined(); - await expect(metadataCache.metadataCache.get(hashExample3)).resolves.toBeUndefined(); - - await metadataCache.saveDataIdMeta(hashExample1, metadataExample1); - await metadataCache.saveDataIdMeta(hashExample2, metadataExample2); - await metadataCache.saveDataIdMeta(hashExample3, metadataExample3); - - await expect(metadataCache.metadataCache.get(hashExample1)).resolves.toEqual(metadataExample1); - await expect(metadataCache.metadataCache.get(hashExample2)).resolves.toEqual(metadataExample2); - await expect(metadataCache.metadataCache.get(hashExample3)).resolves.toEqual(metadataExample3); - }); - - it('cannot erase metadata of dataId with new metadata', async () => { - await metadataCache.saveDataIdMeta(hashExample1, metadataExample1); - await 
metadataCache.saveDataIdMeta(hashExample1, metadataExample2); - - await expect(metadataCache.metadataCache.get(hashExample1)).resolves.toEqual(metadataExample1); - }); -}); diff --git a/packages/ethereum-storage/test/ethereum-storage.test.ts b/packages/ethereum-storage/test/ethereum-storage.test.ts deleted file mode 100644 index bcc4ffa622..0000000000 --- a/packages/ethereum-storage/test/ethereum-storage.test.ts +++ /dev/null @@ -1,776 +0,0 @@ -import * as SmartContracts from '@requestnetwork/smart-contracts'; -import { StorageTypes } from '@requestnetwork/types'; -import { getCurrentTimestampInSecond } from '@requestnetwork/utils'; -import { EventEmitter } from 'events'; - -import { EthereumStorage } from '../src/ethereum-storage'; -import { IpfsStorage } from '../src/ipfs-storage'; -import IpfsConnectionError from '../src/ipfs-connection-error'; - -/* eslint-disable no-magic-numbers */ - -// eslint-disable-next-line @typescript-eslint/no-var-requires -const web3HttpProvider = require('web3-providers-http'); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const web3Utils = require('web3-utils'); - -const ipfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'localhost', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 1000, -}; - -const provider = new web3HttpProvider('http://localhost:8545'); -const web3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - timeout: 1000, - web3Provider: provider, -}; - -const invalidHostNetworkProvider = new web3HttpProvider('http://nonexistentnetwork:8545'); -const invalidHostWeb3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - timeout: 1000, - web3Provider: invalidHostNetworkProvider, -}; - -// eslint-disable-next-line @typescript-eslint/no-var-requires -const web3Eth = require('web3-eth'); -const eth = new web3Eth(provider); - -const contractHashSubmitter = new eth.Contract( - SmartContracts.requestHashSubmitterArtifact.getContractAbi(), - SmartContracts.requestHashSubmitterArtifact.getAddress('private'), -); -const addressRequestHashSubmitter = contractHashSubmitter._address; - -const content1 = 'this is a little test !'; -const hash1 = 'QmNXA5DyFZkdf4XkUT81nmJSo3nS2bL25x7YepxeoDa6tY'; -const realSize1 = 29; -const realSize1Bytes32Hex = web3Utils.padLeft(web3Utils.toHex(realSize1), 64); -const fakeSize1 = 50; -const fakeSize1Bytes32Hex = web3Utils.padLeft(web3Utils.toHex(fakeSize1), 64); - -const content2 = 'content\nwith\nspecial\ncharacters\n'; -const hash2 = 'QmQj8fQ9T16Ddrxfij5eyRnxXKTVxRXyQuazYnezt9iZpy'; -const realSize2 = 38; -const realSize2Bytes32Hex = web3Utils.padLeft(web3Utils.toHex(realSize2), 64); -const fakeSize2 = 0; -const fakeSize2Bytes32Hex = web3Utils.padLeft(web3Utils.toHex(fakeSize2), 64); - -// Define a mock for getPastEvents to be independent of the state of ganache instance -const pastEventsMock = [ - { - blockNumber: 1, - event: 'NewHash', - returnValues: { - feesParameters: realSize1Bytes32Hex, - hash: hash1, - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xa', - }, - { - blockNumber: 1, - event: 'NewHash', - returnValues: { - feesParameters: fakeSize1Bytes32Hex, - hash: hash1, - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xa', - }, - { - blockNumber: 2, - event: 'NewHash', - returnValues: { - feesParameters: realSize2Bytes32Hex, - hash: hash2, - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xb', - 
}, - { - blockNumber: 3, - event: 'NewHash', - returnValues: { - feesParameters: fakeSize2Bytes32Hex, - hash: hash2, - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xc', - }, - { - blockNumber: 3, - event: 'NewHash', - returnValues: { - feesParameters: fakeSize2Bytes32Hex, - hash: 'notAHash', - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xc', - }, -]; -/* eslint-disable */ -const getPastEventsMock = () => pastEventsMock; -const ipfsStorage = new IpfsStorage({ ipfsGatewayConnection }); - -describe('EthereumStorage', () => { - beforeEach(() => { - jest.resetAllMocks(); - }); - - describe('initialize', () => { - it('cannot use functions when not initialized', async () => { - const ethereumStorageNotInitialized = new EthereumStorage( - 'localhost', - ipfsStorage, - web3Connection, - ); - await expect(ethereumStorageNotInitialized.getData()).rejects.toThrowError( - 'Ethereum storage must be initialized', - ); - await expect(ethereumStorageNotInitialized.append('')).rejects.toThrowError( - 'Ethereum storage must be initialized', - ); - await expect(ethereumStorageNotInitialized.read('')).rejects.toThrowError( - 'Ethereum storage must be initialized', - ); - }); - - it('cannot initialize if ethereum node not reachable', async () => { - const ethereumStorageNotInitialized = new EthereumStorage( - 'localhost', - ipfsStorage, - invalidHostWeb3Connection, - ); - await expect(ethereumStorageNotInitialized.initialize()).rejects.toThrowError( - 'Ethereum node is not accessible: Error: Error when trying to reach Web3 provider: Error: Invalid JSON RPC response: ""', - ); - }); - - it('cannot initialize if ethereum node not listening', async () => { - const ethereumStorageNotInitialized = new EthereumStorage( - 'localhost', - ipfsStorage, - web3Connection, - ); - - ethereumStorageNotInitialized.smartContractManager.eth.net.isListening = async () => false; - - await expect(ethereumStorageNotInitialized.initialize()).rejects.toThrowError( - 'Ethereum node is not accessible: Error: The Web3 provider is not listening', - ); - }); - - it('cannot initialize if contracts are not deployed', async () => { - const ethereumStorageNotInitialized = new EthereumStorage( - 'localhost', - ipfsStorage, - web3Connection, - ); - - const invalidHashStorageAddress = '0x0000000000000000000000000000000000000000'; - const invalidHashSubmitterAddress = '0x0000000000000000000000000000000000000000'; - - // Initialize smart contract instance - ethereumStorageNotInitialized.smartContractManager.requestHashStorage = new eth.Contract( - SmartContracts.requestHashStorageArtifact.getContractAbi(), - invalidHashStorageAddress, - ); - ethereumStorageNotInitialized.smartContractManager.requestHashSubmitter = new eth.Contract( - SmartContracts.requestHashSubmitterArtifact.getContractAbi(), - invalidHashSubmitterAddress, - ); - - await expect(ethereumStorageNotInitialized.initialize()).rejects.toThrowError( - 'Contracts are not deployed or not well configured:', - ); - }); - }); - - describe('append/read/getData', () => { - let ethereumStorage: EthereumStorage; - beforeEach(async () => { - const ipfsStorage = new IpfsStorage({ ipfsGatewayConnection }); - - ethereumStorage = new EthereumStorage('localhost', ipfsStorage, web3Connection); - await ethereumStorage.initialize(); - - ethereumStorage.smartContractManager.requestHashStorage.getPastEvents = getPastEventsMock; - ethereumStorage.smartContractManager.addHashAndSizeToEthereum = - async (): Promise => { - return { - blockConfirmation: 10, - 
blockNumber: 10, - blockTimestamp: 1545816416, - cost: '110', - fee: '100', - gasFee: '10', - networkName: 'private', - smartContractAddress: '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - transactionHash: '0x7c45c575a54893dc8dc7230e3044e1de5c8714cd0a1374cf3a66378c639627a3', - }; - }; - }); - - it('cannot be initialized twice', async () => { - await expect(ethereumStorage.initialize()).rejects.toThrowError('already initialized'); - }); - - it('allows to append a file', async () => { - jest.useFakeTimers('modern'); - jest.setSystemTime(0); - const timestamp = getCurrentTimestampInSecond(); - const result = await ethereumStorage.append(content1); - - const resultExpected: StorageTypes.IAppendResult = Object.assign(new EventEmitter(), { - content: content1, - id: hash1, - meta: { - ipfs: { - size: realSize1, - }, - local: { location: 'localhost' }, - state: StorageTypes.ContentState.PENDING, - storageType: StorageTypes.StorageSystemType.LOCAL, - timestamp, - }, - }); - expect(result).toMatchObject(resultExpected); - jest.useRealTimers(); - }); - - it('throws when append and addHashAndSizeToEthereum throws', (done) => { - ethereumStorage.smartContractManager.addHashAndSizeToEthereum = - async (): Promise => { - throw Error('fake error'); - }; - - expect.assertions(1); - // eslint-disable-next-line @typescript-eslint/no-floating-promises - ethereumStorage.append(content1).then((result) => { - result - .on('confirmed', () => { - fail('addHashAndSizeToEthereum must have thrown'); - }) - .on('error', (error) => { - expect(error.message).toEqual('fake error'); - done(); - }); - }); - }); - - it(`allows to save dataId's Ethereum metadata into the metadata cache when append is called`, async () => { - await expect( - ethereumStorage.ethereumMetadataCache.metadataCache.get(hash1), - ).resolves.toBeUndefined(); - - const result = await ethereumStorage.append(content1); - await expect(ethereumStorage.ethereumMetadataCache.metadataCache.get(hash1)).resolves.toEqual( - result.meta.ethereum, - ); - }); - - it(`prevents already saved dataId's Ethereum metadata to be erased in the metadata cache when append is called`, async () => { - await expect( - ethereumStorage.ethereumMetadataCache.metadataCache.get(hash1), - ).resolves.toBeUndefined(); - - const result1 = await ethereumStorage.append(content1); - - // Ethereum metadata is determined by the return data of addHashAndSizeToEthereum - // We change the return data of this function to ensure the second call of append contain different metadata - ethereumStorage.smartContractManager.addHashAndSizeToEthereum = - async (): Promise => { - return { - blockConfirmation: 20, - blockNumber: 11, - blockTimestamp: 1545816416, - cost: '110', - fee: '1', - gasFee: '100', - networkName: 'private', - smartContractAddress: '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - transactionHash: '0x7c45c575a54893dc8dc7230e3044e1de5c8714cd0a1374cf3a66378c639627a3', - }; - }; - - const result2 = await ethereumStorage.append(content1); - - result1.on('confirmed', (resultConfirmed1) => { - result2.on('confirmed', async (resultConfirmed2) => { - expect(resultConfirmed1).not.toMatchObject(resultConfirmed2); - await expect( - ethereumStorage.ethereumMetadataCache.metadataCache.get(hash1), - ).resolves.toEqual(resultConfirmed1.meta.ethereum); - }); - }); - }); - - it('allows to read a file', async () => { - const appendResult = await ethereumStorage.append(content1); - const confirmation = new Promise((r) => appendResult.on('confirmed', r)); - - const resultBeforeConfirmation = await 
ethereumStorage.read(hash1); - expect(resultBeforeConfirmation.meta.ethereum).not.toBeDefined(); - expect(resultBeforeConfirmation.meta.state).toBe(StorageTypes.ContentState.PENDING); - - await confirmation; - const result = await ethereumStorage.read(hash1); - - expect(result.meta.ethereum).toBeDefined(); - expect(result.meta.state).toBe(StorageTypes.ContentState.CONFIRMED); - - expect(result.content).toBe(content1); - expect(result.meta.ipfs).toMatchObject({ size: realSize1 }); - - expect(result.meta.ethereum?.blockNumber).toEqual(10); - expect(result.meta.ethereum?.networkName).toEqual('private'); - expect(result.meta.ethereum?.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(result.meta.ethereum?.blockConfirmation).toBeGreaterThan(1); - expect(result.meta.ethereum?.blockTimestamp).toBeDefined(); - }); - - it('cannot append if ipfs read fail', async () => { - jest.spyOn((ethereumStorage as any).ipfsStorage, 'read').mockImplementation(() => { - throw Error('expected error'); - }); - await ethereumStorage.append(content1); - await expect(ethereumStorage.read(hash1)).rejects.toThrowError(`expected error`); - }); - - it('allows to retrieve all data id (even if pin fail)', async () => { - // These contents have to be appended in order to check their size - await ethereumStorage.append(content1); - await ethereumStorage.append(content2); - const { entries } = await ethereumStorage.getData(); - - if (!entries[0].meta.ethereum) { - fail('entries[0].meta.ethereum does not exist'); - return; - } - expect(entries[0].meta.ipfs).toMatchObject({ - size: realSize1, - }); - expect(entries[0].meta.ethereum.blockNumber).toEqual(pastEventsMock[0].blockNumber); - expect(entries[0].meta.ethereum.networkName).toEqual('private'); - expect(entries[0].meta.ethereum.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(entries[0].meta.ethereum.blockNumber).toEqual(pastEventsMock[0].blockNumber); - expect(entries[0].meta.ethereum.blockConfirmation).toBeGreaterThanOrEqual(1); - expect(entries[0].meta.ethereum.blockTimestamp).toBeDefined(); - - if (!entries[1].meta.ethereum) { - fail('entries[1].meta.ethereum does not exist'); - return; - } - expect(entries[1].meta.ipfs).toMatchObject({ - size: realSize1, - }); - expect(entries[1].meta.ethereum.blockNumber).toEqual(pastEventsMock[1].blockNumber); - expect(entries[1].meta.ethereum.networkName).toEqual('private'); - expect(entries[1].meta.ethereum.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(entries[1].meta.ethereum.blockNumber).toEqual(pastEventsMock[1].blockNumber); - expect(entries[1].meta.ethereum.blockConfirmation).toBeGreaterThanOrEqual(1); - expect(entries[1].meta.ethereum.blockTimestamp).toBeDefined(); - - if (!entries[2].meta.ethereum) { - fail('entries[2].meta.ethereum does not exist'); - return; - } - - expect(entries[2].meta.ipfs).toMatchObject({ - size: realSize2, - }); - expect(entries[2].meta.ethereum.blockNumber).toEqual(pastEventsMock[2].blockNumber); - expect(entries[2].meta.ethereum.networkName).toEqual('private'); - expect(entries[2].meta.ethereum.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(entries[2].meta.ethereum.blockNumber).toEqual(pastEventsMock[2].blockNumber); - expect(entries[2].meta.ethereum.blockConfirmation).toBeGreaterThanOrEqual(1); - expect(entries[2].meta.ethereum.blockTimestamp).toBeDefined(); - - expect(entries.map(({ id }) => id)).toMatchObject([hash1, 
hash1, hash2]); - }); - - it('allows to retrieve all data', async () => { - // For this test, we don't want to use the ethereum metadata cache - // We want to force the retrieval of metadata with getPastEvents function - ethereumStorage.ethereumMetadataCache.saveDataIdMeta = async (_dataId, _meta) => {}; - - // These contents have to be appended in order to check their size - await ethereumStorage.append(content1); - await ethereumStorage.append(content2); - const { entries } = await ethereumStorage.getData(); - - if (!entries[0].meta.ethereum) { - fail('entries[0].meta.ethereum does not exist'); - return; - } - expect(entries[0].meta.ipfs).toMatchObject({ - size: realSize1, - }); - expect(entries[0].meta.ethereum.blockNumber).toEqual(pastEventsMock[0].blockNumber); - expect(entries[0].meta.ethereum.networkName).toEqual('private'); - expect(entries[0].meta.ethereum.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(entries[0].meta.ethereum.blockNumber).toEqual(pastEventsMock[0].blockNumber); - expect(entries[0].meta.ethereum.blockConfirmation).toBeGreaterThanOrEqual(1); - expect(entries[0].meta.ethereum.blockTimestamp).toBeDefined(); - - if (!entries[1].meta.ethereum) { - fail('entries[1].meta.ethereum does not exist'); - return; - } - expect(entries[1].meta.ipfs).toMatchObject({ - size: realSize1, - }); - expect(entries[1].meta.ethereum.blockNumber).toEqual(pastEventsMock[0].blockNumber); - expect(entries[1].meta.ethereum.networkName).toEqual('private'); - expect(entries[1].meta.ethereum.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(entries[1].meta.ethereum.blockNumber).toEqual(pastEventsMock[0].blockNumber); - expect(entries[1].meta.ethereum.blockConfirmation).toBeGreaterThanOrEqual(1); - expect(entries[1].meta.ethereum.blockTimestamp).toBeDefined(); - - if (!entries[2].meta.ethereum) { - fail('entries[2].meta.ethereum does not exist'); - return; - } - expect(entries[2].meta.ipfs).toMatchObject({ - size: realSize2, - }); - expect(entries[2].meta.ethereum.blockNumber).toEqual(pastEventsMock[2].blockNumber); - expect(entries[2].meta.ethereum.networkName).toEqual('private'); - expect(entries[2].meta.ethereum.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(entries[2].meta.ethereum.blockNumber).toEqual(pastEventsMock[2].blockNumber); - expect(entries[2].meta.ethereum.blockConfirmation).toBeGreaterThanOrEqual(1); - expect(entries[2].meta.ethereum.blockTimestamp).toBeDefined(); - - expect(entries.map(({ content }) => content)).toMatchObject([content1, content1, content2]); - expect(entries.map(({ id }) => id)).toMatchObject([hash1, hash1, hash2]); - }); - - it('doest get meta data if the fees are too low', async () => { - // For this test, we don't want to use the ethereum metadata cache - // We want to force the retrieval of metadata with getPastEvents function - ethereumStorage.ethereumMetadataCache.saveDataIdMeta = async (_dataId, _meta) => {}; - ethereumStorage.smartContractManager.getEntriesFromEthereum = - async (): Promise => { - return { - ethereumEntries: [ - { - feesParameters: { contentSize: 1 }, - hash: hash1, - meta: { - blockConfirmation: 1561192254600, - blockNumber: 1, - blockTimestamp: 1561191682, - networkName: 'private', - smartContractAddress: '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - transactionHash: '0xa', - }, - }, - ], - lastTimestamp: 0, - }; - }; - - const result = await ethereumStorage.getData(); - 
expect(result.entries.length).toBe(0); - }); - - it('append and read with no parameter should throw an error', async () => { - await expect(ethereumStorage.append('')).rejects.toThrowError('No content provided'); - await expect(ethereumStorage.read('')).rejects.toThrowError('No id provided'); - }); - - it('append content with an invalid web3 connection should throw an error', async () => { - await expect( - ethereumStorage.updateEthereumNetwork(invalidHostWeb3Connection), - ).rejects.toThrowError( - 'Ethereum node is not accessible: Error: Error when trying to reach Web3 provider: Error: Invalid JSON RPC response: ""', - ); - }); - - it('getData should throw an error when data from getEntriesFromEthereum are incorrect', async () => { - // Mock getEntriesFromEthereum of smartContractManager to return unexpected promise value - ethereumStorage.smartContractManager.getEntriesFromEthereum = (): Promise => { - return Promise.resolve({ - ethereumEntries: [ - { - feesParameters: { contentSize: 10 }, - meta: {} as StorageTypes.IEthereumMetadata, - } as StorageTypes.IEthereumEntry, - ], - lastTimestamp: 0, - }); - }; - await expect(ethereumStorage.getData()).rejects.toThrowError( - 'The event log has no hash or feesParameters', - ); - - // Test with no meta - ethereumStorage.smartContractManager.getEntriesFromEthereum = - (): Promise => { - return Promise.resolve({ - ethereumEntries: [ - { - feesParameters: { contentSize: 10 }, - hash: '0xad', - } as StorageTypes.IEthereumEntry, - ], - lastTimestamp: 0, - }); - }; - - await expect(ethereumStorage.getData()).rejects.toThrowError('The event log has no metadata'); - }); - - it('allows to read several files', async () => { - const content = [content1, content2]; - const realSizes = [realSize1, realSize2]; - - const r1 = await ethereumStorage.append(content1); - await new Promise((r) => r1.on('confirmed', r)); - const r2 = await ethereumStorage.append(content2); - await new Promise((r) => r2.on('confirmed', r)); - const results = await ethereumStorage.readMany([hash1, hash2]); - - results.forEach((result, index) => { - expect(result.meta.ethereum).toBeDefined(); - expect(result.content).toBe(content[index]); - expect(result.meta.ipfs).toMatchObject({ - size: realSizes[index], - }); - - expect(result.meta.ethereum?.blockNumber).toEqual(10); - expect(result.meta.ethereum?.networkName).toEqual('private'); - expect(result.meta.ethereum?.smartContractAddress).toEqual( - '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - ); - expect(result.meta.ethereum?.blockConfirmation).toBeGreaterThanOrEqual(1); - expect(result.meta.ethereum?.blockTimestamp).toBeDefined(); - }); - }); - - it('allows to read hash on IPFS with retries', async () => { - // Mock to test IPFS read retry - ethereumStorage.smartContractManager.getEntriesFromEthereum = - (): Promise => { - return Promise.resolve({ - ethereumEntries: [ - { - feesParameters: { contentSize: 10 }, - hash: '0x0', - meta: {}, - } as StorageTypes.IEthereumEntry, - { - feesParameters: { contentSize: 10 }, - hash: '0x1', - meta: {}, - } as StorageTypes.IEthereumEntry, - { - feesParameters: { contentSize: 10 }, - hash: '0x2', - meta: {}, - } as StorageTypes.IEthereumEntry, - { - feesParameters: { contentSize: 10 }, - hash: '0x3', - meta: {}, - } as StorageTypes.IEthereumEntry, - { - feesParameters: { contentSize: 10 }, - hash: '0x4', - meta: {}, - } as StorageTypes.IEthereumEntry, - { - feesParameters: { contentSize: 10 }, - hash: '0x5', - meta: {}, - } as StorageTypes.IEthereumEntry, - { - feesParameters: { contentSize: 10 
}, - hash: '0x6', - meta: {}, - } as StorageTypes.IEthereumEntry, - ], - lastTimestamp: 0, - }); - }; - - // Store how many times we tried to read a specific hash - const hashTryCount: any = {}; - - // This mock simulates ipfsManager.read() when we try to read the hash on IPFS multiple times - jest - .spyOn((ethereumStorage as any).ipfsStorage, 'read') - .mockImplementation(async (hash: any) => { - hashTryCount[hash] ? hashTryCount[hash]++ : (hashTryCount[hash] = 1); - - switch (hash) { - case '0x0': - throw new Error(`File size (1) exceeds maximum file size of 0`); - case '0x1': - throw new Error('Ipfs object get request response cannot be parsed into JSON format'); - case '0x2': - throw new Error('Ipfs object get failed'); - case '0x3': - return { - content: '0000', - ipfsSize: 20, - } as StorageTypes.IIpfsObject; - case '0x4': - if (hashTryCount[hash] < 2) { - throw new IpfsConnectionError('Timeout'); - } - - return { - content: '0000', - ipfsSize: 10, - } as StorageTypes.IIpfsObject; - case '0x5': - if (hashTryCount[hash] < 3) { - throw new IpfsConnectionError('Timeout'); - } - - return { - content: '0000', - ipfsSize: 10, - } as StorageTypes.IIpfsObject; - case '0x6': - if (hashTryCount[hash] < 10) { - throw new IpfsConnectionError('Timeout'); - } - - return { - content: '0000', - ipfsSize: 10, - } as StorageTypes.IIpfsObject; - default: - fail(`ipfsManager.read() unrecognized hash: ${hash}`); - } - throw Error('expected error'); - }); - - await ethereumStorage.getData(); - - // Check how many times we tried to get hashes - expect(hashTryCount).toMatchObject({ - '0x0': 1, - '0x1': 1, - '0x2': 1, - '0x3': 1, - '0x4': 2, - '0x5': 2, - '0x6': 2, - }); - }); - it('getData returns an empty array if no hash was found', async () => { - ethereumStorage.smartContractManager.requestHashStorage.getPastEvents = () => []; - const result = await ethereumStorage.getData({ from: 10000, to: 10001 }); - expect(result.entries).toMatchObject([]); - expect(typeof result.lastTimestamp).toBe('number'); - }); - }); - - describe('getIgnoredData', () => { - it('cannot get ignored data if not initialized', async () => { - const ethereumStorage = new EthereumStorage('localhost', ipfsStorage, web3Connection); - await expect(ethereumStorage.getIgnoredData()).rejects.toThrowError( - 'Ethereum storage must be initialized', - ); - }); - it('can get ignored data', async () => { - const ethereumStorage = new EthereumStorage('localhost', ipfsStorage, web3Connection); - await ethereumStorage.initialize(); - - ethereumStorage.ignoredDataIds.getDataIdsToRetry = async (): Promise< - StorageTypes.IEthereumEntry[] - > => [ - { - error: { - message: 'Ipfs read request response error: test purpose', - type: StorageTypes.ErrorEntries.IPFS_CONNECTION_ERROR, - }, - feesParameters: { - contentSize: 3, - }, - hash: 'hConnectionError', - meta: { blockTimestamp: 123 } as any, - }, - ]; - - jest.spyOn((ethereumStorage as any).ipfsStorage, 'read').mockImplementation( - async (): Promise => ({ - content: 'ok', - ipfsLinks: [], - ipfsSize: 2, - }), - ); - - const entries = await ethereumStorage.getIgnoredData(); - expect(entries.length).toBe(1); - expect(entries[0]).toEqual({ - content: 'ok', - id: 'hConnectionError', - meta: { - ethereum: { - blockTimestamp: 123, - }, - ipfs: { - size: 2, - }, - state: 'confirmed', - storageType: 'ethereumIpfs', - timestamp: 123, - }, - }); - }); - it('can get ignored data even if empty', async () => { - const ethereumStorage = new EthereumStorage('localhost', ipfsStorage, web3Connection); - await
ethereumStorage.initialize(); - - const entries = await ethereumStorage.getIgnoredData(); - // 'config wrong' - expect(entries.length).toBe(0); - }); - }); - - describe('_getStatus()', () => { - it('can get status', async () => { - const ethereumStorage = new EthereumStorage('localhost', ipfsStorage, web3Connection); - await ethereumStorage.initialize(); - await ethereumStorage.append(content1); - await ethereumStorage.getData(); - - const status = await ethereumStorage._getStatus(); - expect(status.dataIds.count).toBeGreaterThanOrEqual(0); - expect(status.ignoredDataIds.count).toBeGreaterThanOrEqual(0); - expect(status.ethereum).toEqual({ - creationBlockNumberHashStorage: 0, - currentProvider: 'http://localhost:8545', - hashStorageAddress: '0x345ca3e014aaf5dca488057592ee47305d9b3e10', - hashSubmitterAddress: '0xf25186b5081ff5ce73482ad761db0eb0d25abfbf', - maxConcurrency: 5, - maxRetries: undefined, - networkName: 'private', - retryDelay: undefined, - }); - expect(status.ipfs).toBeDefined(); - }, 10000); - }); -}); diff --git a/packages/ethereum-storage/test/ethereum-tx-submitter.test.ts b/packages/ethereum-storage/test/ethereum-tx-submitter.test.ts new file mode 100644 index 0000000000..3ffdaec240 --- /dev/null +++ b/packages/ethereum-storage/test/ethereum-tx-submitter.test.ts @@ -0,0 +1,26 @@ +import { JsonRpcProvider } from '@ethersproject/providers'; +import { BigNumber, Wallet } from 'ethers'; +import { EthereumTransactionSubmitter } from '../src'; + +const mnemonic = 'candy maple cake sugar pudding cream honey rich smooth crumble sweet treat'; +const signer = Wallet.fromMnemonic(mnemonic).connect(new JsonRpcProvider('http://localhost:8545')); + +describe(EthereumTransactionSubmitter, () => { + const txSubmitter = new EthereumTransactionSubmitter({ network: 'private', signer }); + + it('can initialize', async () => { + await txSubmitter.initialize(); + }); + + it('can prepareSubmit', async () => { + expect(await txSubmitter.prepareSubmit('hash', 1)).toMatchObject({ + to: '0xf25186b5081ff5ce73482ad761db0eb0d25abfbf', + data: /^0x.+/, + value: BigNumber.from(0), + }); + }); + it('can submit', async () => { + const tx = await txSubmitter.submit('hash', 1); + expect(tx.hash).toMatch(/^0x.+/); + }); +}); diff --git a/packages/ethereum-storage/test/ipfs-manager.test.ts b/packages/ethereum-storage/test/ipfs-manager.test.ts index 565524debc..3b02a03e17 100644 --- a/packages/ethereum-storage/test/ipfs-manager.test.ts +++ b/packages/ethereum-storage/test/ipfs-manager.test.ts @@ -3,20 +3,6 @@ import IpfsManager from '../src/ipfs-manager'; import { AxiosInstance } from 'axios'; import MockAdapter from 'axios-mock-adapter'; -const ipfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'localhost', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 1000, -}; - -const invalidHostIpfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'nonexistent', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 1500, -}; - const testErrorHandling: StorageTypes.IIpfsErrorHandlingConfiguration = { delayBetweenRetries: 0, maxRetries: 0, @@ -46,7 +32,7 @@ const contentLengthOnIpfs2 = 38; describe('Ipfs manager', () => { beforeEach(() => { ipfsManager = new IpfsManager({ - ipfsGatewayConnection, + ipfsTimeout: 1000, ipfsErrorHandling: testErrorHandling, }); }); @@ -117,7 +103,8 @@ describe('Ipfs manager', () => { it('operations with a invalid host network should throw ENOTFOUND errors', async () => { ipfsManager = new IpfsManager({ - 
ipfsGatewayConnection: invalidHostIpfsGatewayConnection, + ipfsUrl: 'http://nonexistent:5001', + ipfsTimeout: 1500, ipfsErrorHandling: testErrorHandling, }); await expect(ipfsManager.getIpfsNodeId()).rejects.toThrowError('getaddrinfo ENOTFOUND'); @@ -139,7 +126,6 @@ describe('Ipfs manager', () => { it('should retry on error', async () => { ipfsManager = new IpfsManager({ - ipfsGatewayConnection, ipfsErrorHandling: retryTestErrorHandling, }); const axiosInstance: AxiosInstance = (ipfsManager as any).axiosInstance; @@ -151,7 +137,7 @@ describe('Ipfs manager', () => { it('timeout errors should generate retry', async () => { ipfsManager = new IpfsManager({ - ipfsGatewayConnection: { ...ipfsGatewayConnection, timeout: 1 }, + ipfsTimeout: 1, ipfsErrorHandling: retryTestErrorHandling, }); const axiosInstance: AxiosInstance = (ipfsManager as any).axiosInstance; diff --git a/packages/ethereum-storage/test/ipfs-storage.test.ts b/packages/ethereum-storage/test/ipfs-storage.test.ts index 9fe96a94ae..f125c1824f 100644 --- a/packages/ethereum-storage/test/ipfs-storage.test.ts +++ b/packages/ethereum-storage/test/ipfs-storage.test.ts @@ -1,40 +1,22 @@ -import { StorageTypes } from '@requestnetwork/types'; import { IpfsStorage } from '../src/ipfs-storage'; -const ipfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'localhost', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 1000, -}; - -const invalidHostIpfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'nonexistent', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 10000, -}; - -const hash1 = 'QmNXA5DyFZkdf4XkUT81nmJSo3nS2bL25x7YepxeoDa6tY'; - describe('IPFS Storage', () => { let ipfsStorage: IpfsStorage; beforeEach(() => { jest.resetAllMocks(); - ipfsStorage = new IpfsStorage({ ipfsGatewayConnection }); + ipfsStorage = new IpfsStorage({ ipfsTimeout: 1000 }); }); it('cannot initialize if ipfs node not reachable', async () => { const ipfsStorage = new IpfsStorage({ - ipfsGatewayConnection: invalidHostIpfsGatewayConnection, + ipfsUrl: 'http://nonexistent:5001', + ipfsTimeout: 1500, + ipfsErrorHandling: { maxRetries: 1, delayBetweenRetries: 0 }, // speedup test }); await expect(ipfsStorage.initialize()).rejects.toThrowError( 'IPFS node is not accessible or corrupted: Error: getaddrinfo ENOTFOUND nonexistent', ); }); it('cannot initialize if ipfs node not in the right network', async () => { - const ipfsStorage = new IpfsStorage({ - ipfsGatewayConnection, - }); jest .spyOn((ipfsStorage as any).ipfsManager, 'getBootstrapList') .mockImplementation(async () => ['not findable node']); @@ -64,41 +46,6 @@ describe('IPFS Storage', () => { ); }); - it('allows to IPFS pin a list of hashes', async () => { - const spy = jest - .spyOn((ipfsStorage as any).ipfsManager, 'pin') - .mockReturnValue(Promise.resolve([''])); - - const pinConfig = { - delayBetweenCalls: 0, - maxSize: 100, - timeout: 1000, - }; - - let hashes = new Array(100).fill(hash1); - - await ipfsStorage.pinDataToIPFS(hashes, pinConfig); - - expect(spy).toHaveBeenCalledTimes(1); - - hashes = new Array(200).fill(hash1); - await ipfsStorage.pinDataToIPFS(hashes, pinConfig); - expect(spy).toHaveBeenCalledTimes(3); - }); - - it('pinning errors are non blocking', async () => { - const warnLogMock = jest.spyOn((ipfsStorage as any).logger, 'warn'); - jest.spyOn((ipfsStorage as any).ipfsManager, 'pin').mockImplementation(() => { - throw new Error('expected error'); - }); - - await ipfsStorage.pinDataToIPFS([hash1]); - 
expect(warnLogMock).toHaveBeenCalledWith( - 'Failed pinning some hashes the IPFS node: Error: expected error', - ['ipfs'], - ); - }); - describe('compareBootstrapNodes', () => { describe.each(['ipfs', 'p2p'])('It supports the %s path', (path) => { it('Returns true for same list', () => { diff --git a/packages/ethereum-storage/test/smartcontract-manager.test.ts b/packages/ethereum-storage/test/smartcontract-manager.test.ts deleted file mode 100644 index 597a00c1fd..0000000000 --- a/packages/ethereum-storage/test/smartcontract-manager.test.ts +++ /dev/null @@ -1,596 +0,0 @@ -import * as SmartContracts from '@requestnetwork/smart-contracts'; -import { CurrencyTypes, StorageTypes } from '@requestnetwork/types'; -import EthereumBlocks from '../src/ethereum-blocks'; -import SmartContractManager from '../src/smart-contract-manager'; - -/* eslint-disable no-magic-numbers */ - -const web3HttpProvider = require('web3-providers-http'); - -const provider = new web3HttpProvider('http://localhost:8545'); -const web3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - timeout: 1000, - web3Provider: provider, -}; - -// Contract instance necessary to get event logs -const web3Utils = require('web3-utils'); -const web3Eth = require('web3-eth'); -const eth = new web3Eth(provider); - -const { time } = require('@openzeppelin/test-helpers'); - -const invalidHostProvider = new web3HttpProvider('http://nonexistent:8545'); -const invalidHostWeb3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - timeout: 1000, - web3Provider: invalidHostProvider, -}; - -const invalidProvider = 'invalidProvider'; -const invalidWeb3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - timeout: 1000, - web3Provider: invalidProvider, -}; - -const invalidNetwork = 999999; -const invalidNetworkWeb3Connection: StorageTypes.IWeb3Connection = { - networkId: invalidNetwork, - timeout: 1000, - web3Provider: provider, -}; - -let smartContractManager: SmartContractManager; - -const hashStr = 'QmNXA5DyFZkdf4XkUT81nmJSo3nS2bL25x7YepxeoDa6tY'; -const realSize = 29; -const realSizeBytes32Hex = web3Utils.padLeft(web3Utils.toHex(realSize), 64); -const fakeSize = 50; -const fakeSizeBytes32Hex = web3Utils.padLeft(web3Utils.toHex(fakeSize), 64); -const otherContent = - 'This is not a hash but but we should be able to add any content into Ethereum, the gas cost for the transaction will be higher'; -const otherSize = 100000; -const otherSizeBytes32Hex = web3Utils.padLeft(web3Utils.toHex(otherSize), 64); - -const contractHashStorage = new eth.Contract( - SmartContracts.requestHashStorageArtifact.getContractAbi(), - SmartContracts.requestHashStorageArtifact.getAddress('private'), -); - -const contractHashSubmitter = new eth.Contract( - SmartContracts.requestHashSubmitterArtifact.getContractAbi(), - SmartContracts.requestHashSubmitterArtifact.getAddress('private'), -); -const addressRequestHashSubmitter = contractHashSubmitter._address; - -// Define a mock for getPastEvents to be independant of the state of ganache instance -const pastEventsMock = [ - { - blockNumber: 0, - event: 'NewHash', - returnValues: { - feesParameters: realSizeBytes32Hex, - hash: hashStr, - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xa', - }, - // This event has an invalid size but it should not be ignored in smart contract manager - { - blockNumber: 4, - event: 'NewHash', - returnValues: { - feesParameters: 
fakeSizeBytes32Hex, - hash: hashStr, - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xb', - }, - // We can add any data into the storage - { - blockNumber: 6, - event: 'NewHash', - returnValues: { - feesParameters: otherSizeBytes32Hex, - hash: otherContent, - hashSubmitter: addressRequestHashSubmitter, - }, - transactionHash: '0xc', - }, - // We can add any data into the storage - { - blockNumber: 9, - event: 'NewHash', - returnValues: { - feesParameters: otherSizeBytes32Hex, - hash: otherContent, - hashSubmitter: addressRequestHashSubmitter, - }, - }, -]; - -// Return past events from pastEventsMock starting at fromBlock -const getPastEventsMock = async (info: { - event: string; - fromBlock: number; - toBlock: number | string; -}): Promise => { - const toBlock = info.toBlock === 'latest' ? Infinity : info.toBlock; - - return pastEventsMock.filter( - (block) => block.blockNumber >= info.fromBlock && block.blockNumber <= toBlock, - ); -}; - -// Mock to test the case where events are badly formatted -const badEventsMock = [ - { - blockNumber: 2, - event: 'NewHash', - returnValues: { - feesParameters: fakeSizeBytes32Hex, - }, - transactionHash: '0xb', - }, -]; -// eslint-disable-next-line -const getBadEventsMock = async () => badEventsMock; - -// This getPastEvents mock simulates cases where there could be more than 1000 results for a block interval -// txPerBlock describes the number of results for each block between 0 and 9 -// If the block interval provided by info contains more than 1000 results -// 'query returned more than 1000 results' is thrown -const noMoreThan1000ResultsGetPastEventsMock = async ( - txPerBlock: number[], - info: { - event: string; - fromBlock: number; - toBlock: number; - }, -): Promise => { - // Compute the total number of results - let totalResult = 0; - for (let i = info.fromBlock; i < info.toBlock; i++) { - totalResult += txPerBlock[i]; - } - - // Return the relevant error if the total number of results is more than 1000 - if (totalResult > 1000) { - // We use totalResult for the message because we should handle any - // "query returned more than XXX results" error message - throw Error(`query returned more than ${totalResult} results`); - } - - // Same return as previous mock - return pastEventsMock.filter( - (block) => block.blockNumber >= info.fromBlock && block.blockNumber <= info.toBlock, - ); -}; - -// Configuration where the recursive call of getPastEvents never happens -const txPerBlockConfiguration1 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]; - -// Configuration where the recursive call of getPastEvents happens for each range of more than 1 block -const txPerBlockConfiguration2 = [999, 999, 999, 999, 999, 999, 999, 999, 999, 999]; - -// Various configurations -const txPerBlockConfiguration3 = [999, 999, 999, 999, 999, 1, 1, 1, 1, 1]; -const txPerBlockConfiguration4 = [1, 1, 1, 1, 1, 999, 999, 999, 999, 999]; -const txPerBlockConfiguration5 = [100, 200, 150, 400, 1, 670, 300, 140, 20, 600]; - -/* eslint-disable no-magic-numbers */ -describe('SmartContractManager', () => { - beforeEach(() => { - smartContractManager = new SmartContractManager(web3Connection); - smartContractManager.requestHashStorage.getPastEvents = getPastEventsMock; - smartContractManager.ethereumBlocks.retryDelay = 0; - smartContractManager.ethereumBlocks.maxRetries = 0; - }); - - it('can get config', async () => { - // 'config wrong' - expect(smartContractManager.getConfig()).toEqual({ - creationBlockNumberHashStorage: 0, - currentProvider: 'http://localhost:8545', - hashStorageAddress:
'0x345ca3e014aaf5dca488057592ee47305d9b3e10', - hashSubmitterAddress: '0xf25186b5081ff5ce73482ad761db0eb0d25abfbf', - maxConcurrency: Number.MAX_SAFE_INTEGER, - maxRetries: undefined, - networkName: 'private', - retryDelay: undefined, - }); - }); - - it('getMainAccount should return the main account', async () => { - const accounts = await eth.getAccounts(); - const mainAccount = await smartContractManager.getMainAccount(); - - expect(mainAccount).toEqual(accounts[0]); - }); - - it('allows to add hashes to contractHashStorage', async () => { - await smartContractManager.addHashAndSizeToEthereum(hashStr, { contentSize: realSize }); - - // Reading last event log - const events = await contractHashStorage.getPastEvents({ - event: 'NewHash', - toBlock: 'latest', - }); - - // Only one event is parsed - expect(events.length).toEqual(1); - - expect(events[0].returnValues.hash).toEqual(hashStr); - expect(events[0].returnValues.hashSubmitter).toEqual(addressRequestHashSubmitter); - expect(events[0].returnValues.feesParameters).toEqual(realSizeBytes32Hex); - }); - - // TODO since the migration to jest, this test fails. - it.skip('allows to add other content than hash to contractHashStorage', async () => { - await smartContractManager.addHashAndSizeToEthereum(otherContent, { contentSize: otherSize }); - // Reading last event log - const events = await contractHashStorage.getPastEvents({ - event: 'NewHash', - toBlock: 'latest', - }); - - // Only one event is parsed - expect(events.length).toEqual(1); - - expect(events[0].returnValues.hash).toEqual(otherContent); - expect(events[0].returnValues.hashSubmitter).toEqual(addressRequestHashSubmitter); - expect(events[0].returnValues.feesParameters).toEqual(otherSizeBytes32Hex); - }); - - it('cannot add hash to ethereum if block of the transaction is not fetchable within 23 confirmation', async () => { - // fake the creation of new blocks on ethereum - const blockInterval = setInterval(async () => { - await time.advanceBlock(); - }, 50); - - // This mock is used to ensure any block is never fetchable - smartContractManager.eth.getBlock = (_block: any): any => { - return null; - }; - - try { - await expect( - smartContractManager.addHashAndSizeToEthereum(hashStr, { contentSize: otherSize }), - ).rejects.toThrowError('Maximum number of confirmation reached'); - } finally { - clearInterval(blockInterval); - } - }, 35000); - - it('allows to get all hashes', async () => { - // Inside getBlockNumberFromNumberOrString, this function will be only called with parameter 'latest' - // For getPastEventsMock the number of the latest block is 9 - smartContractManager.eth.getBlock = (_block: any): any => { - return { - number: 9, - }; - }; - const { ethereumEntries } = await smartContractManager.getEntriesFromEthereum(); - - expect(ethereumEntries.length).toEqual(4); - expect(ethereumEntries[0].hash).toEqual(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: realSize }); - expect(ethereumEntries[1].hash).toEqual(hashStr); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[2].hash).toEqual(otherContent); - expect(ethereumEntries[2].feesParameters).toMatchObject({ contentSize: otherSize }); - expect(ethereumEntries[3].hash).toEqual(otherContent); - expect(ethereumEntries[3].feesParameters).toMatchObject({ contentSize: otherSize }); - }); - - it('allows to get all hashes with options from', async () => { - // Inside getBlockNumberFromNumberOrString, this function will be only called 
with parameter 'latest' - // For getPastEventsMock the number of the latest block is 9 - const mockBlocksEthereum = [7, 30, 45, 87, 100, 150, 209, 234, 290, 306]; - const mockEth = { - getBlock: (i: number): any => { - return mockBlocksEthereum[i] ? { timestamp: mockBlocksEthereum[i] } : undefined; - }, - // eslint-disable-next-line - getBlockNumber: () => 9, - }; - smartContractManager.ethereumBlocks = new EthereumBlocks(mockEth, 1, 0, 0); - smartContractManager.ethereumBlocks.getBlock = (_block: any): any => { - return { - number: 9, - }; - }; - - const { ethereumEntries } = await smartContractManager.getEntriesFromEthereum({ - from: 299, - }); - - expect(ethereumEntries.length).toEqual(1); - expect(ethereumEntries[0].hash).toEqual(otherContent); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: otherSize }); - }); - - it('allows to get all hashes with options to', async () => { - const mockBlocksEthereum = [7, 30, 45, 87, 100, 150, 209, 234, 290, 306]; - const mockEth = { - getBlock: (i: number): any => { - return mockBlocksEthereum[i] ? { timestamp: mockBlocksEthereum[i] } : undefined; - }, - // eslint-disable-next-line - getBlockNumber: () => 9, - }; - smartContractManager.ethereumBlocks = new EthereumBlocks(mockEth, 1, 0, 0); - - const { ethereumEntries } = await smartContractManager.getEntriesFromEthereum({ - to: 299, - }); - expect(ethereumEntries.length).toEqual(3); - expect(ethereumEntries[0].hash).toEqual(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: realSize }); - expect(ethereumEntries[1].hash).toEqual(hashStr); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[2].hash).toEqual(otherContent); - expect(ethereumEntries[2].feesParameters).toMatchObject({ contentSize: otherSize }); - }); - - it('allows to get all hashes with options from and to', async () => { - const mockBlocksEthereum = [7, 30, 45, 87, 100, 150, 209, 234, 290, 306]; - const mockEth = { - getBlock: (i: number): any => { - return mockBlocksEthereum[i] ? 
{ timestamp: mockBlocksEthereum[i] } : undefined; - }, - // eslint-disable-next-line - getBlockNumber: () => 9, - }; - smartContractManager.ethereumBlocks = new EthereumBlocks(mockEth, 1, 0, 0); - - const { ethereumEntries } = await smartContractManager.getEntriesFromEthereum({ - from: 10, - to: 299, - }); - expect(ethereumEntries.length).toEqual(2); - expect(ethereumEntries[0].hash).toEqual(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[1].hash).toEqual(otherContent); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: otherSize }); - }); - - it('getMainAccount with a invalid host provider should throw a timeout error', async () => { - smartContractManager = new SmartContractManager(invalidHostWeb3Connection); - await expect(smartContractManager.getMainAccount()).rejects.toThrowError(); - }); - - it('addHashAndSizeToEthereum with a invalid host provider should throw a timeout error', async () => { - smartContractManager = new SmartContractManager(invalidHostWeb3Connection); - await expect( - smartContractManager.addHashAndSizeToEthereum(hashStr, { contentSize: realSize }), - ).rejects.toThrowError(); - }); - - it('getEntriesFromEthereum with a invalid host provider should throw a timeout error', async () => { - smartContractManager = new SmartContractManager(invalidHostWeb3Connection); - smartContractManager.ethereumBlocks.retryDelay = 0; - smartContractManager.ethereumBlocks.maxRetries = 0; - await expect(smartContractManager.getEntriesFromEthereum()).rejects.toThrowError(); - }); - - it('getEntriesFromEthereum rejects if fromBlock is larger than toBlock', async () => { - const mockBlocksEthereum = [7, 30, 45, 87, 100, 150, 209, 234, 290, 306]; - const mockEth = { - getBlock: (i: number): any => { - return mockBlocksEthereum[i] ? 
{ timestamp: mockBlocksEthereum[i] } : undefined; - }, - // eslint-disable-next-line - getBlockNumber: () => 9, - }; - smartContractManager.ethereumBlocks = new EthereumBlocks(mockEth, 1, 0, 0); - - await expect( - smartContractManager.getEntriesFromEthereum({ - from: 200, - to: 10, - }), - ).rejects.toThrowError('toBlock must be larger than fromBlock'); - }); - - it('initializes smartcontract-manager with default values should not throw an error', () => { - expect(() => new SmartContractManager()).not.toThrow(); - }); - - it('initializes smartcontract-manager with an invalid provider should throw an error', () => { - expect(() => new SmartContractManager(invalidWeb3Connection)).toThrowError( - `Can't initialize web3-eth`, - ); - }); - - it('initializes smartcontract-manager with an invalid network should throw an error', () => { - expect(() => new SmartContractManager(invalidNetworkWeb3Connection)).toThrowError( - `Unsupported storage chain: ${invalidNetwork}`, - ); - }); - - it('getAddress in artifactsRequestHashStorageUtils with a invalid host network should throw an error', () => { - expect(() => - SmartContracts.requestHashStorageArtifact.getAddress( - 'nonexistent' as CurrencyTypes.EvmChainName, - ), - ).toThrowError('No deployment for network'); - }); - - it('getAddress in artifactsRequestHashSubmitterUtils with a invalid host network should throw an error', () => { - expect(() => - SmartContracts.requestHashSubmitterArtifact.getAddress( - 'nonexistent' as CurrencyTypes.EvmChainName, - ), - ).toThrowError('No deployment for network'); - }); - - it('getCreationBlockNumber in artifactsRequestHashSubmitterUtils', () => { - expect(SmartContracts.requestHashSubmitterArtifact.getCreationBlockNumber('private')).toBe(1); - }); - - it('badly formatted events from web3 should throw an error', async () => { - smartContractManager.requestHashStorage.getPastEvents = getBadEventsMock; - - const allHashesPromise = smartContractManager.getEntriesFromEthereum(); - - await expect(allHashesPromise).rejects.toThrowError( - `event is incorrect: doesn't have a hash or feesParameters`, - ); - }); - - it('allows to get hashes and sizes from events on block interval with over 1000 results', async () => { - smartContractManager.eth.getBlock = (_block: any): any => { - return { - number: 9, - }; - }; - - smartContractManager.requestHashStorage.getPastEvents = (info: { - event: string; - fromBlock: number; - toBlock: number; - }): Promise => noMoreThan1000ResultsGetPastEventsMock(txPerBlockConfiguration1, info); - - let { ethereumEntries } = await smartContractManager.getEntriesFromEthereum(); - - expect(ethereumEntries.length).toBe(4); - expect(ethereumEntries[0].hash).toBe(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: realSize }); - expect(ethereumEntries[1].hash).toBe(hashStr); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[2].hash).toBe(otherContent); - expect(ethereumEntries[2].feesParameters).toMatchObject({ contentSize: otherSize }); - expect(ethereumEntries[3].hash).toBe(otherContent); - expect(ethereumEntries[3].feesParameters).toMatchObject({ contentSize: otherSize }); - - smartContractManager.requestHashStorage.getPastEvents = (info: { - event: string; - fromBlock: number; - toBlock: number; - }): Promise => noMoreThan1000ResultsGetPastEventsMock(txPerBlockConfiguration2, info); - - ethereumEntries = (await smartContractManager.getEntriesFromEthereum()).ethereumEntries; - - 
expect(ethereumEntries.length).toBe(4); - expect(ethereumEntries[0].hash).toBe(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: realSize }); - expect(ethereumEntries[1].hash).toBe(hashStr); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[2].hash).toBe(otherContent); - expect(ethereumEntries[2].feesParameters).toMatchObject({ contentSize: otherSize }); - expect(ethereumEntries[3].hash).toBe(otherContent); - expect(ethereumEntries[3].feesParameters).toMatchObject({ contentSize: otherSize }); - - smartContractManager.requestHashStorage.getPastEvents = (info: { - event: string; - fromBlock: number; - toBlock: number; - }): Promise => noMoreThan1000ResultsGetPastEventsMock(txPerBlockConfiguration3, info); - - ethereumEntries = (await smartContractManager.getEntriesFromEthereum()).ethereumEntries; - - expect(ethereumEntries.length).toBe(4); - expect(ethereumEntries[0].hash).toBe(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: realSize }); - expect(ethereumEntries[1].hash).toBe(hashStr); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[2].hash).toBe(otherContent); - expect(ethereumEntries[2].feesParameters).toMatchObject({ contentSize: otherSize }); - expect(ethereumEntries[3].hash).toBe(otherContent); - expect(ethereumEntries[3].feesParameters).toMatchObject({ contentSize: otherSize }); - - smartContractManager.requestHashStorage.getPastEvents = (info: { - event: string; - fromBlock: number; - toBlock: number; - }): Promise => noMoreThan1000ResultsGetPastEventsMock(txPerBlockConfiguration4, info); - - ethereumEntries = (await smartContractManager.getEntriesFromEthereum()).ethereumEntries; - - expect(ethereumEntries.length).toBe(4); - expect(ethereumEntries[0].hash).toBe(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: realSize }); - expect(ethereumEntries[1].hash).toBe(hashStr); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[2].hash).toBe(otherContent); - expect(ethereumEntries[2].feesParameters).toMatchObject({ contentSize: otherSize }); - expect(ethereumEntries[3].hash).toBe(otherContent); - expect(ethereumEntries[3].feesParameters).toMatchObject({ contentSize: otherSize }); - - smartContractManager.requestHashStorage.getPastEvents = (info: { - event: string; - fromBlock: number; - toBlock: number; - }): Promise => noMoreThan1000ResultsGetPastEventsMock(txPerBlockConfiguration5, info); - - ethereumEntries = (await smartContractManager.getEntriesFromEthereum()).ethereumEntries; - - expect(ethereumEntries.length).toBe(4); - expect(ethereumEntries[0].hash).toBe(hashStr); - expect(ethereumEntries[0].feesParameters).toMatchObject({ contentSize: realSize }); - expect(ethereumEntries[1].hash).toBe(hashStr); - expect(ethereumEntries[1].feesParameters).toMatchObject({ contentSize: fakeSize }); - expect(ethereumEntries[2].hash).toBe(otherContent); - expect(ethereumEntries[2].feesParameters).toMatchObject({ contentSize: otherSize }); - expect(ethereumEntries[3].hash).toBe(otherContent); - expect(ethereumEntries[3].feesParameters).toMatchObject({ contentSize: otherSize }); - }); - - it('cannot get hashes and sizes from events with incorrect toBlock option', async () => { - await expect( - smartContractManager.getEthereumEntriesFromEvents(0, 'incorrectBlockDescriber'), - ).rejects.toThrowError(`Cannot get the number of 
the block`); - }); - - it('cannot get hashes and sizes from events with toBlock option containing no number', async () => { - smartContractManager.eth.getBlock = (block: any): any => { - if (block === 'pending') { - return { - transactions: ['0x10', '0x20', '0x30'], - }; - } - return null; - }; - - await expect( - smartContractManager.getEthereumEntriesFromEvents(0, 'pending'), - ).rejects.toThrowError(`Block pending has no number`); - }); - - it('allows to check if the web3 provider is listening', async () => { - // smartContractManager check on http://localhost:8545 - await expect(smartContractManager.checkWeb3ProviderConnection(10000)).resolves.not.toThrow(); - }); - - it('should throw an error if the web3 provider is not listening', async () => { - smartContractManager.eth.net.isListening = async () => false; - await expect(smartContractManager.checkWeb3ProviderConnection(10000)).rejects.toThrowError( - 'The Web3 provider is not listening', - ); - }); - - it('should throw an error if the web3 provider is not reachable or takes too long to respond', async () => { - smartContractManager.eth.net.isListening = () => - new Promise((resolve, _reject): void => { - setTimeout(() => resolve(true), 300); - }); - - // Timeout is lower to not reach the mocha test timeout - await expect(smartContractManager.checkWeb3ProviderConnection(100)).rejects.toThrowError( - 'The Web3 provider is not reachable, did you use the correct protocol (http/https)?', - ); - }); - - it('should throw an error if an error occurs when checking if the web3 provider is listening', async () => { - smartContractManager.eth.net.isListening = async () => { - throw Error('A connection error'); - }; - - await expect(smartContractManager.checkWeb3ProviderConnection(10000)).rejects.toThrowError( - 'Error when trying to reach Web3 provider', - ); - }); -}); diff --git a/packages/integration-test/package.json b/packages/integration-test/package.json index 01ab0c5dba..9486d9ea77 100644 --- a/packages/integration-test/package.json +++ b/packages/integration-test/package.json @@ -40,7 +40,6 @@ "test:btc": "jest test/scheduled/btc.test.ts --forceExit" }, "devDependencies": { - "@openzeppelin/test-helpers": "0.5.6", "@requestnetwork/advanced-logic": "0.36.0", "@requestnetwork/currency": "0.10.0", "@requestnetwork/data-access": "0.28.0", @@ -52,10 +51,10 @@ "@requestnetwork/payment-processor": "0.39.0", "@requestnetwork/request-client.js": "0.41.0", "@requestnetwork/request-logic": "0.27.0", + "@requestnetwork/thegraph-data-access": "0.35.2", "@requestnetwork/transaction-manager": "0.28.0", "@requestnetwork/types": "0.37.0", "@requestnetwork/utils": "0.37.0", - "@truffle/hdwallet-provider": "1.2.3", "@types/jest": "26.0.13", "@types/node": "16.11.7", "ethers": "5.5.1", @@ -64,7 +63,6 @@ "ts-jest": "26.3.0", "ts-node": "10.9.1", "tslib": "2.5.0", - "typescript": "4.9.5", - "web3-eth": "1.3.6" + "typescript": "4.9.5" } } diff --git a/packages/integration-test/test/layers.test.ts b/packages/integration-test/test/layers.test.ts index 1e72d03a60..1f01002867 100644 --- a/packages/integration-test/test/layers.test.ts +++ b/packages/integration-test/test/layers.test.ts @@ -1,12 +1,15 @@ import { getCurrentTimestampInSecond } from '@requestnetwork/utils'; -const web3Eth = require('web3-eth'); - import { AdvancedLogic } from '@requestnetwork/advanced-logic'; -import { DataAccess } from '@requestnetwork/data-access'; import { EthereumPrivateKeyDecryptionProvider } from '@requestnetwork/epk-decryption'; import { EthereumPrivateKeySignatureProvider } 
from '@requestnetwork/epk-signature'; -import { EthereumStorage, IpfsStorage } from '@requestnetwork/ethereum-storage'; +import { TheGraphDataAccess } from '@requestnetwork/thegraph-data-access'; +import { PendingStore } from '@requestnetwork/data-access'; +import { + EthereumStorage, + EthereumTransactionSubmitter, + IpfsStorage, +} from '@requestnetwork/ethereum-storage'; import MultiFormat from '@requestnetwork/multi-format'; import { RequestLogic } from '@requestnetwork/request-logic'; import { TransactionManager } from '@requestnetwork/transaction-manager'; @@ -19,10 +22,11 @@ import { SignatureTypes, StorageTypes, } from '@requestnetwork/types'; +import { providers, Wallet } from 'ethers'; let advancedLogic: AdvancedLogicTypes.IAdvancedLogic; let requestLogic: RequestLogicTypes.IRequestLogic; -let provider: any; +let provider: providers.JsonRpcProvider; let payeeSignatureInfo: SignatureTypes.ISignatureParameters; let payeeIdentity: IdentityTypes.IIdentity; let encryptionDataPayee: any; @@ -34,45 +38,35 @@ let signatureProvider: any; let dataAccess: DataAccessTypes.IDataAccess; -const { time } = require('@openzeppelin/test-helpers'); - -let nbBlocks = 0; -let testsFinished = false; const interval = setInterval(async () => { - await time.advanceBlock(); - if (testsFinished) { - nbBlocks++; - } + await provider.send('evm_mine', []); // eslint-disable-next-line no-magic-numbers - if (nbBlocks > 25) { - clearInterval(interval); - } - // eslint-disable-next-line no-magic-numbers -}, 1000); +}, 200); afterAll(() => { - testsFinished = true; + clearInterval(interval); }); +const mnemonic = 'candy maple cake sugar pudding cream honey rich smooth crumble sweet treat'; + +jest.setTimeout(60_000); + describe('Request system', () => { beforeEach(async () => { // Storage setup - provider = new web3Eth.providers.HttpProvider('http://localhost:8545'); - const ipfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'localhost', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 10000, - }; - const web3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - web3Provider: provider, - }; - const ipfsStorage = new IpfsStorage({ ipfsGatewayConnection }); - const ethereumStorage = new EthereumStorage('localhost', ipfsStorage, web3Connection); - + provider = new providers.JsonRpcProvider('http://localhost:8545'); + const signer = Wallet.fromMnemonic(mnemonic).connect(provider); + const ipfsStorage = new IpfsStorage({ ipfsTimeout: 10000 }); + const txSubmitter = new EthereumTransactionSubmitter({ signer, network: 'private' }); + const ethereumStorage = new EthereumStorage({ ipfsStorage, txSubmitter }); // Data access setup - dataAccess = new DataAccess(ethereumStorage); + dataAccess = new TheGraphDataAccess({ + graphql: { url: 'http://localhost:8000/subgraphs/name/RequestNetwork/request-storage' }, + network: 'private', + storage: ethereumStorage, + pendingStore: new PendingStore(), + }); + await dataAccess.initialize(); // Signature provider setup @@ -147,11 +141,6 @@ describe('Request system', () => { requestLogic = new RequestLogic(transactionManager, signatureProvider, advancedLogic); }); - afterAll(() => { - // Stop web3 provider - provider.disconnect(); - }); - it('can create a request', async () => { const contentDataExtensionData = advancedLogic.extensions.contentData.createCreationAction({ content: { this: 'could', be: 'an', invoice: true }, @@ -234,32 +223,6 @@ describe('Request system', () => { }); it('can create a 
request with cache', async () => { - const ipfsGatewayConnection: StorageTypes.IIpfsGatewayConnection = { - host: 'localhost', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, - timeout: 10000, - }; - const web3Connection: StorageTypes.IWeb3Connection = { - networkId: StorageTypes.EthereumNetwork.PRIVATE, - web3Provider: provider, - }; - const ipfsStorage = new IpfsStorage({ ipfsGatewayConnection }); - const ethereumStorage = new EthereumStorage('localhost', ipfsStorage, web3Connection); - - // Data access setup - dataAccess = new DataAccess(ethereumStorage); - await dataAccess.initialize(); - - // Transaction manager setup - const transactionManager = new TransactionManager(dataAccess, decryptionProvider); - - // Advanced Logic setup - advancedLogic = new AdvancedLogic(); - - // Logic setup - requestLogic = new RequestLogic(transactionManager, signatureProvider, advancedLogic); - const contentDataExtensionData = advancedLogic.extensions.contentData.createCreationAction({ content: { this: 'could', be: 'an', invoice: true }, }); @@ -297,12 +260,11 @@ describe('Request system', () => { const requestIdLength = 66; expect(resultCreation.result.requestId.length).toEqual(requestIdLength); - // wait a bit - // eslint-disable-next-line no-magic-numbers - await new Promise((r: any): any => setTimeout(r, 2000)); + await new Promise((r) => resultCreation.on('confirmed', r)); const request = await requestLogic.getRequestFromId(resultCreation.result.requestId); expect(request).toBeDefined(); + expect(request.meta.transactionManagerMeta.dataAccessMeta.storageMeta[0].storageType).toEqual( StorageTypes.StorageSystemType.ETHEREUM_IPFS, ); @@ -393,6 +355,7 @@ describe('Request system', () => { }, timestamp: getCurrentTimestampInSecond(), }; + await new Promise((r) => resultCreation1.on('confirmed', r)); const resultCreation2 = await requestLogic.createRequest( request2CreationHash, payeeIdentity, @@ -406,9 +369,7 @@ describe('Request system', () => { requestId: requestId1, }; - // wait a bit - // eslint-disable-next-line no-magic-numbers - await new Promise((r: any): any => setTimeout(r, 1000)); + await new Promise((r) => resultCreation2.on('confirmed', r)); const resultReduce1 = await requestLogic.reduceExpectedAmountRequest( request1ReduceHash, @@ -417,9 +378,7 @@ describe('Request system', () => { const timestampReduce1 = resultReduce1.meta.transactionManagerMeta.dataAccessMeta.storageMeta.timestamp; - // wait a bit - // eslint-disable-next-line no-magic-numbers - await new Promise((r: any): any => setTimeout(r, 1100)); + await new Promise((r) => resultReduce1.on('confirmed', r)); // cancel request const request1CancelHash: RequestLogicTypes.ICancelParameters = { @@ -485,14 +444,14 @@ describe('Request system', () => { expect(request.result.pending!.expectedAmount).toEqual('12345678987654321'); expect(request.result.pending!.state).toEqual(RequestLogicTypes.STATE.CREATED); + await new Promise((resolve) => resultCreation.on('confirmed', resolve)); + // reduce the expected amount by payee const resultReduce = await requestLogic.reduceExpectedAmountRequest( { requestId: resultCreation.result.requestId, deltaAmount: '987654321' }, payeeIdentity, ); - await new Promise((resolve) => resultCreation.on('confirmed', resolve)); - expect(resultReduce.meta.transactionManagerMeta.encryptionMethod).toEqual('ecies-aes256-gcm'); expect(resultReduce.result).not.toBeDefined(); @@ -510,6 +469,8 @@ describe('Request system', () => { expect(requestAfterReduce.result.pending).not.toBeNull(); 
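// Editorial aside (editor's addition, not part of the patch): throughout these
// integration tests the old fixed sleeps ("wait a bit" + setTimeout) are replaced
// by waiting for the 'confirmed' event emitted on the request-logic result once
// the underlying storage transaction is mined, e.g.:
//
//   const resultCreation = await requestLogic.createRequest(requestCreationHash, payeeIdentity);
//   await new Promise((resolve) => resultCreation.on('confirmed', resolve));
//
// This makes the tests deterministic instead of depending on block timing.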
expect(requestAfterReduce.result.pending!.expectedAmount).toEqual('12345678000000000'); + await new Promise((resolve) => resultReduce.on('confirmed', resolve)); + // accept the request by payer const resultAccept = await requestLogic.acceptRequest( { requestId: resultCreation.result.requestId }, @@ -556,6 +517,8 @@ describe('Request system', () => { expect(requestAfterIncrease.result.pending).toBeDefined(); expect(requestAfterIncrease.result.pending!.expectedAmount).toEqual('12345678000000111'); + await new Promise((resolve) => resultIncrease.on('confirmed', resolve)); + // cancel the request by payee const resultCancel = await requestLogic.cancelRequest( { requestId: resultCreation.result.requestId }, @@ -577,6 +540,8 @@ describe('Request system', () => { expect(requestAfterCancel.result.pending).toBeDefined(); expect(requestAfterCancel.result.pending!.state).toEqual(RequestLogicTypes.STATE.CANCELED); + await new Promise((resolve) => resultCancel.on('confirmed', resolve)); + // check that the data are encrypted: const dataAccessData = await dataAccess.getTransactionsByChannelId( resultCreation.result.requestId, diff --git a/packages/integration-test/tsconfig.build.json b/packages/integration-test/tsconfig.build.json index 25704a725f..a04a35939e 100644 --- a/packages/integration-test/tsconfig.build.json +++ b/packages/integration-test/tsconfig.build.json @@ -15,6 +15,7 @@ { "path": "../multi-format/tsconfig.build.json" }, { "path": "../payment-processor/tsconfig.build.json" }, { "path": "../request-logic/tsconfig.build.json" }, + { "path": "../thegraph-data-access/tsconfig.build.json" }, { "path": "../transaction-manager/tsconfig.build.json" }, { "path": "../types/tsconfig.build.json" } ] diff --git a/packages/payment-detection/src/thegraph/info-retriever.ts b/packages/payment-detection/src/thegraph/info-retriever.ts index abc24228b8..638b124921 100644 --- a/packages/payment-detection/src/thegraph/info-retriever.ts +++ b/packages/payment-detection/src/thegraph/info-retriever.ts @@ -2,7 +2,7 @@ import { PaymentTypes } from '@requestnetwork/types'; import { ICurrencyManager } from '@requestnetwork/currency'; import { utils } from 'ethers'; import { pick, mapValues } from 'lodash'; -import type { TheGraphClient } from '.'; +import type { TheGraphClient } from './client'; import type { EscrowEventResultFragment, PaymentEventResultFragment } from './generated/graphql'; import { formatAddress, unpadAmountFromChainlink } from '../utils'; import { TransferEventsParams, ITheGraphBaseInfoRetriever } from '../types'; diff --git a/packages/payment-processor/src/payment/erc20-escrow-payment.ts b/packages/payment-processor/src/payment/erc20-escrow-payment.ts index 9745b483ab..4652b307be 100644 --- a/packages/payment-processor/src/payment/erc20-escrow-payment.ts +++ b/packages/payment-processor/src/payment/erc20-escrow-payment.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/no-non-null-assertion */ import { BigNumber, BigNumberish, constants, ContractTransaction, providers, Signer } from 'ethers'; import { erc20EscrowToPayArtifact } from '@requestnetwork/smart-contracts'; -import { ERC20EscrowToPay__factory } from '@requestnetwork/smart-contracts/types/'; +import { ERC20EscrowToPay__factory } from '@requestnetwork/smart-contracts/types'; import { ClientTypes, ExtensionTypes } from '@requestnetwork/types'; import { getAmountToPay, diff --git a/packages/request-client.js/src/http-data-access.ts b/packages/request-client.js/src/http-data-access.ts index c49ed440c5..1f512fb2c0 100644 --- 
a/packages/request-client.js/src/http-data-access.ts +++ b/packages/request-client.js/src/http-data-access.ts @@ -181,10 +181,9 @@ export default class HttpDataAccess implements DataAccessTypes.IDataAccess { /** * Gets information from the node (version, files etc...) * - * @param detailed if true get the list of files hashes */ - public async _getStatus(detailed?: boolean): Promise { - return this.fetchAndRetry('/information', { detailed }); + public async _getStatus(): Promise { + return this.fetchAndRetry('/information', {}); } /** diff --git a/packages/request-client.js/src/http-metamask-data-access.ts b/packages/request-client.js/src/http-metamask-data-access.ts index bd60090e04..9c2de334e6 100644 --- a/packages/request-client.js/src/http-metamask-data-access.ts +++ b/packages/request-client.js/src/http-metamask-data-access.ts @@ -21,7 +21,6 @@ export default class HttpMetaMaskDataAccess extends HttpDataAccess { }; } = {}; - private submitterContract: ethers.Contract | undefined; private provider: ethers.providers.JsonRpcProvider | ethers.providers.Web3Provider; private networkName: CurrencyTypes.EvmChainName = 'private'; @@ -77,18 +76,16 @@ export default class HttpMetaMaskDataAccess extends HttpDataAccess { channelId: string, topics?: string[], ): Promise { - if (!this.submitterContract) { + if (!this.networkName) { const network = await this.provider.getNetwork(); this.networkName = network.chainId === 1 ? 'mainnet' : network.chainId === 4 ? 'rinkeby' : 'private'; - - this.submitterContract = new ethers.Contract( - requestHashSubmitterArtifact.getAddress(this.networkName), - requestHashSubmitterArtifact.getContractAbi(), - this.provider.getSigner(), - ); } + const submitterContract = requestHashSubmitterArtifact.connect( + this.networkName, + this.provider.getSigner(), + ); // We don't use the node to persist the transaction, but we will Do it ourselves @@ -106,26 +103,26 @@ export default class HttpMetaMaskDataAccess extends HttpDataAccess { } = await axios.post('/ipfsAdd', { data: block }, this.axiosConfig); // get the fee required to submit the hash - const fee = await this.submitterContract.getFeesAmount(ipfsSize); + const fee = await submitterContract.getFeesAmount(ipfsSize); // submit the hash to ethereum - const tx = await this.submitterContract.submitHash( + const tx = await submitterContract.submitHash( ipfsHash, /* eslint-disable no-magic-numbers */ ethers.utils.hexZeroPad(ethers.utils.hexlify(ipfsSize), 32), { value: fee }, ); - const ethBlock = await this.provider.getBlock(tx.blockNumber); + const ethBlock = await this.provider.getBlock(tx.blockNumber ?? -1); // create the storage meta from the transaction receipt const storageMeta: StorageTypes.IEthereumMetadata = { blockConfirmation: tx.confirmations, - blockNumber: tx.blockNumber, + blockNumber: tx.blockNumber ?? -1, blockTimestamp: ethBlock.timestamp, - fee, + fee: fee.toString(), networkName: this.networkName, - smartContractAddress: tx.to, + smartContractAddress: tx.to ?? 
'', transactionHash: tx.hash, }; @@ -135,7 +132,7 @@ export default class HttpMetaMaskDataAccess extends HttpDataAccess { } this.cache[channelId][ipfsHash] = { block, storageMeta }; - const eventEmitter = new EventEmitter(); + const eventEmitter = new EventEmitter() as DataAccessTypes.PersistTransactionEmitter; const result: DataAccessTypes.IReturnPersistTransactionRaw = { meta: { storageMeta: { @@ -151,22 +148,23 @@ export default class HttpMetaMaskDataAccess extends HttpDataAccess { }; // When the ethereum transaction is mined, emit an event 'confirmed' - tx.wait().then((txConfirmed: any) => { - // create the storage meta from the transaction receipt - const storageMetaConfirmed = { - blockConfirmation: txConfirmed.confirmations, - blockNumber: txConfirmed.blockNumber, - blockTimestamp: ethBlock.timestamp, - fee, - networkName: this.networkName, - smartContractAddress: txConfirmed.to, - transactionHash: txConfirmed.hash, - }; - + void tx.wait().then((txConfirmed) => { // emit the event to tell the request transaction is confirmed eventEmitter.emit('confirmed', { meta: { - storageMeta: storageMetaConfirmed, + storageMeta: { + ethereum: { + blockConfirmation: txConfirmed.confirmations, + blockNumber: txConfirmed.blockNumber, + blockTimestamp: ethBlock.timestamp, + fee: fee.toString(), + networkName: this.networkName, + smartContractAddress: txConfirmed.to, + transactionHash: txConfirmed.transactionHash, + }, + state: StorageTypes.ContentState.CONFIRMED, + timestamp: ethBlock.timestamp, + }, topics: topics || [], transactionStorageLocation: ipfsHash, }, diff --git a/packages/request-client.js/src/http-request-network.ts b/packages/request-client.js/src/http-request-network.ts index e3eade2ce4..d66cf24d42 100644 --- a/packages/request-client.js/src/http-request-network.ts +++ b/packages/request-client.js/src/http-request-network.ts @@ -9,8 +9,8 @@ import { AxiosRequestConfig } from 'axios'; import { PaymentNetworkOptions } from '@requestnetwork/payment-detection'; import RequestNetwork from './api/request-network'; import HttpDataAccess from './http-data-access'; -import MockDataAccess from './mock-data-access'; -import MockStorage from './mock-storage'; +import { MockDataAccess } from '@requestnetwork/data-access'; +import { MockStorage } from './mock-storage'; /** * Exposes RequestNetwork module configured to use http-data-access. diff --git a/packages/request-client.js/src/mock-data-access.ts b/packages/request-client.js/src/mock-data-access.ts deleted file mode 100644 index cb16aad7b5..0000000000 --- a/packages/request-client.js/src/mock-data-access.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { DataAccess } from '@requestnetwork/data-access'; -import { StorageTypes } from '@requestnetwork/types'; - -/** - * Mock Data access that bypasses the initialization. - * This class is meant to be used with HttpRequestNetwork and useMockStorage=true. - * Data-access initialization is asynchronous and this class is a hack to avoid having an asynchronous operation in the HttpRequestNetwork constructor. 
- */ -export default class MockDataAccess extends DataAccess { - constructor(storage: StorageTypes.IStorage) { - super(storage); - this.isInitialized = true; - } -} diff --git a/packages/request-client.js/src/mock-storage.ts b/packages/request-client.js/src/mock-storage.ts index 6cfb0394d7..bfa310bd59 100644 --- a/packages/request-client.js/src/mock-storage.ts +++ b/packages/request-client.js/src/mock-storage.ts @@ -6,11 +6,8 @@ import { getCurrentTimestampInSecond, normalizeKeccak256Hash } from '@requestnet /** * Storage layer implemented with in-memory hashmap, to be used for testing. */ -export default class MockStorage implements StorageTypes.IStorage { - private data: Map< - string, - { state: StorageTypes.ContentState; content: string; timestamp: number } - > = new Map(); +export class MockStorage implements StorageTypes.IStorage { + private data: Map = new Map(); // For test purpose we can force the next append call to emit Error private forceEmitError = false; @@ -19,26 +16,6 @@ export default class MockStorage implements StorageTypes.IStorage { return; } - public async ipfsAdd(content: string): Promise { - if (!content) { - throw Error('Error: no content provided'); - } - const hash = MultiFormat.serialize(normalizeKeccak256Hash(content)); - - const nowTimestampInSec = getCurrentTimestampInSecond(); - - this.data.set(hash, { - content, - state: StorageTypes.ContentState.PENDING, - timestamp: nowTimestampInSec, - }); - - return { - ipfsHash: hash, - ipfsSize: content.length, - }; - } - public async append(content: string): Promise { if (!content) { throw Error('Error: no content provided'); @@ -47,14 +24,6 @@ export default class MockStorage implements StorageTypes.IStorage { const nowTimestampInSec = getCurrentTimestampInSecond(); - const dataToStore = { - content, - state: StorageTypes.ContentState.PENDING, - timestamp: nowTimestampInSec, - }; - - this.data.set(hash, dataToStore); - const resultData = { content, id: hash, @@ -64,7 +33,12 @@ export default class MockStorage implements StorageTypes.IStorage { timestamp: nowTimestampInSec, }, }; - const result = Object.assign(new EventEmitter(), resultData); + this.data.set(hash, resultData); + + const result = Object.assign( + new EventEmitter() as StorageTypes.AppendResultEmitter, + resultData, + ); setTimeout(() => { if (this.forceEmitError) { @@ -72,9 +46,8 @@ export default class MockStorage implements StorageTypes.IStorage { this.forceEmitError = false; result.emit('error', 'forced error asked by _makeNextAppendFailInsteadOfConfirmed()'); } else { - // emit confirmed - dataToStore.state = StorageTypes.ContentState.CONFIRMED; - this.data.set(hash, dataToStore); + resultData.meta.state = StorageTypes.ContentState.CONFIRMED; + this.data.set(hash, resultData); result.emit('confirmed', resultData); } // eslint-disable-next-line no-magic-numbers @@ -91,15 +64,7 @@ export default class MockStorage implements StorageTypes.IStorage { if (!data) { throw Error('No content found from this id'); } - return { - content: data.content, - id, - meta: { - state: data.state, - storageType: StorageTypes.StorageSystemType.IN_MEMORY_MOCK, - timestamp: data.timestamp, - }, - }; + return data; } public async readMany(ids: string[]): Promise { @@ -107,15 +72,7 @@ export default class MockStorage implements StorageTypes.IStorage { } public async getData(): Promise { - const entries = Array.from(this.data.entries()).map(([id, { content, state, timestamp }]) => ({ - content, - id, - meta: { - state, - storageType: 
StorageTypes.StorageSystemType.IN_MEMORY_MOCK, - timestamp, - }, - })); + const entries = Array.from(this.data.values()); const nowTimestampInSec = getCurrentTimestampInSecond(); @@ -125,24 +82,19 @@ export default class MockStorage implements StorageTypes.IStorage { }; } - public async getIgnoredData(): Promise { - return []; - } - /** * Gets information * - * @param detailed if true get the list of files hash */ - public async _getStatus(detailed?: boolean): Promise { + public async _getStatus(): Promise { return { dataIds: { count: Object.entries(this.data).length, - values: detailed ? Object.entries(this.data) : undefined, + values: [], }, ignoredDataIds: { count: 0, - values: detailed ? [] : undefined, + values: [], }, }; } diff --git a/packages/request-client.js/test/declarative-payments.test.ts b/packages/request-client.js/test/declarative-payments.test.ts index 11a7f036d0..8828e926ff 100644 --- a/packages/request-client.js/test/declarative-payments.test.ts +++ b/packages/request-client.js/test/declarative-payments.test.ts @@ -460,7 +460,6 @@ describe('request-client.js: declarative payments', () => { await request.waitForConfirmation(); const data = request.getData(); - console.debug(data); const pn = getPaymentNetworkExtension(data)!; diff --git a/packages/request-client.js/test/http-data-access.test.ts b/packages/request-client.js/test/http-data-access.test.ts index 05c44e8f7e..673b0c3a11 100644 --- a/packages/request-client.js/test/http-data-access.test.ts +++ b/packages/request-client.js/test/http-data-access.test.ts @@ -24,7 +24,7 @@ describe('HttpDataAccess', () => { }, }); void httpDataAccess.persistTransaction({}, '', []).then((returnPersistTransaction) => { - returnPersistTransaction.on('error', (e) => { + returnPersistTransaction.on('error', (e: any) => { expect(e.message).toBe('Transaction confirmation not receive after 0 retries'); done(); }); diff --git a/packages/request-client.js/test/index.test.ts b/packages/request-client.js/test/index.test.ts index 643d0913fa..f0fa7ee295 100644 --- a/packages/request-client.js/test/index.test.ts +++ b/packages/request-client.js/test/index.test.ts @@ -22,8 +22,8 @@ import EtherscanProviderMock from './etherscan-mock'; import httpConfigDefaults from '../src/http-config-defaults'; import { IRequestDataWithEvents } from '../src/types'; import HttpMetaMaskDataAccess from '../src/http-metamask-data-access'; -import MockDataAccess from '../src/mock-data-access'; -import MockStorage from '../src/mock-storage'; +import { MockDataAccess } from '@requestnetwork/data-access'; +import { MockStorage } from '../src/mock-storage'; import * as RequestLogic from '@requestnetwork/types/src/request-logic-types'; const packageJson = require('../package.json'); @@ -894,11 +894,13 @@ describe('request-client.js', () => { [idRaw1.encryptionParams], ); + await request.waitForConfirmation(); + const requestsFromTopic = await requestNetwork.fromTopic('my amazing test topic'); expect(requestsFromTopic).not.toHaveLength(0); - expect(requestsFromTopic[0]).toMatchObject(request); - const requestData = requestsFromTopic[0].getData(); + expect(requestData).toMatchObject(request.getData()); + expect(requestData.meta).not.toBeNull(); expect(requestData.meta!.transactionManagerMeta.encryptionMethod).toBe('ecies-aes256-gcm'); }); @@ -919,6 +921,8 @@ describe('request-client.js', () => { [idRaw1.encryptionParams], ); + await request.waitForConfirmation(); + const request2 = await requestNetwork._createEncryptedRequest( { requestInfo: { @@ -930,21 +934,22 @@ 
describe('request-client.js', () => { }, [idRaw1.encryptionParams], ); + await request2.waitForConfirmation(); const requestsFromTopic = await requestNetwork.fromMultipleTopics([ 'my amazing test topic', 'my second best test topic', ]); expect(requestsFromTopic).toHaveLength(2); - expect(requestsFromTopic[0]).toMatchObject(request); - expect(requestsFromTopic[1]).toMatchObject(request2); + expect(requestsFromTopic[0].getData()).toMatchObject(request.getData()); + expect(requestsFromTopic[1].getData()).toMatchObject(request2.getData()); requestsFromTopic.forEach((req) => { const requestData = req.getData(); expect(requestData.meta).not.toBeNull(); expect(requestData.meta!.transactionManagerMeta.encryptionMethod).toBe('ecies-aes256-gcm'); }); - }, 10000); + }, 15000); it('creates an encrypted request and recovers it by identity', async () => { const requestNetwork = new RequestNetwork({ @@ -961,12 +966,13 @@ describe('request-client.js', () => { }, [idRaw1.encryptionParams], ); + await request.waitForConfirmation(); const requestFromIdentity = await requestNetwork.fromIdentity(TestData.payee.identity); expect(requestFromIdentity).not.toBe(''); - expect(requestFromIdentity[0]).toMatchObject(request); - const requestData = requestFromIdentity[0].getData(); + expect(requestData).toMatchObject(request.getData()); + expect(requestData.meta).not.toBeNull(); expect(requestData.meta!.transactionManagerMeta.encryptionMethod).toBe('ecies-aes256-gcm'); }); diff --git a/packages/request-client.js/test/mock-storage.test.ts b/packages/request-client.js/test/mock-storage.test.ts index c018d798ba..42299a19d2 100644 --- a/packages/request-client.js/test/mock-storage.test.ts +++ b/packages/request-client.js/test/mock-storage.test.ts @@ -1,6 +1,6 @@ import { StorageTypes } from '@requestnetwork/types'; -import MockStorage from '../src/mock-storage'; +import { MockStorage } from '../src/mock-storage'; describe('mock-storage', () => { it('can append data', async () => { diff --git a/packages/request-node/README.md b/packages/request-node/README.md index 4c23f4e471..a72ce45ecf 100644 --- a/packages/request-node/README.md +++ b/packages/request-node/README.md @@ -230,27 +230,15 @@ Default values correspond to the basic configuration used to run a server in a t - `--providerUrl` URL of the web3 provider for Ethereum - Default value: `http://localhost:8545` - Environment variable name: `$WEB3_PROVIDER_URL` -- `--ipfsHost` Host of the IPFS gateway - - Default value: `localhost` - - Environment variable name: `$IPFS_HOST` -- `--ipfsPort` Port of the IPFS gateway - - Default value: `5001` - - Environment variable name: `$IPFS_PORT` -- `--ipfsProtocol` Protocol used to connect to the IPFS gateway - - Default value: `http` - - Environment variable name: `$IPFS_PROTOCOL` +- `--ipfsUrl` URL of the IPFS gateway + - Default value: `http://localhost:5001` + - Environment variable name: `$IPFS_URL` - `--ipfsTimeout` Timeout threshold to connect to the IPFS gateway - Default value: `10000` - Environment variable name: `$IPFS_TIMEOUT` -- `--headers` Custom headers to send with the API responses (as a stringified JSON object) - - Default value: `'{}'` - - Environment variable name: `$HEADERS` - `--blockConfirmations` The number of block confirmations to consider a transaction successful +- `--blockConfirmations` The number of block confirmations to consider a transaction successful - Default value: `2` - Environment variable name: `$BLOCK_CONFIRMATIONS` -- `--lastBlockNumberDelay` The minimum delay between getLastBlockNumber 
calls to ethereum network - - Default value: `'10000'` - - Environment variable name: `$LAST_BLOCK_NUMBER_DELAY` - `--storageConcurrency` Maximum number of concurrent calls to Ethereum or IPFS - Default value: `'200'` - Environment variable name: `$STORAGE_MAX_CONCURRENCY` diff --git a/packages/request-node/package.json b/packages/request-node/package.json index 68e880a19f..5e7d04b42a 100644 --- a/packages/request-node/package.json +++ b/packages/request-node/package.json @@ -21,9 +21,9 @@ "engines": { "node": ">=16.0.0" }, - "main": "dist/server", + "main": "dist/bin", "bin": { - "request-node": "dist/server.js" + "request-node": "dist/bin.js" }, "files": [ "dist" @@ -33,8 +33,8 @@ "build:watch": "tsc -b tsconfig.build.json --watch", "test": "jest --runInBand --forceExit", "test:watch": "yarn test --watch", - "start": "ts-node src/server.ts", - "start:watch": "ts-node-dev src/server.ts", + "start": "ts-node src/bin.ts", + "start:watch": "ts-node-dev src/bin.ts", "clean": "shx rm -rf dist tsconfig.tsbuildinfo tsconfig.build.tsbuildinfo", "lint": "eslint . --fix", "lint:check": "eslint .", @@ -42,13 +42,13 @@ }, "dependencies": { "@ethersproject/experimental": "5.5.0", + "@requestnetwork/currency": "0.10.0", "@requestnetwork/data-access": "0.28.0", "@requestnetwork/ethereum-storage": "0.28.0", "@requestnetwork/smart-contracts": "0.30.0", "@requestnetwork/thegraph-data-access": "0.35.2", "@requestnetwork/types": "0.37.0", "@requestnetwork/utils": "0.37.0", - "@truffle/hdwallet-provider": "1.2.3", "chalk": "4.1.0", "cors": "2.8.5", "dotenv": "8.2.0", @@ -62,7 +62,6 @@ "keyv-file": "0.2.0", "shelljs": "0.8.5", "tslib": "2.5.0", - "web3-providers-ws": "1.6.1", "yargs": "17.6.2" }, "devDependencies": { diff --git a/packages/request-node/src/bin.ts b/packages/request-node/src/bin.ts new file mode 100644 index 0000000000..bba8ca5532 --- /dev/null +++ b/packages/request-node/src/bin.ts @@ -0,0 +1,17 @@ +#!/usr/bin/env node +/* eslint-disable no-console */ + +import * as config from './config'; +import { startNode } from './server'; + +// If -h option is used, commands are printed +// Otherwise the node is started +if (config.isHelp()) { + console.log(config.getHelpMessage()); +} else { + console.log(config.getConfigDisplay()); + startNode().catch((error) => { + console.error(error); + process.exit(1); + }); +} diff --git a/packages/request-node/src/config.ts b/packages/request-node/src/config.ts index 27a660104e..46c4b182cb 100644 --- a/packages/request-node/src/config.ts +++ b/packages/request-node/src/config.ts @@ -1,10 +1,12 @@ -import { LogTypes, StorageTypes } from '@requestnetwork/types'; import * as yargs from 'yargs'; -import { LogMode } from './logger'; import { config } from 'dotenv'; import { BigNumber } from 'ethers'; -const argv = yargs.parseSync(); +import { LogTypes } from '@requestnetwork/types'; + +import { LogMode } from './logger'; + +const argv = yargs.option('help', { alias: 'h', type: 'boolean' }).parseSync(); // Load environment variables from .env file (without overriding variables already set) config(); @@ -14,31 +16,27 @@ config(); * when environment variable is not specified */ const defaultValues = { - ethereumStorage: { + storage: { ethereum: { networkId: 0, web3ProviderUrl: 'http://localhost:8545', gasPriceMin: '1000000000', // one gwei blockConfirmations: 2, - graphNodeUrl: '', }, ipfs: { - host: 'localhost', - port: 5001, - protocol: StorageTypes.IpfsGatewayProtocol.HTTP, + url: 'http://localhost:5001', timeout: 30000, }, - lastBlockNumberDelay: 10000, - 
maxConcurrency: 5, + thegraph: { + nodeUrl: 'http://localhost:8000/subgraphs/name/RequestNetwork/request-storage', + }, persistTransactionTimeout: 600, - retryDelay: 1000, }, log: { level: LogTypes.LogLevel.INFO, mode: LogMode.human, }, server: { - externalUrl: 'localhost', headers: '{}', port: 3000, }, @@ -61,40 +59,20 @@ const makeOption = () => getOption(...params); -/** - * Get the external url of the node (used to identified where the buffer data are stored before being broadcasted) - */ -export const getServerExternalUrl = makeOption( - 'externalUrl', - 'EXTERNAL_URL', - defaultValues.server.externalUrl, -); +export const isHelp = (): boolean => argv.help || false; /** * Get the port from command line argument, environment variables or default values to allow user to connect to the server */ export const getServerPort = makeOption('port', 'PORT', defaultValues.server.port); -/** - * Get custom headers as a JSON stringified object from command line argument, environment variables or default values - */ -export function getCustomHeaders(): Record { - const headersString = getOption('headers', 'HEADERS', defaultValues.server.headers); - - try { - return JSON.parse(headersString); - } catch (e) { - throw new Error('Custom headers must be a valid JSON object'); - } -} - /** * Get network id of the Ethereum network from command line argument, environment variables or default values */ export const getStorageNetworkId = makeOption( 'networkId', 'ETHEREUM_NETWORK_ID', - defaultValues.ethereumStorage.ethereum.networkId, + defaultValues.storage.ethereum.networkId, ); /** @@ -103,21 +81,21 @@ export const getStorageNetworkId = makeOption( export const getStorageWeb3ProviderUrl = makeOption( 'providerUrl', 'WEB3_PROVIDER_URL', - defaultValues.ethereumStorage.ethereum.web3ProviderUrl, + defaultValues.storage.ethereum.web3ProviderUrl, ); /** Get the Graph node URL */ export const getGraphNodeUrl = makeOption( 'graphNodeUrl', 'GRAPH_NODE_URL', - defaultValues.ethereumStorage.ethereum.graphNodeUrl, + defaultValues.storage.thegraph.nodeUrl, ); export function getGasPriceMin(): BigNumber | undefined { const gasPriceMin = getOption( 'gasPriceMin', 'GAS_PRICE_MIN', - defaultValues.ethereumStorage.ethereum.gasPriceMin, + defaultValues.storage.ethereum.gasPriceMin, ); return gasPriceMin ? 
BigNumber.from(gasPriceMin) : undefined; } @@ -128,38 +106,14 @@ export function getGasPriceMin(): BigNumber | undefined { export const getBlockConfirmations = makeOption( 'blockConfirmations', 'BLOCK_CONFIRMATIONS', - defaultValues.ethereumStorage.ethereum.blockConfirmations, -); - -/** - * Get host from command line argument, environment variables or default values to connect to IPFS gateway - * @returns the host of the IPFS gateway - */ -export const getIpfsHost = makeOption( - 'ipfsHost', - 'IPFS_HOST', - defaultValues.ethereumStorage.ipfs.host, -); - -/** - * Get port from command line argument, environment variables or default values to connect to IPFS gateway - * @returns the port of the IPFS gateway - */ -export const getIpfsPort = makeOption( - 'ipfsPort', - 'IPFS_PORT', - defaultValues.ethereumStorage.ipfs.port, + defaultValues.storage.ethereum.blockConfirmations, ); /** - * Get protocol from command line argument, environment variables or default values to connect to IPFS gateway - * @returns the protocol to connect to the IPFS gateway + * Get IPFS url from command line argument, environment variables or default values to connect to IPFS gateway + * @returns the url of the IPFS gateway */ -export const getIpfsProtocol = makeOption( - 'ipfsProtocol', - 'IPFS_PROTOCOL', - defaultValues.ethereumStorage.ipfs.protocol, -); +export const getIpfsUrl = makeOption('ipfsUrl', 'IPFS_URL', defaultValues.storage.ipfs.url); /** * Get the timeout threshold from command line argument, environment variables or default values for IPFS gateway connection @@ -169,7 +123,7 @@ export const getIpfsProtocol = makeOption( export const getIpfsTimeout = makeOption( 'ipfsTimeout', 'IPFS_TIMEOUT', - defaultValues.ethereumStorage.ipfs.timeout, + defaultValues.storage.ipfs.timeout, ); /** @@ -199,46 +153,14 @@ export const getLogLevel = (): LogTypes.LogLevel => { /** logMode defines the log format to display: `human` is a more readable log, `machine` is better for parsing */ export const getLogMode = makeOption('logMode', 'LOG_MODE', defaultValues.log.mode); -/** - * Get the minimum delay between getLastBlockNumber calls - * - * @returns the minimum delay between last block number fetches - */ -export const getLastBlockNumberDelay = makeOption( - 'lastBlockNumberDelay', - 'LAST_BLOCK_NUMBER_DELAY', - defaultValues.ethereumStorage.lastBlockNumberDelay, -); - -/** - * Get the number of concurrent calls the ethereum storage can make - * - * @returns the maximum concurrency number - */ -export const getStorageConcurrency = makeOption( - 'storageMaxConcurrency', - 'STORAGE_MAX_CONCURRENCY', - defaultValues.ethereumStorage.maxConcurrency, -); - -/** - * Get the delay between subsequent Ethereum call retries - * - * @returns the delay between call retries - */ -export const getEthereumRetryDelay = makeOption( - 'ethereumRetryDelay', - 'ETHEREUM_RETRY_DELAY', - defaultValues.ethereumStorage.retryDelay, -); - /** * Get the initialization storage (a json-like file) path. * @returns the path to the json-like file that stores the initialization data (ethereum metadata and transaction index). 
*/ -export const getInitializationStorageFilePath = makeOption( +export const getInitializationStorageFilePath = makeOption( 'initializationStorageFilePath', 'INITIALIZATION_STORAGE_FILE_PATH', + '', ); /** @@ -251,21 +173,9 @@ export const getInitializationStorageFilePath = makeOption( export const getPersistTransactionTimeout = makeOption( 'persistTransactionTimeout', 'PERSIST_TRANSACTION_TIMEOUT', - defaultValues.ethereumStorage.persistTransactionTimeout, + defaultValues.storage.persistTransactionTimeout, ); -/** - * Get the IPFS connection configuration. - */ -export function getIpfsConfiguration(): StorageTypes.IIpfsGatewayConnection { - return { - host: getIpfsHost(), - port: getIpfsPort(), - protocol: getIpfsProtocol(), - timeout: getIpfsTimeout(), - }; -} - /** * Get the mnemonic from command line argument, environment variables or default values to generate the private key for the wallet * The default value must only be used for test purposes @@ -281,29 +191,35 @@ export function getHelpMessage(): string { OPTIONS SERVER OPTIONS port (${defaultValues.server.port})\t\t\t\tPort for the server to listen for API requests - headers (${defaultValues.server.headers})\t\t\t\tCustom headers to send with the API responses - externalUrl (${defaultValues.server.externalUrl})\t\t\t\tExternal url of the node (used to identified where the buffer data are stored before being broadcasted) + headers (${ + defaultValues.server.headers + })\t\t\t\tCustom headers to send with the API responses + + THE GRAPH OPTIONS + graphNodeUrl (${defaultValues.storage.thegraph.nodeUrl})\t\t\t\t ETHEREUM OPTIONS - networkId (${defaultValues.ethereumStorage.ethereum.networkId})\t\t\t\tId of the Ethereum network used - providerUrl (${defaultValues.ethereumStorage.ethereum.web3ProviderUrl})\tUrl of the web3 provider for Ethereum - LastBlockNumberDelay (${defaultValues.ethereumStorage.lastBlockNumberDelay} ms)\t\t\tThe minimum delay between getLastBlockNumber calls - EthereumRetryDelay (${defaultValues.ethereumStorage.retryDelay})\t\t\tThe delay between subsequent call retries + networkId (${ + defaultValues.storage.ethereum.networkId + })\t\t\t\tId of the Ethereum network used + providerUrl (${ + defaultValues.storage.ethereum.web3ProviderUrl + })\tUrl of the web3 provider for Ethereum IPFS OPTIONS - ipfsHost (${defaultValues.ethereumStorage.ipfs.host})\t\t\tHost of the IPFS gateway - ipfsPort (${defaultValues.ethereumStorage.ipfs.port})\t\t\t\tPort of the IPFS gateway - ipfsProtocol (${defaultValues.ethereumStorage.ipfs.protocol})\t\t\tProtocol used to connect to the IPFS gateway - ipfsTimeout (${defaultValues.ethereumStorage.ipfs.timeout})\t\t\tTimeout threshold to connect to the IPFS gateway + ipfsUrl (${defaultValues.storage.ipfs.url})\t\t\tURL of the IPFS gateway + ipfsTimeout (${ + defaultValues.storage.ipfs.timeout + })\t\t\tTimeout threshold to connect to the IPFS gateway OTHER OPTIONS - storageMaxConcurrency (${defaultValues.ethereumStorage.maxConcurrency})\t\t\tMaximum number of concurrent calls to Ethereum or IPFS - - logLevel (${defaultValues.log.level})\t\t\tThe node log level (ERROR, WARN, INFO or DEBUG) + logLevel (${ + LogTypes.LogLevel[defaultValues.log.level] + })\t\t\tThe node log level (ERROR, WARN, INFO or DEBUG) logMode (${defaultValues.log.mode})\t\t\tThe node log mode (human or machine) EXAMPLE - yarn start --port 5000 --networkId 1 --ipfsPort 6000 + yarn start --port 5000 --networkId 1 All options are optional, not specified options are read from environment variables If the environment 
variable is not specified, default value is used @@ -314,3 +230,17 @@ export function getHelpMessage(): string { return message; } + +export const getConfigDisplay = (): string => { + return `Using config: + Ethereum network id: ${getStorageNetworkId()} + Log Level: ${LogTypes.LogLevel[getLogLevel()]} + Log Mode: ${getLogMode()} + Web3 provider url: ${getStorageWeb3ProviderUrl()} + TheGraph url: ${getGraphNodeUrl()} + IPFS url: ${getIpfsUrl()} + IPFS timeout: ${getIpfsTimeout()} + Initialization storage path: ${getInitializationStorageFilePath()} + Storage block confirmations: ${getBlockConfirmations()} +`; +}; diff --git a/packages/request-node/src/dataAccess.ts b/packages/request-node/src/dataAccess.ts new file mode 100644 index 0000000000..8300549a36 --- /dev/null +++ b/packages/request-node/src/dataAccess.ts @@ -0,0 +1,50 @@ +import { providers, Wallet } from 'ethers'; +import { NonceManager } from '@ethersproject/experimental'; +import { DataAccessTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; +import { EvmChains } from '@requestnetwork/currency'; + +import * as config from './config'; +import { TheGraphDataAccess } from '@requestnetwork/thegraph-data-access'; +import { PendingStore } from '@requestnetwork/data-access'; +import { + EthereumStorage, + EthereumTransactionSubmitter, + getEthereumStorageNetworkNameFromId, +} from '@requestnetwork/ethereum-storage'; + +export function getDataAccess( + ipfsStorage: StorageTypes.IIpfsStorage, + logger: LogTypes.ILogger, +): DataAccessTypes.IDataAccess { + const graphNodeUrl = config.getGraphNodeUrl(); + + const network = getEthereumStorageNetworkNameFromId(config.getStorageNetworkId()) as any; + if (!network) { + throw new Error(`Storage network not supported: ${config.getStorageNetworkId()}`); + } + EvmChains.assertChainSupported(network); + + const wallet = Wallet.fromMnemonic(config.getMnemonic()).connect( + new providers.StaticJsonRpcProvider(config.getStorageWeb3ProviderUrl()), + ); + + const signer = new NonceManager(wallet); + + const gasPriceMin = config.getGasPriceMin(); + const blockConfirmations = config.getBlockConfirmations(); + const txSubmitter = new EthereumTransactionSubmitter({ network, logger, gasPriceMin, signer }); + const pendingStore = new PendingStore(); + const storage = new EthereumStorage({ + ipfsStorage, + txSubmitter, + logger, + blockConfirmations, + }); + return new TheGraphDataAccess({ + graphql: { url: graphNodeUrl }, + storage, + network, + logger, + pendingStore, + }); +} diff --git a/packages/request-node/src/dataStorage.ts b/packages/request-node/src/dataStorage.ts new file mode 100644 index 0000000000..3006c2852f --- /dev/null +++ b/packages/request-node/src/dataStorage.ts @@ -0,0 +1,12 @@ +import * as config from './config'; + +import { IpfsStorage } from '@requestnetwork/ethereum-storage'; +import { LogTypes, StorageTypes } from 'types/dist'; + +export function getDataStorage(logger: LogTypes.ILogger): StorageTypes.IIpfsStorage { + return new IpfsStorage({ + logger, + ipfsUrl: config.getIpfsUrl(), + ipfsTimeout: config.getIpfsTimeout(), + }); +} diff --git a/packages/request-node/src/request/confirmedTransactionStore.ts b/packages/request-node/src/request/confirmedTransactionStore.ts index b48332f915..67a4ac64f9 100644 --- a/packages/request-node/src/request/confirmedTransactionStore.ts +++ b/packages/request-node/src/request/confirmedTransactionStore.ts @@ -7,7 +7,7 @@ import Keyv, { Store } from 'keyv'; * The client can call the getConfirmed entry point, to get the confirmed event. 
*/ export default class ConfirmedTransactionStore { - private store: Keyv; + private store: Keyv; /** * Confirmed transactions store constructor @@ -21,7 +21,7 @@ export default class ConfirmedTransactionStore { public async getConfirmedTransaction( transactionHash: string, - ): Promise { + ): Promise { return this.store.get(transactionHash); } @@ -33,7 +33,7 @@ export default class ConfirmedTransactionStore { */ public async addConfirmedTransaction( transactionHash: string, - result: DataAccessTypes.IReturnPersistTransaction, + result: DataAccessTypes.IReturnPersistTransactionRaw, ): Promise { await this.store.set(transactionHash, result); } diff --git a/packages/request-node/src/request/getStatus.ts b/packages/request-node/src/request/getStatus.ts index 3fe5074bea..06aaa5ecd0 100644 --- a/packages/request-node/src/request/getStatus.ts +++ b/packages/request-node/src/request/getStatus.ts @@ -36,9 +36,7 @@ export default class GetStatusHandler { } try { - const dataAccessStatus = await this.dataAccess._getStatus( - Boolean(clientRequest.query.detailed), - ); + const dataAccessStatus = await this.dataAccess._getStatus(); let providerUrl = ''; // let's extract only the hostname to hide any token or sensible key @@ -52,24 +50,17 @@ export default class GetStatusHandler { const status = { dataAccess: dataAccessStatus, node: { - customHeaders: config.getCustomHeaders(), ethereum: { - concurrency: config.getStorageConcurrency(), - lastBlockNumberDelay: config.getLastBlockNumberDelay(), networkId: config.getStorageNetworkId(), providerUrl, - retryDelay: config.getEthereumRetryDelay(), blockConfirmations: config.getBlockConfirmations(), }, ipfs: { - host: config.getIpfsHost(), - port: config.getIpfsPort(), - protocol: config.getIpfsProtocol(), + url: config.getIpfsUrl(), timeout: config.getIpfsTimeout(), }, persistTransactionTimeout: config.getPersistTransactionTimeout(), port: config.getServerPort(), - serverExternalUrl: config.getServerExternalUrl(), version: packageJson.version, }, }; diff --git a/packages/request-node/src/requestNode.ts b/packages/request-node/src/requestNode.ts index 5fcd451841..4e7920e714 100644 --- a/packages/request-node/src/requestNode.ts +++ b/packages/request-node/src/requestNode.ts @@ -1,36 +1,182 @@ -import { DataAccess, TransactionIndex } from '@requestnetwork/data-access'; -import { LogTypes } from '@requestnetwork/types'; +import { DataAccessTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; +import { SimpleLogger } from '@requestnetwork/utils'; +import cors from 'cors'; +import { Server } from 'http'; +import express, { NextFunction, Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; +import { Store } from 'keyv'; +import ConfirmedTransactionStore from './request/confirmedTransactionStore'; +import GetConfirmedTransactionHandler from './request/getConfirmedTransactionHandler'; +import GetTransactionsByChannelIdHandler from './request/getTransactionsByChannelId'; +import PersistTransactionHandler from './request/persistTransaction'; +import GetChannelsByTopicHandler from './request/getChannelsByTopic'; +import GetStatusHandler from './request/getStatus'; +import IpfsAddHandler from './request/ipfsAdd'; -import KeyvFile from 'keyv-file'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const packageJson = require('../package.json'); -import { getInitializationStorageFilePath, getMnemonic } from './config'; -import { getEthereumStorage, getIpfsStorage } from './storageUtils'; +export const 
NOT_FOUND_MESSAGE = + 'Not found\nAvailable endpoints:\n/POST persistTransaction\n/GET getTransactionsByChannelId\n/GET getChannelsByTopic\n/POST /ipfsAdd\nGET getConfirmedTransaction\nGET status'; -import { RequestNodeBase } from './requestNodeBase'; -import { SimpleLogger } from '@requestnetwork/utils'; +export const NOT_INITIALIZED_MESSAGE = 'The node is not initialized'; + +export const REQUEST_NODE_VERSION_HEADER = 'X-Request-Network-Node-Version'; + +/** + * Main class for request node express server + * This class defines routes to handle requests from client + */ + +export class RequestNode { + /** + * DataAccess layer of the protocol + * This attribute is left public for mocking purpose + */ + protected dataAccess: DataAccessTypes.IDataAccess; + + private express: express.Application; + private initialized: boolean; + private logger: LogTypes.ILogger; + private persistTransactionHandler: PersistTransactionHandler; + private confirmedTransactionStore: ConfirmedTransactionStore; + private requestNodeVersion: string; + + private getTransactionsByChannelIdHandler: GetTransactionsByChannelIdHandler; + private getConfirmedTransactionHandler: GetConfirmedTransactionHandler; + private getChannelByTopicHandler: GetChannelsByTopicHandler; + private getStatusHandler: GetStatusHandler; + private ipfsAddHandler: IpfsAddHandler; + /** + * Request Node constructor + * + * @param [logger] The logger instance + */ + constructor( + dataAccess: DataAccessTypes.IDataAccess, + ipfsStorage: StorageTypes.IIpfsStorage, + store?: Store, + logger?: LogTypes.ILogger, + ) { + this.initialized = false; + + this.logger = logger || new SimpleLogger(); + this.dataAccess = dataAccess; + + this.confirmedTransactionStore = new ConfirmedTransactionStore(store); + this.getConfirmedTransactionHandler = new GetConfirmedTransactionHandler( + this.logger, + this.confirmedTransactionStore, + ); + this.getTransactionsByChannelIdHandler = new GetTransactionsByChannelIdHandler( + this.logger, + this.dataAccess, + ); + this.getChannelByTopicHandler = new GetChannelsByTopicHandler(this.logger, this.dataAccess); + this.getStatusHandler = new GetStatusHandler(this.logger, this.dataAccess); + this.ipfsAddHandler = new IpfsAddHandler(this.logger, ipfsStorage); + this.persistTransactionHandler = new PersistTransactionHandler( + this.confirmedTransactionStore, + this.dataAccess, + this.logger, + ); + + this.express = express(); + this.mountRoutes(); + + // Get the version of the Request Node for the request's response header + this.requestNodeVersion = packageJson.version; + } + + /** + * Initialize data access layer + * This function must be called before listening for requests + * because the data-access layer must be synchronized + * with the current state of the storage smart contract + */ + public async initialize(): Promise { + this.logger.info('Node initialization'); + const initializationStartTime: number = Date.now(); + + try { + await this.dataAccess.initialize(); + } catch (error) { + this.logger.error(`Node failed to initialize`); + throw error; + } + + this.initialized = true; + + this.logger.info('Node initialized'); -export class RequestNode extends RequestNodeBase { - constructor(logger?: LogTypes.ILogger) { - const initializationStoragePath = getInitializationStorageFilePath(); - logger = logger || new SimpleLogger(); - - const store = initializationStoragePath - ? 
new KeyvFile({ - filename: initializationStoragePath, - }) - : undefined; - const ipfsStorage = getIpfsStorage(logger); - // Use ethereum storage for the storage layer - const ethereumStorage = getEthereumStorage(getMnemonic(), ipfsStorage, logger, store); - - // Use an in-file Transaction index if a path is specified, an in-memory otherwise - const transactionIndex = new TransactionIndex(store); - - const dataAccess = new DataAccess(ethereumStorage, { - logger, - transactionIndex, - autoStartSynchronization: true, + const initializationEndTime: number = Date.now(); + + this.logger.info( + // eslint-disable-next-line no-magic-numbers + `Time to initialize: ${(initializationEndTime - initializationStartTime) / 1000}s`, + ['metric', 'initialization'], + ); + } + + async close(): Promise { + await this.dataAccess.close(); + } + + /** + * Listen for requests + * + * @param port Port used for listening on the server + * @param callback Callback called before listening for request + * @returns Object of the listening server + */ + public listen(port: number | string, callback: () => number): Server { + return this.express.listen(port, callback); + } + + // Defines handlers for necessary routes + private mountRoutes(): void { + const router = express.Router(); + + // Enable all CORS requests + this.express.use(cors()); + + // Set the Request Node version to the header + this.express.use((_, res, next) => { + res.header(REQUEST_NODE_VERSION_HEADER, this.requestNodeVersion); + next(); }); - super(dataAccess, ipfsStorage, store, logger); + + // Supported encodings + this.express.use(express.json()); + this.express.use(express.urlencoded({ extended: true })); + + router.get('/healthz', (_, res) => res.status(StatusCodes.OK).send('OK')); + router.use(this.initializedMiddelware()); + router.get('/readyz', (_, res) => res.status(StatusCodes.OK).send('OK')); + router.get('/status', this.getStatusHandler.handler); + router.post('/ipfsAdd', this.ipfsAddHandler.handler); + router.post('/persistTransaction', this.persistTransactionHandler.handler); + router.get('/getConfirmedTransaction', this.getConfirmedTransactionHandler.handler); + router.get('/getTransactionsByChannelId', this.getTransactionsByChannelIdHandler.handler); + router.get('/getChannelsByTopic', this.getChannelByTopicHandler.handler); + this.express.use('/', router); + + // Any other route returns error 404 + this.express.use((_clientRequest, serverResponse) => { + serverResponse.status(StatusCodes.NOT_FOUND).send(NOT_FOUND_MESSAGE); + }); + } + + /** + * Middleware to refuse traffic if node is not initialized yet + */ + private initializedMiddelware() { + return (_: Request, res: Response, next: NextFunction) => { + if (!this.initialized) { + res.status(StatusCodes.SERVICE_UNAVAILABLE).send(NOT_INITIALIZED_MESSAGE); + } else { + next(); + } + }; } } diff --git a/packages/request-node/src/requestNodeBase.ts b/packages/request-node/src/requestNodeBase.ts deleted file mode 100644 index 81c05c5be4..0000000000 --- a/packages/request-node/src/requestNodeBase.ts +++ /dev/null @@ -1,200 +0,0 @@ -import { DataAccessTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; -import { SimpleLogger } from '@requestnetwork/utils'; -import cors from 'cors'; -import { Server } from 'http'; -import express, { NextFunction, Request, Response } from 'express'; -import { StatusCodes } from 'http-status-codes'; -import { Store } from 'keyv'; -import { getCustomHeaders } from './config'; -import ConfirmedTransactionStore from 
'./request/confirmedTransactionStore'; -import GetConfirmedTransactionHandler from './request/getConfirmedTransactionHandler'; -import GetTransactionsByChannelIdHandler from './request/getTransactionsByChannelId'; -import PersistTransactionHandler from './request/persistTransaction'; -import GetChannelsByTopicHandler from './request/getChannelsByTopic'; -import GetStatusHandler from './request/getStatus'; -import IpfsAddHandler from './request/ipfsAdd'; - -// eslint-disable-next-line @typescript-eslint/no-var-requires -const packageJson = require('../package.json'); - -export const NOT_FOUND_MESSAGE = - 'Not found\nAvailable endpoints:\n/POST persistTransaction\n/GET getTransactionsByChannelId\n/GET getChannelsByTopic\n/POST /ipfsAdd\nGET getConfirmedTransaction\nGET status'; - -export const NOT_INITIALIZED_MESSAGE = 'The node is not initialized'; - -export const REQUEST_NODE_VERSION_HEADER = 'X-Request-Network-Node-Version'; - -/** - * Main class for request node express server - * This class defines routes to handle requests from client - */ - -export class RequestNodeBase { - /** - * DataAccess layer of the protocol - * This attribute is left public for mocking purpose - */ - protected dataAccess: DataAccessTypes.IDataAccess; - - private express: express.Application; - private initialized: boolean; - private logger: LogTypes.ILogger; - private persistTransactionHandler: PersistTransactionHandler; - private confirmedTransactionStore: ConfirmedTransactionStore; - private requestNodeVersion: string; - - private getTransactionsByChannelIdHandler: GetTransactionsByChannelIdHandler; - private getConfirmedTransactionHandler: GetConfirmedTransactionHandler; - private getChannelByTopicHandler: GetChannelsByTopicHandler; - private getStatusHandler: GetStatusHandler; - private ipfsAddHandler: IpfsAddHandler; - /** - * Request Node constructor - * - * @param [logger] The logger instance - */ - constructor( - dataAccess: DataAccessTypes.IDataAccess, - ipfsStorage: StorageTypes.IIpfsStorage, - store?: Store, - logger?: LogTypes.ILogger, - ) { - this.initialized = false; - - this.logger = logger || new SimpleLogger(); - this.dataAccess = dataAccess; - - this.confirmedTransactionStore = new ConfirmedTransactionStore(store); - this.getConfirmedTransactionHandler = new GetConfirmedTransactionHandler( - this.logger, - this.confirmedTransactionStore, - ); - this.getTransactionsByChannelIdHandler = new GetTransactionsByChannelIdHandler( - this.logger, - this.dataAccess, - ); - this.getChannelByTopicHandler = new GetChannelsByTopicHandler(this.logger, this.dataAccess); - this.getStatusHandler = new GetStatusHandler(this.logger, this.dataAccess); - this.ipfsAddHandler = new IpfsAddHandler(this.logger, ipfsStorage); - this.persistTransactionHandler = new PersistTransactionHandler( - this.confirmedTransactionStore, - this.dataAccess, - this.logger, - ); - - this.express = express(); - this.mountRoutes(); - - // Get the version of the Request Node for the request's response header - this.requestNodeVersion = packageJson.version; - } - - /** - * Initialize data access layer - * This function must be called before listening for requests - * because the data-access layer must be synchronized - * with the current state of the storage smart contract - */ - public async initialize(): Promise { - this.logger.info('Node initialization'); - const initializationStartTime: number = Date.now(); - - try { - await this.dataAccess.initialize(); - } catch (error) { - this.logger.error(`Node failed to initialize`); - throw 
error; - } - - this.initialized = true; - - this.logger.info('Node initialized'); - - const initializationEndTime: number = Date.now(); - - this.logger.info( - // eslint-disable-next-line no-magic-numbers - `Time to initialize: ${(initializationEndTime - initializationStartTime) / 1000}s`, - ['metric', 'initialization'], - ); - } - - async close(): Promise { - await this.dataAccess.close(); - } - - /** - * Listen for requests - * - * @param port Port used for listening on the server - * @param callback Callback called before listening for request - * @returns Object of the listening server - */ - public listen(port: number | string, callback: () => number): Server { - return this.express.listen(port, callback); - } - - // Defines handlers for necessary routes - private mountRoutes(): void { - const router = express.Router(); - - // Enable all CORS requests - this.express.use(cors()); - - const customHeaders = getCustomHeaders(); - if (customHeaders) { - this.express.use(this.customHeadersMiddelware(customHeaders)); - } - - // Set the Request Node version to the header - this.express.use((_, res, next) => { - res.header(REQUEST_NODE_VERSION_HEADER, this.requestNodeVersion); - next(); - }); - - // Supported encodings - this.express.use(express.json()); - this.express.use(express.urlencoded({ extended: true })); - - router.get('/healthz', (_, res) => res.status(StatusCodes.OK).send('OK')); - router.use(this.initializedMiddelware()); - router.get('/readyz', (_, res) => res.status(StatusCodes.OK).send('OK')); - router.get('/status', this.getStatusHandler.handler); - router.post('/ipfsAdd', this.ipfsAddHandler.handler); - router.post('/persistTransaction', this.persistTransactionHandler.handler); - router.get('/getConfirmedTransaction', this.getConfirmedTransactionHandler.handler); - router.get('/getTransactionsByChannelId', this.getTransactionsByChannelIdHandler.handler); - router.get('/getChannelsByTopic', this.getChannelByTopicHandler.handler); - this.express.use('/', router); - - // Any other route returns error 404 - this.express.use((_clientRequest, serverResponse) => { - serverResponse.status(StatusCodes.NOT_FOUND).send(NOT_FOUND_MESSAGE); - }); - } - - /** - * Middleware to send custom header on every response - */ - private customHeadersMiddelware(customHeaders: Record) { - return (_: Request, res: Response, next: NextFunction) => { - Object.entries(customHeaders).forEach(([key, value]: [string, string]) => - res.header(key, value), - ); - next(); - }; - } - - /** - * Middleware to refuse traffic if node is not initialized yet - */ - private initializedMiddelware() { - return (_: Request, res: Response, next: NextFunction) => { - if (!this.initialized) { - res.status(StatusCodes.SERVICE_UNAVAILABLE).send(NOT_INITIALIZED_MESSAGE); - } else { - next(); - } - }; - } -} diff --git a/packages/request-node/src/server.ts b/packages/request-node/src/server.ts index 7d9a7a4cef..8d11b53080 100755 --- a/packages/request-node/src/server.ts +++ b/packages/request-node/src/server.ts @@ -1,42 +1,29 @@ -#!/usr/bin/env node -import * as yargs from 'yargs'; -import { LogTypes } from '@requestnetwork/types'; import * as config from './config'; import { Logger } from './logger'; -import { RequestNode } from './requestNode'; import withShutdown from 'http-shutdown'; -import { TheGraphRequestNode } from './thegraph-node'; - -const argv = yargs.parseSync(); +import { RequestNode } from './requestNode'; +import { getDataAccess } from './dataAccess'; +import KeyvFile from 'keyv-file'; +import { getDataStorage } 
from './dataStorage';
-const logLevel = config.getLogLevel();
-const logMode = config.getLogMode();
 // Initialize the node logger
-const logger = new Logger(logLevel, logMode);
-
-const startNode = async (): Promise<void> => {
-  const serverMessage = `Using config:
-    Ethereum network id: ${config.getStorageNetworkId()}
-    Log Level: ${LogTypes.LogLevel[logLevel]}
-    Log Mode: ${logMode}
-    Web3 provider url: ${config.getStorageWeb3ProviderUrl()}
-    TheGraph url: ${config.getGraphNodeUrl()}
-    IPFS host: ${config.getIpfsHost()}
-    IPFS port: ${config.getIpfsPort()}
-    IPFS protocol: ${config.getIpfsProtocol()}
-    IPFS timeout: ${config.getIpfsTimeout()}
-    Storage concurrency: ${config.getStorageConcurrency()}
-    Initialization storage path: ${config.getInitializationStorageFilePath()}
-    Storage block confirmations: ${config.getBlockConfirmations()}
-`;
-
-  logger.info(serverMessage);
+const logger = new Logger(config.getLogLevel(), config.getLogMode());
+
+export const getRequestNode = (): RequestNode => {
+  const initializationStoragePath = config.getInitializationStorageFilePath();
+  const store = initializationStoragePath
+    ? new KeyvFile({
+        filename: initializationStoragePath,
+      })
+    : undefined;
+  const storage = getDataStorage(logger);
+  const dataAccess = getDataAccess(storage, logger);
+  return new RequestNode(dataAccess, storage, store, logger);
+};
+export const startNode = async (): Promise<void> => {
   const port = config.getServerPort();
-  const graphNodeUrl = config.getGraphNodeUrl();
-  const requestNode = graphNodeUrl
-    ? new TheGraphRequestNode(graphNodeUrl, logger)
-    : new RequestNode(logger);
+  const requestNode = getRequestNode();
   const server = withShutdown(
     requestNode.listen(port, () => {
       logger.info(`Listening on port ${port}`);
@@ -54,15 +41,3 @@ const startNode = async (): Promise<void> => {
   await requestNode.initialize();
 };
-
-// If -h option is used, commands are printed
-// Otherwise the node is started
-if (argv.h) {
-  /* eslint-disable no-console */
-  console.log(config.getHelpMessage());
-} else {
-  startNode().catch((error) => {
-    logger.error(error);
-    process.exit(1);
-  });
-}
diff --git a/packages/request-node/src/storageUtils.ts b/packages/request-node/src/storageUtils.ts
deleted file mode 100644
index f06a95cbf1..0000000000
--- a/packages/request-node/src/storageUtils.ts
+++ /dev/null
@@ -1,76 +0,0 @@
-import { EthereumStorage, IpfsStorage } from '@requestnetwork/ethereum-storage';
-import { LogTypes, StorageTypes } from '@requestnetwork/types';
-import * as config from './config';
-
-import Keyv from 'keyv';
-import KeyvFile from 'keyv-file';
-
-import Web3WsProvider from 'web3-providers-ws';
-import HDWalletProvider from '@truffle/hdwallet-provider';
-
-export function getIpfsStorage(logger?: LogTypes.ILogger): StorageTypes.IIpfsStorage {
-  return new IpfsStorage({ ipfsGatewayConnection: config.getIpfsConfiguration(), logger });
-}
-
-/**
- * Get the ethereum storage with values from config
- * @param mnemonic: mnemonic for the web3 wallet
- * @param logger: logger object for the logs
- * @param metadataStore a Keyv store to persist the metadata in ethereumMetadataCache
- * @returns ethereum storage object
- */
-export function getEthereumStorage(
-  mnemonic: string,
-  ipfsStorage: StorageTypes.IIpfsStorage,
-  logger?: LogTypes.ILogger,
-  metadataStore?: KeyvFile,
-): EthereumStorage {
-  // Initializes web3 connection object
-  let provider: HDWalletProvider;
-  if (config.getStorageWeb3ProviderUrl().match('^wss?://.+')) {
-    provider = new HDWalletProvider({
-      mnemonic,
-      // 
eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - providerOrUrl: new Web3WsProvider(config.getStorageWeb3ProviderUrl(), { - clientConfig: { - keepalive: true, - keepaliveInterval: 10000, // ms - }, - // Enable auto reconnection - reconnect: { - auto: true, - delay: 3000, // ms - maxAttempts: 5, - onTimeout: false, - }, - }), - }); - } else { - provider = new HDWalletProvider(mnemonic, config.getStorageWeb3ProviderUrl()); - } - - const web3Connection: StorageTypes.IWeb3Connection = { - networkId: config.getStorageNetworkId(), - web3Provider: provider, - }; - - const store = new Keyv({ - namespace: 'EthereumStorage', - store: metadataStore, - }); - - return new EthereumStorage( - config.getServerExternalUrl(), - ipfsStorage, - web3Connection, - { - getLastBlockNumberDelay: config.getLastBlockNumberDelay(), - logger, - maxConcurrency: config.getStorageConcurrency(), - retryDelay: config.getEthereumRetryDelay(), - gasPriceMin: config.getGasPriceMin(), - }, - store, - ); -} diff --git a/packages/request-node/src/thegraph-node.ts b/packages/request-node/src/thegraph-node.ts deleted file mode 100644 index 55be709ce4..0000000000 --- a/packages/request-node/src/thegraph-node.ts +++ /dev/null @@ -1,56 +0,0 @@ -import KeyvFile from 'keyv-file'; -import { providers, Wallet } from 'ethers'; -import { NonceManager } from '@ethersproject/experimental'; -import { LogTypes } from '@requestnetwork/types'; - -import { RequestNodeBase } from './requestNodeBase'; -import * as config from './config'; -import { getIpfsStorage } from './storageUtils'; -import { TheGraphDataAccess } from '@requestnetwork/thegraph-data-access'; -import { - EthereumStorageEthers, - getEthereumStorageNetworkNameFromId, -} from '@requestnetwork/ethereum-storage'; -import { SimpleLogger } from '@requestnetwork/utils'; - -export class TheGraphRequestNode extends RequestNodeBase { - constructor(url: string, logger?: LogTypes.ILogger) { - const initializationStoragePath = config.getInitializationStorageFilePath(); - logger = logger || new SimpleLogger(); - - const store = initializationStoragePath - ? 
new KeyvFile({ - filename: initializationStoragePath, - }) - : undefined; - - const network = getEthereumStorageNetworkNameFromId(config.getStorageNetworkId()); - if (!network) { - throw new Error(`Storage network not supported: ${config.getStorageNetworkId()}`); - } - - const wallet = Wallet.fromMnemonic(config.getMnemonic()).connect( - new providers.StaticJsonRpcProvider(config.getStorageWeb3ProviderUrl()), - ); - const signer = new NonceManager(wallet); - const ipfsStorage = getIpfsStorage(logger); - const gasPriceMin = config.getGasPriceMin(); - const blockConfirmations = config.getBlockConfirmations(); - const storage = new EthereumStorageEthers({ - ipfsStorage, - signer, - network, - logger, - gasPriceMin, - blockConfirmations, - }); - const dataAccess = new TheGraphDataAccess({ - graphql: { url }, - storage, - network, - logger, - }); - - super(dataAccess, ipfsStorage, store, logger); - } -} diff --git a/packages/request-node/test/getChannelsByTopic.test.ts b/packages/request-node/test/getChannelsByTopic.test.ts index 4528dacfed..78dfc67e71 100644 --- a/packages/request-node/test/getChannelsByTopic.test.ts +++ b/packages/request-node/test/getChannelsByTopic.test.ts @@ -1,15 +1,18 @@ import { StatusCodes } from 'http-status-codes'; +import { getRequestNode } from '../src/server'; import request from 'supertest'; import { RequestNode } from '../src/requestNode'; -import { RequestNodeBase } from '../src/requestNodeBase'; +import { normalizeKeccak256Hash } from '@requestnetwork/utils'; +import { providers } from 'ethers'; -const channelId = '01aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; -const anotherChannelId = '01bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'; -const commonTopic = ['01cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc']; -const topics = ['01dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'].concat( - commonTopic, -); -const otherTopics = ['01eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee'].concat( +// enable re-running these tests on local environment by having a different channel ID each time. 
+const time = Date.now(); +const channelId = `01aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa${time}`; +const anotherChannelId = `01bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb${time}`; + +const commonTopic = [`01ccccccccccccccccccccccccccccccccccccccccccccccccccc${time}`]; +const topics = [`01ddddddddddddddddddddddddddddddddddddddddddddddddddd${time}`].concat(commonTopic); +const otherTopics = [`01eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee${time}`].concat( commonTopic, ); const nonExistentTopic = '010000000000000000000000000000000000000000000000000000000000000000'; @@ -20,14 +23,14 @@ const otherTransactionData = { data: 'this is other sample data for a transaction to test getChannelsByTopic', }; -let requestNodeInstance: RequestNodeBase; +let requestNodeInstance: RequestNode; let server: any; /* eslint-disable no-magic-numbers */ /* eslint-disable @typescript-eslint/no-unused-expressions */ describe('getChannelsByTopic', () => { beforeAll(async () => { - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); await requestNodeInstance.initialize(); server = (requestNodeInstance as any).express; }); @@ -54,9 +57,10 @@ describe('getChannelsByTopic', () => { .set('Accept', 'application/json') .expect(StatusCodes.OK); - expect(Object.keys(serverResponse.body.result.transactions[channelId])).toHaveLength(1); - expect(serverResponse.body.result.transactions[channelId][0].transaction).toEqual( - transactionData, + expect(serverResponse.body.result.transactions).toMatchObject( + expect.objectContaining({ + [channelId]: [expect.objectContaining({ transaction: transactionData })], + }), ); await request(server) @@ -74,9 +78,11 @@ describe('getChannelsByTopic', () => { .query({ topic: otherTopics[0] }) .set('Accept', 'application/json') .expect(StatusCodes.OK); - expect(Object.keys(serverResponse.body.result.transactions[anotherChannelId])).toHaveLength(1); - expect(serverResponse.body.result.transactions[anotherChannelId][0].transaction).toEqual( - otherTransactionData, + + expect(serverResponse.body.result.transactions).toMatchObject( + expect.objectContaining({ + [anotherChannelId]: [expect.objectContaining({ transaction: otherTransactionData })], + }), ); // If we search for the common topic, there should be two transaction @@ -86,9 +92,32 @@ describe('getChannelsByTopic', () => { .set('Accept', 'application/json') .expect(StatusCodes.OK); - expect(Object.keys(serverResponse.body.result.transactions[channelId])).toHaveLength(1); - expect(Object.keys(serverResponse.body.result.transactions[anotherChannelId])).toHaveLength(1); - }); + expect(serverResponse.body.result.transactions).toMatchObject( + expect.objectContaining({ + [channelId]: [expect.objectContaining({ transaction: transactionData })], + [anotherChannelId]: [expect.objectContaining({ transaction: otherTransactionData })], + }), + ); + + // confirm the transactions for clean shutdown + const provider = new providers.JsonRpcProvider(); + const confirm = (txData: unknown) => { + const transactionHash = normalizeKeccak256Hash(txData).value; + return new Promise((r) => { + const i = setInterval(async () => { + await provider.send('evm_mine', []); + const res = await request(server) + .get('/getConfirmedTransaction') + .query({ transactionHash }); + if (res.status === 200) { + clearInterval(i); + return r(); + } + }, 200); + }); + }; + await Promise.all([confirm(transactionData), confirm(otherTransactionData)]); + }, 10000); it('responds with no transaction to requests with a non-existent 
topic', async () => { const serverResponse = await request(server) diff --git a/packages/request-node/test/getConfirmedTransaction.test.ts b/packages/request-node/test/getConfirmedTransaction.test.ts index 4cd49ecd5b..6e3409e6e7 100644 --- a/packages/request-node/test/getConfirmedTransaction.test.ts +++ b/packages/request-node/test/getConfirmedTransaction.test.ts @@ -1,22 +1,24 @@ import { normalizeKeccak256Hash } from '@requestnetwork/utils'; import { StatusCodes } from 'http-status-codes'; import request from 'supertest'; +import { getRequestNode } from '../src/server'; import { RequestNode } from '../src/requestNode'; -import { RequestNodeBase } from '../src/requestNodeBase'; +import { providers } from 'ethers'; const channelId = '010aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; const transactionData = { data: 'this is sample data for a transaction' }; const transactionHash = normalizeKeccak256Hash(transactionData).value; +const provider = new providers.JsonRpcProvider('http://localhost:8545'); -let requestNodeInstance: RequestNodeBase; +let requestNodeInstance: RequestNode; let server: any; /* eslint-disable no-magic-numbers */ /* eslint-disable @typescript-eslint/no-unused-expressions */ describe('getConfirmedTransaction', () => { beforeAll(async () => { - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); await requestNodeInstance.initialize(); server = (requestNodeInstance as any).express; @@ -40,6 +42,9 @@ describe('getConfirmedTransaction', () => { .set('Accept', 'application/json') .expect(StatusCodes.NOT_FOUND); + // mining is required for TheGraph to index data + await provider.send('evm_mine', []); + let serverResponse: request.Response | undefined; // retry mechanism to account for ganache delay for (let i = 0; i < 10; i++) { @@ -60,7 +65,7 @@ describe('getConfirmedTransaction', () => { expect(serverResponse!.body.result).toMatchObject({}); // 'getConfirmedTransaction request meta' expect(serverResponse!.body.meta.storageMeta.state).toBe('confirmed'); - }, 11000); + }, 30000); it('responds with status 422 to requests with no value', async () => { await request(server) diff --git a/packages/request-node/test/getTransactionsByChannelId.test.ts b/packages/request-node/test/getTransactionsByChannelId.test.ts index 9c9dd1a314..cdefe8e8fa 100644 --- a/packages/request-node/test/getTransactionsByChannelId.test.ts +++ b/packages/request-node/test/getTransactionsByChannelId.test.ts @@ -1,7 +1,7 @@ import { StatusCodes } from 'http-status-codes'; import request from 'supertest'; +import { getRequestNode } from '../src/server'; import { RequestNode } from '../src/requestNode'; -import { RequestNodeBase } from '../src/requestNodeBase'; const channelId = '01aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab'; const anotherChannelId = '01bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbc'; @@ -13,14 +13,14 @@ const otherTransactionData = { data: 'this is other sample data for a transaction to test getTransactionsByChannelId', }; -let requestNodeInstance: RequestNodeBase; +let requestNodeInstance: RequestNode; let server: any; /* eslint-disable no-magic-numbers */ /* eslint-disable @typescript-eslint/no-unused-expressions */ describe('getTransactionsByChannelId', () => { beforeAll(async () => { - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); await requestNodeInstance.initialize(); server = (requestNodeInstance as any).express; }); diff --git 
a/packages/request-node/test/ipfsAdd.test.ts b/packages/request-node/test/ipfsAdd.test.ts index b3badddda6..76981aacdc 100644 --- a/packages/request-node/test/ipfsAdd.test.ts +++ b/packages/request-node/test/ipfsAdd.test.ts @@ -1,16 +1,16 @@ import { StatusCodes } from 'http-status-codes'; import request from 'supertest'; +import { getRequestNode } from '../src/server'; import { RequestNode } from '../src/requestNode'; -import { RequestNodeBase } from '../src/requestNodeBase'; -let requestNodeInstance: RequestNodeBase; +let requestNodeInstance: RequestNode; let server: any; /* eslint-disable no-magic-numbers */ /* eslint-disable @typescript-eslint/no-unused-expressions */ describe('ipfsAdd', () => { beforeAll(async () => { - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); await requestNodeInstance.initialize(); server = (requestNodeInstance as any).express; diff --git a/packages/request-node/test/persistTransaction.test.ts b/packages/request-node/test/persistTransaction.test.ts index 2f0db9196b..afa8c7a563 100644 --- a/packages/request-node/test/persistTransaction.test.ts +++ b/packages/request-node/test/persistTransaction.test.ts @@ -1,7 +1,10 @@ -import { StatusCodes } from 'http-status-codes'; +import axios from 'axios'; import request from 'supertest'; +import { StatusCodes } from 'http-status-codes'; +import MockAdapter from 'axios-mock-adapter'; + +import { getRequestNode } from '../src/server'; import { RequestNode } from '../src/requestNode'; -import { RequestNodeBase } from '../src/requestNodeBase'; const channelId = '010aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; const anotherChannelId = '010bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'; @@ -13,14 +16,18 @@ const transactionData = { data: 'this is sample data for a transaction' }; const anotherTransactionData = { data: 'you can put any data' }; const badlyFormattedTransactionData = { not: 'a transaction' }; -let requestNodeInstance: RequestNodeBase; +let requestNodeInstance: RequestNode; let server: any; +const axiosMock = new MockAdapter(axios); + /* eslint-disable no-magic-numbers */ /* eslint-disable @typescript-eslint/no-unused-expressions */ describe('persistTransaction', () => { beforeAll(async () => { - requestNodeInstance = new RequestNode(); + axiosMock.onAny().passThrough(); + + requestNodeInstance = getRequestNode(); await requestNodeInstance.initialize(); server = (requestNodeInstance as any).express; @@ -29,6 +36,8 @@ describe('persistTransaction', () => { afterAll(async () => { await requestNodeInstance.close(); server.close(); + jest.restoreAllMocks(); + axiosMock.reset(); }); it('responds with status 200 to requests with correct values', async () => { @@ -38,7 +47,7 @@ describe('persistTransaction', () => { .set('Accept', 'application/json') .expect(StatusCodes.OK); - expect(serverResponse.body.result).toMatchObject({}); + expect(serverResponse.body).toMatchObject({ result: {} }); // topics parameter should be optional serverResponse = await request(server) @@ -68,4 +77,21 @@ describe('persistTransaction', () => { .set('Accept', 'application/json') .expect(StatusCodes.INTERNAL_SERVER_ERROR); }); + + it('should catch IPFS timeout error', async () => { + axiosMock.reset(); + axiosMock.onAny().timeout(); + const assertionsNb = 10; + const assertions = []; + for (let i = 0; i < assertionsNb; i++) { + assertions.push( + request(server) + .post('/persistTransaction') + .send({ channelId, topics, transactionData }) + .set('Accept', 
'application/json') + .expect(StatusCodes.INTERNAL_SERVER_ERROR), + ); + } + await Promise.all(assertions); + }); }); diff --git a/packages/request-node/test/requestNode.test.ts b/packages/request-node/test/requestNode.test.ts index 3856d69f0d..b3f29c6d52 100644 --- a/packages/request-node/test/requestNode.test.ts +++ b/packages/request-node/test/requestNode.test.ts @@ -1,7 +1,7 @@ import { StatusCodes } from 'http-status-codes'; import request from 'supertest'; +import { getRequestNode } from '../src/server'; import { RequestNode } from '../src/requestNode'; -import { RequestNodeBase } from '../src/requestNodeBase'; const packageJson = require('../package.json'); const requestNodeVersion = packageJson.version; @@ -10,14 +10,14 @@ const dataAccessInitializeFailureMock = async (): Promise => { throw Error('This mock function always fails'); }; -let requestNodeInstance: RequestNodeBase; +let requestNodeInstance: RequestNode; let server: any; /* eslint-disable no-magic-numbers */ /* eslint-disable @typescript-eslint/no-unused-expressions */ describe('requestNode server', () => { beforeAll(async () => { - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); await requestNodeInstance.initialize(); server = (requestNodeInstance as any).express; @@ -41,14 +41,14 @@ describe('requestNode server', () => { }); it('responds with status 503 to readyness check requests when not ready', async () => { - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); server = (requestNodeInstance as any).express; await request(server).get('/readyz').expect(StatusCodes.SERVICE_UNAVAILABLE); }); it('responds with status 503 if server is uninitialized', async () => { // Import directly requestNode to create a server where we don't call requestNodeInstance.initialize() - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); const notInitializedServer = (requestNodeInstance as any).express; await request(notInitializedServer) @@ -63,7 +63,7 @@ describe('requestNode server', () => { }); it('initialization failure should throw an error', async () => { - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); jest .spyOn((requestNodeInstance as any).dataAccess, 'initialize') .mockImplementation(dataAccessInitializeFailureMock); @@ -71,18 +71,9 @@ describe('requestNode server', () => { await expect(requestNodeInstance.initialize()).rejects.toThrowError(Error); }); - it('serves custom headers', async () => { - // Import directly requestNode to create a server - process.env.HEADERS = '{"x-custom-test-header": "test-passed"}'; - requestNodeInstance = new RequestNode(); - server = (requestNodeInstance as any).express; - - await request(server).post('/').expect('x-custom-test-header', 'test-passed'); - }); - it('the response header contains the Request Node version', async () => { // Import directly requestNode to create a server - requestNodeInstance = new RequestNode(); + requestNodeInstance = getRequestNode(); server = (requestNodeInstance as any).express; await request(server).post('/').expect('X-Request-Network-Node-Version', requestNodeVersion); @@ -92,7 +83,7 @@ describe('requestNode server', () => { process.env.ETHEREUM_NETWORK_ID = '4'; // 'must throw' - expect(() => new RequestNode()).toThrowError( + expect(() => getRequestNode()).toThrowError( 'the environment variable MNEMONIC must be set up. 
The default mnemonic is only for private network.', ); }); diff --git a/packages/request-node/test/thegraph/persistTransaction.test.ts b/packages/request-node/test/thegraph/persistTransaction.test.ts deleted file mode 100644 index 8df2ce7f6b..0000000000 --- a/packages/request-node/test/thegraph/persistTransaction.test.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { StatusCodes } from 'http-status-codes'; -import request from 'supertest'; -import axios from 'axios'; -import MockAdapter from 'axios-mock-adapter'; -import { RequestNodeBase } from '../../src/requestNodeBase'; -import { TheGraphRequestNode } from '../../src/thegraph-node'; -import * as core from 'express-serve-static-core'; - -const subgraphUrl = 'http://localhost:8000/subgraphs/name/RequestNetwork/request-storage'; -const channelId = '010aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; -const topics = [ - '010ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - '010ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd', -]; -const transactionData = { data: 'this is sample data for a transaction' }; - -let requestNodeInstance: RequestNodeBase; -let server: core.Express; - -const axiosMock = new MockAdapter(axios); - -describe('persistTransaction', () => { - beforeAll(async () => { - axiosMock.onAny().passThrough(); - requestNodeInstance = new TheGraphRequestNode(subgraphUrl); - await requestNodeInstance.initialize(); - server = (requestNodeInstance as any).express; - }); - - afterAll(async () => { - await requestNodeInstance.close(); - jest.restoreAllMocks(); - axiosMock.reset(); - }); - - it('responds with status 200 to requests with correct values', async () => { - let serverResponse = await request(server) - .post('/persistTransaction') - .send({ channelId, topics, transactionData }) - .set('Accept', 'application/json') - .expect(StatusCodes.OK); - expect(serverResponse.body.result).toMatchObject({}); - }); - - it('should catch IPFS timeout error', async () => { - axiosMock.reset(); - axiosMock.onAny().timeout(); - const assertionsNb = 10; - const assertions = []; - for (let i = 0; i < assertionsNb; i++) { - assertions.push( - request(server) - .post('/persistTransaction') - .send({ channelId, topics, transactionData }) - .set('Accept', 'application/json') - .expect(StatusCodes.INTERNAL_SERVER_ERROR), - ); - } - await Promise.all(assertions); - }); -}); diff --git a/packages/smart-contracts/package.json b/packages/smart-contracts/package.json index c544bba8cc..561a5ee07b 100644 --- a/packages/smart-contracts/package.json +++ b/packages/smart-contracts/package.json @@ -59,7 +59,6 @@ "@nomiclabs/hardhat-waffle": "2.0.1", "@nomiclabs/hardhat-web3": "2.0.0", "@openzeppelin/contracts": "4.4.2", - "@openzeppelin/test-helpers": "0.5.6", "@rainbow-me/fee-suggestions": "2.1.0", "@requestnetwork/currency": "0.10.0", "@requestnetwork/types": "0.37.0", diff --git a/packages/smart-contracts/scripts/conversion-proxy.ts b/packages/smart-contracts/scripts/conversion-proxy.ts index 682cdd151e..287cd12647 100644 --- a/packages/smart-contracts/scripts/conversion-proxy.ts +++ b/packages/smart-contracts/scripts/conversion-proxy.ts @@ -1,7 +1,7 @@ import { HardhatRuntimeEnvironment } from 'hardhat/types'; // eslint-disable-next-line // @ts-ignore Cannot find module -import { Erc20ConversionProxy } from '../src/types/Erc20ConversionProxy'; +import { Erc20ConversionProxy } from '../src/types'; import { erc20ConversionProxy as erc20ConversionProxyArtifact, ethConversionArtifact, diff --git 
a/packages/smart-contracts/scripts/deploy-payments.ts b/packages/smart-contracts/scripts/deploy-payments.ts index d760243b1c..0b12f7a15d 100644 --- a/packages/smart-contracts/scripts/deploy-payments.ts +++ b/packages/smart-contracts/scripts/deploy-payments.ts @@ -10,10 +10,10 @@ import { uniswapV2RouterAddresses, jumpToNonce } from './utils'; import { Contract } from 'ethers'; // eslint-disable-next-line // @ts-ignore Cannot find module -import { ChainlinkConversionPath } from '../src/types/ChainlinkConversionPath'; +import { ChainlinkConversionPath } from '../src/types'; // eslint-disable-next-line // @ts-ignore Cannot find module -import { EthConversionProxy } from '../src/types/EthConversionProxy'; +import { EthConversionProxy } from '../src/types'; import { CurrencyManager } from '@requestnetwork/currency'; import { RequestLogicTypes } from '@requestnetwork/types'; import { HardhatRuntimeEnvironmentExtended } from '../scripts-create2/types'; diff --git a/packages/smart-contracts/scripts/erc20-swap-to-conversion.ts b/packages/smart-contracts/scripts/erc20-swap-to-conversion.ts index d9dc36afbe..3da11879ae 100644 --- a/packages/smart-contracts/scripts/erc20-swap-to-conversion.ts +++ b/packages/smart-contracts/scripts/erc20-swap-to-conversion.ts @@ -1,7 +1,7 @@ import { HardhatRuntimeEnvironment } from 'hardhat/types'; // eslint-disable-next-line // @ts-ignore Cannot find module -import { ERC20SwapToConversion } from '../src/types/ERC20SwapToConversion'; +import { ERC20SwapToConversion } from '../src/types'; import { erc20SwapConversionArtifact } from '../src/lib'; import { DeploymentResult, deployOne } from './deploy-one'; import { uniswapV2RouterAddresses } from './utils'; diff --git a/packages/smart-contracts/src/lib/artifacts/BatchConversionPayments/index.ts b/packages/smart-contracts/src/lib/artifacts/BatchConversionPayments/index.ts index 77c67a449a..1dc0a289f4 100644 --- a/packages/smart-contracts/src/lib/artifacts/BatchConversionPayments/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/BatchConversionPayments/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { BatchConversionPayments } from '../../../types/BatchConversionPayments'; +import type { BatchConversionPayments } from '../../../types'; export const batchConversionPaymentsArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/BatchNoConversionPayments/index.ts b/packages/smart-contracts/src/lib/artifacts/BatchNoConversionPayments/index.ts index 6737cd2abb..b9dd636d46 100644 --- a/packages/smart-contracts/src/lib/artifacts/BatchNoConversionPayments/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/BatchNoConversionPayments/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { BatchNoConversionPayments } from '../../../types/BatchNoConversionPayments'; +import type { BatchNoConversionPayments } from '../../../types'; export const batchNoConversionPaymentsArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/BatchPayments/index.ts b/packages/smart-contracts/src/lib/artifacts/BatchPayments/index.ts index 352f758539..055e7e5ebd 100644 --- a/packages/smart-contracts/src/lib/artifacts/BatchPayments/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/BatchPayments/index.ts @@ -2,7 +2,7 
@@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { BatchPayments } from '../../../types/BatchPayments'; +import type { BatchPayments } from '../../../types'; export const batchPaymentsArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/ChainlinkConversionPath/index.ts b/packages/smart-contracts/src/lib/artifacts/ChainlinkConversionPath/index.ts index 9e9be55406..ea2f6e6730 100644 --- a/packages/smart-contracts/src/lib/artifacts/ChainlinkConversionPath/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/ChainlinkConversionPath/index.ts @@ -3,7 +3,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; import { abi as ABI_0_2_0 } from './0.2.0.json'; // @ts-ignore Cannot find module -import type { ChainlinkConversionPath } from '../../../types/ChainlinkConversionPath'; +import type { ChainlinkConversionPath } from '../../../types'; export const chainlinkConversionPath = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/ERC20EscrowToPay/index.ts b/packages/smart-contracts/src/lib/artifacts/ERC20EscrowToPay/index.ts index cfecfdf31f..4069ecc30b 100644 --- a/packages/smart-contracts/src/lib/artifacts/ERC20EscrowToPay/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/ERC20EscrowToPay/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { ERC20EscrowToPay } from '../../../types/ERC2EscrowToPay'; +import type { ERC20EscrowToPay } from '../../../types'; export const erc20EscrowToPayArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/ERC20FeeProxy/index.ts b/packages/smart-contracts/src/lib/artifacts/ERC20FeeProxy/index.ts index 9b0653e0a3..8c23cbafa0 100644 --- a/packages/smart-contracts/src/lib/artifacts/ERC20FeeProxy/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/ERC20FeeProxy/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { ERC20FeeProxy } from '../../../types/ERC20FeeProxy'; +import type { ERC20FeeProxy } from '../../../types'; export const erc20FeeProxyArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/ERC20Proxy/index.ts b/packages/smart-contracts/src/lib/artifacts/ERC20Proxy/index.ts index 160cf664ab..5398f2c207 100644 --- a/packages/smart-contracts/src/lib/artifacts/ERC20Proxy/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/ERC20Proxy/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { ERC20Proxy } from '../../../types/ERC20Proxy'; +import type { ERC20Proxy } from '../../../types'; export const erc20ProxyArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/ERC20SwapToPay/index.ts b/packages/smart-contracts/src/lib/artifacts/ERC20SwapToPay/index.ts index fba9aa99db..d924259bee 100644 --- a/packages/smart-contracts/src/lib/artifacts/ERC20SwapToPay/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/ERC20SwapToPay/index.ts @@ -3,7 +3,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from 
'./0.1.0.json'; import { abi as ABI_0_2_0 } from './0.2.0.json'; // @ts-ignore Cannot find module -import type { ERC20SwapToPay } from '../../../types/ERC20SwapToPay'; +import type { ERC20SwapToPay } from '../../../types'; export const erc20SwapToPayArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/ERC20TransferableReceivable/index.ts b/packages/smart-contracts/src/lib/artifacts/ERC20TransferableReceivable/index.ts index 73fdd87f88..ea9cc032ff 100644 --- a/packages/smart-contracts/src/lib/artifacts/ERC20TransferableReceivable/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/ERC20TransferableReceivable/index.ts @@ -3,7 +3,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; import { abi as ABI_0_2_0 } from './0.2.0.json'; // @ts-ignore Cannot find module -import type { ERC20TransferableReceivable } from '../../../types/ERC20TransferableReceivable'; +import type { ERC20TransferableReceivable } from '../../../types'; export const erc20TransferableReceivableArtifact = new ContractArtifact( diff --git a/packages/smart-contracts/src/lib/artifacts/Erc20ConversionProxy/index.ts b/packages/smart-contracts/src/lib/artifacts/Erc20ConversionProxy/index.ts index bb0970fbe7..e9799f017d 100644 --- a/packages/smart-contracts/src/lib/artifacts/Erc20ConversionProxy/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/Erc20ConversionProxy/index.ts @@ -3,7 +3,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; import { abi as ABI_0_1_1 } from './0.1.1.json'; // @ts-ignore Cannot find module -import type { Erc20ConversionProxy } from '../../../types/Erc20ConversionProxy'; +import type { Erc20ConversionProxy } from '../../../types'; export const erc20ConversionProxy = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/Erc20SwapConversion/index.ts b/packages/smart-contracts/src/lib/artifacts/Erc20SwapConversion/index.ts index c66cc21331..8552e551ad 100644 --- a/packages/smart-contracts/src/lib/artifacts/Erc20SwapConversion/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/Erc20SwapConversion/index.ts @@ -3,7 +3,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; import { abi as ABI_0_2_0 } from './0.2.0.json'; // @ts-ignore Cannot find module -import type { ERC20SwapToConversion } from '../../../types/ERC20SwapToConversion'; +import type { ERC20SwapToConversion } from '../../../types'; export const erc20SwapConversionArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/EthConversionProxy/index.ts b/packages/smart-contracts/src/lib/artifacts/EthConversionProxy/index.ts index d7e32cc250..8c18e1e019 100644 --- a/packages/smart-contracts/src/lib/artifacts/EthConversionProxy/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/EthConversionProxy/index.ts @@ -3,7 +3,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; import { abi as ABI_0_2_0 } from './0.2.0.json'; // @ts-ignore Cannot find module -import type { EthConversionProxy } from '../../../types/EthConversionProxy'; +import type { EthConversionProxy } from '../../../types'; export const ethConversionArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/EthereumFeeProxy/index.ts b/packages/smart-contracts/src/lib/artifacts/EthereumFeeProxy/index.ts index 
71c693fb66..41dd24a809 100644 --- a/packages/smart-contracts/src/lib/artifacts/EthereumFeeProxy/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/EthereumFeeProxy/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { EthereumFeeProxy } from '../../../types/EthereumFeeProxy'; +import type { EthereumFeeProxy } from '../../../types'; export const ethereumFeeProxyArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/EthereumProxy/index.ts b/packages/smart-contracts/src/lib/artifacts/EthereumProxy/index.ts index 9e31bd6db6..babdcf9829 100644 --- a/packages/smart-contracts/src/lib/artifacts/EthereumProxy/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/EthereumProxy/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { EthereumProxy } from '../../../types/EthereumProxy'; +import type { EthereumProxy } from '../../../types'; export const ethereumProxyArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/RequestDeployer/index.ts b/packages/smart-contracts/src/lib/artifacts/RequestDeployer/index.ts index e2a0fd2af4..c4c6471a9e 100644 --- a/packages/smart-contracts/src/lib/artifacts/RequestDeployer/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/RequestDeployer/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { RequestDeployer } from '../../../types/RequestDeployer'; +import type { RequestDeployer } from '../../../types'; export const requestDeployer = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/RequestHashStorage/index.ts b/packages/smart-contracts/src/lib/artifacts/RequestHashStorage/index.ts index d9b4ae963c..764bf99ee4 100644 --- a/packages/smart-contracts/src/lib/artifacts/RequestHashStorage/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/RequestHashStorage/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { RequestHashStorage } from '../../../types/RequestHashStorage'; +import type { RequestHashStorage } from '../../../types'; export const requestHashStorageArtifact = new ContractArtifact( { diff --git a/packages/smart-contracts/src/lib/artifacts/RequestHashSubmitter/index.ts b/packages/smart-contracts/src/lib/artifacts/RequestHashSubmitter/index.ts index 7bd15964d5..a2c9dca85d 100644 --- a/packages/smart-contracts/src/lib/artifacts/RequestHashSubmitter/index.ts +++ b/packages/smart-contracts/src/lib/artifacts/RequestHashSubmitter/index.ts @@ -2,7 +2,7 @@ import { ContractArtifact } from '../../ContractArtifact'; import { abi as ABI_0_1_0 } from './0.1.0.json'; // @ts-ignore Cannot find module -import type { RequestOpenHashSubmitter } from '../../../types/RequestOpenHashSubmitter'; +import type { RequestOpenHashSubmitter } from '../../../types'; export const requestHashSubmitterArtifact = new ContractArtifact( { diff --git a/packages/thegraph-data-access/package.json b/packages/thegraph-data-access/package.json index 7affee737a..f13f06840c 100644 --- a/packages/thegraph-data-access/package.json +++ b/packages/thegraph-data-access/package.json @@ 
-46,8 +46,7 @@ "@requestnetwork/utils": "0.37.0", "ethers": "5.5.1", "graphql-request": "3.4.0", - "tslib": "2.5.0", - "typed-emitter": "1.4.0" + "tslib": "2.5.0" }, "devDependencies": { "@types/jest": "26.0.13", diff --git a/packages/thegraph-data-access/src/NoopDataWrite.ts b/packages/thegraph-data-access/src/NoopDataWrite.ts new file mode 100644 index 0000000000..80e7847e5f --- /dev/null +++ b/packages/thegraph-data-access/src/NoopDataWrite.ts @@ -0,0 +1,17 @@ +import { DataAccessTypes } from '@requestnetwork/types'; + +export class NoopDataWrite implements DataAccessTypes.IDataWrite { + async initialize(): Promise { + // no-op + } + + async close(): Promise { + // no-op + } + + persistTransaction(): Promise { + throw new Error( + `cannot call persistTranscation without storage. Specify storage on TheGraphDataAccess`, + ); + } +} diff --git a/packages/thegraph-data-access/src/data-access.ts b/packages/thegraph-data-access/src/data-access.ts index 19bf9333ad..48ade60721 100644 --- a/packages/thegraph-data-access/src/data-access.ts +++ b/packages/thegraph-data-access/src/data-access.ts @@ -1,357 +1,85 @@ import { EventEmitter } from 'events'; -import TypedEmitter from 'typed-emitter'; -import { BigNumber } from 'ethers'; - -import { getCurrentTimestampInSecond, retry, SimpleLogger } from '@requestnetwork/utils'; -import { Block, CombinedDataAccess } from '@requestnetwork/data-access'; -import { DataAccessTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; - -import { Transaction } from './queries'; import { SubgraphClient } from './subgraph-client'; -import { PendingStore } from './pending-store'; -import { RequestInit } from 'graphql-request/dist/types.dom'; - -type TheGraphDataAccessBaseOptions = { - network: string; - logger?: LogTypes.ILogger; - pendingStore?: PendingStore; -}; - -export type TheGraphDataAccessOptions = TheGraphDataAccessBaseOptions & { - graphql: { url: string } & RequestInit; - storage?: StorageTypes.IStorageWrite; -}; - -type DataAccessEventEmitter = TypedEmitter<{ - confirmed: (data: DataAccessTypes.IReturnPersistTransactionRaw) => void; - error: (error: unknown) => void; -}>; - -const getStorageMeta = ( - result: Transaction, - lastBlockNumber: number, - network: string, -): StorageTypes.IEntryMetadata => { - return { - ethereum: { - blockConfirmation: lastBlockNumber - result.blockNumber, - blockNumber: result.blockNumber, - blockTimestamp: result.blockTimestamp, - networkName: network, - smartContractAddress: result.smartContractAddress, - transactionHash: result.transactionHash, - }, - ipfs: { - size: BigNumber.from(result.size).toNumber(), - }, - state: StorageTypes.ContentState.CONFIRMED, - storageType: StorageTypes.StorageSystemType.ETHEREUM_IPFS, - timestamp: result.blockTimestamp, - }; -}; - -export class TheGraphDataRead implements DataAccessTypes.IDataRead { - private network: string; +import { CombinedDataAccess, DataAccessWrite, DataAccessRead } from '@requestnetwork/data-access'; +import { DataAccessTypes, LogTypes, StorageTypes } from '@requestnetwork/types'; +import { retry, SimpleLogger } from '@requestnetwork/utils'; +import { TheGraphDataAccessOptions } from './types'; +import { NoopDataWrite } from './NoopDataWrite'; + +/** + * A custom DataAccess to retrieve Request data from a TheGraph subgraph. + * If no `storage` is passed, the data access is read-only. 
+ */ +export class TheGraphDataAccess extends CombinedDataAccess { + private logger: LogTypes.ILogger; + constructor({ graphql, storage, ...options }: TheGraphDataAccessOptions) { + const { url, ...rest } = graphql; + const graphqlClient = new SubgraphClient(url, rest); - private pendingStore?: PendingStore; + const reader = new DataAccessRead(graphqlClient, options); + const writer = storage + ? new DataAccessWrite(storage, options.pendingStore) + : new NoopDataWrite(); - constructor( - private readonly graphql: SubgraphClient, - { network, pendingStore }: TheGraphDataAccessBaseOptions, - ) { - this.network = network; - this.pendingStore = pendingStore; - } + super(reader, writer); - async initialize(): Promise { - await this.graphql.getBlockNumber(); - } + this.logger = options.logger || new SimpleLogger(); - close(): Promise { - return Promise.resolve(); + this.fetchConfirmedTransaction = retry(this.fetchConfirmedTransaction, { + context: this, + maxRetries: 30, + retryDelay: 1000, + }); } - async getTransactionsByChannelId( + /** intercept events so that confirmation is emitted only once the transaction is indexed */ + persistTransaction = async ( + transactionData: DataAccessTypes.ITransaction, channelId: string, - updatedBetween?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - const pending = await this.getPending(channelId); - - const result = await this.graphql.getTransactionsByChannelId(channelId, updatedBetween); - - return { - meta: { - transactionsStorageLocation: result.transactions - .map((x) => x.hash) - .concat(pending.meta.transactionsStorageLocation), - storageMeta: result.transactions.map((tx) => - getStorageMeta(tx, result._meta.block.number, this.network), - ), - }, - result: { - transactions: result.transactions - .map(this.getTimestampedTransaction) - .concat(pending.result.transactions), - }, - }; - } - - async getChannelsByTopic( - topic: string, - updatedBetween?: DataAccessTypes.ITimestampBoundaries | undefined, - ): Promise { - return this.getChannelsByMultipleTopics([topic], updatedBetween); - } - - async getChannelsByMultipleTopics( - topics: string[], - updatedBetween?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - const result = await this.graphql.getChannelsByTopics(topics); + topics?: string[] | undefined, + ): Promise => { + const eventEmitter = new EventEmitter() as DataAccessTypes.PersistTransactionEmitter; + const result = await this.writer.persistTransaction(transactionData, channelId, topics); + result.on('confirmed', (receipt) => { + this.fetchConfirmedTransaction(channelId, receipt) + .then((confirmedReceipt) => eventEmitter.emit('confirmed', confirmedReceipt)) + .catch(() => this.logger.warn(`Could not confirm channel ${channelId}`)); + }); + result.on('error', (e) => eventEmitter.emit('error', e)); + return Object.assign(eventEmitter, { meta: result.meta, result: result.result }); + }; - // list of channels having at least one tx updated during the updatedBetween boundaries - const channels = result.transactions - .filter( + /** + * We wait until the data is indexed on TheGraph + **/ + private async fetchConfirmedTransaction( + channelId: string, + { meta, result }: DataAccessTypes.IReturnPersistTransactionRaw, + ) { + const transactions = await this.reader.getTransactionsByChannelId(channelId); + if ( + transactions.result.transactions.some( (tx) => - tx.blockTimestamp >= (updatedBetween?.from || 0) && - tx.blockTimestamp <= (updatedBetween?.to || Number.MAX_SAFE_INTEGER), + tx.state === DataAccessTypes.TransactionState.PENDING 
&& + // ignore transactions that were not included in the receipt being confirmed + tx.transaction.hash && + meta.transactionStorageLocation.includes(tx.transaction.hash), ) - .map((x) => x.channelId); - - const filteredTxs = result.transactions.filter((tx) => channels.includes(tx.channelId)); - return { - meta: { - storageMeta: filteredTxs.reduce((acc, tx) => { - acc[tx.channelId] = [getStorageMeta(tx, result._meta.block.number, this.network)]; - return acc; - }, {} as Record), - transactionsStorageLocation: filteredTxs.reduce((prev, curr) => { - if (!prev[curr.channelId]) { - prev[curr.channelId] = []; - } - prev[curr.channelId].push(curr.hash); - return prev; - }, {} as Record), - }, - result: { - transactions: filteredTxs.reduce((prev, curr) => { - if (!prev[curr.channelId]) { - prev[curr.channelId] = []; - } - prev[curr.channelId].push(this.getTimestampedTransaction(curr)); - return prev; - }, {} as DataAccessTypes.ITransactionsByChannelIds), - }, - }; - } - - private async getPending(channelId: string): Promise { - const emptyResult = { - meta: { - transactionsStorageLocation: [], - storageMeta: [], - }, - result: { - transactions: [], - }, - }; - const pending = this.pendingStore?.get(channelId); - if (!pending) { - return emptyResult; + ) { + throw new Error('not confirmed'); } - const { storageResult, transaction } = pending; - - const { transactions } = await this.graphql.getTransactionsByHash(storageResult.id); - - // if the pending tx is found, remove its state and fetch the real data - if (transactions.length > 0) { - this.pendingStore?.remove(channelId); - return emptyResult; - } - return { meta: { - transactionsStorageLocation: [storageResult.id], - storageMeta: [storageResult.meta], + ...meta, + storageMeta: meta.storageMeta + ? { + ...meta.storageMeta, + state: StorageTypes.ContentState.CONFIRMED, + } + : undefined, }, - result: { - transactions: [ - { - state: DataAccessTypes.TransactionState.PENDING, - timestamp: getCurrentTimestampInSecond(), - transaction, - }, - ], - }, - }; - } - - private getTimestampedTransaction( - transaction: Transaction, - ): DataAccessTypes.ITimestampedTransaction { - return { - state: DataAccessTypes.TransactionState.CONFIRMED, - timestamp: transaction.blockTimestamp, - transaction: { - data: transaction.data || undefined, - encryptedData: transaction.encryptedData || undefined, - encryptionMethod: transaction.encryptionMethod || undefined, - keys: transaction.publicKeys?.reduce( - (prev, curr, i) => ({ - ...prev, - [curr]: transaction.encryptedKeys?.[i], - }), - {}, - ), - }, - }; - } -} - -export class TheGraphDataWrite implements DataAccessTypes.IDataWrite { - private logger: LogTypes.ILogger; - private network: string; - private pendingStore?: PendingStore; - - constructor( - protected readonly storage: StorageTypes.IStorageWrite, - private readonly graphql: SubgraphClient, - { network, logger, pendingStore }: TheGraphDataAccessBaseOptions, - ) { - this.logger = logger || new SimpleLogger(); - this.network = network; - this.pendingStore = pendingStore; - } - - async initialize(): Promise { - await this.graphql.getBlockNumber(); - await this.storage.initialize(); - } - - close(): Promise { - return Promise.resolve(); - } - - async persistTransaction( - transaction: DataAccessTypes.ITransaction, - channelId: string, - topics?: string[] | undefined, - ): Promise { - const updatedBlock = Block.pushTransaction( - Block.createEmptyBlock(), - transaction, - channelId, - topics, - ); - - const storageResult = await 
this.storage.append(JSON.stringify(updatedBlock)); - - return this.createPersistTransactionResult(channelId, transaction, storageResult, topics || []); - } - - protected createPersistTransactionResult( - channelId: string, - transaction: DataAccessTypes.ITransaction, - storageResult: StorageTypes.IAppendResult, - topics: string[], - ): DataAccessTypes.IReturnPersistTransaction { - const eventEmitter = new EventEmitter() as DataAccessEventEmitter; - this.pendingStore?.add(channelId, transaction, storageResult); - - const result: DataAccessTypes.IReturnPersistTransactionRaw = { - meta: { - transactionStorageLocation: storageResult.id, - storageMeta: storageResult.meta, - topics, - }, - result: {}, - }; - - storageResult.on('confirmed', () => { - this.logger.debug(`Looking for ${storageResult.id} in subgraph`); - retry( - async () => { - const response = await this.graphql.getTransactionsByHash(storageResult.id); - if (response.transactions.length === 0) { - throw Error('no transactions'); - } - this.logger.debug(`Hash ${storageResult.id} found in subgraph.`); - return response; - }, - { maxRetries: 100, retryDelay: 1000 }, - )() - .then((response) => { - this.pendingStore?.remove(channelId); - eventEmitter.emit('confirmed', { - ...result, - meta: { - ...result.meta, - storageMeta: getStorageMeta( - response.transactions[0], - response._meta.block.number, - this.network, - ), - }, - }); - }) - .catch((error) => { - this.pendingStore?.remove(channelId); - eventEmitter.emit('error', error); - }); - }); - - return Object.assign(eventEmitter, result); - } -} - -class NoopDataWrite implements DataAccessTypes.IDataWrite { - async initialize(): Promise { - // no-op - } - - async close(): Promise { - // no-op - } - - persistTransaction(): Promise { - throw new Error( - `cannot call persistTranscation without storage. Specify storage on ${TheGraphDataAccess.name}`, - ); - } -} - -export class TheGraphDataAccess extends CombinedDataAccess { - private readonly graphql: SubgraphClient; - private readonly storage: StorageTypes.IStorageWrite | undefined; - - constructor({ graphql, storage, ...options }: TheGraphDataAccessOptions) { - const { url, ...rest } = graphql; - if (!options.pendingStore) { - options.pendingStore = new PendingStore(); - } - const graphqlClient = new SubgraphClient(url, rest); - - const reader = new TheGraphDataRead(graphqlClient, options); - - const writer = storage - ? 
new TheGraphDataWrite(storage, graphqlClient, options) - : new NoopDataWrite(); - - super(reader, writer); - this.graphql = graphqlClient; - this.storage = storage; - } - - async _getStatus(): Promise { - let storage: any = null; - if (this.storage && '_getStatus' in this.storage) { - storage = await (this.storage as StorageTypes.IStorage)._getStatus(); - } - return { - lastBlock: await this.graphql.getBlockNumber(), - endpoint: this.graphql.endpoint, - storage, + result, }; } } diff --git a/packages/thegraph-data-access/src/index.ts b/packages/thegraph-data-access/src/index.ts index cb60f77cb5..d8e75ffe99 100644 --- a/packages/thegraph-data-access/src/index.ts +++ b/packages/thegraph-data-access/src/index.ts @@ -1,9 +1,3 @@ -export { - TheGraphDataAccess, - TheGraphDataRead, - TheGraphDataWrite, - TheGraphDataAccessOptions, -} from './data-access'; -export { PendingStore } from './pending-store'; +export { TheGraphDataAccess } from './data-access'; export { SubgraphClient } from './subgraph-client'; export * as queries from './queries'; diff --git a/packages/thegraph-data-access/src/pending-store.ts b/packages/thegraph-data-access/src/pending-store.ts deleted file mode 100644 index 81daa03684..0000000000 --- a/packages/thegraph-data-access/src/pending-store.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { DataAccessTypes, StorageTypes } from '@requestnetwork/types'; - -type PendingItem = { - transaction: DataAccessTypes.ITransaction; - storageResult: StorageTypes.IAppendResult; -}; -/** - * A simple in-memory store to share state between DataReader and DataWriter - * Useful to retrieve a transaction that was just emitted but is not confirmed yet - **/ -export class PendingStore { - private pending: Record = {}; - - /** Gets a pending tx */ - public get(channelId: string): PendingItem { - return this.pending[channelId]; - } - - public add( - channelId: string, - transaction: DataAccessTypes.ITransaction, - storageResult: StorageTypes.IAppendResult, - ): void { - this.pending[channelId] = { transaction, storageResult }; - } - - public remove(channelId: string): void { - delete this.pending[channelId]; - } -} diff --git a/packages/thegraph-data-access/src/queries.ts b/packages/thegraph-data-access/src/queries.ts index 7ccaf0739a..6531332cfd 100644 --- a/packages/thegraph-data-access/src/queries.ts +++ b/packages/thegraph-data-access/src/queries.ts @@ -87,7 +87,7 @@ export const GetChannelsByTopicsQuery = gql` orderBy: blockTimestamp orderDirection: asc ) { - ...TransactionsBody + channelId } } `; diff --git a/packages/thegraph-data-access/src/subgraph-client.ts b/packages/thegraph-data-access/src/subgraph-client.ts index c26801e446..450177189a 100644 --- a/packages/thegraph-data-access/src/subgraph-client.ts +++ b/packages/thegraph-data-access/src/subgraph-client.ts @@ -1,4 +1,4 @@ -import { DataAccessTypes } from '@requestnetwork/types'; +import { DataAccessTypes, StorageTypes } from '@requestnetwork/types'; import { GraphQLClient } from 'graphql-request'; import { GetBlock, @@ -6,14 +6,16 @@ import { GetTransactionsByChannelIdQuery, GetTransactionsByHashQuery, Meta, + Transaction, TransactionsBody, } from './queries'; import { RequestInit } from 'graphql-request/dist/types.dom'; +import { Variables } from 'graphql-request/dist/types'; // Max Int value (as supported by grapqhl types) const MAX_INT_VALUE = 0x7fffffff; -export class SubgraphClient { +export class SubgraphClient implements StorageTypes.IIndexer { private graphql: GraphQLClient; public readonly endpoint: string; 
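  // SubgraphClient now implements StorageTypes.IIndexer, so TheGraphDataAccess /
  // DataAccessRead can treat the subgraph as a plain transaction indexer.
  // A minimal usage sketch (the endpoint and channel/topic values below are
  // illustrative only, not a real deployment):
  //
  //   const indexer = new SubgraphClient('https://thegraph.example/subgraphs/name/request');
  //   await indexer.initialize();
  //   const byChannel = await indexer.getTransactionsByChannelId(channelId);
  //   const byTopic = await indexer.getTransactionsByTopics(['0xsometopic']);
  //   // both resolve to { transactions, blockNumber }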
constructor(endpoint: string, options?: RequestInit) { @@ -21,31 +23,88 @@ export class SubgraphClient { this.graphql = new GraphQLClient(endpoint, options); } + public async initialize(): Promise { + await this.getBlockNumber(); + } + public async getBlockNumber(): Promise { const { _meta } = await this.graphql.request(GetBlock); return _meta.block.number; } - public getTransactionsByHash(hash: string): Promise { - return this.graphql.request(GetTransactionsByHashQuery, { - hash, - }); + public getTransactionsByStorageLocation( + hash: string, + ): Promise { + return this.fetchAndFormat(GetTransactionsByHashQuery, { hash }); } public getTransactionsByChannelId( channelId: string, updatedBetween?: DataAccessTypes.ITimestampBoundaries, - ): Promise { - return this.graphql.request(GetTransactionsByChannelIdQuery, { + ): Promise { + return this.fetchAndFormat(GetTransactionsByChannelIdQuery, { channelId, ...this.getTimeVariables(updatedBetween), }); } - public getChannelsByTopics(topics: string[]): Promise { - return this.graphql.request(GetChannelsByTopicsQuery, { - topics, - }); + // FIXME: this should be possible to do in a single query to the subgraph, + // but currently one transaction doesn't contain topics from previous ones on the same channel. + // This could be fixed on the Subgraph indexer code for optimization. + public async getTransactionsByTopics( + topics: string[], + ): Promise { + const { _meta, transactions } = await this.graphql.request< + Meta & { transactions: { channelId: string }[] } + >(GetChannelsByTopicsQuery, { topics }); + + const channelIds = transactions + .map((x) => x.channelId) + .filter((val, i, self) => self.indexOf(val) === i); + const transactionsByChannel = await Promise.all( + channelIds.map((channelId) => + this.graphql + .request(GetTransactionsByChannelIdQuery, { + channelId, + ...this.getTimeVariables({}), + }) + .then((x) => x.transactions), + ), + ).then((x) => x.flat()); + + return { + transactions: transactionsByChannel.map(this.toIndexedTransaction), + blockNumber: _meta.block.number, + }; + } + + private async fetchAndFormat( + query: string, + parameters: Variables | undefined, + ): Promise { + const { _meta, transactions } = await this.graphql.request(query, parameters); + return { + transactions: transactions.map(this.toIndexedTransaction), + blockNumber: _meta.block.number, + }; + } + + private toIndexedTransaction({ + publicKeys, + encryptedKeys, + ...transaction + }: Transaction): StorageTypes.IIndexedTransaction { + return { + ...transaction, + keys: + publicKeys?.reduce( + (prev, curr, i) => ({ + ...prev, + [curr]: encryptedKeys?.[i] || '', + }), + {} as Record, + ) || undefined, + }; } private getTimeVariables(updatedBetween?: DataAccessTypes.ITimestampBoundaries) { diff --git a/packages/thegraph-data-access/src/types.ts b/packages/thegraph-data-access/src/types.ts new file mode 100644 index 0000000000..53c0c1b309 --- /dev/null +++ b/packages/thegraph-data-access/src/types.ts @@ -0,0 +1,9 @@ +import { StorageTypes } from '@requestnetwork/types'; +import { DataAccessBaseOptions } from '@requestnetwork/data-access'; + +import { RequestInit } from 'graphql-request/dist/types.dom'; + +export type TheGraphDataAccessOptions = DataAccessBaseOptions & { + graphql: { url: string } & RequestInit; + storage?: StorageTypes.IStorageWrite; +}; diff --git a/packages/toolbox/package.json b/packages/toolbox/package.json index c6332870d0..54139bc686 100644 --- a/packages/toolbox/package.json +++ b/packages/toolbox/package.json @@ -58,6 +58,7 @@ 
"yargs": "17.6.2" }, "devDependencies": { + "@types/bluebird": "3.5.38", "@types/inquirer": "8.1.3", "@types/yargs": "17.0.14", "cross-env": "7.0.2", diff --git a/packages/toolbox/src/commands/hash/submit.ts b/packages/toolbox/src/commands/hash/submit.ts index 3b345da6fa..a1b75671db 100644 --- a/packages/toolbox/src/commands/hash/submit.ts +++ b/packages/toolbox/src/commands/hash/submit.ts @@ -5,7 +5,6 @@ import { InferArgs } from '../../types'; import yargs from 'yargs'; import { getWallet } from '../transaction/utils'; import { EthereumTransactionSubmitter, IpfsStorage } from '@requestnetwork/ethereum-storage'; -import { StorageTypes } from '@requestnetwork/types'; import { EvmChains } from '@requestnetwork/currency'; export const command = 'hash submit '; @@ -22,12 +21,7 @@ export const handler = async (argv: yargs.Arguments { + .on('confirmed', (resultPersistTransaction) => { const resultAfterConfirmation = { meta: { dataAccessMeta: resultPersistTransaction.meta, diff --git a/packages/transaction-manager/test/index.test.ts b/packages/transaction-manager/test/index.test.ts index caadb683f2..719fef64f4 100644 --- a/packages/transaction-manager/test/index.test.ts +++ b/packages/transaction-manager/test/index.test.ts @@ -12,7 +12,6 @@ import TransactionsParser from '../src/transactions-parser'; import * as TestData from './unit/utils/test-data'; const extraTopics = ['topic1', 'topic2']; -const fakeTxHash = '0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; const data = '{ "what": "ever", "it": "is,", "this": "must", "work": true }'; const data2 = '{"or": "can", "be":false}'; @@ -34,7 +33,7 @@ const dataHash2 = normalizeKeccak256Hash(JSON.parse(data2)); const channelId2 = MultiFormat.serialize(dataHash2); const fakeMetaDataAccessPersistReturn: DataAccessTypes.IReturnPersistTransaction = Object.assign( - new EventEmitter(), + new EventEmitter() as DataAccessTypes.PersistTransactionEmitter, { meta: { transactionStorageLocation: 'fakeDataId', topics: extraTopics }, result: {}, @@ -62,19 +61,17 @@ describe('index', () => { getTransactionsByChannelId: jest.fn().mockReturnValue(fakeMetaDataAccessGetReturn), initialize: jest.fn(), close: jest.fn(), - // persistTransaction: jest.fn().mockReturnValue(fakeMetaDataAccessPersistReturn), persistTransaction: jest.fn((): any => { - setTimeout(() => { - fakeMetaDataAccessPersistReturn.emit( - 'confirmed', - { + setTimeout( + () => { + fakeMetaDataAccessPersistReturn.emit('confirmed', { meta: { transactionStorageLocation: 'fakeDataId', topics: extraTopics }, - result: { topics: [fakeTxHash] }, - }, - // eslint-disable-next-line no-magic-numbers - 100, - ); - }); + result: {}, + }); + }, + // eslint-disable-next-line no-magic-numbers + 100, + ); return fakeMetaDataAccessPersistReturn; }), }; diff --git a/packages/types/package.json b/packages/types/package.json index 3b590043fd..4fb1ad7456 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -38,13 +38,12 @@ "prepare": "yarn run build" }, "dependencies": { - "ethers": "5.5.1", - "events": "3.2.0" + "ethers": "5.5.1" }, "devDependencies": { - "@types/events": "3.0.0", "shx": "0.3.2", "ts-node": "10.9.1", + "typed-emitter": "2.1.0", "typescript": "4.9.5" }, "gitHead": "6155223cfce769e48ccae480c510b35b4f54b4d0" diff --git a/packages/types/src/data-access-types.ts b/packages/types/src/data-access-types.ts index ea814af55f..0899640e42 100644 --- a/packages/types/src/data-access-types.ts +++ b/packages/types/src/data-access-types.ts @@ -1,5 +1,5 @@ -import { EventEmitter 
} from 'events'; import * as StorageTypes from './storage-types'; +import { ConfirmationEventEmitter } from './events'; /** Data Access Layer */ export interface IDataRead { @@ -32,25 +32,11 @@ export interface IDataWrite { } export interface IDataAccess extends IDataRead, IDataWrite { - _getStatus(detailed?: boolean): Promise; + _getStatus?(): Promise; } export interface IDataAccessStatus { - filesIgnored: { - count: number; - list?: { [location: string]: string }; - }; - filesRetrieved: { - count: number; - lastTimestamp: number | null; - list?: string[]; - }; - lastSynchronizationTimestamp: number; storage: any; - synchronizationConfig: { - intervalTime: number; - successiveFailureThreshold: number; - }; } /** Enum of state possible for an action */ @@ -79,7 +65,9 @@ export type IReturnPersistTransactionRaw = { result: Record; }; -export type IReturnPersistTransaction = EventEmitter & IReturnPersistTransactionRaw; +export type PersistTransactionEmitter = ConfirmationEventEmitter; + +export type IReturnPersistTransaction = PersistTransactionEmitter & IReturnPersistTransactionRaw; /** return interface for getTransactionsByChannelId */ export interface IReturnGetTransactions { @@ -159,26 +147,18 @@ export interface ITimestampedTransaction { /** Transaction data */ export type ITransactionData = string; -/** - * An index to store locations and timestamps of transactions in IPFS. - */ -export interface ITransactionIndex { - initialize(): Promise; - getLastTransactionTimestamp(): Promise; - addTransaction(dataId: string, header: IBlockHeader, timestamp: number): Promise; - getChannelIdsForTopic( - topic: string, - timestampBoundaries?: ITimestampBoundaries, - ): Promise; - getChannelIdsForMultipleTopics( - topics: string[], - timestampBoundaries?: ITimestampBoundaries, - ): Promise; - getIndexedLocations(): Promise; - getStorageLocationList( - channelId: string, - timestampBoundaries?: ITimestampBoundaries, - ): Promise; - updateTimestamp(dataId: string, timestamp: number): Promise; - removeTransaction(dataId: string): Promise; +export type PendingItem = { + topics: string[]; + transaction: ITransaction; + storageResult: StorageTypes.IEntry; +}; + +export interface IPendingStore { + get(channelId: string): PendingItem | undefined; + + findByTopics(topic: string[]): (PendingItem & { channelId: string })[]; + + add(channelId: string, item: PendingItem): void; + + remove(channelId: string): void; } diff --git a/packages/types/src/events.ts b/packages/types/src/events.ts new file mode 100644 index 0000000000..5a17e2469f --- /dev/null +++ b/packages/types/src/events.ts @@ -0,0 +1,6 @@ +import TypedEmitter from 'typed-emitter'; + +export type ConfirmationEventEmitter = TypedEmitter<{ + confirmed: (receipt: T) => void; + error: (error: unknown) => void; +}>; diff --git a/packages/types/src/storage-types.ts b/packages/types/src/storage-types.ts index cc3c8ffe18..756393889f 100644 --- a/packages/types/src/storage-types.ts +++ b/packages/types/src/storage-types.ts @@ -1,18 +1,44 @@ -import { EventEmitter } from 'events'; - import { BigNumber } from 'ethers'; +import { ConfirmationEventEmitter } from './events'; + +export type IIndexedTransaction = { + hash: string; + channelId: string; + data?: string; + encryptedData?: string; + encryptionMethod?: string; + keys?: Record; + blockNumber: number; + blockTimestamp: number; + transactionHash: string; + smartContractAddress: string; + topics: string[]; + size: string; +}; + +export interface ITransactionSubmitter { + initialize: () => Promise; + 
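  // a concrete implementation presumably lives in @requestnetwork/ethereum-storage
  // (EthereumTransactionSubmitter, used by the toolbox `hash submit` command above);
  // submit() is expected to push the given IPFS hash and its size on chain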
submit(ipfsHash: string, ipfsSize: number): Promise; + hashSubmitterAddress?: string; + network?: string; + creationBlockNumber?: number; +} export interface IStorageWrite { initialize: () => Promise; append: (data: string) => Promise; } +export type IGetTransactionsResponse = { + transactions: IIndexedTransaction[]; + blockNumber: number; +}; + export interface IStorageRead { initialize: () => Promise; read: (dataId: string) => Promise; readMany: (dataIds: string[]) => Promise; getData: (options?: ITimestampBoundaries) => Promise; - getIgnoredData: () => Promise; } /** Interface of the storage */ @@ -20,13 +46,21 @@ export interface IStorage extends IStorageRead, IStorageWrite { _getStatus: (detailed?: boolean) => Promise; } +export interface IIndexer { + initialize(): Promise; + getTransactionsByStorageLocation(hash: string): Promise; + getTransactionsByChannelId( + channel: string, + updatedBetween?: ITimestampBoundaries, + ): Promise; + getTransactionsByTopics(topics: string[]): Promise; +} + export type IIpfsConfig = { delayBetweenRetries?: number; - host: string; + url: string; id: string; maxRetries?: number; - port: number; - protocol: string; timeout?: number; }; @@ -34,7 +68,6 @@ export interface IIpfsStorage { initialize: () => Promise; ipfsAdd: (data: string) => Promise; read(hash: string, maxSize?: number, retries?: number): Promise; - pinDataToIPFS(hashes: string[], config?: IPinRequestConfiguration): Promise; getConfig(): Promise; } @@ -55,7 +88,8 @@ export interface IEntry extends IWithMeta { content: string; } -export type IAppendResult = EventEmitter & IEntry; +export type AppendResultEmitter = ConfirmationEventEmitter; +export type IAppendResult = IEntry & AppendResultEmitter; /** A list of entries with the last timestamp these entries were fetched from */ export interface IEntriesWithLastTimestamp { @@ -156,26 +190,12 @@ export interface IWeb3Connection { timeout?: number; } -/** Information to connect to an IPFS gateway */ -export interface IIpfsGatewayConnection { - host: string; - port: number; - protocol: IpfsGatewayProtocol; - timeout: number; -} - /** two blocks number */ export interface IBlockNumbersInterval { blockAfter: number; blockBefore: number; } -/** Protocol to connect to ipfs */ -export enum IpfsGatewayProtocol { - HTTP = 'http', - HTTPS = 'https', -} - /** Storage type for now only ethereum + ipfs available */ export enum StorageSystemType { /** Ethereum and IPFS */ diff --git a/packages/usage-examples/src/mock/mock-storage.ts b/packages/usage-examples/src/mock/mock-storage.ts index db4166116b..fd1b91d34f 100644 --- a/packages/usage-examples/src/mock/mock-storage.ts +++ b/packages/usage-examples/src/mock/mock-storage.ts @@ -42,7 +42,10 @@ export default class MockStorage implements StorageTypes.IStorage { timestamp: nowTimestampInSec, }, }; - const result = Object.assign(new EventEmitter(), resultData); + const result = Object.assign( + new EventEmitter() as StorageTypes.AppendResultEmitter, + resultData, + ); // emit confirmed setTimeout(() => { @@ -91,8 +94,4 @@ export default class MockStorage implements StorageTypes.IStorage { lastTimestamp: nowTimestampInSec, }; } - - public async getIgnoredData(): Promise { - return []; - } } diff --git a/packages/usage-examples/src/request-client-js-add-stakeholders.ts b/packages/usage-examples/src/request-client-js-add-stakeholders.ts index 0095bca8a4..62380fe380 100644 --- a/packages/usage-examples/src/request-client-js-add-stakeholders.ts +++ b/packages/usage-examples/src/request-client-js-add-stakeholders.ts 
@@ -1,8 +1,8 @@ import { EthereumPrivateKeyDecryptionProvider } from '@requestnetwork/epk-decryption'; import { EthereumPrivateKeySignatureProvider } from '@requestnetwork/epk-signature'; import * as RequestNetwork from '@requestnetwork/request-client.js'; -import MockStorage from '../../request-client.js/dist/mock-storage'; -import MockDataAccess from '../../request-client.js/dist/mock-data-access'; +import MockStorage from './mock/mock-storage'; +import { MockDataAccess } from '@requestnetwork/data-access'; // payee information const payeeSignatureInfo = { @@ -80,16 +80,16 @@ const thirdPartyRequestNetwork = new RequestNetwork.RequestNetworkBase({ }); const requestInfo: RequestNetwork.Types.IRequestInfo = { - currency: 'BTC', + currency: 'EUR', expectedAmount: '100000000000', payee: payeeIdentity, payer: payerIdentity, }; const paymentNetwork: RequestNetwork.Types.Payment.PaymentNetworkCreateParameters = { - id: RequestNetwork.Types.Extension.PAYMENT_NETWORK_ID.BITCOIN_ADDRESS_BASED, + id: RequestNetwork.Types.Extension.PAYMENT_NETWORK_ID.ANY_DECLARATIVE, parameters: { - paymentAddress: '1LEMZPBit6tTtjXfaEfz4yYmTuctHWoMV', + paymentInfo: { IBAN: 'FR89370400440532013000', BIC: 'SABAIE2D' }, }, }; @@ -104,63 +104,37 @@ const createParams = { // Optionally, compute the request ID before actually creating it. // Setting the timestamp is recommended, as it has an impact on the generated ID. createParams.requestInfo.timestamp = RequestNetwork.Utils.getCurrentTimestampInSecond(); -payeeRequestNetwork - .computeRequestId(createParams) - .then((requestId) => { - console.log(`The request will be created with ID ${requestId}`); - }) - .catch((error) => { - console.error(error.message || error); - process.exit(1); + +const main = async () => { + const requestId = await payeeRequestNetwork.computeRequestId(createParams); + console.log(`The request will be created with ID ${requestId}`); + const request = await payeeRequestNetwork._createEncryptedRequest(createParams, [ + payeeEncryptionParameters, + payerEncryptionParameters, + ]); + console.log('request:', request.requestId); + + const confirmedRequestData = await request.waitForConfirmation(); + console.log('confirmed request data:'); + console.log(confirmedRequestData); + + const requestData = await request.addStakeholders( + [thirdPartyEncryptionParameters], + payeeIdentity, + ); + console.log('request data after add stakeholders:'); + console.log(requestData); + + const payeeFetchedRequest = await payeeRequestNetwork.fromRequestId(requestId, { + disablePaymentDetection: true, }); + console.log('payee fetched request:'); + console.log(payeeFetchedRequest.getData()); -payeeRequestNetwork - ._createEncryptedRequest(createParams, [payeeEncryptionParameters, payerEncryptionParameters]) - .then((request) => { - console.log('request:'); - console.log(request); - request - .addStakeholders([thirdPartyEncryptionParameters], payeeIdentity) - .then((requestData) => { - console.log('request data after add stakeholders:'); - console.log(requestData); - request - .waitForConfirmation() - .then((confirmedRequestData) => { - console.log('confirmed request data:'); - console.log(confirmedRequestData); - payeeRequestNetwork - .fromRequestId(confirmedRequestData.requestId) - .then((payeeFetchedRequest) => { - console.log('payee fetched request:'); - console.log(payeeFetchedRequest); - thirdPartyRequestNetwork - .fromRequestId(confirmedRequestData.requestId) - .then((thirdPartyFetchedRequest) => { - console.log('third party fetched request:'); - 
console.log(thirdPartyFetchedRequest); - }) - .catch((error) => { - console.error(error.message || error); - process.exit(1); - }); - }) - .catch((error) => { - console.error(error.message || error); - process.exit(1); - }); - }) - .catch((error) => { - console.error(error.message || error); - process.exit(1); - }); - }) - .catch((error) => { - console.error(error.message || error); - process.exit(1); - }); - }) - .catch((error) => { - console.error(error.message || error); - process.exit(1); + const thirdPartyFetchedRequest = await thirdPartyRequestNetwork.fromRequestId(requestId, { + disablePaymentDetection: true, }); + console.log('third party fetched request:'); + console.log(thirdPartyFetchedRequest.getData()); +}; +main(); diff --git a/packages/usage-examples/src/request-logic-add-stakeholder.ts b/packages/usage-examples/src/request-logic-add-stakeholder.ts index 992ebe81e8..007e9b610a 100644 --- a/packages/usage-examples/src/request-logic-add-stakeholder.ts +++ b/packages/usage-examples/src/request-logic-add-stakeholder.ts @@ -1,4 +1,4 @@ -import { DataAccess } from '@requestnetwork/data-access'; +import { MockDataAccess as DataAccess } from '@requestnetwork/data-access'; import { EthereumPrivateKeyDecryptionProvider } from '@requestnetwork/epk-decryption'; import { EthereumPrivateKeySignatureProvider } from '@requestnetwork/epk-signature'; import { RequestLogic } from '@requestnetwork/request-logic'; diff --git a/packages/usage-examples/src/request-logic-clear-request.ts b/packages/usage-examples/src/request-logic-clear-request.ts index 674f033911..fc56a96fad 100644 --- a/packages/usage-examples/src/request-logic-clear-request.ts +++ b/packages/usage-examples/src/request-logic-clear-request.ts @@ -1,4 +1,4 @@ -import { DataAccess } from '@requestnetwork/data-access'; +import { MockDataAccess as DataAccess } from '@requestnetwork/data-access'; import { EthereumPrivateKeySignatureProvider } from '@requestnetwork/epk-signature'; import { RequestLogic } from '@requestnetwork/request-logic'; import { TransactionManager } from '@requestnetwork/transaction-manager'; diff --git a/packages/usage-examples/src/request-logic-encrypted-request.ts b/packages/usage-examples/src/request-logic-encrypted-request.ts index afdb9f016f..5d5dd2ee42 100644 --- a/packages/usage-examples/src/request-logic-encrypted-request.ts +++ b/packages/usage-examples/src/request-logic-encrypted-request.ts @@ -1,4 +1,4 @@ -import { DataAccess } from '@requestnetwork/data-access'; +import { MockDataAccess as DataAccess } from '@requestnetwork/data-access'; import { EthereumPrivateKeyDecryptionProvider } from '@requestnetwork/epk-decryption'; import { EthereumPrivateKeySignatureProvider } from '@requestnetwork/epk-signature'; import { RequestLogic } from '@requestnetwork/request-logic'; diff --git a/packages/web3-signature/src/web3-signature-provider.ts b/packages/web3-signature/src/web3-signature-provider.ts index 3527b631ba..6c48a25eb0 100644 --- a/packages/web3-signature/src/web3-signature-provider.ts +++ b/packages/web3-signature/src/web3-signature-provider.ts @@ -21,7 +21,7 @@ export default class Web3SignatureProvider implements SignatureProviderTypes.ISi try { this.web3Provider = new providers.Web3Provider(web3Provider); } catch (error) { - throw Error(`Can't initialize web3-eth ${error}`); + throw Error(`Can't initialize Web3Provider ${error}`); } } diff --git a/tsconfig.json b/tsconfig.json index e9f2a27f4b..84638a12a9 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -13,6 +13,6 @@ "moduleResolution": "node", 
"resolveJsonModule": true, "useUnknownInCatchVariables": false, - "lib": ["es2018"] + "lib": ["es2019"] } } diff --git a/yarn.lock b/yarn.lock index 6a035aead1..1f120e2bce 100644 --- a/yarn.lock +++ b/yarn.lock @@ -43,16 +43,16 @@ dependencies: "@babel/highlight" "^7.14.5" -"@babel/compat-data@^7.13.0", "@babel/compat-data@^7.22.9": - version "7.22.9" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730" - integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ== - "@babel/compat-data@^7.14.5": version "7.14.5" resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.14.5.tgz" integrity sha512-kixrYn4JwfAVPa0f2yfzc2AWti6WRRyO3XjWW5PJAvtE11qhSayrrcrEnee05KAtNaPC+EwehE8Qt1UedEVB8w== +"@babel/compat-data@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730" + integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ== + "@babel/core@^7.0.0", "@babel/core@^7.1.0", "@babel/core@^7.7.5": version "7.22.9" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.9.tgz#bd96492c68822198f33e8a256061da3cf391f58f" @@ -91,17 +91,6 @@ dependencies: "@babel/types" "^7.14.5" -"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.22.9": - version "7.22.9" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.9.tgz#f9d0a7aaaa7cd32a3f31c9316a69f5a9bcacb892" - integrity sha512-7qYrNM6HjpnPHJbopxmb8hSPoZ0gsX8IvUS32JGVoy+pU9e5N0nLr1VjJoR6kA4d9dmGLxNYOjeB8sUDal2WMw== - dependencies: - "@babel/compat-data" "^7.22.9" - "@babel/helper-validator-option" "^7.22.5" - browserslist "^4.21.9" - lru-cache "^5.1.1" - semver "^6.3.1" - "@babel/helper-compilation-targets@^7.14.5": version "7.14.5" resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.14.5.tgz" @@ -112,6 +101,17 @@ browserslist "^4.16.6" semver "^6.3.0" +"@babel/helper-compilation-targets@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.9.tgz#f9d0a7aaaa7cd32a3f31c9316a69f5a9bcacb892" + integrity sha512-7qYrNM6HjpnPHJbopxmb8hSPoZ0gsX8IvUS32JGVoy+pU9e5N0nLr1VjJoR6kA4d9dmGLxNYOjeB8sUDal2WMw== + dependencies: + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.5" + browserslist "^4.21.9" + lru-cache "^5.1.1" + semver "^6.3.1" + "@babel/helper-create-class-features-plugin@^7.14.5": version "7.14.5" resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.14.5.tgz" @@ -124,20 +124,6 @@ "@babel/helper-replace-supers" "^7.14.5" "@babel/helper-split-export-declaration" "^7.14.5" -"@babel/helper-define-polyfill-provider@^0.1.5": - version "0.1.5" - resolved "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.1.5.tgz" - integrity sha512-nXuzCSwlJ/WKr8qxzW816gwyT6VZgiJG17zR40fou70yfAcqjoNyTLl/DQ+FExw5Hx5KNqshmN8Ldl/r2N7cTg== - dependencies: - "@babel/helper-compilation-targets" "^7.13.0" - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/traverse" "^7.13.0" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - semver "^6.1.2" - "@babel/helper-environment-visitor@^7.22.5": version 
"7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" @@ -181,13 +167,6 @@ dependencies: "@babel/types" "^7.14.5" -"@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" - integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== - dependencies: - "@babel/types" "^7.22.5" - "@babel/helper-module-imports@^7.14.5": version "7.14.5" resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.14.5.tgz" @@ -195,6 +174,13 @@ dependencies: "@babel/types" "^7.14.5" +"@babel/helper-module-imports@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" + integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== + dependencies: + "@babel/types" "^7.22.5" + "@babel/helper-module-transforms@^7.14.5", "@babel/helper-module-transforms@^7.22.9": version "7.22.9" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.9.tgz#92dfcb1fbbb2bc62529024f72d942a8c97142129" @@ -213,7 +199,7 @@ dependencies: "@babel/types" "^7.14.5" -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.8.0": +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.8.0": version "7.13.0" resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz" integrity sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ== @@ -585,18 +571,6 @@ "@babel/plugin-syntax-jsx" "^7.14.5" "@babel/types" "^7.14.5" -"@babel/plugin-transform-runtime@^7.5.5": - version "7.13.10" - resolved "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.13.10.tgz" - integrity sha512-Y5k8ipgfvz5d/76tx7JYbKQTcgFSU6VgJ3kKQv4zGTKr+a9T/KBvfRvGtSFgKDQGt/DBykQixV0vNWKIdzWErA== - dependencies: - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - babel-plugin-polyfill-corejs2 "^0.1.4" - babel-plugin-polyfill-corejs3 "^0.1.3" - babel-plugin-polyfill-regenerator "^0.1.2" - semver "^6.3.0" - "@babel/plugin-transform-shorthand-properties@^7.0.0": version "7.14.5" resolved "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.14.5.tgz" @@ -619,7 +593,7 @@ dependencies: "@babel/helper-plugin-utils" "^7.14.5" -"@babel/runtime@^7.0.0", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5": +"@babel/runtime@^7.0.0", "@babel/runtime@^7.3.1": version "7.13.10" resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.13.10.tgz" integrity sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw== @@ -672,7 +646,7 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/traverse@^7.1.0", "@babel/traverse@^7.13.0", "@babel/traverse@^7.14.5", "@babel/traverse@^7.22.6", "@babel/traverse@^7.22.8": +"@babel/traverse@^7.1.0", "@babel/traverse@^7.14.5", 
"@babel/traverse@^7.22.6", "@babel/traverse@^7.22.8": version "7.22.8" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.8.tgz#4d4451d31bc34efeae01eac222b514a77aa4000e" integrity sha512-y6LPR+wpM2I3qJrsheCTwhIinzkETbplIgPBbwvqPKc+uljeA5gP+3nP8irdYt1mjQaDnlIcG+dw8OjAco4GXw== @@ -4101,15 +4075,6 @@ dependencies: "@octokit/openapi-types" "^5.3.2" -"@openzeppelin/contract-loader@^0.4.0": - version "0.4.0" - resolved "https://registry.npmjs.org/@openzeppelin/contract-loader/-/contract-loader-0.4.0.tgz" - integrity sha512-K+Pl4tn0FbxMSP0H9sgi61ayCbecpqhQmuBshelC7A3q2MlpcqWRJan0xijpwdtv6TORNd5oZNe/+f3l+GD6tw== - dependencies: - find-up "^4.1.0" - fs-extra "^8.1.0" - try-require "^1.2.1" - "@openzeppelin/contracts@4.4.2": version "4.4.2" resolved "https://registry.npmjs.org/@openzeppelin/contracts/-/contracts-4.4.2.tgz" @@ -4125,22 +4090,6 @@ resolved "https://registry.npmjs.org/@openzeppelin/contracts/-/contracts-4.7.3.tgz" integrity sha512-dGRS0agJzu8ybo44pCIf3xBaPQN/65AIXNgK8+4gzKd5kbvlqyxryUYVLJv7fK98Seyd2hDZzVEHSWAh0Bt1Yw== -"@openzeppelin/test-helpers@0.5.6": - version "0.5.6" - resolved "https://registry.npmjs.org/@openzeppelin/test-helpers/-/test-helpers-0.5.6.tgz" - integrity sha512-8U4sR4ed4cFmc6UKj7akUxZzQJKU9P3p/3RbF+urQuRLLhBaB8zSya1m9VB7/anYEZnBmTDk8LuVgAmYaCPs9A== - dependencies: - "@openzeppelin/contract-loader" "^0.4.0" - "@truffle/contract" "^4.0.35 <4.2.2" - ansi-colors "^3.2.3" - chai "^4.2.0" - chai-bn "^0.2.1" - ethjs-abi "^0.2.1" - lodash.flatten "^4.4.0" - semver "^5.6.0" - web3 "^1.2.1" - web3-utils "^1.2.1" - "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": version "1.1.2" resolved "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz" @@ -4434,13 +4383,6 @@ faker "5.5.3" fast-check "^2.12.1" -"@truffle/blockchain-utils@^0.0.18": - version "0.0.18" - resolved "https://registry.npmjs.org/@truffle/blockchain-utils/-/blockchain-utils-0.0.18.tgz" - integrity sha512-XnRu5p1QO9krJizOeBY5WfzPDvEOmCnOT5u6qF8uN3Kkq9vcH3ZqW4XTuzz9ERZNpZfWb3UJx4PUosgeHLs5vw== - dependencies: - source-map-support "^0.5.16" - "@truffle/blockchain-utils@^0.0.32": version "0.0.32" resolved "https://registry.npmjs.org/@truffle/blockchain-utils/-/blockchain-utils-0.0.32.tgz" @@ -4470,15 +4412,6 @@ "@truffle/error" "^0.1.0" colors "1.4.0" -"@truffle/contract-schema@^3.1.0": - version "3.3.4" - resolved "https://registry.npmjs.org/@truffle/contract-schema/-/contract-schema-3.3.4.tgz" - integrity sha512-HzscBl/GhZBvPNQeD9l6ewSHSkvNmE+bA0iTVa0Y2mNf5GD5Y3fK2NPyfbOdtckOvLqebvYGEDEPRiXc3BZ05g== - dependencies: - ajv "^6.10.0" - crypto-js "^3.1.9-1" - debug "^4.3.1" - "@truffle/contract-schema@^3.4.4": version "3.4.6" resolved "https://registry.npmjs.org/@truffle/contract-schema/-/contract-schema-3.4.6.tgz" @@ -4507,25 +4440,6 @@ web3-eth-abi "1.5.3" web3-utils "1.5.3" -"@truffle/contract@^4.0.35 <4.2.2": - version "4.2.1" - resolved "https://registry.npmjs.org/@truffle/contract/-/contract-4.2.1.tgz" - integrity sha512-af1rUyU/W75GYHt/i7r+NwHozwaCma7V/q/+SRZ3Cw2MFaGOQ0dA/ZGhH8P1F0fmDiUe1DBEIbKxXWai0PWFYg== - dependencies: - "@truffle/blockchain-utils" "^0.0.18" - "@truffle/contract-schema" "^3.1.0" - "@truffle/error" "^0.0.8" - "@truffle/interface-adapter" "^0.4.6" - bignumber.js "^7.2.1" - ethereum-ens "^0.8.0" - ethers "^4.0.0-beta.1" - exorcist "^1.0.1" - source-map-support "^0.5.16" - web3 "1.2.1" - web3-core-promievent "1.2.1" - web3-eth-abi "1.2.1" - web3-utils "1.2.1" - "@truffle/debug-utils@^6.0.6": version "6.0.15" resolved 
"https://registry.npmjs.org/@truffle/debug-utils/-/debug-utils-6.0.15.tgz" @@ -4543,41 +4457,11 @@ resolved "https://registry.npmjs.org/@truffle/error/-/error-0.0.15.tgz" integrity sha512-keiYGlVAH7GLggqMpB+XorT7NkOlr3qeBc56thI2WP0eas3qstlyrc0WvckXJ2LXrOfcR2uH6f0Nk6FIxaKXSA== -"@truffle/error@^0.0.8": - version "0.0.8" - resolved "https://registry.npmjs.org/@truffle/error/-/error-0.0.8.tgz" - integrity sha512-x55rtRuNfRO1azmZ30iR0pf0OJ6flQqbax1hJz+Avk1K5fdmOv5cr22s9qFnwTWnS6Bw0jvJEoR0ITsM7cPKtQ== - "@truffle/error@^0.1.0": version "0.1.0" resolved "https://registry.npmjs.org/@truffle/error/-/error-0.1.0.tgz" integrity sha512-RbUfp5VreNhsa2Q4YbBjz18rOQI909pG32bghl1hulO7IpvcqTS+C3Ge5cNbiWQ1WGzy1wIeKLW0tmQtHFB7qg== -"@truffle/hdwallet-provider@1.2.3": - version "1.2.3" - resolved "https://registry.npmjs.org/@truffle/hdwallet-provider/-/hdwallet-provider-1.2.3.tgz" - integrity sha512-o8Mp7eJNQzy+7JmXObCZussrr933nnRWTA1x3qOn0cda6XzDUkCYyg1COvW4zbCTFyIPekHoBloGE4scbulRww== - dependencies: - "@trufflesuite/web3-provider-engine" "15.0.13-1" - any-promise "^1.3.0" - bindings "^1.5.0" - ethereum-cryptography "^0.1.3" - ethereum-protocol "^1.0.1" - ethereumjs-tx "^1.0.0" - ethereumjs-util "^6.1.0" - ethereumjs-wallet "^1.0.1" - source-map-support "^0.5.19" - -"@truffle/interface-adapter@^0.4.6": - version "0.4.19" - resolved "https://registry.npmjs.org/@truffle/interface-adapter/-/interface-adapter-0.4.19.tgz" - integrity sha512-+Zz6Fr8+I2wYSS8RM3WBOMzf22QffMQTnlsYsRgRHzv3gYoRA9ZDLb84lFRfmWyw+IdXTo90tjRHEb5krC6uxg== - dependencies: - bn.js "^5.1.3" - ethers "^4.0.32" - source-map-support "^0.5.19" - web3 "1.2.9" - "@truffle/interface-adapter@^0.5.10": version "0.5.12" resolved "https://registry.npmjs.org/@truffle/interface-adapter/-/interface-adapter-0.5.12.tgz" @@ -4601,84 +4485,6 @@ strip-ansi "^4.0.0" strip-indent "^2.0.0" -"@trufflesuite/eth-json-rpc-filters@^4.1.2-1": - version "4.1.2-1" - resolved "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-filters/-/eth-json-rpc-filters-4.1.2-1.tgz" - integrity sha512-/MChvC5dw2ck9NU1cZmdovCz2VKbOeIyR4tcxDvA5sT+NaL0rA2/R5U0yI7zsbo1zD+pgqav77rQHTzpUdDNJQ== - dependencies: - "@trufflesuite/eth-json-rpc-middleware" "^4.4.2-0" - await-semaphore "^0.1.3" - eth-query "^2.1.2" - json-rpc-engine "^5.1.3" - lodash.flatmap "^4.5.0" - safe-event-emitter "^1.0.1" - -"@trufflesuite/eth-json-rpc-infura@^4.0.3-0": - version "4.0.3-0" - resolved "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-infura/-/eth-json-rpc-infura-4.0.3-0.tgz" - integrity sha512-xaUanOmo0YLqRsL0SfXpFienhdw5bpQ1WEXxMTRi57az4lwpZBv4tFUDvcerdwJrxX9wQqNmgUgd1BrR01dumw== - dependencies: - "@trufflesuite/eth-json-rpc-middleware" "^4.4.2-1" - cross-fetch "^2.1.1" - eth-json-rpc-errors "^1.0.1" - json-rpc-engine "^5.1.3" - -"@trufflesuite/eth-json-rpc-middleware@^4.4.2-0", "@trufflesuite/eth-json-rpc-middleware@^4.4.2-1": - version "4.4.2-1" - resolved "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-middleware/-/eth-json-rpc-middleware-4.4.2-1.tgz" - integrity sha512-iEy9H8ja7/8aYES5HfrepGBKU9n/Y4OabBJEklVd/zIBlhCCBAWBqkIZgXt11nBXO/rYAeKwYuE3puH3ByYnLA== - dependencies: - "@trufflesuite/eth-sig-util" "^1.4.2" - btoa "^1.2.1" - clone "^2.1.1" - eth-json-rpc-errors "^1.0.1" - eth-query "^2.1.2" - ethereumjs-block "^1.6.0" - ethereumjs-tx "^1.3.7" - ethereumjs-util "^5.1.2" - ethereumjs-vm "^2.6.0" - fetch-ponyfill "^4.0.0" - json-rpc-engine "^5.1.3" - json-stable-stringify "^1.0.1" - pify "^3.0.0" - safe-event-emitter "^1.0.1" - -"@trufflesuite/eth-sig-util@^1.4.2": - version "1.4.2" - 
resolved "https://registry.npmjs.org/@trufflesuite/eth-sig-util/-/eth-sig-util-1.4.2.tgz" - integrity sha512-+GyfN6b0LNW77hbQlH3ufZ/1eCON7mMrGym6tdYf7xiNw9Vv3jBO72bmmos1EId2NgBvPMhmYYm6DSLQFTmzrA== - dependencies: - ethereumjs-abi "^0.6.8" - ethereumjs-util "^5.1.1" - -"@trufflesuite/web3-provider-engine@15.0.13-1": - version "15.0.13-1" - resolved "https://registry.npmjs.org/@trufflesuite/web3-provider-engine/-/web3-provider-engine-15.0.13-1.tgz" - integrity sha512-6u3x/iIN5fyj8pib5QTUDmIOUiwAGhaqdSTXdqCu6v9zo2BEwdCqgEJd1uXDh3DBmPRDfiZ/ge8oUPy7LerpHg== - dependencies: - "@trufflesuite/eth-json-rpc-filters" "^4.1.2-1" - "@trufflesuite/eth-json-rpc-infura" "^4.0.3-0" - "@trufflesuite/eth-json-rpc-middleware" "^4.4.2-1" - "@trufflesuite/eth-sig-util" "^1.4.2" - async "^2.5.0" - backoff "^2.5.0" - clone "^2.0.0" - cross-fetch "^2.1.0" - eth-block-tracker "^4.4.2" - eth-json-rpc-errors "^2.0.2" - ethereumjs-block "^1.2.2" - ethereumjs-tx "^1.2.0" - ethereumjs-util "^5.1.5" - ethereumjs-vm "^2.3.4" - json-stable-stringify "^1.0.1" - promise-to-callback "^1.0.0" - readable-stream "^2.2.9" - request "^2.85.0" - semaphore "^1.0.3" - ws "^5.1.1" - xhr "^2.2.0" - xtend "^4.0.1" - "@tsconfig/node10@^1.0.7": version "1.0.9" resolved "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz" @@ -4763,10 +4569,10 @@ dependencies: bignumber.js "*" -"@types/bluebird@3.5.33": - version "3.5.33" - resolved "https://registry.npmjs.org/@types/bluebird/-/bluebird-3.5.33.tgz" - integrity sha512-ndEo1xvnYeHxm7I/5sF6tBvnsA4Tdi3zj1keRKRs12SP+2ye2A27NDJ1B6PqkfMbGAcT+mqQVqbZRIrhfOp5PQ== +"@types/bluebird@3.5.38": + version "3.5.38" + resolved "https://registry.yarnpkg.com/@types/bluebird/-/bluebird-3.5.38.tgz#7a671e66750ccd21c9fc9d264d0e1e5330bc9908" + integrity sha512-yR/Kxc0dd4FfwtEoLZMoqJbM/VE/W7hXn/MIjb+axcwag0iFmSPK7OBUZq1YWLynJUoWQkfUrI7T0HDqGApNSg== "@types/bn.js@*", "@types/bn.js@^5.1.0": version "5.1.0" @@ -4775,7 +4581,7 @@ dependencies: "@types/node" "*" -"@types/bn.js@^4.11.3", "@types/bn.js@^4.11.4", "@types/bn.js@^4.11.5": +"@types/bn.js@^4.11.3", "@types/bn.js@^4.11.5": version "4.11.6" resolved "https://registry.npmjs.org/@types/bn.js/-/bn.js-4.11.6.tgz" integrity sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg== @@ -4841,11 +4647,6 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== -"@types/events@3.0.0": - version "3.0.0" - resolved "https://registry.npmjs.org/@types/events/-/events-3.0.0.tgz" - integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== - "@types/expect@^1.20.4": version "1.20.4" resolved "https://registry.npmjs.org/@types/expect/-/expect-1.20.4.tgz" @@ -5062,12 +4863,7 @@ resolved "https://registry.npmjs.org/@types/node/-/node-17.0.31.tgz" integrity sha512-AR0x5HbXGqkEx9CadRH3EBYx/VkiUgZIhP4wvPn/+5KIsgpNoyFaRlVe0Zlx9gRtg8fA06a9tskE2MSN7TcG4Q== -"@types/node@^10.12.18", "@types/node@^10.3.2": - version "10.17.55" - resolved "https://registry.npmjs.org/@types/node/-/node-10.17.55.tgz" - integrity sha512-koZJ89uLZufDvToeWO5BrC4CR4OUfHnUz2qoPs/daQH6qq3IN62QFxCTZ+bKaCE0xaoCAJYE4AXre8AbghCrhg== - -"@types/node@^12.12.6", "@types/node@^12.6.1": +"@types/node@^12.12.6": version "12.20.6" resolved "https://registry.npmjs.org/@types/node/-/node-12.20.6.tgz" integrity 
sha512-sRVq8d+ApGslmkE9e3i+D3gFGk7aZHAT+G4cIpIEdLJYPsWiSPwcAnJEjddLQQDqV3Ra2jOclX/Sv6YrvGYiWA== @@ -5743,11 +5539,6 @@ ansi-colors@^1.0.1: dependencies: ansi-wrap "^0.1.0" -ansi-colors@^3.2.3: - version "3.2.4" - resolved "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz" - integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== - ansi-escapes@^3.0.0, ansi-escapes@^3.2.0: version "3.2.0" resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz" @@ -5826,7 +5617,7 @@ any-observable@^0.3.0: resolved "https://registry.npmjs.org/any-observable/-/any-observable-0.3.0.tgz" integrity sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog== -any-promise@1.3.0, any-promise@^1.0.0, any-promise@^1.3.0: +any-promise@^1.0.0, any-promise@^1.3.0: version "1.3.0" resolved "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz" integrity sha1-q8av7tzqUugJzcA3au0845Y10X8= @@ -6197,11 +5988,6 @@ available-typed-arrays@^1.0.2: dependencies: array-filter "^1.0.0" -await-semaphore@^0.1.3: - version "0.1.3" - resolved "https://registry.npmjs.org/await-semaphore/-/await-semaphore-0.1.3.tgz" - integrity sha512-d1W2aNSYcz/sxYO4pMGX9vq65qOTu0P800epMud+6cYYX0QcT7zyqcxec3VWzpgvdXo57UWmVbZpLMjX2m1I7Q== - aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz" @@ -6445,30 +6231,6 @@ babel-plugin-jest-hoist@^26.6.2: "@types/babel__core" "^7.0.0" "@types/babel__traverse" "^7.0.6" -babel-plugin-polyfill-corejs2@^0.1.4: - version "0.1.10" - resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.1.10.tgz" - integrity sha512-DO95wD4g0A8KRaHKi0D51NdGXzvpqVLnLu5BTvDlpqUEpTmeEtypgC1xqesORaWmiUOQI14UHKlzNd9iZ2G3ZA== - dependencies: - "@babel/compat-data" "^7.13.0" - "@babel/helper-define-polyfill-provider" "^0.1.5" - semver "^6.1.1" - -babel-plugin-polyfill-corejs3@^0.1.3: - version "0.1.7" - resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.1.7.tgz" - integrity sha512-u+gbS9bbPhZWEeyy1oR/YaaSpod/KDT07arZHb80aTpl8H5ZBq+uN1nN9/xtX7jQyfLdPfoqI4Rue/MQSWJquw== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.1.5" - core-js-compat "^3.8.1" - -babel-plugin-polyfill-regenerator@^0.1.2: - version "0.1.6" - resolved "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.1.6.tgz" - integrity sha512-OUrYG9iKPKz8NxswXbRAdSwF0GhRdIEMTloQATJi4bDuFqrXaXcCUT/VGNrr8pBcjMh1RxZ7Xt9cytVJTJfvMg== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.1.5" - babel-plugin-syntax-async-functions@^6.8.0: version "6.13.0" resolved "https://registry.npmjs.org/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz" @@ -7047,14 +6809,6 @@ bip66@^1.1.5: dependencies: safe-buffer "^5.0.1" -bl@^1.0.0: - version "1.2.3" - resolved "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz" - integrity sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww== - dependencies: - readable-stream "^2.3.5" - safe-buffer "^5.1.1" - bl@^4.1.0: version "4.1.0" resolved "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz" @@ -7069,7 +6823,7 @@ blakejs@^1.1.0: resolved "https://registry.npmjs.org/blakejs/-/blakejs-1.1.0.tgz" integrity sha1-ad+S75U6qIylGjLfarHFShVfx6U= -bluebird@3.7.2, bluebird@^3.4.7, bluebird@^3.5.0, bluebird@^3.5.1, bluebird@^3.5.2, bluebird@^3.5.3, bluebird@^3.5.5: 
+bluebird@3.7.2, bluebird@^3.5.0, bluebird@^3.5.1, bluebird@^3.5.2, bluebird@^3.5.3, bluebird@^3.5.5: version "3.7.2" resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== @@ -7079,11 +6833,6 @@ bn.js@4.11.6: resolved "https://registry.npmjs.org/bn.js/-/bn.js-4.11.6.tgz" integrity sha1-UzRK2xRhehP26N0s4okF0cC6MhU= -bn.js@4.11.8: - version "4.11.8" - resolved "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz" - integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== - bn.js@5.2.1, bn.js@^5.2.0: version "5.2.1" resolved "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz" @@ -7398,16 +7147,6 @@ browserslist@^4.14.5: node-releases "^2.0.13" update-browserslist-db "^1.0.13" -browserslist@^4.16.3, browserslist@^4.21.9: - version "4.21.9" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.9.tgz#e11bdd3c313d7e2a9e87e8b4b0c7872b13897635" - integrity sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg== - dependencies: - caniuse-lite "^1.0.30001503" - electron-to-chromium "^1.4.431" - node-releases "^2.0.12" - update-browserslist-db "^1.0.11" - browserslist@^4.16.6: version "4.16.6" resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.16.6.tgz" @@ -7419,6 +7158,16 @@ browserslist@^4.16.6: escalade "^3.1.1" node-releases "^1.1.71" +browserslist@^4.21.9: + version "4.21.9" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.9.tgz#e11bdd3c313d7e2a9e87e8b4b0c7872b13897635" + integrity sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg== + dependencies: + caniuse-lite "^1.0.30001503" + electron-to-chromium "^1.4.431" + node-releases "^2.0.12" + update-browserslist-db "^1.0.11" + bs-logger@0.x: version "0.2.6" resolved "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz" @@ -7454,29 +7203,6 @@ btoa-lite@^1.0.0: resolved "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz" integrity sha1-M3dm2hWAEhD92VbCLpxokaudAzc= -btoa@^1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz" - integrity sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g== - -buffer-alloc-unsafe@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz" - integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== - -buffer-alloc@^1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz" - integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== - dependencies: - buffer-alloc-unsafe "^1.1.0" - buffer-fill "^1.0.0" - -buffer-crc32@~0.2.3: - version "0.2.13" - resolved "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz" - integrity sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI= - buffer-equal-constant-time@1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" @@ -7487,11 +7213,6 @@ buffer-equal@0.0.1: resolved "https://registry.npmjs.org/buffer-equal/-/buffer-equal-0.0.1.tgz" integrity sha1-kbx0sR6kBbyRa8aqkI+q+ltKrEs= -buffer-fill@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz" - integrity 
sha1-+PeLdniYiO858gXNY39o5wISKyw= - buffer-from@1.x: version "1.1.1" resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz" @@ -7870,12 +7591,7 @@ cbor@^8.1.0: dependencies: nofilter "^3.1.0" -chai-bn@^0.2.1: - version "0.2.1" - resolved "https://registry.npmjs.org/chai-bn/-/chai-bn-0.2.1.tgz" - integrity sha512-01jt2gSXAw7UYFPT5K8d7HYjdXj2vyeIuE+0T/34FWzlNcVbs1JkPxRu7rYMfQnJhrHT8Nr6qjSf5ZwwLU2EYg== - -chai@4.3.4, chai@^4.2.0: +chai@4.3.4: version "4.3.4" resolved "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz" integrity sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA== @@ -8323,7 +8039,7 @@ clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" -clone@2.1.2, clone@^2.0.0, clone@^2.1.1: +clone@2.1.2, clone@^2.0.0: version "2.1.2" resolved "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= @@ -8454,7 +8170,7 @@ commander@3.0.2, commander@^3.0.2: resolved "https://registry.npmjs.org/commander/-/commander-3.0.2.tgz" integrity sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow== -commander@^2.20.0, commander@^2.8.1, commander@^2.9.0: +commander@^2.20.0, commander@^2.9.0: version "2.20.3" resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== @@ -8730,14 +8446,6 @@ copy-descriptor@^0.1.0: resolved "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= -core-js-compat@^3.8.1: - version "3.9.1" - resolved "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.9.1.tgz" - integrity sha512-jXAirMQxrkbiiLsCx9bQPJFA6llDadKMpYrBJQJ3/c4/vsPP/fAf29h24tviRlvwUL6AmY5CHLu2GvjuYviQqA== - dependencies: - browserslist "^4.16.3" - semver "7.0.0" - core-js-pure@^3.0.1: version "3.15.2" resolved "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.15.2.tgz" @@ -9181,59 +8889,6 @@ decompress-response@^3.2.0, decompress-response@^3.3.0: dependencies: mimic-response "^1.0.0" -decompress-tar@^4.0.0, decompress-tar@^4.1.0, decompress-tar@^4.1.1: - version "4.1.1" - resolved "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz" - integrity sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ== - dependencies: - file-type "^5.2.0" - is-stream "^1.1.0" - tar-stream "^1.5.2" - -decompress-tarbz2@^4.0.0: - version "4.1.1" - resolved "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz" - integrity sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A== - dependencies: - decompress-tar "^4.1.0" - file-type "^6.1.0" - is-stream "^1.1.0" - seek-bzip "^1.0.5" - unbzip2-stream "^1.0.9" - -decompress-targz@^4.0.0: - version "4.1.1" - resolved "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz" - integrity sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w== - dependencies: - decompress-tar "^4.1.1" - file-type "^5.2.0" - is-stream "^1.1.0" - -decompress-unzip@^4.0.1: - version "4.0.1" - resolved "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz" - integrity sha1-3qrM39FK6vhVePczroIQ+bSEj2k= - dependencies: - file-type "^3.8.0" - get-stream "^2.2.0" - pify "^2.3.0" - yauzl "^2.4.2" - -decompress@^4.0.0: - version "4.2.1" - resolved 
"https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz" - integrity sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ== - dependencies: - decompress-tar "^4.0.0" - decompress-tarbz2 "^4.0.0" - decompress-targz "^4.0.0" - decompress-unzip "^4.0.1" - graceful-fs "^4.1.10" - make-dir "^1.0.0" - pify "^2.3.0" - strip-dirs "^2.0.0" - dedent@^0.7.0: version "0.7.0" resolved "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz" @@ -9762,16 +9417,6 @@ elegant-spinner@^1.0.1: resolved "https://registry.npmjs.org/elegant-spinner/-/elegant-spinner-1.0.1.tgz" integrity sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4= -elliptic@6.3.3: - version "6.3.3" - resolved "https://registry.npmjs.org/elliptic/-/elliptic-6.3.3.tgz" - integrity sha1-VILZZG1UvLif19mU/J4ulWiHbj8= - dependencies: - bn.js "^4.4.0" - brorand "^1.0.1" - hash.js "^1.0.0" - inherits "^2.0.1" - elliptic@6.5.3: version "6.5.3" resolved "https://registry.npmjs.org/elliptic/-/elliptic-6.5.3.tgz" @@ -10425,19 +10070,7 @@ eth-block-tracker@^3.0.0: pify "^2.3.0" tape "^4.6.3" -eth-block-tracker@^4.4.2: - version "4.4.3" - resolved "https://registry.npmjs.org/eth-block-tracker/-/eth-block-tracker-4.4.3.tgz" - integrity sha512-A8tG4Z4iNg4mw5tP1Vung9N9IjgMNqpiMoJ/FouSFwNCGHv2X0mmOYwtQOJzki6XN7r7Tyo01S29p7b224I4jw== - dependencies: - "@babel/plugin-transform-runtime" "^7.5.5" - "@babel/runtime" "^7.5.5" - eth-query "^2.1.0" - json-rpc-random-id "^1.0.1" - pify "^3.0.0" - safe-event-emitter "^1.0.1" - -eth-ens-namehash@2.0.8, eth-ens-namehash@^2.0.0, eth-ens-namehash@^2.0.8: +eth-ens-namehash@2.0.8, eth-ens-namehash@^2.0.8: version "2.0.8" resolved "https://registry.npmjs.org/eth-ens-namehash/-/eth-ens-namehash-2.0.8.tgz" integrity sha1-IprEbsqG1S4MmR58sq74P/D2i88= @@ -10445,20 +10078,6 @@ eth-ens-namehash@2.0.8, eth-ens-namehash@^2.0.0, eth-ens-namehash@^2.0.8: idna-uts46-hx "^2.3.1" js-sha3 "^0.5.7" -eth-json-rpc-errors@^1.0.1: - version "1.1.1" - resolved "https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-1.1.1.tgz" - integrity sha512-WT5shJ5KfNqHi9jOZD+ID8I1kuYWNrigtZat7GOQkvwo99f8SzAVaEcWhJUv656WiZOAg3P1RiJQANtUmDmbIg== - dependencies: - fast-safe-stringify "^2.0.6" - -eth-json-rpc-errors@^2.0.2: - version "2.0.2" - resolved "https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-2.0.2.tgz" - integrity sha512-uBCRM2w2ewusRHGxN8JhcuOb2RN3ueAOYH/0BhqdFmQkZx5lj5+fLKTz0mIVOzd4FG5/kUksCzCD7eTEim6gaA== - dependencies: - fast-safe-stringify "^2.0.6" - eth-json-rpc-infura@^3.1.0: version "3.2.1" resolved "https://registry.npmjs.org/eth-json-rpc-infura/-/eth-json-rpc-infura-3.2.1.tgz" @@ -10488,16 +10107,7 @@ eth-json-rpc-middleware@^1.5.0: promise-to-callback "^1.0.0" tape "^4.6.3" -eth-lib@0.2.7: - version "0.2.7" - resolved "https://registry.npmjs.org/eth-lib/-/eth-lib-0.2.7.tgz" - integrity sha1-L5Pxex4jrsN1nNSj/iDBKGo/wco= - dependencies: - bn.js "^4.11.6" - elliptic "^6.4.0" - xhr-request-promise "^0.1.2" - -eth-lib@0.2.8, eth-lib@^0.2.8: +eth-lib@0.2.8: version "0.2.8" resolved "https://registry.npmjs.org/eth-lib/-/eth-lib-0.2.8.tgz" integrity sha512-ArJ7x1WcWOlSpzdoTBX8vkwlkSQ85CjjifSZtV4co64vWxSV8geWfPI9x4SVYu3DSxnX4yWFVTtGL+j9DUFLNw== @@ -10526,13 +10136,6 @@ eth-query@^2.0.2, eth-query@^2.1.0, eth-query@^2.1.2: json-rpc-random-id "^1.0.0" xtend "^4.0.1" -eth-rpc-errors@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/eth-rpc-errors/-/eth-rpc-errors-3.0.0.tgz" - integrity 
sha512-iPPNHPrLwUlR9xCSYm7HHQjWBasor3+KZfRvwEWxMz3ca0yqnlBeJrnyphkGIXZ4J7AMAaOLmwy4AWhnxOiLxg== - dependencies: - fast-safe-stringify "^2.0.6" - eth-sig-util@3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/eth-sig-util/-/eth-sig-util-3.0.0.tgz" @@ -10627,23 +10230,6 @@ ethereum-cryptography@^1.0.3: "@scure/bip32" "1.1.5" "@scure/bip39" "1.1.1" -ethereum-ens@^0.8.0: - version "0.8.0" - resolved "https://registry.npmjs.org/ethereum-ens/-/ethereum-ens-0.8.0.tgz" - integrity sha512-a8cBTF4AWw1Q1Y37V1LSCS9pRY4Mh3f8vCg5cbXCCEJ3eno1hbI/+Ccv9SZLISYpqQhaglP3Bxb/34lS4Qf7Bg== - dependencies: - bluebird "^3.4.7" - eth-ens-namehash "^2.0.0" - js-sha3 "^0.5.7" - pako "^1.0.4" - underscore "^1.8.3" - web3 "^1.0.0-beta.34" - -ethereum-protocol@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/ethereum-protocol/-/ethereum-protocol-1.0.1.tgz" - integrity sha512-3KLX1mHuEsBW0dKG+c6EOJS1NBNqdCICvZW9sInmZTt5aY0oxmHVggYRE0lJu1tcnMD1K+AKHdLi6U43Awm1Vg== - ethereum-waffle@3.4.0: version "3.4.0" resolved "https://registry.npmjs.org/ethereum-waffle/-/ethereum-waffle-3.4.0.tgz" @@ -10752,7 +10338,7 @@ ethereumjs-tx@2.1.2, ethereumjs-tx@^2.1.1, ethereumjs-tx@^2.1.2: ethereumjs-common "^1.5.0" ethereumjs-util "^6.0.0" -ethereumjs-tx@^1.0.0, ethereumjs-tx@^1.1.1, ethereumjs-tx@^1.2.0, ethereumjs-tx@^1.2.2, ethereumjs-tx@^1.3.3, ethereumjs-tx@^1.3.7: +ethereumjs-tx@^1.1.1, ethereumjs-tx@^1.2.0, ethereumjs-tx@^1.2.2, ethereumjs-tx@^1.3.3: version "1.3.7" resolved "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz" integrity sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA== @@ -10896,36 +10482,6 @@ ethereumjs-wallet@0.6.5: utf8 "^3.0.0" uuid "^3.3.2" -ethereumjs-wallet@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/ethereumjs-wallet/-/ethereumjs-wallet-1.0.1.tgz" - integrity sha512-3Z5g1hG1das0JWU6cQ9HWWTY2nt9nXCcwj7eXVNAHKbo00XAZO8+NHlwdgXDWrL0SXVQMvTWN8Q/82DRH/JhPw== - dependencies: - aes-js "^3.1.1" - bs58check "^2.1.2" - ethereum-cryptography "^0.1.3" - ethereumjs-util "^7.0.2" - randombytes "^2.0.6" - scrypt-js "^3.0.1" - utf8 "^3.0.0" - uuid "^3.3.2" - -ethers@4.0.0-beta.3: - version "4.0.0-beta.3" - resolved "https://registry.npmjs.org/ethers/-/ethers-4.0.0-beta.3.tgz" - integrity sha512-YYPogooSknTwvHg3+Mv71gM/3Wcrx+ZpCzarBj3mqs9njjRkrOo2/eufzhHloOCo3JSoNI4TQJJ6yU5ABm3Uog== - dependencies: - "@types/node" "^10.3.2" - aes-js "3.0.0" - bn.js "^4.4.0" - elliptic "6.3.3" - hash.js "1.1.3" - js-sha3 "0.5.7" - scrypt-js "2.0.3" - setimmediate "1.0.4" - uuid "2.0.1" - xmlhttprequest "1.8.0" - ethers@5.5.1, ethers@^5.0.0: version "5.5.1" resolved "https://registry.npmjs.org/ethers/-/ethers-5.5.1.tgz" @@ -10962,7 +10518,7 @@ ethers@5.5.1, ethers@^5.0.0: "@ethersproject/web" "5.5.0" "@ethersproject/wordlists" "5.5.0" -ethers@^4.0.0-beta.1, ethers@^4.0.32: +ethers@^4.0.32: version "4.0.48" resolved "https://registry.npmjs.org/ethers/-/ethers-4.0.48.tgz" integrity sha512-sZD5K8H28dOrcidzx9f8KYh8083n5BexIO3+SbE4jK83L85FxtpXZBCQdXb8gkg+7sBqomcLhhkU7UHL+F7I2g== @@ -11121,15 +10677,6 @@ ethers@^5.5.0: "@ethersproject/web" "5.5.1" "@ethersproject/wordlists" "5.5.0" -ethjs-abi@^0.2.1: - version "0.2.1" - resolved "https://registry.npmjs.org/ethjs-abi/-/ethjs-abi-0.2.1.tgz" - integrity sha1-4KepOn6BFjqUR3utVu3lJKtt5TM= - dependencies: - bn.js "4.11.6" - js-sha3 "0.5.5" - number-to-bn "1.7.0" - ethjs-unit@0.1.6: version "0.1.6" resolved "https://registry.npmjs.org/ethjs-unit/-/ethjs-unit-0.1.6.tgz" @@ -11172,25 +10719,15 @@ 
event-target-shim@^5.0.0: resolved "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== -eventemitter3@3.1.2, eventemitter3@^3.1.0: - version "3.1.2" - resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-3.1.2.tgz" - integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== - eventemitter3@4.0.4: version "4.0.4" resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.4.tgz" integrity sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ== -eventemitter3@^4.0.0: - version "4.0.7" - resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz" - integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - -events@3.2.0: - version "3.2.0" - resolved "https://registry.npmjs.org/events/-/events-3.2.0.tgz" - integrity sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg== +eventemitter3@^3.1.0: + version "3.1.2" + resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-3.1.2.tgz" + integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== events@^3.0.0, events@^3.2.0: version "3.3.0" @@ -11261,16 +10798,6 @@ exit@^0.1.2: resolved "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= -exorcist@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/exorcist/-/exorcist-1.0.1.tgz" - integrity sha1-eTFuPEiFhFSQ97tAXA5bXbEWfFI= - dependencies: - is-stream "~1.1.0" - minimist "0.0.5" - mkdirp "~0.5.1" - mold-source-map "~0.4.0" - expand-brackets@^0.1.4: version "0.1.5" resolved "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz" @@ -11504,7 +11031,7 @@ fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= -fast-safe-stringify@^2.0.6, fast-safe-stringify@^2.0.7: +fast-safe-stringify@^2.0.7: version "2.0.7" resolved "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz" integrity sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA== @@ -11548,13 +11075,6 @@ fbjs@^3.0.0: setimmediate "^1.0.5" ua-parser-js "^0.7.18" -fd-slicer@~1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz" - integrity sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4= - dependencies: - pend "~1.2.0" - fetch-ponyfill@^4.0.0: version "4.1.0" resolved "https://registry.npmjs.org/fetch-ponyfill/-/fetch-ponyfill-4.1.0.tgz" @@ -11603,21 +11123,6 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" -file-type@^3.8.0: - version "3.9.0" - resolved "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz" - integrity sha1-JXoHg4TR24CHvESdEH1SpSZyuek= - -file-type@^5.2.0: - version "5.2.0" - resolved "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz" - integrity sha1-LdvqfHP/42No365J3DOMBYwritY= - -file-type@^6.1.0: - version "6.2.0" - resolved "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz" - integrity sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg== - file-uri-to-path@1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz" 
@@ -11977,11 +11482,6 @@ fromentries@^1.2.0: resolved "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz" integrity sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg== -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - fs-extra@^0.30.0: version "0.30.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz" @@ -12229,14 +11729,6 @@ get-stdin@^4.0.1: resolved "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz" integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= -get-stream@^2.2.0: - version "2.3.1" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz" - integrity sha1-Xzj5PzRgCWZu4BUKBUFn+Rvdld4= - dependencies: - object-assign "^4.0.1" - pinkie-promise "^2.0.0" - get-stream@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz" @@ -12560,7 +12052,7 @@ got@^7.1.0: url-parse-lax "^1.0.0" url-to-options "^1.0.1" -graceful-fs@^4.1.10, graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0, graceful-fs@^4.2.2, graceful-fs@^4.2.4: +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0, graceful-fs@^4.2.2, graceful-fs@^4.2.4: version "4.2.6" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== @@ -13724,11 +13216,6 @@ is-lower-case@^2.0.2: dependencies: tslib "^2.0.3" -is-natural-number@^4.0.1: - version "4.0.1" - resolved "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz" - integrity sha1-q5124dtM7VHjXeDHLr7PCfc0zeg= - is-negated-glob@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/is-negated-glob/-/is-negated-glob-1.0.0.tgz" @@ -13876,7 +13363,7 @@ is-ssh@^1.3.0: dependencies: protocols "^1.1.0" -is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0, is-stream@~1.1.0: +is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= @@ -14515,11 +14002,6 @@ js-sha256@^0.9.0: resolved "https://registry.npmjs.org/js-sha256/-/js-sha256-0.9.0.tgz" integrity sha512-sga3MHh9sgQN2+pJ9VYZ+1LPwXOxuBJBA5nrR5/ofPfuiJBE2hnjsaN8se8JznOmGLN2p49Pe5U/ttafcs/apA== -js-sha3@0.5.5: - version "0.5.5" - resolved "https://registry.npmjs.org/js-sha3/-/js-sha3-0.5.5.tgz" - integrity sha1-uvDA6MVK1ZA0R9+Wreekobynmko= - js-sha3@0.5.7, js-sha3@^0.5.7: version "0.5.7" resolved "https://registry.npmjs.org/js-sha3/-/js-sha3-0.5.7.tgz" @@ -14654,14 +14136,6 @@ json-rpc-engine@^3.4.0, json-rpc-engine@^3.6.0: promise-to-callback "^1.0.0" safe-event-emitter "^1.0.1" -json-rpc-engine@^5.1.3: - version "5.4.0" - resolved "https://registry.npmjs.org/json-rpc-engine/-/json-rpc-engine-5.4.0.tgz" - integrity sha512-rAffKbPoNDjuRnXkecTjnsE3xLLrb00rEkdgalINhaYVYIxDwWtvYBr9UFbhTvPB1B2qUOLoFd/cV6f4Q7mh7g== - dependencies: - eth-rpc-errors "^3.0.0" - safe-event-emitter "^1.0.1" - json-rpc-error@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/json-rpc-error/-/json-rpc-error-2.0.0.tgz" @@ -14669,7 +14143,7 @@ json-rpc-error@^2.0.0: dependencies: inherits "^2.0.1" -json-rpc-random-id@^1.0.0, 
json-rpc-random-id@^1.0.1: +json-rpc-random-id@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/json-rpc-random-id/-/json-rpc-random-id-1.0.1.tgz" integrity sha1-uknZat7RRE27jaPSA3SKy7zeyMg= @@ -15368,21 +14842,6 @@ lodash.clonedeep@^4.5.0: resolved "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz" integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= -lodash.debounce@^4.0.8: - version "4.0.8" - resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz" - integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168= - -lodash.flatmap@^4.5.0: - version "4.5.0" - resolved "https://registry.npmjs.org/lodash.flatmap/-/lodash.flatmap-4.5.0.tgz" - integrity sha1-74y/QI9uSCaGYzRTBcaswLd4cC4= - -lodash.flatten@^4.4.0: - version "4.4.0" - resolved "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz" - integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= - lodash.flattendeep@^4.4.0: version "4.4.0" resolved "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz" @@ -16164,11 +15623,6 @@ minimist-options@^3.0.1: arrify "^1.0.1" is-plain-obj "^1.1.0" -minimist@0.0.5: - version "0.0.5" - resolved "https://registry.npmjs.org/minimist/-/minimist-0.0.5.tgz" - integrity sha1-16oye87PUY+RBqxrjwA/o7zqhWY= - minimist@0.0.8: version "0.0.8" resolved "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" @@ -16288,7 +15742,7 @@ mkdirp@0.5.1: dependencies: minimist "0.0.8" -mkdirp@0.5.x, mkdirp@^0.5.1, mkdirp@^0.5.5, mkdirp@~0.5.1: +mkdirp@0.5.x, mkdirp@^0.5.1, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== @@ -16381,14 +15835,6 @@ module-error@^1.0.1, module-error@^1.0.2: resolved "https://registry.npmjs.org/module-error/-/module-error-1.0.2.tgz" integrity sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA== -mold-source-map@~0.4.0: - version "0.4.0" - resolved "https://registry.npmjs.org/mold-source-map/-/mold-source-map-0.4.0.tgz" - integrity sha1-z2fgsxxHq5uttcnCVlGGISe7gxc= - dependencies: - convert-source-map "^1.1.0" - through "~2.2.7" - morgan@^1.9.1: version "1.10.0" resolved "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz" @@ -17485,7 +16931,7 @@ pako@^0.2.5: resolved "https://registry.npmjs.org/pako/-/pako-0.2.9.tgz" integrity sha1-8/dSL073gjSNqBYbrZ7P1Rv4OnU= -pako@^1.0.4, pako@~1.0.5: +pako@~1.0.5: version "1.0.11" resolved "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== @@ -17880,11 +17326,6 @@ pegjs@^0.10.0: resolved "https://registry.npmjs.org/pegjs/-/pegjs-0.10.0.tgz" integrity sha1-z4uvrm7d/0tafvsYUmnqr0YQ3b0= -pend@~1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz" - integrity sha1-elfrVQpng/kRUzH89GY9XI4AelA= - performance-now@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" @@ -18434,11 +17875,6 @@ randomfill@^1.0.3: randombytes "^2.0.5" safe-buffer "^5.1.0" -randomhex@0.1.5: - version "0.1.5" - resolved "https://registry.npmjs.org/randomhex/-/randomhex-0.1.5.tgz" - integrity sha1-us7vmCMpCRQA8qKRLGzQLxCU9YU= - range-parser@~1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" @@ -18594,7 +18030,7 @@ read@1, read@~1.0.1: dependencies: mute-stream "~0.0.4" -"readable-stream@1 || 
2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.2.8, readable-stream@^2.2.9, readable-stream@^2.3.0, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.3, readable-stream@~2.3.6: +"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.2.8, readable-stream@^2.2.9, readable-stream@^2.3.6, readable-stream@~2.3.3, readable-stream@~2.3.6: version "2.3.7" resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -19013,7 +18449,7 @@ resolve@1.17.0, resolve@~1.17.0: dependencies: path-parse "^1.0.6" -resolve@^1.0.0, resolve@^1.1.5, resolve@^1.1.6, resolve@^1.10.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.20.0, resolve@^1.8.1: +resolve@^1.0.0, resolve@^1.1.5, resolve@^1.1.6, resolve@^1.10.0, resolve@^1.13.1, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.20.0, resolve@^1.8.1: version "1.20.0" resolved "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== @@ -19180,6 +18616,13 @@ rxjs@^7.2.0: dependencies: tslib "^2.1.0" +rxjs@^7.5.2: + version "7.8.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== + dependencies: + tslib "^2.1.0" + safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" @@ -19274,11 +18717,6 @@ scope-analyzer@^2.0.1: estree-is-function "^1.0.0" get-assigned-identifiers "^1.1.0" -scrypt-js@2.0.3: - version "2.0.3" - resolved "https://registry.npmjs.org/scrypt-js/-/scrypt-js-2.0.3.tgz" - integrity sha1-uwBAvgMEPamgEqLOqfyfhSz8h9Q= - scrypt-js@2.0.4: version "2.0.4" resolved "https://registry.npmjs.org/scrypt-js/-/scrypt-js-2.0.4.tgz" @@ -19289,11 +18727,6 @@ scrypt-js@3.0.1, scrypt-js@^3.0.0, scrypt-js@^3.0.1: resolved "https://registry.npmjs.org/scrypt-js/-/scrypt-js-3.0.1.tgz" integrity sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA== -scryptsy@2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/scryptsy/-/scryptsy-2.1.0.tgz" - integrity sha512-1CdSqHQowJBnMAFyPEBRfqag/YP9OF394FV+4YREIJX4ljD7OxvQRDayyoyyCk+senRjSkP6VnUNQmVQqB6g7w== - scryptsy@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/scryptsy/-/scryptsy-1.2.1.tgz" @@ -19334,13 +18767,6 @@ seedrandom@3.0.1: resolved "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.1.tgz" integrity sha512-1/02Y/rUeU1CJBAGLebiC5Lbo5FnB22gQbIFFYTLkwvp1xdABZJH1sn4ZT1MzXmPpzv+Rf/Lu2NcsLJiK4rcDg== -seek-bzip@^1.0.5: - version "1.0.6" - resolved "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz" - integrity sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ== - dependencies: - commander "^2.8.1" - semaphore@>=1.0.1, semaphore@^1.0.3, semaphore@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/semaphore/-/semaphore-1.1.0.tgz" @@ -19356,16 +18782,6 @@ 
semver-compare@^1.0.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== -semver@6.2.0: - version "6.2.0" - resolved "https://registry.npmjs.org/semver/-/semver-6.2.0.tgz" - integrity sha512-jdFC1VdUGT/2Scgbimf7FSx9iJLXoqfglSF+gJeuNWVpiE37OIbc1jywR/GJyFdz3mnkz2/id0L0J/cr0izR5A== - -semver@7.0.0: - version "7.0.0" - resolved "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz" - integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== - semver@7.5.2, semver@7.x, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: version "7.5.2" resolved "https://registry.npmjs.org/semver/-/semver-7.5.2.tgz" @@ -19373,7 +18789,7 @@ semver@7.5.2, semver@7.x, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7 dependencies: lru-cache "^6.0.0" -semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: +semver@^6.0.0, semver@^6.1.0, semver@^6.2.0, semver@^6.3.0: version "6.3.0" resolved "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== @@ -19948,7 +19364,7 @@ source-map-support@^0.4.15: dependencies: source-map "^0.5.6" -source-map-support@^0.5.12, source-map-support@^0.5.13, source-map-support@^0.5.16, source-map-support@^0.5.17, source-map-support@^0.5.19, source-map-support@^0.5.6, source-map-support@~0.5.12, source-map-support@~0.5.20: +source-map-support@^0.5.12, source-map-support@^0.5.13, source-map-support@^0.5.17, source-map-support@^0.5.6, source-map-support@~0.5.12, source-map-support@~0.5.20: version "0.5.21" resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== @@ -20431,13 +19847,6 @@ strip-bom@^4.0.0: resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== -strip-dirs@^2.0.0: - version "2.1.0" - resolved "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz" - integrity sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g== - dependencies: - is-natural-number "^4.0.1" - strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz" @@ -20618,24 +20027,6 @@ swap-case@^2.0.2: dependencies: tslib "^2.0.3" -swarm-js@0.1.39: - version "0.1.39" - resolved "https://registry.npmjs.org/swarm-js/-/swarm-js-0.1.39.tgz" - integrity sha512-QLMqL2rzF6n5s50BptyD6Oi0R1aWlJC5Y17SRIVXRj6OR1DRIPM7nepvrxxkjA1zNzFz6mUOMjfeqeDaWB7OOg== - dependencies: - bluebird "^3.5.0" - buffer "^5.0.5" - decompress "^4.0.0" - eth-lib "^0.1.26" - fs-extra "^4.0.2" - got "^7.1.0" - mime-types "^2.1.16" - mkdirp-promise "^5.0.1" - mock-fs "^4.1.0" - setimmediate "^1.0.5" - tar "^4.0.2" - xhr-request-promise "^0.1.2" - swarm-js@^0.1.40: version "0.1.40" resolved "https://registry.npmjs.org/swarm-js/-/swarm-js-0.1.40.tgz" @@ -20740,19 +20131,6 @@ tape@^4.6.3: string.prototype.trim "~1.2.1" through "~2.3.8" -tar-stream@^1.5.2: - version "1.6.2" - resolved "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz" - integrity sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A== - 
dependencies: - bl "^1.0.0" - buffer-alloc "^1.2.0" - end-of-stream "^1.0.0" - fs-constants "^1.0.0" - readable-stream "^2.3.0" - to-buffer "^1.1.1" - xtend "^4.0.0" - tar@^4.0.2, tar@^4.4.10, tar@^4.4.12, tar@^4.4.8: version "4.4.19" resolved "https://registry.npmjs.org/tar/-/tar-4.4.19.tgz" @@ -20940,11 +20318,6 @@ through@2, "through@>=2.2.7 <3", through@^2.3.4, through@^2.3.6, through@^2.3.8, resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= -through@~2.2.7: - version "2.2.7" - resolved "https://registry.npmjs.org/through/-/through-2.2.7.tgz" - integrity sha1-bo4hIAGR1OtqmfbwEN9Gqhxusr0= - time-stamp@^1.0.0: version "1.1.0" resolved "https://registry.npmjs.org/time-stamp/-/time-stamp-1.1.0.tgz" @@ -21001,11 +20374,6 @@ tmpl@1.0.x: resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz" integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== -to-buffer@^1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz" - integrity sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg== - to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz" @@ -21135,11 +20503,6 @@ trim-right@^1.0.1: resolved "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= -try-require@^1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/try-require/-/try-require-1.2.1.tgz" - integrity sha1-NEiaLKwMCcHMEO2RugEVlNQzO+I= - try-to-catch@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/try-to-catch/-/try-to-catch-3.0.0.tgz" @@ -21500,10 +20863,12 @@ typechain@^3.0.0: ts-essentials "^6.0.3" ts-generator "^0.1.1" -typed-emitter@1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/typed-emitter/-/typed-emitter-1.4.0.tgz" - integrity sha512-weBmoo3HhpKGgLBOYwe8EB31CzDFuaK7CCL+axXhUYhn4jo6DSkHnbefboCF5i4DQ2aMFe0C/FdTWcPdObgHyg== +typed-emitter@2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/typed-emitter/-/typed-emitter-2.1.0.tgz#ca78e3d8ef1476f228f548d62e04e3d4d3fd77fb" + integrity sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA== + optionalDependencies: + rxjs "^7.5.2" typedarray-to-buffer@^3.1.5: version "3.1.5" @@ -21604,14 +20969,6 @@ unbox-primitive@^1.0.0: has-symbols "^1.0.0" which-boxed-primitive "^1.0.1" -unbzip2-stream@^1.0.9: - version "1.4.3" - resolved "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz" - integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== - dependencies: - buffer "^5.2.1" - through "^2.3.8" - unc-path-regex@^0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz" @@ -21628,7 +20985,7 @@ undeclared-identifiers@^1.1.2: simple-concat "^1.0.0" xtend "^4.0.1" -underscore@1.12.1, underscore@^1.8.3: +underscore@1.12.1: version "1.12.1" resolved "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz" integrity sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw== @@ -22057,15 +21414,6 @@ wcwidth@^1.0.0, wcwidth@^1.0.1: dependencies: defaults "^1.0.3" -web3-bzz@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-bzz/-/web3-bzz-1.2.1.tgz" - integrity 
sha512-LdOO44TuYbGIPfL4ilkuS89GQovxUpmLz6C1UC7VYVVRILeZS740FVB3j9V4P4FHUk1RenaDfKhcntqgVCHtjw== - dependencies: - got "9.6.0" - swarm-js "0.1.39" - underscore "1.9.1" - web3-bzz@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-bzz/-/web3-bzz-1.2.11.tgz" @@ -22076,26 +21424,6 @@ web3-bzz@1.2.11: swarm-js "^0.1.40" underscore "1.9.1" -web3-bzz@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-bzz/-/web3-bzz-1.2.9.tgz" - integrity sha512-ogVQr9jHodu9HobARtvUSmWG22cv2EUQzlPeejGWZ7j5h20HX40EDuWyomGY5VclIj5DdLY76Tmq88RTf/6nxA== - dependencies: - "@types/node" "^10.12.18" - got "9.6.0" - swarm-js "^0.1.40" - underscore "1.9.1" - -web3-bzz@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-bzz/-/web3-bzz-1.3.4.tgz" - integrity sha512-DBRVQB8FAgoAtZCpp2GAGPCJjgBgsuwOKEasjV044AAZiONpXcKHbkO6G1SgItIixnrJsRJpoGLGw52Byr6FKw== - dependencies: - "@types/node" "^12.12.6" - got "9.6.0" - swarm-js "^0.1.40" - underscore "1.9.1" - web3-bzz@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-bzz/-/web3-bzz-1.5.3.tgz" @@ -22114,15 +21442,6 @@ web3-bzz@1.7.3: got "9.6.0" swarm-js "^0.1.40" -web3-core-helpers@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-core-helpers/-/web3-core-helpers-1.2.1.tgz" - integrity sha512-Gx3sTEajD5r96bJgfuW377PZVFmXIH4TdqDhgGwd2lZQCcMi+DA4TgxJNJGxn0R3aUVzyyE76j4LBrh412mXrw== - dependencies: - underscore "1.9.1" - web3-eth-iban "1.2.1" - web3-utils "1.2.1" - web3-core-helpers@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-core-helpers/-/web3-core-helpers-1.2.11.tgz" @@ -22132,24 +21451,6 @@ web3-core-helpers@1.2.11: web3-eth-iban "1.2.11" web3-utils "1.2.11" -web3-core-helpers@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-core-helpers/-/web3-core-helpers-1.2.9.tgz" - integrity sha512-t0WAG3orLCE3lqi77ZoSRNFok3VQWZXTniZigDQjyOJYMAX7BU3F3js8HKbjVnAxlX3tiKoDxI0KBk9F3AxYuw== - dependencies: - underscore "1.9.1" - web3-eth-iban "1.2.9" - web3-utils "1.2.9" - -web3-core-helpers@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-core-helpers/-/web3-core-helpers-1.3.4.tgz" - integrity sha512-n7BqDalcTa1stncHMmrnFtyTgDhX5Fy+avNaHCf6qcOP2lwTQC8+mdHVBONWRJ6Yddvln+c8oY/TAaB6PzWK0A== - dependencies: - underscore "1.9.1" - web3-eth-iban "1.3.4" - web3-utils "1.3.4" - web3-core-helpers@1.3.6: version "1.3.6" resolved "https://registry.npmjs.org/web3-core-helpers/-/web3-core-helpers-1.3.6.tgz" @@ -22167,14 +21468,6 @@ web3-core-helpers@1.5.3: web3-eth-iban "1.5.3" web3-utils "1.5.3" -web3-core-helpers@1.6.1: - version "1.6.1" - resolved "https://registry.npmjs.org/web3-core-helpers/-/web3-core-helpers-1.6.1.tgz" - integrity sha512-om2PZvK1uoWcgMq6JfcSx3241LEIVF6qi2JuHz2SLKiKEW5UsBUaVx0mNCmcZaiuYQCyOsLS3r33q5AdM+v8ng== - dependencies: - web3-eth-iban "1.6.1" - web3-utils "1.6.1" - web3-core-helpers@1.7.3: version "1.7.3" resolved "https://registry.npmjs.org/web3-core-helpers/-/web3-core-helpers-1.7.3.tgz" @@ -22183,17 +21476,6 @@ web3-core-helpers@1.7.3: web3-eth-iban "1.7.3" web3-utils "1.7.3" -web3-core-method@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-core-method/-/web3-core-method-1.2.1.tgz" - integrity sha512-Ghg2WS23qi6Xj8Od3VCzaImLHseEA7/usvnOItluiIc5cKs00WYWsNy2YRStzU9a2+z8lwQywPYp0nTzR/QXdQ== - dependencies: - underscore "1.9.1" - web3-core-helpers "1.2.1" - web3-core-promievent "1.2.1" - web3-core-subscriptions "1.2.1" - web3-utils "1.2.1" - web3-core-method@1.2.11: version "1.2.11" resolved 
"https://registry.npmjs.org/web3-core-method/-/web3-core-method-1.2.11.tgz" @@ -22206,42 +21488,6 @@ web3-core-method@1.2.11: web3-core-subscriptions "1.2.11" web3-utils "1.2.11" -web3-core-method@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-core-method/-/web3-core-method-1.2.9.tgz" - integrity sha512-bjsIoqP3gs7A/gP8+QeLUCyOKJ8bopteCSNbCX36Pxk6TYfYWNuC6hP+2GzUuqdP3xaZNe+XEElQFUNpR3oyAg== - dependencies: - "@ethersproject/transactions" "^5.0.0-beta.135" - underscore "1.9.1" - web3-core-helpers "1.2.9" - web3-core-promievent "1.2.9" - web3-core-subscriptions "1.2.9" - web3-utils "1.2.9" - -web3-core-method@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-core-method/-/web3-core-method-1.3.4.tgz" - integrity sha512-JxmQrujsAWYRRN77P/RY7XuZDCzxSiiQJrgX/60Lfyf7FF1Y0le4L/UMCi7vUJnuYkbU1Kfl9E0udnqwyPqlvQ== - dependencies: - "@ethersproject/transactions" "^5.0.0-beta.135" - underscore "1.9.1" - web3-core-helpers "1.3.4" - web3-core-promievent "1.3.4" - web3-core-subscriptions "1.3.4" - web3-utils "1.3.4" - -web3-core-method@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-core-method/-/web3-core-method-1.3.6.tgz" - integrity sha512-RyegqVGxn0cyYW5yzAwkPlsSEynkdPiegd7RxgB4ak1eKk2Cv1q2x4C7D2sZjeeCEF+q6fOkVmo2OZNqS2iQxg== - dependencies: - "@ethersproject/transactions" "^5.0.0-beta.135" - underscore "1.12.1" - web3-core-helpers "1.3.6" - web3-core-promievent "1.3.6" - web3-core-subscriptions "1.3.6" - web3-utils "1.3.6" - web3-core-method@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-core-method/-/web3-core-method-1.5.3.tgz" @@ -22265,14 +21511,6 @@ web3-core-method@1.7.3: web3-core-subscriptions "1.7.3" web3-utils "1.7.3" -web3-core-promievent@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-core-promievent/-/web3-core-promievent-1.2.1.tgz" - integrity sha512-IVUqgpIKoeOYblwpex4Hye6npM0aMR+kU49VP06secPeN0rHMyhGF0ZGveWBrGvf8WDPI7jhqPBFIC6Jf3Q3zw== - dependencies: - any-promise "1.3.0" - eventemitter3 "3.1.2" - web3-core-promievent@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-core-promievent/-/web3-core-promievent-1.2.11.tgz" @@ -22280,27 +21518,6 @@ web3-core-promievent@1.2.11: dependencies: eventemitter3 "4.0.4" -web3-core-promievent@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-core-promievent/-/web3-core-promievent-1.2.9.tgz" - integrity sha512-0eAUA2zjgXTleSrnc1wdoKQPPIHU6KHf4fAscu4W9kKrR+mqP1KsjYrxY9wUyjNnXxfQ+5M29ipvbiaK8OqdOw== - dependencies: - eventemitter3 "3.1.2" - -web3-core-promievent@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-core-promievent/-/web3-core-promievent-1.3.4.tgz" - integrity sha512-V61dZIeBwogg6hhZZUt0qL9hTp1WDhnsdjP++9fhTDr4vy/Gz8T5vibqT2LLg6lQC8i+Py33yOpMeMNjztaUaw== - dependencies: - eventemitter3 "4.0.4" - -web3-core-promievent@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-core-promievent/-/web3-core-promievent-1.3.6.tgz" - integrity sha512-Z+QzfyYDTXD5wJmZO5wwnRO8bAAHEItT1XNSPVb4J1CToV/I/SbF7CuF8Uzh2jns0Cm1109o666H7StFFvzVKw== - dependencies: - eventemitter3 "4.0.4" - web3-core-promievent@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-core-promievent/-/web3-core-promievent-1.5.3.tgz" @@ -22315,17 +21532,6 @@ web3-core-promievent@1.7.3: dependencies: eventemitter3 "4.0.4" -web3-core-requestmanager@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-core-requestmanager/-/web3-core-requestmanager-1.2.1.tgz" - integrity 
sha512-xfknTC69RfYmLKC+83Jz73IC3/sS2ZLhGtX33D4Q5nQ8yc39ElyAolxr9sJQS8kihOcM6u4J+8gyGMqsLcpIBg== - dependencies: - underscore "1.9.1" - web3-core-helpers "1.2.1" - web3-providers-http "1.2.1" - web3-providers-ipc "1.2.1" - web3-providers-ws "1.2.1" - web3-core-requestmanager@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-core-requestmanager/-/web3-core-requestmanager-1.2.11.tgz" @@ -22337,41 +21543,6 @@ web3-core-requestmanager@1.2.11: web3-providers-ipc "1.2.11" web3-providers-ws "1.2.11" -web3-core-requestmanager@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-core-requestmanager/-/web3-core-requestmanager-1.2.9.tgz" - integrity sha512-1PwKV2m46ALUnIN5VPPgjOj8yMLJhhqZYvYJE34hTN5SErOkwhzx5zScvo5MN7v7KyQGFnpVCZKKGCiEnDmtFA== - dependencies: - underscore "1.9.1" - web3-core-helpers "1.2.9" - web3-providers-http "1.2.9" - web3-providers-ipc "1.2.9" - web3-providers-ws "1.2.9" - -web3-core-requestmanager@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-core-requestmanager/-/web3-core-requestmanager-1.3.4.tgz" - integrity sha512-xriouCrhVnVDYQ04TZXdEREZm0OOJzkSEsoN5bu4JYsA6e/HzROeU+RjDpMUxFMzN4wxmFZ+HWbpPndS3QwMag== - dependencies: - underscore "1.9.1" - util "^0.12.0" - web3-core-helpers "1.3.4" - web3-providers-http "1.3.4" - web3-providers-ipc "1.3.4" - web3-providers-ws "1.3.4" - -web3-core-requestmanager@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-core-requestmanager/-/web3-core-requestmanager-1.3.6.tgz" - integrity sha512-2rIaeuqeo7QN1Eex7aXP0ZqeteJEPWXYFS/M3r3LXMiV8R4STQBKE+//dnHJXoo2ctzEB5cgd+7NaJM8S3gPyA== - dependencies: - underscore "1.12.1" - util "^0.12.0" - web3-core-helpers "1.3.6" - web3-providers-http "1.3.6" - web3-providers-ipc "1.3.6" - web3-providers-ws "1.3.6" - web3-core-requestmanager@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-core-requestmanager/-/web3-core-requestmanager-1.5.3.tgz" @@ -22394,15 +21565,6 @@ web3-core-requestmanager@1.7.3: web3-providers-ipc "1.7.3" web3-providers-ws "1.7.3" -web3-core-subscriptions@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-core-subscriptions/-/web3-core-subscriptions-1.2.1.tgz" - integrity sha512-nmOwe3NsB8V8UFsY1r+sW6KjdOS68h8nuh7NzlWxBQT/19QSUGiERRTaZXWu5BYvo1EoZRMxCKyCQpSSXLc08g== - dependencies: - eventemitter3 "3.1.2" - underscore "1.9.1" - web3-core-helpers "1.2.1" - web3-core-subscriptions@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-core-subscriptions/-/web3-core-subscriptions-1.2.11.tgz" @@ -22412,33 +21574,6 @@ web3-core-subscriptions@1.2.11: underscore "1.9.1" web3-core-helpers "1.2.11" -web3-core-subscriptions@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-core-subscriptions/-/web3-core-subscriptions-1.2.9.tgz" - integrity sha512-Y48TvXPSPxEM33OmXjGVDMzTd0j8X0t2+sDw66haeBS8eYnrEzasWuBZZXDq0zNUsqyxItgBGDn+cszkgEnFqg== - dependencies: - eventemitter3 "3.1.2" - underscore "1.9.1" - web3-core-helpers "1.2.9" - -web3-core-subscriptions@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-core-subscriptions/-/web3-core-subscriptions-1.3.4.tgz" - integrity sha512-drVHVDxh54hv7xmjIm44g4IXjfGj022fGw4/meB5R2D8UATFI40F73CdiBlyqk3DysP9njDOLTJFSQvEkLFUOg== - dependencies: - eventemitter3 "4.0.4" - underscore "1.9.1" - web3-core-helpers "1.3.4" - -web3-core-subscriptions@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-core-subscriptions/-/web3-core-subscriptions-1.3.6.tgz" - integrity 
sha512-wi9Z9X5X75OKvxAg42GGIf81ttbNR2TxzkAsp1g+nnp5K8mBwgZvXrIsDuj7Z7gx72Y45mWJADCWjk/2vqNu8g== - dependencies: - eventemitter3 "4.0.4" - underscore "1.12.1" - web3-core-helpers "1.3.6" - web3-core-subscriptions@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-core-subscriptions/-/web3-core-subscriptions-1.5.3.tgz" @@ -22455,16 +21590,6 @@ web3-core-subscriptions@1.7.3: eventemitter3 "4.0.4" web3-core-helpers "1.7.3" -web3-core@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-core/-/web3-core-1.2.1.tgz" - integrity sha512-5ODwIqgl8oIg/0+Ai4jsLxkKFWJYE0uLuE1yUKHNVCL4zL6n3rFjRMpKPokd6id6nJCNgeA64KdWQ4XfpnjdMg== - dependencies: - web3-core-helpers "1.2.1" - web3-core-method "1.2.1" - web3-core-requestmanager "1.2.1" - web3-utils "1.2.1" - web3-core@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-core/-/web3-core-1.2.11.tgz" @@ -22478,45 +21603,6 @@ web3-core@1.2.11: web3-core-requestmanager "1.2.11" web3-utils "1.2.11" -web3-core@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-core/-/web3-core-1.2.9.tgz" - integrity sha512-fSYv21IP658Ty2wAuU9iqmW7V+75DOYMVZsDH/c14jcF/1VXnedOcxzxSj3vArsCvXZNe6XC5/wAuGZyQwR9RA== - dependencies: - "@types/bn.js" "^4.11.4" - "@types/node" "^12.6.1" - bignumber.js "^9.0.0" - web3-core-helpers "1.2.9" - web3-core-method "1.2.9" - web3-core-requestmanager "1.2.9" - web3-utils "1.2.9" - -web3-core@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-core/-/web3-core-1.3.4.tgz" - integrity sha512-7OJu46RpCEfTerl+gPvHXANR2RkLqAfW7l2DAvQ7wN0pnCzl9nEfdgW6tMhr31k3TR2fWucwKzCyyxMGzMHeSA== - dependencies: - "@types/bn.js" "^4.11.5" - "@types/node" "^12.12.6" - bignumber.js "^9.0.0" - web3-core-helpers "1.3.4" - web3-core-method "1.3.4" - web3-core-requestmanager "1.3.4" - web3-utils "1.3.4" - -web3-core@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-core/-/web3-core-1.3.6.tgz" - integrity sha512-gkLDM4T1Sc0T+HZIwxrNrwPg0IfWI0oABSglP2X5ZbBAYVUeEATA0o92LWV8BeF+okvKXLK1Fek/p6axwM/h3Q== - dependencies: - "@types/bn.js" "^4.11.5" - "@types/node" "^12.12.6" - bignumber.js "^9.0.0" - web3-core-helpers "1.3.6" - web3-core-method "1.3.6" - web3-core-requestmanager "1.3.6" - web3-utils "1.3.6" - web3-core@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-core/-/web3-core-1.5.3.tgz" @@ -22543,15 +21629,6 @@ web3-core@1.7.3: web3-core-requestmanager "1.7.3" web3-utils "1.7.3" -web3-eth-abi@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-eth-abi/-/web3-eth-abi-1.2.1.tgz" - integrity sha512-jI/KhU2a/DQPZXHjo2GW0myEljzfiKOn+h1qxK1+Y9OQfTcBMxrQJyH5AP89O6l6NZ1QvNdq99ThAxBFoy5L+g== - dependencies: - ethers "4.0.0-beta.3" - underscore "1.9.1" - web3-utils "1.2.1" - web3-eth-abi@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-eth-abi/-/web3-eth-abi-1.2.11.tgz" @@ -22561,33 +21638,6 @@ web3-eth-abi@1.2.11: underscore "1.9.1" web3-utils "1.2.11" -web3-eth-abi@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-eth-abi/-/web3-eth-abi-1.2.9.tgz" - integrity sha512-3YwUYbh/DMfDbhMWEebAdjSd5bj3ZQieOjLzWFHU23CaLEqT34sUix1lba+hgUH/EN6A7bKAuKOhR3p0OvTn7Q== - dependencies: - "@ethersproject/abi" "5.0.0-beta.153" - underscore "1.9.1" - web3-utils "1.2.9" - -web3-eth-abi@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-eth-abi/-/web3-eth-abi-1.3.4.tgz" - integrity sha512-PVSLXJ2dzdXsC+R24llIIEOS6S1KhG5qwNznJjJvXZFe3sqgdSe47eNvwUamZtCBjcrdR/HQr+L/FTxqJSf80Q== - dependencies: - "@ethersproject/abi" 
"5.0.7" - underscore "1.9.1" - web3-utils "1.3.4" - -web3-eth-abi@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-eth-abi/-/web3-eth-abi-1.3.6.tgz" - integrity sha512-Or5cRnZu6WzgScpmbkvC6bfNxR26hqiKK4i8sMPFeTUABQcb/FU3pBj7huBLYbp9dH+P5W79D2MqwbWwjj9DoQ== - dependencies: - "@ethersproject/abi" "5.0.7" - underscore "1.12.1" - web3-utils "1.3.6" - web3-eth-abi@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-eth-abi/-/web3-eth-abi-1.5.3.tgz" @@ -22604,23 +21654,6 @@ web3-eth-abi@1.7.3: "@ethersproject/abi" "5.0.7" web3-utils "1.7.3" -web3-eth-accounts@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-eth-accounts/-/web3-eth-accounts-1.2.1.tgz" - integrity sha512-26I4qq42STQ8IeKUyur3MdQ1NzrzCqPsmzqpux0j6X/XBD7EjZ+Cs0lhGNkSKH5dI3V8CJasnQ5T1mNKeWB7nQ== - dependencies: - any-promise "1.3.0" - crypto-browserify "3.12.0" - eth-lib "0.2.7" - scryptsy "2.1.0" - semver "6.2.0" - underscore "1.9.1" - uuid "3.3.2" - web3-core "1.2.1" - web3-core-helpers "1.2.1" - web3-core-method "1.2.1" - web3-utils "1.2.1" - web3-eth-accounts@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-eth-accounts/-/web3-eth-accounts-1.2.11.tgz" @@ -22638,57 +21671,6 @@ web3-eth-accounts@1.2.11: web3-core-method "1.2.11" web3-utils "1.2.11" -web3-eth-accounts@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-eth-accounts/-/web3-eth-accounts-1.2.9.tgz" - integrity sha512-jkbDCZoA1qv53mFcRHCinoCsgg8WH+M0YUO1awxmqWXRmCRws1wW0TsuSQ14UThih5Dxolgl+e+aGWxG58LMwg== - dependencies: - crypto-browserify "3.12.0" - eth-lib "^0.2.8" - ethereumjs-common "^1.3.2" - ethereumjs-tx "^2.1.1" - scrypt-js "^3.0.1" - underscore "1.9.1" - uuid "3.3.2" - web3-core "1.2.9" - web3-core-helpers "1.2.9" - web3-core-method "1.2.9" - web3-utils "1.2.9" - -web3-eth-accounts@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-eth-accounts/-/web3-eth-accounts-1.3.4.tgz" - integrity sha512-gz9ReSmQEjqbYAjpmAx+UZF4CVMbyS4pfjSYWGAnNNI+Xz0f0u0kCIYXQ1UEaE+YeLcYiE+ZlZdgg6YoatO5nA== - dependencies: - crypto-browserify "3.12.0" - eth-lib "0.2.8" - ethereumjs-common "^1.3.2" - ethereumjs-tx "^2.1.1" - scrypt-js "^3.0.1" - underscore "1.9.1" - uuid "3.3.2" - web3-core "1.3.4" - web3-core-helpers "1.3.4" - web3-core-method "1.3.4" - web3-utils "1.3.4" - -web3-eth-accounts@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-eth-accounts/-/web3-eth-accounts-1.3.6.tgz" - integrity sha512-Ilr0hG6ONbCdSlVKffasCmNwftD5HsNpwyQASevocIQwHdTlvlwO0tb3oGYuajbKOaDzNTwXfz25bttAEoFCGA== - dependencies: - crypto-browserify "3.12.0" - eth-lib "0.2.8" - ethereumjs-common "^1.3.2" - ethereumjs-tx "^2.1.1" - scrypt-js "^3.0.1" - underscore "1.12.1" - uuid "3.3.2" - web3-core "1.3.6" - web3-core-helpers "1.3.6" - web3-core-method "1.3.6" - web3-utils "1.3.6" - web3-eth-accounts@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-eth-accounts/-/web3-eth-accounts-1.5.3.tgz" @@ -22723,20 +21705,6 @@ web3-eth-accounts@1.7.3: web3-core-method "1.7.3" web3-utils "1.7.3" -web3-eth-contract@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-eth-contract/-/web3-eth-contract-1.2.1.tgz" - integrity sha512-kYFESbQ3boC9bl2rYVghj7O8UKMiuKaiMkxvRH5cEDHil8V7MGEGZNH0slSdoyeftZVlaWSMqkRP/chfnKND0g== - dependencies: - underscore "1.9.1" - web3-core "1.2.1" - web3-core-helpers "1.2.1" - web3-core-method "1.2.1" - web3-core-promievent "1.2.1" - web3-core-subscriptions "1.2.1" - web3-eth-abi "1.2.1" - web3-utils "1.2.1" - web3-eth-contract@1.2.11: version 
"1.2.11" resolved "https://registry.npmjs.org/web3-eth-contract/-/web3-eth-contract-1.2.11.tgz" @@ -22752,51 +21720,6 @@ web3-eth-contract@1.2.11: web3-eth-abi "1.2.11" web3-utils "1.2.11" -web3-eth-contract@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-eth-contract/-/web3-eth-contract-1.2.9.tgz" - integrity sha512-PYMvJf7EG/HyssUZa+pXrc8IB06K/YFfWYyW4R7ed3sab+9wWUys1TlWxBCBuiBXOokSAyM6H6P6/cKEx8FT8Q== - dependencies: - "@types/bn.js" "^4.11.4" - underscore "1.9.1" - web3-core "1.2.9" - web3-core-helpers "1.2.9" - web3-core-method "1.2.9" - web3-core-promievent "1.2.9" - web3-core-subscriptions "1.2.9" - web3-eth-abi "1.2.9" - web3-utils "1.2.9" - -web3-eth-contract@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-eth-contract/-/web3-eth-contract-1.3.4.tgz" - integrity sha512-Fvy8ZxUksQY2ePt+XynFfOiSqxgQtMn4m2NJs6VXRl2Inl17qyRi/nIJJVKTcENLocm+GmZ/mxq2eOE5u02nPg== - dependencies: - "@types/bn.js" "^4.11.5" - underscore "1.9.1" - web3-core "1.3.4" - web3-core-helpers "1.3.4" - web3-core-method "1.3.4" - web3-core-promievent "1.3.4" - web3-core-subscriptions "1.3.4" - web3-eth-abi "1.3.4" - web3-utils "1.3.4" - -web3-eth-contract@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-eth-contract/-/web3-eth-contract-1.3.6.tgz" - integrity sha512-8gDaRrLF2HCg+YEZN1ov0zN35vmtPnGf3h1DxmJQK5Wm2lRMLomz9rsWsuvig3UJMHqZAQKD7tOl3ocJocQsmA== - dependencies: - "@types/bn.js" "^4.11.5" - underscore "1.12.1" - web3-core "1.3.6" - web3-core-helpers "1.3.6" - web3-core-method "1.3.6" - web3-core-promievent "1.3.6" - web3-core-subscriptions "1.3.6" - web3-eth-abi "1.3.6" - web3-utils "1.3.6" - web3-eth-contract@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-eth-contract/-/web3-eth-contract-1.5.3.tgz" @@ -22825,20 +21748,6 @@ web3-eth-contract@1.7.3: web3-eth-abi "1.7.3" web3-utils "1.7.3" -web3-eth-ens@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-eth-ens/-/web3-eth-ens-1.2.1.tgz" - integrity sha512-lhP1kFhqZr2nnbu3CGIFFrAnNxk2veXpOXBY48Tub37RtobDyHijHgrj+xTh+mFiPokyrapVjpFsbGa+Xzye4Q== - dependencies: - eth-ens-namehash "2.0.8" - underscore "1.9.1" - web3-core "1.2.1" - web3-core-helpers "1.2.1" - web3-core-promievent "1.2.1" - web3-eth-abi "1.2.1" - web3-eth-contract "1.2.1" - web3-utils "1.2.1" - web3-eth-ens@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-eth-ens/-/web3-eth-ens-1.2.11.tgz" @@ -22854,51 +21763,6 @@ web3-eth-ens@1.2.11: web3-eth-contract "1.2.11" web3-utils "1.2.11" -web3-eth-ens@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-eth-ens/-/web3-eth-ens-1.2.9.tgz" - integrity sha512-kG4+ZRgZ8I1WYyOBGI8QVRHfUSbbJjvJAGA1AF/NOW7JXQ+x7gBGeJw6taDWJhSshMoEKWcsgvsiuoG4870YxQ== - dependencies: - content-hash "^2.5.2" - eth-ens-namehash "2.0.8" - underscore "1.9.1" - web3-core "1.2.9" - web3-core-helpers "1.2.9" - web3-core-promievent "1.2.9" - web3-eth-abi "1.2.9" - web3-eth-contract "1.2.9" - web3-utils "1.2.9" - -web3-eth-ens@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-eth-ens/-/web3-eth-ens-1.3.4.tgz" - integrity sha512-b0580tQyQwpV2wyacwQiBEfQmjCUln5iPhge3IBIMXaI43BUNtH3lsCL9ERFQeOdweB4o+6rYyNYr6xbRcSytg== - dependencies: - content-hash "^2.5.2" - eth-ens-namehash "2.0.8" - underscore "1.9.1" - web3-core "1.3.4" - web3-core-helpers "1.3.4" - web3-core-promievent "1.3.4" - web3-eth-abi "1.3.4" - web3-eth-contract "1.3.4" - web3-utils "1.3.4" - -web3-eth-ens@1.3.6: - version "1.3.6" - resolved 
"https://registry.npmjs.org/web3-eth-ens/-/web3-eth-ens-1.3.6.tgz" - integrity sha512-n27HNj7lpSkRxTgSx+Zo7cmKAgyg2ElFilaFlUu/X2CNH23lXfcPm2bWssivH9z0ndhg0OyR4AYFZqPaqDHkJA== - dependencies: - content-hash "^2.5.2" - eth-ens-namehash "2.0.8" - underscore "1.12.1" - web3-core "1.3.6" - web3-core-helpers "1.3.6" - web3-core-promievent "1.3.6" - web3-eth-abi "1.3.6" - web3-eth-contract "1.3.6" - web3-utils "1.3.6" - web3-eth-ens@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-eth-ens/-/web3-eth-ens-1.5.3.tgz" @@ -22927,14 +21791,6 @@ web3-eth-ens@1.7.3: web3-eth-contract "1.7.3" web3-utils "1.7.3" -web3-eth-iban@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-eth-iban/-/web3-eth-iban-1.2.1.tgz" - integrity sha512-9gkr4QPl1jCU+wkgmZ8EwODVO3ovVj6d6JKMos52ggdT2YCmlfvFVF6wlGLwi0VvNa/p+0BjJzaqxnnG/JewjQ== - dependencies: - bn.js "4.11.8" - web3-utils "1.2.1" - web3-eth-iban@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-eth-iban/-/web3-eth-iban-1.2.11.tgz" @@ -22943,22 +21799,6 @@ web3-eth-iban@1.2.11: bn.js "^4.11.9" web3-utils "1.2.11" -web3-eth-iban@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-eth-iban/-/web3-eth-iban-1.2.9.tgz" - integrity sha512-RtdVvJE0pyg9dHLy0GzDiqgnLnssSzfz/JYguhC1wsj9+Gnq1M6Diy3NixACWUAp6ty/zafyOaZnNQ+JuH9TjQ== - dependencies: - bn.js "4.11.8" - web3-utils "1.2.9" - -web3-eth-iban@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-eth-iban/-/web3-eth-iban-1.3.4.tgz" - integrity sha512-Y7/hLjVvIN/OhaAyZ8L/hxbTqVX6AFTl2RwUXR6EEU9oaLydPcMjAx/Fr8mghUvQS3QJSr+UGubP3W4SkyNiYw== - dependencies: - bn.js "^4.11.9" - web3-utils "1.3.4" - web3-eth-iban@1.3.6: version "1.3.6" resolved "https://registry.npmjs.org/web3-eth-iban/-/web3-eth-iban-1.3.6.tgz" @@ -22975,14 +21815,6 @@ web3-eth-iban@1.5.3: bn.js "^4.11.9" web3-utils "1.5.3" -web3-eth-iban@1.6.1: - version "1.6.1" - resolved "https://registry.npmjs.org/web3-eth-iban/-/web3-eth-iban-1.6.1.tgz" - integrity sha512-91H0jXZnWlOoXmc13O9NuQzcjThnWyAHyDn5Yf7u6mmKOhpJSGF/OHlkbpXt1Y4v2eJdEPaVFa+6i8aRyagE7Q== - dependencies: - bn.js "^4.11.9" - web3-utils "1.6.1" - web3-eth-iban@1.7.3: version "1.7.3" resolved "https://registry.npmjs.org/web3-eth-iban/-/web3-eth-iban-1.7.3.tgz" @@ -22991,17 +21823,6 @@ web3-eth-iban@1.7.3: bn.js "^4.11.9" web3-utils "1.7.3" -web3-eth-personal@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-eth-personal/-/web3-eth-personal-1.2.1.tgz" - integrity sha512-RNDVSiaSoY4aIp8+Hc7z+X72H7lMb3fmAChuSBADoEc7DsJrY/d0R5qQDK9g9t2BO8oxgLrLNyBP/9ub2Hc6Bg== - dependencies: - web3-core "1.2.1" - web3-core-helpers "1.2.1" - web3-core-method "1.2.1" - web3-net "1.2.1" - web3-utils "1.2.1" - web3-eth-personal@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-eth-personal/-/web3-eth-personal-1.2.11.tgz" @@ -23014,42 +21835,6 @@ web3-eth-personal@1.2.11: web3-net "1.2.11" web3-utils "1.2.11" -web3-eth-personal@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-eth-personal/-/web3-eth-personal-1.2.9.tgz" - integrity sha512-cFiNrktxZ1C/rIdJFzQTvFn3/0zcsR3a+Jf8Y3KxeQDHszQtosjLWptP7bsUmDwEh4hzh0Cy3KpOxlYBWB8bJQ== - dependencies: - "@types/node" "^12.6.1" - web3-core "1.2.9" - web3-core-helpers "1.2.9" - web3-core-method "1.2.9" - web3-net "1.2.9" - web3-utils "1.2.9" - -web3-eth-personal@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-eth-personal/-/web3-eth-personal-1.3.4.tgz" - integrity 
sha512-JiTbaktYVk1j+S2EDooXAhw5j/VsdvZfKRmHtXUe/HizPM9ETXmj1+ne4RT6m+950jQ7DJwUF3XU1FKYNtEDwQ== - dependencies: - "@types/node" "^12.12.6" - web3-core "1.3.4" - web3-core-helpers "1.3.4" - web3-core-method "1.3.4" - web3-net "1.3.4" - web3-utils "1.3.4" - -web3-eth-personal@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-eth-personal/-/web3-eth-personal-1.3.6.tgz" - integrity sha512-pOHU0+/h1RFRYoh1ehYBehRbcKWP4OSzd4F7mDljhHngv6W8ewMHrAN8O1ol9uysN2MuCdRE19qkRg5eNgvzFQ== - dependencies: - "@types/node" "^12.12.6" - web3-core "1.3.6" - web3-core-helpers "1.3.6" - web3-core-method "1.3.6" - web3-net "1.3.6" - web3-utils "1.3.6" - web3-eth-personal@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-eth-personal/-/web3-eth-personal-1.5.3.tgz" @@ -23074,25 +21859,6 @@ web3-eth-personal@1.7.3: web3-net "1.7.3" web3-utils "1.7.3" -web3-eth@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-eth/-/web3-eth-1.2.1.tgz" - integrity sha512-/2xly4Yry5FW1i+uygPjhfvgUP/MS/Dk+PDqmzp5M88tS86A+j8BzKc23GrlA8sgGs0645cpZK/999LpEF5UdA== - dependencies: - underscore "1.9.1" - web3-core "1.2.1" - web3-core-helpers "1.2.1" - web3-core-method "1.2.1" - web3-core-subscriptions "1.2.1" - web3-eth-abi "1.2.1" - web3-eth-accounts "1.2.1" - web3-eth-contract "1.2.1" - web3-eth-ens "1.2.1" - web3-eth-iban "1.2.1" - web3-eth-personal "1.2.1" - web3-net "1.2.1" - web3-utils "1.2.1" - web3-eth@1.2.11: version "1.2.11" resolved "https://registry.npmjs.org/web3-eth/-/web3-eth-1.2.11.tgz" @@ -23112,63 +21878,6 @@ web3-eth@1.2.11: web3-net "1.2.11" web3-utils "1.2.11" -web3-eth@1.2.9: - version "1.2.9" - resolved "https://registry.npmjs.org/web3-eth/-/web3-eth-1.2.9.tgz" - integrity sha512-sIKO4iE9FEBa/CYUd6GdPd7GXt/wISqxUd8PlIld6+hvMJj02lgO7Z7p5T9mZIJcIZJGvZX81ogx8oJ9yif+Ag== - dependencies: - underscore "1.9.1" - web3-core "1.2.9" - web3-core-helpers "1.2.9" - web3-core-method "1.2.9" - web3-core-subscriptions "1.2.9" - web3-eth-abi "1.2.9" - web3-eth-accounts "1.2.9" - web3-eth-contract "1.2.9" - web3-eth-ens "1.2.9" - web3-eth-iban "1.2.9" - web3-eth-personal "1.2.9" - web3-net "1.2.9" - web3-utils "1.2.9" - -web3-eth@1.3.4: - version "1.3.4" - resolved "https://registry.npmjs.org/web3-eth/-/web3-eth-1.3.4.tgz" - integrity sha512-8OIVMLbvmx+LB5RZ4tDhXuFGWSdNMrCZ4HM0+PywQ08uEcmAcqTMFAn4vdPii+J8gCatZR501r1KdzX3SDLoPw== - dependencies: - underscore "1.9.1" - web3-core "1.3.4" - web3-core-helpers "1.3.4" - web3-core-method "1.3.4" - web3-core-subscriptions "1.3.4" - web3-eth-abi "1.3.4" - web3-eth-accounts "1.3.4" - web3-eth-contract "1.3.4" - web3-eth-ens "1.3.4" - web3-eth-iban "1.3.4" - web3-eth-personal "1.3.4" - web3-net "1.3.4" - web3-utils "1.3.4" - -web3-eth@1.3.6: - version "1.3.6" - resolved "https://registry.npmjs.org/web3-eth/-/web3-eth-1.3.6.tgz" - integrity sha512-9+rnywRRpyX3C4hfsAQXPQh6vHh9XzQkgLxo3gyeXfbhbShUoq2gFVuy42vsRs//6JlsKdyZS7Z3hHPHz2wreA== - dependencies: - underscore "1.12.1" - web3-core "1.3.6" - web3-core-helpers "1.3.6" - web3-core-method "1.3.6" - web3-core-subscriptions "1.3.6" - web3-eth-abi "1.3.6" - web3-eth-accounts "1.3.6" - web3-eth-contract "1.3.6" - web3-eth-ens "1.3.6" - web3-eth-iban "1.3.6" - web3-eth-personal "1.3.6" - web3-net "1.3.6" - web3-utils "1.3.6" - web3-eth@1.5.3: version "1.5.3" resolved "https://registry.npmjs.org/web3-eth/-/web3-eth-1.5.3.tgz" @@ -23205,15 +21914,6 @@ web3-eth@1.7.3: web3-net "1.7.3" web3-utils "1.7.3" -web3-net@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/web3-net/-/web3-net-1.2.1.tgz" 
-  integrity sha512-Yt1Bs7WgnLESPe0rri/ZoPWzSy55ovioaP35w1KZydrNtQ5Yq4WcrAdhBzcOW7vAkIwrsLQsvA+hrOCy7mNauw==
-  dependencies:
-    web3-core "1.2.1"
-    web3-core-method "1.2.1"
-    web3-utils "1.2.1"
-
 web3-net@1.2.11:
   version "1.2.11"
   resolved "https://registry.npmjs.org/web3-net/-/web3-net-1.2.11.tgz"
@@ -23223,33 +21923,6 @@ web3-net@1.2.11:
     web3-core-method "1.2.11"
     web3-utils "1.2.11"
 
-web3-net@1.2.9:
-  version "1.2.9"
-  resolved "https://registry.npmjs.org/web3-net/-/web3-net-1.2.9.tgz"
-  integrity sha512-d2mTn8jPlg+SI2hTj2b32Qan6DmtU9ap/IUlJTeQbZQSkTLf0u9suW8Vjwyr4poJYXTurdSshE7OZsPNn30/ZA==
-  dependencies:
-    web3-core "1.2.9"
-    web3-core-method "1.2.9"
-    web3-utils "1.2.9"
-
-web3-net@1.3.4:
-  version "1.3.4"
-  resolved "https://registry.npmjs.org/web3-net/-/web3-net-1.3.4.tgz"
-  integrity sha512-wVyqgVC3Zt/0uGnBiR3GpnsS8lvOFTDgWZMxAk9C6Guh8aJD9MUc7pbsw5rHrPUVe6S6RUfFJvh/Xq8oMIQgSw==
-  dependencies:
-    web3-core "1.3.4"
-    web3-core-method "1.3.4"
-    web3-utils "1.3.4"
-
-web3-net@1.3.6:
-  version "1.3.6"
-  resolved "https://registry.npmjs.org/web3-net/-/web3-net-1.3.6.tgz"
-  integrity sha512-KhzU3wMQY/YYjyMiQzbaLPt2kut88Ncx2iqjy3nw28vRux3gVX0WOCk9EL/KVJBiAA/fK7VklTXvgy9dZnnipw==
-  dependencies:
-    web3-core "1.3.6"
-    web3-core-method "1.3.6"
-    web3-utils "1.3.6"
-
 web3-net@1.5.3:
   version "1.5.3"
   resolved "https://registry.npmjs.org/web3-net/-/web3-net-1.5.3.tgz"
@@ -23294,14 +21967,6 @@ web3-provider-engine@14.2.1:
     xhr "^2.2.0"
     xtend "^4.0.1"
 
-web3-providers-http@1.2.1:
-  version "1.2.1"
-  resolved "https://registry.npmjs.org/web3-providers-http/-/web3-providers-http-1.2.1.tgz"
-  integrity sha512-BDtVUVolT9b3CAzeGVA/np1hhn7RPUZ6YYGB/sYky+GjeO311Yoq8SRDUSezU92x8yImSC2B+SMReGhd1zL+bQ==
-  dependencies:
-    web3-core-helpers "1.2.1"
-    xhr2-cookies "1.1.0"
-
 web3-providers-http@1.2.11:
   version "1.2.11"
   resolved "https://registry.npmjs.org/web3-providers-http/-/web3-providers-http-1.2.11.tgz"
@@ -23310,22 +21975,6 @@ web3-providers-http@1.2.11:
     web3-core-helpers "1.2.11"
     xhr2-cookies "1.1.0"
 
-web3-providers-http@1.2.9:
-  version "1.2.9"
-  resolved "https://registry.npmjs.org/web3-providers-http/-/web3-providers-http-1.2.9.tgz"
-  integrity sha512-F956tCIj60Ttr0UvEHWFIhx+be3He8msoPzyA44/kfzzYoMAsCFRn5cf0zQG6al0znE75g6HlWVSN6s3yAh51A==
-  dependencies:
-    web3-core-helpers "1.2.9"
-    xhr2-cookies "1.1.0"
-
-web3-providers-http@1.3.4:
-  version "1.3.4"
-  resolved "https://registry.npmjs.org/web3-providers-http/-/web3-providers-http-1.3.4.tgz"
-  integrity sha512-aIg/xHXvxpqpFU70sqfp+JC3sGkLfAimRKTUhG4oJZ7U+tTcYTHoxBJj+4A3Id4JAoKiiv0k1/qeyQ8f3rMC3g==
-  dependencies:
-    web3-core-helpers "1.3.4"
-    xhr2-cookies "1.1.0"
-
 web3-providers-http@1.3.6:
   version "1.3.6"
   resolved "https://registry.npmjs.org/web3-providers-http/-/web3-providers-http-1.3.6.tgz"
@@ -23350,15 +21999,6 @@ web3-providers-http@1.7.3:
     web3-core-helpers "1.7.3"
     xhr2-cookies "1.1.0"
 
-web3-providers-ipc@1.2.1:
-  version "1.2.1"
-  resolved "https://registry.npmjs.org/web3-providers-ipc/-/web3-providers-ipc-1.2.1.tgz"
-  integrity sha512-oPEuOCwxVx8L4CPD0TUdnlOUZwGBSRKScCz/Ws2YHdr9Ium+whm+0NLmOZjkjQp5wovQbyBzNa6zJz1noFRvFA==
-  dependencies:
-    oboe "2.1.4"
-    underscore "1.9.1"
-    web3-core-helpers "1.2.1"
-
 web3-providers-ipc@1.2.11:
   version "1.2.11"
   resolved "https://registry.npmjs.org/web3-providers-ipc/-/web3-providers-ipc-1.2.11.tgz"
@@ -23368,33 +22008,6 @@ web3-providers-ipc@1.2.11:
     oboe "2.1.4"
     underscore "1.9.1"
     web3-core-helpers "1.2.11"
 
-web3-providers-ipc@1.2.9:
-  version "1.2.9"
-  resolved "https://registry.npmjs.org/web3-providers-ipc/-/web3-providers-ipc-1.2.9.tgz"
-  integrity sha512-NQ8QnBleoHA2qTJlqoWu7EJAD/FR5uimf7Ielzk4Z2z+m+6UAuJdJMSuQNj+Umhz9L/Ys6vpS1vHx9NizFl+aQ==
-  dependencies:
-    oboe "2.1.4"
-    underscore "1.9.1"
-    web3-core-helpers "1.2.9"
-
-web3-providers-ipc@1.3.4:
-  version "1.3.4"
-  resolved "https://registry.npmjs.org/web3-providers-ipc/-/web3-providers-ipc-1.3.4.tgz"
-  integrity sha512-E0CvXEJElr/TIlG1YfJeO3Le5NI/4JZM+1SsEdiPIfBUAJN18oOoum138EBGKv5+YaLKZUtUuJSXWjIIOR/0Ig==
-  dependencies:
-    oboe "2.1.5"
-    underscore "1.9.1"
-    web3-core-helpers "1.3.4"
-
-web3-providers-ipc@1.3.6:
-  version "1.3.6"
-  resolved "https://registry.npmjs.org/web3-providers-ipc/-/web3-providers-ipc-1.3.6.tgz"
-  integrity sha512-+TVsSd2sSVvVgHG4s6FXwwYPPT91boKKcRuEFXqEfAbUC5t52XOgmyc2LNiD9LzPhed65FbV4LqICpeYGUvSwA==
-  dependencies:
-    oboe "2.1.5"
-    underscore "1.12.1"
-    web3-core-helpers "1.3.6"
-
 web3-providers-ipc@1.5.3:
   version "1.5.3"
   resolved "https://registry.npmjs.org/web3-providers-ipc/-/web3-providers-ipc-1.5.3.tgz"
@@ -23411,15 +22024,6 @@ web3-providers-ipc@1.7.3:
     oboe "2.1.5"
     web3-core-helpers "1.7.3"
 
-web3-providers-ws@1.2.1:
-  version "1.2.1"
-  resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.2.1.tgz"
-  integrity sha512-oqsQXzu+ejJACVHy864WwIyw+oB21nw/pI65/sD95Zi98+/HQzFfNcIFneF1NC4bVF3VNX4YHTNq2I2o97LAiA==
-  dependencies:
-    underscore "1.9.1"
-    web3-core-helpers "1.2.1"
-    websocket "github:web3-js/WebSocket-Node#polyfill/globalThis"
-
 web3-providers-ws@1.2.11:
   version "1.2.11"
   resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.2.11.tgz"
@@ -23430,36 +22034,6 @@ web3-providers-ws@1.2.11:
     eventemitter3 "^4.0.0"
     underscore "1.9.1"
     web3-core-helpers "1.2.11"
     websocket "^1.0.31"
 
-web3-providers-ws@1.2.9:
-  version "1.2.9"
-  resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.2.9.tgz"
-  integrity sha512-6+UpvINeI//dglZoAKStUXqxDOXJy6Iitv2z3dbgInG4zb8tkYl/VBDL80UjUg3ZvzWG0g7EKY2nRPEpON2TFA==
-  dependencies:
-    eventemitter3 "^4.0.0"
-    underscore "1.9.1"
-    web3-core-helpers "1.2.9"
-    websocket "^1.0.31"
-
-web3-providers-ws@1.3.4:
-  version "1.3.4"
-  resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.3.4.tgz"
-  integrity sha512-WBd9hk2fUAdrbA3kUyUk94ZeILtE6txLeoVVvIKAw2bPegx+RjkLyxC1Du0oceKgQ/qQWod8CCzl1E/GgTP+MQ==
-  dependencies:
-    eventemitter3 "4.0.4"
-    underscore "1.9.1"
-    web3-core-helpers "1.3.4"
-    websocket "^1.0.32"
-
-web3-providers-ws@1.3.6:
-  version "1.3.6"
-  resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.3.6.tgz"
-  integrity sha512-bk7MnJf5or0Re2zKyhR3L3CjGululLCHXx4vlbc/drnaTARUVvi559OI5uLytc/1k5HKUUyENAxLvetz2G1dnQ==
-  dependencies:
-    eventemitter3 "4.0.4"
-    underscore "1.12.1"
-    web3-core-helpers "1.3.6"
-    websocket "^1.0.32"
-
 web3-providers-ws@1.5.3:
   version "1.5.3"
   resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.5.3.tgz"
@@ -23469,15 +22043,6 @@ web3-providers-ws@1.5.3:
     web3-core-helpers "1.5.3"
     websocket "^1.0.32"
 
-web3-providers-ws@1.6.1:
-  version "1.6.1"
-  resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.6.1.tgz"
-  integrity sha512-FWMEFYb4rYFYRgSFBf/O1Ex4p/YKSlN+JydCtdlJwRimd89qm95CTfs4xGjCskwvXMjV2sarH+f1NPwJXicYpg==
-  dependencies:
-    eventemitter3 "4.0.4"
-    web3-core-helpers "1.6.1"
-    websocket "^1.0.32"
-
 web3-providers-ws@1.7.3:
   version "1.7.3"
   resolved "https://registry.npmjs.org/web3-providers-ws/-/web3-providers-ws-1.7.3.tgz"
@@ -23487,16 +22052,6 @@ web3-providers-ws@1.7.3:
     eventemitter3 "4.0.4"
     web3-core-helpers "1.7.3"
     websocket "^1.0.32"
 
-web3-shh@1.2.1:
-  version "1.2.1"
-  resolved "https://registry.npmjs.org/web3-shh/-/web3-shh-1.2.1.tgz"
-  integrity sha512-/3Cl04nza5kuFn25bV3FJWa0s3Vafr5BlT933h26xovQ6HIIz61LmvNQlvX1AhFL+SNJOTcQmK1SM59vcyC8bA==
-  dependencies:
-    web3-core "1.2.1"
-    web3-core-method "1.2.1"
-    web3-core-subscriptions "1.2.1"
-    web3-net "1.2.1"
-
 web3-shh@1.2.11:
   version "1.2.11"
   resolved "https://registry.npmjs.org/web3-shh/-/web3-shh-1.2.11.tgz"
@@ -23507,26 +22062,6 @@ web3-shh@1.2.11:
     web3-core-method "1.2.11"
     web3-core-subscriptions "1.2.11"
     web3-net "1.2.11"
 
-web3-shh@1.2.9:
-  version "1.2.9"
-  resolved "https://registry.npmjs.org/web3-shh/-/web3-shh-1.2.9.tgz"
-  integrity sha512-PWa8b/EaxaMinFaxy6cV0i0EOi2M7a/ST+9k9nhyhCjVa2vzXuNoBNo2IUOmeZ0WP2UQB8ByJ2+p4htlJaDOjA==
-  dependencies:
-    web3-core "1.2.9"
-    web3-core-method "1.2.9"
-    web3-core-subscriptions "1.2.9"
-    web3-net "1.2.9"
-
-web3-shh@1.3.4:
-  version "1.3.4"
-  resolved "https://registry.npmjs.org/web3-shh/-/web3-shh-1.3.4.tgz"
-  integrity sha512-zoeww5mxLh3xKcqbX85irQbtFe5pc5XwrgjvmdMkhkOdZzPASlWOgqzUFtaPykpLwC3yavVx4jG5RqifweXLUA==
-  dependencies:
-    web3-core "1.3.4"
-    web3-core-method "1.3.4"
-    web3-core-subscriptions "1.3.4"
-    web3-net "1.3.4"
-
 web3-shh@1.5.3:
   version "1.5.3"
   resolved "https://registry.npmjs.org/web3-shh/-/web3-shh-1.5.3.tgz"
@@ -23547,19 +22082,6 @@ web3-shh@1.7.3:
     web3-core-subscriptions "1.7.3"
     web3-net "1.7.3"
 
-web3-utils@1.2.1:
-  version "1.2.1"
-  resolved "https://registry.npmjs.org/web3-utils/-/web3-utils-1.2.1.tgz"
-  integrity sha512-Mrcn3l58L+yCKz3zBryM6JZpNruWuT0OCbag8w+reeNROSGVlXzUQkU+gtAwc9JCZ7tKUyg67+2YUGqUjVcyBA==
-  dependencies:
-    bn.js "4.11.8"
-    eth-lib "0.2.7"
-    ethjs-unit "0.1.6"
-    number-to-bn "1.7.0"
-    randomhex "0.1.5"
-    underscore "1.9.1"
-    utf8 "3.0.0"
-
 web3-utils@1.2.11:
   version "1.2.11"
   resolved "https://registry.npmjs.org/web3-utils/-/web3-utils-1.2.11.tgz"
@@ -23574,34 +22096,6 @@ web3-utils@1.2.11:
     underscore "1.9.1"
     utf8 "3.0.0"
 
-web3-utils@1.2.9:
-  version "1.2.9"
-  resolved "https://registry.npmjs.org/web3-utils/-/web3-utils-1.2.9.tgz"
-  integrity sha512-9hcpuis3n/LxFzEVjwnVgvJzTirS2S9/MiNAa7l4WOEoywY+BSNwnRX4MuHnjkh9NY25B6QOjuNG6FNnSjTw1w==
-  dependencies:
-    bn.js "4.11.8"
-    eth-lib "0.2.7"
-    ethereum-bloom-filters "^1.0.6"
-    ethjs-unit "0.1.6"
-    number-to-bn "1.7.0"
-    randombytes "^2.1.0"
-    underscore "1.9.1"
-    utf8 "3.0.0"
-
-web3-utils@1.3.4, web3-utils@^1.2.1:
-  version "1.3.4"
-  resolved "https://registry.npmjs.org/web3-utils/-/web3-utils-1.3.4.tgz"
-  integrity sha512-/vC2v0MaZNpWooJfpRw63u0Y3ag2gNjAWiLtMSL6QQLmCqCy4SQIndMt/vRyx0uMoeGt1YTwSXEcHjUzOhLg0A==
-  dependencies:
-    bn.js "^4.11.9"
-    eth-lib "0.2.8"
-    ethereum-bloom-filters "^1.0.6"
-    ethjs-unit "0.1.6"
-    number-to-bn "1.7.0"
-    randombytes "^2.1.0"
-    underscore "1.9.1"
-    utf8 "3.0.0"
-
 web3-utils@1.3.6:
   version "1.3.6"
   resolved "https://registry.npmjs.org/web3-utils/-/web3-utils-1.3.6.tgz"
@@ -23629,19 +22123,6 @@ web3-utils@1.5.3:
     randombytes "^2.1.0"
     utf8 "3.0.0"
 
-web3-utils@1.6.1:
-  version "1.6.1"
-  resolved "https://registry.npmjs.org/web3-utils/-/web3-utils-1.6.1.tgz"
-  integrity sha512-RidGKv5kOkcerI6jQqDFDoTllQQqV+rPhTzZHhmbqtFObbYpU93uc+yG1LHivRTQhA6llIx67iudc/vzisgO+w==
-  dependencies:
-    bn.js "^4.11.9"
-    ethereum-bloom-filters "^1.0.6"
-    ethereumjs-util "^7.1.0"
-    ethjs-unit "0.1.6"
-    number-to-bn "1.7.0"
-    randombytes "^2.1.0"
-    utf8 "3.0.0"
-
 web3-utils@1.7.3, web3-utils@^1.3.4:
   version "1.7.3"
   resolved "https://registry.npmjs.org/web3-utils/-/web3-utils-1.7.3.tgz"
@@ -23669,19 +22150,6 @@ web3-utils@^1.0.0-beta.31:
     underscore "1.12.1"
     utf8 "3.0.0"
 
-web3@1.2.1:
-  version "1.2.1"
-  resolved "https://registry.npmjs.org/web3/-/web3-1.2.1.tgz"
-  integrity sha512-nNMzeCK0agb5i/oTWNdQ1aGtwYfXzHottFP2Dz0oGIzavPMGSKyVlr8ibVb1yK5sJBjrWVnTdGaOC2zKDFuFRw==
-  dependencies:
-    web3-bzz "1.2.1"
-    web3-core "1.2.1"
-    web3-eth "1.2.1"
-    web3-eth-personal "1.2.1"
-    web3-net "1.2.1"
-    web3-shh "1.2.1"
-    web3-utils "1.2.1"
-
 web3@1.2.11:
   version "1.2.11"
   resolved "https://registry.npmjs.org/web3/-/web3-1.2.11.tgz"
@@ -23695,19 +22163,6 @@ web3@1.2.11:
     web3-shh "1.2.11"
     web3-utils "1.2.11"
 
-web3@1.2.9:
-  version "1.2.9"
-  resolved "https://registry.npmjs.org/web3/-/web3-1.2.9.tgz"
-  integrity sha512-Mo5aBRm0JrcNpN/g4VOrDzudymfOnHRC3s2VarhYxRA8aWgF5rnhQ0ziySaugpic1gksbXPe105pUWyRqw8HUA==
-  dependencies:
-    web3-bzz "1.2.9"
-    web3-core "1.2.9"
-    web3-eth "1.2.9"
-    web3-eth-personal "1.2.9"
-    web3-net "1.2.9"
-    web3-shh "1.2.9"
-    web3-utils "1.2.9"
-
 web3@1.5.3:
   version "1.5.3"
   resolved "https://registry.npmjs.org/web3/-/web3-1.5.3.tgz"
@@ -23734,19 +22189,6 @@ web3@1.7.3:
     web3-shh "1.7.3"
     web3-utils "1.7.3"
 
-web3@^1.0.0-beta.34, web3@^1.2.1:
-  version "1.3.4"
-  resolved "https://registry.npmjs.org/web3/-/web3-1.3.4.tgz"
-  integrity sha512-D6cMb2EtTMLHgdGbkTPGl/Qi7DAfczR+Lp7iFX3bcu/bsD9V8fZW69hA8v5cRPNGzXUwVQebk3bS17WKR4cD2w==
-  dependencies:
-    web3-bzz "1.3.4"
-    web3-core "1.3.4"
-    web3-eth "1.3.4"
-    web3-eth-personal "1.3.4"
-    web3-net "1.3.4"
-    web3-shh "1.3.4"
-    web3-utils "1.3.4"
-
 webidl-conversions@^3.0.0:
   version "3.0.1"
   resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz"
@@ -23881,16 +22323,6 @@ websocket@^1.0.31, websocket@^1.0.32:
     utf-8-validate "^5.0.2"
     yaeti "^0.0.6"
 
-"websocket@github:web3-js/WebSocket-Node#polyfill/globalThis":
-  version "1.0.29"
-  resolved "https://codeload.github.com/web3-js/WebSocket-Node/tar.gz/ef5ea2f41daf4a2113b80c9223df884b4d56c400"
-  dependencies:
-    debug "^2.2.0"
-    es5-ext "^0.10.50"
-    nan "^2.14.0"
-    typedarray-to-buffer "^3.1.5"
-    yaeti "^0.0.6"
-
 whatwg-encoding@^1.0.5:
   version "1.0.5"
   resolved "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz"
@@ -24487,14 +22919,6 @@ yargs@^4.7.1:
     y18n "^3.2.1"
     yargs-parser "^2.4.1"
 
-yauzl@^2.4.2:
-  version "2.10.0"
-  resolved "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz"
-  integrity sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=
-  dependencies:
-    buffer-crc32 "~0.2.3"
-    fd-slicer "~1.1.0"
-
 yn@3.1.1:
   version "3.1.1"
   resolved "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz"