Merge pull request #13 from Lambda256/upstream/v1.19.1
rebase: v1.19.1
celochoi authored Dec 20, 2024
2 parents 9ce86f2 + 747f531 commit 34f5e0b
Showing 543 changed files with 22,518 additions and 5,098 deletions.
2 changes: 1 addition & 1 deletion .github/actions/rust-setup/action.yaml
@@ -8,7 +8,7 @@ description: |
runs:
using: composite
steps:
- run: sudo apt-get update && sudo apt-get install build-essential ca-certificates clang curl git libpq-dev libssl-dev pkg-config lsof lld --no-install-recommends --assume-yes
- run: sudo apt-get update && sudo apt-get install build-essential ca-certificates clang curl git libpq-dev libssl-dev pkg-config lsof lld libdw-dev --no-install-recommends --assume-yes
shell: bash

- uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af # pin@v1
25 changes: 19 additions & 6 deletions .github/workflows/build-images.yaml
@@ -6,10 +6,6 @@ on:
push:
branches:
- main
# aptos-indexer-processors network-specific release branches
- aptos-indexer-processors-devnet
- aptos-indexer-processors-testnet
- aptos-indexer-processors-mainnet

# cancel redundant builds
concurrency:
@@ -24,7 +20,7 @@ env:
permissions:
contents: read
id-token: write #required for GCP Workload Identity federation which we use to login into Google Artifact Registry

jobs:
Build:
strategy:
@@ -37,4 +33,21 @@ jobs:
with:
GCP_SERVICE_ACCOUNT_EMAIL: ${{ secrets.GCP_SERVICE_ACCOUNT_EMAIL }}
GCP_WORKLOAD_IDENTITY_PROVIDER: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
- run: ./scripts/build-and-push-images.sh ${{ matrix.example }}
- name: Extract branch name
shell: bash
run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
id: extract_branch
- name: Cache cargo
uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Build and push images
run: ./scripts/build-and-push-images.sh ${{ matrix.example }}
env:
GIT_BRANCH: ${{ steps.extract_branch.outputs.branch }}
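For context on the "Extract branch name" step added above: GITHUB_HEAD_REF is populated only on pull_request runs, so the bash expansion ${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}} falls back to stripping the refs/heads/ prefix from GITHUB_REF on push runs. A rough Python sketch of the same fallback, with sample values that are illustrative rather than taken from this repository's CI:

```python
import os


def extract_branch() -> str:
    """Mimic ${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}} from the workflow step above.

    GITHUB_HEAD_REF is set only for pull_request runs; on push runs we fall back
    to GITHUB_REF with the leading "refs/heads/" prefix stripped.
    """
    head_ref = os.environ.get("GITHUB_HEAD_REF", "")
    if head_ref:
        return head_ref
    return os.environ.get("GITHUB_REF", "").removeprefix("refs/heads/")


# Illustrative values only:
#   push:         GITHUB_REF=refs/heads/main           -> "main"
#   pull request: GITHUB_HEAD_REF=upstream/v1.19.1     -> "upstream/v1.19.1"
```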
@@ -1,11 +1,6 @@
name: Copy images to dockerhub on release
on:
push:
branches:
# aptos-indexer-processors network-specific release branches
- aptos-indexer-processors-devnet
- aptos-indexer-processors-testnet
- aptos-indexer-processors-mainnet
tags:
- aptos-indexer-processors-v*

31 changes: 21 additions & 10 deletions .github/workflows/integration-tests.yaml
@@ -17,7 +17,7 @@ on:

jobs:
Integration-tests:
runs-on: ubuntu-latest # Ensure the correct runner is used
runs-on: runs-on,runner=2cpu-linux-x64,run-id=${{ github.run_id }}

steps:
- name: Checkout code
@@ -53,18 +53,29 @@ jobs:
# Show Cargo.toml after the update
- name: Show Cargo.toml After Update
run: cat rust/Cargo.toml # Correct path to the Cargo.toml file

# Ensure Cargo.lock is updated with the latest dependencies
- name: Update Dependencies
run: cargo update
- name: rust setup
run: |
sudo apt update && sudo apt install libdw-dev
cargo update
working-directory: rust

- name: Install Dependencies and Run Linter
uses: ./.github/actions/dep_install_and_lint
# Cache Cargo
- name: Cache cargo
uses: actions/cache@v4
with:
working-directory: rust

path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}

# Run Integration Tests
- name: Run Integration Tests
run: cargo test --manifest-path integration-tests/Cargo.toml
working-directory: rust
run: |
# TODO: until we have more comprehensive cli parsers, we will need to run tests separately.
cargo test sdk_tests -- --nocapture
working-directory: rust/integration-tests
5 changes: 1 addition & 4 deletions .github/workflows/lint.yaml
@@ -43,7 +43,4 @@ jobs:
working-directory: rust
- name: Ensure the --no-default-features build passes too
run: cargo build --no-default-features
working-directory: rust
- name: Ensure tests pass
run: cargo test
working-directory: rust
working-directory: rust
79 changes: 79 additions & 0 deletions .github/workflows/test-coverage.yaml
@@ -0,0 +1,79 @@
name: "Test_Coverage"
on:
# Trigger if any of the conditions
# 1. Daily at 12am UTC from the main branch, or
# 2. PR with a specific label (see below)
schedule:
- cron: "0 0 * * *"
pull_request:
workflow_dispatch:
push:
branches:
- main

env:
CARGO_INCREMENTAL: "0"
CARGO_TERM_COLOR: always

# cancel redundant builds
concurrency:
# cancel redundant builds on PRs (only on PR, not on branches)
group: ${{ github.workflow }}-${{ (github.event_name == 'pull_request' && github.ref) || github.sha }}
cancel-in-progress: true

jobs:
rust-unit-coverage:
timeout-minutes: 60
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # get all the history because cargo xtest --change-since origin/main requires it.

- name: rust setup
run: |
sudo apt update && sudo apt install libdw-dev
cargo update
working-directory: rust

- run: rustup component add llvm-tools-preview
- uses: taiki-e/install-action@4fedbddde88aab767a45a011661f832d68202716 # [email protected]
with:
tool: nextest,cargo-llvm-cov
- run: docker run --detach -p 5432:5432 cimg/postgres:14.2
- run: cargo llvm-cov nextest --lcov --output-path lcov_unit.info -vv --ignore-run-fail --workspace
env:
INDEXER_DATABASE_URL: postgresql://postgres@localhost/postgres
RUST_MIN_STACK: 33554432 # 32 MB of stack
MVP_TEST_ON_CI: true
SOLC_EXE: /home/runner/bin/solc
Z3_EXE: /home/runner/bin/z3
CVC5_EXE: /home/runner/bin/cvc5
DOTNET_ROOT: /home/runner/.dotnet
BOOGIE_EXE: /home/runner/.dotnet/tools/boogie
working-directory: rust
- run: ls -R
working-directory: rust
- uses: actions/upload-artifact@v4
with:
name: lcov_unit
path: rust/lcov_unit.info

upload-to-codecov:
runs-on: ubuntu-latest
continue-on-error: true # Don't fail if the codecov upload fails
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
needs: [ rust-unit-coverage ]
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: lcov_unit
- run: ls -R
- name: Upload coverage to Codecov
uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3
with:
files: lcov_unit.info
# fail_ci_if_error: true
verbose: true
1 change: 1 addition & 0 deletions .gitignore
@@ -2,6 +2,7 @@ config.yaml
cursor.txt
build
node_modules/
lcov.info

# Rust specific ignores
# Please follow https://help.github.com/en/articles/ignoring-files to create a global
14 changes: 14 additions & 0 deletions codecov.yml
@@ -0,0 +1,14 @@
---
coverage:
# range for color spectrum display, red=50%, green=80%
range: "50...80"
precision: 1

status:
project:
default:
informational: true
patch:
default:
informational: true
changes: false
14 changes: 12 additions & 2 deletions python/processors/nft_orderbooks/parsers/okx_parser.py
@@ -19,6 +19,8 @@
"okx_listing_utils::CancelListingEvent",
]
)
DEPOSIT_EVENT_V1 = "0x3::token::DepositEvent"
DEPOSIT_EVENT_V2 = "0x3::token::Deposit"


def parse_marketplace_events(
@@ -141,8 +143,16 @@ def parse_marketplace_events(
def get_token_data_from_deposit_events(user_transaction) -> Dict[str, TokenDataIdType]:
# Extract deposit events, which contain token metadata
deposit_events: Dict[str, TokenDataIdType] = {}
for event in user_transaction.events:
if event.type_str != "0x3::token::DepositEvent":
for idx, event in enumerate(user_transaction.events):
if event.type_str != DEPOSIT_EVENT_V1 and event.type_str != DEPOSIT_EVENT_V2:
continue
# Current event is either DEPOSIT_EVENT_V1 or DEPOSIT_EVENT_V2.
if (
idx > 0
# skip if prior event is V2 deposit event.
and user_transaction.events[idx - 1].type_str == DEPOSIT_EVENT_V2
and event.type_str == DEPOSIT_EVENT_V1
):
continue
account_address = standardize_address(event_utils.get_account_address(event))
data = json.loads(event.data)
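To make the new guard in get_token_data_from_deposit_events concrete, here is a minimal, self-contained sketch of the skip rule: a legacy 0x3::token::DepositEvent that directly follows a 0x3::token::Deposit module event is ignored, apparently so the same deposit is not counted twice when both event versions are emitted. The Event stand-in below is illustrative and not the real transaction protobuf type.

```python
from dataclasses import dataclass
from typing import List

DEPOSIT_EVENT_V1 = "0x3::token::DepositEvent"
DEPOSIT_EVENT_V2 = "0x3::token::Deposit"


@dataclass
class Event:
    # Stand-in for the transaction event type used by the parser (illustrative only).
    type_str: str
    data: str = "{}"


def deposit_events_to_process(events: List[Event]) -> List[int]:
    """Return the indices of deposit events the loop above would actually process."""
    kept = []
    for idx, event in enumerate(events):
        if event.type_str not in (DEPOSIT_EVENT_V1, DEPOSIT_EVENT_V2):
            continue
        # Skip a V1 deposit event that directly follows a V2 deposit event,
        # mirroring the guard in the diff above.
        if (
            idx > 0
            and events[idx - 1].type_str == DEPOSIT_EVENT_V2
            and event.type_str == DEPOSIT_EVENT_V1
        ):
            continue
        kept.append(idx)
    return kept


# Paired V2 + V1 events: only the V2 event (index 0) is processed.
assert deposit_events_to_process(
    [Event(DEPOSIT_EVENT_V2), Event(DEPOSIT_EVENT_V1)]
) == [0]
# A lone V1 event is still processed.
assert deposit_events_to_process([Event(DEPOSIT_EVENT_V1)]) == [0]
```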