An 3060 get streamline in dbt #3

Open · wants to merge 13 commits into base: main
6 changes: 3 additions & 3 deletions .github/workflows/dbt_run_dev_refresh.yml
@@ -3,9 +3,9 @@ run-name: dbt_run_dev_refresh

on:
workflow_dispatch:
# schedule:
# # Runs "at 9:00 UTC" (see https://crontab.guru)
# - cron: '0 9 * * *'
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'

env:
USE_VARS: "${{ vars.USE_VARS }}"
41 changes: 41 additions & 0 deletions .github/workflows/dbt_run_incremental.yml
@@ -0,0 +1,41 @@
name: dbt_run_scheduled
run-name: dbt_run_scheduled

on:
workflow_dispatch:
schedule:
- cron: '0 1,7,13,19 * * *'

env:
DBT_PROFILES_DIR: "${{ secrets.DBT_PROFILES_DIR }}"

ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
WAREHOUSE: "${{ secrets.WAREHOUSE }}"
SCHEMA: "${{ secrets.SCHEMA }}"


jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod

steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v1
with:
python-version: "3.7.x"

- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ secrets.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run -s ./models
44 changes: 44 additions & 0 deletions .github/workflows/dbt_run_streamline_history.yml
@@ -0,0 +1,44 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
workflow_dispatch:
schedule:
# Runs "every 6 hours" (see https://crontab.guru)
- cron: '0 1-23/6 * * *'

env:
DBT_PROFILES_DIR: ./

ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
group: ${{ github.workflow }}

jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod

steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v1
with:
python-version: "3.7.x"

- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_RUN_HISTORY":True}' -m 1+models/streamline/realtime
44 changes: 44 additions & 0 deletions .github/workflows/dbt_run_streamline_realtime.yml
@@ -0,0 +1,44 @@
name: dbt_run_streamline_realtime
run-name: dbt_run_streamline_realtime

on:
workflow_dispatch:
schedule:
# Runs "every 1 hour at min 40" (see https://crontab.guru)
- cron: '40 */1 * * *'

env:
DBT_PROFILES_DIR: ./

ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
group: ${{ github.workflow }}

jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_backfill

steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v1
with:
python-version: "3.7.x"

- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/realtime
41 changes: 41 additions & 0 deletions .github/workflows/dbt_test.yml
@@ -0,0 +1,41 @@
name: dbt_test_scheduled
run-name: dbt_test_scheduled

on:
workflow_dispatch:
schedule:
- cron: '0 4 * * *'

env:
DBT_PROFILES_DIR: "${{ secrets.DBT_PROFILES_DIR }}"

ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
WAREHOUSE: "${{ secrets.WAREHOUSE }}"
SCHEMA: "${{ secrets.SCHEMA }}"


jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod

steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v1
with:
python-version: "3.7.x"

- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ secrets.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m ./models
11 changes: 4 additions & 7 deletions README.md
@@ -1,13 +1,10 @@

# Please find and replace all instances of `xyz` with your project name.

## Profile Set Up

#### Use the following within profiles.yml
----

```yml
xyz:
evmos:
target: dev
outputs:
dev:
@@ -17,7 +14,7 @@ xyz:
user: <USERNAME>
password: <PASSWORD>
region: <REGION>
database: xyz_DEV
database: evmos_DEV
warehouse: <WAREHOUSE>
schema: silver
threads: 4
@@ -73,5 +70,5 @@ dbt run --var '{"UPDATE_SNOWFLAKE_TAGS":True}' -s models/core/core__fact_swaps.s

```
select *
from table(xyz.information_schema.tag_references('xyz.core.fact_blocks', 'table'));
from table(evmos.information_schema.tag_references('evmos.core.fact_blocks', 'table'));
```
47 changes: 47 additions & 0 deletions analyses/sample_tendermint_rpc_calls.sql
@@ -0,0 +1,47 @@
-- get chainhead
select ETHEREUM.STREAMLINE.UDF_API('GET','https://rpc-evmos.imperator.co/abci_info',{},{});

-- sample gen for the last 10k blocks
create table sample_block_ids as (
with gen as (
select
row_number() over (
order by
seq4()
) as block_height
from
table(generator(rowcount => 100000000))
)


select top 10000 block_height from gen
-- except select 12872988 from sample_blocks
where block_height <= 12873408
order by 1 desc);
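
For reference, a hedged sketch of how `sample_block_ids` could drive a batched pull — it reuses the same `UDF_JSON_RPC_CALL` pattern as the single-block query below, and nothing here goes beyond the table and UDF already used in this file:

```sql
-- hedged sketch: batch-pull every sampled block with the same UDF call
-- used by the single-block query below
select
    block_height,
    ETHEREUM.STREAMLINE.UDF_JSON_RPC_CALL('https://rpc-evmos.imperator.co/',{},
    [
        { 'id': block_height, 'jsonrpc': '2.0', 'method': 'block', 'params': [ block_height::STRING ] }
    ]
    ) data,
    getdate() as _inserted_timestamp
from sample_block_ids;
```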


-- pull one block
select
block_height,
ETHEREUM.STREAMLINE.UDF_JSON_RPC_CALL('https://rpc-evmos.imperator.co/',{},
[
{ 'id': block_height, 'jsonrpc': '2.0', 'method': 'block', 'params': [ block_height::STRING ] }
]
) data,
getdate() as _inserted_timestamp
from
(select 12889280 as block_height);


-- pull one block's transactions
select
block_height,
ETHEREUM.STREAMLINE.UDF_JSON_RPC_CALL('https://rpc-evmos.imperator.co/',{},
[
{ 'id': block_height, 'jsonrpc': '2.0', 'method': 'tx_search', 'params': [ 'tx.height='||block_height::STRING , true, '1', '1000', 'asc' ] }
]
) data,
getdate() as _inserted_timestamp
from
(select 12889280 as block_height);
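
Since `macros/create_udfs.sql` in this PR also registers `create_udf_get_tendermint_validators`, a matching ad hoc call might look like the sketch below; the Tendermint `validators` method and its `[height, page, per_page]` params are assumptions from the public Tendermint RPC spec, not taken from this PR:

```sql
-- hedged sketch: pull one block's validator set via the Tendermint RPC
select
    block_height,
    ETHEREUM.STREAMLINE.UDF_JSON_RPC_CALL('https://rpc-evmos.imperator.co/',{},
    [
        { 'id': block_height, 'jsonrpc': '2.0', 'method': 'validators', 'params': [ block_height::STRING, '1', '100' ] }
    ]
    ) data,
    getdate() as _inserted_timestamp
from
    (select 12889280 as block_height);
```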

11 changes: 8 additions & 3 deletions dbt_project.yml
@@ -1,14 +1,14 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "xyz_models"
name: "evmos_models"
version: "1.0.0"
config-version: 2

require-dbt-version: ">=1.4.0"

# This setting configures which "profile" dbt uses for this project.
profile: "xyz"
profile: "evmos"

# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
@@ -41,7 +41,12 @@ models:

vars:
"dbt_date:time_zone": GMT
"UPDATE_SNOWFLAKE_TAGS": TRUE
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
STREAMLINE_RUN_HISTORY: False
UPDATE_SNOWFLAKE_TAGS: True
WAIT: 0

tests:
+store_failures: true # all tests
2 changes: 1 addition & 1 deletion macros/create_sps.sql
@@ -1,5 +1,5 @@
{% macro create_sps() %}
{% if target.database == 'xyz' %}
{% if target.database == 'EVMOS' %}
CREATE SCHEMA IF NOT EXISTS _internal;
{{ sp_create_prod_clone('_internal') }};
{% endif %}
7 changes: 7 additions & 0 deletions macros/create_udfs.sql
@@ -1,2 +1,9 @@
{% macro create_udfs() %}
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{{ create_udf_get_chainhead() }}
{{ create_udf_json_rpc() }}
{{ create_udf_get_tendermint_transactions() }}
{{ create_udf_get_tendermint_validators() }}
{% endmacro %}
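
The UDF macros invoked above are not shown in this diff; as a rough sketch, each likely wraps a Snowflake external function bound to the API integration defined in `macros/streamline/api_integrations.sql`. The function name, zero-arg signature, and endpoint path below are illustrative assumptions, not taken from this PR:

```sql
{% macro create_udf_get_chainhead() %}
    -- hedged sketch only: endpoint path and signature are assumed
    CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead()
    RETURNS variant
    api_integration = aws_evmos_api
    AS 'https://n0reh6ugbf.execute-api.us-east-1.amazonaws.com/prod/get_chainhead';
{% endmacro %}
```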
2 changes: 1 addition & 1 deletion macros/run_sp_create_prod_clone.sql
@@ -1,6 +1,6 @@
{% macro run_sp_create_prod_clone() %}
{% set clone_query %}
call xyz._internal.create_prod_clone('xyz', 'xyz_dev', 'internal_dev');
call evmos._internal.create_prod_clone('evmos', 'evmos_dev', 'internal_dev');
{% endset %}

{% do run_query(clone_query) %}
11 changes: 11 additions & 0 deletions macros/streamline/api_integrations.sql
@@ -0,0 +1,11 @@
{% macro create_aws_ethereum_api() %}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_evmos_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-evmos' api_allowed_prefixes = (
'https://55h4rahr50.execute-api.us-east-1.amazonaws.com/dev/',
'https://n0reh6ugbf.execute-api.us-east-1.amazonaws.com/prod/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
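
Once this has run on prod, the integration can be inspected from a worksheet with standard Snowflake commands:

```sql
-- confirm the integration exists and review its allowed prefixes
SHOW API INTEGRATIONS LIKE 'aws_evmos_api';
DESCRIBE API INTEGRATION aws_evmos_api;
```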
24 changes: 24 additions & 0 deletions macros/streamline/get_base_table_udtf.sql
@@ -0,0 +1,24 @@
{% macro create_udtf_get_base_table(schema) %}
create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
returns table (height number)
as
$$
with base as (
select
row_number() over (
order by
seq4()
) as id
from
table(generator(rowcount => 100000000))
)
select
id as height
from
base
where
id <= max_height
$$
;

{% endmacro %}
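
A quick usage check for the UDTF, straight from its definition (one row per height from 1 up to the argument):

```sql
-- sample the top of the range produced by the UDTF
select height
from table(streamline.udtf_get_base_table(12873408))
order by height desc
limit 10;
```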