# dbt_project.yml — forked from dbt-labs/dbt-project-evaluator
# (upstream file: 86 lines / 69 loc, 2.93 KB)
---
# Package configuration for dbt-project-evaluator.
# Many materializations below are Jinja-conditional on the warehouse
# adapter (target.type) because some adapters need tables instead of
# views for performance/memory reasons (see per-model comments).
#
# NOTE(review): indentation/nesting was reconstructed from a flattened
# copy of this file — verify against upstream dbt-labs/dbt-project-evaluator.

name: 'dbt_project_evaluator'
version: '1.0.0'
config-version: 2

require-dbt-version: [">=1.6.0-rc1", "<2.0.0"]

model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["seeds"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]

target-path: "target"  # directory which will store compiled SQL files
clean-targets:  # directories to be removed by `dbt clean`
  - "target"
  - "dbt_packages"

# Resolve dbt-namespaced macro calls against this package first, so its
# overrides take precedence over the dbt built-ins.
dispatch:
  - macro_namespace: dbt
    search_order: ['dbt_project_evaluator', 'dbt']

models:
  dbt_project_evaluator:
    # package-wide default: tables on duckdb, views everywhere else
    +materialized: "{{ 'table' if target.type in ['duckdb'] else 'view' }}"
    marts:
      core:
        int_all_graph_resources:
          +materialized: table
        int_direct_relationships:
          # required for BigQuery and Redshift for performance/memory reasons
          +materialized: "{{ 'table' if target.type in ['bigquery', 'redshift', 'databricks'] else 'view' }}"
        int_all_dag_relationships:
          # required for BigQuery, Redshift, and Databricks for performance/memory reasons
          +materialized: "{{ 'table' if target.type in ['bigquery', 'redshift', 'databricks'] else 'view' }}"
      dag:
        +materialized: table
    staging:
      graph:
        stg_node_relationships:
          +materialized: table
      variables:
        stg_naming_convention_folders:
          # required for Redshift because listagg runs only on tables
          +materialized: "{{ 'table' if target.type == 'redshift' else 'view' }}"
        stg_naming_convention_prefixes:
          # required for Redshift because listagg runs only on tables
          +materialized: "{{ 'table' if target.type == 'redshift' else 'view' }}"

vars:
  # -- Tests and docs coverage variables --
  documentation_coverage_target: 100
  test_coverage_target: 100
  primary_key_test_macros: [["dbt.test_unique", "dbt.test_not_null"], ["dbt_utils.test_unique_combination_of_columns"]]

  # -- Graph variables --
  # node types to test for primary key coverage. acceptable node types: model, source, snapshot, seed
  enforced_primary_key_node_types: ["model"]

  # -- DAG variables --
  models_fanout_threshold: 3

  # -- Naming conventions variables --
  # to add a new "model type", update the variable model_types
  # and create new variables with the names <model_type>_folder_name and/or <model_type>_prefixes
  model_types: ['staging', 'intermediate', 'marts', 'other']
  staging_folder_name: 'staging'
  intermediate_folder_name: 'intermediate'
  marts_folder_name: 'marts'
  staging_prefixes: ['stg_']
  intermediate_prefixes: ['int_']
  marts_prefixes: ['fct_', 'dim_']
  other_prefixes: ['rpt_']

  # -- Performance variables --
  chained_views_threshold: "{{ 5 if target.type != 'trino' else 4 }}"

  # -- Execution variables --
  insert_batch_size: "{{ 500 if target.type == 'bigquery' else 10000 }}"
  max_depth_dag: "{{ 9 if target.type in ['bigquery', 'spark', 'databricks'] else 4 if target.type == 'trino' else -1 }}"