dbt_project.yml
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: 'tpch'
version: '1.0.0'
config-version: 2
# This setting configures which "profile" dbt uses for this project.
profile: 'tpch'
# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analyses"]
test-paths: ["tests"]
seed-paths: ["seeds"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
  - "target"
  - "dbt_modules"
  - "dbt_packages"
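# Note: `dbt clean` deletes the directories listed above; packages are re-installed
# into "dbt_packages" by `dbt deps` ("dbt_modules" was the install directory on
# pre-1.0 dbt versions and is presumably kept here so older leftovers get cleaned up too).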
on-run-start:
  #- "alter warehouse transforming set warehouse_size={{env_var('DBT_SNOWFLAKE_WAREHOUSE_SIZE')}};"
  #- '{{create_udfs()}}' # comment / uncomment this line to build the UDFs called in the create_udfs macro
  #- 'CALL {{sproc__pi_multiplier()}}(2);' # comment / uncomment this line to run the stored procedure called sproc__pi_multiplier -- the sproc can also be called explicitly if it does not exist as a macro, e.g. CALL tdunlap_sandbox_dev.dbt_tdunlap.SPROC_PI_MULTIPLIER(3);
on-run-end:
  #- "alter warehouse transforming set warehouse_size={{env_var('DBT_SNOWFLAKE_WAREHOUSE_SIZE','xsmall')}};"
  #- "{{ grant_all_on_schemas(schemas, 'transformer') }}"
  #- "{% if 'DBT_ARTIFACTS' in target.schema.upper() %}{{ grant_all_on_schemas(schemas, 'transformer') }}{% endif %}"
  #- "{% if target.name == 'prod' %}{{ dbt_artifacts.upload_results(results) }}{% endif %}"
  #- "{{ dbt_artifacts.upload_results(results) }}"
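# Note: `schemas` and `results` in the commented hooks above are dbt's built-in
# on-run-end context variables (the schemas dbt built into and the run results),
# and the `target.name == 'prod'` guard limits dbt_artifacts uploads to production
# runs. create_udfs, sproc__pi_multiplier and grant_all_on_schemas are assumed to be
# project macros defined under macro-paths ("macros").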
vars:
  start_date: '1999-01-01'
  test: 'false' # to trigger runs for unit testing - override with a CLI param in the testing job
  fct_order_items: 'mock_source__fct_order_items' # this is a map for unit testing
  dbt_artifacts:
    dbt_artifacts_database: TDUNLAP_SANDBOX_DBT_ARTIFACTS # optional, default is your target database
    dbt_artifacts_schema: raw_dbt_artifacts # optional, default is 'dbt_artifacts'
    dbt_artifacts_create_schema: true # optional, true|false; set to false if you don't have privileges to create the schema, default is true
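# Illustrative sketch: the unit-testing vars above can be overridden per invocation
# with dbt's --vars flag, e.g. a testing job might run
#   dbt build --vars '{"test": "true"}'
# (the exact values the testing job passes are an assumption; the --vars mechanism is
# standard dbt, and fct_order_items can be remapped to a mock or real relation the same way).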
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
  dbt_artifacts:
    +schema: dbt_artifacts
    +database: TDUNLAP_SANDBOX_DBT_ARTIFACTS
    staging:
      +schema: stg_dbt_artifacts
  tpch:
    staging:
      +materialized: view
      +schema: stg
    marts:
      core:
        # +required_docs: 'true' # Uncomment to require docs
        +required_tests: {"unique|not_null": 2}
        +materialized: table
        +schema: core
      intermediate:
        +schema: intermed
  snowflake_spend:
    enabled: true
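# Note: with dbt's default generate_schema_name macro, the +schema values above are
# appended to the target schema as a suffix (for example, a target schema of
# dbt_tdunlap would build the staging models into dbt_tdunlap_stg); this holds
# unless the project overrides generate_schema_name with a custom macro.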
seeds:
  tpch:
    snowflake_contract_rates:
      +column_types:
        effective_date: DATE
        rate: NUMBER
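# Illustrative usage: load this seed with its declared column types via
#   dbt seed --select snowflake_contract_rates
# (presumably the contract-rates seed consumed by the snowflake_spend package enabled above).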