dbt_project.yml
name: datavault4dbt
version: 1.4.1
require-dbt-version: [">=1.0.0", "<2.0.0"]
config-version: 2
# This setting configures which "profile" dbt uses for this project.
profile: 'datavault4dbt'
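# For reference, a minimal matching profiles.yml entry might look like the sketch below
# (adapter type, target name and credentials are placeholders for your own setup):
#
#   datavault4dbt:
#     target: dev
#     outputs:
#       dev:
#         type: snowflake      # or bigquery, postgres, exasol, ...
#         account: <your account>
#         ...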
macro-paths: ["macros"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
- "target"
- "dbt_packages"
vars:
  # Column Aliases
  datavault4dbt.ldts_alias: 'ldts'
  datavault4dbt.rsrc_alias: 'rsrc'
  datavault4dbt.ledts_alias: 'ledts'
  datavault4dbt.snapshot_trigger_column: 'is_active'
  datavault4dbt.sdts_alias: 'sdts'
  datavault4dbt.stg_alias: 'stg'
  datavault4dbt.is_current_col_alias: 'IS_CURRENT'
  datavault4dbt.deleted_flag_alias: 'deleted_flag'
  # Hash Configuration
  datavault4dbt.hash: 'MD5'
  datavault4dbt.hash_datatype: 'STRING'
  datavault4dbt.hashkey_input_case_sensitive: FALSE
  datavault4dbt.hashdiff_input_case_sensitive: TRUE
  # Stage Configuration
  datavault4dbt.copy_rsrc_ldts_input_columns: false
  # General Configuration
  datavault4dbt.include_business_objects_before_appearance: false
  # Ghost Record Configuration
  datavault4dbt.beginning_of_all_times: {"bigquery":"0001-01-01T00-00-01","snowflake":"0001-01-01T00:00:01", "exasol": "0001-01-01 00:00:01", "postgres": "0001-01-01 00:00:01", "redshift": "0001-01-01 00:00:01", "synapse": "1901-01-01T00:00:01", "fabric": "0001-01-01T00:00:01", "oracle":"0001-01-01 00:00:01"}
  datavault4dbt.end_of_all_times: {"bigquery":"8888-12-31T23-59-59","snowflake":"8888-12-31T23:59:59", "exasol": "8888-12-31 23:59:59", "postgres": "8888-12-31 23:59:59", "redshift": "8888-12-31 23:59:59", "synapse": "8888-12-31T23:59:59", "fabric": "8888-12-31T23:59:59", "oracle":"8888-12-31 23:59:59"}
  datavault4dbt.timestamp_format: {"bigquery":"%Y-%m-%dT%H-%M-%S","snowflake":"YYYY-MM-DDTHH24:MI:SS", "exasol": "YYYY-mm-dd HH:MI:SS", "postgres": "YYYY-MM-DD HH24:MI:SS", "redshift": "YYYY-MM-DD HH24:MI:SS", "synapse": 126, "fabric": 126, "oracle":"YYYY-MM-DD HH24:MI:SS"}
  datavault4dbt.beginning_of_all_times_date: {"bigquery":"0001-01-01","snowflake":"0001-01-01", "exasol": "0001-01-01", "postgres": "0001-01-01", "redshift": "0001-01-01", "synapse": "1901-01-01", "fabric": "0001-01-01", "oracle":"0001-01-01"}
  datavault4dbt.end_of_all_times_date: {"bigquery":"8888-12-31","snowflake":"8888-12-31", "exasol": "8888-12-31", "postgres": "8888-12-31", "redshift": "8888-12-31", "synapse": "8888-12-31", "fabric": "8888-12-31", "oracle":"8888-12-31"}
  datavault4dbt.date_format: {"bigquery":"%Y-%m-%d","snowflake":"YYYY-MM-DD", "exasol": "YYYY-mm-dd", "postgres": "YYYY-MM-DD", "redshift": "YYYY-MM-DD", "synapse": "yyyy-MM-dd", "fabric": "yyyy-mm-dd", "oracle":"YYYY-MM-DD"}
  datavault4dbt.default_unknown_rsrc: 'SYSTEM'
  datavault4dbt.default_error_rsrc: 'ERROR'
  datavault4dbt.rsrc_default_dtype: {"bigquery":"STRING","snowflake":"VARCHAR", "exasol": "VARCHAR (2000000) UTF8", "postgres": "VARCHAR", "redshift": "VARCHAR", "synapse": "VARCHAR", "fabric": "VARCHAR(255)", "oracle":"VARCHAR2(40)"}
  datavault4dbt.timestamp_default_dtype: {"bigquery":"TIMESTAMP","snowflake":"TIMESTAMP_TZ", "exasol": "TIMESTAMP(3) WITH LOCAL TIME ZONE", "postgres": "TIMESTAMPTZ", "redshift": "TIMESTAMPTZ", "synapse": "datetimeoffset", "fabric": "datetime2(6)", "oracle":"TIMESTAMP WITH TIME ZONE"}
  datavault4dbt.stg_default_dtype: {"bigquery":"STRING","snowflake":"VARCHAR", "exasol": "VARCHAR (2000000) UTF8", "postgres": "VARCHAR", "redshift": "VARCHAR", "synapse": "VARCHAR", "fabric": "VARCHAR(255)", "oracle":"VARCHAR2(40)"}
  datavault4dbt.derived_columns_default_dtype: {"bigquery":"STRING","snowflake":"VARCHAR", "exasol": "VARCHAR (2000000) UTF8", "postgres": "VARCHAR", "redshift": "VARCHAR", "synapse": "VARCHAR", "fabric": "VARCHAR(255)", "oracle":"VARCHAR2(40)"}
  # Datatype specific default values
  datavault4dbt.error_value__STRING: '(error)'
  datavault4dbt.error_value_alt__STRING: 'e'
  datavault4dbt.unknown_value__STRING: '(unknown)'
  datavault4dbt.unknown_value_alt__STRING: 'u'
  datavault4dbt.unknown_value__numeric: '-1'
  datavault4dbt.error_value__numeric: '-2'
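# Note: a project that installs this package can typically override any of the variables
# above by redefining the same key under `vars:` in its own dbt_project.yml, since
# root-project vars take precedence over package defaults. Illustrative example
# ('load_date' is just a sample value):
#
#   vars:
#     datavault4dbt.ldts_alias: 'load_date'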
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
# In this config, models in the dbt_stg/ directory are built as views and models in
# the raw_vault/ directory as tables. These settings can be overridden in the individual
# model files using the `{{ config(...) }}` macro (see the illustrative example below the models block).
models:
  datavault4dbt:
    # Configs prefixed with + apply to all files under the respective subdirectory
    dbt_stg:
      +schema: <name of your Staging schema>
      +materialized: view
    raw_vault:
      +schema: <name of your RDV schema>
      +materialized: table
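# Illustrative example of an in-file override (hypothetical model models/raw_vault/hub_customer.sql):
#
#   {{ config(materialized='incremental', schema='my_rdv_schema') }}
#
# A config block like this at the top of a model takes precedence over the
# directory-level settings defined above.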