From 364b02ac26f522669bf2301a3703a189b65197ff Mon Sep 17 00:00:00 2001
From: Alex Higgs
Date: Wed, 22 Mar 2023 07:45:30 +0000
Subject: [PATCH 01/24] Update README

---
 README.md | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index e0d9dde1f..e0054d6c0 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,16 @@
 dbtvault
 
-[![Build Status](https://dev.azure.com/data-vault/dbtvault/_apis/build/status/Datavault-UK.dbtvault-dev?branchName=develop)](https://dev.azure.com/data-vault/dbtvault/_build/latest?definitionId=5&branchName=develop)
-[![Docs](https://readthedocs.org/projects/dbtvault/badge/?version=stable)](https://dbtvault.readthedocs.io/en/stable/?badge=stable)
+[![Documentation Status](https://img.shields.io/badge/docs-stable-blue)](https://dbtvault.readthedocs.io/en/stable/?badge=stable)
 [![Slack](https://img.shields.io/badge/Slack-Join-yellow?style=flat&logo=slack)](https://join.slack.com/t/dbtvault/shared_invite/enQtODY5MTY3OTIyMzg2LWJlZDMyNzM4YzAzYjgzYTY0MTMzNTNjN2EyZDRjOTljYjY0NDYyYzEwMTlhODMzNGY3MmU2ODNhYWUxYmM2NjA)
+
+
+[![dbt Versions](https://img.shields.io/badge/compatible%20dbt%20versions-%3E=1.3%20%3C=1.4.x-orange?logo=dbt)](https://dbtvault.readthedocs.io/en/latest/versions/)
+
+[Changelog and past doc versions](https://dbtvault.readthedocs.io/en/latest/changelog/stable)
 
 # dbtvault by [Datavault](https://www.data-vault.co.uk)
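The new compatibility badge URL-encodes its label, so the supported range reads ">=1.3 <=1.4.x" once decoded. A quick standard-library check of that decoding (the label string is taken straight from the badge URL above):

    from urllib.parse import unquote

    label = "compatible%20dbt%20versions-%3E=1.3%20%3C=1.4.x-orange"
    print(unquote(label))
    # compatible dbt versions->=1.3 <=1.4.x-orange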
From 5c21cb48680d47db039348894a426b9607a340b5 Mon Sep 17 00:00:00 2001
From: Jocelyn Shannon
Date: Tue, 9 May 2023 09:24:48 +0000
Subject: [PATCH 02/24] trialling hashing test

---
 test/context_helpers.py                       |  40 ++++++
 test/dbt_file_utils.py                        |  58 ++++++++
 test/dbtvault_generator.py                    |   2 +-
 test/dbtvault_test/macros/harness_helpers.sql |  80 +++++++++++
 test/features/staging/hashing.feature         |  25 ++++
 test/features/steps/shared_steps.py           |  48 ++++++-
 test/result_helpers.py                        | 127 ++++++++++++++++++
 7 files changed, 378 insertions(+), 2 deletions(-)
 create mode 100755 test/context_helpers.py
 create mode 100755 test/dbt_file_utils.py
 create mode 100755 test/dbtvault_test/macros/harness_helpers.sql
 create mode 100644 test/features/staging/hashing.feature
 create mode 100755 test/result_helpers.py

diff --git a/test/context_helpers.py b/test/context_helpers.py
new file mode 100755
index 000000000..5c78d423a
--- /dev/null
+++ b/test/context_helpers.py
@@ -0,0 +1,40 @@
+import pandas as pd
+from behave.model import Table
+
+from test import dbt_file_utils
+import test
+from test import dbt_runner
+
+
+def context_table_to_df(table: Table) -> pd.DataFrame:
+    table_df = pd.DataFrame(columns=table.headings, data=table.rows)
+
+    return table_df
+
+
+def context_table_to_seed(table: Table, model_name: str) -> str:
+    table_df = context_table_to_df(table)
+
+    csv_fqn = test.TEMP_SEED_DIR / f'{model_name.lower()}.csv'
+
+    table_df.to_csv(path_or_buf=csv_fqn, index=False)
+
+    return csv_fqn.stem
+
+
+def sample_data_to_database(context, model_name: str):
+    table = context.table
+
+    input_seed_name = context_table_to_seed(table, model_name)
+
+    if hasattr(context, 'seed_config') and hasattr(context, 'sample_table_name'):
+        dbt_file_utils.write_seed_properties(input_seed_name,
+                                             context.seed_config[context.sample_table_name])
+    else:
+        dbt_file_utils.write_seed_properties(input_seed_name, {'column_types': {k: 'VARCHAR' for k in table.headings}})
+
+    seeds_logs = dbt_runner.run_dbt_seeds([input_seed_name], full_refresh=True)
+
+    assert "Completed successfully" in seeds_logs
+
+    return input_seed_name
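The helpers above turn a behave context table into a dbt seed: the table becomes a pandas DataFrame, which is written to CSV and loaded with a full-refresh seed run. A minimal sketch of that round trip, using plain lists in place of a real behave Table (the headings and values here are illustrative only):

    import pandas as pd

    # A behave Table exposes .headings and .rows in exactly these shapes.
    headings = ["VALUE_STRING"]
    rows = [["postgres_hash"]]

    # Mirrors context_table_to_df / context_table_to_seed above.
    table_df = pd.DataFrame(columns=headings, data=rows)
    table_df.to_csv("sample_data.csv", index=False)  # the seed file dbt loads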
diff --git a/test/dbt_file_utils.py b/test/dbt_file_utils.py
new file mode 100755
index 000000000..023a29cef
--- /dev/null
+++ b/test/dbt_file_utils.py
@@ -0,0 +1,58 @@
+import ruamel.yaml
+
+import test
+
+
+def write_seed_properties(seed_name: str, seed_config: dict):
+    yml = ruamel.yaml.YAML()
+
+    seed_property = {
+        'version': 2,
+        'seeds': [
+            {'name': seed_name,
+             'config': {
+                 'schema': 'development',
+                 'quote_columns': True,
+                 'column_types': seed_config['column_types']
+             }}
+        ]
+    }
+
+    with open(test.SEEDS_DIR / 'properties.yml', 'w+') as f:
+        yml.width = 150
+        yml.indent(sequence=4, offset=2)
+
+        yml.dump(seed_property, f)
+
+
+def write_model_test_properties(actual_model_name, expected_model_name, unique_id, columns_to_compare):
+    yml = ruamel.yaml.YAML()
+
+    test_property = {
+        'version': 2,
+        'models': [
+            {'name': actual_model_name,
+             'tests': [{
+                 "expect_tables_to_match": {
+                     "expected_seed": expected_model_name,
+                     "unique_id": unique_id,
+                     "compare_columns": columns_to_compare
+                 }
+             }]}
+        ]
+    }
+
+    with open(test.TEST_MODELS_ROOT / 'test.yml', 'w+') as f:
+        yml.width = 150
+        yml.indent(sequence=4, offset=2)
+
+        yml.dump(test_property, f)
+
+
+def generate_model(model_name, sql):
+    template = f"""
+    {{{{- config(materialized='table') -}}}}
+
+    {sql}
+    """
+
+    with open(test.TEST_MODELS_ROOT / f"{model_name}.sql", "w") as f:
+        f.write(template.strip())
diff --git a/test/dbtvault_generator.py b/test/dbtvault_generator.py
index 276ea837a..fae449768 100644
--- a/test/dbtvault_generator.py
+++ b/test/dbtvault_generator.py
@@ -730,7 +730,7 @@ def add_seed_config(seed_name: str, seed_config: dict, include_columns=None,
     yml = ruamel.yaml.YAML()
     yml.preserve_quotes = True
     yml.indent(sequence=4, offset=2)
-    properties_path = TEMP_SEED_DIR / 'vault_properties.yml'
+    properties_path = SEEDS_DIR / 'properties.yml'
 
     if include_columns:
         seed_config['column_types'] = {k: v for k, v in seed_config['column_types'].items() if
diff --git a/test/dbtvault_test/macros/harness_helpers.sql b/test/dbtvault_test/macros/harness_helpers.sql
new file mode 100755
index 000000000..6c351f543
--- /dev/null
+++ b/test/dbtvault_test/macros/harness_helpers.sql
@@ -0,0 +1,80 @@
+{%- macro check_table_exists(model_name) -%}
+
+    {%- set source_relation = adapter.get_relation(database=target.database,
+                                                   schema=dbtvault_test.get_schema_name(),
+                                                   identifier=model_name) -%}
+
+    {%- if source_relation -%}
+        {%- do log("Table '{}' exists.".format(model_name), true) -%}
+        {%- do return(True) %}
+    {%- else -%}
+        {%- do log("Table '{}' does not exist.".format(model_name), true) -%}
+        {%- do return(False) %}
+    {%- endif -%}
+
+    {%- do return(False) %}
+
+{%- endmacro -%}
+
+{%- macro check_source_exists(source_name, table_name) -%}
+
+    {%- set source = source(source_name, table_name) -%}
+    {%- set source_relation = adapter.get_relation(database=source.database,
+                                                   schema=source.schema,
+                                                   identifier=source.identifier) -%}
+
+    {%- if source_relation.is_table or source_relation.is_view -%}
+        {%- do log("Source '{}:{}' exists.".format(source_name, table_name), true) -%}
+        {%- do return(True) %}
+    {%- else -%}
+        {%- do log("Source '{}:{}' does not exist.".format(source_name, table_name), true) -%}
+        {%- do return(False) %}
+    {%- endif -%}
+
+{%- endmacro -%}
+
+
+{%- macro recreate_schema(schema_name=None) -%}
+
+    {%- if not schema_name -%}
+        {%- set schema_name = dbtvault_test.get_schema_name() %}
+    {%- endif -%}
+
+    {%- set schema_relation = api.Relation.create(database=target.database, schema=schema_name) -%}
+
+    {%- do adapter.drop_schema(schema_relation) -%}
+    {%- do adapter.create_schema(schema_relation) -%}
+
+{%- endmacro -%}
+
+
+{%- macro get_hash_length(hash_alg, columns, schema_name, table_name) -%}
+
+    {{- adapter.dispatch('get_hash_length', 'dbtvault_test')(hash_alg=hash_alg, columns=columns, schema_name=schema_name, table_name=table_name) -}}
+
+{%- endmacro -%}
+
+
+{%- macro postgres__get_hash_length(hash_alg, columns, schema_name, table_name) -%}
+
+    {%- set hash_alg = var('hash', 'MD5') -%}
+
+    {%- if hash_alg == 'MD5' -%}
+
+        WITH CTE AS (
+            SELECT DECODE(MD5("{{ columns }}"), 'hex') AS HK
+                 , "{{ columns }}" AS {{ columns }}
+            FROM "{{ schema_name }}".{{ table_name }}
+        )
+        SELECT
+            {{ columns }}
+            , length(HK) AS HASH_VALUE_LENGTH
+        FROM CTE
+
+    {%- else -%}
+        {%- do log('Test is currently only supported for MD5 hashing', info=True) -%}
+        {%- do return(False) -%}
+    {%- endif -%}
+
+{%- endmacro -%}
\ No newline at end of file
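The postgres__get_hash_length macro decodes the hex digest back to bytes before measuring it, so the lengths asserted in the feature file below are raw digest sizes: 16 bytes for MD5 and 32 bytes for SHA (assuming the SHA option maps to SHA-256, which is the usual dbtvault default). The same constants, checked in plain Python:

    import hashlib

    value = b"postgres_hash"
    assert len(hashlib.md5(value).digest()) == 16     # MD5 digest: 16 bytes
    assert len(hashlib.sha256(value).digest()) == 32  # SHA-256 digest: 32 bytes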
diff --git a/test/features/staging/hashing.feature b/test/features/staging/hashing.feature
new file mode 100644
index 000000000..0fcb69e55
--- /dev/null
+++ b/test/features/staging/hashing.feature
@@ -0,0 +1,25 @@
+@postgres
+Feature: [HASH] Check the hash string length for postgres
+
+  Scenario: [HASH-01] Check hash value length for MD5 hashing in postgres
+    Given there is data available
+      | VALUE_STRING  |
+      | postgres_hash |
+    And using hash calculation on table
+      | VALUE_STRING  |
+      | postgres_hash |
+    Then the SAMPLE_DATA table should contain the following data
+      | value_string  | hash_value_length |
+      | postgres_hash | 16                |
+
+  @enable_sha
+  Scenario: [HASH-02] Check hash value length for SHA hashing in postgres
+    Given there is data available
+      | VALUE_STRING  |
+      | postgres_hash |
+    And using hash calculation on table
+      | VALUE_STRING  |
+      | postgres_hash |
+    Then the SAMPLE_DATA table should contain the following data
+      | value_string  | hash_value_length |
+      | postgres_hash | 32                |
\ No newline at end of file
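Each step in the feature above is bound by its text to an implementation in shared_steps.py below, with the step's data table exposed as context.table. A stripped-down sketch of that binding (the decorator is real behave API; the body is illustrative only):

    from behave import given

    @given("there is data available")
    def step_impl(context):
        # context.table holds the data table attached to the Gherkin step
        for row in context.table:
            print(row["VALUE_STRING"])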
diff --git a/test/features/steps/shared_steps.py b/test/features/steps/shared_steps.py
index bb9a48b50..236e56a74 100644
--- a/test/features/steps/shared_steps.py
+++ b/test/features/steps/shared_steps.py
@@ -4,7 +4,8 @@
 from behave.model import Table, Row
 
 from env import env_utils
-from test import dbtvault_generator, dbt_runner, behave_helpers, context_utils, step_helpers
+from test import dbtvault_generator, dbt_runner, behave_helpers, context_utils, step_helpers, context_helpers
+from test import dbt_file_utils
 
 
 def set_stage_metadata(context, stage_model_name) -> dict:
@@ -715,3 +716,48 @@ def step_impl(context, model_name):
 @given("I am using the {database_name} database")
 def step_impl(context, database_name):
     context.database_name = database_name
+
+
+@given("there is data available")
+def step_impl(context):
+    context.sample_table_name = "sample_data"
+
+    context.input_seed_name = context_helpers.sample_data_to_database(context, context.sample_table_name)
+
+    logs = dbt_runner.run_dbt_operation(macro_name='check_table_exists',
+                                        args={"model_name": context.sample_table_name})
+
+    assert f"Table '{context.sample_table_name}' exists." in logs
+
+
+@step("using hash calculation on table")
+def step_impl(context):
+    columns = context.table.headings[0]
+    sample_table_name = context.sample_table_name
+    context.sample_schema_name = "DEVELOPMENT_DBTVAULT_USER"
+    sample_schema_name = context.sample_schema_name
+    model_name = f'{context.sample_table_name}_model'
+
+    sql = f"{{{{- dbtvault_test.get_hash_length(hash_alg, \042{columns}\042, \042{sample_schema_name}\042, \042{sample_table_name}\042) -}}}}"
+
+    dbt_file_utils.generate_model(model_name, sql)
+
+    logs = dbt_runner.run_dbt_models(mode="run", model_names=[model_name])
+
+
+@then("the {table_name} table should contain the following data")
+def step_impl(context, table_name):
+    context.table_name = table_name.lower()
+    context.model_name = f'{context.table_name}_model'
+    context.expected_seed_name = context_helpers.sample_data_to_database(context, f"{context.table_name}_expected")
+    columns_to_compare = context.table.headings
+    context.unique_id = context.table.headings[0]
+
+    dbt_file_utils.write_model_test_properties(actual_model_name=context.model_name,
+                                               expected_model_name=context.expected_seed_name,
+                                               unique_id=context.unique_id,
+                                               columns_to_compare=columns_to_compare)
+
+    logs = dbt_runner.run_dbt_command(["dbt", "test"])
+
+    assert "1 of 1 PASS" in logs
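One detail worth noting in the step implementations above: \042 is the octal escape for a double quote, and the quadrupled braces collapse to literal double braces, so the generated model body is a quoted Jinja call. Rendering the f-string with this scenario's values shows the output (values taken from the steps above):

    columns = "VALUE_STRING"
    schema = "DEVELOPMENT_DBTVAULT_USER"
    table = "sample_data"

    sql = f"{{{{- dbtvault_test.get_hash_length(hash_alg, \042{columns}\042, \042{schema}\042, \042{table}\042) -}}}}"
    print(sql)
    # {{- dbtvault_test.get_hash_length(hash_alg, "VALUE_STRING", "DEVELOPMENT_DBTVAULT_USER", "sample_data") -}}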
diff --git a/test/result_helpers.py b/test/result_helpers.py
new file mode 100755
index 000000000..f1baf6592
--- /dev/null
+++ b/test/result_helpers.py
@@ -0,0 +1,127 @@
+import datetime
+import re
+from typing import List
+
+from behave.model import Scenario, Step, Feature
+from boltons import tbutils
+
+from context_helpers import context_table_to_df
+
+
+def format_scenario_name(scenario_name: str):
+    first_pass = scenario_name.replace('[', '').replace(']', '').replace('-', ' ').replace('/', ' ').replace('\\', ' ')
+
+    minimised_space = re.sub(' +', ' ', first_pass).replace(' ', '_').replace('.', '_')
+
+    return minimised_space.lower()
+
+
+def format_feature_name(feature_name: str):
+    return feature_name.split('.')[0].split('/')[-1]
+
+
+def serialise_feature_initial(feature: Feature, invocation_id: str):
+    file_generated_at = str(datetime.datetime.now(datetime.timezone.utc).isoformat())
+
+    feature_dict = {
+        'feature_name': '',
+        'invocation_id': invocation_id,
+        'feature_code': f"{feature.name.split(']')[0]}]",
+        'feature_path': getattr(feature, 'filename', ''),
+        'generated_at': file_generated_at,
+        'description': getattr(feature, 'description', ''),
+        'status': '',
+        'duration': getattr(feature, 'duration', ''),
+        'keyword': getattr(feature, 'keyword', ''),
+        'exc_traceback': '',
+        'scenarios': [],
+        'tags': getattr(feature, 'tags', '')
+    }
+
+    if feature_path := feature_dict.get('feature_path'):
+        feature_dict['feature_name'] = format_feature_name(feature_path)
+
+    return feature_dict
+
+
+def serialise_feature_post_run(feature: Feature, existing_results: dict):
+    feature_dict = {
+        'feature_name': existing_results['feature_name'],
+        'feature_code': existing_results['feature_code'],
+        'invocation_id': existing_results['invocation_id'],
+        'feature_path': getattr(feature, 'filename', ''),
+        'generated_at': existing_results['generated_at'],
+        'description': getattr(feature, 'description', ''),
+        'status': getattr(feature.status, 'name', ''),
+        'duration': getattr(feature, 'duration', ''),
+        'keyword': getattr(feature, 'keyword', ''),
+        'exc_traceback': getattr(feature, 'exc_traceback', ''),
+        'scenarios': existing_results['scenarios'],
+        'tags': getattr(feature, 'tags', '')
+    }
+
+    if feature_tb := feature_dict['exc_traceback']:
+        feature_dict['exc_traceback'] = tbutils.TracebackInfo.from_traceback(tb=feature_tb).to_dict()
+
+    return feature_dict
+
+
+def serialise_scenario(scenario: Scenario, invocation_id: str):
+    steps = process_steps(scenario.steps, invocation_id)
+
+    scenario_dict = {
+        'scenario_name': '',
+        'feature_name': '',
+        'invocation_id': invocation_id,
+        'feature': getattr(scenario.feature, 'name', ''),
+        'feature_code': '',
+        'feature_path': getattr(scenario, 'filename', ''),
+        'status': getattr(scenario.status, 'name', ''),
+        'steps_hash': steps,
+        'duration': getattr(scenario, 'duration', ''),
+        'exc_traceback': getattr(scenario, 'exc_traceback', ''),
+        'background': getattr(scenario, 'background', ''),
+        'tags': getattr(scenario, 'effective_tags', '')
+    }
+
+    if feature_name := scenario_dict.get('feature'):
+        scenario_dict['feature_code'] = f"{feature_name.split(']')[0]}]"
+
+    if path := scenario_dict.get('feature_path'):
+        scenario_dict['feature_name'] = format_feature_name(path)
+
+    if name := getattr(scenario, 'name', ''):
+        scenario_dict['scenario_name'] = format_scenario_name(name)
+
+    if scenario_tb := scenario_dict['exc_traceback']:
+        scenario_dict['exc_traceback'] = tbutils.TracebackInfo.from_traceback(tb=scenario_tb).to_dict()
+
+    return scenario_dict
+
+
+def process_steps(steps: List[Step], invocation_id: str):
+    step_dict = dict()
+
+    for step_num, step in enumerate(steps):
+        step_dict[step_num] = {
+            'keyword': getattr(step, 'keyword', ''),
+            'invocation_id': invocation_id,
+            'duration': getattr(step, 'duration', ''),
+            'exc_traceback': getattr(step, 'exc_traceback', ''),
+            'step_type': getattr(step, 'step_type', ''),
+            'status': getattr(step.status, 'name', ''),
+            'name': getattr(step, 'name', ''),
+            'full_name': f'{step.keyword} {step.name}',
+            'text': '',
+            'poc_data': dict(),
+            'filename': getattr(step.location, 'filename', ''),
+            'line_number': getattr(step, 'line', '')
+        }
+
+        if step.table:
+            step_dict[step_num]['poc_data'] = context_table_to_df(step.table).to_dict(orient='index')
+
+        if step_tb := step_dict[step_num]['exc_traceback']:
+            step_dict[step_num]['exc_traceback'] = tbutils.TracebackInfo.from_traceback(tb=step_tb).to_dict()
+
+    return step_dict
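format_scenario_name in the new result_helpers module normalises scenario titles into snake_case identifiers by stripping brackets, flattening separators and collapsing whitespace. Applied to the first scenario in hashing.feature, it behaves like this (function copied from above; the sample title contains no slashes, so those replaces are no-ops here):

    import re

    def format_scenario_name(scenario_name: str):
        first_pass = scenario_name.replace('[', '').replace(']', '').replace('-', ' ').replace('/', ' ').replace('\\', ' ')
        minimised_space = re.sub(' +', ' ', first_pass).replace(' ', '_').replace('.', '_')
        return minimised_space.lower()

    print(format_scenario_name("[HASH-01] Check hash value length for MD5 hashing in postgres"))
    # hash_01_check_hash_value_length_for_md5_hashing_in_postgres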
From b6c9aa5fc848bb6b78eac7880a2224307f71841e Mon Sep 17 00:00:00 2001
From: Jocelyn Shannon
Date: Tue, 9 May 2023 09:24:48 +0000
Subject: [PATCH 03/24] Revert "trialling hashing test"

This reverts commit 5c21cb48680d47db039348894a426b9607a340b5.

---
 test/context_helpers.py                       |  40 ------
 test/dbt_file_utils.py                        |  58 --------
 test/dbtvault_generator.py                    |   2 +-
 test/dbtvault_test/macros/harness_helpers.sql |  80 -----------
 test/features/staging/hashing.feature         |  25 ----
 test/features/steps/shared_steps.py           |  48 +------
 test/result_helpers.py                        | 127 ------------------
 7 files changed, 2 insertions(+), 378 deletions(-)
 delete mode 100755 test/context_helpers.py
 delete mode 100755 test/dbt_file_utils.py
 delete mode 100755 test/dbtvault_test/macros/harness_helpers.sql
 delete mode 100644 test/features/staging/hashing.feature
 delete mode 100755 test/result_helpers.py
seed_config: dict, include_columns=None, yml = ruamel.yaml.YAML() yml.preserve_quotes = True yml.indent(sequence=4, offset=2) - properties_path = SEEDS_DIR / 'properties.yml' + properties_path = TEMP_SEED_DIR / 'vault_properties.yml' if include_columns: seed_config['column_types'] = {k: v for k, v in seed_config['column_types'].items() if diff --git a/test/dbtvault_test/macros/harness_helpers.sql b/test/dbtvault_test/macros/harness_helpers.sql deleted file mode 100755 index 6c351f543..000000000 --- a/test/dbtvault_test/macros/harness_helpers.sql +++ /dev/null @@ -1,80 +0,0 @@ -{%- macro check_table_exists(model_name) -%} - - {%- set source_relation = adapter.get_relation(database=target.database, - schema=dbtvault_test.get_schema_name(), - identifier=model_name) -%} - - {%- if source_relation -%} - {%- do log("Table '{}' exists.".format(model_name), true) -%} - {%- do return(True) %} - {%- else -%} - {%- do log("Table '{}' does not exist.".format(model_name), true) -%} - {%- do return(False) %} - {%- endif -%} - - {%- do return(False) %} - -{%- endmacro -%} - -{%- macro check_source_exists(source_name, table_name) -%} - - {%- set source = source(source_name, table_name) -%} - {%- set source_relation = adapter.get_relation(database=source.database, - schema=source.schema, - identifier=source.identifier)-%} - - - {%- if source_relation.is_table or source_relation.is_view -%} - {%- do log("Source '{}:{}' exists.".format(source_name, table_name), true) -%} - {%- do return(True) %} - {%- else -%} - {%- do log("Source '{}:{}' does not exist.".format(source_name, table_name), true) -%} - {%- do return(False) %} - {%- endif -%} - -{%- endmacro -%} - - -{%- macro recreate_schema(schema_name=None) -%} - - {%- if not schema_name -%} - {%- set schema_name = dbtvault_test.get_schema_name() %} - {%- endif -%} - - {%- set schema_relation = api.Relation.create(database=target.database, schema=schema_name) -%} - - {%- do adapter.drop_schema(schema_relation) -%} - {%- do adapter.create_schema(schema_relation) -%} - -{%- endmacro -%} - - -{%- macro get_hash_length(hash_alg, columns, schema_name, table_name) -%} - - {{- adapter.dispatch('get_hash_length', 'dbtvault_test')(hash_alg=hash_alg, columns=columns, schema_name=schema_name, table_name=table_name) -}} - -{%- endmacro -%} - - -{%- macro postgres__get_hash_length(hash_alg, columns, schema_name, table_name) -%} - - {%- set hash_alg = var('hash', 'MD5') -%} - {%- if hash_alg == 'MD5' -%} - - WITH CTE AS ( - SELECT DECODE(MD5("{{ columns }}"), 'hex') AS HK - , "{{ columns }}" AS {{ columns }} - FROM "{{ schema_name }}".{{ table_name }} - ) - SELECT - {{ columns }} - , length(HK) AS HASH_VALUE_LENGTH - FROM CTE - - {%- else -%} - {%- do log('Test is currently only supported for MD5 hashing', info=True) -%} - {%- do return(False) -%} - {%- endif -%} - - -{%- endmacro -%} \ No newline at end of file diff --git a/test/features/staging/hashing.feature b/test/features/staging/hashing.feature deleted file mode 100644 index 0fcb69e55..000000000 --- a/test/features/staging/hashing.feature +++ /dev/null @@ -1,25 +0,0 @@ -@postgres -Feature: [HASH] Check the hash string length for postgres - - Scenario: [HASH-01] Check hash value length for MD5 hashing in postgres - Given there is data available - | VALUE_STRING | - | postgres_hash | - And using hash calculation on table - | VALUE_STRING | - | postgres_hash | - Then the SAMPLE_DATA table should contain the following data - | value_string | hash_value_length | - | postgres_hash | 16 | - - @enable_sha - Scenario: 
[HASH-02] Check hash value length for SHA hashing in postgres - Given there is data available - | VALUE_STRING | - | postgres_hash | - And using hash calculation on table - | VALUE_STRING | - | postgres_hash | - Then the SAMPLE_DATA table should contain the following data - | value_string | hash_value_length | - | postgres_hash | 32 | \ No newline at end of file diff --git a/test/features/steps/shared_steps.py b/test/features/steps/shared_steps.py index 236e56a74..bb9a48b50 100644 --- a/test/features/steps/shared_steps.py +++ b/test/features/steps/shared_steps.py @@ -4,8 +4,7 @@ from behave.model import Table, Row from env import env_utils -from test import dbtvault_generator, dbt_runner, behave_helpers, context_utils, step_helpers, context_helpers -from test import dbt_file_utils +from test import dbtvault_generator, dbt_runner, behave_helpers, context_utils, step_helpers def set_stage_metadata(context, stage_model_name) -> dict: @@ -716,48 +715,3 @@ def step_impl(context, model_name): @given("I am using the {database_name} database") def step_impl(context, database_name): context.database_name = database_name - - -@given("there is data available") -def step_impl(context): - context.sample_table_name = "sample_data" - - context.input_seed_name = context_helpers.sample_data_to_database(context, context.sample_table_name) - - logs = dbt_runner.run_dbt_operation(macro_name='check_table_exists', - args={"model_name": context.sample_table_name}) - - assert f"Table '{context.sample_table_name}' exists." in logs - - -@step("using hash calculation on table") -def step_impl(context): - columns = context.table.headings[0] - sample_table_name = context.sample_table_name - context.sample_schema_name = "DEVELOPMENT_DBTVAULT_USER" - sample_schema_name = context.sample_schema_name - model_name = f'{context.sample_table_name}_model' - - sql = f"{{{{- dbtvault_test.get_hash_length(hash_alg, \042{columns}\042, \042{sample_schema_name}\042, \042{sample_table_name}\042) -}}}}" - - dbt_file_utils.generate_model(model_name, sql) - - logs = dbt_runner.run_dbt_models(mode="run", model_names=[model_name]) - - -@then("the {table_name} table should contain the following data") -def step_impl(context, table_name): - context.table_name = table_name.lower() - context.model_name = f'{context.table_name}_model' - context.expected_seed_name = context_helpers.sample_data_to_database(context, f"{context.table_name}_expected") - columns_to_compare = context.table.headings - context.unique_id = context.table.headings[0] - - dbt_file_utils.write_model_test_properties(actual_model_name=context.model_name, - expected_model_name=context.expected_seed_name, - unique_id=context.unique_id, - columns_to_compare=columns_to_compare) - - logs = dbt_runner.run_dbt_command(["dbt", "test"]) - - assert "1 of 1 PASS" in logs diff --git a/test/result_helpers.py b/test/result_helpers.py deleted file mode 100755 index f1baf6592..000000000 --- a/test/result_helpers.py +++ /dev/null @@ -1,127 +0,0 @@ -import datetime -import re -from typing import List - -from behave.model import Scenario, Step, Feature -from boltons import tbutils - -from context_helpers import context_table_to_df - - -def format_scenario_name(scenario_name: str): - first_pass = scenario_name.replace('[', '').replace(']', '').replace('-', ' ').replace('/', ' ').replace('\\', ' ') - - minimised_space = re.sub(' +', ' ', first_pass).replace(' ', '_').replace('.', '_') - - return minimised_space.lower() - - -def format_feature_name(feature_name: str): - return 
feature_name.split('.')[0].split('/')[-1] - - -def serialise_feature_initial(feature: Feature, invocation_id: str): - file_generated_at = str(datetime.datetime.now(datetime.timezone.utc).isoformat()) - - feature_dict = { - 'feature_name': '', - 'invocation_id': invocation_id, - 'feature_code': f"{feature.name.split(']')[0]}]", - 'feature_path': getattr(feature, 'filename', ''), - 'generated_at': file_generated_at, - 'description': getattr(feature, 'description', ''), - 'status': '', - 'duration': getattr(feature, 'duration', ''), - 'keyword': getattr(feature, 'keyword', ''), - 'exc_traceback': '', - 'scenarios': [], - 'tags': getattr(feature, 'tags', '') - } - - if feature_path := feature_dict.get('feature_path'): - feature_dict['feature_name'] = format_feature_name(feature_path) - - return feature_dict - - -def serialise_feature_post_run(feature: Feature, existing_results: dict): - feature_dict = { - 'feature_name': existing_results['feature_name'], - 'feature_code': existing_results['feature_code'], - 'invocation_id': existing_results['invocation_id'], - 'feature_path': getattr(feature, 'filename', ''), - 'generated_at': existing_results['generated_at'], - 'description': getattr(feature, 'description', ''), - 'status': getattr(feature.status, 'name', ''), - 'duration': getattr(feature, 'duration', ''), - 'keyword': getattr(feature, 'keyword', ''), - 'exc_traceback': getattr(feature, 'exc_traceback', ''), - 'scenarios': existing_results['scenarios'], - 'tags': getattr(feature, 'tags', '') - } - - if feature_tb := feature_dict['exc_traceback']: - feature_dict['exc_traceback'] = tbutils.TracebackInfo.from_traceback(tb=feature_tb).to_dict() - - return feature_dict - - -def serialise_scenario(scenario: Scenario, invocation_id: str): - steps = process_steps(scenario.steps, invocation_id) - - scenario_dict = { - 'scenario_name': '', - 'feature_name': '', - 'invocation_id': invocation_id, - 'feature': getattr(scenario.feature, 'name', ''), - 'feature_code': '', - 'feature_path': getattr(scenario, 'filename', ''), - 'status': getattr(scenario.status, 'name', ''), - 'steps_hash': steps, - 'duration': getattr(scenario, 'duration', ''), - 'exc_traceback': getattr(scenario, 'exc_traceback', ''), - 'background': getattr(scenario, 'background', ''), - 'tags': getattr(scenario, 'effective_tags', '') - } - - if feature_name := scenario_dict.get('feature'): - scenario_dict['feature_code'] = f"{feature_name.split(']')[0]}]" - - if path := scenario_dict.get('feature_path'): - scenario_dict['feature_name'] = format_feature_name(path) - - if name := getattr(scenario, 'name', ''): - scenario_dict['scenario_name'] = format_scenario_name(name) - - if scenario_tb := scenario_dict['exc_traceback']: - scenario_dict['exc_traceback'] = tbutils.TracebackInfo.from_traceback(tb=scenario_tb).to_dict() - - return scenario_dict - - -def process_steps(steps: List[Step], invocation_id: str): - step_dict = dict() - - for step_num, step in enumerate(steps): - step_dict[step_num] = { - 'keyword': getattr(step, 'keyword', ''), - 'invocation_id': invocation_id, - 'duration': getattr(step, 'duration', ''), - 'exc_traceback': getattr(step, 'exc_traceback', ''), - 'step_type': getattr(step, 'step_type', ''), - 'status': getattr(step.status, 'name', ''), - 'name': getattr(step, 'name', ''), - 'full_name': f'{step.keyword} {step.name}', - 'text': '', - 'poc_data': dict(), - 'filename': getattr(step.location, 'filename', ''), - 'line_number': getattr(step, 'line', '') - } - - if step.table: - step_dict[step_num]['poc_data'] = 
context_table_to_df(step.table).to_dict(orient='index') - - if step_tb := step_dict[step_num]['exc_traceback']: - step_dict[step_num]['exc_traceback'] = tbutils.TracebackInfo.from_traceback(tb=step_tb).to_dict() - - return step_dict From fcd79c225f451ab9f3239b6213f917ea75564f50 Mon Sep 17 00:00:00 2001 From: Alex Higgs Date: Fri, 12 May 2023 09:30:16 +0000 Subject: [PATCH 04/24] Rename dev folder --- .gitignore | 2 +- .run/All Features.run.xml | 2 +- .run/Bridge Features.run.xml | 2 +- .run/Effectivity Satellite Features.run.xml | 2 +- .run/Harness Tests.run.xml | 2 +- .run/Hub Features.run.xml | 2 +- .run/Link Features.run.xml | 2 +- .run/MA Satellite Features.run.xml | 2 +- .run/Macro Tests.run.xml | 2 +- .run/PIT Features.run.xml | 2 +- .run/Satellite Features.run.xml | 2 +- .run/Staging Features.run.xml | 2 +- .run/T Link Features.run.xml | 2 +- .run/XTS Features.run.xml | 2 +- .../.github/ISSUE_TEMPLATE/bug_report.md | 0 .../.github/ISSUE_TEMPLATE/feature_request.md | 0 {dbtvault-dev => automate-dv-dev}/.gitignore | 0 {dbtvault-dev => automate-dv-dev}/CODE_OF_CONDUCT.md | 0 {dbtvault-dev => automate-dv-dev}/CONTRIBUTING.md | 0 {dbtvault-dev => automate-dv-dev}/LICENSE.md | 0 {dbtvault-dev => automate-dv-dev}/NOTICE | 0 {dbtvault-dev => automate-dv-dev}/README.md | 0 {dbtvault-dev => automate-dv-dev}/dbt_project.yml | 0 .../macros/internal/helpers/dateadd.sql | 0 .../macros/internal/helpers/is_checks.sql | 0 .../macros/internal/helpers/logging/log_relation_sources.sql | 0 .../macros/internal/helpers/prepend_generated_by.sql | 0 .../helpers/stage_processing_macros/extract_column_names.sql | 0 .../stage_processing_macros/extract_null_column_names.sql | 0 .../internal/helpers/stage_processing_macros/print_list.sql | 0 .../stage_processing_macros/process_columns_to_escape.sql | 0 .../stage_processing_macros/process_columns_to_select.sql | 0 .../stage_processing_macros/process_hash_column_excludes.sql | 0 .../macros/internal/metadata_processing/alias.sql | 0 .../macros/internal/metadata_processing/alias_all.sql | 0 .../macros/internal/metadata_processing/as_constant.sql | 0 .../metadata_processing/check_required_parameters.sql | 0 .../macros/internal/metadata_processing/concat_ws.sql | 0 .../internal/metadata_processing/escape_column_name.sql | 0 .../internal/metadata_processing/escape_column_names.sql | 0 .../internal/metadata_processing/expand_column_list.sql | 0 .../internal/metadata_processing/get_escape_characters.sql | 0 .../macros/internal/metadata_processing/multikey.sql | 0 .../metadata_processing/process_payload_column_excludes.sql | 0 .../macros/materialisations/drop_temporary.sql | 0 .../materialisations/incremental_bridge_materialization.sql | 0 .../materialisations/incremental_pit_bridge_replace.sql | 0 .../materialisations/incremental_pit_materialization.sql | 0 .../macros/materialisations/mat_is_checks.sql | 0 .../materialisations/period_mat_helpers/check_datediff.sql | 0 .../period_mat_helpers/get_period_boundaries.sql | 0 .../period_mat_helpers/get_period_filter_sql.sql | 0 .../period_mat_helpers/get_period_of_load.sql | 0 .../period_mat_helpers/get_start_stop_dates.sql | 0 .../replace_placeholder_with_period_filter.sql | 0 .../materialisations/rank_mat_helpers/get_min_max_ranks.sql | 0 .../rank_mat_helpers/replace_placeholder_with_rank_filter.sql | 0 .../macros/materialisations/shared_helpers.sql | 0 .../vault_insert_by_period_materialization.sql | 0 .../materialisations/vault_insert_by_rank_materialization.sql | 0 .../macros/staging/derive_columns.sql | 0 
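The patch below records a wholesale folder rename from dbtvault-dev to automate-dv-dev: every file moves with 100% similarity, and only the path references in .gitignore, the .run configurations and tasks.py change. The equivalent local operation is a single move that git then reports file by file (a sketch, assuming a git working tree at the repository root):

    import subprocess

    # git detects and lists the per-file renames shown in the diffstat below
    subprocess.run(["git", "mv", "dbtvault-dev", "automate-dv-dev"], check=True)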
From fcd79c225f451ab9f3239b6213f917ea75564f50 Mon Sep 17 00:00:00 2001
From: Alex Higgs
Date: Fri, 12 May 2023 09:30:16 +0000
Subject: [PATCH 04/24] Rename dev folder

---
 .gitignore | 2 +-
 .run/All Features.run.xml | 2 +-
 .run/Bridge Features.run.xml | 2 +-
 .run/Effectivity Satellite Features.run.xml | 2 +-
 .run/Harness Tests.run.xml | 2 +-
 .run/Hub Features.run.xml | 2 +-
 .run/Link Features.run.xml | 2 +-
 .run/MA Satellite Features.run.xml | 2 +-
 .run/Macro Tests.run.xml | 2 +-
 .run/PIT Features.run.xml | 2 +-
 .run/Satellite Features.run.xml | 2 +-
 .run/Staging Features.run.xml | 2 +-
 .run/T Link Features.run.xml | 2 +-
 .run/XTS Features.run.xml | 2 +-
 .../.github/ISSUE_TEMPLATE/bug_report.md | 0
 .../.github/ISSUE_TEMPLATE/feature_request.md | 0
 {dbtvault-dev => automate-dv-dev}/.gitignore | 0
 {dbtvault-dev => automate-dv-dev}/CODE_OF_CONDUCT.md | 0
 {dbtvault-dev => automate-dv-dev}/CONTRIBUTING.md | 0
 {dbtvault-dev => automate-dv-dev}/LICENSE.md | 0
 {dbtvault-dev => automate-dv-dev}/NOTICE | 0
 {dbtvault-dev => automate-dv-dev}/README.md | 0
 {dbtvault-dev => automate-dv-dev}/dbt_project.yml | 0
 .../macros/internal/helpers/dateadd.sql | 0
 .../macros/internal/helpers/is_checks.sql | 0
 .../macros/internal/helpers/logging/log_relation_sources.sql | 0
 .../macros/internal/helpers/prepend_generated_by.sql | 0
 .../helpers/stage_processing_macros/extract_column_names.sql | 0
 .../stage_processing_macros/extract_null_column_names.sql | 0
 .../internal/helpers/stage_processing_macros/print_list.sql | 0
 .../stage_processing_macros/process_columns_to_escape.sql | 0
 .../stage_processing_macros/process_columns_to_select.sql | 0
 .../stage_processing_macros/process_hash_column_excludes.sql | 0
 .../macros/internal/metadata_processing/alias.sql | 0
 .../macros/internal/metadata_processing/alias_all.sql | 0
 .../macros/internal/metadata_processing/as_constant.sql | 0
 .../metadata_processing/check_required_parameters.sql | 0
 .../macros/internal/metadata_processing/concat_ws.sql | 0
 .../internal/metadata_processing/escape_column_name.sql | 0
 .../internal/metadata_processing/escape_column_names.sql | 0
 .../internal/metadata_processing/expand_column_list.sql | 0
 .../internal/metadata_processing/get_escape_characters.sql | 0
 .../macros/internal/metadata_processing/multikey.sql | 0
 .../metadata_processing/process_payload_column_excludes.sql | 0
 .../macros/materialisations/drop_temporary.sql | 0
 .../materialisations/incremental_bridge_materialization.sql | 0
 .../materialisations/incremental_pit_bridge_replace.sql | 0
 .../materialisations/incremental_pit_materialization.sql | 0
 .../macros/materialisations/mat_is_checks.sql | 0
 .../materialisations/period_mat_helpers/check_datediff.sql | 0
 .../period_mat_helpers/get_period_boundaries.sql | 0
 .../period_mat_helpers/get_period_filter_sql.sql | 0
 .../period_mat_helpers/get_period_of_load.sql | 0
 .../period_mat_helpers/get_start_stop_dates.sql | 0
 .../replace_placeholder_with_period_filter.sql | 0
 .../materialisations/rank_mat_helpers/get_min_max_ranks.sql | 0
 .../rank_mat_helpers/replace_placeholder_with_rank_filter.sql | 0
 .../macros/materialisations/shared_helpers.sql | 0
 .../vault_insert_by_period_materialization.sql | 0
 .../vault_insert_by_rank_materialization.sql | 0
 .../macros/staging/derive_columns.sql | 0
 .../macros/staging/hash_columns.sql | 0
 .../macros/staging/null_columns.sql | 0
 .../macros/staging/rank_columns.sql | 0
 .../macros/staging/source_columns.sql | 0
 {dbtvault-dev => automate-dv-dev}/macros/staging/stage.sql | 0
 .../macros/supporting/as_of_date_window.sql | 0
 .../macros/supporting/bridge_shared.sql | 0
 .../macros/supporting/casting/cast_binary.sql | 0
 .../macros/supporting/casting/cast_date.sql | 0
 .../macros/supporting/casting/cast_datetime.sql | 0
 .../macros/supporting/data_types/type_binary.sql | 0
 .../macros/supporting/data_types/type_string.sql | 0
 .../macros/supporting/data_types/type_timestamp.sql | 0
 .../macros/supporting/get_query_results_as_dict.sql | 0
 .../macros/supporting/ghost_records/binary_ghost.sql | 0
 .../macros/supporting/ghost_records/create_ghost_record.sql | 0
 .../macros/supporting/ghost_records/date_ghost.sql | 0
 .../macros/supporting/ghost_records/null_ghost.sql | 0
 {dbtvault-dev => automate-dv-dev}/macros/supporting/hash.sql | 0
 .../macros/supporting/hash_components/null_expression.sql | 0
 .../macros/supporting/hash_components/select_hash_alg.sql | 0
 .../supporting/hash_components/standard_column_wrapper.sql | 0
 .../macros/supporting/max_datetime.sql | 0
 .../macros/supporting/prefix.sql | 0
 .../macros/tables/bigquery/bridge.sql | 0
 .../macros/tables/bigquery/eff_sat.sql | 0
 .../macros/tables/bigquery/hub.sql | 0
 .../macros/tables/bigquery/link.sql | 0
 .../macros/tables/bigquery/ma_sat.sql | 0
 .../macros/tables/bigquery/pit.sql | 0
 .../macros/tables/bigquery/sat.sql | 0
 .../macros/tables/bigquery/t_link.sql | 0
 .../macros/tables/bigquery/xts.sql | 0
 .../macros/tables/databricks/hub.sql | 0
 .../macros/tables/databricks/link.sql | 0
 .../macros/tables/databricks/sat.sql | 0
 .../macros/tables/postgres/hub.sql | 0
 .../macros/tables/postgres/link.sql | 0
 .../macros/tables/postgres/sat.sql | 0
 .../macros/tables/snowflake/bridge.sql | 0
 .../macros/tables/snowflake/eff_sat.sql | 0
 .../macros/tables/snowflake/hub.sql | 0
 .../macros/tables/snowflake/link.sql | 0
 .../macros/tables/snowflake/ma_sat.sql | 0
 .../macros/tables/snowflake/pit.sql | 0
 .../macros/tables/snowflake/sat.sql | 0
 .../macros/tables/snowflake/t_link.sql | 0
 .../macros/tables/snowflake/xts.sql | 0
 .../macros/tables/sqlserver/bridge.sql | 0
 .../macros/tables/sqlserver/eff_sat.sql | 0
 .../macros/tables/sqlserver/hub.sql | 0
 .../macros/tables/sqlserver/link.sql | 0
 .../macros/tables/sqlserver/ma_sat.sql | 0
 .../macros/tables/sqlserver/pit.sql | 0
 .../macros/tables/sqlserver/sat.sql | 0
 .../macros/tables/sqlserver/t_link.sql | 0
 .../macros/tables/sqlserver/xts.sql | 0
 {dbtvault-dev => automate-dv-dev}/packages.yml | 0
 tasks.py | 4 ++--
 120 files changed, 16 insertions(+), 16 deletions(-)
 rename {dbtvault-dev => automate-dv-dev}/.github/ISSUE_TEMPLATE/bug_report.md (100%)
 rename {dbtvault-dev => automate-dv-dev}/.github/ISSUE_TEMPLATE/feature_request.md (100%)
 rename {dbtvault-dev => automate-dv-dev}/.gitignore (100%)
 rename {dbtvault-dev => automate-dv-dev}/CODE_OF_CONDUCT.md (100%)
 rename {dbtvault-dev => automate-dv-dev}/CONTRIBUTING.md (100%)
 rename {dbtvault-dev => automate-dv-dev}/LICENSE.md (100%)
 rename {dbtvault-dev => automate-dv-dev}/NOTICE (100%)
 rename {dbtvault-dev => automate-dv-dev}/README.md (100%)
 rename {dbtvault-dev => automate-dv-dev}/dbt_project.yml (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/dateadd.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/is_checks.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/logging/log_relation_sources.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/prepend_generated_by.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/stage_processing_macros/extract_column_names.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/stage_processing_macros/extract_null_column_names.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/stage_processing_macros/print_list.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/stage_processing_macros/process_columns_to_escape.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/stage_processing_macros/process_columns_to_select.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/helpers/stage_processing_macros/process_hash_column_excludes.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/alias.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/alias_all.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/as_constant.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/check_required_parameters.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/concat_ws.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/escape_column_name.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/escape_column_names.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/expand_column_list.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/get_escape_characters.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/multikey.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/internal/metadata_processing/process_payload_column_excludes.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/drop_temporary.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/incremental_bridge_materialization.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/incremental_pit_bridge_replace.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/incremental_pit_materialization.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/mat_is_checks.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/period_mat_helpers/check_datediff.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/period_mat_helpers/get_period_boundaries.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/period_mat_helpers/get_period_filter_sql.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/period_mat_helpers/get_period_of_load.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/period_mat_helpers/get_start_stop_dates.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/period_mat_helpers/replace_placeholder_with_period_filter.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/rank_mat_helpers/get_min_max_ranks.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/rank_mat_helpers/replace_placeholder_with_rank_filter.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/shared_helpers.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/vault_insert_by_period_materialization.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/materialisations/vault_insert_by_rank_materialization.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/staging/derive_columns.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/staging/hash_columns.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/staging/null_columns.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/staging/rank_columns.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/staging/source_columns.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/staging/stage.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/as_of_date_window.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/bridge_shared.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/casting/cast_binary.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/casting/cast_date.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/casting/cast_datetime.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/data_types/type_binary.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/data_types/type_string.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/data_types/type_timestamp.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/get_query_results_as_dict.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/ghost_records/binary_ghost.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/ghost_records/create_ghost_record.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/ghost_records/date_ghost.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/ghost_records/null_ghost.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/hash.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/hash_components/null_expression.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/hash_components/select_hash_alg.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/hash_components/standard_column_wrapper.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/max_datetime.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/supporting/prefix.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/bridge.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/eff_sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/hub.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/ma_sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/pit.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/t_link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/bigquery/xts.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/databricks/hub.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/databricks/link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/databricks/sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/postgres/hub.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/postgres/link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/postgres/sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/bridge.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/eff_sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/hub.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/ma_sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/pit.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/t_link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/snowflake/xts.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/bridge.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/eff_sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/hub.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/ma_sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/pit.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/sat.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/t_link.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/macros/tables/sqlserver/xts.sql (100%)
 rename {dbtvault-dev => automate-dv-dev}/packages.yml (100%)

diff --git a/.gitignore b/.gitignore
index c24921c4f..e320e1c27 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,7 +19,7 @@ test/backup_files/dbt_project.bak.yml
 *invoke.yml
 *.pytest_cache
 
-/dbtvault-dev/dbt_modules/
+/automate-dv-dev/dbt_modules/
 
 test/dbtvault_test/seeds/vault_properties.yml
 
diff --git a/.run/All Features.run.xml b/.run/All Features.run.xml
index 4d631ec7a..3e94077be 100644
--- a/.run/All Features.run.xml
+++ b/.run/All Features.run.xml
@@ -1,6 +1,6 @@
-
+