Skip to content

Commit

Permalink
Enable CI for forked PRs (dataform-co#832)
Browse files Browse the repository at this point in the history
* Integration tests passing

* Cleanup

* Checkpoint

* Migrate over to new project and keychain

* Remove old cloudbuild configs

* Empty

* Empty

* Empty

* Test smaller machine

* Revert "Test smaller machine"

This reverts commit a520725.

* Comment
  • Loading branch information
lewish authored Jun 25, 2020
1 parent a7c3d13 commit af093e3
Show file tree
Hide file tree
Showing 16 changed files with 48 additions and 31 deletions.
File renamed without changes.
12 changes: 0 additions & 12 deletions cloudbuild-triggers.json

This file was deleted.

9 changes: 9 additions & 0 deletions scripts/create_secret
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#!/bin/bash
# Encrypts a secret file with Cloud KMS for use in CI builds.
# Usage: create_secret <plaintext-file>
# Reads <plaintext-file> and writes <plaintext-file>.enc alongside it.
set -euo pipefail

if [[ -z "${1:-}" ]]; then
  echo "Usage: $0 <plaintext-file>" >&2
  exit 1
fi

# Quote "$1" so paths containing spaces or glob characters are passed intact.
gcloud kms encrypt \
  --ciphertext-file="$1.enc" \
  --plaintext-file="$1" \
  --project=dataform-public \
  --keyring=dataform-builder-keyring \
  --key=dataform-builder-key \
  --location=global
9 changes: 9 additions & 0 deletions scripts/decrypt_secret
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#!/bin/bash
# Decrypts a Cloud KMS encrypted secret file for use in CI builds.
# Usage: decrypt_secret <plaintext-file>
# Reads <plaintext-file>.enc and writes the decrypted <plaintext-file>.
set -euo pipefail

if [[ -z "${1:-}" ]]; then
  echo "Usage: $0 <plaintext-file>" >&2
  exit 1
fi

# Quote "$1" so paths containing spaces or glob characters are passed intact.
gcloud kms decrypt \
  --ciphertext-file="$1.enc" \
  --plaintext-file="$1" \
  --project=dataform-public \
  --keyring=dataform-builder-keyring \
  --key=dataform-builder-key \
  --location=global
20 changes: 12 additions & 8 deletions test_credentials/BUILD
Original file line number Diff line number Diff line change
# Decrypts the BigQuery test credentials via the dataform-public KMS keyring.
gcloud_secret(
    name = "bigquery.json",
    testonly = 1,
    ciphertext_file = ":bigquery.json.enc",
    project = "dataform-public",
    key = "dataform-builder-key",
    keyring = "dataform-builder-keyring",
)

# Decrypts the Redshift test credentials via the dataform-public KMS keyring.
gcloud_secret(
    name = "redshift.json",
    testonly = 1,
    ciphertext_file = ":redshift.json.enc",
    project = "dataform-public",
    key = "dataform-builder-key",
    keyring = "dataform-builder-keyring",
)

# Decrypts the Snowflake test credentials via the dataform-public KMS keyring.
gcloud_secret(
    name = "snowflake.json",
    testonly = 1,
    ciphertext_file = ":snowflake.json.enc",
    project = "dataform-public",
    key = "dataform-builder-key",
    keyring = "dataform-builder-keyring",
)

# Decrypts the SQL Data Warehouse test credentials via the dataform-public KMS keyring.
gcloud_secret(
    name = "sqldatawarehouse.json",
    testonly = 1,
    ciphertext_file = ":sqldatawarehouse.json.enc",
    project = "dataform-public",
    key = "dataform-builder-key",
    keyring = "dataform-builder-keyring",
)
Binary file modified test_credentials/bigquery.json.enc
Binary file not shown.
Binary file modified test_credentials/redshift.json.enc
Binary file not shown.
Binary file modified test_credentials/snowflake.json.enc
Binary file not shown.
Binary file modified test_credentials/sqldatawarehouse.json.enc
Binary file not shown.
6 changes: 5 additions & 1 deletion tests/integration/redshift.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,11 @@ suite("@dataform/integration/redshift", ({ before, after }) => {
dbadapter.execute(adapter.dropIfExists(assertion.target, "view"))
)
);
await dropFunctions.reduce((promiseChain, fn) => promiseChain.then(fn), Promise.resolve());
try {
await dropFunctions.reduce((promiseChain, fn) => promiseChain.then(fn), Promise.resolve());
} catch (e) {
// This seems to throw if the tables don't exist.
}

// Run the tests.
const testResults = await dfapi.test(dbadapter, compiledGraph.tests);
Expand Down
11 changes: 6 additions & 5 deletions tests/integration/snowflake.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,8 @@ suite("@dataform/integration/snowflake", ({ before, after }) => {
await dropAllTables(tablesToDelete, adapter, dbadapter);

// Drop schemas to make sure schema creation works.
await dbadapter.execute(`drop schema if exists "TADA"."df_integration_test"`);
await dbadapter.execute(`drop schema if exists "TADA2"."df_integration_test"`);
await dbadapter.execute(`drop schema if exists "INTEGRATION_TESTS"."df_integration_test"`);
await dbadapter.execute(`drop schema if exists "INTEGRATION_TESTS2"."df_integration_test"`);

// Run the tests.
const testResults = await dfapi.test(dbadapter, compiledGraph.tests);
Expand Down Expand Up @@ -71,6 +71,7 @@ suite("@dataform/integration/snowflake", ({ before, after }) => {
let executionGraph = await dfapi.build(compiledGraph, {}, dbadapter);
let executedGraph = await dfapi.run(executionGraph, dbadapter).result();

const executionActionMap = keyBy(executionGraph.actions, v => v.name);
const actionMap = keyBy(executedGraph.actions, v => v.name);
expect(Object.keys(actionMap).length).eql(14);

Expand Down Expand Up @@ -106,7 +107,7 @@ suite("@dataform/integration/snowflake", ({ before, after }) => {

// Check the status of the view in the non-default database.
const tada2DatabaseView = keyBy(compiledGraph.tables, t => t.name)[
"TADA2.DF_INTEGRATION_TEST.SAMPLE_DATA_2"
"INTEGRATION_TESTS2.DF_INTEGRATION_TEST.SAMPLE_DATA_2"
];
const tada2DatabaseViewRows = await getTableRows(tada2DatabaseView.target, adapter, dbadapter);
expect(tada2DatabaseViewRows.length).equals(3);
Expand All @@ -119,7 +120,7 @@ suite("@dataform/integration/snowflake", ({ before, after }) => {
expect(incrementalRows.length).equals(3);

const incrementalTable2 = keyBy(compiledGraph.tables, t => t.name)[
"TADA2.DF_INTEGRATION_TEST.EXAMPLE_INCREMENTAL_TADA2"
"INTEGRATION_TESTS2.DF_INTEGRATION_TEST.EXAMPLE_INCREMENTAL_TADA2"
];
const incrementalRows2 = await getTableRows(incrementalTable2.target, adapter, dbadapter);
expect(incrementalRows2.length).equals(3);
Expand Down Expand Up @@ -157,7 +158,7 @@ suite("@dataform/integration/snowflake", ({ before, after }) => {
expect(incrementalRows.length).equals(5);

incrementalTable = keyBy(compiledGraph.tables, t => t.name)[
"TADA2.DF_INTEGRATION_TEST.EXAMPLE_INCREMENTAL_TADA2"
"INTEGRATION_TESTS2.DF_INTEGRATION_TEST.EXAMPLE_INCREMENTAL_TADA2"
];
incrementalRows = await getTableRows(incrementalTable2.target, adapter, dbadapter);
expect(incrementalRows.length).equals(5);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ FROM
)

// This example uses a non default database; metadata retrieved should indicate to increment regardless.
config { type: "incremental", database: "TADA2" }
config { type: "incremental", database: "INTEGRATION_TESTS2" }

SELECT user_timestamp, user_id
FROM example_data
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
config {
type: "view",
database: "TADA2",
database: "INTEGRATION_TESTS2",
assertions: {
uniqueKey: ["val1", "val2"],
nonNull: [
Expand Down
5 changes: 2 additions & 3 deletions tests/integration/sqldatawarehouse.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ suite("@dataform/integration/sqldatawarehouse", ({ before, after }) => {
let executionGraph = await dfapi.build(compiledGraph, {}, dbadapter);
let executedGraph = await dfapi.run(executionGraph, dbadapter).result();

const executionActionMap = keyBy(executionGraph.actions, v => v.name);
const actionMap = keyBy(executedGraph.actions, v => v.name);
expect(Object.keys(actionMap).length).eql(11);

Expand All @@ -80,9 +81,7 @@ suite("@dataform/integration/sqldatawarehouse", ({ before, after }) => {
const expectedResult = expectedFailedActions.includes(actionName)
? dataform.ActionResult.ExecutionStatus.FAILED
: dataform.ActionResult.ExecutionStatus.SUCCESSFUL;
expect(actionMap[actionName].status, JSON.stringify(executionGraph, null, 4)).equals(
expectedResult
);
expect(actionMap[actionName].status).equals(expectedResult);
}

expect(
Expand Down
2 changes: 2 additions & 0 deletions tools/gcloud/secrets.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ def _gcloud_secret_impl(ctx):
"--keyring=%s" % ctx.attr.keyring,
"--key=%s" % ctx.attr.key,
"--location=%s" % ctx.attr.location,
"--project=%s" % ctx.attr.project,
],
execution_requirements = {
"local": "1",
Expand All @@ -26,6 +27,7 @@ gcloud_secret = rule(
"ciphertext_file": attr.label(allow_single_file = True),
"keyring": attr.string(default = "", mandatory = True),
"key": attr.string(default = "", mandatory = True),
"project": attr.string(default = "", mandatory = True),
"location": attr.string(default = "global"),
},
)
1 change: 1 addition & 0 deletions tools/stackdriver-github-bridge/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ gcloud_secret(
# NOTE(review): unlike test_credentials/BUILD, this secret stays on the old
# "dataform-co-build" key/keyring in the "tada-analytics" project — presumably
# intentional since only the test credentials moved; confirm against the keyring owner.
name = "env.yaml",
ciphertext_file = ":env.yaml.enc",
key = "dataform-co-build",
project = "tada-analytics",
keyring = "dataform-co-build",
)

Expand Down

0 comments on commit af093e3

Please sign in to comment.