Skip to content

Commit b4b96ab

Browse files
authored
feat(ui/ingest): unity-catalog => databricks (#14636)
1 parent 33089b4 commit b4b96ab

File tree

8 files changed

+39
-24
lines changed

8 files changed

+39
-24
lines changed

datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,17 @@ enum IngestionSourceType {
105105

106106
const DEFAULT_PAGE_SIZE = 25;
107107

108+
const mapSourceTypeAliases = <T extends { type: string }>(source?: T): T | undefined => {
109+
if (source) {
110+
let { type } = source;
111+
if (type === 'unity-catalog') {
112+
type = 'databricks';
113+
}
114+
return { ...source, type };
115+
}
116+
return undefined;
117+
};
118+
108119
const removeExecutionsFromIngestionSource = (source) => {
109120
if (source) {
110121
return {
@@ -561,7 +572,7 @@ export const IngestionSourceList = ({ showCreateModal, setShowCreateModal }: Pro
561572
</PaginationContainer>
562573
</SourceContainer>
563574
<IngestionSourceBuilderModal
564-
initialState={removeExecutionsFromIngestionSource(focusSource)}
575+
initialState={mapSourceTypeAliases(removeExecutionsFromIngestionSource(focusSource))}
565576
open={isBuildingSource}
566577
onSubmit={onSubmit}
567578
onCancel={onCancel}

datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -234,12 +234,12 @@ import {
234234
AZURE,
235235
BIGQUERY_BETA,
236236
CSV,
237+
DATABRICKS,
237238
DBT_CLOUD,
238239
MYSQL,
239240
OKTA,
240241
POWER_BI,
241242
SAC,
242-
UNITY_CATALOG,
243243
VERTICA,
244244
} from '@app/ingest/source/builder/constants';
245245
import { BIGQUERY } from '@app/ingest/source/conf/bigquery/bigquery';
@@ -499,7 +499,7 @@ export const RECIPE_FIELDS: RecipeFields = {
499499
],
500500
filterSectionTooltip: 'Include or exclude specific Schemas, Tables and Views from ingestion.',
501501
},
502-
[UNITY_CATALOG]: {
502+
[DATABRICKS]: {
503503
fields: [WORKSPACE_URL, TOKEN],
504504
filterFields: [
505505
UNITY_METASTORE_ID_ALLOW,
@@ -616,11 +616,4 @@ export const RECIPE_FIELDS: RecipeFields = {
616616

617617
export const CONNECTORS_WITH_FORM = new Set(Object.keys(RECIPE_FIELDS));
618618

619-
export const CONNECTORS_WITH_TEST_CONNECTION = new Set([
620-
SNOWFLAKE,
621-
LOOKER,
622-
BIGQUERY_BETA,
623-
BIGQUERY,
624-
UNITY_CATALOG,
625-
SAC,
626-
]);
619+
export const CONNECTORS_WITH_TEST_CONNECTION = new Set([SNOWFLAKE, LOOKER, BIGQUERY_BETA, BIGQUERY, DATABRICKS, SAC]);

datahub-web-react/src/app/ingest/source/builder/sources.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,12 +32,12 @@
3232
"recipe": "source: \n type: snowflake\n config:\n account_id: null\n include_table_lineage: true\n include_view_lineage: true\n include_tables: true\n include_views: true\n profiling:\n enabled: true\n profile_table_level_only: true\n stateful_ingestion:\n enabled: true"
3333
},
3434
{
35-
"urn": "urn:li:dataPlatform:unity-catalog",
36-
"name": "unity-catalog",
35+
"urn": "urn:li:dataPlatform:databricks",
36+
"name": "databricks",
3737
"displayName": "Databricks",
3838
"description": "Import Metastores, Schemas, Tables, lineage, queries, and statistics from Databricks Unity Catalog.",
39-
"docsUrl": "https://docs.datahub.com/docs/generated/ingestion/sources/databricks/#module-unity-catalog",
40-
"recipe": "source:\n type: unity-catalog\n config:\n # Coordinates\n workspace_url: null\n include_table_lineage: true\n include_column_lineage: false\n stateful_ingestion:\n enabled: true"
39+
"docsUrl": "https://docs.datahub.com/docs/generated/ingestion/sources/databricks/",
40+
"recipe": "source:\n type: databricks\n config:\n # Coordinates\n workspace_url: null\n include_table_lineage: true\n include_column_lineage: false\n stateful_ingestion:\n enabled: true"
4141
},
4242
{
4343
"urn": "urn:li:dataPlatform:looker",

datahub-web-react/src/app/ingestV2/source/IngestionSourceList.tsx

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,17 @@ export enum IngestionSourceType {
114114

115115
const DEFAULT_PAGE_SIZE = 25;
116116

117+
const mapSourceTypeAliases = <T extends { type: string }>(source?: T): T | undefined => {
118+
if (source) {
119+
let { type } = source;
120+
if (type === 'unity-catalog') {
121+
type = 'databricks';
122+
}
123+
return { ...source, type };
124+
}
125+
return undefined;
126+
};
127+
117128
const removeExecutionsFromIngestionSource = (source) => {
118129
if (source) {
119130
return {
@@ -695,7 +706,7 @@ export const IngestionSourceList = ({
695706
)}
696707
</SourceContainer>
697708
<IngestionSourceBuilderModal
698-
initialState={removeExecutionsFromIngestionSource(focusSource)}
709+
initialState={mapSourceTypeAliases(removeExecutionsFromIngestionSource(focusSource))}
699710
open={showCreateModal}
700711
onSubmit={onSubmit}
701712
onCancel={onCancel}

datahub-web-react/src/app/ingestV2/source/builder/RecipeForm/constants.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -233,12 +233,12 @@ import {
233233
import {
234234
AZURE,
235235
CSV,
236+
DATABRICKS,
236237
DBT_CLOUD,
237238
MYSQL,
238239
OKTA,
239240
POWER_BI,
240241
SAC,
241-
UNITY_CATALOG,
242242
VERTICA,
243243
} from '@app/ingestV2/source/builder/constants';
244244
import { BIGQUERY } from '@app/ingestV2/source/conf/bigquery/bigquery';
@@ -471,7 +471,7 @@ export const RECIPE_FIELDS: RecipeFields = {
471471
],
472472
filterSectionTooltip: 'Include or exclude specific Schemas, Tables and Views from ingestion.',
473473
},
474-
[UNITY_CATALOG]: {
474+
[DATABRICKS]: {
475475
fields: [WORKSPACE_URL, TOKEN],
476476
filterFields: [
477477
UNITY_METASTORE_ID_ALLOW,

datahub-web-react/src/app/ingestV2/source/builder/sources.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,12 +32,12 @@
3232
"recipe": "source: \n type: snowflake\n config:\n account_id: null\n include_table_lineage: true\n include_view_lineage: true\n include_tables: true\n include_views: true\n profiling:\n enabled: true\n profile_table_level_only: true\n stateful_ingestion:\n enabled: true"
3333
},
3434
{
35-
"urn": "urn:li:dataPlatform:unity-catalog",
36-
"name": "unity-catalog",
35+
"urn": "urn:li:dataPlatform:databricks",
36+
"name": "databricks",
3737
"displayName": "Databricks",
3838
"description": "Import Metastores, Schemas, Tables, lineage, queries, and statistics from Databricks Unity Catalog.",
39-
"docsUrl": "https://docs.datahub.com/docs/generated/ingestion/sources/databricks/#module-unity-catalog",
40-
"recipe": "source:\n type: unity-catalog\n config:\n # Coordinates\n workspace_url: null\n include_table_lineage: true\n include_column_lineage: false\n stateful_ingestion:\n enabled: true"
39+
"docsUrl": "https://docs.datahub.com/docs/generated/ingestion/sources/databricks/",
40+
"recipe": "source:\n type: databricks\n config:\n # Coordinates\n workspace_url: null\n include_table_lineage: true\n include_column_lineage: false\n stateful_ingestion:\n enabled: true"
4141
},
4242
{
4343
"urn": "urn:li:dataPlatform:looker",

metadata-ingestion/docs/sources/databricks/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ DataHub supports integration with Databricks ecosystem using a multitude of conn
22

33
## Databricks Unity Catalog (new)
44

5-
The recently introduced [Unity Catalog](https://www.databricks.com/product/unity-catalog) provides a new way to govern your assets within the Databricks lakehouse. If you have Unity Catalog Enabled Workspace, you can use the `unity-catalog` source (aka `databricks` source, see below for details) to integrate your metadata into DataHub as an alternate to the Hive pathway. This also ingests hive metastore catalog in Databricks and is recommended approach to ingest Databricks ecosystem in DataHub.
5+
The recently introduced [Unity Catalog](https://www.databricks.com/product/unity-catalog) provides a new way to govern your assets within the Databricks lakehouse. If you have a Unity Catalog-enabled workspace, you can use the `databricks` source (aka the `unity-catalog` source, see below for details) to integrate your metadata into DataHub as an alternative to the Hive pathway. This source also ingests the Hive metastore catalog in Databricks and is the recommended approach for ingesting the Databricks ecosystem into DataHub.
66

77
## Databricks Hive (old)
88

metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
source:
2-
type: unity-catalog
2+
type: databricks
33
config:
44
workspace_url: https://my-workspace.cloud.databricks.com
55
token: "<token>"

0 commit comments

Comments (0)