forked from datahub-project/datahub
feat(ingest): data-lake - remove spark requirement if not profiling (d…
Showing 19 changed files with 1,829 additions and 501 deletions.
metadata-ingestion/src/datahub/ingestion/source/aws/s3_util.py (32 changes: 25 additions & 7 deletions)
@@ -1,17 +1,35 @@
-from typing import Optional
+import os
 
+S3_PREFIXES = ["s3://", "s3n://", "s3a://"]
 
-def make_s3_urn(s3_uri: str, env: str, suffix: Optional[str] = None) -> str:
+
+def is_s3_uri(uri: str) -> bool:
+    return any(uri.startswith(prefix) for prefix in S3_PREFIXES)
 
-    if not s3_uri.startswith("s3://"):
-        raise ValueError("S3 URIs should begin with 's3://'")
+
+def strip_s3_prefix(s3_uri: str) -> str:
     # remove S3 prefix (s3://)
-    s3_name = s3_uri[5:]
+    for s3_prefix in S3_PREFIXES:
+        if s3_uri.startswith(s3_prefix):
+            plain_base_path = s3_uri[len(s3_prefix) :]
+            return plain_base_path
 
+    raise ValueError(
+        f"Not an S3 URI. Must start with one of the following prefixes: {str(S3_PREFIXES)}"
+    )
+
+
+def make_s3_urn(s3_uri: str, env: str) -> str:
+
+    s3_name = strip_s3_prefix(s3_uri)
+
     if s3_name.endswith("/"):
         s3_name = s3_name[:-1]
 
-    if suffix is not None:
-        return f"urn:li:dataset:(urn:li:dataPlatform:s3,{s3_name}_{suffix},{env})"
+    name, extension = os.path.splitext(s3_name)
+
+    if extension != "":
+        extension = extension[1:]  # remove the dot
+        return f"urn:li:dataset:(urn:li:dataPlatform:s3,{name}_{extension},{env})"
 
     return f"urn:li:dataset:(urn:li:dataPlatform:s3,{s3_name},{env})"