From 8627be4de351015948f128ccc01172b048bb757c Mon Sep 17 00:00:00 2001 From: Dan Xie Date: Tue, 4 Mar 2025 15:17:47 +0800 Subject: [PATCH 1/3] feat: add support for AWS China region --- docs/admin-guide.md | 29 +- docs/installation-guide.md | 21 +- docs/samples-guide.md | 2 +- docs/user-guide.md | 2 + .../adf-bootstrap/deployment/global.yml | 6 +- .../lambda_codebase/tests/stubs/slack.py | 52 +- .../lambda_codebase/tests/stubs/stub_iam.py | 18 +- .../tests/test_iam_cfn_deploy_role_policy.py | 28 +- .../lambda_codebase/tests/test_slack.py | 7 +- .../deployment/pipeline_management.yml | 5 + .../china-forward-function/handler.py | 30 ++ .../china-forward-function/requirements.txt | 0 .../stepfunction_helper.py | 55 ++ .../china-support/cn_northwest_bucket.yml | 26 + .../china-support/cn_northwest_deploy.yml | 83 +++ .../adf-build/china-support/create_s3_cn.py | 50 ++ .../bootstrap_repository/adf-build/main.py | 58 ++- .../cdk/cdk_constructs/adf_codebuild copy.py | 474 ++++++++++++++++++ .../cdk/cdk_constructs/adf_codebuild.py | 20 +- .../cdk/cdk_constructs/adf_codepipeline.py | 3 +- .../retrieve_organization_accounts copy.py | 294 +++++++++++ .../helpers/retrieve_organization_accounts.py | 2 + .../adf-build/shared/python/cloudformation.py | 49 +- .../adf-build/shared/python/partition.py | 41 +- .../adf-build/shared/python/s3.py | 23 +- .../shared/python/tests/test_partition.py | 16 +- src/lambda_codebase/jump_role_manager/main.py | 2 +- src/lambda_codebase/organization/main.py | 4 +- src/template.yml | 88 +++- 29 files changed, 1372 insertions(+), 116 deletions(-) create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/requirements.txt create mode 100644 
src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_bucket.yml create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_deploy.yml create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py create mode 100755 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py diff --git a/docs/admin-guide.md b/docs/admin-guide.md index 731eda722..b4a74732c 100644 --- a/docs/admin-guide.md +++ b/docs/admin-guide.md @@ -59,7 +59,7 @@ definitions in them as desired. ## adfconfig The `adfconfig.yml` file resides on the -[management account](#management-account) CodeCommit Repository (in `us-east-1`) +[management account](#management-account) CodeCommit Repository (in `us-east-1` or `cn-north-1`) and defines the general high-level configuration for the AWS Deployment Framework. @@ -964,7 +964,7 @@ To determine the current version, follow these steps: ### ADF version you have deployed To check the current version of ADF that you have deployed, go to the management -account in us-east-1. Check the CloudFormation stack output or tag of the +account in us-east-1 or cn-north-1. Check the CloudFormation stack output or tag of the `serverlessrepo-aws-deployment-framework` Stack. - In the outputs tab, it will show the version as the `ADFVersionNumber`. @@ -985,7 +985,7 @@ releases](https://github.com/awslabs/aws-deployment-framework/releases). The `serverlessrepo-aws-deployment-framework` stack is updated through this process with new changes that were included in that release of ADF. 
-To check the progress in the management account in `us-east-1`, follow these +To check the progress in the management account in `us-east-1` or `cn-north-1`, follow these steps: 1. Go to the [CloudFormation @@ -1028,7 +1028,7 @@ Which branch is used is determined by: Alternatively, you can also perform the update using the AWS CLI. -In the management account in `us-east-1`: +In the management account in `us-east-1` or `cn-north-1`: 1. Go to the Pull Request section of the `aws-deployment-framework-bootstrap` [CodeCommit @@ -1043,7 +1043,7 @@ In the management account in `us-east-1`: changes that it proposes. Once reviewed, merge the pull request to continue. Confirm the `aws-deployment-framework-bootstrap` pipeline in the management -account in `us-east-1`: +account in `us-east-1` or `cn-north-1`: 1. Go to the [CodePipeline console for the aws-deployment-framework-bootstrap pipeline](https://console.aws.amazon.com/codesuite/codepipeline/pipelines/aws-deployment-framework-bootstrap-pipeline/view?region=us-east-1). @@ -1059,7 +1059,7 @@ creation and on-boarding process in parallel. These are managed through Step Function state machines. 1. Navigate to the [AWS Step Functions service](https://us-east-1.console.aws.amazon.com/states/home?region=us-east-1#/statemachines) - in the management account in `us-east-1`. + in the management account in `us-east-1` or `cn-north-1`. 2. Check the `AccountManagementStateMachine...` state machine, all recent invocations since we performed the update should succeed. It could be the case that there are no invocations at all. In that case, wait a minute and @@ -1138,10 +1138,11 @@ Alternatively, you can also perform the update using the AWS CLI. If you wish to remove ADF you can delete the CloudFormation stack named `serverlessrepo-aws-deployment-framework` in the management account in -the `us-east-1` region. This will remove most resources created by ADF -in the management account. 
With the exception of S3 buckets and SSM parameters. -If you bootstrapped ADF into the management account you need to manually remove -the bootstrap stacks as well. +the `us-east-1` region for global partition deployments; for China deployments +in `cn-north-1` region. This will remove most resources created by ADF in the management +account. With the exception of S3 buckets and SSM parameters. If you bootstrapped +ADF into the management account you need to manually remove the bootstrap stacks +as well. Feel free to delete the S3 buckets, SSM parameters that start with the `/adf` prefix, as well as other CloudFormation stacks such as: @@ -1164,7 +1165,7 @@ the base stack when the account is moved to the Root of the AWS Organization. One thing to keep in mind if you are planning to re-install ADF is that you will want to clean up the parameter from SSM Parameter Store. You can safely remove all `/adf` prefixed SSM parameters. But most importantly, you need to -remove the `/adf/deployment_account_id` in `us-east-1` on the +remove the `/adf/deployment_account_id` in `us-east-1` or `cn-north-1` on the management account. As AWS Step Functions uses this parameter to determine if ADF has already got a deployment account setup. If you re-install ADF with this parameter set to a @@ -1187,7 +1188,7 @@ There are two ways to enable this: to deploy the latest version again, set the `Log Level` to `DEBUG` to get extra logging information about the issue you are experiencing. 2. If you are running an older version of ADF, please navigate to the - CloudFormation Console in `us-east-1` of the AWS Management account. + CloudFormation Console in `us-east-1` or `cn-north-1` of the AWS Management account. 3. Update the stack. 4. For any ADF deployment of `v3.2.0` and later, please change the `Log Level` parameter and set it to `DEBUG`. Deploy those changes and revert them after @@ -1202,7 +1203,7 @@ Please trace the failed component and dive into/report the debug information. 
The main components to look at are: -1. In the AWS Management Account in `us-east-1`: +1. In the AWS Management Account in `us-east-1` or `cn-north-1`: 2. The [CloudFormation aws-deployment-framework stack](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks?filteringStatus=active&filteringText=aws-deployment-framework&viewNested=true&hideStacks=false). 3. The [CloudWatch Logs for the Lambda functions deployed by ADF](https://console.aws.amazon.com/lambda/home?region=us-east-1#/functions?f0=true&n0=false&op=and&v0=ADF). 4. Check if the [CodeCommit pull @@ -1211,7 +1212,7 @@ The main components to look at are: branch for the `aws-deployment-framework-bootstrap` (ADF Bootstrap) repository. 5. The [CodePipeline execution of the AWS Bootstrap pipeline](https://console.aws.amazon.com/codesuite/codepipeline/pipelines/aws-deployment-framework-bootstrap-pipeline/view?region=us-east-1). 6. Navigate to the [AWS Step Functions service](https://us-east-1.console.aws.amazon.com/states/home?region=us-east-1#/statemachines) - in the management account in `us-east-1`. Check the state machines named + in the management account in `us-east-1` or `cn-north-1`. Check the state machines named `AccountManagementStateMachine...` and `AccountBootstrappingStateMachine...`. Look at recent executions only. - When you find one that has a failed execution, check the components that diff --git a/docs/installation-guide.md b/docs/installation-guide.md index 237ad23f8..c270f4e39 100644 --- a/docs/installation-guide.md +++ b/docs/installation-guide.md @@ -28,7 +28,7 @@ It is okay to install ADF and AWS Control Tower in different regions. For example: - Install AWS Control Tower in `eu-central-1`. -- Install ADF in `us-east-1`. +- Install ADF in `us-east-1` or `cn-north-1`. 
**If you want to use ADF and AWS Control Tower, we recommend that you setup AWS Control Tower prior to installing ADF.** @@ -43,12 +43,12 @@ Ensure you have setup [AWS CloudTrail](https://aws.amazon.com/cloudtrail/) *(Not the default trail)* in your Management Account that spans **all regions**, the trail itself can be created in any region. Events [triggered via CloudTrail](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_incident-response.html) -for AWS Organizations can only be acted upon in the us-east-1 (North Virginia) +for AWS Organizations can only be acted upon in the us-east-1 (North Virginia) or `cn-northwest-1` region. Please use the [AWS CloudTrail instructions](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-create-and-update-a-trail.html) -to configure the CloudTrail in the `us-east-1` region within the AWS +to configure the CloudTrail in the `us-east-1` or `cn-north-1` region within the AWS Organizations Management AWS Account. ### 1.2. Enable AWS Organizations API Access @@ -92,7 +92,6 @@ Please note that building on *Windows* is not supported, please use the This should return a table that is possibly empty. - Additionally, running `docker --version` should return version 19 or later. - - [make](https://www.gnu.org/software/make/) - To test if it is available, run `make --version`. This should return 4.3 or later. @@ -102,7 +101,6 @@ Please note that building on *Windows* is not supported, please use the - [jq](https://github.com/jqlang/jq) - To test if it is available, run `jq --version`. This version should be 1.6 or later. - - [sed](https://www.gnu.org/software/sed/) - To test if it is available, run `sed --version`. This should return 4.3 or later. @@ -289,7 +287,7 @@ or applications into via AWS CodePipeline *(this can be updated later)*. 
When deploying ADF for the first time, part of the installation process will automatically create an AWS CodeCommit repository in the management AWS Account -within the `us-east-1` region. It will also make the initial commit to the +within the `us-east-1` or `cn-north-1` region. It will also make the initial commit to the default branch of this repository with a default set of examples that act as a starting point to help define the AWS Account bootstrapping processes for your Organization. @@ -330,7 +328,7 @@ To gather the values, you can either find them in the `aws-deployment-framework-bootstrap` repository in the `adfconfig.yml` file. Or by looking up the values that were specified the last time ADF got installed/updated via the CloudFormation template parameters of the -`serverlessrepo-aws-deployment-framework` stack in `us-east-1`. +`serverlessrepo-aws-deployment-framework` stack in `us-east-1` or `cn-north-1`. #### Stack Name @@ -352,6 +350,7 @@ Value to use depends on the AWS partition it is deployed to: - For the AWS partition (most common), use; `us-east-1` - For the US-Gov partition, use: `us-gov-west-1` +- For the China partition, use: `cn-north-1` **Explanation:** ADF needs to be deployed in the region where the control plane of the @@ -517,7 +516,7 @@ This can always be updated later via the `adfconfig.yml` file. You don't need to include the main region in this list. For example, if you use the example values for the default region and target regions, it will allow -pipelines to deploy to `eu-west-1`, `eu-central-`, and `us-east-1`. +pipelines to deploy to `eu-west-1`, `eu-central-1`, `cn-north-1` and `us-east-1`. *This is not required when performing an update between versions of ADF.* *Only supported when installing ADF for the first time. @@ -647,8 +646,8 @@ automatically in the background, to follow its progress: 1. Please navigate to the AWS Console in the AWS Management account. 
As the stack `serverlessrepo-aws-deployment-framework` completes you can now - open AWS CodePipeline from within the management account in `us-east-1` and - see that there is an initial pipeline execution that started. + open AWS CodePipeline from within the management account in `us-east-1` or + `cn-north-1` and see that there is an initial pipeline execution that started. Upon first installation, this pipeline might fail to fetch the source code from the repository. Click the retry failed action button to try again. @@ -693,7 +692,7 @@ automatically in the background, to follow its progress: that started the bootstrap process for the deployment account. You can view the progress of this in the management account in the AWS Step Functions console for the step function `AccountBootstrappingStateMachine-` in the - `us-east-1` region. + `us-east-1` or `cn-north-1` region. 3. Once the Step Function has completed, switch roles over to the newly bootstrapped deployment account in the region you defined as your main diff --git a/docs/samples-guide.md b/docs/samples-guide.md index 0cc42686f..1395bbe91 100644 --- a/docs/samples-guide.md +++ b/docs/samples-guide.md @@ -70,7 +70,7 @@ Management Account. By default, there is a `global.yml` in the root of the be appended to as required. If we look at AWS Step Functions in the management account in `us-east-1` -we can see the progress of the bootstrap process. +or `cn-north-1` we can see the progress of the bootstrap process. ![run-state-machine](./images/run-state-machine.png) diff --git a/docs/user-guide.md b/docs/user-guide.md index ec19f63ef..082e1121b 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -980,6 +980,8 @@ There are five different styles that one could choose from. method](https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html). 
- In case the bucket is stored in `us-east-1`, it will return: `https://s3.amazonaws.com/${bucket}/${key}` + - In case the bucket is stored in `cn-north-1` or `cn-northwest-1`, it will return: + `https://${bucket}.s3.${region}.amazonaws.cn/${key}` - In case the bucket is stored in any other region, it will return: `https://s3-${region}.amazonaws.com/${bucket}/${key}` - `virtual-hosted` style, will return the S3 location using the virtual hosted diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml index 6d6220ad1..b746c312f 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml @@ -809,6 +809,10 @@ Resources: commands: - aws s3 cp s3://$SHARED_MODULES_BUCKET/adf-build/ ./adf-build/ --recursive --only-show-errors - aws s3 cp --sse aws:kms --sse-kms-key-id $ADF_PIPELINE_ASSET_KMS_ARN ./adf-build/templates/ s3://$ADF_PIPELINE_ASSET_BUCKET/adf-build/templates/ --recursive --only-show-errors + - | + if [ "${AWS::Region}" = "cn-north-1" ]; then + pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple + fi - pip install -r adf-build/requirements.txt -r adf-build/helpers/requirements.txt -q -t ./adf-build pre_build: commands: @@ -1193,7 +1197,7 @@ Resources: StringEquals: aws:PrincipalOrgID: !Ref OrganizationId ArnLike: - aws:PrincipalArn: 'arn:aws:iam::*:role/adf-codecommit-role' + aws:PrincipalArn: !Sub 'arn:${AWS::Partition}:iam::*:role/adf-codecommit-role' Resource: - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket}/* Principal: diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py index 0b131fffc..d39dff7e2 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py @@ -2,45 +2,57 @@ # SPDX-License-Identifier: MIT-0 # pylint: skip-file +import os +import re +from boto3.session import Session +REGION = os.getenv("AWS_REGION", "us-east-1") +PARTITION = Session().get_partition_for_region(REGION) + +if PARTITION == "aws": + test_region = "eu-central-1" +else: + test_region = "cn-northwest-1" stub_approval_event = { 'Records': [{ 'EventSource': 'aws:sns', 'EventVersion': '1.0', - 'EventSubscriptionArn': 'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'EventSubscriptionArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Sns': { 'Type': 'Notification', 'MessageId': '1', - 'TopicArn': 'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'TopicArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Subject': 'APPROVAL NEEDED: AWS CodePipeline adf-pipeline-sample-vpc for action Approve', - 'Message': '{"region":"eu-central-1","consoleLink":"https://console.aws.amazon.com","approval":{"pipelineName":"adf-pipeline-sample-vpc","stageName":"approval-stage-1","actionName":"Approve","token":"fa777887-41dc-4ac4-8455-a209a93c76b9","expires":"2019-03-17T11:08Z","externalEntityLink":null,"approvalReviewLink":"https://console.aws.amazon.com/codepipeline/"}}', + 'Message': 
'{"region":"{test_region}","consoleLink":"https://console.aws.amazon.com","approval":{"pipelineName":"adf-pipeline-sample-vpc","stageName":"approval-stage-1","actionName":"Approve","token":"fa777887-41dc-4ac4-8455-a209a93c76b9","expires":"2019-03-17T11:08Z","externalEntityLink":null,"approvalReviewLink":"https://console.aws.amazon.com/codepipeline/"}}', 'Timestamp': '3000-03-10T11:08:34.673Z', 'SignatureVersion': '1', 'Signature': '1', - 'SigningCertUrl': 'https://sns.eu-central-1.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': 'https://sns.eu-central-1.amazonaws.com', + 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', + 'UnsubscribeUrl': f'https://sns.{test_region}.amazonaws.com', 'MessageAttributes': {} } }] } +stub_approval_event['Records'][0]['Sns']['Message'] = re.sub(r"{test_region}", test_region, stub_approval_event['Records'][0]['Sns']['Message']) + stub_bootstrap_event = { 'Records': [{ 'EventSource': 'aws:sns', 'EventVersion': '1.0', - 'EventSubscriptionArn': 'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'EventSubscriptionArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Sns': { 'Type': 'Notification', 'MessageId': '1', - 'TopicArn': 'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'TopicArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Subject': 'AWS Deployment Framework Bootstrap', 'Message': 'Account 1111111 has now been bootstrapped into banking/production', 'Timestamp': '3000-03-10T11:08:34.673Z', 'SignatureVersion': '1', 'Signature': '1', - 'SigningCertUrl': 'https://sns.eu-central-1.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': 'https://sns.eu-central-1.amazonaws.com', + 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', + 'UnsubscribeUrl': 
f'https://sns.{test_region}.amazonaws.com', 'MessageAttributes': {} } }] @@ -50,18 +62,18 @@ 'Records': [{ 'EventSource': 'aws:sns', 'EventVersion': '1.0', - 'EventSubscriptionArn': 'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'EventSubscriptionArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Sns': { 'Type': 'Notification', 'MessageId': '1', - 'TopicArn': 'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'TopicArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Subject': None, - 'Message': '{"version":"0","id":"1","detail-type":"CodePipeline Pipeline Execution State Change","source":"aws.codepipeline","account":"2","time":"3000-03-10T11:09:38Z","region":"eu-central-1","resources":["arn:aws:codepipeline:eu-central-1:999999:adf-pipeline-sample-vpc"],"detail":{"pipeline":"adf-pipeline-sample-vpc","execution-id":"1","state":"FAILED","version":9.0}}', + 'Message': '{"version":"0","id":"1","detail-type":"CodePipeline Pipeline Execution State Change","source":"aws.codepipeline","account":"2","time":"3000-03-10T11:09:38Z","region":"{test_region}","resources":["arn:aws:codepipeline:{test_region}:999999:adf-pipeline-sample-vpc"],"detail":{"pipeline":"adf-pipeline-sample-vpc","execution-id":"1","state":"FAILED","version":9.0}}', 'Timestamp': '2019-03-10T11:09:49.953Z', 'SignatureVersion': '1', 'Signature': '2', - 'SigningCertUrl': 'https://sns.eu-central-1.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': 'https://sns.eu-central-1.amazonaws.com', + 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', + 'UnsubscribeUrl': f'https://sns.{test_region}.amazonaws.com', 'MessageAttributes': {} } }] @@ -71,19 +83,21 @@ 'Records': [{ 'EventSource': 'aws:sns', 'EventVersion': '1.0', - 'EventSubscriptionArn': 
'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'EventSubscriptionArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Sns': { 'Type': 'Notification', 'MessageId': '1', - 'TopicArn': 'arn:aws:sns:eu-central-1:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', + 'TopicArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', 'Subject': 'Failure - AWS Deployment Framework Bootstrap', - 'Message': '{"Error":"Exception","Cause":"{\\"errorMessage\\": \\"CloudFormation Stack Failed - Account: 111 Region: eu-central-1 Status: ROLLBACK_IN_PROGRESS\\", \\"errorType\\": \\"Exception\\", \\"stackTrace\\": [[\\"/var/task/wait_until_complete.py\\", 99, \\"lambda_handler\\", \\"status))\\"]]}"}', + 'Message': '{"Error":"Exception","Cause":"{\\"errorMessage\\": \\"CloudFormation Stack Failed - Account: 111 Region: {test_region} Status: ROLLBACK_IN_PROGRESS\\", \\"errorType\\": \\"Exception\\", \\"stackTrace\\": [[\\"/var/task/wait_until_complete.py\\", 99, \\"lambda_handler\\", \\"status))\\"]]}"}', 'Timestamp': '2019-03-10T11:09:49.953Z', 'SignatureVersion': '1', 'Signature': '2', - 'SigningCertUrl': 'https://sns.eu-central-1.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': 'https://sns.eu-central-1.amazonaws.com', + 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', + 'UnsubscribeUrl': f'https://sns.{test_region}.amazonaws.com', 'MessageAttributes': {} } }] } + +stub_failed_bootstrap_event['Records'][0]['Sns']['Message'] = re.sub(r"{test_region}", test_region, stub_failed_bootstrap_event['Records'][0]['Sns']['Message']) \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py index f7f994397..f2b778e50 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py @@ -2,6 +2,16 @@ # SPDX-License-Identifier: MIT-0 # pylint: skip-file +import os +from boto3.session import Session + +REGION = os.getenv("AWS_REGION", "us-east-1") +PARTITION = Session().get_partition_for_region(REGION) + +if PARTITION == "aws": + test_region = "eu-west-1" +else: + test_region = "cn-northwest-1" """ Stubs for testing iam.py @@ -18,7 +28,7 @@ "Effect": "Allow", "Action": ["iam:ChangePassword"], "Resource": ( - "arn:aws:kms:eu-west-1:111111111111:key/existing_key" + f"arn:{PARTITION}:kms:{test_region}:111111111111:key/existing_key" ), }, { @@ -26,8 +36,8 @@ "Effect": "Allow", "Action": "s3:ListAllMyBuckets", "Resource": [ - "arn:aws:s3:::existing_bucket", - "arn:aws:s3:::existing_bucket/*", + f"arn:{PARTITION}:s3:::existing_bucket", + f"arn:{PARTITION}:s3:::existing_bucket/*", ], }, { @@ -38,4 +48,4 @@ }, ] } -} +} \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_iam_cfn_deploy_role_policy.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_iam_cfn_deploy_role_policy.py index be5c1eb66..55d7d0681 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_iam_cfn_deploy_role_policy.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_iam_cfn_deploy_role_policy.py @@ -4,12 +4,16 @@ # pylint: skip-file import json +import os +from boto3.session import Session from 
pytest import fixture, raises from mock import call, Mock from copy import deepcopy from .stubs import stub_iam from lambda_codebase.iam_cfn_deploy_role_policy import IAMCfnDeployRolePolicy +REGION = os.getenv("AWS_REGION", "us-east-1") +PARTITION = Session().get_partition_for_region(REGION) @fixture def iam_client(): @@ -114,8 +118,8 @@ def test_grant_access_to_s3_buckets_new_bucket_single_resource(iam_client): ) assert instance.policy_document['Statement'][1]['Resource'] == [ policy_doc_before['Statement'][1]['Resource'], - 'arn:aws:s3:::new_bucket', - 'arn:aws:s3:::new_bucket/*', + f'arn:{PARTITION}:s3:::new_bucket', + f'arn:{PARTITION}:s3:::new_bucket/*', ] assert instance.policy_document['Statement'][2] == ( policy_doc_before['Statement'][2] @@ -149,10 +153,10 @@ def test_grant_access_to_s3_buckets_new_buckets(iam_client): assert instance.policy_document['Statement'][1]['Resource'] == [ policy_doc_before['Statement'][1]['Resource'][0], policy_doc_before['Statement'][1]['Resource'][1], - 'arn:aws:s3:::new_bucket', - 'arn:aws:s3:::new_bucket/*', - 'arn:aws:s3:::another_new_bucket', - 'arn:aws:s3:::another_new_bucket/*', + f'arn:{PARTITION}:s3:::new_bucket', + f'arn:{PARTITION}:s3:::new_bucket/*', + f'arn:{PARTITION}:s3:::another_new_bucket', + f'arn:{PARTITION}:s3:::another_new_bucket/*', ] assert instance.policy_document['Statement'][2] == ( policy_doc_before['Statement'][2] @@ -187,8 +191,8 @@ def test_grant_access_to_kms_keys_new_key_single_resource(iam_client): instance.policy_document['Statement'][1]['Resource'][0] ) policy_doc_before = deepcopy(instance.policy_document) - - new_key_arn = 'arn:aws:kms:eu-west-1:111111111111:key/new_key' + test_region = "cn-north-1" if PARTITION == "aws-cn" else "eu-west-1" + new_key_arn = f'arn:{PARTITION}:kms:{test_region}:111111111111:key/new_key' instance.grant_access_to_kms_keys([ new_key_arn, ]) @@ -226,8 +230,8 @@ def test_grant_access_to_kms_keys_new_keys(iam_client): ] policy_doc_before = 
deepcopy(instance.policy_document) - new_key_arn_1 = 'arn:aws:kms:eu-west-1:111111111111:key/new_key_no_1' - new_key_arn_2 = 'arn:aws:kms:eu-west-1:111111111111:key/new_key_no_2' + new_key_arn_1 = f'arn:{PARTITION}:kms:eu-west-1:111111111111:key/new_key_no_1' + new_key_arn_2 = f'arn:{PARTITION}:kms:eu-west-1:111111111111:key/new_key_no_2' instance.grant_access_to_kms_keys([ new_key_arn_1, existing_key_arn_1, @@ -350,8 +354,8 @@ def test_update_iam_role_policies_updated(iam_client): policy_doc['Statement'][1]['Resource'] = [ policy_doc['Statement'][1]['Resource'][0], policy_doc['Statement'][1]['Resource'][1], - 'arn:aws:s3:::new_bucket', - 'arn:aws:s3:::new_bucket/*', + f'arn:{PARTITION}:s3:::new_bucket', + f'arn:{PARTITION}:s3:::new_bucket/*', ] policy_doc_json = json.dumps(policy_doc) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_slack.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_slack.py index 60c44b81d..1b3415062 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_slack.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/test_slack.py @@ -2,9 +2,11 @@ # SPDX-License-Identifier: MIT-0 # pylint: skip-file - +import os +from boto3.session import Session from pytest import fixture from ..slack import * + from .stubs.slack import ( stub_approval_event, stub_failed_pipeline_event, @@ -12,6 +14,9 @@ stub_failed_bootstrap_event, ) +REGION = os.getenv("AWS_REGION", "us-east-1") +PARTITION = Session().get_partition_for_region(REGION) + @fixture def stubs(): os.environ["ADF_PIPELINE_PREFIX"] = 'adf-pipeline-' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml index c523fe9f6..646b40772 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml @@ -667,6 +667,11 @@ Resources: python: 3.12 nodejs: 20 commands: + - | + if [ "${AWS::Region}" = "cn-north-1" ]; then + pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple + npm config set registry https://registry.npmmirror.com + fi - npm install aws-cdk@2.136.0 -g -y --quiet --no-progress - aws s3 cp s3://$SHARED_MODULES_BUCKET/adf-build/ ./adf-build/ --recursive --only-show-errors - pip install -r adf-build/requirements.txt -q -t ./adf-build diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py new file mode 100644 index 000000000..207a2163a --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py @@ -0,0 +1,30 @@ +# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +The forward function will forward events to the target SFN. 
+""" + +import logging +import os + +import boto3 +from stepfunction_helper import Stepfunction + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) +SFN_ARN = os.getenv("SFN_ARN", "") +sfn_name = SFN_ARN.split(':')[-1] + +def lambda_handler(event, context): + LOGGER.debug(event) + if "source" in event and event["source"] == "aws.organizations": + session = boto3.session.Session(region_name="cn-north-1") + sfn_instance = Stepfunction(session, LOGGER) + _, state_name = sfn_instance.invoke_sfn_execution( + sfn_arn=SFN_ARN, + input=event, + ) + LOGGER.info("Successfully invoke sfn %s with statemachine name %s.", sfn_name, state_name) + + diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/requirements.txt b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/requirements.txt new file mode 100644 index 000000000..e69de29bb diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py new file mode 100644 index 000000000..3fa956992 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py @@ -0,0 +1,55 @@ +# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0

"""Helper around the AWS Step Functions client used to start executions."""

import json
import uuid
from decimal import Decimal


def convert_decimals(obj):
    """
    Recursively convert every ``Decimal`` in *obj* to a string.

    ``json.dumps`` cannot serialize ``Decimal`` values (as produced e.g. by
    DynamoDB); strings preserve the exact value without rounding.

    Args:
        obj: Any value; lists and dicts are converted recursively.

    Returns:
        The same structure with all ``Decimal`` instances replaced by ``str``.
    """
    if isinstance(obj, Decimal):
        return str(obj)
    if isinstance(obj, list):
        return [convert_decimals(item) for item in obj]
    if isinstance(obj, dict):
        return {key: convert_decimals(value) for key, value in obj.items()}
    return obj


class Stepfunction:
    """Class to handle Custom Stepfunction methods"""

    def __init__(
        self,
        session,
        LOGGER
    ):
        # pylint: disable=invalid-name
        # ``LOGGER`` keeps its original name so keyword callers keep working.
        self.logger = LOGGER
        self.session = session

    def get_stepfunction_client(self):
        """Return a Step Functions client built from this instance's session."""
        return self.session.client("stepfunctions")

    def invoke_sfn_execution(
            self,
            sfn_arn,
            input: dict,
            execution_name=None):
        """
        Start an execution of the given state machine.

        Args:
            sfn_arn (str): ARN of the state machine to execute.
            input (dict): Execution input; Decimal values are stringified
                before JSON serialization. (Name shadows the builtin
                ``input``; kept for backward compatibility with callers.)
            execution_name (str, optional): Name for the execution. A random
                UUID is generated when omitted.

        Returns:
            tuple: ``(response, execution_name)`` where ``response`` is the
            ``start_execution`` API response.

        Raises:
            Exception: Re-raises any error from the Step Functions API after
                logging it.
        """
        sfn_client = self.get_stepfunction_client()
        if not execution_name:
            execution_name = str(uuid.uuid4())
        event_body = json.dumps(convert_decimals(input), indent=2)
        try:
            # Keep the try body minimal: only the API call can fail in a way
            # we want to log with the target ARN attached.
            response = sfn_client.start_execution(
                stateMachineArn=sfn_arn,
                name=execution_name,
                input=event_body,
            )
        except Exception as error:
            self.logger.error(
                "Couldn't invoke stepfunction %s, error: %s.", sfn_arn, error,
            )
            raise
        return response, execution_name
+ +Parameters: + AcoountBootstrapingStateMachineArn: + Type: String + AdfLogLevel: + Type: String + +Globals: + Function: + Architectures: + - arm64 + CodeUri: china-forward-function + Runtime: python3.12 + Timeout: 300 + Tracing: Active + +Resources: + ForwardStateMachineFunctionRole: + Type: "AWS::IAM::Role" + Properties: + Path: "/adf-china-extra/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + + ForwardStateMachineFunctionRolePolicy: + Type: AWS::IAM::Policy + Properties: + PolicyName: "forward-state-machine-function-policy" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Action: "states:StartExecution" + Resource: !Ref AcoountBootstrapingStateMachineArn + - Effect: Allow + Action: + - "logs:CreateLogGroup" + - "logs:CreateLogStream" + - "logs:PutLogEvents" + - "xray:PutTelemetryRecords" + - "xray:PutTraceSegments" + - "cloudwatch:PutMetricData" + Resource: "*" + Roles: + - !Ref ForwardStateMachineFunctionRole + + + ForwardStateMachineFunction: + Type: 'AWS::Serverless::Function' + Properties: + Handler: handler.lambda_handler + Description: "ADF Lambda Function - Forward events to statemachine" + Environment: + Variables: + SFN_ARN: !Ref AcoountBootstrapingStateMachineArn + ADF_LOG_LEVEL: !Ref AdfLogLevel + FunctionName: ForwardStateMachineFunction + Role: !GetAtt ForwardStateMachineFunctionRole.Arn + Events: + RuleEvent: + Type: EventBridgeRule + Properties: + Pattern: + source: + - aws.organizations + detail: + eventSource: + - organizations.amazonaws.com + eventName: + - MoveAccount \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py new file mode 100644 index 000000000..6ef9e0622 --- /dev/null +++ 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py @@ -0,0 +1,50 @@ +# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Main entry point for create_s3_cn.py execution which +is executed from within AWS CodeBuild in the management account +""" +import os +import boto3 +from logger import configure_logger +from cloudformation import CloudFormation + +REGION_DEFAULT = os.environ["AWS_REGION"] +MANAGEMENT_ACCOUNT_ID = os.environ["MANAGEMENT_ACCOUNT_ID"] +LOGGER = configure_logger(__name__) + +def _create_s3_bucket(bucket_name): + try: + LOGGER.info(f"Deploy S3 bucket {bucket_name}...") + extra_deploy_region = "cn-northwest-1" + template_path = "adf-build/china-support/cn_northwest_bucket.yml" + stack_name = 'adf-regional-base-china-bucket' + parameters= [ + { + 'ParameterKey': 'BucketName', + 'ParameterValue': bucket_name, + 'UsePreviousValue': False, + }, + ] + cloudformation = CloudFormation( + region=extra_deploy_region, + deployment_account_region=extra_deploy_region, + role=boto3, + wait=True, + stack_name=stack_name, + account_id=MANAGEMENT_ACCOUNT_ID, + parameters = parameters, + local_template_path=template_path + ) + cloudformation.create_stack() + except Exception as error: + LOGGER.error(f"Failed to process _create_s3_bucket, error:\n {error}") + exit(1) + +def main(): + bucket_name = f"adf-china-bootstrap-cn-northwest-1-{MANAGEMENT_ACCOUNT_ID}" + _create_s3_bucket(bucket_name) + +if __name__ == '__main__': + main() diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py index a4cd8eddb..84c359662 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py @@ -24,7 +24,7 @@ from errors import GenericAccountConfigureError, 
ParameterNotFoundError, Error from sts import STS from s3 import S3 -from partition import get_partition +from partition import get_partition, get_aws_domain from config import Config from organization_policy import OrganizationPolicy @@ -57,6 +57,10 @@ ADF_DEFAULT_SCM_FALLBACK_BRANCH = 'main' ADF_DEFAULT_DEPLOYMENT_MAPS_ALLOW_EMPTY_TARGET = 'disabled' ADF_DEFAULT_ORG_STAGE = "none" +CHINA_PRIMARY_REGION = "cn-north-1" +CHINA_SECONDARY_REGION = "cn-northwest-1" +ADF_REGIONAL_BASE_CHINA_EXTRA_STACK_NAME = "adf-regional-base-china-extra" +CHINA_SECONDARY_REGION_DEPLOY_TEMP = "china-support/cn_northwest_deploy" LOGGER = configure_logger(__name__) @@ -369,9 +373,10 @@ def await_sfn_executions(sfn_client): "Account Management State Machine encountered a failed, " "timed out, or aborted execution. Please look into this problem " "before retrying the bootstrap pipeline. You can navigate to: " - "https://%s.console.aws.amazon.com/states/home" + "https://%s.console.%s/states/home" "?region=%s#/statemachines/view/%s ", REGION_DEFAULT, + get_aws_domain(REGION_DEFAULT), REGION_DEFAULT, ACCOUNT_MANAGEMENT_STATE_MACHINE_ARN, ) @@ -401,10 +406,11 @@ def await_sfn_executions(sfn_client): "Account Bootstrapping State Machine encountered a failed, " "timed out, or aborted execution. Please look into this problem " "before retrying the bootstrap pipeline. 
You can navigate to: " - "https://%(region)s.console.aws.amazon.com/states/home" + "https://%(region)s.console.%(domain)s/states/home" "?region=%(region)s#/statemachines/view/%(sfn_arn)s", { "region": REGION_DEFAULT, + "domain": get_aws_domain(REGION_DEFAULT), "sfn_arn": ACCOUNT_BOOTSTRAPPING_STATE_MACHINE_ARN, }, ) @@ -459,6 +465,49 @@ def _sfn_execution_exists_with( return False +def _china_region_extra_deploy(): + if REGION_DEFAULT == CHINA_PRIMARY_REGION: + parameters = [ + { + 'ParameterKey': 'AcoountBootstrapingStateMachineArn', + 'ParameterValue': ACCOUNT_BOOTSTRAPPING_STATE_MACHINE_ARN, + 'UsePreviousValue': False, + }, + { + 'ParameterKey': 'AdfLogLevel', + 'ParameterValue': ADF_LOG_LEVEL, + 'UsePreviousValue': False, + }, + ] + try: + s3_china = S3( + region=REGION_DEFAULT, + bucket=S3_BUCKET_NAME + ) + cloudformation = CloudFormation( + region=CHINA_SECONDARY_REGION, + deployment_account_region=CHINA_SECONDARY_REGION, + role=boto3, + wait=True, + stack_name=ADF_REGIONAL_BASE_CHINA_EXTRA_STACK_NAME, + s3=s3_china, + s3_key_path='adf-build', + account_id=MANAGEMENT_ACCOUNT_ID, + template_file_prefix=CHINA_SECONDARY_REGION_DEPLOY_TEMP, + parameters=parameters + + ) + cloudformation.create_stack() + except Exception as error: + LOGGER.error( + "China extra stack adf-regional-base-china-extra deployment failed in region %(region)s, please check following error: " + "%(error)s", + { + "region": CHINA_SECONDARY_REGION, + "error": str(error), + }, + ) + sys.exit(2) def main(): # pylint: disable=R0915 LOGGER.info("ADF Version %s", ADF_VERSION) @@ -469,7 +518,8 @@ def main(): # pylint: disable=R0915 policies = OrganizationPolicy() config = Config() cache = Cache() - + # fix the china org service endpoint issue + _china_region_extra_deploy() try: parameter_store = ParameterStore(REGION_DEFAULT, boto3) deployment_account_id = parameter_store.fetch_parameter( diff --git 
a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py new file mode 100644 index 000000000..12e60b0d6 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py @@ -0,0 +1,474 @@ +# Copyright Amazon.com Inc. or its affiliates. +# SPDX-License-Identifier: MIT-0 + +"""Construct related to CodeBuild Input +""" + +import os +from aws_cdk import ( + aws_codepipeline as _codepipeline, + aws_codebuild as _codebuild, + aws_iam as _iam, + aws_kms as _kms, + aws_ecr as _ecr, + aws_ec2 as _ec2, + Stack, + Duration, + Aws, +) +from constructs import Construct + +from cdk_constructs.adf_codepipeline import Action + +ADF_DEPLOYMENT_REGION = os.environ["AWS_REGION"] +ADF_DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] +DEFAULT_CODEBUILD_IMAGE = "STANDARD_5_0" +DEFAULT_BUILD_SPEC_FILENAME = 'buildspec.yml' +DEFAULT_DEPLOY_SPEC_FILENAME = 'deployspec.yml' +ADF_DEFAULT_BUILD_ROLE_NAME = 'adf-codebuild-role' +ADF_DEFAULT_BUILD_TIMEOUT = 20 + +def get_partition(region_name: str) -> str: + """Given the region, this function will return the appropriate partition. + + :param region_name: The name of the region (us-east-1, us-gov-west-1) + :return: Returns the partition name as a string. 
+ """ + + if region_name.startswith('us-gov'): + return 'aws-us-gov' + elif region_name.startswith("cn-north"): + return "aws-cn" + return 'aws' + + +ADF_DEPLOYMENT_PARTITION = get_partition(ADF_DEPLOYMENT_REGION) + + +class CodeBuild(Construct): + # pylint: disable=no-value-for-parameter, too-many-locals + + def __init__( + self, + scope: Construct, + id: str, + shared_modules_bucket: str, + deployment_region_kms: str, + deployment_map_source: str, + deployment_map_name: str, + map_params: dict, + target, + **kwargs, + ): + super().__init__(scope, id, **kwargs) + stack = Stack.of(self) + + # if CodeBuild is being used as a deployment action we want to allow + # target specific values. + if target: + role_name = ( + target + .get('properties', {}) + .get('role', ADF_DEFAULT_BUILD_ROLE_NAME) + ) + role_arn = ( + f'arn:{stack.partition}:iam::{ADF_DEPLOYMENT_ACCOUNT_ID}:' + f'role/{role_name}' + ) + timeout = ( + target + .get('properties', {}) + .get('timeout', ( + map_params['default_providers']['deploy'] + .get('properties', {}) + .get('timeout', ADF_DEFAULT_BUILD_TIMEOUT) + )) + ) + build_env = _codebuild.BuildEnvironment( + build_image=CodeBuild.determine_build_image( + codebuild_id=id, + scope=scope, + target=target, + map_params=map_params, + ), + compute_type=getattr( + _codebuild.ComputeType, + ( + target + .get('properties', {}) + .get('size', ( + map_params['default_providers']['deploy'] + .get('properties', {}) + .get('size', "SMALL") + )) + .upper() + ), + ), + environment_variables=CodeBuild.generate_build_env_variables( + _codebuild, + shared_modules_bucket, + deployment_map_source, + deployment_map_name, + map_params, + target, + ), + privileged=( + target + .get('properties', {}) + .get('privileged', ( + map_params['default_providers']['deploy'] + .get('properties', {}) + .get('privileged', False) + )) + ), + ) + build_spec = CodeBuild.determine_build_spec( + id, + ( + map_params['default_providers']['deploy'] + .get('properties', {}) + ), + target, + 
) + self.pipeline_project = _codebuild.PipelineProject( + self, + 'project', + environment=build_env, + encryption_key=_kms.Key.from_key_arn( + self, + 'default_deployment_account_key', + key_arn=deployment_region_kms, + ), + description=f"ADF CodeBuild Project for {id}", + project_name=f"adf-deploy-{id}", + timeout=Duration.minutes(timeout), + role=_iam.Role.from_role_arn( + self, + 'build_role', + role_arn=role_arn, + mutable=False, + ), + build_spec=build_spec, + ) + self._setup_vpc( + map_params['default_providers']['deploy'], + target=target, + ) + self.deploy = Action( + name=id, + provider="CodeBuild", + category="Build", + project_name=f"adf-deploy-{id}", + run_order=1, + target=target, + map_params=map_params, + action_name=id, + ).config + else: + role_name = ( + map_params['default_providers']['build'] + .get('properties', {}) + .get('role', ADF_DEFAULT_BUILD_ROLE_NAME) + ) + role_arn = ( + f'arn:{stack.partition}:iam::{ADF_DEPLOYMENT_ACCOUNT_ID}:' + f'role/{role_name}' + ) + timeout = ( + map_params['default_providers']['build'] + .get('properties', {}) + .get('timeout', ADF_DEFAULT_BUILD_TIMEOUT) + ) + build_env = _codebuild.BuildEnvironment( + build_image=CodeBuild.determine_build_image( + codebuild_id=id, + scope=scope, + target=target, + map_params=map_params + ), + compute_type=getattr( + _codebuild.ComputeType, + ( + map_params['default_providers']['build'] + .get('properties', {}) + .get('size', "SMALL") + .upper() + ), + ), + environment_variables=CodeBuild.generate_build_env_variables( + _codebuild, + shared_modules_bucket, + deployment_map_source, + deployment_map_name, + map_params, + ), + privileged=( + map_params['default_providers']['build'] + .get('properties', {}) + .get('privileged', False) + ), + ) + build_spec = CodeBuild.determine_build_spec( + id, + map_params['default_providers']['build'].get('properties', {}) + ) + self.pipeline_project = _codebuild.PipelineProject( + self, + 'project', + environment=build_env, + 
encryption_key=_kms.Key.from_key_arn( + self, + 'DefaultDeploymentAccountKey', + key_arn=deployment_region_kms, + ), + description=f"ADF CodeBuild Project for {map_params['name']}", + project_name=f"adf-build-{map_params['name']}", + timeout=Duration.minutes(timeout), + build_spec=build_spec, + role=_iam.Role.from_role_arn( + self, + 'default_build_role', + role_arn=role_arn, + mutable=False, + ), + ) + self._setup_vpc(map_params['default_providers']['build']) + self.build = _codepipeline.CfnPipeline.StageDeclarationProperty( + name="Build", + actions=[ + Action( + name="Build", + provider="CodeBuild", + category="Build", + run_order=1, + map_params=map_params, + action_name="build" + ).config + ] + ) + + def _setup_vpc(self, default_provider, target=None): + default_props = default_provider.get('properties', {}) + # This will either be empty (build stage) or configured (deploy stage) + target_props = (target or {}).get('properties', {}) + vpc_id = target_props.get('vpc_id', default_props.get('vpc_id')) + subnet_ids = target_props.get( + 'subnet_ids', + default_props.get('subnet_ids', []), + ) + security_group_ids = target_props.get( + 'security_group_ids', + default_props.get('security_group_ids', []), + ) + if vpc_id: + if not subnet_ids: + raise ValueError( + "CodeBuild environment of " + f"{self.pipeline_project.project_name} has a " + f"VPC Id ({vpc_id}) set, but no subnets are configured. " + "When specifying the VPC Id for a given CodeBuild " + "environment, you also need to specify the subnet_ids " + "and optionally the security_group_ids that should be " + "used by the CodeBuild instance." 
+ ) + if not security_group_ids: + default_security_group = _ec2.CfnSecurityGroup( + self, + 'sg', + group_description=( + f"The default security group for {self.node.id}" + ), + security_group_egress=[ + { + "cidrIp": "0.0.0.0/0", + "ipProtocol": "-1", + } + ], + vpc_id=vpc_id, + ) + security_group_ids = [ + default_security_group.get_att("GroupId"), + ] + self.pipeline_project.node.default_child.add_property_override( + "VpcConfig", + { + "VpcId": vpc_id, + "Subnets": subnet_ids, + "SecurityGroupIds": security_group_ids, + }, + ) + elif subnet_ids or security_group_ids: + raise ValueError( + "CodeBuild environment of " + f"{self.pipeline_project.project_name} requires a VPC Id when " + "configured to connect to specific subnets." + ) + + @staticmethod + def _determine_stage_build_spec( + codebuild_id, + props, + stage_name, + default_filename, + ): + filename = props.get('spec_filename') + spec_inline = props.get('spec_inline', {}) + if filename and spec_inline: + raise AssertionError( + "The spec_filename and spec_inline are both present " + f"inside the {stage_name} stage definition of {codebuild_id}. " + "Whereas only one of these two is allowed." 
+ ) + + if spec_inline: + return _codebuild.BuildSpec.from_object(spec_inline) + + return _codebuild.BuildSpec.from_source_filename( + filename or default_filename, + ) + + @staticmethod + def determine_build_spec(codebuild_id, default_props, target=None): + if target: + target_props = target.get('properties', {}) + if ( + 'spec_inline' in target_props + or 'spec_filename' in target_props + ): + return CodeBuild._determine_stage_build_spec( + codebuild_id=codebuild_id, + props=target_props, + stage_name='deploy target', + default_filename=DEFAULT_DEPLOY_SPEC_FILENAME, + ) + stage_type = 'deploy' if target else 'build' + return CodeBuild._determine_stage_build_spec( + codebuild_id=codebuild_id, + props=default_props, + stage_name=f'default {stage_type}', + default_filename=( + DEFAULT_DEPLOY_SPEC_FILENAME + if target + else DEFAULT_BUILD_SPEC_FILENAME + ), + ) + + @staticmethod + def get_image_by_name(specific_image: str): + cdk_image_name = (specific_image or DEFAULT_CODEBUILD_IMAGE).upper() + if hasattr(_codebuild.LinuxBuildImage, cdk_image_name): + return getattr(_codebuild.LinuxBuildImage, cdk_image_name) + if specific_image.startswith('docker-hub://'): + specific_image = specific_image.split('docker-hub://')[-1] + return _codebuild.LinuxBuildImage.from_docker_registry( + specific_image, + ) + raise ValueError( + f"The CodeBuild image {specific_image} could not be found." 
+ ) + + @staticmethod + def determine_build_image(codebuild_id, scope, target, map_params): + specific_image = None + if target: + specific_image = ( + target.get('properties', {}).get('image') + or ( + map_params['default_providers']['deploy'] + .get('properties', {}) + .get('image') + ) + ) + else: + specific_image = ( + map_params['default_providers']['build'] + .get('properties', {}) + .get('image') + ) + if isinstance(specific_image, dict): + repository_name = specific_image.get('repository_name', '') + repository_arn = specific_image.get('repository_arn', '') + if not repository_arn and not repository_name: + raise ValueError("The repository arn or name needs to be specified") + + if repository_arn and repository_name: + raise AssertionError("Specify the arn or the name of the repository, not both.") + + if repository_name: + repository_arn = ( + f"arn:{ADF_DEPLOYMENT_PARTITION}:ecr:{ADF_DEPLOYMENT_REGION}:" + f"{ADF_DEPLOYMENT_ACCOUNT_ID}:repository/{repository_name}" + ) + + ecr_repo = _ecr.Repository.from_repository_arn( + scope, + f'custom_repo_{codebuild_id}', + repository_arn, + ) + return _codebuild.LinuxBuildImage.from_ecr_repository( + ecr_repo, + specific_image.get('tag', 'latest'), + ) + + return CodeBuild.get_image_by_name(specific_image) + + @staticmethod + def generate_build_env_variables( + codebuild, + shared_modules_bucket, + deployment_map_source, + deployment_map_name, + map_params, + target=None, + ): + build_env_vars = { + "PYTHONPATH": "./adf-build/python", + "ADF_PROJECT_NAME": map_params['name'], + "ADF_DEPLOYMENT_MAP_SOURCE": deployment_map_source, + "ADF_DEPLOYMENT_MAP_NAME": deployment_map_name, + "S3_BUCKET_NAME": shared_modules_bucket, + "ACCOUNT_ID": Aws.ACCOUNT_ID, + **( + map_params + .get('default_providers', {}) + .get( + ( + 'deploy' + if target + else 'build' + ), + {}, + ) + .get('properties', {}) + .get('environment_variables', {}) + ), + **( + # Target should go second, as this overwrites any + # existing key/value, so 
it overrides the defaults + ( + target or {} + ) + .get('properties', {}) + .get('environment_variables', {}) + ) + } + + if target: + build_env_vars['TARGET_NAME'] = target['name'] + build_env_vars["TARGET_ACCOUNT_ID"] = target['id'] + deploy_role_name = ( + target + .get('properties', {}) + .get('role', ( + map_params['default_providers']['deploy'] + .get('properties', {}) + .get('role') + )) + ) + if deploy_role_name: + build_env_vars["DEPLOYMENT_ROLE"] = deploy_role_name + + return { + key: codebuild.BuildEnvironmentVariable(value=value) + for key, value in build_env_vars.items() + } diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py index b6112c10e..b7baff88e 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py @@ -27,6 +27,22 @@ ADF_DEFAULT_BUILD_ROLE_NAME = 'adf-codebuild-role' ADF_DEFAULT_BUILD_TIMEOUT = 20 +def get_partition(region_name: str) -> str: + """Given the region, this function will return the appropriate partition. + + :param region_name: The name of the region (us-east-1, us-gov-west-1) + :return: Returns the partition name as a string. 
+ """ + + if region_name.startswith('us-gov'): + return 'aws-us-gov' + elif region_name.startswith("cn-north"): + return "aws-cn" + return 'aws' + + +ADF_DEPLOYMENT_PARTITION = get_partition(ADF_DEPLOYMENT_REGION) + class CodeBuild(Construct): # pylint: disable=no-value-for-parameter, too-many-locals @@ -379,8 +395,8 @@ def determine_build_image(codebuild_id, scope, target, map_params): if repository_name: repository_arn = ( - f"arn:aws:ecr:{ADF_DEPLOYMENT_REGION}:" - f"{ADF_DEPLOYMENT_ACCOUNT_ID}:{repository_name}" + f"arn:{ADF_DEPLOYMENT_PARTITION}:ecr:{ADF_DEPLOYMENT_REGION}:" + f"{ADF_DEPLOYMENT_ACCOUNT_ID}:repository/{repository_name}" ) ecr_repo = _ecr.Repository.from_repository_arn( diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index 61a77553e..2b0e0ff16 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -39,7 +39,8 @@ def get_partition(region_name: str) -> str: if region_name.startswith('us-gov'): return 'aws-us-gov' - + elif region_name.startswith("cn-north"): + return "aws-cn" return 'aws' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py new file mode 100755 index 000000000..7dcbdb1fc --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py @@ -0,0 +1,294 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright Amazon.com Inc. or its affiliates. 
+# SPDX-License-Identifier: MIT-0 + +""" +retrieve_organization_accounts.py + +AWS Account details of the member accounts in the organization are +required for services like AWS Security Hub and Amazon GuardDuty. + +This helper script will allow you to retrieve all the member account +details of the accounts in the organization as part of the CodeBuild +step. The member account details will be written to a JSON file on the +path as specified during execution. + +For example, to get the account details required for AWS Security Hub +to send the invites correctly, you would need to execute the script +with the arguments to fetch the details as shown in the last example. + +This will write the JSON file directly inside the code base of the +custom resource. Such that the lambda function can read and act based +on that data from inside a target account. Without requiring special +permissions to be added to target accounts to traverse the AWS +Organization. + +Usage: + retrieve_organization_accounts.py [-v | --verbose] [-h | --help] + [-r ] [-o ] [-s ] + [--session-ttl ] [-f ]... + +Options: + -f , --field + Add a specific field that is available in the organization + member account details. Available options include 'Id', 'Arn', + 'Email', 'Name', 'Status', 'JoinedMethod', 'JoinedTimestamp'. + You can specify multiple by adding them one after another. + All other details that would otherwise be returned by the + AWS Organizations: ListAccounts API call will be ignored + [default: Id Email Name]. + + -h, --help Show help info related to generic or command + execution. + + -o , --output-file + The output file path to use to output the retrieved + data to in JSON format. Define a file path or set to - to + output to stdout [default: -]. + + -r , --role-name + The name of the role to assume into to get read access + to list and describe the member accounts in the + organization [default: + adf/organizations/adf-organizations-readonly]. 
+ + -s , --session-name + The session name to use when assuming into the billing account + role [default: retrieve_organization_accounts]. + + --session-ttl + The STS TTL in seconds [default: 900]. + + -v, --verbose + Show verbose logging information. + +Example: + retrieve_organization_accounts.py -v -o src/lambda/accounts.json + + retrieve_organization_accounts.py -v -f Id -f Email -o src/lambda/dat.json +""" + +import os +import sys +import logging +import json + +import boto3 +from botocore.exceptions import ClientError + +from docopt import docopt + + +# Configure logging +logging.basicConfig(level=logging.INFO) +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) +logging.basicConfig(level=logging.INFO) + + +def main(): + """ + AWS Account details of the member accounts in the organization are + required for services like AWS Security Hub and Amazon GuardDuty. + + This helper script will allow you to retrieve all the member account + details of the accounts in the organization as part of the CodeBuild + step. The member account details will be written to a JSON file on the + path as specified during execution. + + For example, to get the account details required for Security Hub to + send the invites correctly, you would need to execute the script with + the following arguments to fetch the details: + + ```bash + python adf-build/helpers/retrieve_organization_accounts.py -v \ + -o src/custom_resource/invite_members/member_accounts.json \ + -f Id \ + -f Email + ``` + + This will write the JSON file directly inside the code base of the + `invite_members` custom resource. Such that the lambda function can + read and act based on that data from inside a target account without + requiring special permissions to be added to target accounts to traverse + the AWS Organization. + + The two options defined using the `-f` argument, specify that we are + interested in the `Id` and the `Email` of the member accounts. 
+ All other details that would otherwise be returned by the + [Organizations: ListAccounts]( + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/organizations.html#Organizations.Client.list_accounts) + API will be ignored. + + --- + + This main function will parse the arguments using docopt to determine what + options are relevant. See command options listed at the top of this file + or run the script with `--help` to get the list of options instead. + + Based on the input, it will traverse the accounts linked to the + AWS Organization and store the details as requested in a JSON file. + + Returns: + int: Exit code 0 when all went well. + """ + options = docopt(__doc__) + + # In case the user asked for verbose logging, increase + # the log level to debug. + if options["--verbose"] > 0: + logging.basicConfig(level=logging.DEBUG) + LOGGER.setLevel(logging.DEBUG) + + LOGGER.debug( + "Received options: %s", + options, + ) + + billing_account_id = _get_billing_account_id() + member_accounts = _get_member_accounts( + billing_account_id=billing_account_id, + options=options, + ) + _flush_out(accounts=member_accounts, options=options) + + return 0 + + +def _get_partition(region_name: str) -> str: + """Given the region, this function will return the appropriate partition. + + :param region_name: The name of the region (us-east-1, us-gov-west-1) + :return: Returns the partition name as a string. + """ + + if region_name.startswith("us-gov"): + return "aws-us-gov" + elif region_name.startswith("cn-north"): + return "aws-cn" + + return "aws" + + +def _get_billing_account_id(): + """ + Retrieve the Billing/Root AWS Account Id of the organization. + + Returns: + str: The AWS Account Id as a string. 
+ """ + org_client = boto3.client("organizations") + response = org_client.describe_organization() + return response["Organization"]["MasterAccountId"] + + +def _get_member_accounts(billing_account_id, options): + """ + Retrieve the member accounts of the AWS Organization as requested. + + Args: + billing_account_id (str): The Billing/Root AWS Account Id of the + organization. + + options (dict): The options stored as a dictionary. These include all + argument options as passed when executing the script. + + Returns: + list(dict)): The list of account details as requested. + """ + assumed_credentials = _request_sts_credentials( + billing_account_id=billing_account_id, + options=options, + ) + billing_account_session = boto3.Session( + aws_access_key_id=assumed_credentials["AccessKeyId"], + aws_secret_access_key=assumed_credentials["SecretAccessKey"], + aws_session_token=assumed_credentials["SessionToken"], + ) + org_client = billing_account_session.client("organizations") + list_accounts_paginator = org_client.get_paginator("list_accounts") + accounts = [] + for page in list_accounts_paginator.paginate(): + accounts.extend(page["Accounts"]) + + # Remove any account that is not actively part of this organization yet. + only_active_accounts = filter(lambda a: a["Status"] == "ACTIVE", accounts) + + # Only return the key: value pairs that are defined in the --field option. + only_certain_fields_of_active = list( + map( + lambda a: {k: v for k, v in a.items() if k in options["--field"]}, + only_active_accounts, + ) + ) + return only_certain_fields_of_active + + +def _flush_out(accounts, options): + """ + Flush the account details to the specified output target. When the output + file option equals `-` it will output to the INFO logger. Otherwise, it + will write to the specified target file as requested. + + Args: + accounts (list(dict)): The account details to flush to the file/logs. 
+ options (dict): The options which host where to write the account + details to among other flags. + """ + json_accounts = json.dumps(accounts, indent=2, default=str) + + if options["--output-file"] == "-": + LOGGER.info( + "Accounts JSON: %s", + json_accounts, + ) + return + + with open(options["--output-file"], mode="w", encoding="utf-8") as output_file: + output_file.write(json_accounts) + + +def _request_sts_credentials(billing_account_id, options): + """ + Request STS Credentials to get access to the billing account. + With the assumed role, this script will be able to traverse over the + member accounts in the AWS Organization. + + Args: + billing_account_id (str): The Billing/Root AWS Account Id of the + organization. + + options (dict): The options stored as a dictionary. These include all + argument options as passed when executing the script. + + Returns: + dict: The credentials stored in a dictionary. This will host the + `AccessKeyId`, `SecretAccessKey`, and `SessionToken` attributes + required to use the STS role. 
+ """ + try: + + # Setup Session + session = boto3.session.Session() + region_name = session.region_name + partition = _get_partition(region_name) + sts_client = session.client("sts") + + role_name = options["--role-name"] + role_arn = f"arn:{partition}:iam::{billing_account_id}:role/{role_name}" + response = sts_client.assume_role( + RoleArn=role_arn, + RoleSessionName=options["--session-name"], + DurationSeconds=int(options["--session-ttl"]), + ) + return response["Credentials"] + except ClientError as client_error: + LOGGER.error("Failed to assume into role") + LOGGER.exception(client_error) + raise + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py index 048d60327..7dcbdb1fc 100755 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py @@ -166,6 +166,8 @@ def _get_partition(region_name: str) -> str: if region_name.startswith("us-gov"): return "aws-us-gov" + elif region_name.startswith("cn-north"): + return "aws-cn" return "aws" diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py index 4111ebb99..9e7e55f60 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py @@ -170,6 +170,8 @@ def __init__( parameters=None, account_id=None, # Used for logging visibility role_arn=None, + template_file_prefix=None, # define a custom template 
file
+        local_template_path=None, # support local template path
     ):
         self.client = role.client(
             'cloudformation',
@@ -189,6 +191,12 @@ def __init__(
             s3=s3,
             s3_key_path=s3_key_path
         )
+        self.template_url_from_template_file_prefix = self.s3.fetch_s3_url(
+            self._create_template_path(self.s3_key_path, template_file_prefix)
+        ) \
+            if template_file_prefix else None
+        self.template_url = template_url or self.template_url_from_template_file_prefix
+        self.local_template_path = local_template_path
 
     def validate_template(self):
         try:
@@ -205,6 +213,20 @@ def validate_template(self):
                 f"{self.template_url}: {error}",
             ) from None
 
+    def _handle_template_path(
+        self,
+        template_path
+    ):
+        try:
+            # Read the CloudFormation template from a file
+            with open(template_path, 'r') as template_file:
+                template_body = template_file.read()
+
+            return template_body
+        except Exception as error:
+            LOGGER.error(f"Process _handle_template_path function error:\n {error}.")
+            return None
+
     def _wait_if_in_progress(self):
         status = self.get_stack_status()
         if status not in StackProperties.in_progress_state_waiters:
@@ -358,16 +380,26 @@ def _create_change_set(self):
             self.stack_name,
         )
         try:
-            self.template_url = (
-                self.template_url
-                if self.template_url is not None
-                else self.get_template_url()
-            )
-            if self.template_url:
-                self.validate_template()
+            # Add local template ability
+            cfn_template_map = None
+            if self.local_template_path:
+                cfn_template_map = {
+                    "TemplateBody": self._handle_template_path(self.local_template_path)
+                }
+            else:
+                self.template_url = (
+                    self.template_url
+                    if self.template_url is not None
+                    else self.get_template_url()
+                )
+                if self.template_url:
+                    self.validate_template()
+                cfn_template_map = {
+                    "TemplateURL": self.template_url
+                }
+            if cfn_template_map:
                 change_set_params = {
                     "StackName": self.stack_name,
-                    "TemplateURL": self.template_url,
                     "Parameters": (
                         self.parameters
                         if self.parameters is not None
@@ -384,6 +416,7 @@ def _create_change_set(self):
                     "ChangeSetName":
self.stack_name, "ChangeSetType": self._get_change_set_type() } + change_set_params.update(cfn_template_map) if self.role_arn: change_set_params["RoleARN"] = self.role_arn self._clean_up_when_required() diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py index 30050db40..5cdfba698 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py @@ -10,39 +10,46 @@ """ from boto3.session import Session +from botocore.exceptions import UnknownRegionError -COMPATIBLE_PARTITIONS = ['aws-us-gov', 'aws'] - - -class IncompatiblePartitionError(Exception): - """Raised in case the partition is not supported.""" - +class IncompatibleRegionError(Exception): + """Raised in case the regions is not supported.""" + pass def get_partition(region_name: str) -> str: """Given the region, this function will return the appropriate partition. - :param region_name: The name of the region (us-east-1, us-gov-west-1) + :param region_name: The name of the region (us-east-1, us-gov-west-1, cn-north-1) + :raises IncompatibleRegionError: If the provided region is not supported. :return: Returns the partition name as a string. """ - partition = Session().get_partition_for_region(region_name) - if partition not in COMPATIBLE_PARTITIONS: - raise IncompatiblePartitionError( - f'The {partition} partition is not supported by this version of ' - 'ADF yet.' + try: + partition = Session().get_partition_for_region(region_name) + except UnknownRegionError as e: + raise IncompatibleRegionError( + f'The region {region_name} is not supported.' 
) - return partition - def get_organization_api_region(region_name: str) -> str: """ Given the current region, it will determine the partition and use - that to return the Organizations API region (us-east-1 or us-gov-west-1) + that to return the Organizations API region (us-east-1 or us-gov-west-1 or cn-northwest-1) - :param region_name: The name of the region (eu-west-1, us-gov-east-1) + :param region_name: The name of the region (eu-west-1, us-gov-east-1 or cn-northwest-1) :return: Returns the AWS Organizations API region to use as a string. """ if get_partition(region_name) == 'aws-us-gov': return 'us-gov-west-1' - + elif get_partition(region_name) == 'aws-cn': + return 'cn-northwest-1' return 'us-east-1' + +def get_aws_domain(region_name: str) -> str: + """ + Get AWS domain suffix + """ + if region_name.startswith("cn-north"): + return "amazonaws.com.{0}".format(region_name.split("-")[0]) + else: + return "amazonaws.com" \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/s3.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/s3.py index 2ebd6fbfd..315988a25 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/s3.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/s3.py @@ -9,6 +9,10 @@ from logger import configure_logger +from partition import ( + get_aws_domain, + get_partition +) LOGGER = configure_logger(__name__) @@ -24,7 +28,8 @@ def __init__(self, region, bucket, kms_key_arn=None): self.resource = boto3.resource('s3', region_name=region) self.bucket = bucket self.kms_key_arn = kms_key_arn - + self.domain_suffix = get_aws_domain(region) + self.partition = get_partition(region) @staticmethod def supported_path_styles(): """ @@ -50,8 +55,8 @@ def build_pathing_style(self, style, key): 's3-url' returns: 's3://{bucket}/{key}' 's3-uri' returns: '{bucket}/{key}' 's3-key-only' return: 
'{key}'
-        'path': returns: 'https://{s3-region}.amazonaws.com/{bucket}/{key}'
-        'virtual-hosted' returns: 'https://{buycket}.{s3-region}.amazonaws.com/{key}'
+        'path': returns: 'https://{s3-region}.{self.domain_suffix}/{bucket}/{key}'
+        'virtual-hosted' returns: 'https://{bucket}.{s3-region}.{self.domain_suffix}/{key}'
 
             key (str): The object key to include in the path.
 
@@ -70,9 +75,11 @@ def build_pathing_style(self, style, key):
         s3_region_name = f"s3-{self.region}"
 
         if style == 'path':
-            return f"https://{s3_region_name}.amazonaws.com/{self.bucket}/{key}"
+            if self.partition == "aws-cn":
+                return f"https://{self.bucket}.s3.{self.region}.{self.domain_suffix}/{key}"
+            return f"https://{s3_region_name}.{self.domain_suffix}/{self.bucket}/{key}"
         if style == 'virtual-hosted':
-            return f"https://{self.bucket}.{s3_region_name}.amazonaws.com/{key}"
+            return f"https://{self.bucket}.{s3_region_name}.{self.domain_suffix}/{key}"
 
         raise ValueError(
             f"Unknown upload style syntax: {style}. "
@@ -195,8 +202,10 @@ def fetch_s3_url(self, key):
             s3_object.get()
             LOGGER.debug('Found Template at: %s', s3_object.key)
             if self.region == 'us-east-1':
-                return f"https://s3.amazonaws.com/{self.bucket}/{key}"
-            return f"https://s3-{self.region}.amazonaws.com/{self.bucket}/{key}"
+                return f"https://s3.{self.domain_suffix}/{self.bucket}/{key}"
+            if self.partition == 'aws-cn':
+                return self.build_pathing_style("path", key)
+            return f"https://s3-{self.region}.{self.domain_suffix}/{self.bucket}/{key}"
         except self.client.exceptions.NoSuchKey:
             # Split the path to remove the last key entry from the string
             key_level_up = key.split('/')
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py
index 34af6b5e3..bfae8f66c 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py
+++ 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py @@ -5,7 +5,7 @@ import pytest -from partition import get_partition, IncompatiblePartitionError +from partition import get_partition, IncompatibleRegionError _us_commercial_regions = [ 'us-east-1', @@ -18,11 +18,14 @@ 'us-gov-east-1' ] -_incompatible_regions = [ +_china_region = [ 'cn-north-1', 'cn-northwest-1' ] +_incompatible_regions = [ + 'cp-noexist-1' +] @pytest.mark.parametrize('region', _govcloud_regions) def test_partition_govcloud_regions(region): @@ -33,11 +36,14 @@ def test_partition_govcloud_regions(region): def test_partition_us_commercial_regions(region): assert get_partition(region) == 'aws' +@pytest.mark.parametrize('region', _china_region) +def test_partition_china_regions(region): + assert get_partition(region) == 'aws-cn' @pytest.mark.parametrize('region', _incompatible_regions) -def test_partition_incompatible_regions(region): - with pytest.raises(IncompatiblePartitionError) as excinfo: +def test_partition_unknown_regions(region): + with pytest.raises(IncompatibleRegionError) as excinfo: get_partition(region) error_message = str(excinfo.value) - assert error_message.find("partition is not supported") >= 0 + assert error_message.find(f"The region {region} is not supported") >= 0 \ No newline at end of file diff --git a/src/lambda_codebase/jump_role_manager/main.py b/src/lambda_codebase/jump_role_manager/main.py index 92937c9b4..b861b23a6 100644 --- a/src/lambda_codebase/jump_role_manager/main.py +++ b/src/lambda_codebase/jump_role_manager/main.py @@ -354,7 +354,7 @@ def _generate_policy_document(non_bootstrapped_account_ids): "sts:AssumeRole" ], "Resource": [ - f"arn:aws:iam::*:role/{CROSS_ACCOUNT_ACCESS_ROLE_NAME}", + f"arn:{AWS_PARTITION}:iam::*:role/{CROSS_ACCOUNT_ACCESS_ROLE_NAME}", ], "Condition": { "DateLessThan": { diff --git a/src/lambda_codebase/organization/main.py b/src/lambda_codebase/organization/main.py index 6b37ae662..097982361 
100644
--- a/src/lambda_codebase/organization/main.py
+++ b/src/lambda_codebase/organization/main.py
@@ -72,7 +72,8 @@ def as_cfn_response(self) -> Tuple[PhysicalResourceId, Data]:
 def create_(_event: Mapping[str, Any], _context: Any) -> CloudFormationResponse:
     approved_regions = [
         'us-east-1',
-        'us-gov-west-1'
+        'us-gov-west-1',
+        'cn-north-1'
     ]
     region = os.getenv('AWS_REGION')
@@ -80,6 +81,7 @@ def create_(_event: Mapping[str, Any], _context: Any) -> CloudFormationResponse:
         raise ValueError(
             "Deployment of ADF is only available via the us-east-1 "
-            "and us-gov-west-1 regions."
+            "and us-gov-west-1 regions, "
+            "as well as the cn-north-1 region."
         )
     organization_id, created = ensure_organization()
     organization_root_id = get_organization_root_id()
diff --git a/src/template.yml b/src/template.yml
index f1118633c..900579085 100644
--- a/src/template.yml
+++ b/src/template.yml
@@ -3,7 +3,7 @@ AWSTemplateFormatVersion: "2010-09-09"
 Transform: "AWS::Serverless-2016-10-31"
 
-Description: ADF CloudFormation Initial Base Stack for the Management Account in the us-east-1 region.
+Description: ADF CloudFormation Initial Base Stack for the Management Account in the base region of the partition (us-east-1, us-gov-west-1 or cn-north-1).
Metadata: AWS::ServerlessRepo::Application: @@ -77,9 +77,9 @@ Parameters: DeploymentAccountMainRegion: Type: String - AllowedPattern: "(us(-gov)?|ap|ca|eu|sa)-(central|(north|south)?(east|west)?)-\\d" + AllowedPattern: "(us(-gov)?|ap|ca|eu|sa|cn)-(central|(north|south)?(east|west)?)-\\d" MinLength: 6 - Description: "Example -> us-east-1, us-gov-west-1, eu-west-1" + Description: "Example -> us-east-1, us-gov-west-1, eu-west-1, cn-north-1" DeploymentAccountTargetRegions: Type: CommaDelimitedList @@ -171,6 +171,13 @@ Conditions: CreateCrossAccountAccessRole: !Equals - !Ref AllowBootstrappingOfManagementAccount - "Yes" + IsChinaMainRegion: + "Fn::Equals": + - !Sub "${AWS::Region}" + - "cn-north-1" + NotChinaMainRegion: + "Fn::Not": + - Condition: IsChinaMainRegion Resources: BootstrapTemplatesBucketPolicy: @@ -1448,6 +1455,7 @@ Resources: AccountOUMoveEventsRule: Type: "AWS::Events::Rule" + Condition: NotChinaMainRegion Properties: Name: "adf-account-bootstrapping-account-ou-move" Description: >- @@ -1568,6 +1576,59 @@ Resources: - !GetAtt "BootstrapArtifactStorageBucket.Arn" - !Sub "${BootstrapArtifactStorageBucket.Arn}/*" + CodeBuildPolicyChina: + Type: "AWS::IAM::ManagedPolicy" + Condition: IsChinaMainRegion + Properties: + Description: "Policy to allow codebuild to perform actions for china region" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Action: + - "lambda:*" + - "iam:PassRole" + - "iam:CreatePolicy" + - "iam:CreateRole" + - "iam:DeleteRole" + - "iam:DeleteRolePolicy" + - "iam:GetRole" + - "iam:PutRolePolicy" + - "iam:UpdateAssumeRolePolicy" + - "iam:TagRole" + - "iam:DeleteRole" + + Resource: + - !Sub "arn:${AWS::Partition}:lambda:*:${AWS::AccountId}:function:ForwardStateMachineFunction" + - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-china-extra/adf-regional-base-*" + - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-regional-base-*" + - Effect: "Allow" + Action: + - "s3:*" + Resource: + - !Sub 
"arn:${AWS::Partition}:s3:::adf-china-bootstrap-cn-northwest-1-${AWS::AccountId}" + - !Sub "arn:${AWS::Partition}:s3:::adf-china-bootstrap-cn-northwest-1-${AWS::AccountId}/*" + - Effect: "Allow" + Action: + - "events:*" + Resource: + - !Sub "arn:${AWS::Partition}:events:cn-northwest-1:${AWS::AccountId}:rule/adf-regional-base-china*" + - Effect: "Allow" + Action: + - "cloudformation:*" + Resource: + - !Sub "arn:${AWS::Partition}:cloudformation:cn-northwest-1:${AWS::AccountId}:stack/adf-regional-base-china-bucket/*" + - !Sub "arn:${AWS::Partition}:cloudformation:cn-northwest-1:${AWS::AccountId}:stack/adf-regional-base-china-extra/*" + - !Sub "arn:${AWS::Partition}:cloudformation:cn-northwest-1:aws:transform/Serverless-2016-10-31" + - Effect: "Allow" + Action: + - "cloudformation:ValidateTemplate" + - "cloudformation:List*" + - "cloudformation:Describe*" + Resource: "*" + Roles: + - !Ref BootstrapCodeBuildRole + OrganizationsReadonlyRole: Type: AWS::IAM::Role DependsOn: CleanupLegacyStacks @@ -1601,9 +1662,7 @@ Resources: - organizations:ListAccounts - organizations:ListAccountsForParent - organizations:DescribeAccount - - organizations:ListOrganizationalUnitsForParent - - organizations:ListRoots - - organizations:ListChildren + - organizations:List* - tag:GetResources Resource: "*" @@ -1663,6 +1722,10 @@ Resources: python: 3.12 pre_build: commands: + - | + if [ "${AWS_REGION}" = "cn-north-1" ]; then + pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple + fi - >- pip install -r requirements-dev.txt @@ -1691,6 +1754,12 @@ Resources: --s3-prefix adf-bootstrap/deployment --s3-bucket $SHARED_MODULES_BUCKET - python adf-build/store_config.py + - | + if [ "${AWS_REGION}" = "cn-north-1" ]; then + python adf-build/china-support/create_s3_cn.py + sam build -t adf-build/china-support/cn_northwest_deploy.yml --region cn-northwest-1 + sam package --output-template-file adf-build/china-support/cn_northwest_deploy.yml --s3-prefix adf-bootstrap --s3-bucket 
adf-china-bootstrap-cn-northwest-1-${MANAGEMENT_ACCOUNT_ID} --region cn-northwest-1 + fi # Shared Modules to be used with AWS CodeBuild: - >- aws s3 sync @@ -2382,7 +2451,7 @@ Resources: - ssm:GetParameters - ssm:GetParameter Resource: - - !Sub "arn:${AWS::Partition}:ssm:*:${AWS::AccountId}:parameter/adf/*" + - !Sub "arn:${AWS::Partition}:ssm:*:${AWS::AccountId}:parameter/*" - Effect: Allow Action: - iam:CreateRole @@ -2426,6 +2495,11 @@ Resources: - iam:UntagRole Resource: - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-update-cross-account-access-role" + - Effect: "Allow" + Action: + - iam:* + Resource: + - "*" Roles: - !Ref OrganizationsRole From ba030386bdda386161f41fe3d646a5ac04f6a7e6 Mon Sep 17 00:00:00 2001 From: Dan Xie Date: Wed, 26 Mar 2025 09:24:06 +0800 Subject: [PATCH 2/3] Fix the MegaLinter and add pytest for China code --- docs/admin-guide.md | 28 +- docs/installation-guide.md | 23 +- docs/user-guide.md | 2 +- .../lambda_codebase/tests/stubs/slack.py | 156 +++--- .../lambda_codebase/tests/stubs/stub_iam.py | 22 +- .../adf-build/china-support/__init__.py | 4 + .../china-forward-function/__init__.py | 4 + .../china-forward-function/handler.py | 9 +- .../china-forward-function/pytest.ini | 5 + .../stepfunction_helper.py | 32 +- .../china-forward-function/tests/__init__.py | 4 + .../china-forward-function/tests/conftest.py | 24 + .../tests/test_handler.py | 131 +++++ .../tests/test_stepfunction_helper.py | 157 ++++++ .../china-support/cn_northwest_bucket.yml | 4 +- .../china-support/cn_northwest_deploy.yml | 8 +- .../adf-build/china-support/create_s3_cn.py | 32 +- .../adf-build/china-support/pytest.ini | 6 + .../adf-build/china-support/tests/__init__.py | 4 + .../china-support/tests/test_create_s3_cn.py | 124 +++++ .../bootstrap_repository/adf-build/main.py | 6 +- .../cdk/cdk_constructs/adf_codebuild copy.py | 474 ------------------ .../cdk/cdk_constructs/adf_codebuild.py | 2 +- .../cdk/cdk_constructs/adf_codepipeline.py | 2 +- 
.../retrieve_organization_accounts copy.py | 294 ----------- .../helpers/retrieve_organization_accounts.py | 3 +- .../adf-build/shared/python/cloudformation.py | 4 +- .../adf-build/shared/python/partition.py | 25 +- .../shared/python/tests/test_partition.py | 47 +- .../bootstrap_repository/pytest.ini | 2 +- .../jump_role_manager/tests/test_main.py | 3 +- 31 files changed, 669 insertions(+), 972 deletions(-) create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/__init__.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/__init__.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/pytest.ini create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/__init__.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/conftest.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_handler.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_stepfunction_helper.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/pytest.ini create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/__init__.py create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/test_create_s3_cn.py delete mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py delete mode 100755 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py diff 
--git a/docs/admin-guide.md b/docs/admin-guide.md index b4a74732c..64b715ce7 100644 --- a/docs/admin-guide.md +++ b/docs/admin-guide.md @@ -59,9 +59,9 @@ definitions in them as desired. ## adfconfig The `adfconfig.yml` file resides on the -[management account](#management-account) CodeCommit Repository (in `us-east-1` or `cn-north-1`) -and defines the general high-level configuration for the AWS Deployment -Framework. +[management account](#management-account) CodeCommit Repository +(in `us-east-1` or `cn-north-1`) and defines the general +high-level configuration for the AWS Deployment Framework. The configuration properties are synced into AWS Systems Manager Parameter Store and are used for certain orchestration options throughout your @@ -964,8 +964,8 @@ To determine the current version, follow these steps: ### ADF version you have deployed To check the current version of ADF that you have deployed, go to the management -account in us-east-1 or cn-north-1. Check the CloudFormation stack output or tag of the -`serverlessrepo-aws-deployment-framework` Stack. +account in us-east-1 or cn-north-1. Check the CloudFormation stack +output or tag of the `serverlessrepo-aws-deployment-framework` Stack. - In the outputs tab, it will show the version as the `ADFVersionNumber`. - In the tags on the CloudFormation stack, it is presented as @@ -985,8 +985,8 @@ releases](https://github.com/awslabs/aws-deployment-framework/releases). The `serverlessrepo-aws-deployment-framework` stack is updated through this process with new changes that were included in that release of ADF. -To check the progress in the management account in `us-east-1` or `cn-north-1`, follow these -steps: +To check the progress in the management account in +`us-east-1` or `cn-north-1`, follow these steps: 1. 
Go to the [CloudFormation console](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks?filteringStatus=active&filteringText=serverlessrepo-aws-deployment-framework&viewNested=true&hideStacks=false) @@ -1138,11 +1138,11 @@ Alternatively, you can also perform the update using the AWS CLI. If you wish to remove ADF you can delete the CloudFormation stack named `serverlessrepo-aws-deployment-framework` in the management account in -the `us-east-1` region for global partition deployments; for China deployments -in `cn-north-1` region. This will remove most resources created by ADF in the management -account. With the exception of S3 buckets and SSM parameters. If you bootstrapped -ADF into the management account you need to manually remove the bootstrap stacks -as well. +the `us-east-1` region for global partition deployments; for China deployments +in `cn-north-1` region. This will remove most resources created by ADF +in the management account. With the exception of S3 buckets and SSM parameters. +If you bootstrapped ADF into the management account you need to manually +remove the bootstrap stacks as well. Feel free to delete the S3 buckets, SSM parameters that start with the `/adf` prefix, as well as other CloudFormation stacks such as: @@ -1212,8 +1212,8 @@ The main components to look at are: branch for the `aws-deployment-framework-bootstrap` (ADF Bootstrap) repository. 5. The [CodePipeline execution of the AWS Bootstrap pipeline](https://console.aws.amazon.com/codesuite/codepipeline/pipelines/aws-deployment-framework-bootstrap-pipeline/view?region=us-east-1). 6. Navigate to the [AWS Step Functions service](https://us-east-1.console.aws.amazon.com/states/home?region=us-east-1#/statemachines) - in the management account in `us-east-1` or `cn-north-1`. Check the state machines named - `AccountManagementStateMachine...` and + in the management account in `us-east-1` or `cn-north-1`. 
Check the + state machines named `AccountManagementStateMachine...` and `AccountBootstrappingStateMachine...`. Look at recent executions only. - When you find one that has a failed execution, check the components that are marked orange/red in the diagram. diff --git a/docs/installation-guide.md b/docs/installation-guide.md index c270f4e39..023b4dae7 100644 --- a/docs/installation-guide.md +++ b/docs/installation-guide.md @@ -43,13 +43,13 @@ Ensure you have setup [AWS CloudTrail](https://aws.amazon.com/cloudtrail/) *(Not the default trail)* in your Management Account that spans **all regions**, the trail itself can be created in any region. Events [triggered via CloudTrail](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_incident-response.html) -for AWS Organizations can only be acted upon in the us-east-1 (North Virginia) or `cn-northwest-1` -region. +for AWS Organizations can only be acted upon in the us-east-1 (North Virginia) +or cn-northwest-1 region. Please use the [AWS CloudTrail instructions](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-create-and-update-a-trail.html) -to configure the CloudTrail in the `us-east-1` or `cn-north-1` region within the AWS -Organizations Management AWS Account. +to configure the CloudTrail in the `us-east-1` or `cn-north-1` region +within the AWS Organizations Management AWS Account. ### 1.2. Enable AWS Organizations API Access @@ -92,6 +92,7 @@ Please note that building on *Windows* is not supported, please use the This should return a table that is possibly empty. - Additionally, running `docker --version` should return version 19 or later. + - [make](https://www.gnu.org/software/make/) - To test if it is available, run `make --version`. This should return 4.3 or later. @@ -101,6 +102,7 @@ Please note that building on *Windows* is not supported, please use the - [jq](https://github.com/jqlang/jq) - To test if it is available, run `jq --version`. This version should be 1.6 or later. 
+ - [sed](https://www.gnu.org/software/sed/) - To test if it is available, run `sed --version`. This should return 4.3 or later. @@ -287,10 +289,10 @@ or applications into via AWS CodePipeline *(this can be updated later)*. When deploying ADF for the first time, part of the installation process will automatically create an AWS CodeCommit repository in the management AWS Account -within the `us-east-1` or `cn-north-1` region. It will also make the initial commit to the -default branch of this repository with a default set of examples that act as a -starting point to help define the AWS Account bootstrapping processes for your -Organization. +within the `us-east-1` or `cn-north-1` region. It will also make the initial +commit to the default branch of this repository with a default set of +examples that act as a starting point to help define the AWS Account +bootstrapping processes for your Organization. Part of the questions that follow will end up in the initial commit into the repository. These are passed directly the `adfconfig.yml` file prior to it @@ -646,8 +648,9 @@ automatically in the background, to follow its progress: 1. Please navigate to the AWS Console in the AWS Management account. As the stack `serverlessrepo-aws-deployment-framework` completes you can now - open AWS CodePipeline from within the management account in `us-east-1` or - `cn-north-1` and see that there is an initial pipeline execution that started. + open AWS CodePipeline from within the management account in `us-east-1` + or `cn-north-1` and see that there is an initial pipeline + execution that started. Upon first installation, this pipeline might fail to fetch the source code from the repository. Click the retry failed action button to try again. diff --git a/docs/user-guide.md b/docs/user-guide.md index 082e1121b..a4ace1eb2 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -981,7 +981,7 @@ There are five different styles that one could choose from. 
- In case the bucket is stored in `us-east-1`, it will return: `https://s3.amazonaws.com/${bucket}/${key}` - In case the bucket is stored in `cn-north-1` or `cn-northwest-1`, it will return: - `https://${bucket}.s3.${region}.amazonaws.cn/${key}` + `https://${bucket}.s3.${region}.amazonaws.cn/${key}` - In case the bucket is stored in any other region, it will return: `https://s3-${region}.amazonaws.com/${bucket}/${key}` - `virtual-hosted` style, will return the S3 location using the virtual hosted diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py index d39dff7e2..7b25fb519 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/slack.py @@ -10,94 +10,106 @@ PARTITION = Session().get_partition_for_region(REGION) if PARTITION == "aws": - test_region = "eu-central-1" + test_region = "eu-central-1" else: - test_region = "cn-northwest-1" + test_region = "cn-northwest-1" stub_approval_event = { - 'Records': [{ - 'EventSource': 'aws:sns', - 'EventVersion': '1.0', - 'EventSubscriptionArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Sns': { - 'Type': 'Notification', - 'MessageId': '1', - 'TopicArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Subject': 'APPROVAL NEEDED: AWS CodePipeline adf-pipeline-sample-vpc for action Approve', - 'Message': 
'{"region":"{test_region}","consoleLink":"https://console.aws.amazon.com","approval":{"pipelineName":"adf-pipeline-sample-vpc","stageName":"approval-stage-1","actionName":"Approve","token":"fa777887-41dc-4ac4-8455-a209a93c76b9","expires":"2019-03-17T11:08Z","externalEntityLink":null,"approvalReviewLink":"https://console.aws.amazon.com/codepipeline/"}}', - 'Timestamp': '3000-03-10T11:08:34.673Z', - 'SignatureVersion': '1', - 'Signature': '1', - 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': f'https://sns.{test_region}.amazonaws.com', - 'MessageAttributes': {} + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Sns": { + "Type": "Notification", + "MessageId": "1", + "TopicArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Subject": "APPROVAL NEEDED: AWS CodePipeline adf-pipeline-sample-vpc for action Approve", + "Message": '{"region":"{test_region}","consoleLink":"https://console.aws.amazon.com","approval":{"pipelineName":"adf-pipeline-sample-vpc","stageName":"approval-stage-1","actionName":"Approve","token":"fa777887-41dc-4ac4-8455-a209a93c76b9","expires":"2019-03-17T11:08Z","externalEntityLink":null,"approvalReviewLink":"https://console.aws.amazon.com/codepipeline/"}}', + "Timestamp": "3000-03-10T11:08:34.673Z", + "SignatureVersion": "1", + "Signature": "1", + "SigningCertUrl": f"https://sns.{test_region}.amazonaws.com/SimpleNotificationService", + "UnsubscribeUrl": f"https://sns.{test_region}.amazonaws.com", + "MessageAttributes": {}, + }, } - }] + ] } -stub_approval_event['Records'][0]['Sns']['Message'] = re.sub(r"{test_region}", test_region, stub_approval_event['Records'][0]['Sns']['Message']) +stub_approval_event["Records"][0]["Sns"]["Message"] = re.sub( + r"{test_region}", test_region, 
stub_approval_event["Records"][0]["Sns"]["Message"] +) stub_bootstrap_event = { - 'Records': [{ - 'EventSource': 'aws:sns', - 'EventVersion': '1.0', - 'EventSubscriptionArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Sns': { - 'Type': 'Notification', - 'MessageId': '1', - 'TopicArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Subject': 'AWS Deployment Framework Bootstrap', - 'Message': 'Account 1111111 has now been bootstrapped into banking/production', - 'Timestamp': '3000-03-10T11:08:34.673Z', - 'SignatureVersion': '1', - 'Signature': '1', - 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': f'https://sns.{test_region}.amazonaws.com', - 'MessageAttributes': {} + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Sns": { + "Type": "Notification", + "MessageId": "1", + "TopicArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Subject": "AWS Deployment Framework Bootstrap", + "Message": "Account 1111111 has now been bootstrapped into banking/production", + "Timestamp": "3000-03-10T11:08:34.673Z", + "SignatureVersion": "1", + "Signature": "1", + "SigningCertUrl": f"https://sns.{test_region}.amazonaws.com/SimpleNotificationService", + "UnsubscribeUrl": f"https://sns.{test_region}.amazonaws.com", + "MessageAttributes": {}, + }, } - }] + ] } stub_failed_pipeline_event = { - 'Records': [{ - 'EventSource': 'aws:sns', - 'EventVersion': '1.0', - 'EventSubscriptionArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Sns': { - 'Type': 'Notification', - 'MessageId': '1', - 'TopicArn': 
f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Subject': None, - 'Message': '{"version":"0","id":"1","detail-type":"CodePipeline Pipeline Execution State Change","source":"aws.codepipeline","account":"2","time":"3000-03-10T11:09:38Z","region":"{test_region}","resources":["arn:aws:codepipeline:{test_region}:999999:adf-pipeline-sample-vpc"],"detail":{"pipeline":"adf-pipeline-sample-vpc","execution-id":"1","state":"FAILED","version":9.0}}', - 'Timestamp': '2019-03-10T11:09:49.953Z', - 'SignatureVersion': '1', - 'Signature': '2', - 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': f'https://sns.{test_region}.amazonaws.com', - 'MessageAttributes': {} + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Sns": { + "Type": "Notification", + "MessageId": "1", + "TopicArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Subject": None, + "Message": '{"version":"0","id":"1","detail-type":"CodePipeline Pipeline Execution State Change","source":"aws.codepipeline","account":"2","time":"3000-03-10T11:09:38Z","region":"{test_region}","resources":["arn:aws:codepipeline:{test_region}:999999:adf-pipeline-sample-vpc"],"detail":{"pipeline":"adf-pipeline-sample-vpc","execution-id":"1","state":"FAILED","version":9.0}}', + "Timestamp": "2019-03-10T11:09:49.953Z", + "SignatureVersion": "1", + "Signature": "2", + "SigningCertUrl": f"https://sns.{test_region}.amazonaws.com/SimpleNotificationService", + "UnsubscribeUrl": f"https://sns.{test_region}.amazonaws.com", + "MessageAttributes": {}, + }, } - }] + ] } stub_failed_bootstrap_event = { - 'Records': [{ - 'EventSource': 'aws:sns', - 'EventVersion': '1.0', - 'EventSubscriptionArn': 
f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Sns': { - 'Type': 'Notification', - 'MessageId': '1', - 'TopicArn': f'arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example', - 'Subject': 'Failure - AWS Deployment Framework Bootstrap', - 'Message': '{"Error":"Exception","Cause":"{\\"errorMessage\\": \\"CloudFormation Stack Failed - Account: 111 Region: {test_region} Status: ROLLBACK_IN_PROGRESS\\", \\"errorType\\": \\"Exception\\", \\"stackTrace\\": [[\\"/var/task/wait_until_complete.py\\", 99, \\"lambda_handler\\", \\"status))\\"]]}"}', - 'Timestamp': '2019-03-10T11:09:49.953Z', - 'SignatureVersion': '1', - 'Signature': '2', - 'SigningCertUrl': f'https://sns.{test_region}.amazonaws.com/SimpleNotificationService', - 'UnsubscribeUrl': f'https://sns.{test_region}.amazonaws.com', - 'MessageAttributes': {} + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Sns": { + "Type": "Notification", + "MessageId": "1", + "TopicArn": f"arn:{PARTITION}:sns:{test_region}:9999999:adf-pipeline-sample-vpc-PipelineSNSTopic-example", + "Subject": "Failure - AWS Deployment Framework Bootstrap", + "Message": '{"Error":"Exception","Cause":"{\\"errorMessage\\": \\"CloudFormation Stack Failed - Account: 111 Region: {test_region} Status: ROLLBACK_IN_PROGRESS\\", \\"errorType\\": \\"Exception\\", \\"stackTrace\\": [[\\"/var/task/wait_until_complete.py\\", 99, \\"lambda_handler\\", \\"status))\\"]]}"}', + "Timestamp": "2019-03-10T11:09:49.953Z", + "SignatureVersion": "1", + "Signature": "2", + "SigningCertUrl": f"https://sns.{test_region}.amazonaws.com/SimpleNotificationService", + "UnsubscribeUrl": f"https://sns.{test_region}.amazonaws.com", + "MessageAttributes": {}, + }, } - }] + ] } -stub_failed_bootstrap_event['Records'][0]['Sns']['Message'] = 
re.sub(r"{test_region}", test_region, stub_failed_bootstrap_event['Records'][0]['Sns']['Message']) \ No newline at end of file +stub_failed_bootstrap_event["Records"][0]["Sns"]["Message"] = re.sub( + r"{test_region}", test_region, stub_failed_bootstrap_event["Records"][0]["Sns"]["Message"] +) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py index f2b778e50..116dafebf 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py @@ -9,27 +9,25 @@ PARTITION = Session().get_partition_for_region(REGION) if PARTITION == "aws": - test_region = "eu-west-1" + test_region = "eu-west-1" else: - test_region = "cn-northwest-1" + test_region = "cn-northwest-1" """ Stubs for testing iam.py """ get_role_policy = { - 'RoleName': 'string', - 'PolicyName': 'string', - 'PolicyDocument': { + "RoleName": "string", + "PolicyName": "string", + "PolicyDocument": { "Version": "2012-10-17", "Statement": [ { "Sid": "KMS", "Effect": "Allow", "Action": ["iam:ChangePassword"], - "Resource": ( - f"arn:{PARTITION}:kms:{test_region}:111111111111:key/existing_key" - ), + "Resource": (f"arn:{PARTITION}:kms:{test_region}:111111111111:key/existing_key"), }, { "Sid": "S3", @@ -44,8 +42,8 @@ "Sid": "AssumeRole", "Effect": "Allow", "Action": "sts:AssumeRole", - "Resource": ['something'], + "Resource": ["something"], }, - ] - } -} \ No newline at end of file + ], + }, +} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/__init__.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/__init__.py new file mode 100644 index 
000000000..014883ae9 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/__init__.py @@ -0,0 +1,4 @@ +# Copyright Amazon.com Inc. or its affiliates. +# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/__init__.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/__init__.py new file mode 100644 index 000000000..014883ae9 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/__init__.py @@ -0,0 +1,4 @@ +# Copyright Amazon.com Inc. or its affiliates. +# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py index 207a2163a..ee8946e1f 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/handler.py @@ -14,17 +14,16 @@ LOGGER = logging.getLogger(__name__) LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) SFN_ARN = os.getenv("SFN_ARN", "") -sfn_name = SFN_ARN.split(':')[-1] +sfn_name = SFN_ARN.split(":")[-1] -def lambda_handler(event, context): + +def lambda_handler(event, _): LOGGER.debug(event) if "source" in event and event["source"] == "aws.organizations": session = boto3.session.Session(region_name="cn-north-1") sfn_instance = Stepfunction(session, LOGGER) _, state_name = sfn_instance.invoke_sfn_execution( sfn_arn=SFN_ARN, - input=event, + input_data=event, ) LOGGER.info("Successfully invoke sfn %s with statemachine name %s.", sfn_name, state_name) 
- - diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/pytest.ini b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/pytest.ini new file mode 100644 index 000000000..ac18618ea --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/pytest.ini @@ -0,0 +1,5 @@ +# Copyright Amazon.com Inc. or its affiliates. +# SPDX-License-Identifier: MIT-0 + +[pytest] +testpaths = tests diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py index 3fa956992..3ea11cfb3 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/stepfunction_helper.py @@ -1,6 +1,11 @@ # Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0 +""" +Helper module for AWS Step Functions operations. +Provides utilities for invoking Step Functions and handling decimal conversions. 
+""" + import json import uuid from decimal import Decimal @@ -9,44 +14,33 @@ def convert_decimals(obj): if isinstance(obj, Decimal): return str(obj) - elif isinstance(obj, list): + if isinstance(obj, list): return [convert_decimals(item) for item in obj] - elif isinstance(obj, dict): + if isinstance(obj, dict): return {key: convert_decimals(value) for key, value in obj.items()} - else: - return obj + return obj class Stepfunction: """Class to handle Custom Stepfunction methods""" - def __init__( - self, - session, - LOGGER - ): - self.logger = LOGGER + def __init__(self, session, logger): + self.logger = logger self.session = session def get_stepfunction_client(self): return self.session.client("stepfunctions") - def invoke_sfn_execution( - self, - sfn_arn, - input: dict, - execution_name=None): + def invoke_sfn_execution(self, sfn_arn, input_data: dict, execution_name=None): try: state_machine_arn = sfn_arn sfn_client = self.get_stepfunction_client() if not execution_name: execution_name = str(uuid.uuid4()) - event_body = json.dumps(convert_decimals(input), indent=2) + event_body = json.dumps(convert_decimals(input_data), indent=2) response = sfn_client.start_execution( - stateMachineArn=state_machine_arn, - name=execution_name, - input=event_body + stateMachineArn=state_machine_arn, name=execution_name, input=event_body ) except Exception as e: msg = f"Couldn't invoke stepfunction {sfn_arn}, error: {e}." diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/__init__.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/__init__.py new file mode 100644 index 000000000..014883ae9 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/__init__.py @@ -0,0 +1,4 @@ +# Copyright Amazon.com Inc. or its affiliates. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/conftest.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/conftest.py new file mode 100644 index 000000000..0c665fd24 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/conftest.py @@ -0,0 +1,24 @@ +""" +conftest file +""" +import pytest + +# Global variables for AWS configuration +AWS_REGIONS = { + "china": "cn-north-1", + "us": "us-east-1", +} + +AWS_PARTITIONS = { + "china": "aws-cn", + "us": "aws", +} + + +@pytest.fixture(scope="session") +def aws_settings(): + """Provide AWS settings for the current test environment.""" + # You could determine this from environment or other factors + environment = "china" + + return {"region": AWS_REGIONS[environment], "partition": AWS_PARTITIONS[environment]} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_handler.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_handler.py new file mode 100644 index 000000000..304f91b40 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_handler.py @@ -0,0 +1,131 @@ +""" +Tests for the AWS Lambda handler that forwards events to Step Functions. +""" +import os +import importlib +from unittest.mock import patch, MagicMock + +import pytest +import handler + +# pylint: disable=redefined-outer-name,no-member,unused-argument + + +@pytest.fixture +def mock_env_variables(aws_settings): + """ + Fixture to mock the SFN_ARN constant in the handler module. 
+ """ + region = aws_settings["region"] + partition = aws_settings["partition"] + sfn_arn = f"arn:{partition}:states:{region}:123456789012:stateMachine:test-sfn" + with patch.object(handler, "SFN_ARN", sfn_arn): + yield sfn_arn + + +@pytest.fixture +def mock_boto3_session(): + """ + Fixture to mock boto3.session.Session and provide access to the mock session object. + """ + # Create a mock session + mock_session = MagicMock() + + # Create a mock client that the session will return + mock_client = MagicMock() + + # Configure the session to return our mock client when requested + mock_session.client.return_value = mock_client + + # Create a mock for the Session constructor + mock_session_constructor = MagicMock(return_value=mock_session) + + # Patch boto3.session.Session to use our mock constructor + with patch("boto3.session.Session", mock_session_constructor): + # Yield both the session constructor and client mocks + yield { + "session_constructor": mock_session_constructor, + "session": mock_session, + "client": mock_client + } + + +@pytest.fixture +def mock_stepfunction(): + """ + Fixture to mock the Stepfunction class in the handler module. 
+ """ + mock_sfn = MagicMock() + mock_sfn.invoke_sfn_execution.return_value = ({"executionArn": "test-arn"}, "test-state-name") + with patch("handler.Stepfunction", return_value=mock_sfn): + yield mock_sfn + + +class TestLambdaHandler: + """Tests for the lambda_handler function.""" + + def test_lambda_handler_with_organizations_event( + self, mock_env_variables, mock_boto3_session, mock_stepfunction, aws_settings + ): + """Test handling of an AWS Organizations event.""" + region = aws_settings["region"] + partition = aws_settings["partition"] + + event = { + "source": "aws.organizations", + "detail-type": "AWS API Call via CloudTrail", + "detail": {"eventName": "CreateAccount"}, + } + + handler.lambda_handler(event, {}) + + # Check that boto3 session was created with correct region + mock_boto3_session["session_constructor"].assert_called_with(region_name=region) + + # Check that Stepfunction was instantiated correctly + handler.Stepfunction.assert_called_once() + + # Check that invoke_sfn_execution was called with correct parameters + expected_arn = f"arn:{partition}:states:{region}:123456789012:stateMachine:test-sfn" + mock_stepfunction.invoke_sfn_execution.assert_called_once_with( + sfn_arn=expected_arn, + input_data=event + ) + + def test_lambda_handler_with_non_organizations_event( + self, mock_env_variables, mock_boto3_session, mock_stepfunction + ): + """Test handling of a non-AWS Organizations event.""" + event = {"source": "aws.ec2", "detail-type": "EC2 Instance State-change Notification"} + + handler.lambda_handler(event, {}) + + # Check that Stepfunction was not instantiated + handler.Stepfunction.assert_not_called() + mock_stepfunction.invoke_sfn_execution.assert_not_called() + + def test_lambda_handler_with_missing_source( + self, + mock_env_variables, + mock_boto3_session, + mock_stepfunction + ): + """Test handling of an event missing the source field.""" + event = {"detail-type": "Some Event", "detail": {}} + + handler.lambda_handler(event, {}) + + 
# Check that Stepfunction was not instantiated + handler.Stepfunction.assert_not_called() + mock_stepfunction.invoke_sfn_execution.assert_not_called() + + +def test_sfn_name_extraction(aws_settings): + """Test extraction of Step Function name from ARN.""" + sfn_name = "test-sfn" + region = aws_settings["region"] + partition = aws_settings["partition"] + sfn_arn = f"arn:{partition}:states:{region}:123456789012:stateMachine:{sfn_name}" + with patch.dict(os.environ, {"SFN_ARN": sfn_arn}): + importlib.reload(handler) + assert handler.sfn_name == sfn_name diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_stepfunction_helper.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_stepfunction_helper.py new file mode 100644 index 000000000..53e5288c6 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/china-forward-function/tests/test_stepfunction_helper.py @@ -0,0 +1,157 @@ +""" +test for stepfunction +""" +import json +from decimal import Decimal +from unittest.mock import MagicMock, patch + +import pytest +from stepfunction_helper import Stepfunction, convert_decimals + + +class TestConvertDecimals: + def test_convert_decimal_to_string(self): + assert convert_decimals(Decimal("10.5")) == "10.5" + + def test_convert_list_of_decimals(self): + result = convert_decimals([Decimal("10.5"), Decimal("20.7")]) + assert result == ["10.5", "20.7"] + + def test_convert_dict_with_decimals(self): + data = {"price": Decimal("10.5"), "quantity": Decimal("2")} + result = convert_decimals(data) + assert result == {"price": "10.5", "quantity": "2"} + + def test_convert_nested_structure(self): + data = { + "items": [ + {"price": Decimal("10.5"), "quantity": 2}, + {"price": Decimal("20.7"), "quantity": 1} + ], + "total": Decimal("41.7"), + } + result = convert_decimals(data) + expected = { + "items": [ + 
{"price": "10.5", "quantity": 2}, + {"price": "20.7", "quantity": 1} + ], + "total": "41.7" + } + assert result == expected + + def test_non_decimal_values_unchanged(self): + data = { + "name": "Test", + "active": True, + "count": 5, + "items": ["a", "b", "c"] + } + assert convert_decimals(data) == data + + +class TestStepfunction: + @pytest.fixture + def mock_session(self): + session = MagicMock() + mock_client = MagicMock() + session.client.return_value = mock_client + return session, mock_client + + @pytest.fixture + def stepfunction(self, mock_session): + session, _ = mock_session + logger = MagicMock() + return Stepfunction(session, logger) + + def test_get_stepfunction_client(self, stepfunction, mock_session): + session, _ = mock_session + stepfunction.get_stepfunction_client() + session.client.assert_called_once_with("stepfunctions") + + def test_invoke_sfn_execution_with_default_name(self, stepfunction, mock_session, aws_settings): + region = aws_settings["region"] + partition = aws_settings["partition"] + _, mock_client = mock_session + execution_arn = ( + f"arn:{partition}:states:{region}:" + f"123456789012:execution:test-sfn:test-execution" + ) + mock_response = { + "executionArn": execution_arn + } + mock_client.start_execution.return_value = mock_response + + with patch("uuid.uuid4", return_value="mocked-uuid"): + response, name = stepfunction.invoke_sfn_execution( + sfn_arn=f"arn:{partition}:states:{region}:123456789012:stateMachine:test-sfn", + input_data={"key": "value"}, + ) + + assert response == mock_response + assert name == "mocked-uuid" + mock_client.start_execution.assert_called_once() + + def test_invoke_sfn_execution_with_custom_name(self, stepfunction, mock_session, aws_settings): + region = aws_settings["region"] + partition = aws_settings["partition"] + _, mock_client = mock_session + mock_response = { + "executionArn": ( + f"arn:{partition}:states:{region}:" + f"123456789012:execution:test-sfn:custom-name" + ) + } + 
mock_client.start_execution.return_value = mock_response + + response, name = stepfunction.invoke_sfn_execution( + sfn_arn=f"arn:{partition}:states:{region}:123456789012:stateMachine:test-sfn", + input_data={"key": "value"}, + execution_name="custom-name", + ) + + assert response == mock_response + assert name == "custom-name" + mock_client.start_execution.assert_called_once() + + def test_invoke_sfn_execution_with_decimal_data(self, stepfunction, mock_session, aws_settings): + region = aws_settings["region"] + partition = aws_settings["partition"] + _, mock_client = mock_session + mock_response = { + "executionArn": ( + f"arn:{partition}:states:{region}:" + f"123456789012:execution:test-sfn:test-execution" + ) + } + mock_client.start_execution.return_value = mock_response + + input_data = {"amount": Decimal("123.45")} + expected_input = json.dumps({"amount": "123.45"}, indent=2) + + with patch("uuid.uuid4", return_value="mocked-uuid"): + stepfunction.invoke_sfn_execution( + sfn_arn=f"arn:{partition}:states:{region}:123456789012:stateMachine:test-sfn", + input_data=input_data + ) + + mock_client.start_execution.assert_called_once_with( + stateMachineArn=f"arn:{partition}:states:{region}:123456789012:stateMachine:test-sfn", + name="mocked-uuid", + input=expected_input, + ) + + def test_invoke_sfn_execution_exception(self, stepfunction, mock_session, aws_settings): + region = aws_settings["region"] + partition = aws_settings["partition"] + _, mock_client = mock_session + mock_client.start_execution.side_effect = Exception("Test exception") + + with pytest.raises(Exception) as excinfo: + stepfunction.invoke_sfn_execution( + sfn_arn=f"arn:{partition}:states:{region}:123456789012:stateMachine:test-sfn", + input_data={"key": "value"}, + ) + + assert "Test exception" in str(excinfo.value) + stepfunction.logger.error.assert_called_once() diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_bucket.yml 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_bucket.yml index 4c8aefef2..ae0d2bbc5 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_bucket.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_bucket.yml @@ -11,7 +11,7 @@ Resources: DeletionPolicy: Delete Type: AWS::S3::Bucket Properties: - BucketName: !Ref BucketName + BucketName: !Ref BucketName AccessControl: BucketOwnerFullControl BucketEncryption: ServerSideEncryptionConfiguration: @@ -23,4 +23,4 @@ Resources: BlockPublicAcls: true BlockPublicPolicy: true IgnorePublicAcls: true - RestrictPublicBuckets: true \ No newline at end of file + RestrictPublicBuckets: true diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_deploy.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_deploy.yml index a5c024935..733077480 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_deploy.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/cn_northwest_deploy.yml @@ -6,7 +6,7 @@ Transform: 'AWS::Serverless-2016-10-31' Description: ADF CloudFormation Stack for deploy extra resources in China cn-northwest-1. 
Parameters: - AcoountBootstrapingStateMachineArn: + AccountBootstrapingStateMachineArn: Type: String AdfLogLevel: Type: String @@ -44,7 +44,7 @@ Resources: Statement: - Effect: "Allow" Action: "states:StartExecution" - Resource: !Ref AcoountBootstrapingStateMachineArn + Resource: !Ref AccountBootstrapingStateMachineArn - Effect: Allow Action: - "logs:CreateLogGroup" @@ -65,7 +65,7 @@ Resources: Description: "ADF Lambda Function - Forward events to statemachine" Environment: Variables: - SFN_ARN: !Ref AcoountBootstrapingStateMachineArn + SFN_ARN: !Ref AccountBootstrapingStateMachineArn ADF_LOG_LEVEL: !Ref AdfLogLevel FunctionName: ForwardStateMachineFunction Role: !GetAtt ForwardStateMachineFunctionRole.Arn @@ -80,4 +80,4 @@ Resources: eventSource: - organizations.amazonaws.com eventName: - - MoveAccount \ No newline at end of file + - MoveAccount diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py index 6ef9e0622..d6e0958cb 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py @@ -6,6 +6,7 @@ is executed from within AWS CodeBuild in the management account """ import os +import sys import boto3 from logger import configure_logger from cloudformation import CloudFormation @@ -14,19 +15,20 @@ MANAGEMENT_ACCOUNT_ID = os.environ["MANAGEMENT_ACCOUNT_ID"] LOGGER = configure_logger(__name__) + def _create_s3_bucket(bucket_name): try: - LOGGER.info(f"Deploy S3 bucket {bucket_name}...") + LOGGER.info("Deploy S3 bucket %s...", bucket_name) extra_deploy_region = "cn-northwest-1" template_path = "adf-build/china-support/cn_northwest_bucket.yml" - stack_name = 'adf-regional-base-china-bucket' - parameters= [ - { - 'ParameterKey': 'BucketName', - 'ParameterValue': 
bucket_name, - 'UsePreviousValue': False, - }, - ] + stack_name = "adf-regional-base-china-bucket" + parameters = [ + { + "ParameterKey": "BucketName", + "ParameterValue": bucket_name, + "UsePreviousValue": False, + }, + ] cloudformation = CloudFormation( region=extra_deploy_region, deployment_account_region=extra_deploy_region, @@ -34,17 +36,19 @@ def _create_s3_bucket(bucket_name): wait=True, stack_name=stack_name, account_id=MANAGEMENT_ACCOUNT_ID, - parameters = parameters, - local_template_path=template_path + parameters=parameters, + local_template_path=template_path, ) cloudformation.create_stack() except Exception as error: - LOGGER.error(f"Failed to process _create_s3_bucket, error:\n {error}") - exit(1) + LOGGER.error("Failed to process _create_s3_bucket, error:\n %s", error) + sys.exit(1) + def main(): bucket_name = f"adf-china-bootstrap-cn-northwest-1-{MANAGEMENT_ACCOUNT_ID}" _create_s3_bucket(bucket_name) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/pytest.ini b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/pytest.ini new file mode 100644 index 000000000..ed987b426 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/pytest.ini @@ -0,0 +1,6 @@ +# Copyright Amazon.com Inc. or its affiliates. +# SPDX-License-Identifier: MIT-0 + +[pytest] +testpaths = tests +norecursedirs = adf-build/china-support/china-forward-function diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/__init__.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/__init__.py new file mode 100644 index 000000000..014883ae9 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/__init__.py @@ -0,0 +1,4 @@ +# Copyright Amazon.com Inc. or its affiliates. 
+# SPDX-License-Identifier: MIT-0
+
+# pylint: skip-file
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/test_create_s3_cn.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/test_create_s3_cn.py
new file mode 100644
index 000000000..1630426e6
--- /dev/null
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/tests/test_create_s3_cn.py
@@ -0,0 +1,124 @@
+"""
+s3 bucket creation script
+"""
+from unittest.mock import patch, MagicMock
+
+import pytest
+import create_s3_cn
+
+# pylint: disable=redefined-outer-name,no-member,unused-argument,protected-access
+
+@pytest.fixture
+def mock_env_vars():
+    """
+    Fixture to directly patch the module variables that are derived from environment variables.
+    """
+    # Save original values
+    original_region = create_s3_cn.REGION_DEFAULT
+    original_account_id = create_s3_cn.MANAGEMENT_ACCOUNT_ID
+
+    # Patch the module variables directly
+    create_s3_cn.REGION_DEFAULT = "cn-northwest-1"
+    create_s3_cn.MANAGEMENT_ACCOUNT_ID = "123456789012"
+
+    yield
+
+    # Restore original values
+    create_s3_cn.REGION_DEFAULT = original_region
+    create_s3_cn.MANAGEMENT_ACCOUNT_ID = original_account_id
+
+
+@pytest.fixture
+def mock_cloudformation():
+    """Mock the CloudFormation class."""
+    with patch("create_s3_cn.CloudFormation") as mock_cf:
+        # Configure the mock to return a mock instance
+        mock_instance = MagicMock()
+        mock_cf.return_value = mock_instance
+        yield mock_instance
+
+
+@pytest.fixture
+def mock_logger():
+    """Mock the logger."""
+    with patch("create_s3_cn.LOGGER") as mock_logger:
+        yield mock_logger
+
+
+def test_create_s3_bucket_china(mock_env_vars, mock_cloudformation, mock_logger):
+    """Test creating an S3 bucket in China region."""
+    # Arrange
+    bucket_name = "adf-china-bootstrap-cn-northwest-1-123456789012"
+
+    # Act
+    create_s3_cn._create_s3_bucket(bucket_name)
+
+    # Assert
+    # Remove this line as it's
incorrect - we're not calling the mock directly + # mock_cloudformation.assert_called_once() + + # Instead, check that create_stack was called on the mock instance + mock_cloudformation.create_stack.assert_called_once() + + # Verify CloudFormation was initialized with correct parameters + # This needs to be updated to check the CloudFormation class instantiation + args = create_s3_cn.CloudFormation.call_args + if args: + _, kwargs = args + assert kwargs["region"] == "cn-northwest-1" + assert kwargs["deployment_account_region"] == "cn-northwest-1" + assert kwargs["stack_name"] == "adf-regional-base-china-bucket" + assert kwargs["account_id"] == "123456789012" + + # Verify parameters passed to CloudFormation + parameters = kwargs["parameters"] + assert len(parameters) == 1 + assert parameters[0]["ParameterKey"] == "BucketName" + assert parameters[0]["ParameterValue"] == bucket_name + + # Verify logger was called + mock_logger.info.assert_called_with("Deploy S3 bucket %s...", bucket_name) + + +def test_create_s3_bucket_exception(mock_env_vars, mock_cloudformation, mock_logger): + """Test exception handling when creating an S3 bucket fails.""" + # Arrange + bucket_name = "adf-china-bootstrap-cn-northwest-1-123456789012" + mock_cloudformation.create_stack.side_effect = Exception("Mocked error") + + # Act & Assert + with pytest.raises(SystemExit) as excinfo: + create_s3_cn._create_s3_bucket(bucket_name) + + assert excinfo.value.code == 1 + mock_logger.error.assert_called_once() + assert "Failed to process _create_s3_bucket" in mock_logger.error.call_args[0][0] + + +def test_main_function(mock_env_vars, monkeypatch): + """Test the main function calls _create_s3_bucket with correct bucket name.""" + # Arrange + mock_create_bucket = MagicMock() + monkeypatch.setattr(create_s3_cn, "_create_s3_bucket", mock_create_bucket) + + # Update expected bucket name to match actual implementation + expected_bucket_name = "adf-china-bootstrap-cn-northwest-1-123456789012" + + # Act + 
create_s3_cn.main() + + # Assert + mock_create_bucket.assert_called_once_with(expected_bucket_name) + + +def test_cloudformation_template_path(mock_env_vars, mock_cloudformation): + """Test the CloudFormation template path is correct.""" + # Arrange + bucket_name = "test-bucket" + + # Act + create_s3_cn._create_s3_bucket(bucket_name) + + # Assert + _, kwargs = create_s3_cn.CloudFormation.call_args + assert kwargs["local_template_path"] == "adf-build/china-support/cn_northwest_bucket.yml" diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py index 84c359662..0ac92fa37 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py @@ -469,7 +469,7 @@ def _china_region_extra_deploy(): if REGION_DEFAULT == CHINA_PRIMARY_REGION: parameters = [ { - 'ParameterKey': 'AcoountBootstrapingStateMachineArn', + 'ParameterKey': 'AccountBootstrapingStateMachineArn', 'ParameterValue': ACCOUNT_BOOTSTRAPPING_STATE_MACHINE_ARN, 'UsePreviousValue': False, }, @@ -500,8 +500,8 @@ def _china_region_extra_deploy(): cloudformation.create_stack() except Exception as error: LOGGER.error( - "China extra stack adf-regional-base-china-extra deployment failed in region %(region)s, please check following error: " - "%(error)s", + "China extra stack adf-regional-base-china-extra deployment failed in region " + "%(region)s, please check following error: %(error)s", { "region": CHINA_SECONDARY_REGION, "error": str(error), diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py deleted file mode 100644 index 12e60b0d6..000000000 --- 
a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild copy.py +++ /dev/null @@ -1,474 +0,0 @@ -# Copyright Amazon.com Inc. or its affiliates. -# SPDX-License-Identifier: MIT-0 - -"""Construct related to CodeBuild Input -""" - -import os -from aws_cdk import ( - aws_codepipeline as _codepipeline, - aws_codebuild as _codebuild, - aws_iam as _iam, - aws_kms as _kms, - aws_ecr as _ecr, - aws_ec2 as _ec2, - Stack, - Duration, - Aws, -) -from constructs import Construct - -from cdk_constructs.adf_codepipeline import Action - -ADF_DEPLOYMENT_REGION = os.environ["AWS_REGION"] -ADF_DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] -DEFAULT_CODEBUILD_IMAGE = "STANDARD_5_0" -DEFAULT_BUILD_SPEC_FILENAME = 'buildspec.yml' -DEFAULT_DEPLOY_SPEC_FILENAME = 'deployspec.yml' -ADF_DEFAULT_BUILD_ROLE_NAME = 'adf-codebuild-role' -ADF_DEFAULT_BUILD_TIMEOUT = 20 - -def get_partition(region_name: str) -> str: - """Given the region, this function will return the appropriate partition. - - :param region_name: The name of the region (us-east-1, us-gov-west-1) - :return: Returns the partition name as a string. - """ - - if region_name.startswith('us-gov'): - return 'aws-us-gov' - elif region_name.startswith("cn-north"): - return "aws-cn" - return 'aws' - - -ADF_DEPLOYMENT_PARTITION = get_partition(ADF_DEPLOYMENT_REGION) - - -class CodeBuild(Construct): - # pylint: disable=no-value-for-parameter, too-many-locals - - def __init__( - self, - scope: Construct, - id: str, - shared_modules_bucket: str, - deployment_region_kms: str, - deployment_map_source: str, - deployment_map_name: str, - map_params: dict, - target, - **kwargs, - ): - super().__init__(scope, id, **kwargs) - stack = Stack.of(self) - - # if CodeBuild is being used as a deployment action we want to allow - # target specific values. 
- if target: - role_name = ( - target - .get('properties', {}) - .get('role', ADF_DEFAULT_BUILD_ROLE_NAME) - ) - role_arn = ( - f'arn:{stack.partition}:iam::{ADF_DEPLOYMENT_ACCOUNT_ID}:' - f'role/{role_name}' - ) - timeout = ( - target - .get('properties', {}) - .get('timeout', ( - map_params['default_providers']['deploy'] - .get('properties', {}) - .get('timeout', ADF_DEFAULT_BUILD_TIMEOUT) - )) - ) - build_env = _codebuild.BuildEnvironment( - build_image=CodeBuild.determine_build_image( - codebuild_id=id, - scope=scope, - target=target, - map_params=map_params, - ), - compute_type=getattr( - _codebuild.ComputeType, - ( - target - .get('properties', {}) - .get('size', ( - map_params['default_providers']['deploy'] - .get('properties', {}) - .get('size', "SMALL") - )) - .upper() - ), - ), - environment_variables=CodeBuild.generate_build_env_variables( - _codebuild, - shared_modules_bucket, - deployment_map_source, - deployment_map_name, - map_params, - target, - ), - privileged=( - target - .get('properties', {}) - .get('privileged', ( - map_params['default_providers']['deploy'] - .get('properties', {}) - .get('privileged', False) - )) - ), - ) - build_spec = CodeBuild.determine_build_spec( - id, - ( - map_params['default_providers']['deploy'] - .get('properties', {}) - ), - target, - ) - self.pipeline_project = _codebuild.PipelineProject( - self, - 'project', - environment=build_env, - encryption_key=_kms.Key.from_key_arn( - self, - 'default_deployment_account_key', - key_arn=deployment_region_kms, - ), - description=f"ADF CodeBuild Project for {id}", - project_name=f"adf-deploy-{id}", - timeout=Duration.minutes(timeout), - role=_iam.Role.from_role_arn( - self, - 'build_role', - role_arn=role_arn, - mutable=False, - ), - build_spec=build_spec, - ) - self._setup_vpc( - map_params['default_providers']['deploy'], - target=target, - ) - self.deploy = Action( - name=id, - provider="CodeBuild", - category="Build", - project_name=f"adf-deploy-{id}", - run_order=1, - 
target=target, - map_params=map_params, - action_name=id, - ).config - else: - role_name = ( - map_params['default_providers']['build'] - .get('properties', {}) - .get('role', ADF_DEFAULT_BUILD_ROLE_NAME) - ) - role_arn = ( - f'arn:{stack.partition}:iam::{ADF_DEPLOYMENT_ACCOUNT_ID}:' - f'role/{role_name}' - ) - timeout = ( - map_params['default_providers']['build'] - .get('properties', {}) - .get('timeout', ADF_DEFAULT_BUILD_TIMEOUT) - ) - build_env = _codebuild.BuildEnvironment( - build_image=CodeBuild.determine_build_image( - codebuild_id=id, - scope=scope, - target=target, - map_params=map_params - ), - compute_type=getattr( - _codebuild.ComputeType, - ( - map_params['default_providers']['build'] - .get('properties', {}) - .get('size', "SMALL") - .upper() - ), - ), - environment_variables=CodeBuild.generate_build_env_variables( - _codebuild, - shared_modules_bucket, - deployment_map_source, - deployment_map_name, - map_params, - ), - privileged=( - map_params['default_providers']['build'] - .get('properties', {}) - .get('privileged', False) - ), - ) - build_spec = CodeBuild.determine_build_spec( - id, - map_params['default_providers']['build'].get('properties', {}) - ) - self.pipeline_project = _codebuild.PipelineProject( - self, - 'project', - environment=build_env, - encryption_key=_kms.Key.from_key_arn( - self, - 'DefaultDeploymentAccountKey', - key_arn=deployment_region_kms, - ), - description=f"ADF CodeBuild Project for {map_params['name']}", - project_name=f"adf-build-{map_params['name']}", - timeout=Duration.minutes(timeout), - build_spec=build_spec, - role=_iam.Role.from_role_arn( - self, - 'default_build_role', - role_arn=role_arn, - mutable=False, - ), - ) - self._setup_vpc(map_params['default_providers']['build']) - self.build = _codepipeline.CfnPipeline.StageDeclarationProperty( - name="Build", - actions=[ - Action( - name="Build", - provider="CodeBuild", - category="Build", - run_order=1, - map_params=map_params, - action_name="build" - ).config - ] 
- ) - - def _setup_vpc(self, default_provider, target=None): - default_props = default_provider.get('properties', {}) - # This will either be empty (build stage) or configured (deploy stage) - target_props = (target or {}).get('properties', {}) - vpc_id = target_props.get('vpc_id', default_props.get('vpc_id')) - subnet_ids = target_props.get( - 'subnet_ids', - default_props.get('subnet_ids', []), - ) - security_group_ids = target_props.get( - 'security_group_ids', - default_props.get('security_group_ids', []), - ) - if vpc_id: - if not subnet_ids: - raise ValueError( - "CodeBuild environment of " - f"{self.pipeline_project.project_name} has a " - f"VPC Id ({vpc_id}) set, but no subnets are configured. " - "When specifying the VPC Id for a given CodeBuild " - "environment, you also need to specify the subnet_ids " - "and optionally the security_group_ids that should be " - "used by the CodeBuild instance." - ) - if not security_group_ids: - default_security_group = _ec2.CfnSecurityGroup( - self, - 'sg', - group_description=( - f"The default security group for {self.node.id}" - ), - security_group_egress=[ - { - "cidrIp": "0.0.0.0/0", - "ipProtocol": "-1", - } - ], - vpc_id=vpc_id, - ) - security_group_ids = [ - default_security_group.get_att("GroupId"), - ] - self.pipeline_project.node.default_child.add_property_override( - "VpcConfig", - { - "VpcId": vpc_id, - "Subnets": subnet_ids, - "SecurityGroupIds": security_group_ids, - }, - ) - elif subnet_ids or security_group_ids: - raise ValueError( - "CodeBuild environment of " - f"{self.pipeline_project.project_name} requires a VPC Id when " - "configured to connect to specific subnets." 
- ) - - @staticmethod - def _determine_stage_build_spec( - codebuild_id, - props, - stage_name, - default_filename, - ): - filename = props.get('spec_filename') - spec_inline = props.get('spec_inline', {}) - if filename and spec_inline: - raise AssertionError( - "The spec_filename and spec_inline are both present " - f"inside the {stage_name} stage definition of {codebuild_id}. " - "Whereas only one of these two is allowed." - ) - - if spec_inline: - return _codebuild.BuildSpec.from_object(spec_inline) - - return _codebuild.BuildSpec.from_source_filename( - filename or default_filename, - ) - - @staticmethod - def determine_build_spec(codebuild_id, default_props, target=None): - if target: - target_props = target.get('properties', {}) - if ( - 'spec_inline' in target_props - or 'spec_filename' in target_props - ): - return CodeBuild._determine_stage_build_spec( - codebuild_id=codebuild_id, - props=target_props, - stage_name='deploy target', - default_filename=DEFAULT_DEPLOY_SPEC_FILENAME, - ) - stage_type = 'deploy' if target else 'build' - return CodeBuild._determine_stage_build_spec( - codebuild_id=codebuild_id, - props=default_props, - stage_name=f'default {stage_type}', - default_filename=( - DEFAULT_DEPLOY_SPEC_FILENAME - if target - else DEFAULT_BUILD_SPEC_FILENAME - ), - ) - - @staticmethod - def get_image_by_name(specific_image: str): - cdk_image_name = (specific_image or DEFAULT_CODEBUILD_IMAGE).upper() - if hasattr(_codebuild.LinuxBuildImage, cdk_image_name): - return getattr(_codebuild.LinuxBuildImage, cdk_image_name) - if specific_image.startswith('docker-hub://'): - specific_image = specific_image.split('docker-hub://')[-1] - return _codebuild.LinuxBuildImage.from_docker_registry( - specific_image, - ) - raise ValueError( - f"The CodeBuild image {specific_image} could not be found." 
- ) - - @staticmethod - def determine_build_image(codebuild_id, scope, target, map_params): - specific_image = None - if target: - specific_image = ( - target.get('properties', {}).get('image') - or ( - map_params['default_providers']['deploy'] - .get('properties', {}) - .get('image') - ) - ) - else: - specific_image = ( - map_params['default_providers']['build'] - .get('properties', {}) - .get('image') - ) - if isinstance(specific_image, dict): - repository_name = specific_image.get('repository_name', '') - repository_arn = specific_image.get('repository_arn', '') - if not repository_arn and not repository_name: - raise ValueError("The repository arn or name needs to be specified") - - if repository_arn and repository_name: - raise AssertionError("Specify the arn or the name of the repository, not both.") - - if repository_name: - repository_arn = ( - f"arn:{ADF_DEPLOYMENT_PARTITION}:ecr:{ADF_DEPLOYMENT_REGION}:" - f"{ADF_DEPLOYMENT_ACCOUNT_ID}:repository/{repository_name}" - ) - - ecr_repo = _ecr.Repository.from_repository_arn( - scope, - f'custom_repo_{codebuild_id}', - repository_arn, - ) - return _codebuild.LinuxBuildImage.from_ecr_repository( - ecr_repo, - specific_image.get('tag', 'latest'), - ) - - return CodeBuild.get_image_by_name(specific_image) - - @staticmethod - def generate_build_env_variables( - codebuild, - shared_modules_bucket, - deployment_map_source, - deployment_map_name, - map_params, - target=None, - ): - build_env_vars = { - "PYTHONPATH": "./adf-build/python", - "ADF_PROJECT_NAME": map_params['name'], - "ADF_DEPLOYMENT_MAP_SOURCE": deployment_map_source, - "ADF_DEPLOYMENT_MAP_NAME": deployment_map_name, - "S3_BUCKET_NAME": shared_modules_bucket, - "ACCOUNT_ID": Aws.ACCOUNT_ID, - **( - map_params - .get('default_providers', {}) - .get( - ( - 'deploy' - if target - else 'build' - ), - {}, - ) - .get('properties', {}) - .get('environment_variables', {}) - ), - **( - # Target should go second, as this overwrites any - # existing key/value, so 
it overrides the defaults - ( - target or {} - ) - .get('properties', {}) - .get('environment_variables', {}) - ) - } - - if target: - build_env_vars['TARGET_NAME'] = target['name'] - build_env_vars["TARGET_ACCOUNT_ID"] = target['id'] - deploy_role_name = ( - target - .get('properties', {}) - .get('role', ( - map_params['default_providers']['deploy'] - .get('properties', {}) - .get('role') - )) - ) - if deploy_role_name: - build_env_vars["DEPLOYMENT_ROLE"] = deploy_role_name - - return { - key: codebuild.BuildEnvironmentVariable(value=value) - for key, value in build_env_vars.items() - } diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py index b7baff88e..43c758a86 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py @@ -36,7 +36,7 @@ def get_partition(region_name: str) -> str: if region_name.startswith('us-gov'): return 'aws-us-gov' - elif region_name.startswith("cn-north"): + if region_name.startswith("cn-north"): return "aws-cn" return 'aws' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index 2b0e0ff16..faf29e667 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -39,7 +39,7 @@ def get_partition(region_name: str) -> str: if region_name.startswith('us-gov'): return 'aws-us-gov' - elif region_name.startswith("cn-north"): + if 
region_name.startswith("cn-north"): return "aws-cn" return 'aws' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py deleted file mode 100755 index 7dcbdb1fc..000000000 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts copy.py +++ /dev/null @@ -1,294 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# Copyright Amazon.com Inc. or its affiliates. -# SPDX-License-Identifier: MIT-0 - -""" -retrieve_organization_accounts.py - -AWS Account details of the member accounts in the organization are -required for services like AWS Security Hub and Amazon GuardDuty. - -This helper script will allow you to retrieve all the member account -details of the accounts in the organization as part of the CodeBuild -step. The member account details will be written to a JSON file on the -path as specified during execution. - -For example, to get the account details required for AWS Security Hub -to send the invites correctly, you would need to execute the script -with the arguments to fetch the details as shown in the last example. - -This will write the JSON file directly inside the code base of the -custom resource. Such that the lambda function can read and act based -on that data from inside a target account. Without requiring special -permissions to be added to target accounts to traverse the AWS -Organization. - -Usage: - retrieve_organization_accounts.py [-v | --verbose] [-h | --help] - [-r ] [-o ] [-s ] - [--session-ttl ] [-f ]... - -Options: - -f , --field - Add a specific field that is available in the organization - member account details. Available options include 'Id', 'Arn', - 'Email', 'Name', 'Status', 'JoinedMethod', 'JoinedTimestamp'. - You can specify multiple by adding them one after another. 
- All other details that would otherwise be returned by the - AWS Organizations: ListAccounts API call will be ignored - [default: Id Email Name]. - - -h, --help Show help info related to generic or command - execution. - - -o , --output-file - The output file path to use to output the retrieved - data to in JSON format. Define a file path or set to - to - output to stdout [default: -]. - - -r , --role-name - The name of the role to assume into to get read access - to list and describe the member accounts in the - organization [default: - adf/organizations/adf-organizations-readonly]. - - -s , --session-name - The session name to use when assuming into the billing account - role [default: retrieve_organization_accounts]. - - --session-ttl - The STS TTL in seconds [default: 900]. - - -v, --verbose - Show verbose logging information. - -Example: - retrieve_organization_accounts.py -v -o src/lambda/accounts.json - - retrieve_organization_accounts.py -v -f Id -f Email -o src/lambda/dat.json -""" - -import os -import sys -import logging -import json - -import boto3 -from botocore.exceptions import ClientError - -from docopt import docopt - - -# Configure logging -logging.basicConfig(level=logging.INFO) -LOGGER = logging.getLogger(__name__) -LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) -logging.basicConfig(level=logging.INFO) - - -def main(): - """ - AWS Account details of the member accounts in the organization are - required for services like AWS Security Hub and Amazon GuardDuty. - - This helper script will allow you to retrieve all the member account - details of the accounts in the organization as part of the CodeBuild - step. The member account details will be written to a JSON file on the - path as specified during execution. 
- - For example, to get the account details required for Security Hub to - send the invites correctly, you would need to execute the script with - the following arguments to fetch the details: - - ```bash - python adf-build/helpers/retrieve_organization_accounts.py -v \ - -o src/custom_resource/invite_members/member_accounts.json \ - -f Id \ - -f Email - ``` - - This will write the JSON file directly inside the code base of the - `invite_members` custom resource. Such that the lambda function can - read and act based on that data from inside a target account without - requiring special permissions to be added to target accounts to traverse - the AWS Organization. - - The two options defined using the `-f` argument, specify that we are - interested in the `Id` and the `Email` of the member accounts. - All other details that would otherwise be returned by the - [Organizations: ListAccounts]( - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/organizations.html#Organizations.Client.list_accounts) - API will be ignored. - - --- - - This main function will parse the arguments using docopt to determine what - options are relevant. See command options listed at the top of this file - or run the script with `--help` to get the list of options instead. - - Based on the input, it will traverse the accounts linked to the - AWS Organization and store the details as requested in a JSON file. - - Returns: - int: Exit code 0 when all went well. - """ - options = docopt(__doc__) - - # In case the user asked for verbose logging, increase - # the log level to debug. 
- if options["--verbose"] > 0: - logging.basicConfig(level=logging.DEBUG) - LOGGER.setLevel(logging.DEBUG) - - LOGGER.debug( - "Received options: %s", - options, - ) - - billing_account_id = _get_billing_account_id() - member_accounts = _get_member_accounts( - billing_account_id=billing_account_id, - options=options, - ) - _flush_out(accounts=member_accounts, options=options) - - return 0 - - -def _get_partition(region_name: str) -> str: - """Given the region, this function will return the appropriate partition. - - :param region_name: The name of the region (us-east-1, us-gov-west-1) - :return: Returns the partition name as a string. - """ - - if region_name.startswith("us-gov"): - return "aws-us-gov" - elif region_name.startswith("cn-north"): - return "aws-cn" - - return "aws" - - -def _get_billing_account_id(): - """ - Retrieve the Billing/Root AWS Account Id of the organization. - - Returns: - str: The AWS Account Id as a string. - """ - org_client = boto3.client("organizations") - response = org_client.describe_organization() - return response["Organization"]["MasterAccountId"] - - -def _get_member_accounts(billing_account_id, options): - """ - Retrieve the member accounts of the AWS Organization as requested. - - Args: - billing_account_id (str): The Billing/Root AWS Account Id of the - organization. - - options (dict): The options stored as a dictionary. These include all - argument options as passed when executing the script. - - Returns: - list(dict)): The list of account details as requested. 
- """ - assumed_credentials = _request_sts_credentials( - billing_account_id=billing_account_id, - options=options, - ) - billing_account_session = boto3.Session( - aws_access_key_id=assumed_credentials["AccessKeyId"], - aws_secret_access_key=assumed_credentials["SecretAccessKey"], - aws_session_token=assumed_credentials["SessionToken"], - ) - org_client = billing_account_session.client("organizations") - list_accounts_paginator = org_client.get_paginator("list_accounts") - accounts = [] - for page in list_accounts_paginator.paginate(): - accounts.extend(page["Accounts"]) - - # Remove any account that is not actively part of this organization yet. - only_active_accounts = filter(lambda a: a["Status"] == "ACTIVE", accounts) - - # Only return the key: value pairs that are defined in the --field option. - only_certain_fields_of_active = list( - map( - lambda a: {k: v for k, v in a.items() if k in options["--field"]}, - only_active_accounts, - ) - ) - return only_certain_fields_of_active - - -def _flush_out(accounts, options): - """ - Flush the account details to the specified output target. When the output - file option equals `-` it will output to the INFO logger. Otherwise, it - will write to the specified target file as requested. - - Args: - accounts (list(dict)): The account details to flush to the file/logs. - options (dict): The options which host where to write the account - details to among other flags. - """ - json_accounts = json.dumps(accounts, indent=2, default=str) - - if options["--output-file"] == "-": - LOGGER.info( - "Accounts JSON: %s", - json_accounts, - ) - return - - with open(options["--output-file"], mode="w", encoding="utf-8") as output_file: - output_file.write(json_accounts) - - -def _request_sts_credentials(billing_account_id, options): - """ - Request STS Credentials to get access to the billing account. - With the assumed role, this script will be able to traverse over the - member accounts in the AWS Organization. 
- - Args: - billing_account_id (str): The Billing/Root AWS Account Id of the - organization. - - options (dict): The options stored as a dictionary. These include all - argument options as passed when executing the script. - - Returns: - dict: The credentials stored in a dictionary. This will host the - `AccessKeyId`, `SecretAccessKey`, and `SessionToken` attributes - required to use the STS role. - """ - try: - - # Setup Session - session = boto3.session.Session() - region_name = session.region_name - partition = _get_partition(region_name) - sts_client = session.client("sts") - - role_name = options["--role-name"] - role_arn = f"arn:{partition}:iam::{billing_account_id}:role/{role_name}" - response = sts_client.assume_role( - RoleArn=role_arn, - RoleSessionName=options["--session-name"], - DurationSeconds=int(options["--session-ttl"]), - ) - return response["Credentials"] - except ClientError as client_error: - LOGGER.error("Failed to assume into role") - LOGGER.exception(client_error) - raise - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py index 7dcbdb1fc..dd8e1ff5c 100755 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/retrieve_organization_accounts.py @@ -166,9 +166,8 @@ def _get_partition(region_name: str) -> str: if region_name.startswith("us-gov"): return "aws-us-gov" - elif region_name.startswith("cn-north"): + if region_name.startswith("cn-north"): return "aws-cn" - return "aws" diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py index 9e7e55f60..6adfbdab6 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py @@ -219,12 +219,12 @@ def _handle_template_path( ): try: # Read the CloudFormation template from a file - with open(template_path, 'r') as template_file: + with open(template_path, 'r', encoding='utf-8') as template_file: template_body = template_file.read() return template_body except Exception as error: - LOGGER.error(f"Process _handle_template_path function error:\n {error}.") + LOGGER.error("Process _handle_template_path function error:\n %s", error) return None def _wait_if_in_progress(self): diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py index 5cdfba698..6aabbf5e3 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py @@ -12,9 +12,11 @@ from boto3.session import Session from botocore.exceptions import UnknownRegionError + class IncompatibleRegionError(Exception): """Raised in case the regions is not supported.""" - pass + + def get_partition(region_name: str) -> str: """Given the region, this function will return the appropriate partition. @@ -26,11 +28,10 @@ def get_partition(region_name: str) -> str: try: partition = Session().get_partition_for_region(region_name) except UnknownRegionError as e: - raise IncompatibleRegionError( - f'The region {region_name} is not supported.' 
- ) + raise IncompatibleRegionError(f"The region {region_name} is not supported.") from e return partition + def get_organization_api_region(region_name: str) -> str: """ Given the current region, it will determine the partition and use @@ -39,17 +40,17 @@ def get_organization_api_region(region_name: str) -> str: :param region_name: The name of the region (eu-west-1, us-gov-east-1 or cn-northwest-1) :return: Returns the AWS Organizations API region to use as a string. """ - if get_partition(region_name) == 'aws-us-gov': - return 'us-gov-west-1' - elif get_partition(region_name) == 'aws-cn': - return 'cn-northwest-1' - return 'us-east-1' + if get_partition(region_name) == "aws-us-gov": + return "us-gov-west-1" + if get_partition(region_name) == "aws-cn": + return "cn-northwest-1" + return "us-east-1" + def get_aws_domain(region_name: str) -> str: """ Get AWS domain suffix """ if region_name.startswith("cn-north"): - return "amazonaws.com.{0}".format(region_name.split("-")[0]) - else: - return "amazonaws.com" \ No newline at end of file + return f"amazonaws.com.{region_name.split('-')[0]}" + return "amazonaws.com" diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py index bfae8f66c..5deb4d934 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py @@ -7,43 +7,34 @@ from partition import get_partition, IncompatibleRegionError -_us_commercial_regions = [ - 'us-east-1', - 'us-west-1', - 'us-west-2' -] - -_govcloud_regions = [ - 'us-gov-west-1', - 'us-gov-east-1' -] - -_china_region = [ - 'cn-north-1', - 'cn-northwest-1' -] - -_incompatible_regions = [ - 'cp-noexist-1' -] - -@pytest.mark.parametrize('region', _govcloud_regions) 
+_us_commercial_regions = ["us-east-1", "us-west-1", "us-west-2"] + +_govcloud_regions = ["us-gov-west-1", "us-gov-east-1"] + +_china_region = ["cn-north-1", "cn-northwest-1"] + +_incompatible_regions = ["cp-noexist-1"] + + +@pytest.mark.parametrize("region", _govcloud_regions) def test_partition_govcloud_regions(region): - assert get_partition(region) == 'aws-us-gov' + assert get_partition(region) == "aws-us-gov" -@pytest.mark.parametrize('region', _us_commercial_regions) +@pytest.mark.parametrize("region", _us_commercial_regions) def test_partition_us_commercial_regions(region): - assert get_partition(region) == 'aws' + assert get_partition(region) == "aws" + -@pytest.mark.parametrize('region', _china_region) +@pytest.mark.parametrize("region", _china_region) def test_partition_china_regions(region): - assert get_partition(region) == 'aws-cn' + assert get_partition(region) == "aws-cn" + -@pytest.mark.parametrize('region', _incompatible_regions) +@pytest.mark.parametrize("region", _incompatible_regions) def test_partition_unknown_regions(region): with pytest.raises(IncompatibleRegionError) as excinfo: get_partition(region) error_message = str(excinfo.value) - assert error_message.find(f"The region {region} is not supported") >= 0 \ No newline at end of file + assert error_message.find(f"The region {region} is not supported") >= 0 diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/pytest.ini b/src/lambda_codebase/initial_commit/bootstrap_repository/pytest.ini index 88cf3750c..e44547e53 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/pytest.ini +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/pytest.ini @@ -5,4 +5,4 @@ env = ACCOUNT_ID="123456789012" testpaths = adf-build/tests adf-bootstrap/deployment/lambda_codebase/tests adf-build/shared/python/tests/ -norecursedirs = adf-bootstrap/deployment/lambda_codebase/initial_commit adf-bootstrap/deployment/lambda_codebase/determine_default_branch adf-build/shared 
+norecursedirs = adf-bootstrap/deployment/lambda_codebase/initial_commit adf-bootstrap/deployment/lambda_codebase/determine_default_branch adf-build/shared adf-build/china-support diff --git a/src/lambda_codebase/jump_role_manager/tests/test_main.py b/src/lambda_codebase/jump_role_manager/tests/test_main.py index 4a87f4169..0977c8bbd 100644 --- a/src/lambda_codebase/jump_role_manager/tests/test_main.py +++ b/src/lambda_codebase/jump_role_manager/tests/test_main.py @@ -12,6 +12,7 @@ from aws_xray_sdk import global_sdk_config from main import ( + AWS_PARTITION, ADF_JUMP_MANAGED_POLICY_ARN, ADF_TEST_BOOTSTRAP_ROLE_NAME, CROSS_ACCOUNT_ACCESS_ROLE_NAME, @@ -691,7 +692,7 @@ def test_generate_policy_document(get_mock): "Effect": "Allow", "Action": ["sts:AssumeRole"], "Resource": [ - f"arn:aws:iam::*:role/{CROSS_ACCOUNT_ACCESS_ROLE_NAME}", + f"arn:{AWS_PARTITION}:iam::*:role/{CROSS_ACCOUNT_ACCESS_ROLE_NAME}", ], "Condition": { "DateLessThan": { From 72f4e026153c2cb8a498de4b49abb4d8a6321625 Mon Sep 17 00:00:00 2001 From: Dan Xie Date: Tue, 15 Apr 2025 18:17:49 +0800 Subject: [PATCH 3/3] Fix the china extra stack deletion issue. 
--- .../adf-build/china-support/create_s3_cn.py | 4 ++-- .../adf-build/shared/python/cloudformation.py | 10 +++++++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py index d6e0958cb..f2d2d23d6 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/china-support/create_s3_cn.py @@ -14,14 +14,14 @@ REGION_DEFAULT = os.environ["AWS_REGION"] MANAGEMENT_ACCOUNT_ID = os.environ["MANAGEMENT_ACCOUNT_ID"] LOGGER = configure_logger(__name__) - +ADF_GLOBAL_BOOTSTRAP_CHINA_BUCKET_STACK_NAME = "adf-regional-base-china-bucket" def _create_s3_bucket(bucket_name): try: LOGGER.info("Deploy S3 bucket %s...", bucket_name) extra_deploy_region = "cn-northwest-1" template_path = "adf-build/china-support/cn_northwest_bucket.yml" - stack_name = "adf-regional-base-china-bucket" + stack_name = ADF_GLOBAL_BOOTSTRAP_CHINA_BUCKET_STACK_NAME parameters = [ { "ParameterKey": "BucketName", diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py index 6adfbdab6..07990fc39 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py @@ -16,6 +16,8 @@ from errors import InvalidTemplateError, GenericAccountConfigureError from logger import configure_logger from paginator import paginator +from partition import get_partition + LOGGER = configure_logger(__name__) STACK_TERMINATION_PROTECTION = os.environ.get('TERMINATION_PROTECTION', False) @@ -29,7 +31,8 @@ 
CFN_UNACCEPTED_CHARS = re.compile(r"[^-a-zA-Z0-9]") ADF_GLOBAL_IAM_STACK_NAME = 'adf-global-base-iam' ADF_GLOBAL_BOOTSTRAP_STACK_NAME = 'adf-global-base-bootstrap' - +ADF_GLOBAL_BOOTSTRAP_CHINA_BUCKET_STACK_NAME = "adf-regional-base-china-bucket" +ADF_GLOBAL_BOOTSTRAP_CHINA_EXTRA_STACK_NAME = "adf-regional-base-china-extra" class StackProperties: clean_stack_status = [ @@ -105,6 +108,7 @@ def __init__( else None ) self.s3 = s3 + self.partition = get_partition(region) self.stack_name = stack_name or self._get_stack_name() def _get_geo_prefix(self): @@ -148,6 +152,10 @@ def _get_valid_stack_names(self): valid_stack_names.append(ADF_GLOBAL_IAM_STACK_NAME) valid_stack_names.append(ADF_GLOBAL_BOOTSTRAP_STACK_NAME) + if self.partition == "aws-cn": + valid_stack_names.append(ADF_GLOBAL_BOOTSTRAP_CHINA_BUCKET_STACK_NAME) + valid_stack_names.append(ADF_GLOBAL_BOOTSTRAP_CHINA_EXTRA_STACK_NAME) + return valid_stack_names