Skip to content

Commit 3a6e08b

Browse files
committed
feat: 1) Add default s3 bucket name, s3 object and account id of the bucket properties in S3 provider
2) Add s3 source pipeline trigger via s3 notification events function 3) Add cross account s3 event rule forwarder function
1 parent 5178240 commit 3a6e08b

File tree

11 files changed

+297
-84
lines changed

11 files changed

+297
-84
lines changed

docs/providers-guide.md

+39-5
Original file line numberDiff line numberDiff line change
@@ -125,15 +125,41 @@ Please add the required S3 read permissions to the `adf-codecommit-role` via the
125125
the `adf-codecommit-role` S3 read permissions in the bucket policy of the
126126
source bucket.
127127

128+
If the `poll_for_changes` property is set to `False`, ADF will monitor the S3 events
129+
`Object Created` or `Object Copy` for the defined `object_key` of the defined
130+
`bucket_name` and trigger the related pipeline.
131+
132+
The source S3 bucket must have `Bucket Versioning` enabled and send notifications to `Amazon EventBridge`,
133+
otherwise, the auto pipeline trigger will not work.
134+
135+
ADF supports a source S3 bucket in a target account other than the default Deployment account.
136+
To make it work, an EventBridge resource policy must be manually added to the default event bus
137+
in the Deployment account. For example:
138+
139+
```
140+
{
141+
"Version": "2012-10-17",
142+
"Statement": [{
143+
"Sid": "allow_account_to_put_events",
144+
"Effect": "Allow",
145+
"Principal": {
146+
"AWS": "arn:aws:iam::<target-account-id>:root"
147+
},
148+
"Action": "events:PutEvents",
149+
"Resource": "arn:aws:events:eu-central-1:<deployment-account-id>:event-bus/default"
150+
}]
151+
}
152+
```
153+
128154
Provider type: `s3`.
129155

130156
#### Properties
131157

132-
- *account_id* - *(String)* **(required)**
133-
- The AWS Account ID where the source S3 Bucket is located.
134-
- *bucket_name* - *(String)* **(required)**
158+
- *account_id* - *(String)* **(optional)**
159+
- The AWS Account ID where the source S3 Bucket is located. By default, it is the Deployment account ID.
160+
- *bucket_name* - *(String)*
135161
- The Name of the S3 Bucket that will be the source of the pipeline.
136-
- *object_key* - *(String)* **(required)**
162+
- *object_key* - *(String)*
137163
- The Specific Object within the bucket that will trigger the pipeline
138164
execution.
139165
- *trigger_on_changes* - *(Boolean)* default: `True`.
@@ -144,7 +170,15 @@ Provider type: `s3`.
144170
- **By default**, it will trigger on changes using the polling mechanism of
145171
CodePipeline. Monitoring the S3 object so it can trigger a release when an
146172
update took place.
147-
173+
- *poll_for_changes* - *(Boolean)* default: `True`.
174+
- If CodePipeline should poll the repository for changes, defaults to `True`
175+
for this provider. As the name implies, when polling
176+
for changes it will check the repository for updates every minute or so.
177+
This will show up as actions in CloudTrail.
178+
- **By default**, it will poll for changes; however, if set to `False`, it
179+
will use the event triggered by an S3 notification when an update to the
180+
S3 object takes place.
181+
148182
### CodeConnections
149183

150184
Use CodeConnections as a source to trigger your pipeline. The source action retrieves

src/lambda_codebase/initial_commit/adfconfig.yml.j2

+1
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ config:
3333
default-scm-branch: main
3434
# Optional:
3535
# default-scm-codecommit-account-id: "123456789012"
36+
# default-s3-source-bucket-name: "mys3deploymentbucket"
3637
deployment-maps:
3738
allow-empty-target: disabled
3839
# ^ Needs to be set to "enabled" to activate. Defaults to "disabled" when

src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py

+59-26
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,8 @@
2323
CLOUDWATCH = boto3.client("cloudwatch")
2424
METRICS = ADFMetrics(CLOUDWATCH, "PIPELINE_MANAGEMENT/RULE")
2525

26-
_CACHE = None
26+
_CACHE_S3 = None
27+
_CACHE_CODECOMMIT = None
2728

2829

2930
def lambda_handler(event, _):
@@ -38,31 +39,52 @@ def lambda_handler(event, _):
3839
event (dict): The ADF Pipeline Management State Machine execution
3940
input object.
4041
"""
41-
4242
# pylint: disable=W0603
4343
# Global variable here to cache across lambda execution runtimes.
44-
global _CACHE
45-
if not _CACHE:
46-
_CACHE = Cache()
44+
global _CACHE_S3, _CACHE_CODECOMMIT
45+
46+
if not _CACHE_S3:
47+
_CACHE_S3 = Cache()
48+
METRICS.put_metric_data(
49+
{"MetricName": "S3CacheInitialized", "Value": 1, "Unit": "Count"}
50+
)
51+
52+
if not _CACHE_CODECOMMIT:
53+
_CACHE_CODECOMMIT = Cache()
4754
METRICS.put_metric_data(
48-
{"MetricName": "CacheInitialized", "Value": 1, "Unit": "Count"}
55+
{"MetricName": "CodeCommitCacheInitialized", "Value": 1, "Unit": "Count"}
4956
)
5057

5158
LOGGER.info(event)
5259

5360
pipeline = event['pipeline_definition']
5461

55-
source_provider = (
56-
pipeline.get("default_providers", {})
57-
.get("source", {})
58-
.get("provider", "codecommit")
59-
)
60-
source_account_id = (
61-
pipeline.get("default_providers", {})
62-
.get("source", {})
63-
.get("properties", {})
64-
.get("account_id")
65-
)
62+
default_source_provider = pipeline.get("default_providers", {}).get("source", {})
63+
source_provider = default_source_provider.get("provider", "codecommit")
64+
source_provider_properties = default_source_provider.get("properties", {})
65+
source_account_id = source_provider_properties.get("account_id")
66+
source_bucket_name = source_provider_properties.get("bucket_name")
67+
if source_provider == "s3":
68+
if not source_account_id:
69+
source_account_id = DEPLOYMENT_ACCOUNT_ID
70+
pipeline["default_providers"]["source"].setdefault("properties", {})["account_id"] = source_account_id
71+
if not source_bucket_name:
72+
try:
73+
parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
74+
default_s3_source_bucket_name = parameter_store.fetch_parameter(
75+
"/adf/scm/default-s3-source-bucket-name"
76+
)
77+
except ParameterNotFoundError:
78+
default_s3_source_bucket_name = os.environ["S3_BUCKET_NAME"]
79+
LOGGER.debug("default_s3_source_bucket_name not found in SSM - Fall back to s3_bucket_name.")
80+
pipeline["default_providers"]["source"].setdefault("properties", {})["bucket_name"] = default_s3_source_bucket_name
81+
source_bucket_name = default_s3_source_bucket_name
82+
event_params = {
83+
"SourceS3BucketName": source_bucket_name
84+
}
85+
else:
86+
event_params = {}
87+
6688

6789
# Resolve codecommit source_account_id in case it is not set
6890
if source_provider == "codecommit" and not source_account_id:
@@ -98,25 +120,36 @@ def lambda_handler(event, _):
98120
)
99121

100122
if (
101-
source_provider == "codecommit"
102-
and source_account_id
123+
source_account_id
103124
and int(source_account_id) != int(DEPLOYMENT_ACCOUNT_ID)
104-
and not _CACHE.exists(source_account_id)
125+
and (
126+
(source_provider == "codecommit" and not _CACHE_CODECOMMIT.exists(source_account_id))
127+
or (source_provider == "s3" and not _CACHE_S3.exists(source_account_id))
128+
)
105129
):
106130
LOGGER.info(
107-
"Source is CodeCommit and the repository is hosted in the %s "
131+
"Source is %s and the repository/bucket is hosted in the %s "
108132
"account instead of the deployment account (%s). Creating or "
109133
"updating EventBridge forward rule to forward change events "
110134
"from the source account to the deployment account in "
111135
"EventBridge.",
136+
source_provider,
112137
source_account_id,
113138
DEPLOYMENT_ACCOUNT_ID,
114139
)
115-
rule = Rule(source_account_id)
140+
141+
rule = Rule(source_account_id, source_provider, event_params)
116142
rule.create_update()
117-
_CACHE.add(source_account_id, True)
118-
METRICS.put_metric_data(
119-
{"MetricName": "CreateOrUpdate", "Value": 1, "Unit": "Count"}
120-
)
143+
144+
if source_provider == "codecommit":
145+
_CACHE_CODECOMMIT.add(source_account_id, True)
146+
METRICS.put_metric_data(
147+
{"MetricName": "CodeCommitCreateOrUpdate", "Value": 1, "Unit": "Count"}
148+
)
149+
elif source_provider == "s3":
150+
_CACHE_S3.add(source_account_id, True)
151+
METRICS.put_metric_data(
152+
{"MetricName": "S3CreateOrUpdate", "Value": 1, "Unit": "Count"}
153+
)
121154

122155
return event

src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/global.yml

+6-5
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,7 @@ Resources:
7070
- "codecommit:UploadArchive"
7171
- "codepipeline:StartPipelineExecution"
7272
- "events:PutEvents"
73+
- "s3:Get*"
7374
Resource: "*"
7475
- Effect: Allow
7576
Action:
@@ -372,7 +373,7 @@ Resources:
372373
- "iam:TagRole"
373374
- "iam:UntagRole"
374375
Resource:
375-
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
376+
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
376377
- Effect: Allow
377378
Sid: "IAMFullPathAndNameOnly"
378379
Action:
@@ -381,21 +382,21 @@ Resources:
381382
- "iam:GetRolePolicy"
382383
- "iam:PutRolePolicy"
383384
Resource:
384-
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
385-
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
385+
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
386+
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
386387
- Effect: Allow
387388
Sid: "IAMPassRole"
388389
Action:
389390
- "iam:PassRole"
390391
Resource:
391-
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
392+
- !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
392393
Condition:
393394
StringEquals:
394395
'iam:PassedToService':
395396
- "events.amazonaws.com"
396397
ArnEquals:
397398
'iam:AssociatedResourceArn':
398-
- !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
399+
- !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
399400
- Effect: Allow
400401
Sid: "KMS"
401402
Action:

src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py

+9
Original file line numberDiff line numberDiff line change
@@ -188,6 +188,15 @@ def prepare_deployment_account(sts, deployment_account_id, config):
188188
.get('default-scm-codecommit-account-id', deployment_account_id)
189189
)
190190
)
191+
# TODO merge
192+
deployment_account_parameter_store.put_parameter(
193+
'scm/default-s3-source-bucket-name',
194+
(
195+
config.config
196+
.get('scm', {})
197+
.get('default-s3-source-bucket-name', S3_BUCKET_NAME)
198+
)
199+
)
191200
deployment_account_parameter_store.put_parameter(
192201
'deployment_maps/allow_empty_target',
193202
config.config.get('deployment-maps', {}).get(

src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py

+33-10
Original file line numberDiff line numberDiff line change
@@ -158,14 +158,18 @@ def _generate_configuration(self):
158158
.get('default_providers', {})
159159
.get('source', {})
160160
.get('properties', {})
161-
.get('object_key')
161+
.get('object_key', f"{self.map_params['name']}.zip")
162162
),
163163
"PollForSourceChanges": (
164-
self.map_params
165-
.get('default_providers', {})
166-
.get('source', {})
167-
.get('properties', {})
168-
.get('trigger_on_changes', True)
164+
(
165+
self.map_params['default_providers']['source']
166+
.get('properties', {})
167+
.get('trigger_on_changes', True)
168+
) and (
169+
self.map_params['default_providers']['source']
170+
.get('properties', {})
171+
.get('poll_for_changes', True)
172+
)
169173
),
170174
}
171175
if self.provider == "S3" and self.category == "Deploy":
@@ -202,7 +206,7 @@ def _generate_configuration(self):
202206
.get('default_providers', {})
203207
.get('deploy', {})
204208
.get('properties', {})
205-
.get('object_key')
209+
.get('object_key', f"{self.map_params['name']}.zip")
206210
))
207211
),
208212
"KMSEncryptionKeyARN": (
@@ -706,7 +710,12 @@ def __init__(
706710
'pipeline',
707711
**pipeline_args
708712
)
709-
adf_events.Events(self, 'events', {
713+
_provider = (map_params
714+
.get('default_providers', {})
715+
.get('source', {})
716+
.get('provider')
717+
)
718+
_event_params = {
710719
"pipeline": (
711720
f'arn:{ADF_DEPLOYMENT_PARTITION}:codepipeline:'
712721
f'{ADF_DEPLOYMENT_REGION}:{ADF_DEPLOYMENT_ACCOUNT_ID}:'
@@ -753,7 +762,7 @@ def __init__(
753762
.get('default_providers', {})
754763
.get('source', {})
755764
.get('properties', {})
756-
.get('poll_for_changes', False)
765+
.get('poll_for_changes', True if _provider == "s3" else False)
757766
),
758767
"trigger_on_changes": (
759768
map_params
@@ -763,7 +772,21 @@ def __init__(
763772
.get('trigger_on_changes', True)
764773
),
765774
}
766-
})
775+
}
776+
if _provider == "s3":
777+
_event_params["s3_bucket_name"] = (map_params
778+
.get('default_providers', {})
779+
.get('source', {})
780+
.get('properties', {})
781+
.get('bucket_name')
782+
)
783+
_event_params["s3_object_key"] = (map_params
784+
.get('default_providers', {})
785+
.get('source', {})
786+
.get('properties', {})
787+
.get('object_key', f"{map_params['name']}.zip")
788+
)
789+
adf_events.Events(self, 'events', _event_params)
767790

768791
@staticmethod
769792
def restructure_tags(current_tags):

0 commit comments

Comments
 (0)