diff --git a/packages/@aws-cdk/custom-resource-handlers/lib/aws-s3-deployment/bucket-deployment-handler/index.py b/packages/@aws-cdk/custom-resource-handlers/lib/aws-s3-deployment/bucket-deployment-handler/index.py index 8a7928a4c350d..533fe8c2072d9 100644 --- a/packages/@aws-cdk/custom-resource-handlers/lib/aws-s3-deployment/bucket-deployment-handler/index.py +++ b/packages/@aws-cdk/custom-resource-handlers/lib/aws-s3-deployment/bucket-deployment-handler/index.py @@ -53,6 +53,7 @@ def cfn_error(message=None): source_bucket_names = props['SourceBucketNames'] source_object_keys = props['SourceObjectKeys'] source_markers = props.get('SourceMarkers', None) + source_version_ids = props.get('SourceVersionIds', props.get('SourceVersionIDs', [])) dest_bucket_name = props['DestinationBucketName'] dest_bucket_prefix = props.get('DestinationBucketKeyPrefix', '') extract = props.get('Extract', 'true') == 'true' @@ -93,7 +94,14 @@ def cfn_error(message=None): if dest_bucket_prefix == "/": dest_bucket_prefix = "" - s3_source_zips = list(map(lambda name, key: "s3://%s/%s" % (name, key), source_bucket_names, source_object_keys)) + s3_source_zips = [ + { + "BucketName": bucket, + "ObjectKey": object_key, + "VersionID": source_version_ids[i] if i < len(source_version_ids) else "" + } + for i, (bucket, object_key) in enumerate(zip(source_bucket_names, source_object_keys)) + ] s3_dest = "s3://%s/%s" % (dest_bucket_name, dest_bucket_prefix) old_s3_dest = "s3://%s/%s" % (old_props.get("DestinationBucketName", ""), old_props.get("DestinationBucketKeyPrefix", "")) @@ -181,19 +189,26 @@ def s3_deploy(s3_source_zips, s3_dest, user_metadata, system_metadata, prune, ex try: # download the archive from the source and extract to "contents" for i in range(len(s3_source_zips)): - s3_source_zip = s3_source_zips[i] + s3_source_bucket = s3_source_zips[i]['BucketName'] + s3_source_key = s3_source_zips[i]['ObjectKey'] + s3_source_version_id = s3_source_zips[i]['VersionID'] markers = source_markers[i] - + + s3_command = ["s3api", 
"get-object", "--bucket", s3_source_bucket, "--key", s3_source_key] + if len(s3_source_version_id) > 0: + s3_command.extend(["--version-id", s3_source_version_id]) if extract: archive=os.path.join(workdir, str(uuid4())) logger.info("archive: %s" % archive) - aws_command("s3", "cp", s3_source_zip, archive) + s3_command.append(archive) + aws_command(*s3_command) logger.info("| extracting archive to: %s\n" % contents_dir) logger.info("| markers: %s" % markers) extract_and_replace_markers(archive, contents_dir, markers) else: logger.info("| copying archive to: %s\n" % contents_dir) - aws_command("s3", "cp", s3_source_zip, contents_dir) + s3_command.append(contents_dir) + aws_command(*s3_command) # sync from "contents" to destination diff --git a/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/aws b/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/aws index 027a8b802ed59..e5e00cd0e9503 100755 --- a/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/aws +++ b/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/aws @@ -4,8 +4,8 @@ # # the mock behaves as follows: # - argvs are going to be written to "aws.out" (one command in each line) -# - if "aws s3 cp" is invoked, the destination will be populated with a test zip file. -# - for "cp" and "sync", "aws.out" argv[4] is replaced by "archive.zip" and "contents.zip" +# - if "aws s3api get-object" is invoked, the destination will be populated with a test zip file. 
+# - for "get-object" and "sync", "aws.out" argv[-1] is replaced by "archive.zip" and "contents.zip" # becuase the actual value is a full path of a temporary directory # import sys @@ -15,14 +15,14 @@ import shutil scriptdir=os.path.dirname(os.path.realpath(__file__)) -# if "cp" is called to contents, copy a test zip file to contents -if sys.argv[2] == "cp" and sys.argv[4].endswith("/contents"): - shutil.copy(os.path.join(scriptdir, 'test.zip'), sys.argv[4]) - sys.argv[4] = "/tmp/contents" -# else if "cp" is called with a local destination, copy a test zip file to the destination -elif sys.argv[2] == "cp" and not sys.argv[4].startswith("s3://"): - shutil.copyfile(os.path.join(scriptdir, 'test.zip'), sys.argv[4]) - sys.argv[4] = "archive.zip" +# if "get-object" is called to contents, copy a test zip file to contents +if sys.argv[2] == "get-object" and sys.argv[-1].endswith("/contents"): + shutil.copy(os.path.join(scriptdir, 'test.zip'), sys.argv[-1]) + sys.argv[-1] = "/tmp/contents" +# else if "get-object" is called with a local destination, copy a test zip file to the destination +elif sys.argv[2] == "get-object" and not sys.argv[-1].startswith("s3://"): + shutil.copyfile(os.path.join(scriptdir, 'test.zip'), sys.argv[-1]) + sys.argv[-1] = "archive.zip" if sys.argv[2] == "sync": contentsIdx = 3 diff --git a/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/test.py b/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/test.py index 71b18c930b97b..712ade0062119 100644 --- a/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/test.py +++ b/packages/@aws-cdk/custom-resource-handlers/test/aws-s3-deployment/bucket-deployment-handler/test.py @@ -51,7 +51,7 @@ def test_create_update(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", 
"contents.zip", "s3:///"] ) @@ -64,7 +64,7 @@ def test_create_no_delete(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "contents.zip", "s3:///"] ) @@ -79,7 +79,7 @@ def test_update_no_delete(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "contents.zip", "s3:///"] ) @@ -92,7 +92,7 @@ def test_create_exclude(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--exclude", "sample.json", "contents.zip", "s3:///"] ) @@ -107,7 +107,7 @@ def test_update_exclude(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--exclude", "sample.json", "contents.zip", "s3:///"] ) @@ -120,7 +120,7 @@ def test_create_include(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--include", "/sample/*.json", "contents.zip", "s3:///"] ) @@ -135,7 +135,7 @@ def test_update_include(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--include", "/sample/*.json", "contents.zip", "s3:///"] ) @@ -149,7 +149,7 @@ def test_create_include_exclude(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--exclude", "/sample/*", "--include", "/sample/*.json", "contents.zip", "s3:///"] ) @@ -165,7 +165,7 @@ def test_update_include_exclude(self): 
}, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--exclude", "/sample/*", "--include", "/sample/*.json", "contents.zip", "s3:///"] ) @@ -178,7 +178,7 @@ def test_create_no_extract_file(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "/tmp/contents"], + ["s3api", "get-object","--bucket", "", "--key", "", "/tmp/contents"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -193,7 +193,7 @@ def test_update_no_extract_file(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "/tmp/contents"], + ["s3api", "get-object","--bucket", "", "--key", "", "/tmp/contents"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -207,7 +207,7 @@ def test_create_multiple_include_exclude(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--exclude", "/sample/*", "--exclude", "/another/*", "--include", "/sample/*.json", "--include", "/another/*.json", "contents.zip", "s3:///"] ) @@ -223,7 +223,7 @@ def test_update_multiple_include_exclude(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "--exclude", "/sample/*", "--exclude", "/another/*", "--include", "/sample/*.json", "--include", "/another/*.json", "contents.zip", "s3:///"] ) @@ -237,8 +237,8 @@ def test_create_update_multiple_sources(self): # Note: these are different files in real-life. 
For testing purposes, we hijack # the command to output a static filename, archive.zip self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -251,7 +251,7 @@ def test_create_with_backslash_prefix_same_as_no_prefix(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -265,7 +265,7 @@ def test_create_update_with_dest_key(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -280,7 +280,7 @@ def test_create_update_with_metadata(self): }) self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///", "--content-type", "text/html", "--content-language", "en", "--metadata", "{\"best\":\"game\"}", "--metadata-directive", "REPLACE"] ) @@ -390,7 +390,7 @@ def test_update_same_dest(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -471,7 +471,7 @@ def test_update_new_dest_retain(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -489,7 +489,7 @@ def test_update_new_dest_no_retain(self): self.assertAwsCommands( ["s3", "rm", "s3:///", "--recursive"], - ["s3", "cp", "s3:///", 
"archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -504,7 +504,7 @@ def test_update_new_dest_retain_implicit(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -522,7 +522,7 @@ def test_update_new_dest_prefix_no_retain(self): self.assertAwsCommands( ["s3", "rm", "s3:///", "--recursive"], - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -537,7 +537,7 @@ def test_update_new_dest_prefix_retain_implicit(self): }, physical_id="") self.assertAwsCommands( - ["s3", "cp", "s3:///", "archive.zip"], + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], ["s3", "sync", "--delete", "contents.zip", "s3:///"] ) @@ -684,6 +684,31 @@ def test_marker_substitution(self): with open(os.path.join(workdir, "subfolder", "boom.txt"), "r") as file: self.assertEqual(file.read().rstrip(), "Another value1-source2 file with _marker2_ hey!\nLine 2 with value1-source2 again :-)") + def test_create_update_with_version_id(self): + invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "SourceVersionIDs": [""], + "DestinationBucketName": "" + }) + + self.assertAwsCommands( + ["s3api", "get-object","--bucket", "", "--key", "", "--version-id", "", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_create_update_with_empty_version_id(self): + invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "SourceVersionIDs": [""], + "DestinationBucketName": "" + }) + + self.assertAwsCommands( + ["s3api", "get-object","--bucket", "", "--key", "", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) # 
asserts that a given list of "aws xxx" commands have been invoked (in order) def assertAwsCommands(self, *expected): diff --git a/packages/aws-cdk-lib/aws-s3-deployment/lib/bucket-deployment.ts b/packages/aws-cdk-lib/aws-s3-deployment/lib/bucket-deployment.ts index f53eb3e3379d1..dde90b7b88a8c 100644 --- a/packages/aws-cdk-lib/aws-s3-deployment/lib/bucket-deployment.ts +++ b/packages/aws-cdk-lib/aws-s3-deployment/lib/bucket-deployment.ts @@ -426,6 +426,7 @@ export class BucketDeployment extends Construct { }, [] as Array>); }, }, { omitEmptyArray: true }), + SourceVersionIds: cdk.Lazy.uncachedList({ produce: () => this.sources.map(source => source.versionId ?? '') }), DestinationBucketName: this.destinationBucket.bucketName, DestinationBucketKeyPrefix: props.destinationKeyPrefix, RetainOnDelete: props.retainOnDelete, @@ -926,5 +927,6 @@ function sourceConfigEqual(stack: cdk.Stack, a: SourceConfig, b: SourceConfig) { return ( JSON.stringify(stack.resolve(a.bucket.bucketName)) === JSON.stringify(stack.resolve(b.bucket.bucketName)) && a.zipObjectKey === b.zipObjectKey - && a.markers === undefined && b.markers === undefined); + && a.markers === undefined && b.markers === undefined + && a.versionId === undefined && b.versionId === undefined); } diff --git a/packages/aws-cdk-lib/aws-s3-deployment/lib/source.ts b/packages/aws-cdk-lib/aws-s3-deployment/lib/source.ts index c6815194aa843..f93142e95e202 100644 --- a/packages/aws-cdk-lib/aws-s3-deployment/lib/source.ts +++ b/packages/aws-cdk-lib/aws-s3-deployment/lib/source.ts @@ -26,6 +26,12 @@ export interface SourceConfig { * @default - no markers */ readonly markers?: Record; + + /** + * The identifier of the S3 version of the object in the source bucket. 
+ * @default - none + */ + readonly versionId?: string; } /** diff --git a/packages/aws-cdk-lib/aws-s3-deployment/test/bucket-deployment.test.ts b/packages/aws-cdk-lib/aws-s3-deployment/test/bucket-deployment.test.ts index 6f6738117ed52..6aae29cd2c8bf 100644 --- a/packages/aws-cdk-lib/aws-s3-deployment/test/bucket-deployment.test.ts +++ b/packages/aws-cdk-lib/aws-s3-deployment/test/bucket-deployment.test.ts @@ -1374,6 +1374,7 @@ test('"SourceMarkers" is not included if none of the sources have markers', () = 'ServiceToken', 'SourceBucketNames', 'SourceObjectKeys', + 'SourceVersionIds', 'DestinationBucketName', 'Prune', 'OutputObjectKeys',