diff --git a/javascriptv3/example_code/cross-services/photo-asset-manager/tests/detect-labels-handler.unit.test.js b/javascriptv3/example_code/cross-services/photo-asset-manager/tests/detect-labels-handler.unit.test.js index 9afa3a3a1c3..298f61620ed 100644 --- a/javascriptv3/example_code/cross-services/photo-asset-manager/tests/detect-labels-handler.unit.test.js +++ b/javascriptv3/example_code/cross-services/photo-asset-manager/tests/detect-labels-handler.unit.test.js @@ -13,7 +13,7 @@ describe("detect-labels handler", () => { { s3: { bucket: { - name: "my-bucket", + name: "amzn-s3-demo-bucket", }, object: { key: "my_image.jpeg", diff --git a/javascriptv3/example_code/libs/tests/util-string.unit.test.js b/javascriptv3/example_code/libs/tests/util-string.unit.test.js index 5b2e2d8dd7c..3a297a7f8d9 100644 --- a/javascriptv3/example_code/libs/tests/util-string.unit.test.js +++ b/javascriptv3/example_code/libs/tests/util-string.unit.test.js @@ -16,6 +16,12 @@ describe("util-string", () => { const u2 = getUniqueName(value); expect(u1).not.toEqual(u2); }); + + it("should return undefined if a falsy value is passed in", () => { + expect(getUniqueName()).toBeUndefined(); + expect(getUniqueName("")).toBeUndefined(); + expect(getUniqueName(0)).toBeUndefined(); + }); }); describe("postfix", () => { diff --git a/javascriptv3/example_code/libs/utils/util-node.js b/javascriptv3/example_code/libs/utils/util-node.js index 2d1c2ce3eba..563fe17be89 100644 --- a/javascriptv3/example_code/libs/utils/util-node.js +++ b/javascriptv3/example_code/libs/utils/util-node.js @@ -1,7 +1,14 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { fileURLToPath } from "url"; export const getEnv = (/** @type {string} */ key) => process.env[key]; export const setEnv = (/** @type {string} */ key, value) => { process.env[key] = value; }; + +/** + * Check if the running file was run directly. + * @param {string | URL} fileUrl + */ +export const isMain = (fileUrl) => process.argv[1] === fileURLToPath(fileUrl); diff --git a/javascriptv3/example_code/libs/utils/util-string.js b/javascriptv3/example_code/libs/utils/util-string.js index c7b7bfd98a0..fc65146fa05 100644 --- a/javascriptv3/example_code/libs/utils/util-string.js +++ b/javascriptv3/example_code/libs/utils/util-string.js @@ -5,7 +5,13 @@ import { v4 as uuidv4 } from "uuid"; /** * @param {string} name */ -export const getUniqueName = (name) => `${uuidv4()}-${name.toLowerCase()}`; +export const getUniqueName = (name) => { + if (!name) { + return; + } + + return `${name.toLowerCase()}-${uuidv4()}`; +}; /** * @param {int} length diff --git a/javascriptv3/example_code/s3/README.md b/javascriptv3/example_code/s3/README.md index f0e424eecfd..dcf64a44b10 100644 --- a/javascriptv3/example_code/s3/README.md +++ b/javascriptv3/example_code/s3/README.md @@ -45,28 +45,28 @@ Code examples that show you how to perform the essential operations within a ser Code excerpts that show you how to call individual service functions. 
-- [CopyObject](actions/copy-object.js#L6) -- [CreateBucket](actions/create-bucket.js#L6) +- [CopyObject](actions/copy-object.js#L4) +- [CreateBucket](actions/create-bucket.js#L4) - [DeleteBucket](actions/delete-bucket.js#L6) -- [DeleteBucketPolicy](actions/delete-bucket-policy.js#L6) -- [DeleteBucketWebsite](actions/delete-bucket-website.js#L6) -- [DeleteObject](actions/delete-object.js#L6) -- [DeleteObjects](actions/delete-objects.js#L6) -- [GetBucketAcl](actions/get-bucket-acl.js#L6) -- [GetBucketCors](actions/get-bucket-cors.js#L6) -- [GetBucketPolicy](actions/get-bucket-policy.js#L6) -- [GetBucketWebsite](actions/get-bucket-website.js#L6) -- [GetObject](actions/get-object.js#L6) +- [DeleteBucketPolicy](actions/delete-bucket-policy.js#L4) +- [DeleteBucketWebsite](actions/delete-bucket-website.js#L4) +- [DeleteObject](actions/delete-object.js#L4) +- [DeleteObjects](actions/delete-objects.js#L4) +- [GetBucketAcl](actions/get-bucket-acl.js#L4) +- [GetBucketCors](actions/get-bucket-cors.js#L4) +- [GetBucketPolicy](actions/get-bucket-policy.js#L4) +- [GetBucketWebsite](actions/get-bucket-website.js#L4) +- [GetObject](actions/get-object.js#L4) - [GetObjectLegalHold](actions/get-object-legal-hold.js) - [GetObjectLockConfiguration](actions/get-object-lock-configuration.js) - [GetObjectRetention](actions/get-object-retention.js) - [ListBuckets](actions/list-buckets.js#L6) -- [ListObjectsV2](actions/list-objects.js#L6) -- [PutBucketAcl](actions/put-bucket-acl.js#L6) -- [PutBucketCors](actions/put-bucket-cors.js#L6) -- [PutBucketPolicy](actions/put-bucket-policy.js#L6) -- [PutBucketWebsite](actions/put-bucket-website.js#L6) -- [PutObject](actions/put-object.js#L6) +- [ListObjectsV2](actions/list-objects.js#L4) +- [PutBucketAcl](actions/put-bucket-acl.js#L4) +- [PutBucketCors](actions/put-bucket-cors.js#L4) +- [PutBucketPolicy](actions/put-bucket-policy.js#L4) +- [PutBucketWebsite](actions/put-bucket-website.js#L4) +- [PutObject](actions/put-object.js#L4) - [PutObjectLegalHold](actions/put-object-legal-hold.js) - [PutObjectLockConfiguration](actions/put-object-lock-configuration.js) - [PutObjectRetention](actions/put-object-retention.js) diff --git a/javascriptv3/example_code/s3/actions/copy-object.js b/javascriptv3/example_code/s3/actions/copy-object.js index c986c194b4e..8d85703c075 100644 --- a/javascriptv3/example_code/s3/actions/copy-object.js +++ b/javascriptv3/example_code/s3/actions/copy-object.js @@ -1,33 +1,81 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.buckets.copyObjectV3] -import { S3Client, CopyObjectCommand } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + S3Client, + CopyObjectCommand, + ObjectNotInActiveTierError, + waitUntilObjectExists, +} from "@aws-sdk/client-s3"; /** - * Copy an Amazon S3 object from one bucket to another. + * Copy an S3 object from one bucket to another. 
+ * + * @param {{ + * sourceBucket: string, + * sourceKey: string, + * destinationBucket: string, + * destinationKey: string }} config */ -export const main = async () => { - const command = new CopyObjectCommand({ - CopySource: "SOURCE_BUCKET/SOURCE_OBJECT_KEY", - Bucket: "DESTINATION_BUCKET", - Key: "NEW_OBJECT_KEY", - }); +export const main = async ({ + sourceBucket, + sourceKey, + destinationBucket, + destinationKey, +}) => { + const client = new S3Client({}); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send( + new CopyObjectCommand({ + CopySource: `${sourceBucket}/${sourceKey}`, + Bucket: destinationBucket, + Key: destinationKey, + }), + ); + await waitUntilObjectExists( + { client }, + { Bucket: destinationBucket, Key: destinationKey }, + ); + console.log( + `Successfully copied ${sourceBucket}/${sourceKey} to ${destinationBucket}/${destinationKey}`, + ); + } catch (caught) { + if (caught instanceof ObjectNotInActiveTierError) { + console.error( + `Could not copy ${sourceKey} from ${sourceBucket}. Object is not in the active tier.`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.buckets.copyObjectV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + sourceBucket: { + type: "string", + default: "source-bucket", + }, + sourceKey: { + type: "string", + default: "todo.txt", + }, + destinationBucket: { + type: "string", + default: "destination-bucket", + }, + destinationKey: { + type: "string", + default: "todo.txt", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/create-bucket.js b/javascriptv3/example_code/s3/actions/create-bucket.js index 9edcd57c65a..df4a87b431c 100644 --- a/javascriptv3/example_code/s3/actions/create-bucket.js +++ b/javascriptv3/example_code/s3/actions/create-bucket.js @@ -1,30 +1,64 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.buckets.createBucketV3] -import { CreateBucketCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + BucketAlreadyExists, + BucketAlreadyOwnedByYou, + CreateBucketCommand, + S3Client, + waitUntilBucketExists, +} from "@aws-sdk/client-s3"; -export const main = async () => { - const command = new CreateBucketCommand({ - // The name of the bucket. Bucket names are unique and have several other constraints. - // See https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html - Bucket: "bucket-name", - }); +/** + * Create an Amazon S3 bucket. + * @param {{ bucketName: string }} config + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const { Location } = await client.send(command); + const { Location } = await client.send( + new CreateBucketCommand({ + // The name of the bucket. Bucket names are unique and have several other constraints. 
+ // See https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html + Bucket: bucketName, + }), + ); + await waitUntilBucketExists({ client }, { Bucket: bucketName }); console.log(`Bucket created with location ${Location}`); - } catch (err) { - console.error(err); + } catch (caught) { + if (caught instanceof BucketAlreadyExists) { + console.error( + `The bucket "${bucketName}" already exists in another AWS account. Bucket names must be globally unique.`, + ); + } + // WARNING: If you try to create a bucket in the North Virginia region, + // and you already own a bucket in that region with the same name, this + // error will not be thrown. Instead, the call will return successfully + // and the ACL on that bucket will be reset. + else if (caught instanceof BucketAlreadyOwnedByYou) { + console.error( + `The bucket "${bucketName}" already exists in this AWS account.`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.buckets.createBucketV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "bucket-name", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/delete-bucket-policy.js b/javascriptv3/example_code/s3/actions/delete-bucket-policy.js index 7960834a505..f945ec53071 100644 --- a/javascriptv3/example_code/s3/actions/delete-bucket-policy.js +++ b/javascriptv3/example_code/s3/actions/delete-bucket-policy.js @@ -1,29 +1,57 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.policy.deleteBucketPolicyV3] -import { DeleteBucketPolicyCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + DeleteBucketPolicyCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -// This will remove the policy from the bucket. -export const main = async () => { - const command = new DeleteBucketPolicyCommand({ - Bucket: "test-bucket", - }); +/** + * Remove the policy from an Amazon S3 bucket. + * @param {{ bucketName: string }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send( + new DeleteBucketPolicyCommand({ + Bucket: bucketName, + }), + ); + console.log(`Bucket policy deleted from "${bucketName}".`); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while deleting policy from ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while deleting policy from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.policy.deleteBucketPolicyV3] -// Invoke main function if this file was run directly. 
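Every refactored action in this change set follows the same pattern: the command construction moves into an exported main({ ... }) that takes a plain config object, and a parseArgs harness replaces the bare main() call so the file can be run from the command line. A minimal usage sketch, assuming the file lives in the actions directory as above (the bucket name is only a placeholder):

// Import the action and call it with a config object...
import { main as deleteBucketPolicy } from "./delete-bucket-policy.js";
await deleteBucketPolicy({ bucketName: "amzn-s3-demo-bucket" });
// ...or run the file directly; parseArgs maps the flag to the same property:
// node ./delete-bucket-policy.js --bucketName amzn-s3-demo-bucket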
+// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/delete-bucket-website.js b/javascriptv3/example_code/s3/actions/delete-bucket-website.js index 7b9851bb5e5..5cb5774048a 100644 --- a/javascriptv3/example_code/s3/actions/delete-bucket-website.js +++ b/javascriptv3/example_code/s3/actions/delete-bucket-website.js @@ -1,29 +1,61 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.website.deleteBucketWebsiteV3] -import { DeleteBucketWebsiteCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + DeleteBucketWebsiteCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -// Disable static website hosting on the bucket. -export const main = async () => { - const command = new DeleteBucketWebsiteCommand({ - Bucket: "test-bucket", - }); +/** + * Remove the website configuration for a bucket. + * @param {{ bucketName: string }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send( + new DeleteBucketWebsiteCommand({ + Bucket: bucketName, + }), + ); + // The response code will be successful for both removed configurations and + // configurations that did not exist in the first place. + console.log( + `The bucket "${bucketName}" is no longer configured as a website, or it never was.`, + ); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while removing website configuration from ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while removing website configuration from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.website.deleteBucketWebsiteV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/delete-object.js b/javascriptv3/example_code/s3/actions/delete-object.js index 2b7416bc815..750014bee01 100644 --- a/javascriptv3/example_code/s3/actions/delete-object.js +++ b/javascriptv3/example_code/s3/actions/delete-object.js @@ -1,29 +1,71 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.buckets.deleteobjectV3] -import { DeleteObjectCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + DeleteObjectCommand, + S3Client, + S3ServiceException, + waitUntilObjectNotExists, +} from "@aws-sdk/client-s3"; -export const main = async () => { - const command = new DeleteObjectCommand({ - Bucket: "test-bucket", - Key: "test-key.txt", - }); +/** + * Delete one object from an Amazon S3 bucket. + * @param {{ bucketName: string, key: string }} + */ +export const main = async ({ bucketName, key }) => { + const client = new S3Client({}); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send( + new DeleteObjectCommand({ + Bucket: bucketName, + Key: key, + }), + ); + await waitUntilObjectNotExists( + { client }, + { Bucket: bucketName, Key: key }, + ); + // A successful delete, or a delete for a non-existent object, both return + // a 204 response code. + console.log( + `The object "${key}" from bucket "${bucketName}" was deleted, or it didn't exist.`, + ); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while deleting object from ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while deleting object from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.buckets.deleteobjectV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + key: { + type: "string", + default: "todo.txt", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/delete-objects.js b/javascriptv3/example_code/s3/actions/delete-objects.js index 6e977621ed6..c64c2c0562f 100644 --- a/javascriptv3/example_code/s3/actions/delete-objects.js +++ b/javascriptv3/example_code/s3/actions/delete-objects.js @@ -1,34 +1,78 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.buckets.deletemultipleobjectsV3] -import { DeleteObjectsCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + DeleteObjectsCommand, + S3Client, + S3ServiceException, + waitUntilObjectNotExists, +} from "@aws-sdk/client-s3"; -export const main = async () => { - const command = new DeleteObjectsCommand({ - Bucket: "test-bucket", - Delete: { - Objects: [{ Key: "object1.txt" }, { Key: "object2.txt" }], - }, - }); +/** + * Delete multiple objects from an S3 bucket. 
+ * @param {{ bucketName: string, keys: string[] }} + */ +export const main = async ({ bucketName, keys }) => { + const client = new S3Client({}); try { - const { Deleted } = await client.send(command); + const { Deleted } = await client.send( + new DeleteObjectsCommand({ + Bucket: bucketName, + Delete: { + Objects: keys.map((k) => ({ Key: k })), + }, + }), + ); + for (const key of keys) { + await waitUntilObjectNotExists( + { client }, + { Bucket: bucketName, Key: key }, + ); + } console.log( `Successfully deleted ${Deleted.length} objects from S3 bucket. Deleted objects:`, ); console.log(Deleted.map((d) => ` • ${d.Key}`).join("\n")); - } catch (err) { - console.error(err); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while deleting objects from ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while deleting objects from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.buckets.deletemultipleobjectsV3] -// Invoke main function if this file was run directly. +/** + Call function if run directly. + + Example usage: + node delete-objects.js --bucketName amzn-s3-demo-bucket obj1.txt obj2.txt + */ +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values, positionals } = parseArgs({ + options, + allowPositionals: true, + }); + main({ ...values, keys: positionals }); } diff --git a/javascriptv3/example_code/s3/actions/get-bucket-acl.js b/javascriptv3/example_code/s3/actions/get-bucket-acl.js index b00e9b5e665..c080b4f8968 100644 --- a/javascriptv3/example_code/s3/actions/get-bucket-acl.js +++ b/javascriptv3/example_code/s3/actions/get-bucket-acl.js @@ -1,28 +1,58 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.perms.getBucketAclV3] -import { GetBucketAclCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + GetBucketAclCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -export const main = async () => { - const command = new GetBucketAclCommand({ - Bucket: "test-bucket", - }); +/** + * Retrieves the Access Control List (ACL) for an S3 bucket. + * @param {{ bucketName: string }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + const response = await client.send( + new GetBucketAclCommand({ + Bucket: bucketName, + }), + ); + console.log(`ACL for bucket "${bucketName}":`); + console.log(JSON.stringify(response, null, 2)); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while getting ACL for ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting ACL for ${bucketName}.
${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.perms.getBucketAclV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { parseArgs } from "util"; +import { fileURLToPath } from "url"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/get-bucket-cors.js b/javascriptv3/example_code/s3/actions/get-bucket-cors.js index 4caa8bccf74..3e9f7fe3bdd 100644 --- a/javascriptv3/example_code/s3/actions/get-bucket-cors.js +++ b/javascriptv3/example_code/s3/actions/get-bucket-cors.js @@ -1,38 +1,68 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.cors.getBucketCorsV3] -import { GetBucketCorsCommand, S3Client } from "@aws-sdk/client-s3"; +import { + GetBucketCorsCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -const client = new S3Client({}); - -export const main = async () => { +/** + * Log the Cross-Origin Resource Sharing (CORS) configuration information + * set for the bucket. + * @param {{ bucketName: string }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); const command = new GetBucketCorsCommand({ - Bucket: "test-bucket", + Bucket: bucketName, }); try { const { CORSRules } = await client.send(command); + console.log(JSON.stringify(CORSRules)); CORSRules.forEach((cr, i) => { console.log( `\nCORSRule ${i + 1}`, `\n${"-".repeat(10)}`, - `\nAllowedHeaders: ${cr.AllowedHeaders.join(" ")}`, - `\nAllowedMethods: ${cr.AllowedMethods.join(" ")}`, - `\nAllowedOrigins: ${cr.AllowedOrigins.join(" ")}`, - `\nExposeHeaders: ${cr.ExposeHeaders.join(" ")}`, + `\nAllowedHeaders: ${cr.AllowedHeaders}`, + `\nAllowedMethods: ${cr.AllowedMethods}`, + `\nAllowedOrigins: ${cr.AllowedOrigins}`, + `\nExposeHeaders: ${cr.ExposeHeaders}`, `\nMaxAgeSeconds: ${cr.MaxAgeSeconds}`, ); }); - } catch (err) { - console.error(err); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while getting bucket CORS rules for ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting bucket CORS rules for ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.cors.getBucketCorsV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/get-bucket-policy.js b/javascriptv3/example_code/s3/actions/get-bucket-policy.js index 07aec0deab4..1972c6c0c0e 100644 --- a/javascriptv3/example_code/s3/actions/get-bucket-policy.js +++ b/javascriptv3/example_code/s3/actions/get-bucket-policy.js @@ -1,28 +1,57 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.policy.getBucketPolicyV3] -import { GetBucketPolicyCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + GetBucketPolicyCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -export const main = async () => { - const command = new GetBucketPolicyCommand({ - Bucket: "test-bucket", - }); +/** + * Logs the policy for a specified bucket. + * @param {{ bucketName: string }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const { Policy } = await client.send(command); - console.log(JSON.parse(Policy)); - } catch (err) { - console.error(err); + const { Policy } = await client.send( + new GetBucketPolicyCommand({ + Bucket: bucketName, + }), + ); + console.log(`Policy for "${bucketName}":\n${Policy}`); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while getting policy from ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting policy from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.policy.getBucketPolicyV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/get-bucket-website.js b/javascriptv3/example_code/s3/actions/get-bucket-website.js index 562c6ad7706..fcadc64cd5c 100644 --- a/javascriptv3/example_code/s3/actions/get-bucket-website.js +++ b/javascriptv3/example_code/s3/actions/get-bucket-website.js @@ -1,31 +1,59 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.website.getBucketWebsiteV3] -import { GetBucketWebsiteCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + GetBucketWebsiteCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -export const main = async () => { - const command = new GetBucketWebsiteCommand({ - Bucket: "test-bucket", - }); +/** + * Log the website configuration for a bucket. + * @param {{ bucketName }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const { ErrorDocument, IndexDocument } = await client.send(command); + const response = await client.send( + new GetBucketWebsiteCommand({ + Bucket: bucketName, + }), + ); console.log( - `Your bucket is set up to host a website. It has an error document:`, - `${ErrorDocument.Key}, and an index document: ${IndexDocument.Suffix}.`, + `Your bucket is set up to host a website with the following configuration:\n${JSON.stringify(response, null, 2)}`, ); - } catch (err) { - console.error(err); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchWebsiteConfiguration" + ) { + console.error( + `Error from S3 while getting website configuration for ${bucketName}. 
The bucket isn't configured as a website.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting website configuration for ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.website.getBucketWebsiteV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/get-object-legal-hold.js b/javascriptv3/example_code/s3/actions/get-object-legal-hold.js index ac3f077a5a0..6d68cad5fe1 100644 --- a/javascriptv3/example_code/s3/actions/get-object-legal-hold.js +++ b/javascriptv3/example_code/s3/actions/get-object-legal-hold.js @@ -1,33 +1,63 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; -import { GetObjectLegalHoldCommand, S3Client } from "@aws-sdk/client-s3"; +import { + GetObjectLegalHoldCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; /** - * @param {S3Client} client - * @param {string} bucketName - * @param {string} objectKey + * Get an object's current legal hold status. + * @param {{ bucketName: string, key: string }} */ -export const main = async (client, bucketName, objectKey) => { - const command = new GetObjectLegalHoldCommand({ - Bucket: bucketName, - Key: objectKey, - // Optionally, you can provide additional parameters - // ExpectedBucketOwner: "ACCOUNT_ID", - // RequestPayer: "requester", - // VersionId: "OBJECT_VERSION_ID", - }); +export const main = async ({ bucketName, key }) => { + const client = new S3Client({}); try { - const response = await client.send(command); + const response = await client.send( + new GetObjectLegalHoldCommand({ + Bucket: bucketName, + Key: key, + // Optionally, you can provide additional parameters + // ExpectedBucketOwner: "", + // VersionId: "", + }), + ); console.log(`Legal Hold Status: ${response.LegalHold.Status}`); - } catch (err) { - console.error(err); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while getting legal hold status for ${key} in ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting legal hold status for ${key} in ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; -// Invoke main function if this file was run directly.
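Because the legal hold actions now take plain config objects, they compose easily. A rough sketch, not part of this change set, that places a hold with put-object-legal-hold.js and reads it back with get-object-legal-hold.js; the bucket and key are placeholders, and the bucket is assumed to have been created with Object Lock enabled (a requirement for legal holds):

import { main as putLegalHold } from "./put-object-legal-hold.js";
import { main as getLegalHold } from "./get-object-legal-hold.js";

// Place the hold, then confirm it; the get action should log "Legal Hold Status: ON".
await putLegalHold({
  bucketName: "amzn-s3-demo-bucket",
  objectKey: "file.txt",
  legalHoldStatus: "ON",
});
await getLegalHold({ bucketName: "amzn-s3-demo-bucket", key: "file.txt" });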
+// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(new S3Client(), "amzn-s3-demo-bucket", "OBJECT_KEY"); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + key: { + type: "string", + default: "foo.txt", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/get-object-lock-configuration.js b/javascriptv3/example_code/s3/actions/get-object-lock-configuration.js index eac8a006efc..21df9a04df7 100644 --- a/javascriptv3/example_code/s3/actions/get-object-lock-configuration.js +++ b/javascriptv3/example_code/s3/actions/get-object-lock-configuration.js @@ -1,31 +1,58 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; import { GetObjectLockConfigurationCommand, S3Client, + S3ServiceException, } from "@aws-sdk/client-s3"; /** - * @param {S3Client} client - * @param {string} bucketName + * Gets the Object Lock configuration for a bucket. + * @param {{ bucketName: string }} */ -export const main = async (client, bucketName) => { - const command = new GetObjectLockConfigurationCommand({ - Bucket: bucketName, - // Optionally, you can provide additional parameters - // ExpectedBucketOwner: "ACCOUNT_ID", - }); +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const { ObjectLockConfiguration } = await client.send(command); - console.log(`Object Lock Configuration: ${ObjectLockConfiguration}`); - } catch (err) { - console.error(err); + const { ObjectLockConfiguration } = await client.send( + new GetObjectLockConfigurationCommand({ + Bucket: bucketName, + // Optionally, you can provide additional parameters + // ExpectedBucketOwner: "", + }), + ); + console.log( + `Object Lock Configuration:\n${JSON.stringify(ObjectLockConfiguration)}`, + ); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while getting object lock configuration for ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting object lock configuration for ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(new S3Client(), "BUCKET_NAME"); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/get-object-retention.js b/javascriptv3/example_code/s3/actions/get-object-retention.js index 980ad5b26df..d3016b3d24b 100644 --- a/javascriptv3/example_code/s3/actions/get-object-retention.js +++ b/javascriptv3/example_code/s3/actions/get-object-retention.js @@ -1,32 +1,62 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; -import { GetObjectRetentionCommand, S3Client } from "@aws-sdk/client-s3"; + +import { + GetObjectRetentionCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; /** - * @param {S3Client} client - * @param {string} bucketName - * @param {string} objectKey + * Log the "RetainUntilDate" for an object in an S3 bucket. + * @param {{ bucketName: string, key: string }} */ -export const main = async (client, bucketName, objectKey) => { - const command = new GetObjectRetentionCommand({ - Bucket: bucketName, - Key: objectKey, - // Optionally, you can provide additional parameters - // ExpectedBucketOwner: "ACCOUNT_ID", - // RequestPayer: "requester", - // VersionId: "OBJECT_VERSION_ID", - }); +export const main = async ({ bucketName, key }) => { + const client = new S3Client({}); try { - const { Retention } = await client.send(command); - console.log(`Object Retention Settings: ${Retention.Status}`); - } catch (err) { - console.error(err); + const { Retention } = await client.send( + new GetObjectRetentionCommand({ + Bucket: bucketName, + Key: key, + }), + ); + console.log( + `${key} in ${bucketName} will be retained until ${Retention.RetainUntilDate}`, + ); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchObjectLockConfiguration" + ) { + console.warn( + `The object "${key}" in the bucket "${bucketName}" does not have an ObjectLock configuration.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting object retention settings for "${bucketName}". ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(new S3Client(), "BUCKET_NAME", "OBJECT_KEY"); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + key: { + type: "string", + default: "foo.txt", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/get-object.js b/javascriptv3/example_code/s3/actions/get-object.js index 58dcd8041d5..65505133d2e 100644 --- a/javascriptv3/example_code/s3/actions/get-object.js +++ b/javascriptv3/example_code/s3/actions/get-object.js @@ -1,31 +1,62 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.buckets.getobjectV3] -import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + GetObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -export const main = async () => { - const command = new GetObjectCommand({ - Bucket: "test-bucket", - Key: "hello-s3.txt", - }); +/** + * Get a single object from a specified S3 bucket. + * @param {{ bucketName: string, key: string }} + */ +export const main = async ({ bucketName, key }) => { + const client = new S3Client({}); try { - const response = await client.send(command); + const response = await client.send( + new GetObjectCommand({ + Bucket: bucketName, + Key: key, + }), + ); // The Body object also has 'transformToByteArray' and 'transformToWebStream' methods. 
const str = await response.Body.transformToString(); console.log(str); - } catch (err) { - console.error(err); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while getting object "${key}" from "${bucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting object from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.buckets.getobjectV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + key: { + type: "string", + default: "foo.txt", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/list-buckets.js b/javascriptv3/example_code/s3/actions/list-buckets.js index 42180459f95..45e70178af4 100644 --- a/javascriptv3/example_code/s3/actions/list-buckets.js +++ b/javascriptv3/example_code/s3/actions/list-buckets.js @@ -4,23 +4,48 @@ import { fileURLToPath } from "url"; // snippet-start:[s3.JavaScript.buckets.listBucketsV3] -import { ListBucketsCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + paginateListBuckets, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; +/** + * List the Amazon S3 buckets in your account. + */ export const main = async () => { - const command = new ListBucketsCommand({}); + const client = new S3Client({}); + /** @type {?import('@aws-sdk/client-s3').Owner} */ + let Owner = null; + + /** @type {import('@aws-sdk/client-s3').Bucket[]} */ + const Buckets = []; try { - const { Owner, Buckets } = await client.send(command); + const paginator = paginateListBuckets({ client }, {}); + + for await (const page of paginator) { + if (!Owner) { + Owner = page.Owner; + } + + Buckets.push(...page.Buckets); + } + console.log( `${Owner.DisplayName} owns ${Buckets.length} bucket${ Buckets.length === 1 ? "" : "s" }:`, ); console.log(`${Buckets.map((b) => ` • ${b.Name}`).join("\n")}`); - } catch (err) { - console.error(err); + } catch (caught) { + if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while listing buckets. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.buckets.listBucketsV3] diff --git a/javascriptv3/example_code/s3/actions/list-objects.js b/javascriptv3/example_code/s3/actions/list-objects.js index af0f56931a0..ca2503a015b 100644 --- a/javascriptv3/example_code/s3/actions/list-objects.js +++ b/javascriptv3/example_code/s3/actions/list-objects.js @@ -1,49 +1,70 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.buckets.listObjectsV3] -// snippet-start:[s3.JavaScript.buckets.listManyObjectsV3] import { S3Client, + S3ServiceException, // This command supersedes the ListObjectsCommand and is the recommended way to list objects. 
- ListObjectsV2Command, + paginateListObjectsV2, } from "@aws-sdk/client-s3"; -const client = new S3Client({}); - -export const main = async () => { - const command = new ListObjectsV2Command({ - Bucket: "my-bucket", - // The default and maximum number of keys returned is 1000. This limits it to - // one for demonstration purposes. - MaxKeys: 1, - }); - +/** + * Log all of the object keys in a bucket. + * @param {{ bucketName: string, pageSize: string }} + */ +export const main = async ({ bucketName, pageSize }) => { + const client = new S3Client({}); + /** @type {string[][]} */ + const objects = []; try { - let isTruncated = true; + const paginator = paginateListObjectsV2( + { client, /* Max items per page */ pageSize: parseInt(pageSize) }, + { Bucket: bucketName }, + ); - console.log("Your bucket contains the following objects:\n"); - let contents = ""; - - while (isTruncated) { - const { Contents, IsTruncated, NextContinuationToken } = - await client.send(command); - const contentsList = Contents.map((c) => ` • ${c.Key}`).join("\n"); - contents += contentsList + "\n"; - isTruncated = IsTruncated; - command.input.ContinuationToken = NextContinuationToken; + for await (const page of paginator) { + objects.push(page.Contents.map((o) => o.Key)); + } + objects.forEach((objectList, pageNum) => { + console.log( + `Page ${pageNum + 1}\n------\n${objectList.map((o) => `• ${o}`).join("\n")}\n`, + ); + }); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while listing objects for "${bucketName}". The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while listing objects for "${bucketName}". ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; } - console.log(contents); - } catch (err) { - console.error(err); } }; // snippet-end:[s3.JavaScript.buckets.listObjectsV3] -// snippet-end:[s3.JavaScript.buckets.listManyObjectsV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + pageSize: { + type: "string", + default: "1", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/put-bucket-acl.js b/javascriptv3/example_code/s3/actions/put-bucket-acl.js index 1a3de68d8ee..0cab6373111 100644 --- a/javascriptv3/example_code/s3/actions/put-bucket-acl.js +++ b/javascriptv3/example_code/s3/actions/put-bucket-acl.js @@ -1,21 +1,30 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.perms.putBucketAclV3] -import { PutBucketAclCommand, S3Client } from "@aws-sdk/client-s3"; +import { + PutBucketAclCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -const client = new S3Client({}); - -// Most Amazon S3 use cases don't require the use of access control lists (ACLs). -// We recommend that you disable ACLs, except in unusual circumstances where -// you need to control access for each object individually. -// Consider a policy instead. For more information see https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucket-policies.html. 
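The reworked put-bucket-acl.js below takes the grantee and owner canonical user IDs as arguments instead of hard-coding them. One way to look up your own canonical ID, shown here as a sketch that is not part of this change set, is to read Owner.ID from a ListBuckets response:

import { ListBucketsCommand, S3Client } from "@aws-sdk/client-s3";

// The ListBuckets response reports the caller as Owner; Owner.ID is the canonical user ID.
const { Owner } = await new S3Client({}).send(new ListBucketsCommand({}));
console.log(Owner.ID); // Pass this value as --ownerCanonicalUserId.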
-export const main = async () => { - // Grant a user READ access to a bucket. +/** + * Grant read access to a user using their canonical AWS account ID. + * + * Most Amazon S3 use cases don't require the use of access control lists (ACLs). + * We recommend that you disable ACLs, except in unusual circumstances where + * you need to control access for each object individually. + * Consider a policy instead. For more information see https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucket-policies.html. + * @param {{ bucketName: string, granteeCanonicalUserId: string, ownerCanonicalUserId }} + */ +export const main = async ({ + bucketName, + granteeCanonicalUserId, + ownerCanonicalUserId, +}) => { + const client = new S3Client({}); const command = new PutBucketAclCommand({ - Bucket: "test-bucket", + Bucket: bucketName, AccessControlPolicy: { Grants: [ { @@ -23,30 +32,59 @@ export const main = async () => { // The canonical ID of the user. This ID is an obfuscated form of your AWS account number. // It's unique to Amazon S3 and can't be found elsewhere. // For more information, see https://docs.aws.amazon.com/AmazonS3/latest/userguide/finding-canonical-user-id.html. - ID: "canonical-id-1", + ID: granteeCanonicalUserId, Type: "CanonicalUser", }, // One of FULL_CONTROL | READ | WRITE | READ_ACP | WRITE_ACP // https://docs.aws.amazon.com/AmazonS3/latest/API/API_Grant.html#AmazonS3-Type-Grant-Permission - Permission: "FULL_CONTROL", + Permission: "READ", }, ], Owner: { - ID: "canonical-id-2", + ID: ownerCanonicalUserId, }, }, }); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send(command); + console.log(`Granted READ access to ${bucketName}`); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while setting ACL for bucket ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while setting ACL for bucket ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.perms.putBucketAclV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + granteeCanonicalUserId: { + type: "string", + }, + ownerCanonicalUserId: { + type: "string", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/put-bucket-cors.js b/javascriptv3/example_code/s3/actions/put-bucket-cors.js index d604f553931..2b064390564 100644 --- a/javascriptv3/example_code/s3/actions/put-bucket-cors.js +++ b/javascriptv3/example_code/s3/actions/put-bucket-cors.js @@ -1,49 +1,77 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.v3.cors.putBucketCors] -import { PutBucketCorsCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + PutBucketCorsCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -// By default, Amazon S3 doesn't allow cross-origin requests. 
Use this command -// to explicitly allow cross-origin requests. -export const main = async () => { - const command = new PutBucketCorsCommand({ - Bucket: "test-bucket", - CORSConfiguration: { - CORSRules: [ - { - // Allow all headers to be sent to this bucket. - AllowedHeaders: ["*"], - // Allow only GET and PUT methods to be sent to this bucket. - AllowedMethods: ["GET", "PUT"], - // Allow only requests from the specified origin. - AllowedOrigins: ["https://www.example.com"], - // Allow the entity tag (ETag) header to be returned in the response. The ETag header - // The entity tag represents a specific version of the object. The ETag reflects - // changes only to the contents of an object, not its metadata. - ExposeHeaders: ["ETag"], - // How long the requesting browser should cache the preflight response. After - // this time, the preflight request will have to be made again. - MaxAgeSeconds: 3600, - }, - ], - }, - }); +/** + * Allows cross-origin requests to an S3 bucket by setting the CORS configuration. + * @param {{ bucketName: string }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send( + new PutBucketCorsCommand({ + Bucket: bucketName, + CORSConfiguration: { + CORSRules: [ + { + // Allow all headers to be sent to this bucket. + AllowedHeaders: ["*"], + // Allow only GET and PUT methods to be sent to this bucket. + AllowedMethods: ["GET", "PUT"], + // Allow only requests from the specified origin. + AllowedOrigins: ["https://www.example.com"], + // Allow the entity tag (ETag) header to be returned in the response. The ETag header + // The entity tag represents a specific version of the object. The ETag reflects + // changes only to the contents of an object, not its metadata. + ExposeHeaders: ["ETag"], + // How long the requesting browser should cache the preflight response. After + // this time, the preflight request will have to be made again. + MaxAgeSeconds: 3600, + }, + ], + }, + }), + ); + console.log(`Successfully set CORS rules for bucket: ${bucketName}`); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while setting CORS rules for ${bucketName}. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while setting CORS rules for ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.v3.cors.putBucketCors] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const { values } = parseArgs({ + options: { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }, + }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/put-bucket-policy.js b/javascriptv3/example_code/s3/actions/put-bucket-policy.js index e7c87e1675a..68a9b96f0bc 100644 --- a/javascriptv3/example_code/s3/actions/put-bucket-policy.js +++ b/javascriptv3/example_code/s3/actions/put-bucket-policy.js @@ -1,44 +1,78 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.policy.putBucketPolicyV3] -import { PutBucketPolicyCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + PutBucketPolicyCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -export const main = async () => { +/** + * Grant an IAM role GetObject access to all of the objects + * in the provided bucket. + * @param {{ bucketName: string, iamRoleArn: string }} + */ +export const main = async ({ bucketName, iamRoleArn }) => { + const client = new S3Client({}); const command = new PutBucketPolicyCommand({ + // This is a resource-based policy. For more information on resource-based policies, + // see https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_resource-based. Policy: JSON.stringify({ Version: "2012-10-17", Statement: [ { - Sid: "AllowGetObject", - // Allow this particular user to call GetObject on any object in this bucket. Effect: "Allow", Principal: { - AWS: "arn:aws:iam::ACCOUNT-ID:user/USERNAME", + AWS: iamRoleArn, }, Action: "s3:GetObject", - Resource: "arn:aws:s3:::BUCKET-NAME/*", + Resource: `arn:aws:s3:::${bucketName}/*`, }, ], }), // Apply the preceding policy to this bucket. - Bucket: "BUCKET-NAME", + Bucket: bucketName, }); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send(command); + console.log( + `GetObject access to the bucket "${bucketName}" was granted to the provided IAM role.`, + ); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "MalformedPolicy" + ) { + console.error( + `Error from S3 while setting the bucket policy for the bucket "${bucketName}". The policy was malformed.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while setting the bucket policy for the bucket "${bucketName}". ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.policy.putBucketPolicyV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + iamRoleArn: { + type: "string", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/put-bucket-website.js b/javascriptv3/example_code/s3/actions/put-bucket-website.js index d354ff5ea9f..dff7be3fcac 100644 --- a/javascriptv3/example_code/s3/actions/put-bucket-website.js +++ b/javascriptv3/example_code/s3/actions/put-bucket-website.js @@ -1,40 +1,73 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.website.putBucketWebsiteV3] -import { PutBucketWebsiteCommand, S3Client } from "@aws-sdk/client-s3"; - -const client = new S3Client({}); +import { + PutBucketWebsiteCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -// Set up a bucket as a static website. -// The bucket needs to be publicly accessible. -export const main = async () => { +/** + * Configure an Amazon S3 bucket to serve a static website. + * Website access must also be granted separately. 
For more information + * on setting the permissions for website access, see + * https://docs.aws.amazon.com/AmazonS3/latest/userguide/WebsiteAccessPermissionsReqd.html. + * + * @param {{ bucketName: string }} + */ +export const main = async ({ bucketName }) => { + const client = new S3Client({}); const command = new PutBucketWebsiteCommand({ - Bucket: "test-bucket", + Bucket: bucketName, WebsiteConfiguration: { ErrorDocument: { // The object key name to use when a 4XX class error occurs. Key: "error.html", }, IndexDocument: { - // A suffix that is appended to a request that is for a directory. + // A suffix that is appended to a request when the request is + // for a directory. Suffix: "index.html", }, }, }); try { - const response = await client.send(command); - console.log(response); - } catch (err) { - console.error(err); + await client.send(command); + console.log( + `The bucket "${bucketName}" has been configured as a static website.`, + ); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while configuring the bucket "${bucketName}" as a static website. The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while configuring the bucket "${bucketName}" as a static website. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.website.putBucketWebsiteV3] -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/put-object-legal-hold.js b/javascriptv3/example_code/s3/actions/put-object-legal-hold.js index 2f16988e6d2..4bbcc243b79 100644 --- a/javascriptv3/example_code/s3/actions/put-object-legal-hold.js +++ b/javascriptv3/example_code/s3/actions/put-object-legal-hold.js @@ -1,41 +1,75 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; -import { PutObjectLegalHoldCommand, S3Client } from "@aws-sdk/client-s3"; +import { + PutObjectLegalHoldCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; /** - * @param {S3Client} client - * @param {string} bucketName - * @param {string} objectKey + * Apply a legal hold configuration to the specified object. + * @param {{ bucketName: string, objectKey: string, legalHoldStatus: "ON" | "OFF" }} */ -export const main = async (client, bucketName, objectKey) => { +export const main = async ({ bucketName, objectKey, legalHoldStatus }) => { + if (!["OFF", "ON"].includes(legalHoldStatus.toUpperCase())) { + throw new Error( + "Invalid parameter. legalHoldStatus must be 'ON' or 'OFF'.", + ); + } + + const client = new S3Client({}); const command = new PutObjectLegalHoldCommand({ Bucket: bucketName, Key: objectKey, LegalHold: { // Set the status to 'ON' to place a legal hold on the object. // Set the status to 'OFF' to remove the legal hold. 
- Status: "ON", + Status: legalHoldStatus, }, - // Optionally, you can provide additional parameters - // ChecksumAlgorithm: "ALGORITHM", - // ContentMD5: "MD5_HASH", - // ExpectedBucketOwner: "ACCOUNT_ID", - // RequestPayer: "requester", - // VersionId: "OBJECT_VERSION_ID", }); try { - const response = await client.send(command); + await client.send(command); console.log( - `Object legal hold status: ${response.$metadata.httpStatusCode}`, + `Legal hold status set to "${legalHoldStatus}" for "${objectKey}" in "${bucketName}"`, ); - } catch (err) { - console.error(err); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while modifying legal hold status for "${objectKey}" in "${bucketName}". The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while modifying legal hold status for "${objectKey}" in "${bucketName}". ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; -// Invoke main function if this file was run directly. +// Call function if run directly +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(new S3Client(), "BUCKET_NAME", "OBJECT_KEY"); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + objectKey: { + type: "string", + default: "file.txt", + }, + legalHoldStatus: { + type: "string", + default: "ON", + }, + }; + const { values } = parseArgs({ options }); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/put-object-lock-configuration.js b/javascriptv3/example_code/s3/actions/put-object-lock-configuration.js index 2c1b719cc05..57d0331b532 100644 --- a/javascriptv3/example_code/s3/actions/put-object-lock-configuration.js +++ b/javascriptv3/example_code/s3/actions/put-object-lock-configuration.js @@ -1,39 +1,62 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; import { PutObjectLockConfigurationCommand, S3Client, + S3ServiceException, } from "@aws-sdk/client-s3"; /** - * @param {S3Client} client - * @param {string} bucketName + * Enable S3 Object Lock for an Amazon S3 bucket. + * After you enable Object Lock on a bucket, you can't + * disable Object Lock or suspend versioning for that bucket. + * @param {{ bucketName: string, enabled: boolean }} */ -export const main = async (client, bucketName) => { +export const main = async ({ bucketName }) => { + const client = new S3Client({}); const command = new PutObjectLockConfigurationCommand({ Bucket: bucketName, // The Object Lock configuration that you want to apply to the specified bucket. ObjectLockConfiguration: { ObjectLockEnabled: "Enabled", }, - // Optionally, you can provide additional parameters - // ExpectedBucketOwner: "ACCOUNT_ID", - // RequestPayer: "requester", - // Token: "OPTIONAL_TOKEN", }); try { - const response = await client.send(command); - console.log( - `Object Lock Configuration updated: ${response.$metadata.httpStatusCode}`, - ); - } catch (err) { - console.error(err); + await client.send(command); + console.log(`Object Lock for "${bucketName}" enabled.`); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "NoSuchBucket" + ) { + console.error( + `Error from S3 while modifying the object lock configuration for the bucket "${bucketName}". 
The bucket doesn't exist.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while modifying the object lock configuration for the bucket "${bucketName}". ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; -// Invoke main function if this file was run directly. -if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(new S3Client(), "BUCKET_NAME"); +// Call function if run directly +import { parseArgs } from "util"; +import { isMain } from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + bucketName: { + type: "string", + }, + }; + return parseArgs({ options }); +}; + +if (isMain(import.meta.url)) { + const { values } = loadArgs(); + main(values); } diff --git a/javascriptv3/example_code/s3/actions/put-object.js b/javascriptv3/example_code/s3/actions/put-object.js index ff5e0dc8c2a..bcc4d238a71 100644 --- a/javascriptv3/example_code/s3/actions/put-object.js +++ b/javascriptv3/example_code/s3/actions/put-object.js @@ -1,30 +1,75 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { fileURLToPath } from "url"; - // snippet-start:[s3.JavaScript.buckets.uploadV3] -import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import { readFile } from "fs/promises"; -const client = new S3Client({}); +import { + PutObjectCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; -export const main = async () => { +/** + * Upload a file to an S3 bucket. + * @param {{ bucketName: string, key: string, filePath: string }} + */ +export const main = async ({ bucketName, key, filePath }) => { + const client = new S3Client({}); const command = new PutObjectCommand({ - Bucket: "test-bucket", - Key: "hello-s3.txt", - Body: "Hello S3!", + Bucket: bucketName, + Key: key, + Body: await readFile(filePath), }); try { const response = await client.send(command); console.log(response); - } catch (err) { - console.error(err); + } catch (caught) { + if ( + caught instanceof S3ServiceException && + caught.name === "EntityTooLarge" + ) { + console.error( + `Error from S3 while uploading object to ${bucketName}. \ +The object was too large. To upload objects larger than 5GB, use the S3 console (160GB max) \ +or the multipart upload API (5TB max).`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while uploading object to ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } } }; // snippet-end:[s3.JavaScript.buckets.uploadV3] -// Invoke main function if this file was run directly. +/** + Call function if run directly. 
+ + Example usage: + node put-object.js --bucketName amzn-s3-demo-bucket --key movies.json \ + ../../../../resources/sample_files/movies.json + */ +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + if (process.argv[1] === fileURLToPath(import.meta.url)) { - main(); + const options = { + bucketName: { + type: "string", + default: "amzn-s3-demo-bucket", + }, + key: { + type: "string", + default: "demo-key.txt", + }, + }; + const { values, positionals } = parseArgs({ + options, + allowPositionals: true, + }); + main({ ...values, filePath: positionals[0] }); } diff --git a/javascriptv3/example_code/s3/hello.js b/javascriptv3/example_code/s3/hello.js index 18eec2af9d9..4b8bb769024 100644 --- a/javascriptv3/example_code/s3/hello.js +++ b/javascriptv3/example_code/s3/hello.js @@ -4,19 +4,42 @@ import { fileURLToPath } from "url"; // snippet-start:[javascript.v3.s3.hello] -import { ListBucketsCommand, S3Client } from "@aws-sdk/client-s3"; - -// When no region or credentials are provided, the SDK will use the -// region and credentials from the local AWS config. -const client = new S3Client({}); +import { + paginateListBuckets, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; +/** + * List the S3 buckets in your configured AWS account. + */ export const helloS3 = async () => { - const command = new ListBucketsCommand({}); + // When no region or credentials are provided, the SDK will use the + // region and credentials from the local AWS config. + const client = new S3Client({}); + + try { + /** + * @type { import("@aws-sdk/client-s3").Bucket[] } + */ + const buckets = []; - const { Buckets } = await client.send(command); - console.log("Buckets: "); - console.log(Buckets.map((bucket) => bucket.Name).join("\n")); - return Buckets; + for await (const page of paginateListBuckets({ client }, {})) { + buckets.push(...page.Buckets); + } + console.log("Buckets: "); + console.log(buckets.map((bucket) => bucket.Name).join("\n")); + return buckets; + } catch (caught) { + // ListBuckets does not throw any modeled errors. Any error caught + // here will be something generic like `AccessDenied`. + if (caught instanceof S3ServiceException) { + console.error(`${caught.name}: ${caught.message}`); + } else { + // Something besides S3 failed. 
+ throw caught; + } + } }; // snippet-end:[javascript.v3.s3.hello] diff --git a/javascriptv3/example_code/s3/tests/basic.unit.test.js b/javascriptv3/example_code/s3/tests/basic.unit.test.js index ae054cc8f4d..ab1b593aaa0 100644 --- a/javascriptv3/example_code/s3/tests/basic.unit.test.js +++ b/javascriptv3/example_code/s3/tests/basic.unit.test.js @@ -53,7 +53,7 @@ describe("S3 basic scenario", () => { it("should log a success message", async () => { send.mockResolvedValueOnce({}); - await createBucket("my-bucket"); + await createBucket("amzn-s3-demo-bucket"); expect(logSpy).toHaveBeenCalledWith("Bucket created successfully.\n"); }); @@ -63,12 +63,15 @@ describe("S3 basic scenario", () => { it("should send the files to s3", async () => { send.mockResolvedValueOnce({}); - await uploadFilesToBucket({ bucketName: "my-bucket", folderPath: "" }); + await uploadFilesToBucket({ + bucketName: "amzn-s3-demo-bucket", + folderPath: "", + }); expect(send).toHaveBeenCalledWith( expect.objectContaining({ input: expect.objectContaining({ - Bucket: "my-bucket", + Bucket: "amzn-s3-demo-bucket", Key: "file1.txt", Body: "file content", }), @@ -79,7 +82,10 @@ describe("S3 basic scenario", () => { it("should log the files that were found and uploaded", async () => { send.mockResolvedValueOnce({}); - await uploadFilesToBucket({ bucketName: "my-bucket", folderPath: "" }); + await uploadFilesToBucket({ + bucketName: "amzn-s3-demo-bucket", + folderPath: "", + }); expect(logSpy).toHaveBeenCalledWith("file1.txt uploaded successfully."); }); @@ -91,7 +97,10 @@ describe("S3 basic scenario", () => { Contents: [{ Key: "file1" }, { Key: "file2" }], }); - await listFilesInBucket({ bucketName: "my-bucket", folderPath: "" }); + await listFilesInBucket({ + bucketName: "amzn-s3-demo-bucket", + folderPath: "", + }); expect(logSpy).toHaveBeenCalledWith(` • file1\n • file2\n`); }); @@ -103,13 +112,13 @@ describe("S3 basic scenario", () => { Contents: [{ Key: "file1" }, { Key: "file2" }], }); - await emptyBucket({ bucketName: "my-bucket", folderPath: "" }); + await emptyBucket({ bucketName: "amzn-s3-demo-bucket", folderPath: "" }); expect(send).toHaveBeenNthCalledWith( 2, expect.objectContaining({ input: expect.objectContaining({ - Bucket: "my-bucket", + Bucket: "amzn-s3-demo-bucket", Delete: { Objects: [{ Key: "file1" }, { Key: "file2" }], }, @@ -123,12 +132,12 @@ describe("S3 basic scenario", () => { it("should call 'send' with the provided bucket name", async () => { send.mockResolvedValueOnce({}); - await deleteBucket({ bucketName: "my-bucket" }); + await deleteBucket({ bucketName: "amzn-s3-demo-bucket" }); expect(send).toHaveBeenCalledWith( expect.objectContaining({ input: expect.objectContaining({ - Bucket: "my-bucket", + Bucket: "amzn-s3-demo-bucket", }), }), ); diff --git a/javascriptv3/example_code/s3/tests/copy-object.unit.test.js b/javascriptv3/example_code/s3/tests/copy-object.unit.test.js index 1f439da510b..48be8bcd4e8 100644 --- a/javascriptv3/example_code/s3/tests/copy-object.unit.test.js +++ b/javascriptv3/example_code/s3/tests/copy-object.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { ObjectNotInActiveTierError } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -18,23 +19,32 @@ vi.doMock("@aws-sdk/client-s3", async () => { const { main } = await import("../actions/copy-object.js"); describe("copy-object", () => { + const sourceBucket = "my-old-bucket"; + const sourceKey = "todo.txt"; + const destinationBucket = "my-new-bucket"; + const destinationKey = "updated-todo.txt"; + it("should log the response from the service", async () => { send.mockResolvedValue("foo"); const spy = vi.spyOn(console, "log"); - await main(); + await main({ sourceBucket, sourceKey, destinationBucket, destinationKey }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Successfully copied ${sourceBucket}/${sourceKey} to ${destinationBucket}/${destinationKey}`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error message if the object is in an archive tier", async () => { + send.mockRejectedValue(new ObjectNotInActiveTierError()); const spy = vi.spyOn(console, "error"); - await main(); + await main({ sourceBucket, sourceKey, destinationBucket, destinationKey }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Could not copy ${sourceKey} from ${sourceBucket}. Object is not in the active tier.`, + ); }); }); diff --git a/javascriptv3/example_code/s3/tests/create-bucket.unit.test.js b/javascriptv3/example_code/s3/tests/create-bucket.unit.test.js index 815f7f35dab..3f980330cfb 100644 --- a/javascriptv3/example_code/s3/tests/create-bucket.unit.test.js +++ b/javascriptv3/example_code/s3/tests/create-bucket.unit.test.js @@ -1,6 +1,10 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { + BucketAlreadyExists, + BucketAlreadyOwnedByYou, +} from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -17,24 +21,40 @@ vi.doMock("@aws-sdk/client-s3", async () => { const { main } = await import("../actions/create-bucket.js"); -describe("copy-object", () => { +describe("create-bucket", () => { it("should log the response from the service", async () => { send.mockResolvedValue({ Location: "foo" }); const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "bucket-name" }); expect(spy).toHaveBeenCalledWith("Bucket created with location foo"); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error message if a bucket already exists globally", async () => { + const error = new BucketAlreadyExists(); + send.mockRejectedValue(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName: "bucket-name" }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `The bucket "bucket-name" already exists in another AWS account. 
Bucket names must be globally unique.`, + ); + }); + + it("should log a relevant error message if a bucket already exists in the users AWS account", async () => { + const error = new BucketAlreadyOwnedByYou(); + send.mockRejectedValue(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName: "bucket-name" }); + + expect(spy).toHaveBeenCalledWith( + `The bucket "bucket-name" already exists in this AWS account.`, + ); }); }); diff --git a/javascriptv3/example_code/s3/tests/delete-bucket-policy.unit.test.js b/javascriptv3/example_code/s3/tests/delete-bucket-policy.unit.test.js index a97849365c9..35e0b1ff3e7 100644 --- a/javascriptv3/example_code/s3/tests/delete-bucket-policy.unit.test.js +++ b/javascriptv3/example_code/s3/tests/delete-bucket-policy.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -19,22 +20,51 @@ const { main } = await import("../actions/delete-bucket-policy.js"); describe("delete-bucket-policy", () => { it("should log the response from the service", async () => { - send.mockResolvedValue("foo"); + send.mockResolvedValueOnce("foo"); const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket" }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Bucket policy deleted from "amzn-s3-demo-bucket".`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while deleting policy from ${bucketName}. The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while deleting policy from ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/delete-bucket-website.unit.test.js b/javascriptv3/example_code/s3/tests/delete-bucket-website.unit.test.js index 5206d2ed050..88ff6a123ff 100644 --- a/javascriptv3/example_code/s3/tests/delete-bucket-website.unit.test.js +++ b/javascriptv3/example_code/s3/tests/delete-bucket-website.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -18,23 +19,42 @@ vi.doMock("@aws-sdk/client-s3", async () => { const { main } = await import("../actions/delete-bucket-website.js"); describe("delete-bucket-website", () => { - it("should log the response from the service", async () => { - send.mockResolvedValue("foo"); + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); - const spy = vi.spyOn(console, "log"); + const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName, keys: ["foo"] }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while removing website configuration from ${bucketName}. The bucket doesn't exist.`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName, keys: ["foo"] }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while removing website configuration from ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); - expect(spy).toHaveBeenCalledWith("foo"); + await expect(() => + main({ bucketName, keys: ["foo"] }), + ).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/delete-object.unit.test.js b/javascriptv3/example_code/s3/tests/delete-object.unit.test.js index 7cccda19bf5..fc47cd34168 100644 --- a/javascriptv3/example_code/s3/tests/delete-object.unit.test.js +++ b/javascriptv3/example_code/s3/tests/delete-object.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -9,6 +10,7 @@ vi.doMock("@aws-sdk/client-s3", async () => { const actual = await vi.importActual("@aws-sdk/client-s3"); return { ...actual, + waitUntilObjectNotExists: () => Promise.resolve(), S3Client: class { send = send; }, @@ -19,22 +21,51 @@ const { main } = await import("../actions/delete-object.js"); describe("delete-object", () => { it("should log the response from the service", async () => { - send.mockResolvedValue("foo"); + send.mockResolvedValueOnce(); const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket", key: "my-object" }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `The object "my-object" from bucket "amzn-s3-demo-bucket" was deleted, or it didn't exist.`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while deleting object from ${bucketName}. The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while deleting object from ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/delete-objects.unit.test.js b/javascriptv3/example_code/s3/tests/delete-objects.unit.test.js index 5a6abcaac10..0fe3e88777a 100644 --- a/javascriptv3/example_code/s3/tests/delete-objects.unit.test.js +++ b/javascriptv3/example_code/s3/tests/delete-objects.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -9,6 +10,7 @@ vi.doMock("@aws-sdk/client-s3", async () => { const actual = await vi.importActual("@aws-sdk/client-s3"); return { ...actual, + waitUntilObjectNotExists: () => Promise.resolve(), S3Client: class { send = send; }, @@ -23,7 +25,7 @@ describe("delete-objects", () => { const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket", keys: ["foo", "bar"] }); expect(spy).toHaveBeenNthCalledWith( 1, @@ -32,13 +34,42 @@ describe("delete-objects", () => { expect(spy).toHaveBeenNthCalledWith(2, ` • foo\n • bar`); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName, keys: ["foo"] }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while deleting objects from ${bucketName}. The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName, keys: ["foo"] }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while deleting objects from ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => + main({ bucketName, keys: ["foo"] }), + ).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/get-bucket-acl.unit.test.js b/javascriptv3/example_code/s3/tests/get-bucket-acl.unit.test.js new file mode 100644 index 00000000000..25c12469133 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/get-bucket-acl.unit.test.js @@ -0,0 +1,73 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { S3ServiceException } from "@aws-sdk/client-s3"; +import { describe, it, expect, vi } from "vitest"; + +const send = vi.fn(); + +vi.doMock("@aws-sdk/client-s3", async () => { + const actual = await vi.importActual("@aws-sdk/client-s3"); + return { + ...actual, + S3Client: class { + send = send; + }, + }; +}); + +const { main } = await import("../actions/get-bucket-acl.js"); + +describe("get-bucket-acl", () => { + it("should log the response from the service", async () => { + const response = { + Owner: { DisplayName: "Alice", ID: "1234" }, + Grants: [], + }; + send.mockResolvedValue(response); + + const spy = vi.spyOn(console, "log"); + + await main({ bucketName: "amzn-s3-demo-bucket" }); + + expect(spy).toHaveBeenCalledWith(`ACL for bucket "amzn-s3-demo-bucket":`); + expect(spy).toHaveBeenCalledWith(JSON.stringify(response, null, 2)); + }); + + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting ACL for ${bucketName}. The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting ACL for ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName })).rejects.toBeTruthy(); + }); +}); diff --git a/javascriptv3/example_code/s3/tests/get-bucket-cors.unit.test.js b/javascriptv3/example_code/s3/tests/get-bucket-cors.unit.test.js index 8f4a89f4969..b00e2ceea44 100644 --- a/javascriptv3/example_code/s3/tests/get-bucket-cors.unit.test.js +++ b/javascriptv3/example_code/s3/tests/get-bucket-cors.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -33,7 +34,7 @@ describe("get-bucket-cors", () => { const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket" }); expect(spy).toHaveBeenCalledWith( `\nCORSRule 1`, @@ -46,13 +47,40 @@ describe("get-bucket-cors", () => { ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting bucket CORS rules for ${bucketName}. 
The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting bucket CORS rules for ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/get-bucket-policy.unit.test.js b/javascriptv3/example_code/s3/tests/get-bucket-policy.unit.test.js index 621093f1cc5..ede096c8096 100644 --- a/javascriptv3/example_code/s3/tests/get-bucket-policy.unit.test.js +++ b/javascriptv3/example_code/s3/tests/get-bucket-policy.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -23,18 +24,47 @@ describe("get-bucket-policy", () => { const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket" }); - expect(spy).toHaveBeenCalledWith({ foo: "bar" }); + expect(spy).toHaveBeenCalledWith( + `Policy for "amzn-s3-demo-bucket":\n{ "foo": "bar" }`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting policy from ${bucketName}. The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting policy from ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/get-bucket-website.unit.test.js b/javascriptv3/example_code/s3/tests/get-bucket-website.unit.test.js index 2cb83064155..c426758f853 100644 --- a/javascriptv3/example_code/s3/tests/get-bucket-website.unit.test.js +++ b/javascriptv3/example_code/s3/tests/get-bucket-website.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -19,28 +20,56 @@ const { main } = await import("../actions/get-bucket-website.js"); describe("get-bucket-website", () => { it("should log the response from the service", async () => { - send.mockResolvedValue({ + const mockResponse = { IndexDocument: { Suffix: "foo" }, ErrorDocument: { Key: "bar" }, - }); + }; + send.mockResolvedValue(mockResponse); + const bucketName = "amzn-s3-demo-bucket"; const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName }); expect(spy).toHaveBeenCalledWith( - "Your bucket is set up to host a website. It has an error document:", - "bar, and an index document: foo.", + `Your bucket is set up to host a website with the following configuration:\n${JSON.stringify(mockResponse, null, 2)}`, ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error when the bucket isn't configured as a website.", async () => { + const error = new S3ServiceException("Not such website configuration."); + error.name = "NoSuchWebsiteConfiguration"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting website configuration for ${bucketName}. The bucket isn't configured as a website.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting website configuration for ${bucketName}. 
${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/get-object-legal-hold.integration.test.js b/javascriptv3/example_code/s3/tests/get-object-legal-hold.integration.test.js index 373cf4b8740..46c664a3afe 100644 --- a/javascriptv3/example_code/s3/tests/get-object-legal-hold.integration.test.js +++ b/javascriptv3/example_code/s3/tests/get-object-legal-hold.integration.test.js @@ -7,7 +7,6 @@ import { CreateBucketCommand, PutObjectLegalHoldCommand, PutObjectCommand, - GetObjectLegalHoldCommand, } from "@aws-sdk/client-s3"; import { getUniqueName } from "@aws-doc-sdk-examples/lib/utils/util-string.js"; import { main as getObjectLegalHold } from "../actions/get-object-legal-hold.js"; @@ -24,7 +23,6 @@ describe("get-object-legal-hold.js Integration Test", () => { }); it("should get object legal hold", async () => { - // Setup await client.send( new CreateBucketCommand({ Bucket: bucketName, @@ -46,15 +44,8 @@ describe("get-object-legal-hold.js Integration Test", () => { }), ); - // Execute - const spy = vi.spyOn(console, "error"); - await getObjectLegalHold(client, bucketName, objectKey); - expect(spy).not.toHaveBeenCalled(); - - // Verify - const { LegalHold } = await client.send( - new GetObjectLegalHoldCommand({ Bucket: bucketName, Key: objectKey }), - ); - expect(LegalHold.Status).toBe("ON"); + const spy = vi.spyOn(console, "log"); + await getObjectLegalHold({ bucketName, key: objectKey }); + expect(spy).toHaveBeenCalledWith("Legal Hold Status: ON"); }); }); diff --git a/javascriptv3/example_code/s3/tests/get-object-lock-configuration.integration.test.js b/javascriptv3/example_code/s3/tests/get-object-lock-configuration.integration.test.js index d9fdcf9aa1e..2bb741806fc 100644 --- a/javascriptv3/example_code/s3/tests/get-object-lock-configuration.integration.test.js +++ b/javascriptv3/example_code/s3/tests/get-object-lock-configuration.integration.test.js @@ -15,7 +15,7 @@ import { getUniqueName } from "@aws-doc-sdk-examples/lib/utils/util-string.js"; import { legallyEmptyAndDeleteBuckets } from "../libs/s3Utils.js"; const client = new S3Client({}); -const bucketName = getUniqueName("test-bucket"); +const bucketName = getUniqueName("code-example"); describe("get-object-lock-configuration.js Integration Test", () => { afterAll(async () => { @@ -44,7 +44,7 @@ describe("get-object-lock-configuration.js Integration Test", () => { // Execute const spy = vi.spyOn(console, "error"); - await getObjectLockConfiguration(client, bucketName); + await getObjectLockConfiguration({ bucketName }); expect(spy).not.toHaveBeenCalled(); }); }); diff --git a/javascriptv3/example_code/s3/tests/get-object-lock-configuration.unit.test.js b/javascriptv3/example_code/s3/tests/get-object-lock-configuration.unit.test.js new file mode 100644 index 00000000000..0f443d3cc96 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/get-object-lock-configuration.unit.test.js @@ -0,0 +1,72 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { S3ServiceException } from "@aws-sdk/client-s3"; +import { describe, it, expect, vi } from "vitest"; + +const send = vi.fn(); + +vi.doMock("@aws-sdk/client-s3", async () => { + const actual = await vi.importActual("@aws-sdk/client-s3"); + return { + ...actual, + S3Client: class { + send = send; + }, + }; +}); + +const { main } = await import("../actions/get-object-lock-configuration.js"); + +describe("get-object-lock-configuration", () => { + it("should log the response from the service", async () => { + send.mockResolvedValue({ ObjectLockConfiguration: "foo" }); + + const spy = vi.spyOn(console, "log"); + + await main({ bucketName: "amzn-s3-demo-bucket", key: "foo" }); + + expect(spy).toHaveBeenCalledWith( + `Object Lock Configuration:\n${JSON.stringify("foo")}`, + ); + }); + + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName, keys: ["foo"] }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting object lock configuration for ${bucketName}. The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName, keys: ["foo"] }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting object lock configuration for ${bucketName}. 
${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => + main({ bucketName, keys: ["foo"] }), + ).rejects.toBeTruthy(); + }); +}); diff --git a/javascriptv3/example_code/s3/tests/get-object-retention.integration.test.js b/javascriptv3/example_code/s3/tests/get-object-retention.integration.test.js index 9c68b6ba63c..af3dbd698f9 100644 --- a/javascriptv3/example_code/s3/tests/get-object-retention.integration.test.js +++ b/javascriptv3/example_code/s3/tests/get-object-retention.integration.test.js @@ -7,8 +7,8 @@ import { PutObjectCommand, CreateBucketCommand, PutObjectRetentionCommand, - GetObjectRetentionCommand, } from "@aws-sdk/client-s3"; + import { getUniqueName } from "@aws-doc-sdk-examples/lib/utils/util-string.js"; import { main as getObjectRetention } from "../actions/get-object-retention.js"; import { legallyEmptyAndDeleteBuckets } from "../libs/s3Utils.js"; @@ -24,7 +24,9 @@ describe("get-object-retention.js Integration Test", () => { }); it("should get the object retention settings of an object", async () => { - // Setup + const retainUntilDate = new Date( + new Date().getTime() + 24 * 60 * 60 * 1000, + ); await client.send( new CreateBucketCommand({ Bucket: bucketName, @@ -44,21 +46,15 @@ describe("get-object-retention.js Integration Test", () => { Key: objectKey, Retention: { Mode: "GOVERNANCE", - RetainUntilDate: new Date(new Date().getTime() + 24 * 60 * 60 * 1000), + RetainUntilDate: retainUntilDate, }, }), ); - // Execute - const spy = vi.spyOn(console, "error"); - await getObjectRetention(client, bucketName, objectKey); - expect(spy).not.toHaveBeenCalled(); - - // Verify - const { Retention } = await client.send( - new GetObjectRetentionCommand({ Bucket: bucketName, Key: objectKey }), + const spy = vi.spyOn(console, "log"); + await getObjectRetention({ bucketName, key: objectKey }); + expect(spy).toHaveBeenCalledWith( + `${objectKey} in ${bucketName} will be retained until ${retainUntilDate}`, ); - expect(Retention.Mode).toBe("GOVERNANCE"); - expect(Retention.RetainUntilDate).toBeDefined(); }); }); diff --git a/javascriptv3/example_code/s3/tests/get-object.unit.test.js b/javascriptv3/example_code/s3/tests/get-object.unit.test.js index 3f39b54d610..46bca611058 100644 --- a/javascriptv3/example_code/s3/tests/get-object.unit.test.js +++ b/javascriptv3/example_code/s3/tests/get-object.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { NoSuchKey, S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -29,18 +30,46 @@ describe("get-object", () => { const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket", key: "foo" }); expect(spy).toHaveBeenCalledWith("foo"); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error message when the object key doesn't exist in the bucket", async () => { + const bucketName = "amzn-s3-demo-bucket"; + const key = "foo"; + send.mockRejectedValueOnce(new NoSuchKey()); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName, key }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting object "${key}" from "${bucketName}". 
No such key exists.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + const key = "foo"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName, key }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while getting object from ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + const key = "foo"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName, key })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/list-buckets.unit.test.js b/javascriptv3/example_code/s3/tests/list-buckets.unit.test.js index b1829898f8c..502c68fb22b 100644 --- a/javascriptv3/example_code/s3/tests/list-buckets.unit.test.js +++ b/javascriptv3/example_code/s3/tests/list-buckets.unit.test.js @@ -1,17 +1,21 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; -const send = vi.fn(); +const paginateListBuckets = vi.fn().mockImplementation(async function* () { + yield { + Buckets: [{ Name: "amzn-s3-demo-bucket" }], + Owner: { DisplayName: "bar" }, + }; +}); vi.doMock("@aws-sdk/client-s3", async () => { const actual = await vi.importActual("@aws-sdk/client-s3"); return { ...actual, - S3Client: class { - send = send; - }, + paginateListBuckets, }; }); @@ -19,26 +23,45 @@ const { main } = await import("../actions/list-buckets.js"); describe("list-buckets", () => { it("should log the response from the service", async () => { - send.mockResolvedValue({ - Buckets: [{ Name: "foo" }], - Owner: { DisplayName: "bar" }, - }); - const spy = vi.spyOn(console, "log"); await main(); expect(spy).toHaveBeenNthCalledWith(1, "bar owns 1 bucket:"); - expect(spy).toHaveBeenNthCalledWith(2, " • foo"); + expect(spy).toHaveBeenNthCalledWith(2, " • amzn-s3-demo-bucket"); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + paginateListBuckets.mockImplementationOnce( + // eslint-disable-next-line require-yield + async function* () { + throw error; + }, + ); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName, keys: ["foo"] }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while listing buckets. 
${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + paginateListBuckets.mockImplementationOnce( + // eslint-disable-next-line require-yield + async function* () { + throw new Error(); + }, + ); - expect(spy).toHaveBeenCalledWith("foo"); + await expect(() => + main({ bucketName, keys: ["foo"] }), + ).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/list-objects.unit.test.js b/javascriptv3/example_code/s3/tests/list-objects.unit.test.js index 7e872020ccb..84a63a1c68e 100644 --- a/javascriptv3/example_code/s3/tests/list-objects.unit.test.js +++ b/javascriptv3/example_code/s3/tests/list-objects.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { S3Client, S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -9,7 +10,7 @@ vi.doMock("@aws-sdk/client-s3", async () => { const actual = await vi.importActual("@aws-sdk/client-s3"); return { ...actual, - S3Client: class { + S3Client: class extends S3Client { send = send; }, }; @@ -23,22 +24,47 @@ describe("list-objects", () => { const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket", pageSize: 1 }); - expect(spy).toHaveBeenNthCalledWith( - 1, - "Your bucket contains the following objects:\n", + expect(spy).toHaveBeenCalledWith(`Page 1\n------\n• foo\n`); + }); + + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName, pageSize: "1" }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while listing objects for "${bucketName}". The bucket doesn't exist.`, ); - expect(spy).toHaveBeenNthCalledWith(2, " • foo\n"); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName, keys: ["foo"] }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while listing objects for "${bucketName}". ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); - expect(spy).toHaveBeenCalledWith("foo"); + await expect(() => + main({ bucketName, pageSize: "1" }), + ).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/put-bucket-acl.unit.test.js b/javascriptv3/example_code/s3/tests/put-bucket-acl.unit.test.js index 6a80b2285e2..3ede7c1d4e5 100644 --- a/javascriptv3/example_code/s3/tests/put-bucket-acl.unit.test.js +++ b/javascriptv3/example_code/s3/tests/put-bucket-acl.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -18,23 +19,71 @@ vi.doMock("@aws-sdk/client-s3", async () => { const { main } = await import("../actions/put-bucket-acl.js"); describe("put-bucket-acl", () => { - it("should log the response from the service", async () => { - send.mockResolvedValue("foo"); + const bucketName = "amzn-s3-demo-bucket"; + const granteeCanonicalUserId = "canonical-id-1"; + const ownerCanonicalUserId = "canonical-id-2"; + + it("should log the successful response from the service", async () => { + send.mockResolvedValue({ $metadata: { httpStatusCode: 200 } }); const spy = vi.spyOn(console, "log"); - await main(); + await main({ + bucketName, + granteeCanonicalUserId, + ownerCanonicalUserId, + }); + + expect(spy).toHaveBeenCalledWith(`Granted READ access to ${bucketName}`); + }); + + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ + bucketName, + granteeCanonicalUserId, + ownerCanonicalUserId, + }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while setting ACL for bucket ${bucketName}. The bucket doesn't exist.`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException({ + message: "Some S3 service exception", + }); + error.name = "ServiceException"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ + bucketName, + granteeCanonicalUserId, + ownerCanonicalUserId, + }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while setting ACL for bucket ${bucketName}. ServiceException: Some S3 service exception`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + send.mockRejectedValueOnce(new Error()); - expect(spy).toHaveBeenCalledWith("foo"); + await expect(() => + main({ + bucketName, + granteeCanonicalUserId, + ownerCanonicalUserId, + }), + ).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/put-bucket-cors.unit.test.js b/javascriptv3/example_code/s3/tests/put-bucket-cors.unit.test.js index c3f5ba0d3ea..985045d691d 100644 --- a/javascriptv3/example_code/s3/tests/put-bucket-cors.unit.test.js +++ b/javascriptv3/example_code/s3/tests/put-bucket-cors.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -18,23 +19,52 @@ vi.doMock("@aws-sdk/client-s3", async () => { const { main } = await import("../actions/put-bucket-cors.js"); describe("put-bucket-cors", () => { - it("should log the response from the service", async () => { - send.mockResolvedValue("foo"); + it("should log a success message when the CORS rules are set", async () => { + send.mockResolvedValue({}); const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName: "amzn-s3-demo-bucket" }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Successfully set CORS rules for bucket: amzn-s3-demo-bucket`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "NoSuchBucket"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while setting CORS rules for ${bucketName}. The bucket doesn't exist.`, + ); + }); + + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while setting CORS rules for ${bucketName}. ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); + + await expect(() => main({ bucketName })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/put-bucket-policy.unit.test.js b/javascriptv3/example_code/s3/tests/put-bucket-policy.unit.test.js index 46cf61082c3..69a368f5c8d 100644 --- a/javascriptv3/example_code/s3/tests/put-bucket-policy.unit.test.js +++ b/javascriptv3/example_code/s3/tests/put-bucket-policy.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { S3ServiceException } from "@aws-sdk/client-s3"; import { describe, it, expect, vi } from "vitest"; const send = vi.fn(); @@ -20,21 +21,50 @@ const { main } = await import("../actions/put-bucket-policy.js"); describe("put-bucket-policy", () => { it("should log the response from the service", async () => { send.mockResolvedValue("foo"); - + const bucketName = "amzn-s3-demo-bucket"; const spy = vi.spyOn(console, "log"); - await main(); + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `GetObject access to the bucket "${bucketName}" was granted to the provided IAM role.`, + ); + }); + + it("should log a relevant error when the bucket doesn't exist", async () => { + const error = new S3ServiceException("The specified bucket does not exist"); + error.name = "MalformedPolicy"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); + + const spy = vi.spyOn(console, "error"); + + await main({ bucketName }); - expect(spy).toHaveBeenCalledWith("foo"); + expect(spy).toHaveBeenCalledWith( + `Error from S3 while setting the bucket policy for the bucket "${bucketName}". The policy was malformed.`, + ); }); - it("should log errors", async () => { - send.mockRejectedValue("foo"); + it("should indicate a failure came from S3 when the error isn't generic", async () => { + const error = new S3ServiceException("Some S3 service exception."); + error.name = "ServiceException"; + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(error); const spy = vi.spyOn(console, "error"); - await main(); + await main({ bucketName }); + + expect(spy).toHaveBeenCalledWith( + `Error from S3 while setting the bucket policy for the bucket "${bucketName}". ${error.name}: ${error.message}`, + ); + }); + + it("should throw errors that are not S3 specific", async () => { + const bucketName = "amzn-s3-demo-bucket"; + send.mockRejectedValueOnce(new Error()); - expect(spy).toHaveBeenCalledWith("foo"); + await expect(() => main({ bucketName })).rejects.toBeTruthy(); }); }); diff --git a/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js b/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js index ae2bd12b75f..05ebdb2d999 100644 --- a/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js +++ b/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
diff --git a/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js b/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js
index ae2bd12b75f..05ebdb2d999 100644
--- a/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js
+++ b/javascriptv3/example_code/s3/tests/put-bucket-website.unit.test.js
@@ -1,6 +1,7 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
+import { S3ServiceException } from "@aws-sdk/client-s3";
import { describe, it, expect, vi } from "vitest";

const send = vi.fn();
@@ -20,21 +21,50 @@ const { main } = await import("../actions/put-bucket-website.js");
describe("put-bucket-website", () => {
  it("should log the response from the service", async () => {
    send.mockResolvedValue("foo");
-
+    const bucketName = "amzn-s3-demo-bucket";
    const spy = vi.spyOn(console, "log");

-    await main();
+    await main({ bucketName });
+
+    expect(spy).toHaveBeenCalledWith(
+      `The bucket "${bucketName}" has been configured as a static website.`,
+    );
+  });
+
+  it("should log a relevant error when the bucket doesn't exist", async () => {
+    const error = new S3ServiceException("The specified bucket does not exist");
+    error.name = "NoSuchBucket";
+    const bucketName = "amzn-s3-demo-bucket";
+    send.mockRejectedValueOnce(error);
+
+    const spy = vi.spyOn(console, "error");
+
+    await main({ bucketName });

-    expect(spy).toHaveBeenCalledWith("foo");
+    expect(spy).toHaveBeenCalledWith(
+      `Error from S3 while configuring the bucket "${bucketName}" as a static website. The bucket doesn't exist.`,
+    );
  });

-  it("should log errors", async () => {
-    send.mockRejectedValue("foo");
+  it("should indicate a failure came from S3 when the error isn't generic", async () => {
+    const error = new S3ServiceException("Some S3 service exception.");
+    error.name = "ServiceException";
+    const bucketName = "amzn-s3-demo-bucket";
+    send.mockRejectedValueOnce(error);

    const spy = vi.spyOn(console, "error");

-    await main();
+    await main({ bucketName });
+
+    expect(spy).toHaveBeenCalledWith(
+      `Error from S3 while configuring the bucket "${bucketName}" as a static website. ${error.name}: ${error.message}`,
+    );
+  });
+
+  it("should throw errors that are not S3 specific", async () => {
+    const bucketName = "amzn-s3-demo-bucket";
+    send.mockRejectedValueOnce(new Error());

-    expect(spy).toHaveBeenCalledWith("foo");
+    await expect(() => main({ bucketName })).rejects.toBeTruthy();
  });
});
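
Likewise, the static-website tests imply a `main({ bucketName })` built around `PutBucketWebsiteCommand`. Here is a minimal hypothetical sketch of the command configuration; the index and error document names are assumptions, and the error branches would again mirror the CORS sketch, keyed on "NoSuchBucket".

import { S3Client, PutBucketWebsiteCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

// Hypothetical helper: turn a bucket into a static website host.
export const configureStaticWebsite = async (bucketName) => {
  await client.send(
    new PutBucketWebsiteCommand({
      Bucket: bucketName,
      WebsiteConfiguration: {
        // Document names are assumptions; any keys present in the bucket would work.
        IndexDocument: { Suffix: "index.html" },
        ErrorDocument: { Key: "error.html" },
      },
    }),
  );
  console.log(`The bucket "${bucketName}" has been configured as a static website.`);
};
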
diff --git a/javascriptv3/example_code/s3/tests/put-object-legal-hold.integration.test.js b/javascriptv3/example_code/s3/tests/put-object-legal-hold.integration.test.js
index 98b32bffc81..2ca7989ce2f 100644
--- a/javascriptv3/example_code/s3/tests/put-object-legal-hold.integration.test.js
+++ b/javascriptv3/example_code/s3/tests/put-object-legal-hold.integration.test.js
@@ -13,8 +13,8 @@ import { main as putObjectLegalHold } from "../actions/put-object-legal-hold.js"
import { legallyEmptyAndDeleteBuckets } from "../libs/s3Utils.js";

const client = new S3Client({});
-const bucketName = getUniqueName("test-bucket");
-const objectKey = "test-object";
+const bucketName = getUniqueName("code-example");
+const objectKey = "file.txt";

describe("put-object-legal-hold.js Integration Test", () => {
  afterAll(async () => {
@@ -34,13 +34,13 @@ describe("put-object-legal-hold.js Integration Test", () => {
      new PutObjectCommand({
        Bucket: bucketName,
        Key: objectKey,
-        Body: "test content",
+        Body: "content",
      }),
    );

    // Execute
    const spy = vi.spyOn(console, "error");
-    await putObjectLegalHold(client, bucketName, objectKey);
+    await putObjectLegalHold({ bucketName, objectKey, legalHoldStatus: "ON" });
    expect(spy).not.toHaveBeenCalled();

    // Verify
diff --git a/javascriptv3/example_code/s3/tests/put-object-lock-configuration.integration.test.js b/javascriptv3/example_code/s3/tests/put-object-lock-configuration.integration.test.js
index 90d316622f8..818150d8197 100644
--- a/javascriptv3/example_code/s3/tests/put-object-lock-configuration.integration.test.js
+++ b/javascriptv3/example_code/s3/tests/put-object-lock-configuration.integration.test.js
@@ -15,7 +15,9 @@ import { getUniqueName } from "@aws-doc-sdk-examples/lib/utils/util-string.js";
import { legallyEmptyAndDeleteBuckets } from "../libs/s3Utils.js";

const client = new S3Client({});
-const bucketName = getUniqueName("test-bucket");
+const bucketName = getUniqueName(
+  process.env["S3_BUCKET_NAME"] || "object-lock-integ",
+);

describe("put-object-lock-configuration.js Integration Test", () => {
  afterAll(async () => {
@@ -38,7 +40,7 @@ describe("put-object-lock-configuration.js Integration Test", () => {

    // Execute
    const spy = vi.spyOn(console, "error");
-    await putObjectLockConfiguration(client, bucketName);
+    await putObjectLockConfiguration({ bucketName });
    expect(spy).not.toHaveBeenCalled();

    // Verify
diff --git a/javascriptv3/example_code/s3/tests/put-object-retention.integration.test.js b/javascriptv3/example_code/s3/tests/put-object-retention.integration.test.js
index 33370c77a1d..1c3fee6ebba 100644
--- a/javascriptv3/example_code/s3/tests/put-object-retention.integration.test.js
+++ b/javascriptv3/example_code/s3/tests/put-object-retention.integration.test.js
@@ -13,7 +13,7 @@ import { getUniqueName } from "@aws-doc-sdk-examples/lib/utils/util-string.js";
import { legallyEmptyAndDeleteBuckets } from "../libs/s3Utils.js";

const client = new S3Client({});
-const bucketName = getUniqueName("test-bucket");
+const bucketName = getUniqueName("code-example");
const objectKey = "test-object";

describe("put-object-retention.js Integration Test", () => {
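
The three integration tests above now call the actions with a single options object instead of positional `(client, bucketName, objectKey)` arguments. As one example, the call `putObjectLegalHold({ bucketName, objectKey, legalHoldStatus: "ON" })` suggests a signature along these lines for `actions/put-object-legal-hold.js`; the default status, the log wording, and the error handling are assumptions, since the tests only check that `console.error` is never called on the happy path.

import { S3Client, PutObjectLegalHoldCommand } from "@aws-sdk/client-s3";

const client = new S3Client({});

export const main = async ({ bucketName, objectKey, legalHoldStatus = "ON" }) => {
  try {
    await client.send(
      new PutObjectLegalHoldCommand({
        Bucket: bucketName,
        Key: objectKey,
        LegalHold: { Status: legalHoldStatus }, // "ON" or "OFF"
      }),
    );
    console.log(`Legal hold "${legalHoldStatus}" applied to ${objectKey}.`);
  } catch (caught) {
    // Illustrative handling; the integration test never exercises this branch.
    console.error(`Failed to set legal hold on ${objectKey}. ${caught.message}`);
  }
};
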
diff --git a/javascriptv3/example_code/s3/tests/put-object.unit.test.js b/javascriptv3/example_code/s3/tests/put-object.unit.test.js
index 3744d0251cf..0cf8bccc3f7 100644
--- a/javascriptv3/example_code/s3/tests/put-object.unit.test.js
+++ b/javascriptv3/example_code/s3/tests/put-object.unit.test.js
@@ -1,6 +1,7 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
+import { S3ServiceException } from "@aws-sdk/client-s3";
import { describe, it, expect, vi } from "vitest";

const send = vi.fn();
@@ -15,6 +16,12 @@ vi.doMock("@aws-sdk/client-s3", async () => {
  };
});

+vi.doMock("fs/promises", () => {
+  return {
+    readFile: () => Promise.resolve(Buffer.from("buffer")),
+  };
+});
+
const { main } = await import("../actions/put-object.js");

describe("put-object", () => {
@@ -23,18 +30,61 @@

    const spy = vi.spyOn(console, "log");

-    await main();
+    await main({
+      bucketName: "amzn-s3-demo-bucket",
+      key: "movies.json",
+      filePath: "path/to/movies.json",
+    });

    expect(spy).toHaveBeenCalledWith("foo");
  });

-  it("should log errors", async () => {
-    send.mockRejectedValue("foo");
+  it("should log a relevant error when the object is too large", async () => {
+    const error = new S3ServiceException("The object exceeded the maximum allowed size");
+    error.name = "EntityTooLarge";
+    const bucketName = "amzn-s3-demo-bucket";
+    send.mockRejectedValueOnce(error);

    const spy = vi.spyOn(console, "error");

-    await main();
+    await main({
+      bucketName,
+      key: "movies.json",
+      filePath: "path/to/movies.json",
+    });

-    expect(spy).toHaveBeenCalledWith("foo");
+    expect(spy).toHaveBeenCalledWith(
+      `Error from S3 while uploading object to ${bucketName}. \
+The object was too large. To upload objects larger than 5GB, use the S3 console (160GB max) \
+or the multipart upload API (5TB max).`,
+    );
+  });
+
+  it("should indicate a failure came from S3 when the error isn't generic", async () => {
+    const error = new S3ServiceException("Some S3 service exception.");
+    error.name = "ServiceException";
+    const bucketName = "amzn-s3-demo-bucket";
+    send.mockRejectedValueOnce(error);
+
+    const spy = vi.spyOn(console, "error");
+
+    await main({
+      bucketName,
+      key: "movies.json",
+      filePath: "path/to/movies.json",
+    });
+
+    expect(spy).toHaveBeenCalledWith(
+      `Error from S3 while uploading object to ${bucketName}. ${error.name}: ${error.message}`,
+    );
+  });
+
+  it("should throw errors that are not S3 specific", async () => {
+    const bucketName = "amzn-s3-demo-bucket";
+    send.mockRejectedValueOnce(new Error());
+
+    await expect(() =>
+      main({ bucketName, key: "movies.json", filePath: "path/to/movies.json" }),
+    ).rejects.toBeTruthy();
  });
});
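
Finally, the put-object unit test mocks `fs/promises.readFile` and expects the success path to log the raw service response ("foo" in the mock) and the "EntityTooLarge" path to log the multiline size-limit message. A sketch of an `actions/put-object.js` main consistent with those expectations follows; apart from the asserted strings, the details are assumptions.

import { readFile } from "fs/promises";
import {
  S3Client,
  PutObjectCommand,
  S3ServiceException,
} from "@aws-sdk/client-s3";

const client = new S3Client({});

export const main = async ({ bucketName, key, filePath }) => {
  try {
    const response = await client.send(
      new PutObjectCommand({
        Bucket: bucketName,
        Key: key,
        Body: await readFile(filePath),
      }),
    );
    // The unit test resolves send() with "foo" and expects it to be logged as-is.
    console.log(response);
  } catch (caught) {
    if (caught instanceof S3ServiceException && caught.name === "EntityTooLarge") {
      console.error(
        `Error from S3 while uploading object to ${bucketName}. \
The object was too large. To upload objects larger than 5GB, use the S3 console (160GB max) \
or the multipart upload API (5TB max).`,
      );
    } else if (caught instanceof S3ServiceException) {
      console.error(
        `Error from S3 while uploading object to ${bucketName}. ${caught.name}: ${caught.message}`,
      );
    } else {
      // Anything that isn't an S3 service error is rethrown, matching the last test.
      throw caught;
    }
  }
};
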