From bc5b01fd1da23bfa97ca724765072f5d1a4518ef Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Thu, 12 Mar 2026 18:46:17 +0000 Subject: [PATCH 01/11] chore: add original unmodified storage samples as baseline --- storage/README.md | 2472 +++++++++++++++++ storage/addBucketConditionalBinding.js | 100 + storage/addBucketDefaultOwnerAcl.js | 53 + storage/addBucketIamMember.js | 73 + storage/addBucketLabel.js | 62 + storage/addBucketOwnerAcl.js | 53 + storage/addBucketWebsiteConfiguration.js | 66 + storage/addFileOwnerAcl.js | 59 + storage/bucketMetadata.js | 46 + storage/changeDefaultStorageClass.js | 54 + storage/changeFileCSEKToCMEK.js | 86 + storage/composeFile.js | 79 + storage/configureBucketCors.js | 80 + storage/configureRetries.js | 90 + storage/copyFile.js | 82 + storage/copyOldVersionOfFile.js | 86 + storage/createBucketWithDualRegion.js | 79 + .../createBucketWithHierarchicalNamespace.js | 65 + storage/createBucketWithObjectRetention.js | 56 + ...createBucketWithStorageClassAndLocation.js | 75 + storage/createBucketWithTurboReplication.js | 69 + storage/createNewBucket.js | 52 + storage/createNotification.js | 54 + storage/deleteBucket.js | 46 + storage/deleteFile.js | 63 + storage/deleteNotification.js | 50 + storage/deleteOldVersionOfFile.js | 59 + storage/disableBucketLifecycleManagement.js | 46 + storage/disableBucketVersioning.js | 54 + storage/disableDefaultEventBasedHold.js | 48 + storage/disableRequesterPays.js | 50 + storage/disableSoftDelete.js | 47 + storage/disableUniformBucketLevelAccess.js | 52 + storage/downloadByteRange.js | 81 + storage/downloadEncryptedFile.js | 73 + storage/downloadFile.js | 65 + ...downloadFileInChunksWithTransferManager.js | 77 + storage/downloadFileUsingRequesterPays.js | 72 + storage/downloadFolderWithTransferManager.js | 60 + storage/downloadIntoMemory.js | 56 + .../downloadManyFilesWithTransferManager.js | 67 + storage/downloadPublicFile.js | 65 + storage/enableBucketLifecycleManagement.js | 55 + 
storage/enableBucketVersioning.js | 54 + storage/enableDefaultEventBasedHold.js | 50 + storage/enableDefaultKMSKey.js | 59 + storage/enableRequesterPays.js | 48 + storage/enableUniformBucketLevelAccess.js | 54 + storage/fileChangeStorageClass.js | 77 + storage/fileSetMetadata.js | 86 + storage/generateEncryptionKey.js | 43 + storage/generateSignedUrl.js | 60 + storage/generateV4ReadSignedUrl.js | 65 + storage/generateV4SignedPolicy.js | 72 + storage/generateV4UploadSignedUrl.js | 68 + storage/getAutoclass.js | 52 + storage/getDefaultEventBasedHold.js | 45 + storage/getMetadata.js | 91 + storage/getMetadataNotifications.js | 61 + storage/getPublicAccessPrevention.js | 50 + storage/getRPO.js | 52 + storage/getRequesterPaysStatus.js | 55 + storage/getRetentionPolicy.js | 55 + storage/getServiceAccount.js | 52 + storage/getSoftDeletePolicy.js | 56 + storage/getSoftDeletedBucket.js | 52 + storage/getUniformBucketLevelAccess.js | 60 + storage/hmacKeyActivate.js | 57 + storage/hmacKeyCreate.js | 59 + storage/hmacKeyDeactivate.js | 57 + storage/hmacKeyDelete.js | 55 + storage/hmacKeyGet.js | 58 + storage/hmacKeysList.js | 52 + storage/listBuckets.js | 38 + storage/listBucketsPartialSuccess.js | 57 + storage/listFiles.js | 50 + storage/listFilesByPrefix.js | 83 + storage/listFilesPaginate.js | 55 + storage/listFilesWithOldVersions.js | 50 + storage/listNotifications.js | 50 + storage/listSoftDeletedBucket.js | 42 + storage/listSoftDeletedObjectVersions.js | 54 + storage/listSoftDeletedObjects.js | 48 + storage/lockRetentionPolicy.js | 58 + storage/makeBucketPublic.js | 49 + storage/makePublic.js | 49 + storage/moveFile.js | 76 + storage/moveFileAtomic.js | 76 + storage/package.json | 30 + storage/printBucketAcl.js | 49 + storage/printBucketAclForUser.js | 56 + storage/printFileAcl.js | 52 + storage/printFileAclForUser.js | 65 + storage/quickstart.js | 47 + storage/releaseEventBasedHold.js | 64 + storage/releaseTemporaryHold.js | 64 + 
storage/removeBucketConditionalBinding.js | 94 + storage/removeBucketCors.js | 50 + storage/removeBucketDefaultOwner.js | 53 + storage/removeBucketIamMember.js | 87 + storage/removeBucketLabel.js | 54 + storage/removeBucketOwnerAcl.js | 54 + storage/removeDefaultKMSKey.js | 54 + storage/removeFileOwnerAcl.js | 63 + storage/removeRetentionPolicy.js | 54 + storage/renameFile.js | 59 + storage/resources/.gitignore | 1 + storage/resources/resourcesSub1/testSub1.txt | 2 + storage/resources/test.txt | 1 + storage/resources/test2.txt | 1 + storage/restoreSoftDeletedBucket.js | 48 + storage/restoreSoftDeletedObject.js | 58 + storage/rotateEncryptionKey.js | 82 + storage/scripts/cleanup | 44 + storage/setAutoclass.js | 58 + storage/setClientEndpoint.js | 48 + storage/setEventBasedHold.js | 65 + storage/setObjectRetentionPolicy.js | 96 + storage/setPublicAccessPreventionEnforced.js | 54 + storage/setPublicAccessPreventionInherited.js | 49 + storage/setRPOAsyncTurbo.js | 55 + storage/setRPODefault.js | 55 + storage/setRetentionPolicy.js | 52 + storage/setSoftDeletePolicy.js | 47 + storage/setTemporaryHold.js | 64 + storage/streamFileDownload.js | 71 + storage/streamFileUpload.js | 73 + storage/system-test/acl.test.js | 144 + storage/system-test/bucketLifecycle.test.js | 80 + storage/system-test/bucketLock.test.js | 145 + storage/system-test/buckets.test.js | 485 ++++ storage/system-test/encryption.test.js | 110 + storage/system-test/files.test.js | 683 +++++ storage/system-test/hmacKey.test.js | 116 + storage/system-test/iam.test.js | 103 + storage/system-test/notifications.test.js | 99 + storage/system-test/quickstart.test.js | 36 + storage/system-test/requesterPays.test.js | 109 + storage/system-test/storage.test.js | 30 + ...t_9d800329-00da-4cdd-9a3e-7ac6743d5813.txt | 0 storage/system-test/transfer-manager.test.js | 118 + storage/uploadDirectory.js | 91 + storage/uploadDirectoryWithTransferManager.js | 58 + storage/uploadEncryptedFile.js | 70 + storage/uploadFile.js | 61 
+ .../uploadFileInChunksWithTransferManager.js | 67 + storage/uploadFileWithKmsKey.js | 69 + storage/uploadFromMemory.js | 54 + storage/uploadManyFilesWithTransferManager.js | 67 + storage/uploadWithoutAuthentication.js | 76 + .../uploadWithoutAuthenticationSignedUrl.js | 84 + storage/viewBucketIamMembers.js | 61 + 152 files changed, 12856 insertions(+) create mode 100644 storage/README.md create mode 100644 storage/addBucketConditionalBinding.js create mode 100644 storage/addBucketDefaultOwnerAcl.js create mode 100644 storage/addBucketIamMember.js create mode 100644 storage/addBucketLabel.js create mode 100644 storage/addBucketOwnerAcl.js create mode 100644 storage/addBucketWebsiteConfiguration.js create mode 100644 storage/addFileOwnerAcl.js create mode 100644 storage/bucketMetadata.js create mode 100644 storage/changeDefaultStorageClass.js create mode 100644 storage/changeFileCSEKToCMEK.js create mode 100644 storage/composeFile.js create mode 100644 storage/configureBucketCors.js create mode 100644 storage/configureRetries.js create mode 100644 storage/copyFile.js create mode 100644 storage/copyOldVersionOfFile.js create mode 100644 storage/createBucketWithDualRegion.js create mode 100644 storage/createBucketWithHierarchicalNamespace.js create mode 100644 storage/createBucketWithObjectRetention.js create mode 100644 storage/createBucketWithStorageClassAndLocation.js create mode 100644 storage/createBucketWithTurboReplication.js create mode 100644 storage/createNewBucket.js create mode 100644 storage/createNotification.js create mode 100644 storage/deleteBucket.js create mode 100644 storage/deleteFile.js create mode 100644 storage/deleteNotification.js create mode 100644 storage/deleteOldVersionOfFile.js create mode 100644 storage/disableBucketLifecycleManagement.js create mode 100644 storage/disableBucketVersioning.js create mode 100644 storage/disableDefaultEventBasedHold.js create mode 100644 storage/disableRequesterPays.js create mode 100644 
storage/disableSoftDelete.js create mode 100644 storage/disableUniformBucketLevelAccess.js create mode 100644 storage/downloadByteRange.js create mode 100644 storage/downloadEncryptedFile.js create mode 100644 storage/downloadFile.js create mode 100644 storage/downloadFileInChunksWithTransferManager.js create mode 100644 storage/downloadFileUsingRequesterPays.js create mode 100644 storage/downloadFolderWithTransferManager.js create mode 100644 storage/downloadIntoMemory.js create mode 100644 storage/downloadManyFilesWithTransferManager.js create mode 100644 storage/downloadPublicFile.js create mode 100644 storage/enableBucketLifecycleManagement.js create mode 100644 storage/enableBucketVersioning.js create mode 100644 storage/enableDefaultEventBasedHold.js create mode 100644 storage/enableDefaultKMSKey.js create mode 100644 storage/enableRequesterPays.js create mode 100644 storage/enableUniformBucketLevelAccess.js create mode 100644 storage/fileChangeStorageClass.js create mode 100644 storage/fileSetMetadata.js create mode 100644 storage/generateEncryptionKey.js create mode 100644 storage/generateSignedUrl.js create mode 100644 storage/generateV4ReadSignedUrl.js create mode 100644 storage/generateV4SignedPolicy.js create mode 100644 storage/generateV4UploadSignedUrl.js create mode 100644 storage/getAutoclass.js create mode 100644 storage/getDefaultEventBasedHold.js create mode 100644 storage/getMetadata.js create mode 100644 storage/getMetadataNotifications.js create mode 100644 storage/getPublicAccessPrevention.js create mode 100644 storage/getRPO.js create mode 100644 storage/getRequesterPaysStatus.js create mode 100644 storage/getRetentionPolicy.js create mode 100644 storage/getServiceAccount.js create mode 100644 storage/getSoftDeletePolicy.js create mode 100644 storage/getSoftDeletedBucket.js create mode 100644 storage/getUniformBucketLevelAccess.js create mode 100644 storage/hmacKeyActivate.js create mode 100644 storage/hmacKeyCreate.js create mode 100644 
storage/hmacKeyDeactivate.js create mode 100644 storage/hmacKeyDelete.js create mode 100644 storage/hmacKeyGet.js create mode 100644 storage/hmacKeysList.js create mode 100644 storage/listBuckets.js create mode 100644 storage/listBucketsPartialSuccess.js create mode 100644 storage/listFiles.js create mode 100644 storage/listFilesByPrefix.js create mode 100644 storage/listFilesPaginate.js create mode 100644 storage/listFilesWithOldVersions.js create mode 100644 storage/listNotifications.js create mode 100644 storage/listSoftDeletedBucket.js create mode 100644 storage/listSoftDeletedObjectVersions.js create mode 100644 storage/listSoftDeletedObjects.js create mode 100644 storage/lockRetentionPolicy.js create mode 100644 storage/makeBucketPublic.js create mode 100644 storage/makePublic.js create mode 100644 storage/moveFile.js create mode 100644 storage/moveFileAtomic.js create mode 100644 storage/package.json create mode 100644 storage/printBucketAcl.js create mode 100644 storage/printBucketAclForUser.js create mode 100644 storage/printFileAcl.js create mode 100644 storage/printFileAclForUser.js create mode 100644 storage/quickstart.js create mode 100644 storage/releaseEventBasedHold.js create mode 100644 storage/releaseTemporaryHold.js create mode 100644 storage/removeBucketConditionalBinding.js create mode 100644 storage/removeBucketCors.js create mode 100644 storage/removeBucketDefaultOwner.js create mode 100644 storage/removeBucketIamMember.js create mode 100644 storage/removeBucketLabel.js create mode 100644 storage/removeBucketOwnerAcl.js create mode 100644 storage/removeDefaultKMSKey.js create mode 100644 storage/removeFileOwnerAcl.js create mode 100644 storage/removeRetentionPolicy.js create mode 100644 storage/renameFile.js create mode 100644 storage/resources/.gitignore create mode 100644 storage/resources/resourcesSub1/testSub1.txt create mode 100644 storage/resources/test.txt create mode 100644 storage/resources/test2.txt create mode 100644 
storage/restoreSoftDeletedBucket.js create mode 100644 storage/restoreSoftDeletedObject.js create mode 100644 storage/rotateEncryptionKey.js create mode 100644 storage/scripts/cleanup create mode 100644 storage/setAutoclass.js create mode 100644 storage/setClientEndpoint.js create mode 100644 storage/setEventBasedHold.js create mode 100644 storage/setObjectRetentionPolicy.js create mode 100644 storage/setPublicAccessPreventionEnforced.js create mode 100644 storage/setPublicAccessPreventionInherited.js create mode 100644 storage/setRPOAsyncTurbo.js create mode 100644 storage/setRPODefault.js create mode 100644 storage/setRetentionPolicy.js create mode 100644 storage/setSoftDeletePolicy.js create mode 100644 storage/setTemporaryHold.js create mode 100644 storage/streamFileDownload.js create mode 100644 storage/streamFileUpload.js create mode 100644 storage/system-test/acl.test.js create mode 100644 storage/system-test/bucketLifecycle.test.js create mode 100644 storage/system-test/bucketLock.test.js create mode 100644 storage/system-test/buckets.test.js create mode 100644 storage/system-test/encryption.test.js create mode 100644 storage/system-test/files.test.js create mode 100644 storage/system-test/hmacKey.test.js create mode 100644 storage/system-test/iam.test.js create mode 100644 storage/system-test/notifications.test.js create mode 100644 storage/system-test/quickstart.test.js create mode 100644 storage/system-test/requesterPays.test.js create mode 100644 storage/system-test/storage.test.js create mode 100644 storage/system-test/test_9d800329-00da-4cdd-9a3e-7ac6743d5813.txt create mode 100644 storage/system-test/transfer-manager.test.js create mode 100644 storage/uploadDirectory.js create mode 100644 storage/uploadDirectoryWithTransferManager.js create mode 100644 storage/uploadEncryptedFile.js create mode 100644 storage/uploadFile.js create mode 100644 storage/uploadFileInChunksWithTransferManager.js create mode 100644 storage/uploadFileWithKmsKey.js create 
mode 100644 storage/uploadFromMemory.js create mode 100644 storage/uploadManyFilesWithTransferManager.js create mode 100644 storage/uploadWithoutAuthentication.js create mode 100644 storage/uploadWithoutAuthenticationSignedUrl.js create mode 100644 storage/viewBucketIamMembers.js diff --git a/storage/README.md b/storage/README.md new file mode 100644 index 0000000000..04f33426fa --- /dev/null +++ b/storage/README.md @@ -0,0 +1,2472 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Storage: Node.js Samples](https://github.com/googleapis/nodejs-storage) + +[![Open in Cloud Shell][shell_img]][shell_link] + +> Node.js idiomatic client for [Cloud Storage][product-docs]. + +[Cloud Storage](https://cloud.google.com/storage/docs) allows world-wide +storage and retrieval of any amount of data at any time. You can use Google +Cloud Storage for a range of scenarios including serving website content, +storing data for archival and disaster recovery, or distributing large data +objects to users via direct download. + +## Table of Contents + +* [Before you begin](#before-you-begin) +* [Samples](#samples) + * [Add Bucket Conditional Binding](#add-bucket-conditional-binding) + * [Add Bucket Default Owner Acl](#add-bucket-default-owner-acl) + * [Add Bucket Iam Member](#add-bucket-iam-member) + * [Storage Add Bucket Label.](#storage-add-bucket-label.) + * [Add Bucket Owner Acl](#add-bucket-owner-acl) + * [Bucket Website Configuration.](#bucket-website-configuration.) + * [Add File Owner Acl](#add-file-owner-acl) + * [Storage Get Bucket Metadata.](#storage-get-bucket-metadata.) + * [Change Bucket's Default Storage Class.](#change-bucket's-default-storage-class.) + * [Storage File Convert CSEK to CMEK.](#storage-file-convert-csek-to-cmek.) + * [Storage Combine files.](#storage-combine-files.) 
+ * [Storage Configure Bucket Cors.](#storage-configure-bucket-cors.) + * [Configure Retries](#configure-retries) + * [Copy File](#copy-file) + * [Copy Old Version Of File.](#copy-old-version-of-file.) + * [Create a Dual-Region Bucket](#create-a-dual-region-bucket) + * [Create a hierarchical namespace enabled bucket](#create-a-hierarchical-namespace-enabled-bucket) + * [Create a Bucket with object retention enabled.](#create-a-bucket-with-object-retention-enabled.) + * [Create Bucket With Storage Class and Location.](#create-bucket-with-storage-class-and-location.) + * [Create Bucket With Turbo Replication](#create-bucket-with-turbo-replication) + * [Create New Bucket](#create-new-bucket) + * [Create Notification](#create-notification) + * [Delete Bucket](#delete-bucket) + * [Delete File](#delete-file) + * [Delete Notification](#delete-notification) + * [Delete Old Version Of File.](#delete-old-version-of-file.) + * [Disable Bucket Lifecycle Management](#disable-bucket-lifecycle-management) + * [Storage Disable Bucket Versioning.](#storage-disable-bucket-versioning.) + * [Disable Default Event Based Hold](#disable-default-event-based-hold) + * [Disable Requester Pays](#disable-requester-pays) + * [Disable Soft Delete](#disable-soft-delete) + * [Disable Uniform Bucket Level Access](#disable-uniform-bucket-level-access) + * [Download Byte Range](#download-byte-range) + * [Download Encrypted File](#download-encrypted-file) + * [Download File](#download-file) + * [Download a File in Chunks With Transfer Manager](#download-a-file-in-chunks-with-transfer-manager) + * [Download File Using Requester Pays](#download-file-using-requester-pays) + * [Download Folder With Transfer Manager](#download-folder-with-transfer-manager) + * [Download Into Memory](#download-into-memory) + * [Download Many Files With Transfer Manager](#download-many-files-with-transfer-manager) + * [Storage Download Public File.](#storage-download-public-file.) 
+ * [Enable Bucket Lifecycle Management](#enable-bucket-lifecycle-management) + * [Storage Enable Bucket Versioning.](#storage-enable-bucket-versioning.) + * [Enable Default Event Based Hold](#enable-default-event-based-hold) + * [Enable Default KMS Key](#enable-default-kms-key) + * [Enable Requester Pays](#enable-requester-pays) + * [Enable Uniform Bucket Level Access](#enable-uniform-bucket-level-access) + * [Change File's Storage Class.](#change-file's-storage-class.) + * [Storage Set File Metadata.](#storage-set-file-metadata.) + * [Generate Encryption Key](#generate-encryption-key) + * [Generate Signed Url](#generate-signed-url) + * [Generate V4 Read Signed Url](#generate-v4-read-signed-url) + * [Generate V4 Signed Policy](#generate-v4-signed-policy) + * [Generate V4 Upload Signed Url](#generate-v4-upload-signed-url) + * [Get Autoclass](#get-autoclass) + * [Get Default Event Based Hold](#get-default-event-based-hold) + * [Get Metadata](#get-metadata) + * [Get Metadata Notifications](#get-metadata-notifications) + * [Get Public Access Prevention](#get-public-access-prevention) + * [Get RPO](#get-rpo) + * [Get Requester Pays Status](#get-requester-pays-status) + * [Get Retention Policy](#get-retention-policy) + * [Storage Get Service Account.](#storage-get-service-account.) + * [Get Soft Delete Policy](#get-soft-delete-policy) + * [Get Soft Deleted Bucket](#get-soft-deleted-bucket) + * [Get Uniform Bucket Level Access](#get-uniform-bucket-level-access) + * [Activate HMAC SA Key.](#activate-hmac-sa-key.) + * [Create HMAC SA Key.](#create-hmac-sa-key.) + * [Deactivate HMAC SA Key.](#deactivate-hmac-sa-key.) + * [Delete HMAC SA Key.](#delete-hmac-sa-key.) + * [Get HMAC SA Key Metadata.](#get-hmac-sa-key-metadata.) + * [List HMAC SA Keys Metadata.](#list-hmac-sa-keys-metadata.) 
+ * [List Buckets](#list-buckets) + * [List Buckets Partial Success](#list-buckets-partial-success) + * [List Files](#list-files) + * [List Files By Prefix](#list-files-by-prefix) + * [List Files Paginate](#list-files-paginate) + * [List Files with Old Versions.](#list-files-with-old-versions.) + * [List Notifications](#list-notifications) + * [List Soft Deleted Bucket](#list-soft-deleted-bucket) + * [List Soft Deleted Object Versions](#list-soft-deleted-object-versions) + * [List Soft Deleted Objects](#list-soft-deleted-objects) + * [Lock Retention Policy](#lock-retention-policy) + * [Storage Make Bucket Public.](#storage-make-bucket-public.) + * [Make Public](#make-public) + * [Move File](#move-file) + * [Move File Atomic](#move-file-atomic) + * [Print Bucket Acl](#print-bucket-acl) + * [Print Bucket Acl For User](#print-bucket-acl-for-user) + * [Print File Acl](#print-file-acl) + * [Print File Acl For User](#print-file-acl-for-user) + * [Quickstart](#quickstart) + * [Release Event Based Hold](#release-event-based-hold) + * [Release Temporary Hold](#release-temporary-hold) + * [Remove Bucket Conditional Binding](#remove-bucket-conditional-binding) + * [Storage Remove Bucket Cors Configuration.](#storage-remove-bucket-cors-configuration.) + * [Remove Bucket Default Owner](#remove-bucket-default-owner) + * [Remove Bucket Iam Member](#remove-bucket-iam-member) + * [Storage Remove Bucket Label.](#storage-remove-bucket-label.) + * [Remove Bucket Owner Acl](#remove-bucket-owner-acl) + * [Remove Default KMS Key.](#remove-default-kms-key.) 
+ * [Remove File Owner Acl](#remove-file-owner-acl) + * [Remove Retention Policy](#remove-retention-policy) + * [Rename File](#rename-file) + * [Restore Soft Deleted Bucket](#restore-soft-deleted-bucket) + * [Restore Soft Deleted Object](#restore-soft-deleted-object) + * [Rotate Encryption Key](#rotate-encryption-key) + * [Set Autoclass](#set-autoclass) + * [Set Client Endpoint](#set-client-endpoint) + * [Set Event Based Hold](#set-event-based-hold) + * [Set the object retention policy of a File.](#set-the-object-retention-policy-of-a-file.) + * [Set Public Access Prevention Enforced](#set-public-access-prevention-enforced) + * [Set Public Access Prevention Inherited](#set-public-access-prevention-inherited) + * [Set RPO Async Turbo](#set-rpo-async-turbo) + * [Set RPO Default](#set-rpo-default) + * [Set Retention Policy](#set-retention-policy) + * [Set Soft Delete Policy](#set-soft-delete-policy) + * [Set Temporary Hold](#set-temporary-hold) + * [Stream File Download](#stream-file-download) + * [Stream File Upload](#stream-file-upload) + * [Upload a directory to a bucket.](#upload-a-directory-to-a-bucket.) 
+ * [Upload Directory With Transfer Manager](#upload-directory-with-transfer-manager) + * [Upload Encrypted File](#upload-encrypted-file) + * [Upload File](#upload-file) + * [Upload a File in Chunks With Transfer Manager](#upload-a-file-in-chunks-with-transfer-manager) + * [Upload File With Kms Key](#upload-file-with-kms-key) + * [Upload From Memory](#upload-from-memory) + * [Upload Many Files With Transfer Manager](#upload-many-files-with-transfer-manager) + * [Upload Without Authentication](#upload-without-authentication) + * [Upload Without Authentication Signed Url](#upload-without-authentication-signed-url) + * [View Bucket Iam Members](#view-bucket-iam-members) + +## Before you begin + +Before running the samples, make sure you've followed the steps outlined in +[Using the client library](https://github.com/googleapis/nodejs-storage#using-the-client-library). + +`cd samples` + +`npm install` + +`cd ..` + +## Samples + + + +### Add Bucket Conditional Binding + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketConditionalBinding.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketConditionalBinding.js,samples/README.md) + +__Usage:__ + + +`node samples/addBucketConditionalBinding.js` + + +----- + + + + +### Add Bucket Default Owner Acl + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketDefaultOwnerAcl.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketDefaultOwnerAcl.js,samples/README.md) + +__Usage:__ + + +`node samples/addBucketDefaultOwnerAcl.js` + + +----- + + + + +### Add Bucket Iam Member + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketIamMember.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketIamMember.js,samples/README.md) + +__Usage:__ + + +`node samples/addBucketIamMember.js` + + +----- + + + + +### Storage Add Bucket Label. + +Adds bucket label. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketLabel.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketLabel.js,samples/README.md) + +__Usage:__ + + +`node addBucketLabel.js ` + + +----- + + + + +### Add Bucket Owner Acl + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketOwnerAcl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketOwnerAcl.js,samples/README.md) + +__Usage:__ + + +`node samples/addBucketOwnerAcl.js` + + +----- + + + + +### Bucket Website Configuration. + +Bucket Website Configuration. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketWebsiteConfiguration.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketWebsiteConfiguration.js,samples/README.md) + +__Usage:__ + + +`node addBucketWebsiteConfiguration.js ` + + +----- + + + + +### Add File Owner Acl + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addFileOwnerAcl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addFileOwnerAcl.js,samples/README.md) + +__Usage:__ + + +`node samples/addFileOwnerAcl.js` + + +----- + + + + +### Storage Get Bucket Metadata. + +Get bucket metadata. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/bucketMetadata.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/bucketMetadata.js,samples/README.md) + +__Usage:__ + + +`node bucketMetadata.js ` + + +----- + + + + +### Change Bucket's Default Storage Class. + +Change Bucket's Default Storage Class. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/changeDefaultStorageClass.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/changeDefaultStorageClass.js,samples/README.md) + +__Usage:__ + + +`node changeDefaultStorageClass.js ` + + +----- + + + + +### Storage File Convert CSEK to CMEK. + +Storage File Convert CSEK to CMEK. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/changeFileCSEKToCMEK.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/changeFileCSEKToCMEK.js,samples/README.md) + +__Usage:__ + + +`node changeFileCSEKToCMEK.js ` + + +----- + + + + +### Storage Combine files. + +Combine multiple files into one new file. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/composeFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/composeFile.js,samples/README.md) + +__Usage:__ + + +`node composeFile.js ` + + +----- + + + + +### Storage Configure Bucket Cors. + +Configures bucket cors. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/configureBucketCors.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/configureBucketCors.js,samples/README.md) + +__Usage:__ + + +`node configureBucketCors.js ` + + +----- + + + + +### Configure Retries + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/configureRetries.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/configureRetries.js,samples/README.md) + +__Usage:__ + + +`node samples/configureRetries.js` + + +----- + + + + +### Copy File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/copyFile.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/copyFile.js,samples/README.md) + +__Usage:__ + + +`node samples/copyFile.js` + + +----- + + + + +### Copy Old Version Of File. + +Copy Old Version Of File. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/copyOldVersionOfFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/copyOldVersionOfFile.js,samples/README.md) + +__Usage:__ + + +`node copyOldVersionOfFile.js ` + + +----- + + + + +### Create a Dual-Region Bucket + +Create a Dual-Region Bucket with provided location and regions. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithDualRegion.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithDualRegion.js,samples/README.md) + +__Usage:__ + + +`node createBucketWithDualRegion.js ` + + +----- + + + + +### Create a hierarchical namespace enabled bucket + +Create a hierarchical namespace enabled bucket. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithHierarchicalNamespace.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithHierarchicalNamespace.js,samples/README.md) + +__Usage:__ + + +`node createBucketWithHierarchicalNamespace.js ` + + +----- + + + + +### Create a Bucket with object retention enabled. + +Create a Bucket with object retention enabled. 
+ +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithObjectRetention.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithObjectRetention.js,samples/README.md) + +__Usage:__ + + +`node createBucketWithObjectRetention.js ` + + +----- + + + + +### Create Bucket With Storage Class and Location. + +Create Bucket With Storage Class and Location. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithStorageClassAndLocation.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithStorageClassAndLocation.js,samples/README.md) + +__Usage:__ + + +`node createBucketWithStorageClassAndLocation.js ` + + +----- + + + + +### Create Bucket With Turbo Replication + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithTurboReplication.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithTurboReplication.js,samples/README.md) + +__Usage:__ + + +`node samples/createBucketWithTurboReplication.js` + + +----- + + + + +### Create New Bucket + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createNewBucket.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createNewBucket.js,samples/README.md) + +__Usage:__ + + +`node samples/createNewBucket.js` + + +----- + + + + +### Create Notification + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createNotification.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createNotification.js,samples/README.md) + +__Usage:__ + + +`node samples/createNotification.js` + + +----- + + + + +### Delete Bucket + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteBucket.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteBucket.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteBucket.js` + + +----- + + + + +### Delete File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteFile.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteFile.js` + + +----- + + + + +### Delete Notification + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteNotification.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteNotification.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteNotification.js` + + +----- + + + + +### Delete Old Version Of File. 
+ +Delete Old Version Of File. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteOldVersionOfFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteOldVersionOfFile.js,samples/README.md) + +__Usage:__ + + +`node deleteOldVersionOfFile.js ` + + +----- + + + + +### Disable Bucket Lifecycle Management + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableBucketLifecycleManagement.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableBucketLifecycleManagement.js,samples/README.md) + +__Usage:__ + + +`node samples/disableBucketLifecycleManagement.js` + + +----- + + + + +### Storage Disable Bucket Versioning. + +Disables bucket versioning. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableBucketVersioning.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableBucketVersioning.js,samples/README.md) + +__Usage:__ + + +`node disableBucketVersioning.js ` + + +----- + + + + +### Disable Default Event Based Hold + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableDefaultEventBasedHold.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableDefaultEventBasedHold.js,samples/README.md) + +__Usage:__ + + +`node samples/disableDefaultEventBasedHold.js` + + +----- + + + + +### Disable Requester Pays + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableRequesterPays.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableRequesterPays.js,samples/README.md) + +__Usage:__ + + +`node samples/disableRequesterPays.js` + + +----- + + + + +### Disable Soft Delete + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableSoftDelete.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableSoftDelete.js,samples/README.md) + +__Usage:__ + + +`node samples/disableSoftDelete.js` + + +----- + + + + +### Disable Uniform Bucket Level Access + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableUniformBucketLevelAccess.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableUniformBucketLevelAccess.js,samples/README.md) + +__Usage:__ + + +`node samples/disableUniformBucketLevelAccess.js` + + +----- + + + + +### Download Byte Range + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadByteRange.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadByteRange.js,samples/README.md) + +__Usage:__ + + +`node samples/downloadByteRange.js` + + +----- + + + + +### Download Encrypted File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadEncryptedFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadEncryptedFile.js,samples/README.md) + +__Usage:__ + + +`node samples/downloadEncryptedFile.js` + + +----- + + + + +### Download File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFile.js,samples/README.md) + +__Usage:__ + + +`node samples/downloadFile.js` + + +----- + + + + +### Download a File in Chunks With Transfer Manager + +Downloads a single file in chunks in parallel utilizing transfer manager. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFileInChunksWithTransferManager.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFileInChunksWithTransferManager.js,samples/README.md) + +__Usage:__ + + +`node downloadFileInChunksWithTransferManager.js ` + + +----- + + + + +### Download File Using Requester Pays + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFileUsingRequesterPays.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFileUsingRequesterPays.js,samples/README.md) + +__Usage:__ + + +`node samples/downloadFileUsingRequesterPays.js` + + +----- + + + + +### Download Folder With Transfer Manager + +Downloads a folder in parallel utilizing transfer manager. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFolderWithTransferManager.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFolderWithTransferManager.js,samples/README.md) + +__Usage:__ + + +`node downloadFolderWithTransferManager.js ` + + +----- + + + + +### Download Into Memory + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadIntoMemory.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadIntoMemory.js,samples/README.md) + +__Usage:__ + + +`node samples/downloadIntoMemory.js` + + +----- + + + + +### Download Many Files With Transfer Manager + +Downloads many files in parallel utilizing transfer manager. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadManyFilesWithTransferManager.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadManyFilesWithTransferManager.js,samples/README.md) + +__Usage:__ + + +`node downloadManyFilesWithTransferManager.js ` + + +----- + + + + +### Storage Download Public File. + +Download Public File. 
+ +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadPublicFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadPublicFile.js,samples/README.md) + +__Usage:__ + + +`node downloadPublicFile.js ` + + +----- + + + + +### Enable Bucket Lifecycle Management + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableBucketLifecycleManagement.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableBucketLifecycleManagement.js,samples/README.md) + +__Usage:__ + + +`node samples/enableBucketLifecycleManagement.js` + + +----- + + + + +### Storage Enable Bucket Versioning. + +Enables bucket versioning. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableBucketVersioning.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableBucketVersioning.js,samples/README.md) + +__Usage:__ + + +`node enableBucketVersioning.js ` + + +----- + + + + +### Enable Default Event Based Hold + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableDefaultEventBasedHold.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableDefaultEventBasedHold.js,samples/README.md) + +__Usage:__ + + +`node samples/enableDefaultEventBasedHold.js` + + +----- + + + + +### Enable Default KMS Key + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableDefaultKMSKey.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableDefaultKMSKey.js,samples/README.md) + +__Usage:__ + + +`node samples/enableDefaultKMSKey.js` + + +----- + + + + +### Enable Requester Pays + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableRequesterPays.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableRequesterPays.js,samples/README.md) + +__Usage:__ + + +`node samples/enableRequesterPays.js` + + +----- + + + + +### Enable Uniform Bucket Level Access + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableUniformBucketLevelAccess.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableUniformBucketLevelAccess.js,samples/README.md) + +__Usage:__ + + +`node samples/enableUniformBucketLevelAccess.js` + + +----- + + + + +### Change File's Storage Class. + +Change File's Storage Class. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/fileChangeStorageClass.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/fileChangeStorageClass.js,samples/README.md) + +__Usage:__ + + +`node fileChangeStorageClass.js ` + + +----- + + + + +### Storage Set File Metadata. + +Set file metadata. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/fileSetMetadata.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/fileSetMetadata.js,samples/README.md) + +__Usage:__ + + +`node fileSetMetadata.js ` + + +----- + + + + +### Generate Encryption Key + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateEncryptionKey.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateEncryptionKey.js,samples/README.md) + +__Usage:__ + + +`node samples/generateEncryptionKey.js` + + +----- + + + + +### Generate Signed Url + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateSignedUrl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateSignedUrl.js,samples/README.md) + +__Usage:__ + + +`node samples/generateSignedUrl.js` + + +----- + + + + +### Generate V4 Read Signed Url + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4ReadSignedUrl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4ReadSignedUrl.js,samples/README.md) + +__Usage:__ + + +`node samples/generateV4ReadSignedUrl.js` + + +----- + + + + +### Generate V4 Signed Policy + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4SignedPolicy.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4SignedPolicy.js,samples/README.md) + +__Usage:__ + + +`node samples/generateV4SignedPolicy.js` + + +----- + + + + +### Generate V4 Upload Signed Url + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4UploadSignedUrl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4UploadSignedUrl.js,samples/README.md) + +__Usage:__ + + +`node samples/generateV4UploadSignedUrl.js` + + +----- + + + + +### Get Autoclass + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getAutoclass.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getAutoclass.js,samples/README.md) + +__Usage:__ + + +`node samples/getAutoclass.js` + + +----- + + + + +### Get Default Event Based Hold + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getDefaultEventBasedHold.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getDefaultEventBasedHold.js,samples/README.md) + +__Usage:__ + + +`node samples/getDefaultEventBasedHold.js` + + +----- + + + + +### Get Metadata + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getMetadata.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getMetadata.js,samples/README.md) + +__Usage:__ + + +`node samples/getMetadata.js` + + +----- + + + + +### Get Metadata Notifications + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getMetadataNotifications.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getMetadataNotifications.js,samples/README.md) + +__Usage:__ + + +`node samples/getMetadataNotifications.js` + + +----- + + + + +### Get Public Access Prevention + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getPublicAccessPrevention.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getPublicAccessPrevention.js,samples/README.md) + +__Usage:__ + + +`node samples/getPublicAccessPrevention.js` + + +----- + + + + +### Get RPO + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRPO.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRPO.js,samples/README.md) + +__Usage:__ + + +`node samples/getRPO.js` + + +----- + + + + +### Get Requester Pays Status + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRequesterPaysStatus.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRequesterPaysStatus.js,samples/README.md) + +__Usage:__ + + +`node samples/getRequesterPaysStatus.js` + + +----- + + + + +### Get Retention Policy + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRetentionPolicy.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRetentionPolicy.js,samples/README.md) + +__Usage:__ + + +`node samples/getRetentionPolicy.js` + + +----- + + + + +### Storage Get Service Account. + +Get Service Account. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getServiceAccount.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getServiceAccount.js,samples/README.md) + +__Usage:__ + + +`node getServiceAccount.js ` + + +----- + + + + +### Get Soft Delete Policy + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getSoftDeletePolicy.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getSoftDeletePolicy.js,samples/README.md) + +__Usage:__ + + +`node samples/getSoftDeletePolicy.js` + + +----- + + + + +### Get Soft Deleted Bucket + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getSoftDeletedBucket.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getSoftDeletedBucket.js,samples/README.md) + +__Usage:__ + + +`node samples/getSoftDeletedBucket.js` + + +----- + + + + +### Get Uniform Bucket Level Access + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getUniformBucketLevelAccess.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getUniformBucketLevelAccess.js,samples/README.md) + +__Usage:__ + + +`node samples/getUniformBucketLevelAccess.js` + + +----- + + + + +### Activate HMAC SA Key. + +Activate HMAC SA Key. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyActivate.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyActivate.js,samples/README.md) + +__Usage:__ + + +`node hmacKeyActivate.js [projectId]` + + +----- + + + + +### Create HMAC SA Key. + +Create HMAC SA Key. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyCreate.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyCreate.js,samples/README.md) + +__Usage:__ + + +`node hmacKeyCreate.js [projectId]` + + +----- + + + + +### Deactivate HMAC SA Key. + +Deactivate HMAC SA Key. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyDeactivate.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyDeactivate.js,samples/README.md) + +__Usage:__ + + +`node hmacKeyDeactivate.js [projectId]` + + +----- + + + + +### Delete HMAC SA Key. + +Delete HMAC SA Key. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyDelete.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyDelete.js,samples/README.md) + +__Usage:__ + + +`node hmacKeyDelete.js [projectId]` + + +----- + + + + +### Get HMAC SA Key Metadata. + +Get HMAC SA Key Metadata. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyGet.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyGet.js,samples/README.md) + +__Usage:__ + + +`node hmacKeyGet.js [projectId]` + + +----- + + + + +### List HMAC SA Keys Metadata. + +List HMAC SA Keys Metadata. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeysList.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeysList.js,samples/README.md) + +__Usage:__ + + +`node hmacKeysList.js [projectId]` + + +----- + + + + +### List Buckets + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listBuckets.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listBuckets.js,samples/README.md) + +__Usage:__ + + +`node samples/listBuckets.js` + + +----- + + + + +### List Buckets Partial Success + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listBucketsPartialSuccess.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listBucketsPartialSuccess.js,samples/README.md) + +__Usage:__ + + +`node samples/listBucketsPartialSuccess.js` + + +----- + + + + +### List Files + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFiles.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFiles.js,samples/README.md) + +__Usage:__ + + +`node samples/listFiles.js` + + +----- + + + + +### List Files By Prefix + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesByPrefix.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesByPrefix.js,samples/README.md) + +__Usage:__ + + +`node samples/listFilesByPrefix.js` + + +----- + + + + +### List Files Paginate + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesPaginate.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesPaginate.js,samples/README.md) + +__Usage:__ + + +`node samples/listFilesPaginate.js` + + +----- + + + + +### List Files with Old Versions. + +List Files with Old Versions. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesWithOldVersions.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesWithOldVersions.js,samples/README.md) + +__Usage:__ + + +`node listFilesWithOldVersions.js ` + + +----- + + + + +### List Notifications + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listNotifications.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listNotifications.js,samples/README.md) + +__Usage:__ + + +`node samples/listNotifications.js` + + +----- + + + + +### List Soft Deleted Bucket + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listSoftDeletedBucket.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listSoftDeletedBucket.js,samples/README.md) + +__Usage:__ + + +`node samples/listSoftDeletedBucket.js` + + +----- + + + + +### List Soft Deleted Object Versions + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listSoftDeletedObjectVersions.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listSoftDeletedObjectVersions.js,samples/README.md) + +__Usage:__ + + +`node samples/listSoftDeletedObjectVersions.js` + + +----- + + + + +### List Soft Deleted Objects + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listSoftDeletedObjects.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listSoftDeletedObjects.js,samples/README.md) + +__Usage:__ + + +`node samples/listSoftDeletedObjects.js` + + +----- + + + + +### Lock Retention Policy + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/lockRetentionPolicy.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/lockRetentionPolicy.js,samples/README.md) + +__Usage:__ + + +`node samples/lockRetentionPolicy.js` + + +----- + + + + +### Storage Make Bucket Public. + +Storage Make Bucket Public. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/makeBucketPublic.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/makeBucketPublic.js,samples/README.md) + +__Usage:__ + + +`node makeBucketPublic.js ` + + +----- + + + + +### Make Public + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/makePublic.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/makePublic.js,samples/README.md) + +__Usage:__ + + +`node samples/makePublic.js` + + +----- + + + + +### Move File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/moveFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/moveFile.js,samples/README.md) + +__Usage:__ + + +`node samples/moveFile.js` + + +----- + + + + +### Move File Atomic + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/moveFileAtomic.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/moveFileAtomic.js,samples/README.md) + +__Usage:__ + + +`node samples/moveFileAtomic.js` + + +----- + + + + +### Print Bucket Acl + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printBucketAcl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printBucketAcl.js,samples/README.md) + +__Usage:__ + + +`node samples/printBucketAcl.js` + + +----- + + + + +### Print Bucket Acl For User + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printBucketAclForUser.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printBucketAclForUser.js,samples/README.md) + +__Usage:__ + + +`node samples/printBucketAclForUser.js` + + +----- + + + + +### Print File Acl + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printFileAcl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printFileAcl.js,samples/README.md) + +__Usage:__ + + +`node samples/printFileAcl.js` + + +----- + + + + +### Print File Acl For User + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printFileAclForUser.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printFileAclForUser.js,samples/README.md) + +__Usage:__ + + +`node samples/printFileAclForUser.js` + + +----- + + + + +### Quickstart + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/quickstart.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) + +__Usage:__ + + +`node samples/quickstart.js` + + +----- + + + + +### Release Event Based Hold + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/releaseEventBasedHold.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/releaseEventBasedHold.js,samples/README.md) + +__Usage:__ + + +`node samples/releaseEventBasedHold.js` + + +----- + + + + +### Release Temporary Hold + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/releaseTemporaryHold.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/releaseTemporaryHold.js,samples/README.md) + +__Usage:__ + + +`node samples/releaseTemporaryHold.js` + + +----- + + + + +### Remove Bucket Conditional Binding + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketConditionalBinding.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketConditionalBinding.js,samples/README.md) + +__Usage:__ + + +`node samples/removeBucketConditionalBinding.js` + + +----- + + + + +### Storage Remove Bucket Cors Configuration. + +Removes bucket cors configuration. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketCors.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketCors.js,samples/README.md) + +__Usage:__ + + +`node removeBucketCors.js ` + + +----- + + + + +### Remove Bucket Default Owner + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketDefaultOwner.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketDefaultOwner.js,samples/README.md) + +__Usage:__ + + +`node samples/removeBucketDefaultOwner.js` + + +----- + + + + +### Remove Bucket Iam Member + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketIamMember.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketIamMember.js,samples/README.md) + +__Usage:__ + + +`node samples/removeBucketIamMember.js` + + +----- + + + + +### Storage Remove Bucket Label. + +Removes bucket label. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketLabel.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketLabel.js,samples/README.md) + +__Usage:__ + + +`node removeBucketLabel.js labelone` + + +----- + + + + +### Remove Bucket Owner Acl + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketOwnerAcl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketOwnerAcl.js,samples/README.md) + +__Usage:__ + + +`node samples/removeBucketOwnerAcl.js` + + +----- + + + + +### Remove Default KMS Key. + +Remove Default KMS Key. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeDefaultKMSKey.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeDefaultKMSKey.js,samples/README.md) + +__Usage:__ + + +`node removeDefaultKMSKey.js ` + + +----- + + + + +### Remove File Owner Acl + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeFileOwnerAcl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeFileOwnerAcl.js,samples/README.md) + +__Usage:__ + + +`node samples/removeFileOwnerAcl.js` + + +----- + + + + +### Remove Retention Policy + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeRetentionPolicy.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeRetentionPolicy.js,samples/README.md) + +__Usage:__ + + +`node samples/removeRetentionPolicy.js` + + +----- + + + + +### Rename File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/renameFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/renameFile.js,samples/README.md) + +__Usage:__ + + +`node samples/renameFile.js` + + +----- + + + + +### Restore Soft Deleted Bucket + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/restoreSoftDeletedBucket.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/restoreSoftDeletedBucket.js,samples/README.md) + +__Usage:__ + + +`node samples/restoreSoftDeletedBucket.js` + + +----- + + + + +### Restore Soft Deleted Object + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/restoreSoftDeletedObject.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/restoreSoftDeletedObject.js,samples/README.md) + +__Usage:__ + + +`node samples/restoreSoftDeletedObject.js` + + +----- + + + + +### Rotate Encryption Key + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/rotateEncryptionKey.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/rotateEncryptionKey.js,samples/README.md) + +__Usage:__ + + +`node samples/rotateEncryptionKey.js` + + +----- + + + + +### Set Autoclass + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setAutoclass.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setAutoclass.js,samples/README.md) + +__Usage:__ + + +`node samples/setAutoclass.js` + + +----- + + + + +### Set Client Endpoint + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setClientEndpoint.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setClientEndpoint.js,samples/README.md) + +__Usage:__ + + +`node samples/setClientEndpoint.js` + + +----- + + + + +### Set Event Based Hold + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setEventBasedHold.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setEventBasedHold.js,samples/README.md) + +__Usage:__ + + +`node samples/setEventBasedHold.js` + + +----- + + + + +### Set the object retention policy of a File. + +Set the object retention policy of a File. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setObjectRetentionPolicy.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setObjectRetentionPolicy.js,samples/README.md) + +__Usage:__ + + +`node setObjectRetentionPolicy.js ` + + +----- + + + + +### Set Public Access Prevention Enforced + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setPublicAccessPreventionEnforced.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setPublicAccessPreventionEnforced.js,samples/README.md) + +__Usage:__ + + +`node samples/setPublicAccessPreventionEnforced.js` + + +----- + + + + +### Set Public Access Prevention Inherited + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setPublicAccessPreventionInherited.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setPublicAccessPreventionInherited.js,samples/README.md) + +__Usage:__ + + +`node samples/setPublicAccessPreventionInherited.js` + + +----- + + + + +### Set RPO Async Turbo + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRPOAsyncTurbo.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRPOAsyncTurbo.js,samples/README.md) + +__Usage:__ + + +`node samples/setRPOAsyncTurbo.js` + + +----- + + + + +### Set RPO Default + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRPODefault.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRPODefault.js,samples/README.md) + +__Usage:__ + + +`node samples/setRPODefault.js` + + +----- + + + + +### Set Retention Policy + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRetentionPolicy.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRetentionPolicy.js,samples/README.md) + +__Usage:__ + + +`node samples/setRetentionPolicy.js` + + +----- + + + + +### Set Soft Delete Policy + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setSoftDeletePolicy.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setSoftDeletePolicy.js,samples/README.md) + +__Usage:__ + + +`node samples/setSoftDeletePolicy.js` + + +----- + + + + +### Set Temporary Hold + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setTemporaryHold.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setTemporaryHold.js,samples/README.md) + +__Usage:__ + + +`node samples/setTemporaryHold.js` + + +----- + + + + +### Stream File Download + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/streamFileDownload.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/streamFileDownload.js,samples/README.md) + +__Usage:__ + + +`node samples/streamFileDownload.js` + + +----- + + + + +### Stream File Upload + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/streamFileUpload.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/streamFileUpload.js,samples/README.md) + +__Usage:__ + + +`node samples/streamFileUpload.js` + + +----- + + + + +### Upload a directory to a bucket. + +Uploads full hierarchy of a local directory to a bucket. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadDirectory.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadDirectory.js,samples/README.md) + +__Usage:__ + + +`node files.js upload-directory ` + + +----- + + + + +### Upload Directory With Transfer Manager + +Uploads a directory in parallel utilizing transfer manager. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadDirectoryWithTransferManager.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadDirectoryWithTransferManager.js,samples/README.md) + +__Usage:__ + + +`node uploadFolderWithTransferManager.js ` + + +----- + + + + +### Upload Encrypted File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadEncryptedFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadEncryptedFile.js,samples/README.md) + +__Usage:__ + + +`node samples/uploadEncryptedFile.js` + + +----- + + + + +### Upload File + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFile.js,samples/README.md) + +__Usage:__ + + +`node samples/uploadFile.js` + + +----- + + + + +### Upload a File in Chunks With Transfer Manager + +Uploads a single file in chunks in parallel utilizing transfer manager. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFileInChunksWithTransferManager.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFileInChunksWithTransferManager.js,samples/README.md) + +__Usage:__ + + +`node uploadFileInChunksWithTransferManager.js ` + + +----- + + + + +### Upload File With Kms Key + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFileWithKmsKey.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFileWithKmsKey.js,samples/README.md) + +__Usage:__ + + +`node samples/uploadFileWithKmsKey.js` + + +----- + + + + +### Upload From Memory + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFromMemory.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFromMemory.js,samples/README.md) + +__Usage:__ + + +`node samples/uploadFromMemory.js` + + +----- + + + + +### Upload Many Files With Transfer Manager + +Uploads many files in parallel utilizing transfer manager. + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadManyFilesWithTransferManager.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadManyFilesWithTransferManager.js,samples/README.md) + +__Usage:__ + + +`node uploadManyFilesWithTransferManager.js ` + + +----- + + + + +### Upload Without Authentication + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadWithoutAuthentication.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadWithoutAuthentication.js,samples/README.md) + +__Usage:__ + + +`node samples/uploadWithoutAuthentication.js` + + +----- + + + + +### Upload Without Authentication Signed Url + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadWithoutAuthenticationSignedUrl.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadWithoutAuthenticationSignedUrl.js,samples/README.md) + +__Usage:__ + + +`node samples/uploadWithoutAuthenticationSignedUrl.js` + + +----- + + + + +### View Bucket Iam Members + +View the [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/viewBucketIamMembers.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/viewBucketIamMembers.js,samples/README.md) + +__Usage:__ + + +`node samples/viewBucketIamMembers.js` + + + + + + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/README.md +[product-docs]: https://cloud.google.com/storage diff --git a/storage/addBucketConditionalBinding.js b/storage/addBucketConditionalBinding.js new file mode 100644 index 0000000000..f5f6754a2b --- /dev/null +++ b/storage/addBucketConditionalBinding.js @@ -0,0 +1,100 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + roleName = 'roles/storage.objectViewer', + title = 'match-prefix', + description = 'Applies to objects matching a prefix', + expression = 'resource.name.startsWith("projects/_/buckets/bucket-name/objects/prefix-a-")', + members = 'user:test@example.com' +) { + members = members.split(','); + // [START storage_add_bucket_conditional_iam_binding] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The role to grant + // const roleName = 'roles/storage.objectViewer'; + + // The members to grant the new role to + // const members = [ + // 'user:jdoe@example.com', + // 'group:admins@example.com', + // ]; + + // Create a condition + // const title = 'Title'; + // const description = 'Description'; + // const expression = 'resource.name.startsWith(\"projects/_/buckets/bucket-name/objects/prefix-a-\")'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function addBucketConditionalBinding() { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); + + // Gets and updates the bucket's IAM policy + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + + // Set the policy's version to 3 to use condition in bindings. + policy.version = 3; + + // Adds the new roles to the bucket's IAM policy + policy.bindings.push({ + role: roleName, + members: members, + condition: { + title: title, + description: description, + expression: expression, + }, + }); + + // Updates the bucket's IAM policy + await bucket.iam.setPolicy(policy); + + console.log( + `Added the following member(s) with role ${roleName} to ${bucketName}:` + ); + + members.forEach(member => { + console.log(` ${member}`); + }); + + console.log('with condition:'); + console.log(` Title: ${title}`); + console.log(` Description: ${description}`); + console.log(` Expression: ${expression}`); + } + + addBucketConditionalBinding().catch(console.error); + // [END storage_add_bucket_conditional_iam_binding] +} +main(...process.argv.slice(2)); diff --git a/storage/addBucketDefaultOwnerAcl.js b/storage/addBucketDefaultOwnerAcl.js new file mode 100644 index 0000000000..7e598e6c00 --- /dev/null +++ b/storage/addBucketDefaultOwnerAcl.js @@ -0,0 +1,53 @@ 
+// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { + // [START storage_add_bucket_default_owner] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The email address of the user to add + // const userEmail = 'user-email-to-add'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function addBucketDefaultOwner() { + // Makes the user an owner in the default ACL of the bucket. You can use + // addAllUsers(), addDomain(), addProject(), addGroup(), and + // addAllAuthenticatedUsers() to grant access to different types of entities. + // You can also use "readers" and "writers" to grant different roles. 
+ await storage.bucket(bucketName).acl.default.owners.addUser(userEmail); + + console.log(`Added user ${userEmail} as an owner on bucket ${bucketName}.`); + } + + addBucketDefaultOwner().catch(console.error); + // [END storage_add_bucket_default_owner] +} +main(...process.argv.slice(2)); diff --git a/storage/addBucketIamMember.js b/storage/addBucketIamMember.js new file mode 100644 index 0000000000..9ab6595de9 --- /dev/null +++ b/storage/addBucketIamMember.js @@ -0,0 +1,73 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +function main( + bucketName = 'my-bucket', + roleName = 'roles/storage.objectViewer', + members = 'user:test@example.com' +) { + //including this logic so as to not use yargs + members = members.split(','); + // [START storage_add_bucket_iam_member] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The role to grant + // const roleName = 'roles/storage.objectViewer'; + + // The members to grant the new role to + // const members = [ + // 'user:jdoe@example.com', + // 'group:admins@example.com', + // ]; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function addBucketIamMember() { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); + + // For more information please read: + // https://cloud.google.com/storage/docs/access-control/iam + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + + // Adds the new roles to the bucket's IAM policy + policy.bindings.push({ + role: roleName, + members: members, + }); + + // Updates the bucket's IAM policy + await bucket.iam.setPolicy(policy); + + console.log( + `Added the following member(s) with role ${roleName} to ${bucketName}:` + ); + + members.forEach(member => { + console.log(` ${member}`); + }); + } + + addBucketIamMember().catch(console.error); + // [END storage_add_bucket_iam_member] +} +main(...process.argv.slice(2)); diff --git a/storage/addBucketLabel.js b/storage/addBucketLabel.js new file mode 100644 index 0000000000..bb814a831b --- /dev/null +++ b/storage/addBucketLabel.js @@ -0,0 +1,62 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Add Bucket Label. +// description: Adds bucket label. +// usage: node addBucketLabel.js + +function main( + bucketName = 'my-bucket', + labelKey = 'labelone', + labelValue = 'labelonevalue' +) { + // [START storage_add_bucket_label] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The key of the label to add + // const labelKey = 'label-key-to-add'; + + // The value of the label to add + // const labelValue = 'label-value-to-add'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + const labels = { + [labelKey]: labelValue, + }; + + async function addBucketLabel() { + await storage.bucket(bucketName).setMetadata({labels}); + console.log(`Added label to bucket ${bucketName}`); + } + + addBucketLabel().catch(console.error); + // [END storage_add_bucket_label] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/addBucketOwnerAcl.js b/storage/addBucketOwnerAcl.js new file mode 100644 index 0000000000..1600756d9d --- /dev/null +++ b/storage/addBucketOwnerAcl.js @@ -0,0 +1,53 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { + // [START storage_add_bucket_owner] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The email address of the user to add + // const userEmail = 'user-email-to-add'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function addBucketOwner() { + // Makes the user an owner of the bucket. You can use addAllUsers(), + // addDomain(), addProject(), addGroup(), and addAllAuthenticatedUsers() + // to grant access to different types of entities. You can also use "readers" + // and "writers" to grant different roles. + await storage.bucket(bucketName).acl.owners.addUser(userEmail); + + console.log(`Added user ${userEmail} as an owner on bucket ${bucketName}.`); + } + + addBucketOwner().catch(console.error); + // [END storage_add_bucket_owner] +} +main(...process.argv.slice(2)); diff --git a/storage/addBucketWebsiteConfiguration.js b/storage/addBucketWebsiteConfiguration.js new file mode 100644 index 0000000000..75d07a5ea7 --- /dev/null +++ b/storage/addBucketWebsiteConfiguration.js @@ -0,0 +1,66 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Bucket Website Configuration. +// description: Bucket Website Configuration. +// usage: node addBucketWebsiteConfiguration.js + +function main( + bucketName = 'my-bucket', + mainPageSuffix = 'http://example.com', + notFoundPage = 'http://example.com/404.html' +) { + // [START storage_define_bucket_website_configuration] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The name of the main page + // const mainPageSuffix = 'http://example.com'; + + // The Name of a 404 page + // const notFoundPage = 'http://example.com/404.html'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function addBucketWebsiteConfiguration() { + await storage.bucket(bucketName).setMetadata({ + website: { + mainPageSuffix, + notFoundPage, + }, + }); + + console.log( + `Static website bucket ${bucketName} is set up to use ${mainPageSuffix} as the index page and ${notFoundPage} as the 404 page` + ); + } + + addBucketWebsiteConfiguration().catch(console.error); + // [END storage_define_bucket_website_configuration] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/addFileOwnerAcl.js b/storage/addFileOwnerAcl.js new file mode 100644 index 0000000000..4029f56923 --- /dev/null +++ 
b/storage/addFileOwnerAcl.js @@ -0,0 +1,59 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + userEmail = 'jdobry@google.com' +) { + // [START storage_add_file_owner] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The name of the file to access + // const fileName = 'file.txt'; + + // The email address of the user to add + // const userEmail = 'user-email-to-add'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function addFileOwner() { + await storage + .bucket(bucketName) + .file(fileName) + .acl.owners.addUser(userEmail); + + console.log(`Added user ${userEmail} as an owner on file ${fileName}.`); + } + + addFileOwner().catch(console.error); + // [END storage_add_file_owner] +} +main(...process.argv.slice(2)); diff --git a/storage/bucketMetadata.js b/storage/bucketMetadata.js new file mode 100644 index 0000000000..eab6c80c59 --- /dev/null +++ b/storage/bucketMetadata.js @@ -0,0 +1,46 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Get Bucket Metadata. +// description: Get bucket metadata. 
+// usage: node bucketMetadata.js + +function main(bucketName = 'my-bucket') { + // [START storage_get_bucket_metadata] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getBucketMetadata() { + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Get Bucket Metadata + const [metadata] = await storage.bucket(bucketName).getMetadata(); + + console.log(JSON.stringify(metadata, null, 2)); + } + // [END storage_get_bucket_metadata] + getBucketMetadata().catch(console.error); +} + +main(...process.argv.slice(2)); diff --git a/storage/changeDefaultStorageClass.js b/storage/changeDefaultStorageClass.js new file mode 100644 index 0000000000..c32346c73a --- /dev/null +++ b/storage/changeDefaultStorageClass.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Change Bucket's Default Storage Class. +// description: Change Bucket's Default Storage Class. +// usage: node changeDefaultStorageClass.js + +function main(bucketName = 'my-bucket', storageClass = 'standard') { + // [START storage_change_default_storage_class] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The name of a storage class + // See the StorageClass documentation for other valid storage classes: + // https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/storage/StorageClass.html + // const storageClass = 'coldline'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function changeDefaultStorageClass() { + await storage.bucket(bucketName).setStorageClass(storageClass); + + console.log(`${bucketName} has been set to ${storageClass}`); + } + + changeDefaultStorageClass().catch(console.error); + // [END storage_change_default_storage_class] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/changeFileCSEKToCMEK.js b/storage/changeFileCSEKToCMEK.js new file mode 100644 index 0000000000..11c886ae5f --- /dev/null +++ b/storage/changeFileCSEKToCMEK.js @@ -0,0 +1,86 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage File Convert CSEK to CMEK. +// description: Storage File Convert CSEK to CMEK. 
+// usage: node changeFileCSEKToCMEK.js + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + encryptionKey = 'my-encription-key', + kmsKeyName = 'my-kms-key', + generationMatchPrecondition = 0 +) { + // [START storage_object_csek_to_cmek] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The Base64 encoded decryption key, which should be the same key originally + // used to encrypt the file + // const encryptionKey = 'TIbv/fjexq+VmtXzAlc63J4z5kFmWJ6NdAPQulQBT7g='; + + // The name of the KMS key to manage this file with + // const kmsKeyName = 'projects/your-project-id/locations/global/keyRings/your-key-ring/cryptoKeys/your-key'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function changeFileCSEKToCMEK() { + const rotateEncryptionKeyOptions = { + kmsKeyName, + // Optional: set a generation-match precondition to avoid potential race + // conditions and data corruptions. The request to copy is aborted if the + // object's generation number does not match your precondition. 
+ preconditionOpts: { + ifGenerationMatch: generationMatchPrecondition, + }, + }; + + console.log(rotateEncryptionKeyOptions); + + await storage + .bucket(bucketName) + .file(fileName, { + encryptionKey: Buffer.from(encryptionKey, 'base64'), + }) + .rotateEncryptionKey({ + rotateEncryptionKeyOptions, + }); + + console.log( + `file ${fileName} in bucket ${bucketName} is now managed by KMS key ${kmsKeyName} instead of customer-supplied encryption key` + ); + } + + changeFileCSEKToCMEK().catch(console.error); + // [END storage_object_csek_to_cmek] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/composeFile.js b/storage/composeFile.js new file mode 100644 index 0000000000..183385e951 --- /dev/null +++ b/storage/composeFile.js @@ -0,0 +1,79 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Combine files. +// description: Combine multiple files into one new file. +// usage: node composeFile.js + +function main( + bucketName = 'my-bucket', + firstFileName = 'file-one.txt', + secondFileName = 'file-two.txt', + destinationFileName = 'file-one-two.txt', + destinationGenerationMatchPrecondition = 0 +) { + // [START storage_compose_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of the first GCS file to compose + // const firstFileName = 'your-first-file-name'; + + // The ID of the second GCS file to compose + // const secondFileName = 'your-second-file-name'; + + // The ID to give the new composite file + // const destinationFileName = 'new-composite-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function composeFile() { + const bucket = storage.bucket(bucketName); + const sources = [firstFileName, secondFileName]; + + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to compose is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ const combineOptions = { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }; + await bucket.combine(sources, destinationFileName, combineOptions); + + console.log( + `New composite file ${destinationFileName} was created by combining ${firstFileName} and ${secondFileName}` + ); + } + + composeFile().catch(console.error); + // [END storage_compose_file] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/configureBucketCors.js b/storage/configureBucketCors.js new file mode 100644 index 0000000000..cd2893d00a --- /dev/null +++ b/storage/configureBucketCors.js @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Configure Bucket Cors. +// description: Configures bucket cors. +// usage: node configureBucketCors.js + +function main( + bucketName = 'my-bucket', + maxAgeSeconds = 3600, + method = 'POST', + origin = 'http://example.appspot.com', + responseHeader = 'content-type' +) { + // [START storage_cors_configuration] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The origin for this CORS config to allow requests from + // const origin = 'http://example.appspot.com'; + + // The response header to share across origins + // const responseHeader = 'Content-Type'; + + // The maximum amount of time the browser can make requests before it must + // repeat preflighted requests + // const maxAgeSeconds = 3600; + + // The name of the method + // See the HttpMethod documentation for other HTTP methods available: + // https://cloud.google.com/appengine/docs/standard/java/javadoc/com/google/appengine/api/urlfetch/HTTPMethod + // const method = 'GET'; + + async function configureBucketCors() { + await storage.bucket(bucketName).setCorsConfiguration([ + { + maxAgeSeconds, + method: [method], + origin: [origin], + responseHeader: [responseHeader], + }, + ]); + + console.log(`Bucket ${bucketName} was updated with a CORS config + to allow ${method} requests from ${origin} sharing + ${responseHeader} responses across origins`); + } + + configureBucketCors().catch(console.error); + // [END storage_cors_configuration] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/configureRetries.js b/storage/configureRetries.js new file mode 100644 index 0000000000..201fda22a0 --- /dev/null +++ b/storage/configureRetries.js @@ -0,0 +1,90 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +const {IdempotencyStrategy} = require('@google-cloud/storage'); + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_configure_retries] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage({ + retryOptions: { + // If this is false, requests will not retry and the parameters + // below will not affect retry behavior. + autoRetry: true, + // The multiplier by which to increase the delay time between the + // completion of failed requests, and the initiation of the subsequent + // retrying request. + retryDelayMultiplier: 3, + // The total time between an initial request getting sent and its timeout. + // After timeout, an error will be returned regardless of any retry attempts + // made during this time period. + totalTimeout: 500, + // The maximum delay time between requests. When this value is reached, + // retryDelayMultiplier will no longer be used to increase delay time. + maxRetryDelay: 60, + // The maximum number of automatic retries attempted before returning + // the error. 
+ maxRetries: 5, + // Will respect other retry settings and attempt to always retry + // conditionally idempotent operations, regardless of precondition + idempotencyStrategy: IdempotencyStrategy.RetryAlways, + }, + }); + console.log( + 'Functions are customized to be retried according to the following parameters:' + ); + console.log(`Auto Retry: ${storage.retryOptions.autoRetry}`); + console.log( + `Retry delay multiplier: ${storage.retryOptions.retryDelayMultiplier}` + ); + console.log(`Total timeout: ${storage.retryOptions.totalTimeout}`); + console.log(`Maximum retry delay: ${storage.retryOptions.maxRetryDelay}`); + console.log(`Maximum retries: ${storage.retryOptions.maxRetries}`); + console.log( + `Idempotency strategy: ${storage.retryOptions.idempotencyStrategy}` + ); + + async function deleteFileWithCustomizedRetrySetting() { + await storage.bucket(bucketName).file(fileName).delete(); + console.log(`File ${fileName} deleted with a customized retry strategy.`); + } + + deleteFileWithCustomizedRetrySetting(); + // [END storage_configure_retries] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/copyFile.js b/storage/copyFile.js new file mode 100644 index 0000000000..bc41947d8a --- /dev/null +++ b/storage/copyFile.js @@ -0,0 +1,82 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + srcBucketName = 'my-bucket', + srcFilename = 'test2.txt', + destBucketName = 'my-bucket', + destFileName = 'test3.txt', + destinationGenerationMatchPrecondition = 0 +) { + // [START storage_copy_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of the bucket the original file is in + // const srcBucketName = 'your-source-bucket'; + + // The ID of the GCS file to copy + // const srcFilename = 'your-file-name'; + + // The ID of the bucket to copy the file to + // const destBucketName = 'target-file-bucket'; + + // The ID of the GCS file to create + // const destFileName = 'target-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function copyFile() { + const copyDestination = storage.bucket(destBucketName).file(destFileName); + + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ const copyOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; + + // Copies the file to the other bucket + await storage + .bucket(srcBucketName) + .file(srcFilename) + .copy(copyDestination, copyOptions); + + console.log( + `gs://${srcBucketName}/${srcFilename} copied to gs://${destBucketName}/${destFileName}` + ); + } + + copyFile().catch(console.error); + // [END storage_copy_file] +} +main(...process.argv.slice(2)); diff --git a/storage/copyOldVersionOfFile.js b/storage/copyOldVersionOfFile.js new file mode 100644 index 0000000000..780fe0f1a9 --- /dev/null +++ b/storage/copyOldVersionOfFile.js @@ -0,0 +1,86 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Copy Old Version Of File. +// description: Copy Old Version Of File. +// usage: node copyOldVersionOfFile.js + +function main( + srcBucketName = 'my-bucket', + srcFilename = 'test2.txt', + destBucketName = 'my-bucket', + destFileName = 'test3.txt', + generation = 1, + destinationGenerationMatchPrecondition = 0 +) { + // [START storage_copy_file_archived_generation] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const srcBucketName = "your-unique-bucket-name"; + + // The ID of the GCS file to copy an old version of + // const srcFilename = "your-file-name"; + + // The generation of fileToCopy to copy + // const generation = 1579287380533984; + + // The ID of the bucket to copy the file to + // const destBucketName = 'target-file-bucket'; + + // What to name the new file with the old data from srcFilename + // const destFileName = "your-new-file"; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function copyOldVersionOfFile() { + // Copies the file to the other bucket + + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ const copyOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; + + await storage + .bucket(srcBucketName) + .file(srcFilename, { + generation, + }) + .copy(storage.bucket(destBucketName).file(destFileName), copyOptions); + + console.log( + `Generation ${generation} of file ${srcFilename} in bucket ${srcBucketName} was copied to ${destFileName} in bucket ${destBucketName}` + ); + } + + copyOldVersionOfFile().catch(console.error); + // [END storage_copy_file_archived_generation] +} +main(...process.argv.slice(2)); diff --git a/storage/createBucketWithDualRegion.js b/storage/createBucketWithDualRegion.js new file mode 100644 index 0000000000..cd145d8e2a --- /dev/null +++ b/storage/createBucketWithDualRegion.js @@ -0,0 +1,79 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +// sample-metadata: +// title: Create a Dual-Region Bucket +// description: Create a Dual-Region Bucket with provided location and regions. +// usage: node createBucketWithDualRegion.js + +function main( + bucketName = 'my-bucket', + location = 'US', + region1 = 'US-EAST1', + region2 = 'US-WEST1' +) { + // [START storage_create_bucket_dual_region] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The bucket's pair of regions. Case-insensitive. 
+ // See this documentation for other valid locations: + // https://cloud.google.com/storage/docs/locations + // const location = 'US'; + // const region1 = 'US-EAST1'; + // const region2 = 'US-WEST1'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + // The bucket in the sample below will be created in the project associated with this client. + // For more information, please see https://cloud.google.com/docs/authentication/production or https://googleapis.dev/nodejs/storage/latest/Storage.html + const storage = new Storage(); + + async function createDualRegionBucket() { + // For regions supporting dual-regions see: https://cloud.google.com/storage/docs/locations + const [bucket] = await storage.createBucket(bucketName, { + location, + customPlacementConfig: { + dataLocations: [region1, region2], + }, + }); + + console.log(`Created '${bucket.name}'`); + console.log(`- location: '${bucket.metadata.location}'`); + console.log(`- locationType: '${bucket.metadata.locationType}'`); + console.log( + `- customPlacementConfig: '${JSON.stringify( + bucket.metadata.customPlacementConfig + )}'` + ); + } + + createDualRegionBucket().catch(console.error); + // [END storage_create_bucket_dual_region] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/createBucketWithHierarchicalNamespace.js b/storage/createBucketWithHierarchicalNamespace.js new file mode 100644 index 0000000000..8641cacff1 --- /dev/null +++ b/storage/createBucketWithHierarchicalNamespace.js @@ -0,0 +1,65 @@ +/** + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +// sample-metadata: +// title: Create a hierarchical namespace enabled bucket +// description: Create a hierarchical namespace enabled bucket. +// usage: node createBucketWithHierarchicalNamespace.js + +function main(bucketName = 'my-bucket') { + // [START storage_create_bucket_hierarchical_namespace] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + // The bucket in the sample below will be created in the project associated with this client. 
+ // For more information, please see https://cloud.google.com/docs/authentication/production or https://googleapis.dev/nodejs/storage/latest/Storage.html + const storage = new Storage(); + + async function createBucketWithHierarchicalNamespace() { + const [bucket] = await storage.createBucket(bucketName, { + iamConfiguration: { + uniformBucketLevelAccess: { + enabled: true, + }, + }, + hierarchicalNamespace: { + enabled: true, + }, + }); + + console.log( + `Created '${bucket.name}' with hierarchical namespace enabled.` + ); + } + + createBucketWithHierarchicalNamespace().catch(console.error); + // [END storage_create_bucket_hierarchical_namespace] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/createBucketWithObjectRetention.js b/storage/createBucketWithObjectRetention.js new file mode 100644 index 0000000000..c51ddf7b48 --- /dev/null +++ b/storage/createBucketWithObjectRetention.js @@ -0,0 +1,56 @@ +/** + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Create a Bucket with object retention enabled. +// description: Create a Bucket with object retention enabled. 
+// usage: node createBucketWithObjectRetention.js + +function main(bucketName = 'my-bucket') { + // [START storage_create_bucket_with_object_retention] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + // The bucket in the sample below will be created in the project associated with this client. + // For more information, please see https://cloud.google.com/docs/authentication/production or https://googleapis.dev/nodejs/storage/latest/Storage.html + const storage = new Storage(); + + async function createBucketWithObjectRetention() { + const [bucket] = await storage.createBucket(bucketName, { + enableObjectRetention: true, + }); + + console.log( + `Created '${bucket.name}' with object retention enabled setting: ${bucket.metadata.objectRetention.mode}` + ); + } + + createBucketWithObjectRetention().catch(console.error); + // [END storage_create_bucket_with_object_retention] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/createBucketWithStorageClassAndLocation.js b/storage/createBucketWithStorageClassAndLocation.js new file mode 100644 index 0000000000..27ee7e70a7 --- /dev/null +++ b/storage/createBucketWithStorageClassAndLocation.js @@ -0,0 +1,75 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +// sample-metadata: +// title: Create Bucket With Storage Class and Location. +// description: Create Bucket With Storage Class and Location. +// usage: node createBucketWithStorageClassAndLocation.js + +function main( + bucketName = 'my-bucket', + storageClass = 'coldline', + location = 'ASIA' +) { + // [START storage_create_bucket_class_location] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The name of a storage class + // See the StorageClass documentation for other valid storage classes: + // https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/storage/StorageClass.html + // const storageClass = 'coldline'; + + // The name of a location + // See this documentation for other valid locations: + // http://g.co/cloud/storage/docs/locations#location-mr + // const location = 'ASIA'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + // The bucket in the sample below will be created in the project associated with this client. 
+ // For more information, please see https://cloud.google.com/docs/authentication/production or https://googleapis.dev/nodejs/storage/latest/Storage.html + const storage = new Storage(); + + async function createBucketWithStorageClassAndLocation() { + // For default values see: https://cloud.google.com/storage/docs/locations and + // https://cloud.google.com/storage/docs/storage-classes + const [bucket] = await storage.createBucket(bucketName, { + location, + [storageClass]: true, + }); + + console.log( + `${bucket.name} created with ${storageClass} class in ${location}` + ); + } + + createBucketWithStorageClassAndLocation().catch(console.error); + // [END storage_create_bucket_class_location] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/createBucketWithTurboReplication.js b/storage/createBucketWithTurboReplication.js new file mode 100644 index 0000000000..566f0c4380 --- /dev/null +++ b/storage/createBucketWithTurboReplication.js @@ -0,0 +1,69 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */ + +function main(bucketName = 'my-bucket', location = 'NAM4') { + // [START storage_create_bucket_turbo_replication] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The name of a dual-region location + // See this documentation for other valid locations: + // https://cloud.google.com/storage/docs/locations#location-dr + // const location = 'NAM4'; + + // Flag to enable turbo replication for this bucket + const rpo = 'ASYNC_TURBO'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + // The bucket in the sample below will be created in the project associated with this client. + // For more information, please see https://cloud.google.com/docs/authentication/production or https://googleapis.dev/nodejs/storage/latest/Storage.html + const storage = new Storage(); + + async function createBucketWithTurboReplication() { + // For default values see: https://cloud.google.com/storage/docs/locations and + // https://cloud.google.com/storage/docs/storage-classes + const [bucket] = await storage.createBucket(bucketName, { + location, + rpo, + }); + + console.log( + `${bucket.name} created with the recovery point objective (RPO) set to ${rpo} in ${location}.` + ); + } + + createBucketWithTurboReplication(); + // [END storage_create_bucket_turbo_replication] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/createNewBucket.js b/storage/createNewBucket.js new file mode 100644 index 0000000000..e3afa22edf --- /dev/null +++ b/storage/createNewBucket.js @@ -0,0 +1,52 @@ +/** + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_create_bucket] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + // The bucket in the sample below will be created in the project associated with this client. + // For more information, please see https://cloud.google.com/docs/authentication/production or https://googleapis.dev/nodejs/storage/latest/Storage.html + const storage = new Storage(); + + async function createBucket() { + // Creates a new bucket in the Asia region with the coldline default storage + // class. Leave the second argument blank for default settings. 
+ // + // For default values see: https://cloud.google.com/storage/docs/locations and + // https://cloud.google.com/storage/docs/storage-classes + + const [bucket] = await storage.createBucket(bucketName, { + location: 'ASIA', + storageClass: 'COLDLINE', + }); + + console.log(`Bucket ${bucket.name} created.`); + } + + createBucket().catch(console.error); + // [END storage_create_bucket] +} + +main(...process.argv.slice(2)); diff --git a/storage/createNotification.js b/storage/createNotification.js new file mode 100644 index 0000000000..97013ff534 --- /dev/null +++ b/storage/createNotification.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ +const uuid = require('uuid'); + +function main( + bucketName = 'my-bucket', + topic = `nodejs-storage-samples-${uuid.v4()}` +) { + // [START storage_create_bucket_notifications] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The name of a topic + // const topic = 'my-topic'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function createNotification() { + // Creates a notification + await storage.bucket(bucketName).createNotification(topic); + + console.log('Notification subscription created.'); + } + + createNotification().catch(console.error); + // [END storage_create_bucket_notifications] +} +main(...process.argv.slice(2)); diff --git a/storage/deleteBucket.js b/storage/deleteBucket.js new file mode 100644 index 0000000000..20c2036d75 --- /dev/null +++ b/storage/deleteBucket.js @@ -0,0 +1,46 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_delete_bucket] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function deleteBucket() { + await storage.bucket(bucketName).delete(); + console.log(`Bucket ${bucketName} deleted`); + } + + deleteBucket().catch(console.error); + // [END storage_delete_bucket] +} + +main(...process.argv.slice(2)); diff --git a/storage/deleteFile.js b/storage/deleteFile.js new file mode 100644 index 0000000000..4747d3a322 --- /dev/null +++ b/storage/deleteFile.js @@ -0,0 +1,63 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + generationMatchPrecondition = 0 +) { + // [START storage_delete_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruption. The request to delete is aborted if the object's + // generation number does not match your precondition. For an + // object that does not yet exist, set the ifGenerationMatch precondition to 0. + // If the object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + const deleteOptions = { + ifGenerationMatch: generationMatchPrecondition, + }; + async function deleteFile() { + await storage.bucket(bucketName).file(fileName).delete(deleteOptions); + + console.log(`gs://${bucketName}/${fileName} deleted`); + } + + deleteFile().catch(console.error); + // [END storage_delete_file] +} +main(...process.argv.slice(2)); diff --git a/storage/deleteNotification.js b/storage/deleteNotification.js new file mode 100644 index 0000000000..07db6b77a7 --- /dev/null +++ b/storage/deleteNotification.js @@ -0,0 +1,50 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', notificationId = '1') { + // [START storage_delete_bucket_notification] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of the notification + // const notificationId = '1'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function deleteNotification() { + // Deletes the notification from the bucket + await storage.bucket(bucketName).notification(notificationId).delete(); + + console.log(`Notification ${notificationId} deleted.`); + } + + deleteNotification().catch(console.error); + // [END storage_delete_bucket_notification] +} +main(...process.argv.slice(2)); diff --git a/storage/deleteOldVersionOfFile.js b/storage/deleteOldVersionOfFile.js new file mode 100644 index 0000000000..39434ecd14 --- /dev/null +++ b/storage/deleteOldVersionOfFile.js @@ -0,0 +1,59 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +// sample-metadata: +// title: Delete Old Version Of File. +// description: Delete Old Version Of File. +// usage: node deleteOldVersionOfFile.js + +function main(bucketName = 'my-bucket', fileName = 'test.txt', generation = 1) { + // [START storage_delete_file_archived_generation] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The generation of fileName to delete + // const generation = 1579287380533984; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function deleteOldVersionOfFile() { + // Deletes the file from the bucket with given version + await storage + .bucket(bucketName) + .file(fileName, { + generation, + }) + .delete(); + + console.log( + `Generation ${generation} of file ${fileName} was deleted from ${bucketName}` + ); + } + + deleteOldVersionOfFile().catch(console.error); + // [END storage_delete_file_archived_generation] +} +main(...process.argv.slice(2)); diff --git a/storage/disableBucketLifecycleManagement.js b/storage/disableBucketLifecycleManagement.js new file mode 100644 index 0000000000..8ef7077e8b --- /dev/null +++ b/storage/disableBucketLifecycleManagement.js @@ -0,0 +1,46 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to disable Object Lifecycle Management for + * a bucket. + * + * For more information, see the documentation at https://cloud.google.com/storage/docs/lifecycle. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_disable_bucket_lifecycle_management] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function disableBucketLifecycleManagement() { + await storage.bucket(bucketName).setMetadata({lifecycle: null}); + + console.log(`Lifecycle management is disabled for bucket ${bucketName}`); + } + + disableBucketLifecycleManagement().catch(console.error); + // [END storage_disable_bucket_lifecycle_management] +} + +main(...process.argv.slice(2)); diff --git a/storage/disableBucketVersioning.js b/storage/disableBucketVersioning.js new file mode 100644 index 0000000000..4a1a438292 --- /dev/null +++ b/storage/disableBucketVersioning.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Disable Bucket Versioning. 
+// description: Disables bucket versioning. +// usage: node disableBucketVersioning.js + +function main(bucketName = 'my-bucket') { + // [START storage_disable_versioning] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function disableBucketVersioning() { + await storage.bucket(bucketName).setMetadata({ + versioning: { + enabled: false, + }, + }); + + console.log(`Versioning is disabled for bucket ${bucketName}`); + } + + disableBucketVersioning().catch(console.error); + // [END storage_disable_versioning] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/disableDefaultEventBasedHold.js b/storage/disableDefaultEventBasedHold.js new file mode 100644 index 0000000000..09310facd2 --- /dev/null +++ b/storage/disableDefaultEventBasedHold.js @@ -0,0 +1,48 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. 
+ * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main(bucketName = 'my-bucket') { + // [START storage_disable_default_event_based_hold] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function disableDefaultEventBasedHold() { + // Disables a default event-based hold for a bucket. + await storage.bucket(bucketName).setMetadata({ + defaultEventBasedHold: false, + }); + console.log(`Default event-based hold was disabled for ${bucketName}.`); + } + + disableDefaultEventBasedHold().catch(console.error); + // [END storage_disable_default_event_based_hold] +} +main(...process.argv.slice(2)); diff --git a/storage/disableRequesterPays.js b/storage/disableRequesterPays.js new file mode 100644 index 0000000000..c587615ed3 --- /dev/null +++ b/storage/disableRequesterPays.js @@ -0,0 +1,50 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. 
+ * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_disable_requester_pays] + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function disableRequesterPays() { + // Disables requester-pays requests + await storage.bucket(bucketName).disableRequesterPays(); + + console.log( + `Requester-pays requests have been disabled for bucket ${bucketName}` + ); + } + + disableRequesterPays().catch(console.error); + // [END storage_disable_requester_pays] +} +main(...process.argv.slice(2)); diff --git a/storage/disableSoftDelete.js b/storage/disableSoftDelete.js new file mode 100644 index 0000000000..4ea98c5132 --- /dev/null +++ b/storage/disableSoftDelete.js @@ -0,0 +1,47 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_disable_soft_delete] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function disableSoftDelete() { + const options = { + softDeletePolicy: { + retentionDurationSeconds: 0, + }, + }; + + const [metadata] = await storage.bucket(bucketName).setMetadata(options); + + console.log(`Bucket ${metadata.name} soft delete policy was disabled`); + } + + disableSoftDelete().catch(console.error); + // [END storage_disable_soft_delete] +} + +main(...process.argv.slice(2)); diff --git a/storage/disableUniformBucketLevelAccess.js b/storage/disableUniformBucketLevelAccess.js new file mode 100644 index 0000000000..8a1cfd2495 --- /dev/null +++ b/storage/disableUniformBucketLevelAccess.js @@ -0,0 +1,52 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_disable_uniform_bucket_level_access] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + async function disableUniformBucketLevelAccess() { + // Disables uniform bucket-level access for the bucket + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + uniformBucketLevelAccess: { + enabled: false, + }, + }, + }); + + console.log(`Uniform bucket-level access was disabled for ${bucketName}.`); + } + + disableUniformBucketLevelAccess().catch(console.error); + // [END storage_disable_uniform_bucket_level_access] +} +main(...process.argv.slice(2)); diff --git a/storage/downloadByteRange.js b/storage/downloadByteRange.js new file mode 100644 index 0000000000..bace089b39 --- /dev/null +++ b/storage/downloadByteRange.js @@ -0,0 +1,81 @@ +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */ +const path = require('path'); +const cwd = path.join(__dirname, '..'); + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + startByte = 0, + endByte = 20, + destFileName = path.join(cwd, 'downloaded.txt') +) { + // [START storage_download_byte_range] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The starting byte at which to begin the download + // const startByte = 0; + + // The ending byte at which to end the download + // const endByte = 20; + + // The path to which the file should be downloaded + // const destFileName = '/local/path/to/file.txt'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function downloadByteRange() { + const options = { + destination: destFileName, + start: startByte, + end: endByte, + }; + + // Downloads the file from the starting byte to the ending byte specified in options + await storage.bucket(bucketName).file(fileName).download(options); + + console.log( + `gs://${bucketName}/${fileName} downloaded to ${destFileName} from byte ${startByte} to byte ${endByte}.` + ); + } + + downloadByteRange(); + // [END storage_download_byte_range] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/downloadEncryptedFile.js b/storage/downloadEncryptedFile.js new file mode 100644 index 0000000000..76ffa1b969 --- /dev/null +++ b/storage/downloadEncryptedFile.js @@ -0,0 +1,73 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on encrypted + * files with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +const path = require('path'); + +function main( + bucketName = 'my-bucket', + srcFileName = path.join(__dirname, '../resources', 'test.txt'), + destFileName = 'test.txt', + encryptionKey = process.env.GOOGLE_CLOUD_KMS_KEY_US +) { + // [START storage_download_encrypted_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const srcFileName = 'your-file-name'; + + // The path to which the file should be downloaded + // const destFileName = '/local/path/to/file.txt'; + + // The Base64 encoded decryption key, which should be the same key originally + // used to encrypt the file + // const encryptionKey = 'TIbv/fjexq+VmtXzAlc63J4z5kFmWJ6NdAPQulQBT7g='; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function downloadEncryptedFile() { + const options = { + destination: destFileName, + }; + + // Decrypts and downloads the file. This can only be done with the key used + // to encrypt and upload the file. 
+ await storage + .bucket(bucketName) + .file(srcFileName) + .setEncryptionKey(Buffer.from(encryptionKey, 'base64')) + .download(options); + + console.log(`File ${srcFileName} downloaded to ${destFileName}`); + } + + downloadEncryptedFile().catch(console.error); + // [END storage_download_encrypted_file] +} +main(...process.argv.slice(2)); diff --git a/storage/downloadFile.js b/storage/downloadFile.js new file mode 100644 index 0000000000..3ea9d8f314 --- /dev/null +++ b/storage/downloadFile.js @@ -0,0 +1,65 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ +const path = require('path'); +const cwd = path.join(__dirname, '..'); + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + destFileName = path.join(cwd, 'downloaded.txt') +) { + // [START storage_download_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The path to which the file should be downloaded + // const destFileName = '/local/path/to/file.txt'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function downloadFile() { + const options = { + destination: destFileName, + }; + + // Downloads the file + await storage.bucket(bucketName).file(fileName).download(options); + + console.log( + `gs://${bucketName}/${fileName} downloaded to ${destFileName}.` + ); + } + + downloadFile().catch(console.error); + // [END storage_download_file] +} +main(...process.argv.slice(2)); diff --git a/storage/downloadFileInChunksWithTransferManager.js b/storage/downloadFileInChunksWithTransferManager.js new file mode 100644 index 0000000000..5fe1e64cd6 --- /dev/null +++ b/storage/downloadFileInChunksWithTransferManager.js @@ -0,0 +1,77 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +const path = require('path'); +const cwd = path.join(__dirname, '..'); + +// sample-metadata: +// title: Download a File in Chunks With Transfer Manager +// description: Downloads a single file in in chunks in parallel utilizing transfer manager. 
+// usage: node downloadFileInChunksWithTransferManager.js + +function main( + bucketName = 'my-bucket', + fileName = 'file1.txt', + destFileName = path.join(cwd, fileName), + chunkSize = 1024 +) { + // [START storage_transfer_manager_download_chunks_concurrently] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of the GCS file to download + // const fileName = 'your-file-name'; + + // The path to which the file should be downloaded + // const destFileName = '/local/path/to/file.txt'; + + // The size of each chunk to be downloaded + // const chunkSize = 1024; + + // Imports the Google Cloud client library + const {Storage, TransferManager} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Creates a transfer manager client + const transferManager = new TransferManager(storage.bucket(bucketName)); + + async function downloadFileInChunksWithTransferManager() { + // Downloads the files + await transferManager.downloadFileInChunks(fileName, { + destination: destFileName, + chunkSizeBytes: chunkSize, + }); + + console.log( + `gs://${bucketName}/${fileName} downloaded to ${destFileName}.` + ); + } + + downloadFileInChunksWithTransferManager().catch(console.error); + // [END storage_transfer_manager_download_chunks_concurrently] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/downloadFileUsingRequesterPays.js b/storage/downloadFileUsingRequesterPays.js new file mode 100644 index 0000000000..8f449e075b --- /dev/null +++ b/storage/downloadFileUsingRequesterPays.js @@ -0,0 +1,72 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +const uuid = require('uuid'); +const path = require('path'); + +function main( + projectId = 'cloud-devrel-public-resources', + bucketName = `nodejs-storage-samples-${uuid.v4()}`, + srcFileName = 'test.txt', + destFileName = path.join(__dirname, `test_${uuid.v4()}.txt`) +) { + // [START storage_download_file_requester_pays] + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The project ID to bill + // const projectId = 'my-billable-project-id'; + + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const srcFileName = 'your-file-name'; + + // The path to which the file should be downloaded + // const destFileName = '/local/path/to/file.txt'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function downloadFileUsingRequesterPays() { + const options = { + destination: destFileName, + userProject: projectId, + }; + + // Downloads the file + await storage.bucket(bucketName).file(srcFileName).download(options); + + console.log( + `gs://${bucketName}/${srcFileName} downloaded to ${destFileName} using requester-pays requests` + ); + } + + downloadFileUsingRequesterPays().catch(console.error); + // [END storage_download_file_requester_pays] +} +main(...process.argv.slice(2)); diff --git a/storage/downloadFolderWithTransferManager.js b/storage/downloadFolderWithTransferManager.js new file mode 100644 index 0000000000..9087f1f712 --- /dev/null +++ b/storage/downloadFolderWithTransferManager.js @@ -0,0 +1,60 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// sample-metadata: +// title: Download Folder With Transfer Manager +// description: Downloads a folder in parallel utilizing transfer manager. 
+// usage: node downloadFolderWithTransferManager.js + +function main(bucketName = 'my-bucket', folderName = 'my-folder') { + // [START storage_transfer_manager_download_folder] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of the GCS folder to download. The folder will be downloaded to the local path of the executing code. + // const folderName = 'your-folder-name'; + + // Imports the Google Cloud client library + const {Storage, TransferManager} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Creates a transfer manager client + const transferManager = new TransferManager(storage.bucket(bucketName)); + + async function downloadFolderWithTransferManager() { + // Downloads the folder + await transferManager.downloadManyFiles(folderName); + + console.log( + `gs://${bucketName}/${folderName} downloaded to ${folderName}.` + ); + } + + downloadFolderWithTransferManager().catch(console.error); + // [END storage_transfer_manager_download_folder] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/downloadIntoMemory.js b/storage/downloadIntoMemory.js new file mode 100644 index 0000000000..c80fd9a863 --- /dev/null +++ b/storage/downloadIntoMemory.js @@ -0,0 +1,56 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_file_download_into_memory] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function downloadIntoMemory() { + // Downloads the file into a buffer in memory. + const contents = await storage.bucket(bucketName).file(fileName).download(); + + console.log( + `Contents of gs://${bucketName}/${fileName} are ${contents.toString()}.` + ); + } + + downloadIntoMemory().catch(console.error); + // [END storage_file_download_into_memory] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/downloadManyFilesWithTransferManager.js b/storage/downloadManyFilesWithTransferManager.js new file mode 100644 index 0000000000..7a464ad4c6 --- /dev/null +++ b/storage/downloadManyFilesWithTransferManager.js @@ -0,0 +1,67 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// sample-metadata: +// title: Download Many Files With Transfer Manager +// description: Downloads many files in parallel utilizing transfer manager. +// usage: node downloadManyFilesWithTransferManager.js + +function main( + bucketName = 'my-bucket', + firstFileName = 'file1.txt', + secondFileName = 'file2.txt' +) { + // [START storage_transfer_manager_download_many] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of the first GCS file to download + // const firstFileName = 'your-first-file-name'; + + // The ID of the second GCS file to download + // const secondFileName = 'your-second-file-name; + + // Imports the Google Cloud client library + const {Storage, TransferManager} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Creates a transfer manager client + const transferManager = new TransferManager(storage.bucket(bucketName)); + + async function downloadManyFilesWithTransferManager() { + // Downloads the files + await transferManager.downloadManyFiles([firstFileName, secondFileName]); + + for (const fileName of [firstFileName, secondFileName]) { + console.log(`gs://${bucketName}/${fileName} downloaded to ${fileName}.`); + } + } + + downloadManyFilesWithTransferManager().catch(console.error); + // [END storage_transfer_manager_download_many] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); 
+main(...process.argv.slice(2)); diff --git a/storage/downloadPublicFile.js b/storage/downloadPublicFile.js new file mode 100644 index 0000000000..426b9a7fbd --- /dev/null +++ b/storage/downloadPublicFile.js @@ -0,0 +1,65 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Download Public File. +// description: Download Public File. +// usage: node downloadPublicFile.js + +const path = require('path'); +const cwd = path.join(__dirname, '..'); + +function main( + bucketName = 'my-bucket', + srcFileName = 'test.txt', + destFileName = path.join(cwd, 'downloaded.txt') +) { + // [START storage_download_public_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const srcFilename = 'your-file-name'; + + // The path to which the file should be downloaded + // const destFileName = '/local/path/to/file.txt'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function downloadPublicFile() { + const options = { + destination: destFileName, + }; + + // Download public file. 
+ await storage.bucket(bucketName).file(srcFileName).download(options); + + console.log( + `Downloaded public file ${srcFileName} from bucket name ${bucketName} to ${destFileName}` + ); + } + + downloadPublicFile().catch(console.error); + // [END storage_download_public_file] +} +main(...process.argv.slice(2)); diff --git a/storage/enableBucketLifecycleManagement.js b/storage/enableBucketLifecycleManagement.js new file mode 100644 index 0000000000..ce11309860 --- /dev/null +++ b/storage/enableBucketLifecycleManagement.js @@ -0,0 +1,55 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to enable Object Lifecycle Management for + * a bucket. + * + * For more information, see the documentation at https://cloud.google.com/storage/docs/lifecycle. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_enable_bucket_lifecycle_management] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function enableBucketLifecycleManagement() { + const [metadata] = await storage.bucket(bucketName).addLifecycleRule({ + action: { + type: 'Delete', + }, + condition: {age: 100}, + }); + + console.log( + `Lifecycle management is enabled for bucket ${bucketName} and the rules are:` + ); + + console.log(metadata.lifecycle.rule); + } + + enableBucketLifecycleManagement().catch(console.error); + // [END storage_enable_bucket_lifecycle_management] +} + +main(...process.argv.slice(2)); diff --git a/storage/enableBucketVersioning.js b/storage/enableBucketVersioning.js new file mode 100644 index 0000000000..66d81197cc --- /dev/null +++ b/storage/enableBucketVersioning.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Enable Bucket Versioning. +// description: Enables bucket versioning. +// usage: node enableBucketVersioning.js + +function main(bucketName = 'my-bucket') { + // [START storage_enable_versioning] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function enableBucketVersioning() { + await storage.bucket(bucketName).setMetadata({ + versioning: { + enabled: true, + }, + }); + + console.log(`Versioning is enabled for bucket ${bucketName}`); + } + + enableBucketVersioning().catch(console.error); + // [END storage_enable_versioning] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/enableDefaultEventBasedHold.js b/storage/enableDefaultEventBasedHold.js new file mode 100644 index 0000000000..406970fdd9 --- /dev/null +++ b/storage/enableDefaultEventBasedHold.js @@ -0,0 +1,50 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main(bucketName = 'my-bucket') { + // [START storage_enable_default_event_based_hold] + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function enableDefaultEventBasedHold() { + // Enables a default event-based hold for the bucket. + await storage.bucket(bucketName).setMetadata({ + defaultEventBasedHold: true, + }); + + console.log(`Default event-based hold was enabled for ${bucketName}.`); + } + + enableDefaultEventBasedHold().catch(console.error); + // [END storage_enable_default_event_based_hold] +} +main(...process.argv.slice(2)); diff --git a/storage/enableDefaultKMSKey.js b/storage/enableDefaultKMSKey.js new file mode 100644 index 0000000000..adc3b3505b --- /dev/null +++ b/storage/enableDefaultKMSKey.js @@ -0,0 +1,59 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + defaultKmsKeyName = process.env.GOOGLE_CLOUD_KMS_KEY_ASIA +) { + // [START storage_set_bucket_default_kms_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The name of the KMS-key to use as a default + // const defaultKmsKeyName = 'my-key'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function enableDefaultKMSKey() { + await storage.bucket(bucketName).setMetadata({ + encryption: { + defaultKmsKeyName, + }, + }); + + console.log( + `Default KMS key for ${bucketName} was set to ${defaultKmsKeyName}.` + ); + } + + enableDefaultKMSKey().catch(console.error); + // [END storage_set_bucket_default_kms_key] +} + +main(...process.argv.slice(2)); diff --git a/storage/enableRequesterPays.js b/storage/enableRequesterPays.js new file mode 100644 index 0000000000..4425a1a650 --- /dev/null +++ b/storage/enableRequesterPays.js @@ -0,0 +1,48 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_enable_requester_pays] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function enableRequesterPays() { + await storage.bucket(bucketName).enableRequesterPays(); + + console.log( + `Requester-pays requests have been enabled for bucket ${bucketName}` + ); + } + + enableRequesterPays().catch(console.error); + // [END storage_enable_requester_pays] +} +main(...process.argv.slice(2)); diff --git a/storage/enableUniformBucketLevelAccess.js b/storage/enableUniformBucketLevelAccess.js new file mode 100644 index 0000000000..3bbc7c49aa --- /dev/null +++ b/storage/enableUniformBucketLevelAccess.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_enable_uniform_bucket_level_access] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Enables uniform bucket-level access for the bucket + async function enableUniformBucketLevelAccess() { + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + uniformBucketLevelAccess: { + enabled: true, + }, + }, + }); + + console.log(`Uniform bucket-level access was enabled for ${bucketName}.`); + } + + enableUniformBucketLevelAccess().catch(console.error); + // [END storage_enable_uniform_bucket_level_access] +} + +main(...process.argv.slice(2)); diff --git a/storage/fileChangeStorageClass.js b/storage/fileChangeStorageClass.js new file mode 100644 index 0000000000..4677c2a900 --- /dev/null +++ b/storage/fileChangeStorageClass.js @@ -0,0 +1,77 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Change File's Storage Class. +// description: Change File's Storage Class. 
+// usage: node fileChangeStorageClass.js + +function main( + bucketName = 'my-bucket', + fileName = 'file.txt', + storageClass = 'standard', + generationMatchPrecondition = 0 +) { + // [START storage_change_file_storage_class] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The name of a storage class + // See the StorageClass documentation for other valid storage classes: + // https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/storage/StorageClass.html + // const storageClass = 'coldline'; + + async function fileChangeStorageClass() { + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ const setStorageClassOptions = { + ifGenerationMatch: generationMatchPrecondition, + }; + + await storage + .bucket(bucketName) + .file(fileName) + .setStorageClass(storageClass, setStorageClassOptions); + + console.log(`${fileName} has been set to ${storageClass}`); + } + + fileChangeStorageClass().catch(console.error); + // [END storage_change_file_storage_class] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/fileSetMetadata.js b/storage/fileSetMetadata.js new file mode 100644 index 0000000000..d7f0adf13f --- /dev/null +++ b/storage/fileSetMetadata.js @@ -0,0 +1,86 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Set File Metadata. +// description: Set file metadata. +// usage: node fileSetMetadata.js + +function main( + bucketName = 'my-bucket', + fileName = 'file.txt', + metagenerationMatchPrecondition = 0 +) { + // [START storage_set_metadata] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + async function setFileMetadata() { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; + + // Set file metadata. + const [metadata] = await storage + .bucket(bucketName) + .file(fileName) + .setMetadata( + { + // Predefined metadata for server e.g. 'cacheControl', 'contentDisposition', + // 'contentEncoding', 'contentLanguage', 'contentType' + contentDisposition: + 'attachment; filename*=utf-8\'\'"anotherImage.jpg"', + contentType: 'image/jpeg', + + // A note or actionable items for user e.g. uniqueId, object description, + // or other useful information. + metadata: { + description: 'file description...', + modified: '1900-01-01', + }, + }, + options + ); + + console.log( + 'Updated metadata for object', + fileName, + 'in bucket ', + bucketName + ); + console.log(metadata); + } + + setFileMetadata().catch(console.error); + // [END storage_set_metadata] +} + +main(...process.argv.slice(2)); diff --git a/storage/generateEncryptionKey.js b/storage/generateEncryptionKey.js new file mode 100644 index 0000000000..2e3c331f65 --- /dev/null +++ b/storage/generateEncryptionKey.js @@ -0,0 +1,43 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on encrypted + * files with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main() { + // [START storage_generate_encryption_key] + const crypto = require('crypto'); + + function generateEncryptionKey() { + /** + * Generates a 256 bit (32 byte) AES encryption key and prints the base64 + * representation. + * + * This is included for demonstration purposes. You should generate your own + * key. Please remember that encryption keys should be handled with a + * comprehensive security policy. + */ + const buffer = crypto.randomBytes(32); + const encodedKey = buffer.toString('base64'); + console.log(`Base 64 encoded encryption key: ${encodedKey}`); + } + generateEncryptionKey(); + // [END storage_generate_encryption_key] +} +main(...process.argv.slice(2)); diff --git a/storage/generateSignedUrl.js b/storage/generateSignedUrl.js new file mode 100644 index 0000000000..3455d51bb3 --- /dev/null +++ b/storage/generateSignedUrl.js @@ -0,0 +1,60 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_generate_signed_url] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function generateSignedUrl() { + // These options will allow temporary read access to the file + const options = { + version: 'v2', // defaults to 'v2' if missing. 
+ action: 'read', + expires: Date.now() + 1000 * 60 * 60, // one hour + }; + + // Get a v2 signed URL for the file + const [url] = await storage + .bucket(bucketName) + .file(fileName) + .getSignedUrl(options); + + console.log(`The signed url for ${fileName} is ${url}.`); + } + + generateSignedUrl().catch(console.error); + // [END storage_generate_signed_url] +} +main(...process.argv.slice(2)); diff --git a/storage/generateV4ReadSignedUrl.js b/storage/generateV4ReadSignedUrl.js new file mode 100644 index 0000000000..de55d6901b --- /dev/null +++ b/storage/generateV4ReadSignedUrl.js @@ -0,0 +1,65 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_generate_signed_url_v4] + /** + * TODO(developer): Uncomment the following lines before running the sample. + * Note: when creating a signed URL, unless running in a GCP environment, + * a service account must be used for authorization. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The full path of your file inside the GCS bucket, e.g. 
'yourFile.jpg' or 'folder1/folder2/yourFile.jpg' + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function generateV4ReadSignedUrl() { + // These options will allow temporary read access to the file + const options = { + version: 'v4', + action: 'read', + expires: Date.now() + 15 * 60 * 1000, // 15 minutes + }; + + // Get a v4 signed URL for reading the file + const [url] = await storage + .bucket(bucketName) + .file(fileName) + .getSignedUrl(options); + + console.log('Generated GET signed URL:'); + console.log(url); + console.log('You can use this URL with any user agent, for example:'); + console.log(`curl '${url}'`); + } + + generateV4ReadSignedUrl().catch(console.error); + // [END storage_generate_signed_url_v4] +} +main(...process.argv.slice(2)); diff --git a/storage/generateV4SignedPolicy.js b/storage/generateV4SignedPolicy.js new file mode 100644 index 0000000000..f52a5d4aab --- /dev/null +++ b/storage/generateV4SignedPolicy.js @@ -0,0 +1,72 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */
+
+function main(bucketName = 'my-bucket', fileName = 'test.txt') {
+  // [START storage_generate_signed_post_policy_v4]
+  /**
+   * TODO(developer): Uncomment the following lines before running the sample.
+   */
+  // The ID of your GCS bucket
+  // const bucketName = 'your-unique-bucket-name';
+
+  // The ID of your GCS file
+  // const fileName = 'your-file-name';
+
+  // Imports the Google Cloud client library
+  const {Storage} = require('@google-cloud/storage');
+
+  // Creates a client
+  const storage = new Storage();
+
+  async function generateV4SignedPolicy() {
+    const bucket = storage.bucket(bucketName);
+    const file = bucket.file(fileName);
+
+    // These options will allow temporary uploading of a file
+    // through an HTML form.
+    const expires = Date.now() + 10 * 60 * 1000; // 10 minutes
+    const options = {
+      expires,
+      fields: {'x-goog-meta-test': 'data'},
+    };
+
+    // Get a v4 signed policy for uploading file
+    const [response] = await file.generateSignedPostPolicyV4(options);
+
+    // Create an HTML form with the provided policy
+    let output = `<form action='${response.url}' method='POST' enctype="multipart/form-data">\n`;
+    // Include all fields returned in the HTML form as they're required
+    for (const name of Object.keys(response.fields)) {
+      const value = response.fields[name];
+      output += `  <input name='${name}' value='${value}' type='hidden'/>\n`;
+    }
+    output += '  <input type="file" name="file"/><br />\n';
+    output += '  <input type="submit" value="Upload File"/><br />\n';
+    output += '</form>';
+
+    console.log(output);
+  }
+
+  generateV4SignedPolicy().catch(console.error);
+  // [END storage_generate_signed_post_policy_v4]
+}
+main(...process.argv.slice(2));
diff --git a/storage/generateV4UploadSignedUrl.js b/storage/generateV4UploadSignedUrl.js
new file mode 100644
index 0000000000..4b2b4c0219
--- /dev/null
+++ b/storage/generateV4UploadSignedUrl.js
@@ -0,0 +1,68 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/**
+ * This application demonstrates how to perform basic operations on files with
+ * the Google Cloud Storage API.
+ *
+ * For more information, see the README.md under /storage and the documentation
+ * at https://cloud.google.com/storage/docs.
+ */
+
+function main(bucketName = 'my-bucket', fileName = 'test.txt') {
+  // [START storage_generate_upload_signed_url_v4]
+  /**
+   * TODO(developer): Uncomment the following lines before running the sample.
+   */
+  // The ID of your GCS bucket
+  // const bucketName = 'your-unique-bucket-name';
+
+  // The full path of your file inside the GCS bucket, e.g.
'yourFile.jpg' or 'folder1/folder2/yourFile.jpg' + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function generateV4UploadSignedUrl() { + // These options will allow temporary uploading of the file with outgoing + // Content-Type: application/octet-stream header. + const options = { + version: 'v4', + action: 'write', + expires: Date.now() + 15 * 60 * 1000, // 15 minutes + contentType: 'application/octet-stream', + }; + + // Get a v4 signed URL for uploading file + const [url] = await storage + .bucket(bucketName) + .file(fileName) + .getSignedUrl(options); + + console.log('Generated PUT signed URL:'); + console.log(url); + console.log('You can use this URL with any user agent, for example:'); + console.log( + "curl -X PUT -H 'Content-Type: application/octet-stream' " + + `--upload-file my-file '${url}'` + ); + } + + generateV4UploadSignedUrl().catch(console.error); + // [END storage_generate_upload_signed_url_v4] +} +main(...process.argv.slice(2)); diff --git a/storage/getAutoclass.js b/storage/getAutoclass.js new file mode 100644 index 0000000000..44ae5ff0a2 --- /dev/null +++ b/storage/getAutoclass.js @@ -0,0 +1,52 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_autoclass] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getAutoclass() { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log( + `Autoclass is ${ + metadata.autoclass.enabled ? 'enabled' : 'disabled' + } for ${metadata.name} at ${metadata.autoclass.toggleTime}. + Autoclass terminal storage class is last updated to ${ + metadata.autoclass.terminalStorageClass + } at ${metadata.autoclass.terminalStorageClassUpdateTime}.` + ); + } + + getAutoclass().catch(console.error); + // [END storage_get_autoclass] +} +main(...process.argv.slice(2)); diff --git a/storage/getDefaultEventBasedHold.js b/storage/getDefaultEventBasedHold.js new file mode 100644 index 0000000000..daf9d42252 --- /dev/null +++ b/storage/getDefaultEventBasedHold.js @@ -0,0 +1,45 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_default_event_based_hold] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getDefaultEventBasedHold() { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log(`Default event-based hold: ${metadata.defaultEventBasedHold}.`); + } + + getDefaultEventBasedHold().catch(console.error); + // [END storage_get_default_event_based_hold] +} +main(...process.argv.slice(2)); diff --git a/storage/getMetadata.js b/storage/getMetadata.js new file mode 100644 index 0000000000..ccaa6b8e3b --- /dev/null +++ b/storage/getMetadata.js @@ -0,0 +1,91 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_get_metadata] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getMetadata() { + // Gets the metadata for the file + const [metadata] = await storage + .bucket(bucketName) + .file(fileName) + .getMetadata(); + + console.log(`Bucket: ${metadata.bucket}`); + console.log(`CacheControl: ${metadata.cacheControl}`); + console.log(`ComponentCount: ${metadata.componentCount}`); + console.log(`ContentDisposition: ${metadata.contentDisposition}`); + console.log(`ContentEncoding: ${metadata.contentEncoding}`); + console.log(`ContentLanguage: ${metadata.contentLanguage}`); + console.log(`ContentType: ${metadata.contentType}`); + console.log(`CustomTime: ${metadata.customTime}`); + console.log(`Crc32c: ${metadata.crc32c}`); + console.log(`ETag: ${metadata.etag}`); + console.log(`Generation: ${metadata.generation}`); + console.log(`Id: ${metadata.id}`); + console.log(`KmsKeyName: ${metadata.kmsKeyName}`); + console.log(`Md5Hash: ${metadata.md5Hash}`); + console.log(`MediaLink: ${metadata.mediaLink}`); + console.log(`Metageneration: ${metadata.metageneration}`); + console.log(`Name: ${metadata.name}`); + console.log(`Size: ${metadata.size}`); + console.log(`StorageClass: ${metadata.storageClass}`); + console.log(`TimeCreated: ${new Date(metadata.timeCreated)}`); + 
console.log(`Last Metadata Update: ${new Date(metadata.updated)}`); + console.log(`TurboReplication: ${metadata.rpo}`); + console.log( + `temporaryHold: ${metadata.temporaryHold ? 'enabled' : 'disabled'}` + ); + console.log( + `eventBasedHold: ${metadata.eventBasedHold ? 'enabled' : 'disabled'}` + ); + if (metadata.retentionExpirationTime) { + console.log( + `retentionExpirationTime: ${new Date(metadata.retentionExpirationTime)}` + ); + } + if (metadata.metadata) { + console.log('\n\n\nUser metadata:'); + for (const key in metadata.metadata) { + console.log(`${key}=${metadata.metadata[key]}`); + } + } + } + + getMetadata().catch(console.error); + // [END storage_get_metadata] +} +main(...process.argv.slice(2)); diff --git a/storage/getMetadataNotifications.js b/storage/getMetadataNotifications.js new file mode 100644 index 0000000000..47801b79b4 --- /dev/null +++ b/storage/getMetadataNotifications.js @@ -0,0 +1,61 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', notificationId = '1') { + // [START storage_print_pubsub_bucket_notification] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of the notification + // const notificationId = '1'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getMetadata() { + // Get the notification metadata + const [metadata] = await storage + .bucket(bucketName) + .notification(notificationId) + .getMetadata(); + + console.log(`ID: ${metadata.id}`); + console.log(`Topic: ${metadata.topic}`); + console.log(`Event Types: ${metadata.event_types}`); + console.log(`Custom Attributes: ${metadata.custom_attributes}`); + console.log(`Payload Format: ${metadata.payload_format}`); + console.log(`Object Name Prefix: ${metadata.object_name_prefix}`); + console.log(`Etag: ${metadata.etag}`); + console.log(`Self Link: ${metadata.selfLink}`); + console.log(`Kind: ${metadata.kind}`); + } + + getMetadata().catch(console.error); + // [END storage_print_pubsub_bucket_notification] +} +main(...process.argv.slice(2)); diff --git a/storage/getPublicAccessPrevention.js b/storage/getPublicAccessPrevention.js new file mode 100644 index 0000000000..7a53f7ff22 --- /dev/null +++ b/storage/getPublicAccessPrevention.js @@ -0,0 +1,50 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_public_access_prevention] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The name of your GCS bucket + // const bucketName = 'Name of a bucket, e.g. my-bucket'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getPublicAccessPrevention() { + // Gets Bucket Metadata and prints publicAccessPrevention value (either 'inherited' or 'enforced'). + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log( + `Public access prevention is ${metadata.iamConfiguration.publicAccessPrevention} for ${bucketName}.` + ); + } + + getPublicAccessPrevention(); + + // [END storage_get_public_access_prevention] +} + +main(...process.argv.slice(2)); diff --git a/storage/getRPO.js b/storage/getRPO.js new file mode 100644 index 0000000000..00d788208f --- /dev/null +++ b/storage/getRPO.js @@ -0,0 +1,52 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_rpo] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The name of your GCS bucket in a dual-region + // const bucketName = 'Name of a bucket, e.g. my-bucket'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getRPO() { + // Gets Bucket Metadata and prints RPO value (either 'default' or 'async_turbo'). + // If RPO is undefined, the bucket is a single region bucket + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log(`RPO is ${metadata.rpo} for ${bucketName}.`); + } + + getRPO(); + + // [END storage_get_rpo] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/getRequesterPaysStatus.js b/storage/getRequesterPaysStatus.js new file mode 100644 index 0000000000..fea0ad4e75 --- /dev/null +++ b/storage/getRequesterPaysStatus.js @@ -0,0 +1,55 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_requester_pays_status] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getRequesterPaysStatus() { + // Gets the requester-pays status of a bucket + const [metadata] = await storage.bucket(bucketName).getMetadata(); + + let status; + if (metadata && metadata.billing && metadata.billing.requesterPays) { + status = 'enabled'; + } else { + status = 'disabled'; + } + console.log( + `Requester-pays requests are ${status} for bucket ${bucketName}.` + ); + } + + getRequesterPaysStatus().catch(console.error); + // [END storage_get_requester_pays_status] +} +main(...process.argv.slice(2)); diff --git a/storage/getRetentionPolicy.js b/storage/getRetentionPolicy.js new file mode 100644 index 0000000000..0b74b7266d --- /dev/null +++ b/storage/getRetentionPolicy.js @@ -0,0 +1,55 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_retention_policy] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getRetentionPolicy() { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + if (metadata.retentionPolicy) { + const retentionPolicy = metadata.retentionPolicy; + console.log('A retention policy exists!'); + console.log(`Period: ${retentionPolicy.retentionPeriod}`); + console.log(`Effective time: ${retentionPolicy.effectiveTime}`); + if (retentionPolicy.isLocked) { + console.log('Policy is locked'); + } else { + console.log('Policy is unlocked'); + } + } + } + + getRetentionPolicy().catch(console.error); + // [END storage_get_retention_policy] +} +main(...process.argv.slice(2)); diff --git a/storage/getServiceAccount.js b/storage/getServiceAccount.js new file mode 100644 index 0000000000..6d4a29ba32 --- /dev/null +++ b/storage/getServiceAccount.js @@ -0,0 +1,52 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Get Service Account. +// description: Get Service Account. +// usage: node getServiceAccount.js + +function main(projectId = 'serviceAccountProjectId') { + // [START storage_get_service_account] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCP project + // const projectId = 'your-project-id'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage({ + projectId, + }); + + async function getServiceAccount() { + const [serviceAccount] = await storage.getServiceAccount(); + console.log( + `The GCS service account for project ${projectId} is: ${serviceAccount.emailAddress}` + ); + } + + getServiceAccount().catch(console.error); + // [END storage_get_service_account] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/getSoftDeletePolicy.js b/storage/getSoftDeletePolicy.js new file mode 100644 index 0000000000..fa5efebf2f --- /dev/null +++ b/storage/getSoftDeletePolicy.js @@ -0,0 +1,56 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_soft_delete_policy] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getSoftDeletePolicy() { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + + const softDelete = metadata.softDeletePolicy; + if ( + !softDelete || + !softDelete.retentionDurationSeconds || + softDelete.retentionDurationSeconds === '0' + ) { + console.log(`Bucket ${metadata.name} soft delete policy was disabled`); + } else { + console.log(`Soft delete policy for ${metadata.name}`); + console.log( + `Soft delete Period: ${softDelete.retentionDurationSeconds} seconds` + ); + if (softDelete.effectiveTime) { + console.log(`Effective Time: ${softDelete.effectiveTime}`); + } + } + } + + getSoftDeletePolicy().catch(console.error); + // [END storage_get_soft_delete_policy] +} + +main(...process.argv.slice(2)); diff --git a/storage/getSoftDeletedBucket.js b/storage/getSoftDeletedBucket.js new file mode 100644 index 0000000000..2cd3e8b39e --- /dev/null +++ b/storage/getSoftDeletedBucket.js @@ -0,0 +1,52 @@ +/** + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main(bucketName = 'my-bucket', generation = 123456789) { + // [START storage_get_soft_deleted_bucket] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The generation of the bucket to restore + // const generation = 123456789; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getSoftDeletedBucket() { + const options = { + generation: generation, + softDeleted: true, + }; + + const [metadata] = await storage.bucket(bucketName).getMetadata(options); + + console.log(`Bucket: ${metadata.name}`); + console.log(`Generation: ${metadata.generation}`); + console.log(`SoftDeleteTime: ${metadata.softDeleteTime}`); + console.log(`HardDeleteTime: ${metadata.hardDeleteTime}`); + } + + getSoftDeletedBucket().catch(console.error); + // [END storage_get_soft_deleted_bucket] +} + +main(...process.argv.slice(2)); diff --git a/storage/getUniformBucketLevelAccess.js b/storage/getUniformBucketLevelAccess.js new file mode 100644 index 0000000000..e6382135e3 --- /dev/null +++ b/storage/getUniformBucketLevelAccess.js @@ -0,0 +1,60 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_get_uniform_bucket_level_access] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function getUniformBucketLevelAccess() { + // Gets Bucket Metadata and checks if uniform bucket-level access is enabled. 
+ const [metadata] = await storage.bucket(bucketName).getMetadata(); + + if (metadata.iamConfiguration) { + const uniformBucketLevelAccess = + metadata.iamConfiguration.uniformBucketLevelAccess; + console.log(`Uniform bucket-level access is enabled for ${bucketName}.`); + console.log( + `Bucket will be locked on ${uniformBucketLevelAccess.lockedTime}.` + ); + } else { + console.log( + `Uniform bucket-level access is not enabled for ${bucketName}.` + ); + } + } + + getUniformBucketLevelAccess().catch(console.error); + + // [END storage_get_uniform_bucket_level_access] +} + +main(...process.argv.slice(2)); diff --git a/storage/hmacKeyActivate.js b/storage/hmacKeyActivate.js new file mode 100644 index 0000000000..6ce8a2dd9a --- /dev/null +++ b/storage/hmacKeyActivate.js @@ -0,0 +1,57 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Activate HMAC SA Key. +// description: Activate HMAC SA Key. +// usage: node hmacKeyActivate.js [projectId] + +function main( + hmacKeyAccessId = 'GOOG0234230X00', + projectId = 'serviceAccountProjectId' +) { + // [START storage_activate_hmac_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The access ID of the HMAC key + // const hmacKeyAccessId = 'GOOG0234230X00'; + + // The ID of the project to which the service account belongs + // const projectId = 'project-id'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Activate HMAC SA Key + async function activateHmacKey() { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'ACTIVE'}); + + console.log('The HMAC key is now active.'); + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKeyMetadata)) { + console.log(`${key}: ${value}`); + } + } + // [END storage_activate_hmac_key] + activateHmacKey().catch(console.error); +} + +main(...process.argv.slice(2)); diff --git a/storage/hmacKeyCreate.js b/storage/hmacKeyCreate.js new file mode 100644 index 0000000000..3fb3456801 --- /dev/null +++ b/storage/hmacKeyCreate.js @@ -0,0 +1,59 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Create HMAC SA Key. +// description: Create HMAC SA Key. 
+// usage: node hmacKeyCreate.js [projectId] + +function main( + serviceAccountEmail = 'service-account@example.com', + projectId = 'serviceAccountProjectId' +) { + // [START storage_create_hmac_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The service account email for which the new HMAC key will be created + // const serviceAccountEmail = 'service-account@iam.gserviceaccount.com'; + + // The ID of the project to which the service account belongs + // const projectId = 'project-id'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Create HMAC SA Key + async function createHmacKey() { + const [hmacKey, secret] = await storage.createHmacKey(serviceAccountEmail, { + projectId, + }); + + console.log(`The base64 encoded secret is: ${secret}`); + console.log('Do not miss that secret, there is no API to recover it.'); + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKey.metadata)) { + console.log(`${key}: ${value}`); + } + } + // [END storage_create_hmac_key] + createHmacKey().catch(console.error); +} + +main(...process.argv.slice(2)); diff --git a/storage/hmacKeyDeactivate.js b/storage/hmacKeyDeactivate.js new file mode 100644 index 0000000000..471bc84ca5 --- /dev/null +++ b/storage/hmacKeyDeactivate.js @@ -0,0 +1,57 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Deactivate HMAC SA Key. +// description: Deactivate HMAC SA Key. +// usage: node hmacKeyDeactivate.js [projectId] + +function main( + hmacKeyAccessId = 'GOOG0234230X00', + projectId = 'serviceAccountProjectId' +) { + // [START storage_deactivate_hmac_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The access ID of the HMAC key + // const hmacKeyAccessId = 'GOOG0234230X00'; + + // The ID of the project to which the service account belongs + // const projectId = 'project-id'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Deactivate HMAC SA Key + async function deactivateHmacKey() { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'INACTIVE'}); + + console.log('The HMAC key is now inactive.'); + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKeyMetadata)) { + console.log(`${key}: ${value}`); + } + } + // [END storage_deactivate_hmac_key] + deactivateHmacKey().catch(console.error); +} + +main(...process.argv.slice(2)); diff --git a/storage/hmacKeyDelete.js b/storage/hmacKeyDelete.js new file mode 100644 index 0000000000..337a273b0f --- /dev/null +++ b/storage/hmacKeyDelete.js @@ -0,0 +1,55 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Delete HMAC SA Key. +// description: Delete HMAC SA Key. +// usage: node hmacKeyDelete.js [projectId] + +function main( + hmacKeyAccessId = 'GOOG0234230X00', + projectId = 'serviceAccountProjectId' +) { + // [START storage_delete_hmac_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The access ID of the HMAC key + // const hmacKeyAccessId = 'GOOG0234230X00'; + + // The ID of the project to which the service account belongs + // const projectId = 'project-id'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Delete HMAC SA Key + async function deleteHmacKey() { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + await hmacKey.delete(); + + console.log( + 'The key is deleted, though it may still appear in getHmacKeys() results.' + ); + } + // [END storage_delete_hmac_key] + deleteHmacKey().catch(console.error); +} + +main(...process.argv.slice(2)); diff --git a/storage/hmacKeyGet.js b/storage/hmacKeyGet.js new file mode 100644 index 0000000000..656036984b --- /dev/null +++ b/storage/hmacKeyGet.js @@ -0,0 +1,58 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Get HMAC SA Key Metadata. +// description: Get HMAC SA Key Metadata. +// usage: node hmacKeyGet.js [projectId] + +function main( + hmacKeyAccessId = 'GOOG0234230X00', + projectId = 'serviceAccountProjectId' +) { + // [START storage_get_hmac_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The access ID of the HMAC key + // const hmacKeyAccessId = 'GOOG0234230X00'; + + // The ID of the project to which the service account belongs + // const projectId = 'project-id'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Get HMAC SA Key Metadata + async function getHmacKey() { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + + // Populate the hmacKey object with metadata from server. + await hmacKey.getMetadata(); + + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKey.metadata)) { + console.log(`${key}: ${value}`); + } + } + // [END storage_get_hmac_key] + getHmacKey().catch(console.error); +} + +main(...process.argv.slice(2)); diff --git a/storage/hmacKeysList.js b/storage/hmacKeysList.js new file mode 100644 index 0000000000..6f6acf939d --- /dev/null +++ b/storage/hmacKeysList.js @@ -0,0 +1,52 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: List HMAC SA Keys Metadata. +// description: List HMAC SA Keys Metadata. +// usage: node hmacKeyList.js [projectId] + +function main(projectId = 'serviceAccountProjectId') { + // [START storage_list_hmac_keys] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of the project to which the service account belongs + // const projectId = 'project-id'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // List HMAC SA Keys' Metadata + async function listHmacKeys() { + const [hmacKeys] = await storage.getHmacKeys({projectId}); + + // hmacKeys is an array of HmacKey objects. + for (const hmacKey of hmacKeys) { + console.log( + `Service Account Email: ${hmacKey.metadata.serviceAccountEmail}` + ); + console.log(`Access Id: ${hmacKey.metadata.accessId}`); + } + } + // [END storage_list_hmac_keys] + listHmacKeys().catch(console.error); +} + +main(...process.argv.slice(2)); diff --git a/storage/listBuckets.js b/storage/listBuckets.js new file mode 100644 index 0000000000..3c06f4fc2e --- /dev/null +++ b/storage/listBuckets.js @@ -0,0 +1,38 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main() { + // [START storage_list_buckets] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listBuckets() { + const [buckets] = await storage.getBuckets(); + + console.log('Buckets:'); + buckets.forEach(bucket => { + console.log(bucket.name); + }); + } + + listBuckets().catch(console.error); + // [END storage_list_buckets] +} + +main(...process.argv.slice(2)); diff --git a/storage/listBucketsPartialSuccess.js b/storage/listBucketsPartialSuccess.js new file mode 100644 index 0000000000..c1d0daf55a --- /dev/null +++ b/storage/listBucketsPartialSuccess.js @@ -0,0 +1,57 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +function main() { + // [START storage_list_buckets_partial_success] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listBucketsPartialSuccess() { + const option = { + returnPartialSuccess: true, + maxResults: 5, + }; + const [buckets, nextQuery, apiResponse] = await storage.getBuckets(option); + + if (nextQuery && nextQuery.pageToken) { + console.log(`Next Page Token: ${nextQuery.pageToken}`); + } + + console.log('\nBuckets:'); + buckets.forEach(bucket => { + if (bucket.unreachable) { + console.log(`${bucket.name} (unreachable: ${bucket.unreachable})`); + } else { + console.log(`${bucket.name}`); + } + }); + + if (apiResponse.unreachable && apiResponse.unreachable.length > 0) { + console.log('\nUnreachable Buckets:'); + apiResponse.unreachable.forEach(item => { + console.log(item); + }); + } + } + + listBucketsPartialSuccess().catch(console.error); + // [END storage_list_buckets_partial_success] +} + +main(...process.argv.slice(2)); diff --git a/storage/listFiles.js b/storage/listFiles.js new file mode 100644 index 0000000000..79cdd2c8b6 --- /dev/null +++ b/storage/listFiles.js @@ -0,0 +1,50 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. 
+ * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_list_files] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listFiles() { + // Lists files in the bucket + const [files] = await storage.bucket(bucketName).getFiles(); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + } + + listFiles().catch(console.error); + // [END storage_list_files] +} +main(...process.argv.slice(2)); diff --git a/storage/listFilesByPrefix.js b/storage/listFilesByPrefix.js new file mode 100644 index 0000000000..04fc988ae4 --- /dev/null +++ b/storage/listFilesByPrefix.js @@ -0,0 +1,83 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */ + +function main(bucketName = 'my-bucket', prefix = 'test', delimiter = '/') { + // [START storage_list_files_with_prefix] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The directory prefix to search for + // const prefix = 'myDirectory/'; + + // The delimiter to use + // const delimiter = '/'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listFilesByPrefix() { + /** + * This can be used to list all blobs in a "folder", e.g. "public/". + * + * The delimiter argument can be used to restrict the results to only the + * "files" in the given "folder". Without the delimiter, the entire tree under + * the prefix is returned. For example, given these blobs: + * + * /a/1.txt + * /a/b/2.txt + * + * If you just specify prefix = 'a/', you'll get back: + * + * /a/1.txt + * /a/b/2.txt + * + * However, if you specify prefix='a/' and delimiter='/', you'll get back: + * + * /a/1.txt + */ + const options = { + prefix: prefix, + }; + + if (delimiter) { + options.delimiter = delimiter; + } + + // Lists files in the bucket, filtered by a prefix + const [files] = await storage.bucket(bucketName).getFiles(options); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + } + + listFilesByPrefix().catch(console.error); + // [END storage_list_files_with_prefix] +} +main(...process.argv.slice(2)); diff --git a/storage/listFilesPaginate.js b/storage/listFilesPaginate.js new file mode 100644 index 0000000000..694ac4c268 --- /dev/null +++ b/storage/listFilesPaginate.js @@ -0,0 +1,55 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* eslint-disable no-unused-vars */ + +function main(bucketName = 'my-bucket') { + // [START storage_list_files_paginated] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listFilesPaginated() { + const bucket = storage.bucket(bucketName); + const [files, queryForPage2] = await bucket.getFiles({autoPaginate: false}); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + + // Page through the next set of results using "queryForPage2" + if (queryForPage2 !== null) { + const [files, queryForPage3] = await bucket.getFiles(queryForPage2); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + + // If necessary, continue cursoring using "queryForPage3" + } + } + + listFilesPaginated().catch(console.error); + // [END storage_list_files_paginated] +} +main(...process.argv.slice(2)); diff --git a/storage/listFilesWithOldVersions.js b/storage/listFilesWithOldVersions.js new file mode 100644 index 0000000000..c08f38cd40 --- /dev/null +++ b/storage/listFilesWithOldVersions.js @@ -0,0 +1,50 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: List Files with Old Versions. +// description: List Files with Old Versions. +// usage: node listFilesWithOldVersions.js + +function main(bucketName = 'my-bucket') { + // [START storage_list_file_archived_generations] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listFilesWithOldVersions() { + const [files] = await storage.bucket(bucketName).getFiles({ + versions: true, + }); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name, file.generation); + }); + } + + listFilesWithOldVersions().catch(console.error); + // [END storage_list_file_archived_generations] +} +main(...process.argv.slice(2)); diff --git a/storage/listNotifications.js b/storage/listNotifications.js new file mode 100644 index 0000000000..ba8a68e06f --- /dev/null +++ b/storage/listNotifications.js @@ -0,0 +1,50 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_list_bucket_notifications] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listNotifications() { + // Lists notifications in the bucket + const [notifications] = await storage.bucket(bucketName).getNotifications(); + + console.log('Notifications:'); + notifications.forEach(notification => { + console.log(notification.id); + }); + } + + listNotifications().catch(console.error); + // [END storage_list_bucket_notifications] +} +main(...process.argv.slice(2)); diff --git a/storage/listSoftDeletedBucket.js b/storage/listSoftDeletedBucket.js new file mode 100644 index 0000000000..679bd6caf6 --- /dev/null +++ b/storage/listSoftDeletedBucket.js @@ -0,0 +1,42 @@ +/** + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main() { + // [START storage_list_soft_deleted_buckets] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listSoftDeletedBuckets() { + const options = { + softDeleted: true, + }; + + const [buckets] = await storage.getBuckets(options); + + console.log('Buckets:'); + buckets.forEach(bucket => { + console.log(bucket.name); + }); + } + + listSoftDeletedBuckets().catch(console.error); + // [END storage_list_soft_deleted_buckets] +} + +main(...process.argv.slice(2)); diff --git a/storage/listSoftDeletedObjectVersions.js b/storage/listSoftDeletedObjectVersions.js new file mode 100644 index 0000000000..0a7e711fff --- /dev/null +++ b/storage/listSoftDeletedObjectVersions.js @@ -0,0 +1,54 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_list_soft_deleted_object_versions] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listSoftDeletedObjectVersions() { + const options = { + softDeleted: true, + matchGlob: fileName, + }; + + const [files] = await storage.bucket(bucketName).getFiles(options); + + console.log('Files:'); + files.forEach(file => { + console.log( + `Name: ${file.name}, Generation: ${file.metadata.generation}` + ); + }); + } + + listSoftDeletedObjectVersions().catch(console.error); + // [END storage_list_soft_deleted_object_versions] +} + +main(...process.argv.slice(2)); diff --git a/storage/listSoftDeletedObjects.js b/storage/listSoftDeletedObjects.js new file mode 100644 index 0000000000..62014723f9 --- /dev/null +++ b/storage/listSoftDeletedObjects.js @@ -0,0 +1,48 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +function main(bucketName = 'my-bucket') { + // [START storage_list_soft_deleted_objects] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function listSoftDeletedObjects() { + const options = { + softDeleted: true, + }; + + const [files] = await storage.bucket(bucketName).getFiles(options); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + } + + listSoftDeletedObjects().catch(console.error); + // [END storage_list_soft_deleted_objects] +} + +main(...process.argv.slice(2)); diff --git a/storage/lockRetentionPolicy.js b/storage/lockRetentionPolicy.js new file mode 100644 index 0000000000..b259fdf7ca --- /dev/null +++ b/storage/lockRetentionPolicy.js @@ -0,0 +1,58 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. 
+ * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main(bucketName = 'my-bucket') { + // [START storage_lock_retention_policy] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function lockRetentionPolicy() { + // Gets the current metageneration value for the bucket, required by + // lock_retention_policy + const [unlockedMetadata] = await storage.bucket(bucketName).getMetadata(); + + // Warning: Once a retention policy is locked, it cannot be unlocked. The + // retention period can only be increased + const [lockedMetadata] = await storage + .bucket(bucketName) + .lock(unlockedMetadata.metageneration); + console.log(`Retention policy for ${bucketName} is now locked`); + console.log( + `Retention policy effective as of ${lockedMetadata.retentionPolicy.effectiveTime}` + ); + + return lockedMetadata; + } + + lockRetentionPolicy().catch(console.error); + // [END storage_lock_retention_policy] +} +main(...process.argv.slice(2)); diff --git a/storage/makeBucketPublic.js b/storage/makeBucketPublic.js new file mode 100644 index 0000000000..46aaa958fa --- /dev/null +++ b/storage/makeBucketPublic.js @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Make Bucket Public. +// description: Storage Make Bucket Public. +// usage: node makeBucketPublic.js + +function main(bucketName = 'my-bucket') { + // [START storage_set_bucket_public_iam] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function makeBucketPublic() { + await storage.bucket(bucketName).makePublic(); + + console.log(`Bucket ${bucketName} is now publicly readable`); + } + + makeBucketPublic().catch(console.error); + // [END storage_set_bucket_public_iam] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/makePublic.js b/storage/makePublic.js new file mode 100644 index 0000000000..fd17d8b010 --- /dev/null +++ b/storage/makePublic.js @@ -0,0 +1,49 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. 
+ * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_make_public] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function makePublic() { + await storage.bucket(bucketName).file(fileName).makePublic(); + + console.log(`gs://${bucketName}/${fileName} is now public.`); + } + + makePublic().catch(console.error); + // [END storage_make_public] +} +main(...process.argv.slice(2)); diff --git a/storage/moveFile.js b/storage/moveFile.js new file mode 100644 index 0000000000..29d62d4e68 --- /dev/null +++ b/storage/moveFile.js @@ -0,0 +1,76 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */ + +function main( + bucketName = 'my-bucket', + srcFileName = 'test.txt', + destFileName = 'test2.txt', + destinationGenerationMatchPrecondition = 0 +) { + // [START storage_move_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-source-bucket'; + + // The ID of your GCS file + // const srcFileName = 'your-file-name'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function moveFile() { + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + const moveOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; + + // Moves the file within the bucket + await storage + .bucket(bucketName) + .file(srcFileName) + .move(destFileName, moveOptions); + + console.log( + `gs://${bucketName}/${srcFileName} moved to gs://${bucketName}/${destFileName}` + ); + } + + moveFile().catch(console.error); + // [END storage_move_file] +} +main(...process.argv.slice(2)); diff --git a/storage/moveFileAtomic.js b/storage/moveFileAtomic.js new file mode 100644 index 0000000000..88047da8cf --- /dev/null +++ b/storage/moveFileAtomic.js @@ -0,0 +1,76 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + srcFileName = 'test.txt', + destFileName = 'test2.txt', + destinationGenerationMatchPrecondition = 0 +) { + // [START storage_move_object] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-source-bucket'; + + // The ID of your GCS file + // const srcFileName = 'your-file-name'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function moveFileAtomic() { + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ const moveOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; + + // Moves the file atomically within the bucket + await storage + .bucket(bucketName) + .file(srcFileName) + .moveFileAtomic(destFileName, moveOptions); + + console.log( + `gs://${bucketName}/${srcFileName} moved to gs://${bucketName}/${destFileName}` + ); + } + + moveFileAtomic().catch(console.error); + // [END storage_move_object] +} +main(...process.argv.slice(2)); diff --git a/storage/package.json b/storage/package.json new file mode 100644 index 0000000000..539149afb7 --- /dev/null +++ b/storage/package.json @@ -0,0 +1,30 @@ +{ + "name": "@google-cloud/storage-samples", + "description": "Samples for the Cloud Storage Client Library for Node.js.", + "license": "Apache-2.0", + "author": "Google Inc.", + "engines": { + "node": ">=12" + }, + "repository": "googleapis/nodejs-storage", + "private": true, + "files": [ + "*.js" + ], + "scripts": { + "cleanup": "node scripts/cleanup", + "test": "mocha system-test/*.js --timeout 600000" + }, + "dependencies": { + "@google-cloud/pubsub": "^4.0.0", + "@google-cloud/storage": "^7.19.0", + "node-fetch": "^2.6.7", + "uuid": "^8.0.0", + "yargs": "^16.0.0" + }, + "devDependencies": { + "chai": "^4.2.0", + "mocha": "^8.0.0", + "p-limit": "^3.1.0" + } +} diff --git a/storage/printBucketAcl.js b/storage/printBucketAcl.js new file mode 100644 index 0000000000..9937fab68c --- /dev/null +++ b/storage/printBucketAcl.js @@ -0,0 +1,49 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_print_bucket_acl] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function printBucketAcl() { + // Gets the ACL for the bucket + const [acls] = await storage.bucket(bucketName).acl.get(); + + acls.forEach(acl => { + console.log(`${acl.role}: ${acl.entity}`); + }); + } + printBucketAcl().catch(console.error); + // [END storage_print_bucket_acl] +} + +main(...process.argv.slice(2)); diff --git a/storage/printBucketAclForUser.js b/storage/printBucketAclForUser.js new file mode 100644 index 0000000000..09a6ed7c43 --- /dev/null +++ b/storage/printBucketAclForUser.js @@ -0,0 +1,56 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { + // [START storage_print_bucket_acl_for_user] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The email address of the user to check + // const userEmail = 'user-email-to-check'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function printBucketAclForUser() { + const options = { + // Specify the user + entity: `user-${userEmail}`, + }; + + // Gets the user's ACL for the bucket + const [aclObject] = await storage.bucket(bucketName).acl.get(options); + + console.log(`${aclObject.role}: ${aclObject.entity}`); + } + + printBucketAclForUser().catch(console.error); + // [END storage_print_bucket_acl_for_user] +} + +main(...process.argv.slice(2)); diff --git a/storage/printFileAcl.js b/storage/printFileAcl.js new file mode 100644 index 0000000000..8c14ddebfa --- /dev/null +++ b/storage/printFileAcl.js @@ -0,0 +1,52 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', fileName = 'test.txt') { + // [START storage_print_file_acl] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function printFileAcl() { + // Gets the ACL for the file + const [acls] = await storage.bucket(bucketName).file(fileName).acl.get(); + + acls.forEach(acl => { + console.log(`${acl.role}: ${acl.entity}`); + }); + } + + printFileAcl().catch(console.error); + // [END storage_print_file_acl] +} +main(...process.argv.slice(2)); diff --git a/storage/printFileAclForUser.js b/storage/printFileAclForUser.js new file mode 100644 index 0000000000..f614749c73 --- /dev/null +++ b/storage/printFileAclForUser.js @@ -0,0 +1,65 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + userEmail = 'jdobry@google.com' +) { + // [START storage_print_file_acl_for_user] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The email address of the user to check + // const userEmail = 'user-email-to-check'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function printFileAclForUser() { + const options = { + // Specify the user + entity: `user-${userEmail}`, + }; + + // Gets the user's ACL for the file + const [aclObject] = await storage + .bucket(bucketName) + .file(fileName) + .acl.get(options); + + console.log(`${aclObject.role}: ${aclObject.entity}`); + } + + printFileAclForUser().catch(console.error); + // [END storage_print_file_acl_for_user] +} +main(...process.argv.slice(2)); diff --git a/storage/quickstart.js b/storage/quickstart.js new file mode 100644 index 0000000000..699924f852 --- /dev/null +++ b/storage/quickstart.js @@ -0,0 +1,47 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(bucketName = 'my-new-bucket') { + // [START storage_quickstart] + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // For more information on ways to initialize Storage, please see + // https://googleapis.dev/nodejs/storage/latest/Storage.html + + // Creates a client using Application Default Credentials + const storage = new Storage(); + + // Creates a client from a Google service account key + // const storage = new Storage({keyFilename: 'key.json'}); + + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + async function createBucket() { + // Creates the new bucket + await storage.createBucket(bucketName); + console.log(`Bucket ${bucketName} created.`); + } + + createBucket().catch(console.error); + // [END storage_quickstart] +} + +main(...process.argv.slice(2)); diff --git a/storage/releaseEventBasedHold.js b/storage/releaseEventBasedHold.js new file mode 100644 index 0000000000..cbce58d4be --- /dev/null +++ b/storage/releaseEventBasedHold.js @@ -0,0 +1,64 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + metagenerationMatchPrecondition = 0 +) { + // [START storage_release_event_based_hold] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function releaseEventBasedHold() { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. 
+ const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; + + await storage.bucket(bucketName).file(fileName).setMetadata( + { + eventBasedHold: false, + }, + options + ); + console.log(`Event-based hold was released for ${fileName}.`); + } + + releaseEventBasedHold().catch(console.error); + // [END storage_release_event_based_hold] +} +main(...process.argv.slice(2)); diff --git a/storage/releaseTemporaryHold.js b/storage/releaseTemporaryHold.js new file mode 100644 index 0000000000..91f6c15b30 --- /dev/null +++ b/storage/releaseTemporaryHold.js @@ -0,0 +1,64 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + metagenerationMatchPrecondition = 0 +) { + // [START storage_release_temporary_hold] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function releaseTemporaryHold() { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; + + await storage.bucket(bucketName).file(fileName).setMetadata( + { + temporaryHold: false, + }, + options + ); + console.log(`Temporary hold was released for ${fileName}.`); + } + + releaseTemporaryHold().catch(console.error); + // [END storage_release_temporary_hold] +} +main(...process.argv.slice(2)); diff --git a/storage/removeBucketConditionalBinding.js b/storage/removeBucketConditionalBinding.js new file mode 100644 index 0000000000..7ba888ea5d --- /dev/null +++ b/storage/removeBucketConditionalBinding.js @@ -0,0 +1,94 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. 
+ * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + roleName = 'roles/storage.objectViewer', + title = 'match-prefix', + description = 'Applies to objects matching a prefix', + expression = 'resource.name.startsWith("projects/_/buckets/bucket-name/objects/prefix-a-")' +) { + // [START storage_remove_bucket_conditional_iam_binding] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The role to grant + // const roleName = 'roles/storage.objectViewer'; + + // The members to grant the new role to + // const members = [ + // 'user:jdoe@example.com', + // 'group:admins@example.com', + // ]; + + // Create a condition + // const title = 'Title'; + // const description = 'Description'; + // const expression = 'resource.name.startsWith(\"projects/_/buckets/bucket-name/objects/prefix-a-\")'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeBucketConditionalBinding() { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); + + // Gets and updates the bucket's IAM policy + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + + // Set the policy's version to 3 to use condition in bindings. + policy.version = 3; + + // Finds and removes the appropriate role-member group with specific condition. 
+ const index = policy.bindings.findIndex( + binding => + binding.role === roleName && + binding.condition && + binding.condition.title === title && + binding.condition.description === description && + binding.condition.expression === expression + ); + + const binding = policy.bindings[index]; + if (binding) { + policy.bindings.splice(index, 1); + + // Updates the bucket's IAM policy + await bucket.iam.setPolicy(policy); + + console.log('Conditional Binding was removed.'); + } else { + // No matching role-member group with specific condition were found + throw new Error('No matching binding group found.'); + } + } + + removeBucketConditionalBinding().catch(console.error); + // [END storage_remove_bucket_conditional_iam_binding] +} +main(...process.argv.slice(2)); diff --git a/storage/removeBucketCors.js b/storage/removeBucketCors.js new file mode 100644 index 0000000000..62a122d80e --- /dev/null +++ b/storage/removeBucketCors.js @@ -0,0 +1,50 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Storage Remove Bucket Cors Configuration. +// description: Removes bucket cors configuration. +// usage: node removeBucketCors.js + +function main(bucketName = 'my-bucket') { + // [START storage_remove_cors_configuration] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeBucketCors() { + await storage.bucket(bucketName).setCorsConfiguration([]); + + console.log(`Removed CORS configuration from bucket ${bucketName}`); + } + + removeBucketCors().catch(console.error); + // [END storage_remove_cors_configuration] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/removeBucketDefaultOwner.js b/storage/removeBucketDefaultOwner.js new file mode 100644 index 0000000000..94fb8a9aa9 --- /dev/null +++ b/storage/removeBucketDefaultOwner.js @@ -0,0 +1,53 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { + // [START storage_remove_bucket_default_owner] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The email address of the user to remove + // const userEmail = 'user-email-to-remove'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeBucketDefaultOwner() { + // Removes the user from the access control list of the bucket. You can use + // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and + // deleteAllAuthenticatedUsers() to remove access for different types of entities. + await storage.bucket(bucketName).acl.default.owners.deleteUser(userEmail); + + console.log(`Removed user ${userEmail} from bucket ${bucketName}.`); + } + + removeBucketDefaultOwner().catch(console.error); + // [END storage_remove_bucket_default_owner] +} + +main(...process.argv.slice(2)); diff --git a/storage/removeBucketIamMember.js b/storage/removeBucketIamMember.js new file mode 100644 index 0000000000..fe333a2f48 --- /dev/null +++ b/storage/removeBucketIamMember.js @@ -0,0 +1,87 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +function main( + bucketName = 'my-bucket', + roleName = 'roles/storage.objectViewer', + members = 'user:test@example.com' +) { + members = members.split(','); + // [START storage_remove_bucket_iam_member] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The role to revoke + // const roleName = 'roles/storage.objectViewer'; + + // The members to revoke the roles from + // const members = [ + // 'user:jdoe@example.com', + // 'group:admins@example.com', + // ]; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeBucketIamMember() { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); + + // For more information please read: + // https://cloud.google.com/storage/docs/access-control/iam + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + + // Finds and updates the appropriate role-member group, without a condition. 
+    const index = policy.bindings.findIndex(
+      binding => binding.role === roleName && !binding.condition
+    );
+
+    const role = policy.bindings[index];
+    if (role) {
+      role.members = role.members.filter(
+        member => members.indexOf(member) === -1
+      );
+
+      // Updates the policy object with the new (or empty) role-member group
+      if (role.members.length === 0) {
+        policy.bindings.splice(index, 1);
+      } else {
+        policy.bindings[index] = role;
+      }
+
+      // Updates the bucket's IAM policy
+      await bucket.iam.setPolicy(policy);
+    } else {
+      // No matching role-member group(s) were found
+      throw new Error('No matching role-member group(s) found.');
+    }
+
+    console.log(
+      `Removed the following member(s) with role ${roleName} from ${bucketName}:`
+    );
+    members.forEach(member => {
+      console.log(` ${member}`);
+    });
+  }
+
+  removeBucketIamMember().catch(console.error);
+  // [END storage_remove_bucket_iam_member]
+}
+main(...process.argv.slice(2));
diff --git a/storage/removeBucketLabel.js b/storage/removeBucketLabel.js
new file mode 100644
index 0000000000..8df82f0121
--- /dev/null
+++ b/storage/removeBucketLabel.js
@@ -0,0 +1,54 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+// sample-metadata:
+//   title: Storage Remove Bucket Label.
+//   description: Removes bucket label.
+// usage: node removeBucketLabel.js labelone) + +function main(bucketName = 'my-bucket', labelKey = 'labelone') { + // [START storage_remove_bucket_label] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The key of the label to remove from the bucket + // const labelKey = 'label-key-to-remove'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeBucketLabel() { + const labels = {}; + // To remove a label set the value of the key to null. + labels[labelKey] = null; + await storage.bucket(bucketName).setMetadata({labels}); + console.log(`Removed labels from bucket ${bucketName}`); + } + + removeBucketLabel().catch(console.error); + // [END storage_remove_bucket_label] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/removeBucketOwnerAcl.js b/storage/removeBucketOwnerAcl.js new file mode 100644 index 0000000000..5f3e8950bf --- /dev/null +++ b/storage/removeBucketOwnerAcl.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { + // [START storage_remove_bucket_owner] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The email address of the user to remove + // const userEmail = 'user-email-to-remove'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeBucketOwner() { + // Removes the user from the access control list of the bucket. You can use + // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and + // deleteAllAuthenticatedUsers() to remove access for different types of entities. + await storage.bucket(bucketName).acl.owners.deleteUser(userEmail); + + console.log(`Removed user ${userEmail} from bucket ${bucketName}.`); + } + + removeBucketOwner().catch(console.error); + + // [END storage_remove_bucket_owner] +} + +main(...process.argv.slice(2)); diff --git a/storage/removeDefaultKMSKey.js b/storage/removeDefaultKMSKey.js new file mode 100644 index 0000000000..8c754f9232 --- /dev/null +++ b/storage/removeDefaultKMSKey.js @@ -0,0 +1,54 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Remove Default KMS Key. +// description: Remove Default KMS Key. +// usage: node removeDefaultKMSKey.js + +function main(bucketName = 'my-bucket') { + // [START storage_bucket_delete_default_kms_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeDefaultKMSKey() { + await storage.bucket(bucketName).setMetadata({ + encryption: { + defaultKmsKeyName: null, + }, + }); + + console.log(`Default KMS key was removed from ${bucketName}`); + } + + removeDefaultKMSKey().catch(console.error); + // [END storage_bucket_delete_default_kms_key] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/removeFileOwnerAcl.js b/storage/removeFileOwnerAcl.js new file mode 100644 index 0000000000..2a72e1f7e0 --- /dev/null +++ b/storage/removeFileOwnerAcl.js @@ -0,0 +1,63 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on bucket and + * file Access Control Lists with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + userEmail = 'jdobry@google.com' +) { + // [START storage_remove_file_owner] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The email address of the user to remove + // const userEmail = 'user-email-to-remove'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeFileOwner() { + // Removes the user from the access control list of the file. You can use + // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and + // deleteAllAuthenticatedUsers() to remove access for different types of entities. 
+ await storage + .bucket(bucketName) + .file(fileName) + .acl.owners.deleteUser(userEmail); + + console.log(`Removed user ${userEmail} from file ${fileName}.`); + } + + removeFileOwner().catch(console.error); + // [END storage_remove_file_owner] +} + +main(...process.argv.slice(2)); diff --git a/storage/removeRetentionPolicy.js b/storage/removeRetentionPolicy.js new file mode 100644 index 0000000000..f7ec28c7d0 --- /dev/null +++ b/storage/removeRetentionPolicy.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main(bucketName = 'my-bucket') { + // [START storage_remove_retention_policy] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function removeRetentionPolicy() { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + if (metadata.retentionPolicy && metadata.retentionPolicy.isLocked) { + console.log( + 'Unable to remove retention period as retention policy is locked.' + ); + return null; + } else { + const results = await storage.bucket(bucketName).removeRetentionPeriod(); + console.log(`Removed bucket ${bucketName} retention policy.`); + return results; + } + } + + removeRetentionPolicy().catch(console.error); + // [END storage_remove_retention_policy] +} +main(...process.argv.slice(2)); diff --git a/storage/renameFile.js b/storage/renameFile.js new file mode 100644 index 0000000000..0bd53108f1 --- /dev/null +++ b/storage/renameFile.js @@ -0,0 +1,59 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */ + +function main( + srcBucketName = 'my-bucket', + srcFileName = 'test2.txt', + destFileName = 'test4.txt' +) { + // [START storage_rename_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of the bucket the original file is in + // const srcBucketName = 'your-source-bucket'; + + // The ID of the GCS file to rename + // const srcFilename = 'your-file-name'; + + // The new ID of the GCS file + // const destFileName = 'target-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function renameFile() { + // renames the file + await storage.bucket(srcBucketName).file(srcFileName).rename(destFileName); + + console.log( + `gs://${srcBucketName}/${srcFileName} renamed to gs://${srcBucketName}/${destFileName}.` + ); + } + + renameFile().catch(console.error); + // [END storage_rename_file] +} +main(...process.argv.slice(2)); diff --git a/storage/resources/.gitignore b/storage/resources/.gitignore new file mode 100644 index 0000000000..6738013702 --- /dev/null +++ b/storage/resources/.gitignore @@ -0,0 +1 @@ +downloaded.txt diff --git a/storage/resources/resourcesSub1/testSub1.txt b/storage/resources/resourcesSub1/testSub1.txt new file mode 100644 index 0000000000..51f4b307d5 --- /dev/null +++ b/storage/resources/resourcesSub1/testSub1.txt @@ -0,0 +1,2 @@ +Sub1 +Hello World! \ No newline at end of file diff --git a/storage/resources/test.txt b/storage/resources/test.txt new file mode 100644 index 0000000000..c57eff55eb --- /dev/null +++ b/storage/resources/test.txt @@ -0,0 +1 @@ +Hello World! \ No newline at end of file diff --git a/storage/resources/test2.txt b/storage/resources/test2.txt new file mode 100644 index 0000000000..010302410b --- /dev/null +++ b/storage/resources/test2.txt @@ -0,0 +1 @@ +Hello World 2! 
\ No newline at end of file diff --git a/storage/restoreSoftDeletedBucket.js b/storage/restoreSoftDeletedBucket.js new file mode 100644 index 0000000000..c6a2bbff5f --- /dev/null +++ b/storage/restoreSoftDeletedBucket.js @@ -0,0 +1,48 @@ +/** + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main(bucketName = 'my-bucket', generation = 123456789) { + // [START storage_restore_soft_deleted_bucket] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The generation of the bucket to restore + // const generation = 123456789; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function restoreSoftDeletedBucket() { + const options = { + generation: generation, + }; + + await storage.bucket(bucketName).restore(options); + + console.log(`Soft deleted bucket ${bucketName} was restored.`); + } + + restoreSoftDeletedBucket().catch(console.error); + // [END storage_restore_soft_deleted_bucket] +} + +main(...process.argv.slice(2)); diff --git a/storage/restoreSoftDeletedObject.js b/storage/restoreSoftDeletedObject.js new file mode 100644 index 0000000000..396ce52e78 --- /dev/null +++ b/storage/restoreSoftDeletedObject.js @@ -0,0 +1,58 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + generation = 123456789 +) { + // [START storage_restore_object] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The generation of your GCS file to restore + // const generation = 123456789; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function restoreSoftDeletedObject() { + const options = { + generation: generation, + }; + + const restoredFile = await storage + .bucket(bucketName) + .file(fileName) + .restore(options); + + console.log(`Soft deleted object ${restoredFile.name} was restored`); + } + + restoreSoftDeletedObject().catch(console.error); + // [END storage_restore_object] +} + +main(...process.argv.slice(2)); diff --git a/storage/rotateEncryptionKey.js b/storage/rotateEncryptionKey.js new file mode 100644 index 0000000000..9bcbd45011 --- /dev/null +++ b/storage/rotateEncryptionKey.js @@ -0,0 +1,82 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on encrypted + * files with the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */
+
+function main(
+  bucketName = 'my-bucket',
+  fileName = 'test.txt',
+  oldKey = process.env.GOOGLE_CLOUD_KMS_KEY_US,
+  newKey = process.env.GOOGLE_CLOUD_KMS_KEY_ASIA,
+  generationMatchPrecondition = 0
+) {
+  // [START storage_rotate_encryption_key]
+  /**
+   * TODO(developer): Uncomment the following lines before running the sample.
+   */
+  // The ID of your GCS bucket
+  // const bucketName = 'your-unique-bucket-name';
+
+  // The ID of your GCS file
+  // const fileName = 'your-file-name';
+
+  // The Base64 encoded AES-256 encryption key originally used to encrypt the
+  // object. See the documentation on Customer-Supplied Encryption keys for
+  // more info:
+  // https://cloud.google.com/storage/docs/encryption/using-customer-supplied-keys
+
+  // const oldKey = 'TIbv/fjexq+VmtXzAlc63J4z5kFmWJ6NdAPQulQBT7g=';
+
+  // The new encryption key to use
+  // const newKey = '0mMWhFvQOdS4AmxRpo8SJxXn5MjFhbz7DkKBUdUIef8=';
+
+  // Imports the Google Cloud client library
+  const {Storage} = require('@google-cloud/storage');
+
+  // Creates a client
+  const storage = new Storage();
+
+  async function rotateEncryptionKey() {
+    const rotateEncryptionKeyOptions = {
+      encryptionKey: Buffer.from(newKey, 'base64'),
+
+      // Optional: set a generation-match precondition to avoid potential race
+      // conditions and data corruptions. The request to copy is aborted if the
+      // object's generation number does not match your precondition.
+      preconditionOpts: {
+        ifGenerationMatch: generationMatchPrecondition,
+      },
+    };
+    await storage
+      .bucket(bucketName)
+      .file(fileName, {
+        encryptionKey: Buffer.from(oldKey, 'base64'),
+      })
+      .rotateEncryptionKey(
+        rotateEncryptionKeyOptions
+      );
+
+    console.log('Encryption key rotated successfully');
+  }
+
+  rotateEncryptionKey().catch(console.error);
+  // [END storage_rotate_encryption_key]
+}
+main(...process.argv.slice(2));
diff --git a/storage/scripts/cleanup b/storage/scripts/cleanup
new file mode 100644
index 0000000000..61bd73114f
--- /dev/null
+++ b/storage/scripts/cleanup
@@ -0,0 +1,44 @@
+#!/usr/bin/env node
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +'use strict'; + +const {Storage} = require('@google-cloud/storage'); +const storage = new Storage(); +const NAME_REG_EXP = /^nodejs-storage-samples-[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$/; + +storage + .getBuckets() + .then(([buckets]) => { + let promise = Promise.resolve(); + + buckets + .filter((bucket) => NAME_REG_EXP.test(bucket.name)) + .forEach((bucket) => { + promise = promise.then(() => { + return bucket.deleteFiles() + .then(() => bucket.deleteFiles(), console.error) + .then(() => { + console.log(`Deleting ${bucket.name}`); + return bucket.delete(); + }, console.error) + .catch(console.error); + }); + }); + }) + .catch((err) => { + console.error('ERROR:', err); + }); diff --git a/storage/setAutoclass.js b/storage/setAutoclass.js new file mode 100644 index 0000000000..d8c906ac09 --- /dev/null +++ b/storage/setAutoclass.js @@ -0,0 +1,58 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main( + bucketName = 'my-bucket', + toggle = true, + terminalStorageClass = 'ARCHIVE' +) { + // [START storage_set_autoclass] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The terminal storage class to be set on your GCS bucket. Valid values are NEARLINE and ARCHIVE. 
+ // const terminalStorageClass = 'NEARLINE'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function setAutoclass() { + // Configure the Autoclass setting for a bucket. + // terminalStorageClass field is optional and defaults to NEARLINE if not otherwise specified. + // Valid terminalStorageClass values are NEARLINE and ARCHIVE. + const [metadata] = await storage.bucket(bucketName).setMetadata({ + autoclass: { + enabled: toggle, + terminalStorageClass, + }, + }); + + console.log( + `Autoclass terminal storage class is ${metadata.autoclass.terminalStorageClass}.` + ); + } + + setAutoclass().catch(console.error); + // [END storage_set_autoclass] +} + +main(...process.argv.slice(2)); diff --git a/storage/setClientEndpoint.js b/storage/setClientEndpoint.js new file mode 100644 index 0000000000..912d8ef77e --- /dev/null +++ b/storage/setClientEndpoint.js @@ -0,0 +1,48 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates set a custom endpoint with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. 
+ */ + +function main(apiEndpoint = 'https://storage.googleapis.com') { + // [START storage_set_client_endpoint] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The custom endpoint to which requests should be made + // const apiEndpoint = 'https://yourcustomendpoint.com'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage({ + apiEndpoint: apiEndpoint, + useAuthWithCustomEndpoint: true, + }); + + console.log(`Client initiated with endpoint: ${storage.apiEndpoint}.`); + + // [END storage_set_client_endpoint] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/setEventBasedHold.js b/storage/setEventBasedHold.js new file mode 100644 index 0000000000..cda14b4c3a --- /dev/null +++ b/storage/setEventBasedHold.js @@ -0,0 +1,65 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. 
+ * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + metagenerationMatchPrecondition = 0 +) { + // [START storage_set_event_based_hold] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function setEventBasedHold() { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; + + // Set event-based hold + await storage.bucket(bucketName).file(fileName).setMetadata( + { + eventBasedHold: true, + }, + options + ); + console.log(`Event-based hold was set for ${fileName}.`); + } + + setEventBasedHold().catch(console.error); + // [END storage_set_event_based_hold] +} +main(...process.argv.slice(2)); diff --git a/storage/setObjectRetentionPolicy.js b/storage/setObjectRetentionPolicy.js new file mode 100644 index 0000000000..9cfe3444ca --- /dev/null +++ b/storage/setObjectRetentionPolicy.js @@ -0,0 +1,96 @@ +/** + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Set the object retention policy of a File. +// description: Set the object retention policy of a File. +// usage: node setObjectRetentionPolicy.js + +function main( + bucketName = 'my-bucket', + destFileName = 'file.txt', + contents = 'this is the file content' +) { + // [START storage_set_object_retention_policy] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // The content to be uploaded in the GCS file + // const contents = 'your file content'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + // The bucket in the sample below will be created in the project associated with this client. 
+ // For more information, please see https://cloud.google.com/docs/authentication/production or https://googleapis.dev/nodejs/storage/latest/Storage.html + const storage = new Storage(); + + async function setObjectRetentionPolicy() { + // Get a reference to the bucket + const myBucket = storage.bucket(bucketName); + + // Create a reference to a file object + const file = myBucket.file(destFileName); + + // Save the file data + await file.save(contents); + + // Set the retention policy for the file + const retentionDate = new Date(); + retentionDate.setDate(retentionDate.getDate() + 10); + const [metadata] = await file.setMetadata({ + retention: { + mode: 'Unlocked', + retainUntilTime: retentionDate.toISOString(), + }, + }); + + console.log( + `Retention policy for file ${file.name} was set to: ${metadata.retention.mode}` + ); + + // To modify an existing policy on an unlocked file object, pass in the override parameter + const newRetentionDate = new Date(retentionDate.getTime()); + newRetentionDate.setDate(newRetentionDate.getDate() + 9); + const [newMetadata] = await file.setMetadata({ + retention: { + mode: 'Unlocked', + retainUntilTime: newRetentionDate, + }, + overrideUnlockedRetention: true, + }); + + console.log( + `Retention policy for file ${file.name} was updated to: ${newMetadata.retention.retainUntilTime}` + ); + } + + setObjectRetentionPolicy().catch(console.error); + // [END storage_set_object_retention_policy] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/setPublicAccessPreventionEnforced.js b/storage/setPublicAccessPreventionEnforced.js new file mode 100644 index 0000000000..036bf8bec6 --- /dev/null +++ b/storage/setPublicAccessPreventionEnforced.js @@ -0,0 +1,54 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_set_public_access_prevention_enforced] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The name of your GCS bucket + // const bucketName = 'Name of a bucket, e.g. my-bucket'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Enforces public access prevention for the bucket + async function setPublicAccessPreventionEnforced() { + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + publicAccessPrevention: 'enforced', + }, + }); + + console.log( + `Public access prevention is set to enforced for ${bucketName}.` + ); + } + + setPublicAccessPreventionEnforced().catch(console.error); + // [END storage_set_public_access_prevention_enforced] +} + +main(...process.argv.slice(2)); diff --git a/storage/setPublicAccessPreventionInherited.js b/storage/setPublicAccessPreventionInherited.js new file mode 100644 index 0000000000..14f61e0cdb --- /dev/null +++ b/storage/setPublicAccessPreventionInherited.js @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_set_public_access_prevention_inherited] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The name of your GCS bucket + // const bucketName = 'Name of a bucket, e.g. my-bucket'; + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + async function setPublicAccessPreventionInherited() { + // Sets public access prevention to 'inherited' for the bucket + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + publicAccessPrevention: 'inherited', + }, + }); + + console.log(`Public access prevention is 'inherited' for ${bucketName}.`); + } + + setPublicAccessPreventionInherited().catch(console.error); + // [END storage_set_public_access_prevention_inherited] +} +main(...process.argv.slice(2)); diff --git a/storage/setRPOAsyncTurbo.js b/storage/setRPOAsyncTurbo.js new file mode 100644 index 0000000000..3ce3c54701 --- /dev/null +++ b/storage/setRPOAsyncTurbo.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_set_rpo_async_turbo] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The name of your GCS bucket in a dual-region + // const bucketName = 'Name of a bucket, e.g. my-bucket'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Enable turbo replication for the bucket by setting rpo to ASYNC_TURBO. + // The bucket must be a dual-region bucket. + async function setRPOAsyncTurbo() { + await storage.bucket(bucketName).setMetadata({ + rpo: 'ASYNC_TURBO', + }); + + console.log(`Turbo replication enabled for ${bucketName}.`); + } + + setRPOAsyncTurbo(); + // [END storage_set_rpo_async_turbo] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/setRPODefault.js b/storage/setRPODefault.js new file mode 100644 index 0000000000..0e1f604c2e --- /dev/null +++ b/storage/setRPODefault.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on buckets with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_set_rpo_default] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The name of your GCS bucket in a dual-region + // const bucketName = 'Name of a bucket, e.g. my-bucket'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Disable turbo replication for the bucket by setting RPO to default. + // The bucket must be a dual-region bucket. + async function setRPODefault() { + await storage.bucket(bucketName).setMetadata({ + rpo: 'DEFAULT', + }); + + console.log(`Turbo replication disabled for ${bucketName}.`); + } + + setRPODefault(); + // [END storage_set_rpo_default] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/setRetentionPolicy.js b/storage/setRetentionPolicy.js new file mode 100644 index 0000000000..21d4d22010 --- /dev/null +++ b/storage/setRetentionPolicy.js @@ -0,0 +1,52 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main(bucketName = 'my-bucket', retentionPeriod = 5) { + // [START storage_set_retention_policy] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The retention period for objects in bucket + // const retentionPeriod = 3600; // 1 hour in seconds + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function setRetentionPolicy() { + const [metadata] = await storage + .bucket(bucketName) + .setRetentionPeriod(retentionPeriod); + console.log( + `Bucket ${bucketName} retention period set for ${metadata.retentionPolicy.retentionPeriod} seconds.` + ); + } + + setRetentionPolicy().catch(console.error); + // [END storage_set_retention_policy] +} +main(...process.argv.slice(2)); diff --git a/storage/setSoftDeletePolicy.js b/storage/setSoftDeletePolicy.js new file mode 100644 index 0000000000..89f631604b --- /dev/null +++ b/storage/setSoftDeletePolicy.js @@ -0,0 +1,47 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +function main(bucketName = 'my-bucket') { + // [START storage_set_soft_delete_policy] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function setSoftDeletePolicy() { + const options = { + softDeletePolicy: { + retentionDurationSeconds: 604800, // 7 days (in seconds) + }, + }; + + const [metadata] = await storage.bucket(bucketName).setMetadata(options); + + console.log(`Bucket ${metadata.name} soft delete policy set to 7 days`); + } + + setSoftDeletePolicy().catch(console.error); + // [END storage_set_soft_delete_policy] +} + +main(...process.argv.slice(2)); diff --git a/storage/setTemporaryHold.js b/storage/setTemporaryHold.js new file mode 100644 index 0000000000..db503748a0 --- /dev/null +++ b/storage/setTemporaryHold.js @@ -0,0 +1,64 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to use Bucket Lock operations on buckets + * and objects using the Google Cloud Storage API. + * + * For more information read the documentation + * at https://cloud.google.com/storage/docs/bucket-lock + */ + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + metagenerationMatchPrecondition = 0 +) { + // [START storage_set_temporary_hold] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function setTemporaryHold() { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; + + await storage.bucket(bucketName).file(fileName).setMetadata( + { + temporaryHold: true, + }, + options + ); + console.log(`Temporary hold was set for ${fileName}.`); + } + + setTemporaryHold().catch(console.error); + // [END storage_set_temporary_hold] +} +main(...process.argv.slice(2)); diff --git a/storage/streamFileDownload.js b/storage/streamFileDownload.js new file mode 100644 index 0000000000..b25aa87f7d --- /dev/null +++ b/storage/streamFileDownload.js @@ -0,0 +1,71 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ +const path = require('path'); +const cwd = path.join(__dirname, '..'); +const fs = require('fs'); + +function main( + bucketName = 'my-bucket', + fileName = 'test.txt', + destFileName = path.join(cwd, 'downloaded.txt') +) { + // [START storage_stream_file_download] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of your GCS file + // const fileName = 'your-file-name'; + + // The filename and file path where you want to download the file + // const destFileName = '/local/path/to/file.txt'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function streamFileDownload() { + // The example below demonstrates how we can reference a remote file, then + // pipe its contents to a local file. 
+ // Once the stream is created, the data can be piped anywhere (process, stdout, etc) + await new Promise((resolve, reject) => + storage + .bucket(bucketName).file(fileName) + .createReadStream() //stream is created + .pipe(fs.createWriteStream(destFileName)) + .on('error', reject) + .on('finish', resolve) // The file download is complete + ); + + console.log( + `gs://${bucketName}/${fileName} downloaded to ${destFileName}.` + ); + } + + streamFileDownload().catch(console.error); + // [END storage_stream_file_download] +} +main(...process.argv.slice(2)); diff --git a/storage/streamFileUpload.js b/storage/streamFileUpload.js new file mode 100644 index 0000000000..19c240f603 --- /dev/null +++ b/storage/streamFileUpload.js @@ -0,0 +1,73 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs.
+ */ + +function main( + bucketName = 'my-bucket', + destFileName = 'file.txt', + contents = 'this is the file content' +) { + // [START storage_stream_file_upload] + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // The content to be uploaded in the GCS file + // const contents = 'your file content'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Import Node.js stream + const stream = require('stream'); + + // Creates a client + const storage = new Storage(); + + // Get a reference to the bucket + const myBucket = storage.bucket(bucketName); + + // Create a reference to a file object + const file = myBucket.file(destFileName); + + // Create a pass through stream from a string + const passthroughStream = new stream.PassThrough(); + passthroughStream.write(contents); + passthroughStream.end(); + + async function streamFileUpload() { + passthroughStream.pipe(file.createWriteStream()).on('finish', () => { + // The file upload is complete + }); + + console.log(`${destFileName} uploaded to ${bucketName}`); + } + + streamFileUpload().catch(console.error); + // [END storage_stream_file_upload] +} + +main(...process.argv.slice(2)); diff --git a/storage/system-test/acl.test.js b/storage/system-test/acl.test.js new file mode 100644 index 0000000000..5074b04b76 --- /dev/null +++ b/storage/system-test/acl.test.js @@ -0,0 +1,144 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, after, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); +const path = require('path'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const bucketName = `nodejs-storage-samples-${uuid.v4()}`; +const bucket = storage.bucket(bucketName); +const userEmail = 'jdobry@google.com'; +const fileName = 'test.txt'; +const filePath = path.join(__dirname, '..', 'resources', fileName); + +before(async () => { + await bucket.create(); + await bucket.upload(filePath); +}); + +after(async () => { + try { + await bucket.deleteFiles({force: true}); + } catch (err) { + // ignore error + } + try { + await bucket.deleteFiles({force: true}); + } catch (err) { + // ignore error + } + try { + await bucket.delete(); + } catch (err) { + // ignore error + } +}); + +it('should print acl for a bucket', () => { + const out = execSync(`node printBucketAcl.js ${bucketName}`); + assert.match(out, /OWNER: project-editors-/); + assert.match(out, /OWNER: project-owners-/); + assert.match(out, /READER: project-viewers-/); +}); + +it('should print acl for a file', () => { + const out = execSync(`node printFileAcl.js ${bucketName} ${fileName}`); + assert.match(out, /OWNER: project-editors-/); + assert.match(out, /OWNER: project-owners-/); + assert.match(out, /READER: project-viewers-/); +}); + +it('should print a users acl for a bucket', async () => { + await 
bucket.acl.readers.addUser(userEmail); + const out = execSync( + `node printBucketAclForUser.js ${bucketName} ${userEmail}` + ); + assert.match(out, new RegExp(`READER: user-${userEmail}`)); + await bucket.acl.readers.deleteUser(userEmail); +}); + +it('should add a user as an owner on a bucket', () => { + const out = execSync(`node addBucketOwnerAcl.js ${bucketName} ${userEmail}`); + assert.match( + out, + new RegExp(`Added user ${userEmail} as an owner on bucket ${bucketName}.`) + ); +}); + +it('should remove a user from a bucket', () => { + const out = execSync( + `node removeBucketOwnerAcl.js ${bucketName} ${userEmail}` + ); + assert.match( + out, + new RegExp(`Removed user ${userEmail} from bucket ${bucketName}.`) + ); +}); + +it('should add a user as a default owner on a bucket', () => { + const out = execSync( + `node addBucketDefaultOwnerAcl.js ${bucketName} ${userEmail}` + ); + assert.match( + out, + new RegExp(`Added user ${userEmail} as an owner on bucket ${bucketName}.`) + ); +}); + +it('should remove a default user from a bucket', () => { + const out = execSync( + `node removeBucketDefaultOwner.js ${bucketName} ${userEmail}` + ); + assert.match( + out, + new RegExp(`Removed user ${userEmail} from bucket ${bucketName}.`) + ); +}); + +it('should print a users acl for a file', async () => { + await bucket.file(fileName).acl.readers.addUser(userEmail); + const out = execSync( + `node printFileAclForUser.js ${bucketName} ${fileName} ${userEmail}` + ); + assert.match(out, new RegExp(`READER: user-${userEmail}`)); + await bucket.file(fileName).acl.readers.deleteUser(userEmail); +}); + +it('should add a user as an owner on a bucket', () => { + const out = execSync( + `node addFileOwnerAcl.js ${bucketName} ${fileName} ${userEmail}` + ); + assert.match( + out, + new RegExp(`Added user ${userEmail} as an owner on file ${fileName}.`) + ); +}); + +it('should remove a user from a bucket', () => { + const out = execSync( + `node removeFileOwnerAcl.js ${bucketName} 
${fileName} ${userEmail}` + ); + assert.match( + out, + new RegExp(`Removed user ${userEmail} from file ${fileName}.`) + ); +}); diff --git a/storage/system-test/bucketLifecycle.test.js b/storage/system-test/bucketLifecycle.test.js new file mode 100644 index 0000000000..8b1adf9b55 --- /dev/null +++ b/storage/system-test/bucketLifecycle.test.js @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, beforeEach, after, describe, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const bucketName = `nodejs-storage-samples-${uuid.v4()}`; +const bucket = storage.bucket(bucketName); + +describe('Bucket lifecycle management', () => { + before(async () => { + await bucket.create(); + }); + + beforeEach(async () => { + await bucket.setMetadata({lifecycle: null}); + }); + + after(async () => { + await bucket.delete().catch(console.error); + }); + + it('should add a lifecycle delete rule', async () => { + const output = execSync( + `node enableBucketLifecycleManagement.js ${bucketName}` + ); + assert.include( + output, + `Lifecycle management is enabled for bucket ${bucketName} and the rules are:` + ); + const [metadata] = await bucket.getMetadata(); + 
assert.deepStrictEqual(metadata.lifecycle.rule[0], { + action: {type: 'Delete'}, + condition: {age: 100}, + }); + }); + + it('should disable all lifecycle rules', async () => { + // Add a lifecycle rule in order for the sample to delete. + await bucket.addLifecycleRule({ + action: {type: 'Delete'}, + condition: {age: 100}, + }); + + const [metadata] = await bucket.getMetadata(); + assert.deepStrictEqual(metadata.lifecycle.rule[0], { + action: {type: 'Delete'}, + condition: {age: 100}, + }); + + const output = execSync( + `node disableBucketLifecycleManagement.js ${bucketName}` + ); + assert.include( + output, + `Lifecycle management is disabled for bucket ${bucketName}` + ); + const [newMetadata] = await bucket.getMetadata(); + assert.isUndefined(newMetadata.lifecycle); + }); +}); diff --git a/storage/system-test/bucketLock.test.js b/storage/system-test/bucketLock.test.js new file mode 100644 index 0000000000..7f4693adeb --- /dev/null +++ b/storage/system-test/bucketLock.test.js @@ -0,0 +1,145 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const path = require('path'); +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, after, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const cwd = path.join(__dirname, '..'); +const bucketName = `nodejs-storage-samples-${uuid.v4()}`; +const bucket = storage.bucket(bucketName); +const fileName = 'test.txt'; + +const uploadFilePath = path.join(cwd, 'resources', fileName); + +before(async () => { + await bucket.create(); + await bucket.upload(uploadFilePath); +}); + +after(async () => { + try { + await bucket.deleteFiles({force: true}); + } catch (err) { + // ignore error + } + try { + await bucket.delete(); + } catch (err) { + // ignore error + } +}); + +it('should set a retention policy on a bucket', () => { + const retentionPeriod = 5; + const output = execSync( + `node setRetentionPolicy.js ${bucketName} ${retentionPeriod}` + ); + assert.match( + output, + new RegExp( + `Bucket ${bucketName} retention period set for ${retentionPeriod} seconds` + ) + ); +}); + +it('should get a retention policy on a bucket', () => { + const output = execSync(`node getRetentionPolicy.js ${bucketName}`); + assert.match(output, /A retention policy exists!/); +}); + +it('should enable default event-based hold on a bucket', () => { + const output = execSync(`node enableDefaultEventBasedHold.js ${bucketName}`); + assert.match( + output, + new RegExp(`Default event-based hold was enabled for ${bucketName}.`) + ); +}); + +it('should get default event-based hold on a bucket', () => { + const output = execSync(`node getDefaultEventBasedHold.js ${bucketName}`); + assert.match(output, /Default event-based hold: true./); +}); + +it('should disable default event-based hold on a bucket', () => { + const output = execSync(`node disableDefaultEventBasedHold.js ${bucketName}`); + 
assert.match( + output, + new RegExp(`Default event-based hold was disabled for ${bucketName}`) + ); +}); + +it('should set an event-based hold on a file', async () => { + const [metadata] = await bucket.file(fileName).getMetadata(); + const output = execSync( + `node setEventBasedHold.js ${bucketName} ${fileName} ${metadata.metageneration}` + ); + assert.match(output, new RegExp(`Event-based hold was set for ${fileName}`)); +}); + +it('should release an event-based hold on a file', async () => { + const [metadata] = await bucket.file(fileName).getMetadata(); + const output = execSync( + `node releaseEventBasedHold.js ${bucketName} ${fileName} ${metadata.metageneration}` + ); + assert.match( + output, + new RegExp(`Event-based hold was released for ${fileName}.`) + ); +}); + +it('should remove a retention policy on a bucket', () => { + const output = execSync(`node removeRetentionPolicy.js ${bucketName}`); + assert.match( + output, + new RegExp(`Removed bucket ${bucketName} retention policy.`) + ); +}); + +it('should set an temporary hold on a file', async () => { + const [metadata] = await bucket.file(fileName).getMetadata(); + const output = execSync( + `node setTemporaryHold.js ${bucketName} ${fileName} ${metadata.metageneration}` + ); + assert.match(output, new RegExp(`Temporary hold was set for ${fileName}.`)); +}); + +it('should release an temporary hold on a file', async () => { + const [metadata] = await bucket.file(fileName).getMetadata(); + const output = execSync( + `node releaseTemporaryHold.js ${bucketName} ${fileName} ${metadata.metageneration}` + ); + assert.match( + output, + new RegExp(`Temporary hold was released for ${fileName}.`) + ); +}); + +it('should lock a bucket with a retention policy', () => { + const retentionPeriod = 5; + execSync(`node setRetentionPolicy.js ${bucketName} ${retentionPeriod}`); + const output = execSync(`node lockRetentionPolicy.js ${bucketName}`); + assert.match( + output, + new RegExp(`Retention policy for 
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

'use strict';

const {Storage} = require('@google-cloud/storage');
const {assert} = require('chai');
// Fix: `afterEach` is used below but was not destructured from mocha,
// inconsistent with this file's explicit-import convention.
const {after, afterEach, it} = require('mocha');
const cp = require('child_process');
const uuid = require('uuid');

// Run a sample script as a child process and capture its stdout for assertions.
const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});

const storage = new Storage();
// Shared prefix so cleanup can find every bucket this run created.
const samplesTestBucketPrefix = `nodejs-storage-samples-${uuid.v4()}`;
const bucketName = `${samplesTestBucketPrefix}-a`;
const bucketNameDualRegion = `${samplesTestBucketPrefix}-b`;
const bucketNameDualRegionTurbo = `${samplesTestBucketPrefix}-c`;
const bucketNameWithClassAndLocation = `${samplesTestBucketPrefix}-d`;
const bucketNameAutoclass = `${samplesTestBucketPrefix}-e`;
const bucketNameObjectRetention = `${samplesTestBucketPrefix}-f`;
const bucketNameHierarchicalNamespace = `${samplesTestBucketPrefix}-g`;
const defaultKmsKeyName = process.env.GOOGLE_CLOUD_KMS_KEY_ASIA;
const bucket = storage.bucket(bucketName);
const bucketWithClassAndLocation = storage.bucket(
  bucketNameWithClassAndLocation
);
const dualRegionBucket = storage.bucket(bucketNameDualRegion);
const dualRegionBucketTurbo = storage.bucket(bucketNameDualRegionTurbo);
const objectRetentionBucket = storage.bucket(bucketNameObjectRetention);
const hierarchicalNamespaceBucket = storage.bucket(
  bucketNameHierarchicalNamespace
);

const PUBLIC_ACCESS_PREVENTION_INHERITED = 'inherited';
const PUBLIC_ACCESS_PREVENTION_ENFORCED = 'enforced';

const DUAL_REGION = {
  LOCATION: 'US',
  REGIONS: ['US-EAST1', 'US-WEST1'],
};
const RPO_ASYNC_TURBO = 'ASYNC_TURBO';
const RPO_DEFAULT = 'DEFAULT';

// Delete every bucket created by this test run (found via the shared prefix).
async function deleteAllBucketsAsync() {
  const [buckets] = await storage.getBuckets({prefix: samplesTestBucketPrefix});

  for (const bucket of buckets) {
    await bucket.deleteFiles({force: true});
    await bucket.delete({ignoreNotFound: true});
  }
}

after(deleteAllBucketsAsync);
afterEach(async () => {
  // Brief pause between tests to avoid bucket-metadata rate limits.
  await new Promise(res => setTimeout(res, 1000));
});

it('should create a bucket', async () => {
  const output = execSync(`node createNewBucket.js ${bucketName}`);
  assert.match(output, new RegExp(`Bucket ${bucketName} created`));
  const [exists] = await bucket.exists();
  assert.strictEqual(exists, true);
});

it('should list buckets', () => {
  const output = execSync('node listBuckets.js');
  assert.match(output, /Buckets:/);
  assert.match(output, new RegExp(bucketName));
});

it('should get bucket metadata', async () => {
  const output = execSync(`node bucketMetadata.js ${bucketName}`);
  assert.include(output, bucketName);
});

it('should set autoclass terminal storage class to ARCHIVE', async () => {
  await storage.createBucket(bucketNameAutoclass, {
    autoclass: {
      enabled: true,
      terminalStorageClass: 'NEARLINE',
    },
  });
  const output = execSync(
    `node setAutoclass.js ${bucketNameAutoclass} ${true} ARCHIVE`
  );
  assert.include(output, 'ARCHIVE');
});

it('should disable autoclass', async () => {
  const output = execSync(
    `node setAutoclass.js ${bucketNameAutoclass} ${false}`
  );
  assert.include(output, 'Autoclass');
});

it('should get autoclass', async () => {
  const output = execSync(`node getAutoclass.js ${bucketNameAutoclass}`);
  assert.include(output, `Autoclass is disabled for ${bucketNameAutoclass}`);
});

it('should set a buckets default KMS key', async () => {
  const output = execSync(
    `node enableDefaultKMSKey.js ${bucketName} ${defaultKmsKeyName}`
  );
  assert.include(
    output,
    `Default KMS key for ${bucketName} was set to ${defaultKmsKeyName}`
  );
  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(
    metadata.encryption.defaultKmsKeyName,
    defaultKmsKeyName
  );
});

it('should remove a buckets default KMS key', async () => {
  const output = execSync(`node removeDefaultKMSKey.js ${bucketName}`);
  assert.include(output, `Default KMS key was removed from ${bucketName}`);
  const [metadata] = await bucket.getMetadata();
  assert.ok(!metadata.encryption);
});

it("should enable a bucket's uniform bucket-level access", async () => {
  const output = execSync(
    `node enableUniformBucketLevelAccess.js ${bucketName}`
  );
  assert.match(
    output,
    new RegExp(`Uniform bucket-level access was enabled for ${bucketName}`)
  );

  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(
    metadata.iamConfiguration.uniformBucketLevelAccess.enabled,
    true
  );
});

it("should get a bucket's uniform bucket-level access metadata", async () => {
  const output = execSync(`node getUniformBucketLevelAccess.js ${bucketName}`);

  assert.match(
    output,
    new RegExp(`Uniform bucket-level access is enabled for ${bucketName}`)
  );

  const [metadata] = await bucket.getMetadata();
  assert.ok(metadata.iamConfiguration.uniformBucketLevelAccess.enabled);
  assert.strictEqual(
    metadata.iamConfiguration.uniformBucketLevelAccess.lockedTime !== null,
    true
  );
});

it("should disable a bucket's uniform bucket-level access", async () => {
  const output = execSync(
    `node disableUniformBucketLevelAccess.js ${bucketName}`
  );
  assert.match(
    output,
    new RegExp(`Uniform bucket-level access was disabled for ${bucketName}`)
  );

  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(
    metadata.iamConfiguration.uniformBucketLevelAccess.enabled,
    false
  );
});

it('should configure a bucket cors', async () => {
  execSync(
    `node configureBucketCors.js ${bucketName} 3600 POST http://example.appspot.com content-type`
  );
  const [metadata] = await bucket.getMetadata();
  assert.deepStrictEqual(metadata.cors[0], {
    origin: ['http://example.appspot.com'],
    method: ['POST'],
    responseHeader: ['content-type'],
    maxAgeSeconds: 3600,
  });
});

it('should remove a bucket cors configuration', async () => {
  const output = execSync(`node removeBucketCors.js ${bucketName}`);
  assert.include(
    output,
    `Removed CORS configuration from bucket ${bucketName}`
  );
  const [metadata] = await bucket.getMetadata();
  assert.ok(!metadata.cors);
});

it('should set public access prevention to enforced', async () => {
  const output = execSync(
    `node setPublicAccessPreventionEnforced.js ${bucketName}`
  );
  assert.match(
    output,
    new RegExp(`Public access prevention is set to enforced for ${bucketName}.`)
  );

  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(
    metadata.iamConfiguration.publicAccessPrevention,
    PUBLIC_ACCESS_PREVENTION_ENFORCED
  );
});

it("should get a bucket's public access prevention metadata", async () => {
  await storage.bucket(bucketName).setMetadata({
    iamConfiguration: {
      publicAccessPrevention: PUBLIC_ACCESS_PREVENTION_ENFORCED,
    },
  });

  const output = execSync(`node getPublicAccessPrevention.js ${bucketName}`);

  assert.match(
    output,
    new RegExp(`Public access prevention is enforced for ${bucketName}.`)
  );

  const [metadata] = await bucket.getMetadata();
  assert.ok(metadata.iamConfiguration.publicAccessPrevention);
});

it('should set public access prevention to inherited', async () => {
  const output = execSync(
    `node setPublicAccessPreventionInherited.js ${bucketName}`
  );
  assert.match(
    output,
    new RegExp(`Public access prevention is 'inherited' for ${bucketName}.`)
  );

  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(
    metadata.iamConfiguration.publicAccessPrevention,
    PUBLIC_ACCESS_PREVENTION_INHERITED
  );
});

it('should create a dual-region bucket', async () => {
  const output = execSync(
    `node createBucketWithDualRegion.js ${bucketNameDualRegion} ${DUAL_REGION.LOCATION} ${DUAL_REGION.REGIONS[0]} ${DUAL_REGION.REGIONS[1]}`
  );

  // Ensure the sample outputs the desired result
  assert.include(output, bucketNameDualRegion);
  assert.include(output, DUAL_REGION.LOCATION);
  assert.include(output, DUAL_REGION.REGIONS[0]);
  assert.include(output, DUAL_REGION.REGIONS[1]);
  assert.include(output, 'dual-region');

  // Make API request for further verification
  const [exists] = await dualRegionBucket.exists();
  assert.strictEqual(exists, true);

  const [metadata] = await dualRegionBucket.getMetadata();

  assert.strictEqual(metadata.location, DUAL_REGION.LOCATION);

  assert(metadata.customPlacementConfig);
  assert(Array.isArray(metadata.customPlacementConfig.dataLocations));

  const dataLocations = metadata.customPlacementConfig.dataLocations;

  assert(dataLocations.includes(DUAL_REGION.REGIONS[0]));
  assert(dataLocations.includes(DUAL_REGION.REGIONS[1]));

  assert.strictEqual(metadata.locationType, 'dual-region');
});

it('should create a dual-region bucket with turbo replication enabled', async () => {
  const output = execSync(
    `node createBucketWithTurboReplication.js ${bucketNameDualRegionTurbo}`
  );
  assert.match(
    output,
    new RegExp(
      `${bucketNameDualRegionTurbo} created with the recovery point objective \\(RPO\\) set to ASYNC_TURBO in NAM4.`
    )
  );
  const [exists] = await dualRegionBucketTurbo.exists();
  assert.strictEqual(exists, true);
});

it("should get a bucket's RPO metadata", async () => {
  await storage.bucket(bucketNameDualRegionTurbo).setMetadata({
    rpo: RPO_ASYNC_TURBO,
  });

  const output = execSync(`node getRPO.js ${bucketNameDualRegionTurbo}`);
  assert.match(
    output,
    new RegExp(`RPO is ASYNC_TURBO for ${bucketNameDualRegionTurbo}.`)
  );

  const [metadata] = await dualRegionBucketTurbo.getMetadata();
  assert.strictEqual(metadata.rpo, RPO_ASYNC_TURBO);
});

it("should set a bucket's RPO to ASYNC_TURBO", async () => {
  const output = execSync(
    `node setRPOAsyncTurbo.js ${bucketNameDualRegionTurbo}`
  );
  assert.match(
    output,
    new RegExp(`Turbo replication enabled for ${bucketNameDualRegionTurbo}.`)
  );

  const [metadata] = await dualRegionBucketTurbo.getMetadata();
  assert.strictEqual(metadata.rpo, RPO_ASYNC_TURBO);
});

it("should set a bucket's RPO to DEFAULT", async () => {
  const output = execSync(`node setRPODefault.js ${bucketNameDualRegionTurbo}`);
  assert.match(
    output,
    new RegExp(`Turbo replication disabled for ${bucketNameDualRegionTurbo}.`)
  );

  const [metadata] = await dualRegionBucketTurbo.getMetadata();
  assert.strictEqual(metadata.rpo, RPO_DEFAULT);
});

it('should create a hierarchical namespace enabled bucket', async () => {
  const output = execSync(
    `node createBucketWithHierarchicalNamespace.js ${bucketNameHierarchicalNamespace}`
  );
  assert.match(
    output,
    new RegExp(
      `Created '${bucketNameHierarchicalNamespace}' with hierarchical namespace enabled.`
    )
  );

  // Bug fix: this test previously re-asserted the turbo-replication bucket's
  // RPO (copy/paste from the RPO tests above) and never verified the bucket
  // it created. Verify the hierarchical-namespace bucket instead.
  const [exists] = await hierarchicalNamespaceBucket.exists();
  assert.strictEqual(exists, true);
  const [metadata] = await hierarchicalNamespaceBucket.getMetadata();
  assert.strictEqual(metadata.hierarchicalNamespace.enabled, true);
});

it("should add a bucket's website configuration", async () => {
  const output = execSync(
    `node addBucketWebsiteConfiguration.js ${bucketName} http://example.com http://example.com/404.html`
  );

  assert.include(
    output,
    `Static website bucket ${bucketName} is set up to use http://example.com as the index page and http://example.com/404.html as the 404 page`
  );

  const [metadata] = await bucket.getMetadata();
  assert.deepStrictEqual(metadata.website, {
    mainPageSuffix: 'http://example.com',
    notFoundPage: 'http://example.com/404.html',
  });
});

/**
 * TODO: Re-enable once the test environment allows public IAM roles.
 * Currently disabled to avoid 403 errors when adding 'allUsers' or
 * 'allAuthenticatedUsers' permissions.
 */
it.skip('should make bucket publicly readable', async () => {
  const output = execSync(`node makeBucketPublic.js ${bucketName}`);
  assert.match(
    output,
    new RegExp(`Bucket ${bucketName} is now publicly readable`)
  );
  const [policy] = await bucket.iam.getPolicy();
  const objectViewerBinding = policy.bindings.filter(binding => {
    return binding.role === 'roles/storage.legacyBucketReader';
  })[0];

  assert(objectViewerBinding.members.includes('allUsers'));
});

it("should enable a bucket's versioning", async () => {
  const output = execSync(`node enableBucketVersioning.js ${bucketName}`);
  assert.include(output, `Versioning is enabled for bucket ${bucketName}`);
  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(metadata.versioning.enabled, true);
});

it("should disable a bucket's versioning", async () => {
  const output = execSync(`node disableBucketVersioning.js ${bucketName}`);
  assert.include(output, `Versioning is disabled for bucket ${bucketName}`);
  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(metadata.versioning.enabled, false);
});

it('should add label to bucket', async () => {
  const output = execSync(
    `node addBucketLabel.js ${bucketName} labelone labelonevalue`
  );
  assert.include(output, `Added label to bucket ${bucketName}`);
  const [labels] = await storage.bucket(bucketName).getLabels();
  assert.isTrue('labelone' in labels);
});

it('should remove label from bucket', async () => {
  const output = execSync(`node removeBucketLabel.js ${bucketName} labelone`);
  assert.include(output, `Removed labels from bucket ${bucketName}`);
  const [labels] = await storage.bucket(bucketName).getLabels();
  assert.isFalse('labelone' in labels);
});

it("should change a bucket's default storage class", async () => {
  const output = execSync(
    `node changeDefaultStorageClass.js ${bucketName} coldline`
  );
  assert.include(output, `${bucketName} has been set to coldline`);
  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(metadata.storageClass, 'COLDLINE');
});

it('should create bucket with storage class and location', async () => {
  const output = execSync(
    `node createBucketWithStorageClassAndLocation.js ${bucketNameWithClassAndLocation} coldline ASIA`
  );
  assert.include(
    output,
    `${bucketNameWithClassAndLocation} created with coldline class in ASIA`
  );
  const [metadata] = await bucketWithClassAndLocation.getMetadata();
  assert.strictEqual(metadata.storageClass, 'COLDLINE');
  assert.strictEqual(metadata.location, 'ASIA');
});

it("should set a bucket's soft delete policy", async () => {
  const output = execSync(`node setSoftDeletePolicy.js ${bucketName}`);
  assert.include(
    output,
    `Bucket ${bucketName} soft delete policy set to 7 days`
  );
});

it("should get a bucket's soft delete policy", async () => {
  const output = execSync(`node getSoftDeletePolicy.js ${bucketName}`);
  assert.include(output, `Soft delete policy for ${bucketName}`);
  assert.include(output, 'Soft delete Period: 604800 seconds');
  assert.match(output, new RegExp('Effective Time:'));
});

it("should disable a bucket's soft delete policy", async () => {
  const output = execSync(`node disableSoftDelete.js ${bucketName}`);
  assert.include(
    output,
    `Bucket ${bucketName} soft delete policy was disabled`
  );
  const [metadata] = await bucket.getMetadata();
  assert.strictEqual(
    metadata.softDeletePolicy.retentionDurationSeconds,
    '0'
  );
});

it('should delete a bucket', async () => {
  const output = execSync(`node deleteBucket.js ${bucketName}`);
  assert.match(output, new RegExp(`Bucket ${bucketName} deleted`));
  const [exists] = await bucket.exists();
  assert.strictEqual(exists, false);
});

it('should create a bucket with object retention enabled', async () => {
  const output = execSync(
    `node createBucketWithObjectRetention.js ${bucketNameObjectRetention}`
  );
  assert.include(
    output,
    `Created '${bucketNameObjectRetention}' with object retention enabled setting: Enabled`
  );
  const [metadata] = await objectRetentionBucket.getMetadata();
  assert.strictEqual(metadata.objectRetention.mode, 'Enabled');
});
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

'use strict';

const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const {Storage} = require('@google-cloud/storage');
const {assert} = require('chai');
const {before, after, it} = require('mocha');
const cp = require('child_process');
const uuid = require('uuid');
const {promisify} = require('util');

// Run a sample script as a child process and capture its stdout for assertions.
const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});

const storage = new Storage();
const bucketName = `nodejs-storage-samples-${uuid.v4()}`;
const bucket = storage.bucket(bucketName);
const kmsKeyName = process.env.GOOGLE_CLOUD_KMS_KEY_US;

const fileName = 'test.txt';
const filePath = path.join(__dirname, '../resources', fileName);
const downloadFilePath = path.join(__dirname, '../resources/downloaded.txt');
const doesNotExistPrecondition = 0;

// Customer-supplied encryption key (CSEK) shared by the samples under test.
const key = crypto.randomBytes(32).toString('base64');

before(async () => {
  // Bug fix: Bucket#create() takes an options object, not the bucket name —
  // the name is already bound to `bucket`.
  await bucket.create();
});

after(async () => {
  // Bug fix: await the unlink; it was a floating promise, so cleanup could
  // still be in flight when mocha exited.
  await promisify(fs.unlink)(downloadFilePath).catch(console.error);
  // Try deleting all files twice, just to make sure
  await bucket.deleteFiles({force: true}).catch(console.error);
  await bucket.deleteFiles({force: true}).catch(console.error);
  await bucket.delete().catch(console.error);
});

it('should generate a key', () => {
  const output = execSync('node generateEncryptionKey.js');
  assert.match(output, /Base 64 encoded encryption key:/);
});

it('should upload a file', async () => {
  const output = execSync(
    `node uploadEncryptedFile.js ${bucketName} ${filePath} ${fileName} ${key} ${doesNotExistPrecondition}`
  );
  assert.match(
    output,
    new RegExp(`File ${filePath} uploaded to gs://${bucketName}/${fileName}`)
  );
  const [exists] = await bucket.file(fileName).exists();
  assert.strictEqual(exists, true);
});

it('should download a file', () => {
  const output = execSync(
    `node downloadEncryptedFile.js ${bucketName} ${fileName} ${downloadFilePath} ${key}`
  );
  assert.match(
    output,
    new RegExp(`File ${fileName} downloaded to ${downloadFilePath}`)
  );
  // Throws if the downloaded file does not exist on disk.
  fs.statSync(downloadFilePath);
});

it('should rotate keys', async () => {
  const newKey = crypto.randomBytes(32).toString('base64');
  // The sample takes the current generation as a precondition.
  const [metadata] = await storage
    .bucket(bucketName)
    .file(fileName)
    .getMetadata();
  const output = execSync(
    `node rotateEncryptionKey.js ${bucketName} ${fileName} ${key} ${newKey} ${metadata.generation}`
  );
  assert.include(output, 'Encryption key rotated successfully');
});

it('should convert CSEK to KMS key', async () => {
  const encryptedFileName = 'encrypted-file';
  const file = bucket.file(encryptedFileName, {
    encryptionKey: Buffer.from(key, 'base64'),
  });
  const [metadata] = await storage
    .bucket(bucketName)
    .file(fileName)
    .getMetadata();
  await file.save('secret data', {resumable: false});
  const output = execSync(
    `node changeFileCSEKToCMEK.js ${bucketName} ${encryptedFileName} ${key} ${kmsKeyName} ${metadata.generation}`
  );
  assert.include(
    output,
    `file ${encryptedFileName} in bucket ${bucketName} is now managed by KMS key ${kmsKeyName} instead of customer-supplied encryption key`
  );
});
+ +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, after, it, describe} = require('mocha'); +const cp = require('child_process'); +const fetch = require('node-fetch'); +const uuid = require('uuid'); +const {promisify} = require('util'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const cwd = path.join(__dirname, '..'); +const bucketName = generateName(); +const bucket = storage.bucket(bucketName); +const softDeleteBucketName = generateName(); +const softDeleteBucket = storage.bucket(softDeleteBucketName); +const objectRetentionBucketName = generateName(); +const objectRetentionBucket = storage.bucket(objectRetentionBucketName); +const fileContents = 'these-are-my-contents'; +const fileName = 'test.txt'; +const memoryFileName = 'testmemory.txt'; +const movedFileName = 'test2.txt'; +const copiedFileName = 'test3.txt'; +const renamedFileName = 'test4.txt'; +const signedFileName = 'signed-upload.txt'; +const kmsKeyName = process.env.GOOGLE_CLOUD_KMS_KEY_US; +const filePath = path.join(cwd, 'resources', fileName); +const folderPath = path.join(cwd, 'resources'); +const downloadFilePath = path.join(cwd, 'downloaded.txt'); +const startByte = 0; +const endByte = 20; +const doesNotExistPrecondition = 0; + +const fileContent = fs.readFileSync(filePath, 'utf-8'); + +describe('file', () => { + before(async () => { + await bucket.create(); + }); + + after(async () => { + await promisify(fs.unlink)(downloadFilePath).catch(console.error); + // Try deleting all files twice, just to make sure + await bucket.deleteFiles({force: true}).catch(console.error); + await bucket.deleteFiles({force: true}).catch(console.error); + await bucket.delete().catch(console.error); + }); + + it('should upload a file', async () => { + const output = execSync( + `node uploadFile.js ${bucketName} ${filePath} ${fileName} 
${doesNotExistPrecondition}` + ); + assert.match(output, new RegExp(`${filePath} uploaded to ${bucketName}`)); + const [exists] = await bucket.file(fileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should upload a file from memory', async () => { + const output = execSync( + `node uploadFromMemory.js ${bucketName} ${fileContents} ${memoryFileName}` + ); + assert.match( + output, + new RegExp( + `${memoryFileName} with contents ${fileContents} uploaded to ${bucketName}.` + ) + ); + const [exists] = await bucket.file(memoryFileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should upload a file without authentication', async () => { + const output = execSync( + `node uploadWithoutAuthentication.js ${bucketName} ${fileContents} ${fileName} ${doesNotExistPrecondition}` + ); + assert.match(output, new RegExp(`${fileName} uploaded to ${bucketName}`)); + const [exists] = await bucket.file(fileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should upload a file without authentication using signed url strategy', async () => { + const output = execSync( + `node uploadWithoutAuthenticationSignedUrl.js ${bucketName} ${fileContents} ${fileName}` + ); + assert.match(output, new RegExp(`${fileName} uploaded to ${bucketName}`)); + const [exists] = await bucket.file(fileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should upload a file using a stream', async () => { + const output = execSync( + `node streamFileUpload.js ${bucketName} ${fileName} ${fileContents}` + ); + assert.match(output, new RegExp(`${fileName} uploaded to ${bucketName}`)); + const [exists] = await bucket.file(fileName).exists(); + assert.strictEqual(exists, true); + const response = await bucket.file(fileName).download(); + assert.strictEqual(response[0].toString(), fileContents); + }); + + it('should upload a file with a kms key', async () => { + const [metadata] = await bucket.file(fileName).getMetadata(); + const output = execSync( + 
`node uploadFileWithKmsKey.js ${bucketName} ${filePath} ${kmsKeyName} ${metadata.generation}` + ); + assert.include( + output, + `${filePath} uploaded to ${bucketName} using ${kmsKeyName}` + ); + const [exists] = await bucket.file(fileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should upload a local directory', done => { + const output = execSync( + `node uploadDirectory.js ${bucketName} ${folderPath}` + ); + + const fileList = []; + getFileList(folderPath); + + function getFileList(directory) { + const items = fs.readdirSync(directory); + items.forEach(item => { + const fullPath = path.join(directory, item); + const stat = fs.lstatSync(fullPath); + if (stat.isFile()) { + fileList.push(fullPath); + } else { + getFileList(fullPath); + } + }); + } + + assert.match( + output, + new RegExp( + `${fileList.length} files uploaded to ${bucketName} successfully.` + ) + ); + + Promise.all( + fileList.map(file => + bucket + .file( + path.relative(path.dirname(folderPath), file).replace(/\\/g, '/') + ) + .exists() + ) + ).then(resps => { + const ctr = resps.reduce((acc, cur) => { + return acc + cur[0]; + }, 0); + assert.strictEqual(ctr, fileList.length); + done(); + }, assert.ifError); + }); + + it('should download a file', () => { + const output = execSync( + `node downloadFile.js ${bucketName} ${fileName} ${downloadFilePath}` + ); + assert.match( + output, + new RegExp( + `gs://${bucketName}/${fileName} downloaded to ${downloadFilePath}.` + ) + ); + fs.statSync(downloadFilePath); + }); + + it('should download a file into memory', () => { + const output = execSync( + `node downloadIntoMemory.js ${bucketName} ${memoryFileName}` + ); + assert.match( + output, + new RegExp( + `Contents of gs://${bucketName}/${memoryFileName} are ${fileContents}.` + ) + ); + }); + + it('should download a file using a stream', () => { + const output = execSync( + `node streamFileDownload.js ${bucketName} ${fileName} ${downloadFilePath}` + ); + assert.match( + output, + new 
RegExp( + `gs://${bucketName}/${fileName} downloaded to ${downloadFilePath}.` + ) + ); + fs.statSync(downloadFilePath); + }); + + it('should download a file using a given byte range', () => { + const output = execSync( + `node downloadByteRange.js ${bucketName} ${fileName} ${startByte} ${endByte} ${downloadFilePath}` + ); + assert.match( + output, + new RegExp( + `gs://${bucketName}/${fileName} downloaded to ${downloadFilePath} from byte ${startByte} to byte ${endByte}.` + ) + ); + fs.statSync(downloadFilePath); + }); + + it('should move a file', async () => { + const output = execSync( + `node moveFile.js ${bucketName} ${fileName} ${movedFileName} ${doesNotExistPrecondition}` + ); + assert.include( + output, + `gs://${bucketName}/${fileName} moved to gs://${bucketName}/${movedFileName}` + ); + const [exists] = await bucket.file(movedFileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should atomically move a file', async () => { + const movedFileName = 'test1.txt'; + const file = bucket.file(fileName); + await file.save(fileName); + const output = execSync( + `node moveFileAtomic.js ${bucketName} ${fileName} ${movedFileName} ${doesNotExistPrecondition}` + ); + assert.include( + output, + `gs://${bucketName}/${fileName} moved to gs://${bucketName}/${movedFileName}` + ); + const [[destExists], [sourceExists]] = await Promise.all([ + bucket.file(movedFileName).exists(), + bucket.file(fileName).exists(), + ]); + assert.strictEqual(destExists, true); + assert.strictEqual(sourceExists, false); + }); + + it('should copy a file', async () => { + const output = execSync( + `node copyFile.js ${bucketName} ${movedFileName} ${bucketName} ${copiedFileName} ${doesNotExistPrecondition}` + ); + assert.include( + output, + `gs://${bucketName}/${movedFileName} copied to gs://${bucketName}/${copiedFileName}` + ); + const [exists] = await bucket.file(copiedFileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should list files', () => { + const 
output = execSync(`node listFiles.js ${bucketName}`); + assert.match(output, /Files:/); + assert.match(output, new RegExp(movedFileName)); + assert.match(output, new RegExp(copiedFileName)); + }); + + it('should list files by a prefix', () => { + let output = execSync(`node listFilesByPrefix.js ${bucketName} test "/"`); + assert.match(output, /Files:/); + assert.match(output, new RegExp(movedFileName)); + assert.match(output, new RegExp(copiedFileName)); + + output = execSync(`node listFilesByPrefix.js ${bucketName} foo`); + assert.match(output, /Files:/); + assert.notMatch(output, new RegExp(movedFileName)); + assert.notMatch(output, new RegExp(copiedFileName)); + }); + + it('should list files with pagination', () => { + const output = execSync(`node listFilesPaginate.js ${bucketName}`); + assert.match(output, /Files:/); + assert.match(output, new RegExp(movedFileName)); + assert.match(output, new RegExp(copiedFileName)); + }); + + it('should rename a file', async () => { + const output = execSync( + `node renameFile.js ${bucketName} ${movedFileName} ${renamedFileName}` + ); + assert.match( + output, + new RegExp( + `gs://${bucketName}/${movedFileName} renamed to gs://${bucketName}/${renamedFileName}.` + ) + ); + const [exists] = await bucket.file(renamedFileName).exists(); + assert.strictEqual(exists, true); + + const [oldFileExists] = await bucket.file(movedFileName).exists(); + assert.strictEqual(oldFileExists, false); + }); + + describe('public data', () => { + let GOOGLE_APPLICATION_CREDENTIALS; + let GOOGLE_CLOUD_PROJECT; + const publicFileName = 'public.txt'; + const downloadPublicFilePath = path.join(cwd, 'public-downloaded.txt'); + + before(async () => { + // CI authentication is done with ADC. Cache it here, restore it `after` + // Incase of sample fails it's restore from here. 
+ await bucket.file(publicFileName).save('public data'); + GOOGLE_APPLICATION_CREDENTIALS = + process.env.GOOGLE_APPLICATION_CREDENTIALS; + GOOGLE_CLOUD_PROJECT = process.env.GOOGLE_CLOUD_PROJECT; + }); + + after(async () => { + await promisify(fs.unlink)(downloadPublicFilePath).catch(console.error); + process.env.GOOGLE_APPLICATION_CREDENTIALS = + GOOGLE_APPLICATION_CREDENTIALS; + process.env.GOOGLE_CLOUD_PROJECT = GOOGLE_CLOUD_PROJECT; + await bucket.file(publicFileName).delete(); + }); + + /** + * TODO: Re-enable once the test environment allows public IAM roles. + * Currently disabled to avoid 403 errors when adding 'allUsers' or + * 'allAuthenticatedUsers' permissions. + */ + it.skip('should make a file public', () => { + const output = execSync( + `node makePublic.js ${bucketName} ${publicFileName}` + ); + assert.match( + output, + new RegExp(`gs://${bucketName}/${publicFileName} is now public`) + ); + }); + + it('should download public file', () => { + const output = execSync( + `node downloadPublicFile.js ${bucketName} ${publicFileName} ${downloadPublicFilePath}` + ); + assert.include( + output, + `Downloaded public file ${publicFileName} from bucket name ${bucketName} to ${downloadPublicFilePath}` + ); + fs.statSync(downloadPublicFilePath); + }); + }); + + it('should generate a v2 signed URL for a file', async () => { + const output = await execSync( + `node generateSignedUrl ${bucketName} ${copiedFileName}` + ); + assert.match( + output, + new RegExp(`The signed url for ${copiedFileName} is `) + ); + }); + + it('should generate a v4 signed URL and read a file', async () => { + const output = await execSync( + `node generateV4ReadSignedUrl.js ${bucketName} ${copiedFileName}` + ); + + const expected = /URL:\n([^\s]+)/; + assert.match(output, expected); + + const match = output.match(expected); + const res = await fetch(match[1]); + const text = await res.text(); + assert.strictEqual(text, fileContent); + }); + + it('should generate a v4 signed URL and 
upload a file', async () => { + const output = execSync( + `node generateV4UploadSignedUrl.js ${bucketName} ${signedFileName}` + ); + + const expected = /URL:\n([^\s]+)/; + assert.match(output, expected); + + const match = output.match(expected); + const req = { + method: 'PUT', + headers: {'Content-Type': 'application/octet-stream'}, + body: fileContent, + }; + await fetch(match[1], req); + + await new Promise((resolve, reject) => { + let remoteContent = ''; + bucket + .file(signedFileName) + .createReadStream() + .on('response', res => { + assert.strictEqual( + res.headers['content-type'], + 'application/octet-stream' + ); + }) + .on('data', buf => (remoteContent += buf.toString())) + .on('end', () => { + assert.strictEqual(remoteContent, fileContent); + resolve(); + }) + .on('error', reject); + }); + }); + + it('should generate a v4 signed policy', async () => { + const output = execSync( + `node generateV4SignedPolicy.js ${bucketName} ${signedFileName}` + ); + + assert.include( + output, + `
'); + }); + + it('should get metadata for a file', () => { + const output = execSync( + `node getMetadata.js ${bucketName} ${copiedFileName}` + ); + assert.include(output, `Bucket: ${bucketName}`); + assert.include(output, `Name: ${copiedFileName}`); + }); + + it('should set metadata for a file', async () => { + const [metadata] = await bucket.file(copiedFileName).getMetadata(); + + // used in sample + const userMetadata = { + description: 'file description...', + modified: '1900-01-01', + }; + const output = execSync( + `node fileSetMetadata.js ${bucketName} ${copiedFileName} ${metadata.metageneration} ` + ); + + assert.match( + output, + new RegExp(`description: '${userMetadata.description}'`) + ); + assert.match(output, new RegExp(`modified: '${userMetadata.modified}'`)); + }); + + it('should set storage class for a file', async () => { + const output = execSync( + `node fileChangeStorageClass.js ${bucketName} ${copiedFileName} standard ${doesNotExistPrecondition}` + ); + assert.include(output, `${copiedFileName} has been set to standard`); + const [metadata] = await storage + .bucket(bucketName) + .file(copiedFileName) + .getMetadata(); + assert.strictEqual(metadata.storageClass, 'STANDARD'); + }); + + it('should combine multiple files into one new file', async () => { + const firstFileName = 'file-one.txt'; + const secondFileName = 'file-two.txt'; + const destinationFileName = 'file-one-two.txt'; + + const files = [ + {file: bucket.file(firstFileName), contents: '123'}, + {file: bucket.file(secondFileName), contents: '456'}, + ]; + + await Promise.all(files.map(file => createFileAsync(file))); + const destinationFile = bucket.file(destinationFileName); + + const output = execSync( + `node composeFile.js ${bucketName} ${firstFileName} ${secondFileName} ${destinationFileName}` + ); + assert.include( + output, + `New composite file ${destinationFileName} was created by combining ${firstFileName} and ${secondFileName}` + ); + + const [contents] = await 
destinationFile.download(); + assert.strictEqual( + contents.toString(), + files.map(x => x.contents).join('') + ); + }); + + it('should delete a file', async () => { + const [metadata] = await bucket.file(copiedFileName).getMetadata(); + const output = execSync( + `node deleteFile.js ${bucketName} ${copiedFileName} ${metadata.generation}` + ); + assert.match( + output, + new RegExp(`gs://${bucketName}/${copiedFileName} deleted`) + ); + const [exists] = await bucket.file(copiedFileName).exists(); + assert.strictEqual(exists, false); + }); + + describe('file archived generations', () => { + const bucketNameWithVersioning = generateName(); + const fileName = 'file-one.txt'; + const bucketWithVersioning = storage.bucket(bucketNameWithVersioning); + + before(async () => { + const versionedFile = bucketWithVersioning.file(fileName); + const filesToCreate = [ + {file: versionedFile, contents: '123'}, + {file: versionedFile, contents: '456'}, + ]; + await storage.createBucket(bucketNameWithVersioning, { + versioning: { + enabled: true, + }, + }); + await Promise.all(filesToCreate.map(file => createFileAsync(file))); + }); + + after(async () => { + await bucketWithVersioning.deleteFiles({ + versions: true, + force: true, + }); + await bucketWithVersioning.delete(); + }); + + it('should list file with old versions', async () => { + const output = execSync( + `node listFilesWithOldVersions.js ${bucketNameWithVersioning}` + ); + assert.notEqual(output.indexOf(fileName), output.lastIndexOf(fileName)); + }); + + it('should copy file with old versions', async () => { + const destFileName = 'file-two.txt'; + const [files] = await bucketWithVersioning.getFiles({versions: true}); + const generation = files[0].metadata.generation; + const output = execSync( + `node copyOldVersionOfFile.js ${bucketNameWithVersioning} ${fileName} ${bucketNameWithVersioning} ${destFileName} ${generation}` + ); + assert.match( + output, + new RegExp( + `Generation ${generation} of file ${fileName} in 
bucket ${bucketNameWithVersioning} was copied to ${destFileName} in bucket ${bucketNameWithVersioning}` + ) + ); + const [exists] = await bucketWithVersioning.file(destFileName).exists(); + assert.strictEqual(exists, true); + }); + + it('should delete a file with customized retry settings', () => { + const output = execSync( + `node configureRetries.js ${bucketName} ${fileName}` + ); + assert.match( + output, + new RegExp(`File ${fileName} deleted with a customized retry strategy.`) + ); + }); + + it('should delete file with versions', async () => { + const [files] = await bucketWithVersioning.getFiles({versions: true}); + const generation = files[0].metadata.generation; + const output = execSync( + `node deleteOldVersionOfFile.js ${bucketNameWithVersioning} ${fileName} ${generation}` + ); + assert.match( + output, + new RegExp( + `Generation ${generation} of file ${fileName} was deleted from ${bucketNameWithVersioning}` + ) + ); + const [exists] = await bucketWithVersioning + .file(fileName, { + generation, + }) + .exists(); + assert.strictEqual(exists, false); + }); + }); + + describe('Object Retention', () => { + before(async () => { + await storage.createBucket(objectRetentionBucketName, { + enableObjectRetention: true, + }); + }); + + it('should create a file with unlocked retention and then override it', async () => { + const output = execSync( + `node setObjectRetentionPolicy.js ${objectRetentionBucketName} ${fileName} ${fileContent}` + ); + assert.include(output, 'Retention policy for file'); + const file = objectRetentionBucket.file(fileName); + const [metadata] = await file.getMetadata(); + assert(metadata.retention.retainUntilTime); + assert(metadata.retention.mode.toUpperCase(), 'UNLOCKED'); + }); + }); + + describe('Object Soft Delete', () => { + let generation; + before(async () => { + await storage.createBucket(softDeleteBucketName, { + softDeletePolicy: { + retentionDurationSeconds: 604800, + }, + }); + const file = softDeleteBucket.file(fileName); 
/**
 * Builds a unique resource name for the buckets/files these samples create.
 * Relies on the module-level `uuid` import.
 * @returns {string} a name of the form `nodejs-storage-samples-<uuid>`.
 */
function generateName() {
  const suffix = uuid.v4();
  return `nodejs-storage-samples-${suffix}`;
}

/**
 * Persists `fileObject.contents` into `fileObject.file`.
 * @param {{file: object, contents: string}} fileObject - target file handle
 *   and the data to write.
 * @returns {Promise} resolves when the save completes.
 */
function createFileAsync(fileObject) {
  const {file, contents} = fileObject;
  return file.save(contents);
}
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, after, describe, it} = require('mocha'); +const cp = require('child_process'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); +const pLimit = require('p-limit'); +const storage = new Storage(); +const SERVICE_ACCOUNT_EMAIL = process.env.HMAC_KEY_TEST_SERVICE_ACCOUNT; +const SERVICE_ACCOUNT_PROJECT = process.env.HMAC_PROJECT; + +describe('HMAC SA Key samples', () => { + let hmacKey; + + before(async () => { + await deleteStaleHmacKeys(SERVICE_ACCOUNT_EMAIL, SERVICE_ACCOUNT_PROJECT); + [hmacKey] = await storage.createHmacKey(SERVICE_ACCOUNT_EMAIL, { + projectId: SERVICE_ACCOUNT_PROJECT, + }); + }); + + after(async () => { + await deleteStaleHmacKeys(SERVICE_ACCOUNT_EMAIL, SERVICE_ACCOUNT_PROJECT); + }); + + it('should create an HMAC Key', async () => { + const output = execSync( + `node hmacKeyCreate.js ${SERVICE_ACCOUNT_EMAIL} ${SERVICE_ACCOUNT_PROJECT}` + ); + assert.include(output, 'The base64 encoded secret is:'); + }); + + it('should list HMAC Keys', async () => { + const output = execSync(`node hmacKeysList.js ${SERVICE_ACCOUNT_PROJECT}`); + assert.include(output, `Service Account Email: ${SERVICE_ACCOUNT_EMAIL}`); + }); + + it('should get HMAC Key', async () => { + const output = execSync( + `node hmacKeyGet.js ${hmacKey.metadata.accessId} ${SERVICE_ACCOUNT_PROJECT}` + ); + assert.include(output, 'The HMAC key metadata is:'); + }); + + it('should deactivate HMAC Key', async () => { + const output = 
/**
 * Removes HMAC keys created more than one hour ago for the given service
 * account, so stale keys from earlier test runs do not accumulate.
 * Uses the module-level `storage` client and `pLimit`.
 *
 * @param {string} serviceAccountEmail - service account whose keys are cleaned.
 * @param {string} projectId - project that owns the keys.
 * @returns {Promise<void>} resolves once every stale key has been removed.
 */
async function deleteStaleHmacKeys(serviceAccountEmail, projectId) {
  const cutoff = new Date();
  cutoff.setHours(cutoff.getHours() - 1);

  // Fetch every HMAC key that belongs to this service account.
  const [keys] = await storage.getHmacKeys({
    serviceAccountEmail,
    projectId,
  });

  // Deactivate then delete each stale key, at most 10 operations in flight.
  const limit = pLimit(10);
  const deletions = [];
  for (const key of keys) {
    const created = new Date(key.metadata.timeCreated);
    if (key.metadata.state === 'DELETED' || !(created < cutoff)) {
      continue;
    }
    deletions.push(
      limit(async () => {
        await key.setMetadata({state: 'INACTIVE'});
        await key.delete();
      })
    );
  }
  await Promise.all(deletions);
}
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, after, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const bucketName = `nodejs-storage-samples-${uuid.v4()}`; +const bucket = storage.bucket(bucketName); +const userEmail = 'test@example.com'; +const roleName = 'roles/storage.objectViewer'; + +// Condition +const title = 'match-prefix'; +const description = 'Applies to objects matching a prefix'; +const expression = + 'resource.name.startsWith("projects/_/buckets/bucket-name/objects/prefix-a-")'; + +before(async () => { + await bucket.create(); + // UniformBucketLevelAccess must be enabled to add a conditional binding. 
// Verifies the sample reports the removed member and role.
it('should remove multiple members from a role on a bucket', async () => {
  const output = execSync(
    `node removeBucketIamMember.js ${bucketName} ${roleName} "user:${userEmail}"`
  );
  // Use chai's assert.include for consistency with the other IAM tests in
  // this file (previously an assert.ok(output.includes(...)) one-off).
  assert.include(
    output,
    `Removed the following member(s) with role ${roleName} from ${bucketName}:`
  );
  assert.match(output, new RegExp(`user:${userEmail}`));
});
'${title}' '${description}' '${expression}'` + ); + assert.include(output, 'Conditional Binding was removed'); +}); diff --git a/storage/system-test/notifications.test.js b/storage/system-test/notifications.test.js new file mode 100644 index 0000000000..ef88d8b07c --- /dev/null +++ b/storage/system-test/notifications.test.js @@ -0,0 +1,99 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {PubSub} = require('@google-cloud/pubsub'); +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, after, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const bucketName = `nodejs-storage-samples-${uuid.v4()}`; +const bucket = storage.bucket(bucketName); +const notificationId = '1'; +const notification = bucket.notification(notificationId); +const topicName = `nodejs-storage-samples-${uuid.v4()}`; +const pubsub = new PubSub(); +const topic = pubsub.topic(topicName); + +before(async () => { + await bucket.create(); + await topic.create(); + await topic.iam.setPolicy({ + bindings: [ + { + role: 'roles/pubsub.editor', + members: ['allUsers'], + }, + ], + }); +}); + +after(async () => { + await bucket.delete().catch(console.error); + await topic.delete().catch(console.error); +}); + +it('should create a notification', async () => 
// Verifies the sample prints each metadata field of the notification.
it('should get metadata', async () => {
  // getMetadata() resolves with an array: [metadata, apiResponse].
  // Destructure it; otherwise fields such as `metadata.id` are undefined,
  // and `new RegExp(undefined)` compiles to an empty pattern that matches
  // any string — making these assertions vacuous.
  const [metadata] = await notification.getMetadata();
  const output = execSync(
    `node getMetadataNotifications.js ${bucketName} ${notificationId}`
  );
  assert.match(output, /ID:/);
  assert.match(output, new RegExp(metadata.id));
  assert.match(output, /Topic:/);
  assert.match(output, new RegExp(metadata.topic));
  assert.match(output, /Event Types:/);
  assert.match(output, new RegExp(metadata.event_types));
  assert.match(output, /Custom Attributes:/);
  assert.match(output, new RegExp(metadata.custom_attributes));
  assert.match(output, /Payload Format:/);
  assert.match(output, new RegExp(metadata.payload_format));
  assert.match(output, /Object Name Prefix:/);
  assert.match(output, new RegExp(metadata.object_name_prefix));
  assert.match(output, /Etag:/);
  assert.match(output, /Self Link:/);
  assert.match(output, new RegExp(metadata.selfLink));
  assert.match(output, /Kind:/);
  assert.match(output, new RegExp(metadata.kind));
});
+// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {assert} = require('chai'); +const {after, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); +const {Storage} = require('@google-cloud/storage'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const bucketName = `nodejs-storage-samples-${uuid.v4()}`; + +after(async () => { + const bucket = storage.bucket(bucketName); + await bucket.delete().catch(console.error); +}); + +it('should run the quickstart', async () => { + const stdout = execSync(`node quickstart ${bucketName}`); + assert.match(stdout, /Bucket .* created./); +}); diff --git a/storage/system-test/requesterPays.test.js b/storage/system-test/requesterPays.test.js new file mode 100644 index 0000000000..76611db164 --- /dev/null +++ b/storage/system-test/requesterPays.test.js @@ -0,0 +1,109 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const fs = require('fs'); +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {before, after, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); +const path = require('path'); +const {promisify} = require('util'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const storage = new Storage(); +const cwd = path.join(__dirname, '..'); +const bucketName = `nodejs-storage-samples-${uuid.v4()}`; +const fileName = 'test.txt'; +const bucket = storage.bucket(bucketName); +const projectId = process.env.GCLOUD_PROJECT; + +const uploadFilePath = path.join(cwd, 'resources', fileName); +const downloadFilePath = path.join(__dirname, `test_${uuid.v4()}.txt`); + +before(async () => { + await bucket.create(); + // Upload a test file (to download later) + await bucket.upload(uploadFilePath); +}); + +after(async () => { + await promisify(fs.unlink)(downloadFilePath).catch(console.error); + // Try deleting all files twice, just to make sure + await bucket.deleteFiles({force: true}).catch(console.error); + await bucket.deleteFiles({force: true}).catch(console.error); + await bucket.delete().catch(console.error); +}); + +it.skip('should error on requester-pays requests if they are disabled', () => { + const result = execSync( + `node downloadFileUsingRequesterPays.js ${projectId} ${bucketName} ${fileName} ${downloadFilePath}` + ); + assert.ok(result.stderr); + assert.match( + result.stderr, + /User project prohibited for non requester pays bucket/ + ); +}); + +it('should fetch requester-pays status on a default bucket', () => { + const out = execSync(`node getRequesterPaysStatus.js ${bucketName}`); + assert.include( + out, + `Requester-pays requests are disabled for bucket ${bucketName}` + ); +}); + +it('should enable requester-pays requests', () => { + const 
out = execSync(`node enableRequesterPays.js ${bucketName}`); + assert.include( + out, + `Requester-pays requests have been enabled for bucket ${bucketName}` + ); +}); + +it('should fetch requester-pays status on a modified bucket', () => { + const out = execSync(`node getRequesterPaysStatus.js ${bucketName}`); + assert.include( + out, + `Requester-pays requests are enabled for bucket ${bucketName}.` + ); +}); + +it('should download a file using requester-pays requests', () => { + const out = execSync( + `node downloadFileUsingRequesterPays.js ${projectId} ${bucketName} ${fileName} ${downloadFilePath}` + ); + assert.include( + out, + `gs://${bucketName}/${fileName} downloaded to ${downloadFilePath} using requester-pays requests` + ); + fs.statSync(downloadFilePath); +}); + +it('should disable requester-pays requests', () => { + const out = execSync(`node disableRequesterPays.js ${bucketName}`); + assert.include( + out, + `Requester-pays requests have been disabled for bucket ${bucketName}` + ); +}); + +it('should get service account', () => { + const out = execSync(`node getServiceAccount.js ${projectId}`); + assert.include(out, '@gs-project-accounts.iam.gserviceaccount.com'); +}); diff --git a/storage/system-test/storage.test.js b/storage/system-test/storage.test.js new file mode 100644 index 0000000000..9aacf774be --- /dev/null +++ b/storage/system-test/storage.test.js @@ -0,0 +1,30 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// Verifies a Storage client can be constructed against a custom endpoint.
// (Fixes the typo "intialize" in the test description.)
it('should initialize storage with a custom api endpoint', async () => {
  const apiEndpoint = 'https://storage.googleapis.com';
  const output = execSync(`node setClientEndpoint.js ${apiEndpoint}`);
  assert.match(
    output,
    new RegExp(`Client initiated with endpoint: ${apiEndpoint}.`)
  );
});
// Verifies both objects are downloaded and reported by the sample.
// (Fixes the typo "mulitple" in the test description.)
it('should download multiple files', async () => {
  const output = execSync(
    `node downloadManyFilesWithTransferManager.js ${bucketName} ${firstFilePath} ${secondFilePath}`
  );
  assert.match(
    output,
    new RegExp(
      `gs://${bucketName}/${firstFilePath} downloaded to ${firstFilePath}.\ngs://${bucketName}/${secondFilePath} downloaded to ${secondFilePath}.`
    )
  );
});
assert.match( + output, + new RegExp( + `gs://${bucketName}/${firstFilePath} downloaded to ${downloadFilePath}.` + ) + ); + }); + + it('should upload a file utilizing chunked upload', async () => { + const output = execSync( + `node uploadFileInChunksWithTransferManager.js ${bucketName} ${firstFilePath} ${chunkSize}` + ); + assert.match( + output, + new RegExp(`${firstFilePath} uploaded to ${bucketName}.`) + ); + }); + + it('should upload a directory', async () => { + const output = execSync( + `node uploadDirectoryWithTransferManager.js ${bucketName} ${resourcesPath}` + ); + assert.match( + output, + new RegExp(`${resourcesPath} uploaded to ${bucketName}.`) + ); + }); + + it('should download a directory', async () => { + const output = execSync( + `node downloadFolderWithTransferManager.js ${bucketName} ${resourcesPath}` + ); + assert.match( + output, + new RegExp( + `gs://${bucketName}/${resourcesPath} downloaded to ${resourcesPath}.` + ) + ); + }); +}); + +function generateName() { + return `nodejs-storage-samples-${uuid.v4()}`; +} diff --git a/storage/uploadDirectory.js b/storage/uploadDirectory.js new file mode 100644 index 0000000000..62ff622795 --- /dev/null +++ b/storage/uploadDirectory.js @@ -0,0 +1,91 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Upload a directory to a bucket. +// description: Uploads full hierarchy of a local directory to a bucket. 
/**
 * Uploads the full hierarchy of a local directory to a bucket, mirroring the
 * tree relative to the directory's parent.
 */
function main(
  bucketName = 'your-unique-bucket-name',
  directoryPath = './local/path/to/directory'
) {
  // [START upload_directory]
  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // The ID of your GCS bucket
  // const bucketName = 'your-unique-bucket-name';

  // The local directory to upload
  // const directoryPath = './local/path/to/directory';

  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  // Creates a client
  const storage = new Storage();

  const fs = require('fs');
  const path = require('path');

  // Recursively yields the path of every regular file under `directory`.
  async function* getFiles(directory = '.') {
    for (const entry of await fs.promises.readdir(directory)) {
      const entryPath = path.join(directory, entry);
      const entryStats = await fs.promises.stat(entryPath);

      if (entryStats.isDirectory()) {
        yield* getFiles(entryPath);
      }

      if (entryStats.isFile()) {
        yield entryPath;
      }
    }
  }

  // Walks the local tree and uploads each file; a per-file failure is logged
  // and does not abort the remaining uploads.
  async function uploadDirectory() {
    const bucket = storage.bucket(bucketName);
    // Object names are made relative to the directory's parent so the
    // bucket reproduces the local hierarchy.
    const parentDir = path.dirname(directoryPath);
    let successfulUploads = 0;

    for await (const filePath of getFiles(directoryPath)) {
      try {
        const destination = path.relative(parentDir, filePath);

        await bucket.upload(filePath, {destination});

        console.log(`Successfully uploaded: ${filePath}`);
        successfulUploads++;
      } catch (e) {
        console.error(`Error uploading ${filePath}:`, e);
      }
    }

    console.log(
      `${successfulUploads} files uploaded to ${bucketName} successfully.`
    );
  }

  uploadDirectory().catch(console.error);
  // [END upload_directory]
}

main(...process.argv.slice(2));
b/storage/uploadDirectoryWithTransferManager.js @@ -0,0 +1,58 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// sample-metadata: +// title: Upload Directory With Transfer Manager +// description: Uploads a directory in parallel utilizing transfer manager. +// usage: node uploadFolderWithTransferManager.js + +function main(bucketName = 'my-bucket', directoryName = 'my-directory') { + // [START storage_transfer_manager_upload_directory] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The local directory to upload + // const directoryName = 'your-directory'; + + // Imports the Google Cloud client library + const {Storage, TransferManager} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Creates a transfer manager client + const transferManager = new TransferManager(storage.bucket(bucketName)); + + async function uploadDirectoryWithTransferManager() { + // Uploads the directory + await transferManager.uploadManyFiles(directoryName); + + console.log(`${directoryName} uploaded to ${bucketName}.`); + } + + uploadDirectoryWithTransferManager().catch(console.error); + // [END storage_transfer_manager_upload_directory] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/uploadEncryptedFile.js b/storage/uploadEncryptedFile.js new file mode 100644 index 0000000000..8dd156cc02 --- /dev/null +++ b/storage/uploadEncryptedFile.js @@ -0,0 +1,70 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+const path = require('path'); + +function main( + bucketName = 'my-bucket', + filePath = path.join(__dirname, '../resources', 'test.txt'), + destFileName = 'test.txt', + key = process.env.GOOGLE_CLOUD_KMS_KEY_US, + generationMatchPrecondition = 0 +) { + // [START storage_upload_encrypted_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The path to your file to upload + // const filePath = 'path/to/your/file'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // The key to encrypt the object with + // const key = 'TIbv/fjexq+VmtXzAlc63J4z5kFmWJ6NdAPQulQBT7g='; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function uploadEncryptedFile() { + const options = { + destination: destFileName, + encryptionKey: Buffer.from(key, 'base64'), + + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to upload is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; + + await storage.bucket(bucketName).upload(filePath, options); + + console.log( + `File ${filePath} uploaded to gs://${bucketName}/${destFileName}` + ); + } + + uploadEncryptedFile().catch(console.error); + // [END storage_upload_encrypted_file] +} +main(...process.argv.slice(2)); diff --git a/storage/uploadFile.js b/storage/uploadFile.js new file mode 100644 index 0000000000..1538e4ff14 --- /dev/null +++ b/storage/uploadFile.js @@ -0,0 +1,61 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +function main( + bucketName = 'my-bucket', + filePath = './local/path/to/file.txt', + destFileName = 'file.txt', + generationMatchPrecondition = 0 +) { + // [START storage_upload_file] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The path to your file to upload + // const filePath = 'path/to/your/file'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function uploadFile() { + const options = { + destination: destFileName, + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to upload is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; + + await storage.bucket(bucketName).upload(filePath, options); + console.log(`${filePath} uploaded to ${bucketName}`); + } + + uploadFile().catch(console.error); + // [END storage_upload_file] +} + +main(...process.argv.slice(2)); diff --git a/storage/uploadFileInChunksWithTransferManager.js b/storage/uploadFileInChunksWithTransferManager.js new file mode 100644 index 0000000000..02e8437849 --- /dev/null +++ b/storage/uploadFileInChunksWithTransferManager.js @@ -0,0 +1,67 @@ +/** + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// sample-metadata: +// title: Upload a File in Chunks With Transfer Manager +// description: Uploads a single file in in chunks in parallel utilizing transfer manager. +// usage: node uploadFileInChunksWithTransferManager.js + +function main( + bucketName = 'my-bucket', + filePath = './local/path/to/file.txt', + chunkSize = 32 * 1024 * 1024 +) { + // [START storage_transfer_manager_upload_chunks_concurrently] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The path of file to upload + // const filePath = 'path/to/your/file'; + + // The size of each chunk to be uploaded + // const chunkSize = 32 * 1024 * 1024; + + // Imports the Google Cloud client library + const {Storage, TransferManager} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Creates a transfer manager client + const transferManager = new TransferManager(storage.bucket(bucketName)); + + async function uploadFileInChunksWithTransferManager() { + // Uploads the files + await transferManager.uploadFileInChunks(filePath, { + chunkSizeBytes: chunkSize, + }); + + console.log(`${filePath} uploaded to ${bucketName}.`); + } + + uploadFileInChunksWithTransferManager().catch(console.error); + // [END storage_transfer_manager_upload_chunks_concurrently] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git 
a/storage/uploadFileWithKmsKey.js b/storage/uploadFileWithKmsKey.js new file mode 100644 index 0000000000..771638abca --- /dev/null +++ b/storage/uploadFileWithKmsKey.js @@ -0,0 +1,69 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This application demonstrates how to perform basic operations on files with + * the Google Cloud Storage API. + * + * For more information, see the README.md under /storage and the documentation + * at https://cloud.google.com/storage/docs. + */ + +function main( + bucketName = 'my-bucket', + filePath = 'test.txt', + kmsKeyName = process.env.GOOGLE_CLOUD_KMS_KEY_US, + generationMatchPrecondition = 0 +) { + // [START storage_upload_with_kms_key] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The path to your file to upload + // const filePath = 'path/to/your/file'; + + // The name of the KMS-key + // const kmsKeyName = 'my-key'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function uploadFileWithKmsKey() { + const options = { + kmsKeyName, + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. 
The request to upload is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; + + await storage.bucket(bucketName).upload(filePath, options); + + console.log(`${filePath} uploaded to ${bucketName} using ${kmsKeyName}.`); + } + + uploadFileWithKmsKey().catch(console.error); + // [END storage_upload_with_kms_key] +} +main(...process.argv.slice(2)); diff --git a/storage/uploadFromMemory.js b/storage/uploadFromMemory.js new file mode 100644 index 0000000000..cd6ec5166d --- /dev/null +++ b/storage/uploadFromMemory.js @@ -0,0 +1,54 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +function main( + bucketName = 'my-bucket', + contents = 'these are my file contents', + destFileName = 'file.txt' +) { + // [START storage_file_upload_from_memory] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The contents that you want to upload + // const contents = 'these are my contents'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // Imports the Google Cloud Node.js client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function uploadFromMemory() { + await storage.bucket(bucketName).file(destFileName).save(contents); + + console.log( + `${destFileName} with contents ${contents} uploaded to ${bucketName}.` + ); + } + + uploadFromMemory().catch(console.error); + // [END storage_file_upload_from_memory] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/uploadManyFilesWithTransferManager.js b/storage/uploadManyFilesWithTransferManager.js new file mode 100644 index 0000000000..cc0019f509 --- /dev/null +++ b/storage/uploadManyFilesWithTransferManager.js @@ -0,0 +1,67 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// sample-metadata: +// title: Upload Many Files With Transfer Manager +// description: Uploads many files in parallel utilizing transfer manager. 
+// usage: node uploadManyFilesWithTransferManager.js + +function main( + bucketName = 'my-bucket', + firstFilePath = './local/path/to/file1.txt', + secondFilePath = './local/path/to/file2.txt' +) { + // [START storage_transfer_manager_upload_many] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The ID of the first GCS file to upload + // const firstFilePath = 'your-first-file-name'; + + // The ID of the second GCS file to upload + // const secondFilePath = 'your-second-file-name'; + + // Imports the Google Cloud client library + const {Storage, TransferManager} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + // Creates a transfer manager client + const transferManager = new TransferManager(storage.bucket(bucketName)); + + async function uploadManyFilesWithTransferManager() { + // Uploads the files + await transferManager.uploadManyFiles([firstFilePath, secondFilePath]); + + for (const filePath of [firstFilePath, secondFilePath]) { + console.log(`${filePath} uploaded to ${bucketName}.`); + } + } + + uploadManyFilesWithTransferManager().catch(console.error); + // [END storage_transfer_manager_upload_many] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/uploadWithoutAuthentication.js b/storage/uploadWithoutAuthentication.js new file mode 100644 index 0000000000..f51e569404 --- /dev/null +++ b/storage/uploadWithoutAuthentication.js @@ -0,0 +1,76 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +function main( + bucketName = 'my-bucket', + contents = 'these are my file contents', + destFileName = 'file.txt', + generationMatchPrecondition = 0 +) { + // [START storage_upload_without_authentication] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The contents that you want to upload + // const contents = 'these are my contents'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // Imports the Google Cloud Node.js client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function uploadWithoutAuthentication() { + const file = storage.bucket(bucketName).file(destFileName); + + // Returns an authenticated endpoint to which + // you can make requests without credentials. + const [location] = await file.createResumableUpload(); //auth required + + const options = { + uri: location, + resumable: true, + validation: false, + + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to upload is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; + + // Passes the location to file.save so you don't need to + // authenticate this call + await file.save(contents, options); + + console.log(`${destFileName} uploaded to ${bucketName}`); + } + + uploadWithoutAuthentication().catch(console.error); + // [END storage_upload_without_authentication] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/uploadWithoutAuthenticationSignedUrl.js b/storage/uploadWithoutAuthenticationSignedUrl.js new file mode 100644 index 0000000000..4be8def3bf --- /dev/null +++ b/storage/uploadWithoutAuthenticationSignedUrl.js @@ -0,0 +1,84 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +function main( + bucketName = 'my-bucket', + contents = 'these are my file contents', + destFileName = 'file.txt' +) { + // [START storage_upload_without_authentication_signed_url] + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // The contents that you want to upload + // const contents = 'these are my contents'; + + // The new ID for your GCS file + // const destFileName = 'your-new-file-name'; + + // Imports the Google Cloud Node.js client library + const {Storage} = require('@google-cloud/storage'); + + const fetch = require('node-fetch'); + + // Creates a client + const storage = new Storage(); + + async function uploadWithoutAuthenticationSignedUrlStrategy() { + const file = storage.bucket(bucketName).file(destFileName); + + // Use signed URLs to manually start resumable uploads. + // Authenticating is required to get the signed URL, but isn't + // required to start the resumable upload + const options = { + version: 'v4', + action: 'resumable', + expires: Date.now() + 30 * 60 * 1000, // 30 mins + }; + //auth required + const [signedUrl] = await file.getSignedUrl(options); + + // no auth required + const resumableSession = await fetch(signedUrl, { + method: 'POST', + headers: { + 'x-goog-resumable': 'start', + }, + }); + + // Endpoint to which we should upload the file + const location = resumableSession.headers.location; + + // Passes the location to file.save so you don't need to + // authenticate this call + await file.save(contents, { + uri: location, + resumable: true, + validation: false, + }); + + console.log(`${destFileName} uploaded to ${bucketName}`); + } + + uploadWithoutAuthenticationSignedUrlStrategy().catch(console.error); + // [END storage_upload_without_authentication_signed_url] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/storage/viewBucketIamMembers.js b/storage/viewBucketIamMembers.js new file mode 100644 index 0000000000..4f498ea449 --- /dev/null +++ b/storage/viewBucketIamMembers.js @@ -0,0 +1,61 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache 
License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +function main(bucketName = 'my-bucket') { + // [START storage_view_bucket_iam_members] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of your GCS bucket + // const bucketName = 'your-unique-bucket-name'; + + // Imports the Google Cloud client library + const {Storage} = require('@google-cloud/storage'); + + // Creates a client + const storage = new Storage(); + + async function viewBucketIamMembers() { + // For more information please read: + // https://cloud.google.com/storage/docs/access-control/iam + const results = await storage + .bucket(bucketName) + .iam.getPolicy({requestedPolicyVersion: 3}); + + const bindings = results[0].bindings; + + console.log(`Bindings for bucket ${bucketName}:`); + for (const binding of bindings) { + console.log(` Role: ${binding.role}`); + console.log(' Members:'); + + const members = binding.members; + for (const member of members) { + console.log(` ${member}`); + } + + const condition = binding.condition; + if (condition) { + console.log(' Condition:'); + console.log(` Title: ${condition.title}`); + console.log(` Description: ${condition.description}`); + console.log(` Expression: ${condition.expression}`); + } + } + } + + viewBucketIamMembers().catch(console.error); + // [END storage_view_bucket_iam_members] +} +main(...process.argv.slice(2)); From 7fafefa94a613e28c617b02f9d754f01f81422b7 Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Thu, 12 Mar 
2026 23:33:10 +0000 Subject: [PATCH 02/11] refactor(buckets): standardize error handling for buckets.test.js Wrapped async calls in try...catch blocks, standardized error logging, and ensured strict mode/unhandledRejection fallbacks are present across all bucket samples. --- storage/addBucketLabel.js | 13 ++++-- storage/addBucketWebsiteConfiguration.js | 27 +++++++----- storage/bucketMetadata.js | 15 +++++-- storage/changeDefaultStorageClass.js | 15 +++++-- storage/configureBucketCors.js | 27 +++++++----- storage/createBucketWithDualRegion.js | 37 +++++++++------- .../createBucketWithHierarchicalNamespace.js | 31 +++++++------ storage/createBucketWithObjectRetention.js | 23 +++++++--- ...createBucketWithStorageClassAndLocation.js | 23 ++++++---- storage/createBucketWithTurboReplication.js | 23 +++++++--- storage/createNewBucket.js | 18 +++++--- storage/deleteBucket.js | 12 ++++-- storage/disableBucketVersioning.js | 23 ++++++---- storage/disableSoftDelete.js | 29 ++++++++----- storage/disableUniformBucketLevelAccess.js | 27 ++++++++---- storage/enableBucketVersioning.js | 23 ++++++---- storage/enableDefaultKMSKey.js | 29 ++++++++----- storage/enableUniformBucketLevelAccess.js | 27 ++++++++---- storage/getAutoclass.js | 20 ++++++--- storage/getPublicAccessPrevention.js | 17 ++++++-- storage/getRPO.js | 10 ++++- storage/getSoftDeletePolicy.js | 43 +++++++++++-------- storage/getUniformBucketLevelAccess.js | 37 ++++++++++------ storage/listBuckets.js | 18 +++++--- storage/makeBucketPublic.js | 15 +++++-- storage/removeBucketCors.js | 15 +++++-- storage/removeBucketLabel.js | 19 +++++--- storage/removeDefaultKMSKey.js | 23 ++++++---- storage/setAutoclass.js | 28 +++++++----- storage/setPublicAccessPreventionEnforced.js | 27 ++++++++---- storage/setPublicAccessPreventionInherited.js | 21 ++++++--- storage/setRPOAsyncTurbo.js | 17 ++++++-- storage/setRPODefault.js | 16 ++++--- storage/setSoftDeletePolicy.js | 29 ++++++++----- 34 files changed, 519 insertions(+), 258 
deletions(-) diff --git a/storage/addBucketLabel.js b/storage/addBucketLabel.js index bb814a831b..6cc6228951 100644 --- a/storage/addBucketLabel.js +++ b/storage/addBucketLabel.js @@ -48,11 +48,18 @@ function main( }; async function addBucketLabel() { - await storage.bucket(bucketName).setMetadata({labels}); - console.log(`Added label to bucket ${bucketName}`); + try { + await storage.bucket(bucketName).setMetadata({labels}); + console.log(`Added label to bucket ${bucketName}`); + } catch (error) { + console.error( + 'Error executing add bucket label:', + error.message || error + ); + } } - addBucketLabel().catch(console.error); + addBucketLabel(); // [END storage_add_bucket_label] } process.on('unhandledRejection', err => { diff --git a/storage/addBucketWebsiteConfiguration.js b/storage/addBucketWebsiteConfiguration.js index 75d07a5ea7..d8541ec816 100644 --- a/storage/addBucketWebsiteConfiguration.js +++ b/storage/addBucketWebsiteConfiguration.js @@ -44,19 +44,26 @@ function main( const storage = new Storage(); async function addBucketWebsiteConfiguration() { - await storage.bucket(bucketName).setMetadata({ - website: { - mainPageSuffix, - notFoundPage, - }, - }); + try { + await storage.bucket(bucketName).setMetadata({ + website: { + mainPageSuffix, + notFoundPage, + }, + }); - console.log( - `Static website bucket ${bucketName} is set up to use ${mainPageSuffix} as the index page and ${notFoundPage} as the 404 page` - ); + console.log( + `Static website bucket ${bucketName} is set up to use ${mainPageSuffix} as the index page and ${notFoundPage} as the 404 page` + ); + } catch (error) { + console.error( + 'Error executing add bucket website configuration:', + error.message || error + ); + } } - addBucketWebsiteConfiguration().catch(console.error); + addBucketWebsiteConfiguration(); // [END storage_define_bucket_website_configuration] } process.on('unhandledRejection', err => { diff --git a/storage/bucketMetadata.js b/storage/bucketMetadata.js index 
eab6c80c59..399354e0b2 100644 --- a/storage/bucketMetadata.js +++ b/storage/bucketMetadata.js @@ -35,12 +35,19 @@ function main(bucketName = 'my-bucket') { // const bucketName = 'your-unique-bucket-name'; // Get Bucket Metadata - const [metadata] = await storage.bucket(bucketName).getMetadata(); - - console.log(JSON.stringify(metadata, null, 2)); + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + + console.log(JSON.stringify(metadata, null, 2)); + } catch (error) { + console.error( + 'Error executing get bucket metadata:', + error.message || error + ); + } } // [END storage_get_bucket_metadata] - getBucketMetadata().catch(console.error); + getBucketMetadata(); } main(...process.argv.slice(2)); diff --git a/storage/changeDefaultStorageClass.js b/storage/changeDefaultStorageClass.js index c32346c73a..990ad805ba 100644 --- a/storage/changeDefaultStorageClass.js +++ b/storage/changeDefaultStorageClass.js @@ -39,12 +39,19 @@ function main(bucketName = 'my-bucket', storageClass = 'standard') { const storage = new Storage(); async function changeDefaultStorageClass() { - await storage.bucket(bucketName).setStorageClass(storageClass); - - console.log(`${bucketName} has been set to ${storageClass}`); + try { + await storage.bucket(bucketName).setStorageClass(storageClass); + + console.log(`${bucketName} has been set to ${storageClass}`); + } catch (error) { + console.error( + 'Error executing change default storage class:', + error.message || error + ); + } } - changeDefaultStorageClass().catch(console.error); + changeDefaultStorageClass(); // [END storage_change_default_storage_class] } process.on('unhandledRejection', err => { diff --git a/storage/configureBucketCors.js b/storage/configureBucketCors.js index cd2893d00a..0e24b30f63 100644 --- a/storage/configureBucketCors.js +++ b/storage/configureBucketCors.js @@ -55,21 +55,28 @@ function main( // const method = 'GET'; async function configureBucketCors() { - await 
storage.bucket(bucketName).setCorsConfiguration([ - { - maxAgeSeconds, - method: [method], - origin: [origin], - responseHeader: [responseHeader], - }, - ]); + try { + await storage.bucket(bucketName).setCorsConfiguration([ + { + maxAgeSeconds, + method: [method], + origin: [origin], + responseHeader: [responseHeader], + }, + ]); - console.log(`Bucket ${bucketName} was updated with a CORS config + console.log(`Bucket ${bucketName} was updated with a CORS config to allow ${method} requests from ${origin} sharing ${responseHeader} responses across origins`); + } catch (error) { + console.error( + 'Error executing configure bucket cors:', + error.message || error + ); + } } - configureBucketCors().catch(console.error); + configureBucketCors(); // [END storage_cors_configuration] } diff --git a/storage/createBucketWithDualRegion.js b/storage/createBucketWithDualRegion.js index cd145d8e2a..d49e20d56c 100644 --- a/storage/createBucketWithDualRegion.js +++ b/storage/createBucketWithDualRegion.js @@ -51,24 +51,31 @@ function main( async function createDualRegionBucket() { // For regions supporting dual-regions see: https://cloud.google.com/storage/docs/locations - const [bucket] = await storage.createBucket(bucketName, { - location, - customPlacementConfig: { - dataLocations: [region1, region2], - }, - }); + try { + const [bucket] = await storage.createBucket(bucketName, { + location, + customPlacementConfig: { + dataLocations: [region1, region2], + }, + }); - console.log(`Created '${bucket.name}'`); - console.log(`- location: '${bucket.metadata.location}'`); - console.log(`- locationType: '${bucket.metadata.locationType}'`); - console.log( - `- customPlacementConfig: '${JSON.stringify( - bucket.metadata.customPlacementConfig - )}'` - ); + console.log(`Created '${bucket.name}'`); + console.log(`- location: '${bucket.metadata.location}'`); + console.log(`- locationType: '${bucket.metadata.locationType}'`); + console.log( + `- customPlacementConfig: '${JSON.stringify( + 
bucket.metadata.customPlacementConfig + )}'` + ); + } catch (error) { + console.error( + 'Error executing create dual region bucket:', + error.message || error + ); + } } - createDualRegionBucket().catch(console.error); + createDualRegionBucket(); // [END storage_create_bucket_dual_region] } diff --git a/storage/createBucketWithHierarchicalNamespace.js b/storage/createBucketWithHierarchicalNamespace.js index 8641cacff1..efb5c40ea0 100644 --- a/storage/createBucketWithHierarchicalNamespace.js +++ b/storage/createBucketWithHierarchicalNamespace.js @@ -38,23 +38,30 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function createBucketWithHierarchicalNamespace() { - const [bucket] = await storage.createBucket(bucketName, { - iamConfiguration: { - uniformBucketLevelAccess: { + try { + const [bucket] = await storage.createBucket(bucketName, { + iamConfiguration: { + uniformBucketLevelAccess: { + enabled: true, + }, + }, + hierarchicalNamespace: { enabled: true, }, - }, - hierarchicalNamespace: { - enabled: true, - }, - }); + }); - console.log( - `Created '${bucket.name}' with hierarchical namespace enabled.` - ); + console.log( + `Created '${bucket.name}' with hierarchical namespace enabled.` + ); + } catch (error) { + console.error( + 'Error executing create bucket with hierarchical namespace:', + error.message || error + ); + } } - createBucketWithHierarchicalNamespace().catch(console.error); + createBucketWithHierarchicalNamespace(); // [END storage_create_bucket_hierarchical_namespace] } diff --git a/storage/createBucketWithObjectRetention.js b/storage/createBucketWithObjectRetention.js index c51ddf7b48..fb6d29c654 100644 --- a/storage/createBucketWithObjectRetention.js +++ b/storage/createBucketWithObjectRetention.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + // sample-metadata: // title: Create a Bucket with object retention enabled. // description: Create a Bucket with object retention enabled. 
@@ -36,16 +38,23 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function createBucketWithObjectRetention() { - const [bucket] = await storage.createBucket(bucketName, { - enableObjectRetention: true, - }); + try { + const [bucket] = await storage.createBucket(bucketName, { + enableObjectRetention: true, + }); - console.log( - `Created '${bucket.name}' with object retention enabled setting: ${bucket.metadata.objectRetention.mode}` - ); + console.log( + `Created '${bucket.name}' with object retention enabled setting: ${bucket.metadata.objectRetention.mode}` + ); + } catch (error) { + console.error( + 'Error executing create bucket with object retention:', + error.message || error + ); + } } - createBucketWithObjectRetention().catch(console.error); + createBucketWithObjectRetention(); // [END storage_create_bucket_with_object_retention] } diff --git a/storage/createBucketWithStorageClassAndLocation.js b/storage/createBucketWithStorageClassAndLocation.js index 27ee7e70a7..5b4adaa796 100644 --- a/storage/createBucketWithStorageClassAndLocation.js +++ b/storage/createBucketWithStorageClassAndLocation.js @@ -54,17 +54,24 @@ function main( async function createBucketWithStorageClassAndLocation() { // For default values see: https://cloud.google.com/storage/docs/locations and // https://cloud.google.com/storage/docs/storage-classes - const [bucket] = await storage.createBucket(bucketName, { - location, - [storageClass]: true, - }); + try { + const [bucket] = await storage.createBucket(bucketName, { + location, + [storageClass]: true, + }); - console.log( - `${bucket.name} created with ${storageClass} class in ${location}` - ); + console.log( + `${bucket.name} created with ${storageClass} class in ${location}` + ); + } catch (error) { + console.error( + 'Error executing create bucket with storage class and location:', + error.message || error + ); + } } - createBucketWithStorageClassAndLocation().catch(console.error); + 
createBucketWithStorageClassAndLocation(); // [END storage_create_bucket_class_location] } diff --git a/storage/createBucketWithTurboReplication.js b/storage/createBucketWithTurboReplication.js index 566f0c4380..fdedac9be5 100644 --- a/storage/createBucketWithTurboReplication.js +++ b/storage/createBucketWithTurboReplication.js @@ -22,6 +22,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket', location = 'NAM4') { // [START storage_create_bucket_turbo_replication] /** @@ -49,14 +51,21 @@ function main(bucketName = 'my-bucket', location = 'NAM4') { async function createBucketWithTurboReplication() { // For default values see: https://cloud.google.com/storage/docs/locations and // https://cloud.google.com/storage/docs/storage-classes - const [bucket] = await storage.createBucket(bucketName, { - location, - rpo, - }); + try { + const [bucket] = await storage.createBucket(bucketName, { + location, + rpo, + }); - console.log( - `${bucket.name} created with the recovery point objective (RPO) set to ${rpo} in ${location}.` - ); + console.log( + `${bucket.name} created with the recovery point objective (RPO) set to ${rpo} in ${location}.` + ); + } catch (error) { + console.error( + 'Error executing create bucket with turbo replication:', + error.message || error + ); + } } createBucketWithTurboReplication(); diff --git a/storage/createNewBucket.js b/storage/createNewBucket.js index e3afa22edf..9438769ae9 100644 --- a/storage/createNewBucket.js +++ b/storage/createNewBucket.js @@ -14,6 +14,8 @@ * limitations under the License. 
*/ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_create_bucket] /** @@ -37,15 +39,19 @@ function main(bucketName = 'my-bucket') { // For default values see: https://cloud.google.com/storage/docs/locations and // https://cloud.google.com/storage/docs/storage-classes - const [bucket] = await storage.createBucket(bucketName, { - location: 'ASIA', - storageClass: 'COLDLINE', - }); + try { + const [bucket] = await storage.createBucket(bucketName, { + location: 'ASIA', + storageClass: 'COLDLINE', + }); - console.log(`Bucket ${bucket.name} created.`); + console.log(`Bucket ${bucket.name} created.`); + } catch (error) { + console.error('Error executing create bucket:', error.message || error); + } } - createBucket().catch(console.error); + createBucket(); // [END storage_create_bucket] } diff --git a/storage/deleteBucket.js b/storage/deleteBucket.js index 20c2036d75..0b08732e8f 100644 --- a/storage/deleteBucket.js +++ b/storage/deleteBucket.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on buckets with * the Google Cloud Storage API. 
@@ -35,11 +37,15 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function deleteBucket() { - await storage.bucket(bucketName).delete(); - console.log(`Bucket ${bucketName} deleted`); + try { + await storage.bucket(bucketName).delete(); + console.log(`Bucket ${bucketName} deleted`); + } catch (error) { + console.error('Error executing delete bucket:', error.message || error); + } } - deleteBucket().catch(console.error); + deleteBucket(); // [END storage_delete_bucket] } diff --git a/storage/disableBucketVersioning.js b/storage/disableBucketVersioning.js index 4a1a438292..3c6a4f060e 100644 --- a/storage/disableBucketVersioning.js +++ b/storage/disableBucketVersioning.js @@ -34,16 +34,23 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function disableBucketVersioning() { - await storage.bucket(bucketName).setMetadata({ - versioning: { - enabled: false, - }, - }); - - console.log(`Versioning is disabled for bucket ${bucketName}`); + try { + await storage.bucket(bucketName).setMetadata({ + versioning: { + enabled: false, + }, + }); + + console.log(`Versioning is disabled for bucket ${bucketName}`); + } catch (error) { + console.error( + 'Error executing disable bucket versioning:', + error.message || error + ); + } } - disableBucketVersioning().catch(console.error); + disableBucketVersioning(); // [END storage_disable_versioning] } diff --git a/storage/disableSoftDelete.js b/storage/disableSoftDelete.js index 4ea98c5132..a65aba4462 100644 --- a/storage/disableSoftDelete.js +++ b/storage/disableSoftDelete.js @@ -14,6 +14,8 @@ * limitations under the License. 
*/ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_disable_soft_delete] /** @@ -29,18 +31,25 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function disableSoftDelete() { - const options = { - softDeletePolicy: { - retentionDurationSeconds: 0, - }, - }; - - const [metadata] = await storage.bucket(bucketName).setMetadata(options); - - console.log(`Bucket ${metadata.name} soft delete policy was disabled`); + try { + const options = { + softDeletePolicy: { + retentionDurationSeconds: 0, + }, + }; + + const [metadata] = await storage.bucket(bucketName).setMetadata(options); + + console.log(`Bucket ${metadata.name} soft delete policy was disabled`); + } catch (error) { + console.error( + 'Error executing disable soft delete:', + error.message || error + ); + } } - disableSoftDelete().catch(console.error); + disableSoftDelete(); // [END storage_disable_soft_delete] } diff --git a/storage/disableUniformBucketLevelAccess.js b/storage/disableUniformBucketLevelAccess.js index 8a1cfd2495..1b80c0b594 100644 --- a/storage/disableUniformBucketLevelAccess.js +++ b/storage/disableUniformBucketLevelAccess.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. 
*/ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_disable_uniform_bucket_level_access] /** @@ -35,18 +37,27 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function disableUniformBucketLevelAccess() { // Disables uniform bucket-level access for the bucket - await storage.bucket(bucketName).setMetadata({ - iamConfiguration: { - uniformBucketLevelAccess: { - enabled: false, + try { + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + uniformBucketLevelAccess: { + enabled: false, + }, }, - }, - }); + }); - console.log(`Uniform bucket-level access was disabled for ${bucketName}.`); + console.log( + `Uniform bucket-level access was disabled for ${bucketName}.` + ); + } catch (error) { + console.error( + 'Error executing disable uniform bucket-level access:', + error.message || error + ); + } } - disableUniformBucketLevelAccess().catch(console.error); + disableUniformBucketLevelAccess(); // [END storage_disable_uniform_bucket_level_access] } main(...process.argv.slice(2)); diff --git a/storage/enableBucketVersioning.js b/storage/enableBucketVersioning.js index 66d81197cc..e0bb9408d1 100644 --- a/storage/enableBucketVersioning.js +++ b/storage/enableBucketVersioning.js @@ -34,16 +34,23 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function enableBucketVersioning() { - await storage.bucket(bucketName).setMetadata({ - versioning: { - enabled: true, - }, - }); - - console.log(`Versioning is enabled for bucket ${bucketName}`); + try { + await storage.bucket(bucketName).setMetadata({ + versioning: { + enabled: true, + }, + }); + + console.log(`Versioning is enabled for bucket ${bucketName}`); + } catch (error) { + console.error( + 'Error executing enable bucket versioning:', + error.message || error + ); + } } - enableBucketVersioning().catch(console.error); + enableBucketVersioning(); // [END storage_enable_versioning] } diff --git 
a/storage/enableDefaultKMSKey.js b/storage/enableDefaultKMSKey.js index adc3b3505b..9d1eef7590 100644 --- a/storage/enableDefaultKMSKey.js +++ b/storage/enableDefaultKMSKey.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main( bucketName = 'my-bucket', defaultKmsKeyName = process.env.GOOGLE_CLOUD_KMS_KEY_ASIA @@ -41,18 +43,25 @@ function main( const storage = new Storage(); async function enableDefaultKMSKey() { - await storage.bucket(bucketName).setMetadata({ - encryption: { - defaultKmsKeyName, - }, - }); - - console.log( - `Default KMS key for ${bucketName} was set to ${defaultKmsKeyName}.` - ); + try { + await storage.bucket(bucketName).setMetadata({ + encryption: { + defaultKmsKeyName, + }, + }); + + console.log( + `Default KMS key for ${bucketName} was set to ${defaultKmsKeyName}.` + ); + } catch (error) { + console.error( + 'Error executing enable default KMS key:', + error.message || error + ); + } } - enableDefaultKMSKey().catch(console.error); + enableDefaultKMSKey(); // [END storage_set_bucket_default_kms_key] } diff --git a/storage/enableUniformBucketLevelAccess.js b/storage/enableUniformBucketLevelAccess.js index 3bbc7c49aa..828fdf0247 100644 --- a/storage/enableUniformBucketLevelAccess.js +++ b/storage/enableUniformBucketLevelAccess.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. 
*/ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_enable_uniform_bucket_level_access] /** @@ -36,18 +38,25 @@ function main(bucketName = 'my-bucket') { // Enables uniform bucket-level access for the bucket async function enableUniformBucketLevelAccess() { - await storage.bucket(bucketName).setMetadata({ - iamConfiguration: { - uniformBucketLevelAccess: { - enabled: true, + try { + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + uniformBucketLevelAccess: { + enabled: true, + }, }, - }, - }); - - console.log(`Uniform bucket-level access was enabled for ${bucketName}.`); + }); + + console.log(`Uniform bucket-level access was enabled for ${bucketName}.`); + } catch (error) { + console.error( + 'Error executing enable uniform bucket-level access:', + error.message || error + ); + } } - enableUniformBucketLevelAccess().catch(console.error); + enableUniformBucketLevelAccess(); // [END storage_enable_uniform_bucket_level_access] } diff --git a/storage/getAutoclass.js b/storage/getAutoclass.js index 44ae5ff0a2..55cf523b87 100644 --- a/storage/getAutoclass.js +++ b/storage/getAutoclass.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_get_autoclass] /** @@ -35,18 +37,22 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function getAutoclass() { - const [metadata] = await storage.bucket(bucketName).getMetadata(); - console.log( - `Autoclass is ${ - metadata.autoclass.enabled ? 'enabled' : 'disabled' - } for ${metadata.name} at ${metadata.autoclass.toggleTime}. + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log( + `Autoclass is ${ + metadata.autoclass.enabled ? 'enabled' : 'disabled' + } for ${metadata.name} at ${metadata.autoclass.toggleTime}. 
Autoclass terminal storage class is last updated to ${ metadata.autoclass.terminalStorageClass } at ${metadata.autoclass.terminalStorageClassUpdateTime}.` - ); + ); + } catch (error) { + console.error('Error executing get autoclass:', error.message || error); + } } - getAutoclass().catch(console.error); + getAutoclass(); // [END storage_get_autoclass] } main(...process.argv.slice(2)); diff --git a/storage/getPublicAccessPrevention.js b/storage/getPublicAccessPrevention.js index 7a53f7ff22..e782ed529e 100644 --- a/storage/getPublicAccessPrevention.js +++ b/storage/getPublicAccessPrevention.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_get_public_access_prevention] /** @@ -36,10 +38,17 @@ function main(bucketName = 'my-bucket') { async function getPublicAccessPrevention() { // Gets Bucket Metadata and prints publicAccessPrevention value (either 'inherited' or 'enforced'). - const [metadata] = await storage.bucket(bucketName).getMetadata(); - console.log( - `Public access prevention is ${metadata.iamConfiguration.publicAccessPrevention} for ${bucketName}.` - ); + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log( + `Public access prevention is ${metadata.iamConfiguration.publicAccessPrevention} for ${bucketName}.` + ); + } catch (error) { + console.error( + 'Error executing get public access prevention:', + error.message || error + ); + } } getPublicAccessPrevention(); diff --git a/storage/getRPO.js b/storage/getRPO.js index 00d788208f..1d7bd59523 100644 --- a/storage/getRPO.js +++ b/storage/getRPO.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_get_rpo] /** @@ -37,8 +39,12 @@ function main(bucketName = 'my-bucket') { async function getRPO() { // Gets Bucket Metadata and prints RPO value (either 'default' or 'async_turbo'). 
// If RPO is undefined, the bucket is a single region bucket - const [metadata] = await storage.bucket(bucketName).getMetadata(); - console.log(`RPO is ${metadata.rpo} for ${bucketName}.`); + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log(`RPO is ${metadata.rpo} for ${bucketName}.`); + } catch (error) { + console.error('Error executing get RPO:', error.message || error); + } } getRPO(); diff --git a/storage/getSoftDeletePolicy.js b/storage/getSoftDeletePolicy.js index fa5efebf2f..b7be0acd39 100644 --- a/storage/getSoftDeletePolicy.js +++ b/storage/getSoftDeletePolicy.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_get_soft_delete_policy] /** @@ -29,27 +31,34 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function getSoftDeletePolicy() { - const [metadata] = await storage.bucket(bucketName).getMetadata(); - - const softDelete = metadata.softDeletePolicy; - if ( - !softDelete || - !softDelete.retentionDurationSeconds || - softDelete.retentionDurationSeconds === '0' - ) { - console.log(`Bucket ${metadata.name} soft delete policy was disabled`); - } else { - console.log(`Soft delete policy for ${metadata.name}`); - console.log( - `Soft delete Period: ${softDelete.retentionDurationSeconds} seconds` - ); - if (softDelete.effectiveTime) { - console.log(`Effective Time: ${softDelete.effectiveTime}`); + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + + const softDelete = metadata.softDeletePolicy; + if ( + !softDelete || + !softDelete.retentionDurationSeconds || + softDelete.retentionDurationSeconds === '0' + ) { + console.log(`Bucket ${metadata.name} soft delete policy was disabled`); + } else { + console.log(`Soft delete policy for ${metadata.name}`); + console.log( + `Soft delete Period: ${softDelete.retentionDurationSeconds} seconds` + ); + if (softDelete.effectiveTime) { + 
console.log(`Effective Time: ${softDelete.effectiveTime}`); + } } + } catch (error) { + console.error( + 'Error executing get soft delete policy:', + error.message || error + ); } } - getSoftDeletePolicy().catch(console.error); + getSoftDeletePolicy(); // [END storage_get_soft_delete_policy] } diff --git a/storage/getUniformBucketLevelAccess.js b/storage/getUniformBucketLevelAccess.js index e6382135e3..ff4dddce82 100644 --- a/storage/getUniformBucketLevelAccess.js +++ b/storage/getUniformBucketLevelAccess.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_get_uniform_bucket_level_access] /** @@ -36,23 +38,32 @@ function main(bucketName = 'my-bucket') { async function getUniformBucketLevelAccess() { // Gets Bucket Metadata and checks if uniform bucket-level access is enabled. - const [metadata] = await storage.bucket(bucketName).getMetadata(); - - if (metadata.iamConfiguration) { - const uniformBucketLevelAccess = - metadata.iamConfiguration.uniformBucketLevelAccess; - console.log(`Uniform bucket-level access is enabled for ${bucketName}.`); - console.log( - `Bucket will be locked on ${uniformBucketLevelAccess.lockedTime}.` - ); - } else { - console.log( - `Uniform bucket-level access is not enabled for ${bucketName}.` + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + + if (metadata.iamConfiguration) { + const uniformBucketLevelAccess = + metadata.iamConfiguration.uniformBucketLevelAccess; + console.log( + `Uniform bucket-level access is enabled for ${bucketName}.` + ); + console.log( + `Bucket will be locked on ${uniformBucketLevelAccess.lockedTime}.` + ); + } else { + console.log( + `Uniform bucket-level access is not enabled for ${bucketName}.` + ); + } + } catch (error) { + console.error( + 'Error executing get uniform bucket-level access:', + error.message || error ); } } - getUniformBucketLevelAccess().catch(console.error); + 
getUniformBucketLevelAccess(); // [END storage_get_uniform_bucket_level_access] } diff --git a/storage/listBuckets.js b/storage/listBuckets.js index 3c06f4fc2e..033495b909 100644 --- a/storage/listBuckets.js +++ b/storage/listBuckets.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + function main() { // [START storage_list_buckets] // Imports the Google Cloud client library @@ -23,15 +25,19 @@ function main() { const storage = new Storage(); async function listBuckets() { - const [buckets] = await storage.getBuckets(); + try { + const [buckets] = await storage.getBuckets(); - console.log('Buckets:'); - buckets.forEach(bucket => { - console.log(bucket.name); - }); + console.log('Buckets:'); + buckets.forEach(bucket => { + console.log(bucket.name); + }); + } catch (error) { + console.error('Error executing list buckets:', error.message || error); + } } - listBuckets().catch(console.error); + listBuckets(); // [END storage_list_buckets] } diff --git a/storage/makeBucketPublic.js b/storage/makeBucketPublic.js index 46aaa958fa..447a0da6be 100644 --- a/storage/makeBucketPublic.js +++ b/storage/makeBucketPublic.js @@ -34,12 +34,19 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function makeBucketPublic() { - await storage.bucket(bucketName).makePublic(); - - console.log(`Bucket ${bucketName} is now publicly readable`); + try { + await storage.bucket(bucketName).makePublic(); + + console.log(`Bucket ${bucketName} is now publicly readable`); + } catch (error) { + console.error( + 'Error executing make bucket public:', + error.message || error + ); + } } - makeBucketPublic().catch(console.error); + makeBucketPublic(); // [END storage_set_bucket_public_iam] } process.on('unhandledRejection', err => { diff --git a/storage/removeBucketCors.js b/storage/removeBucketCors.js index 62a122d80e..26b30268a0 100644 --- a/storage/removeBucketCors.js +++ b/storage/removeBucketCors.js @@ -34,12 +34,19 @@ function main(bucketName = 
'my-bucket') { const storage = new Storage(); async function removeBucketCors() { - await storage.bucket(bucketName).setCorsConfiguration([]); - - console.log(`Removed CORS configuration from bucket ${bucketName}`); + try { + await storage.bucket(bucketName).setCorsConfiguration([]); + + console.log(`Removed CORS configuration from bucket ${bucketName}`); + } catch (error) { + console.error( + 'Error executing remove bucket cors:', + error.message || error + ); + } } - removeBucketCors().catch(console.error); + removeBucketCors(); // [END storage_remove_cors_configuration] } diff --git a/storage/removeBucketLabel.js b/storage/removeBucketLabel.js index 8df82f0121..0f6a547615 100644 --- a/storage/removeBucketLabel.js +++ b/storage/removeBucketLabel.js @@ -37,14 +37,21 @@ function main(bucketName = 'my-bucket', labelKey = 'labelone') { const storage = new Storage(); async function removeBucketLabel() { - const labels = {}; - // To remove a label set the value of the key to null. - labels[labelKey] = null; - await storage.bucket(bucketName).setMetadata({labels}); - console.log(`Removed labels from bucket ${bucketName}`); + try { + const labels = {}; + // To remove a label set the value of the key to null. 
+ labels[labelKey] = null; + await storage.bucket(bucketName).setMetadata({labels}); + console.log(`Removed labels from bucket ${bucketName}`); + } catch (error) { + console.error( + 'Error executing remove bucket label:', + error.message || error + ); + } } - removeBucketLabel().catch(console.error); + removeBucketLabel(); // [END storage_remove_bucket_label] } process.on('unhandledRejection', err => { diff --git a/storage/removeDefaultKMSKey.js b/storage/removeDefaultKMSKey.js index 8c754f9232..203e5c7fbf 100644 --- a/storage/removeDefaultKMSKey.js +++ b/storage/removeDefaultKMSKey.js @@ -34,16 +34,23 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function removeDefaultKMSKey() { - await storage.bucket(bucketName).setMetadata({ - encryption: { - defaultKmsKeyName: null, - }, - }); - - console.log(`Default KMS key was removed from ${bucketName}`); + try { + await storage.bucket(bucketName).setMetadata({ + encryption: { + defaultKmsKeyName: null, + }, + }); + + console.log(`Default KMS key was removed from ${bucketName}`); + } catch (error) { + console.error( + 'Error executing remove default KMS key:', + error.message || error + ); + } } - removeDefaultKMSKey().catch(console.error); + removeDefaultKMSKey(); // [END storage_bucket_delete_default_kms_key] } diff --git a/storage/setAutoclass.js b/storage/setAutoclass.js index d8c906ac09..e6c8f16027 100644 --- a/storage/setAutoclass.js +++ b/storage/setAutoclass.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + function main( bucketName = 'my-bucket', toggle = true, @@ -39,19 +41,23 @@ function main( // Configure the Autoclass setting for a bucket. // terminalStorageClass field is optional and defaults to NEARLINE if not otherwise specified. // Valid terminalStorageClass values are NEARLINE and ARCHIVE. 
- const [metadata] = await storage.bucket(bucketName).setMetadata({ - autoclass: { - enabled: toggle, - terminalStorageClass, - }, - }); - - console.log( - `Autoclass terminal storage class is ${metadata.autoclass.terminalStorageClass}.` - ); + try { + const [metadata] = await storage.bucket(bucketName).setMetadata({ + autoclass: { + enabled: toggle, + terminalStorageClass, + }, + }); + + console.log( + `Autoclass terminal storage class is ${metadata.autoclass.terminalStorageClass}.` + ); + } catch (error) { + console.error('Error executing set autoclass:', error.message || error); + } } - setAutoclass().catch(console.error); + setAutoclass(); // [END storage_set_autoclass] } diff --git a/storage/setPublicAccessPreventionEnforced.js b/storage/setPublicAccessPreventionEnforced.js index 036bf8bec6..71885230dd 100644 --- a/storage/setPublicAccessPreventionEnforced.js +++ b/storage/setPublicAccessPreventionEnforced.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_set_public_access_prevention_enforced] /** @@ -36,15 +38,22 @@ function main(bucketName = 'my-bucket') { // Enforces public access prevention for the bucket async function setPublicAccessPreventionEnforced() { - await storage.bucket(bucketName).setMetadata({ - iamConfiguration: { - publicAccessPrevention: 'enforced', - }, - }); - - console.log( - `Public access prevention is set to enforced for ${bucketName}.` - ); + try { + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + publicAccessPrevention: 'enforced', + }, + }); + + console.log( + `Public access prevention is set to enforced for ${bucketName}.` + ); + } catch (error) { + console.error( + 'Error executing set public access prevention enforced:', + error.message || error + ); + } } setPublicAccessPreventionEnforced(); diff --git a/storage/setPublicAccessPreventionInherited.js b/storage/setPublicAccessPreventionInherited.js index 
14f61e0cdb..c0fc042ad5 100644 --- a/storage/setPublicAccessPreventionInherited.js +++ b/storage/setPublicAccessPreventionInherited.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_set_public_access_prevention_inherited] /** @@ -34,13 +36,20 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function setPublicAccessPreventionInherited() { // Sets public access prevention to 'inherited' for the bucket - await storage.bucket(bucketName).setMetadata({ - iamConfiguration: { - publicAccessPrevention: 'inherited', - }, - }); + try { + await storage.bucket(bucketName).setMetadata({ + iamConfiguration: { + publicAccessPrevention: 'inherited', + }, + }); - console.log(`Public access prevention is 'inherited' for ${bucketName}.`); + console.log(`Public access prevention is 'inherited' for ${bucketName}.`); + } catch (error) { + console.error( + 'Error executing set public access prevention inherited:', + error.message || error + ); + } } setPublicAccessPreventionInherited(); diff --git a/storage/setRPOAsyncTurbo.js b/storage/setRPOAsyncTurbo.js index 3ce3c54701..64fcbccfa2 100644 --- a/storage/setRPOAsyncTurbo.js +++ b/storage/setRPOAsyncTurbo.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_set_rpo_async_turbo] /** @@ -37,11 +39,18 @@ function main(bucketName = 'my-bucket') { // Enable turbo replication for the bucket by setting rpo to ASYNC_TURBO. // The bucket must be a dual-region bucket. 
async function setRPOAsyncTurbo() { - await storage.bucket(bucketName).setMetadata({ - rpo: 'ASYNC_TURBO', - }); + try { + await storage.bucket(bucketName).setMetadata({ + rpo: 'ASYNC_TURBO', + }); - console.log(`Turbo replication enabled for ${bucketName}.`); + console.log(`Turbo replication enabled for ${bucketName}.`); + } catch (error) { + console.error( + 'Error executing set RPO async turbo:', + error.message || error + ); + } } setRPOAsyncTurbo(); diff --git a/storage/setRPODefault.js b/storage/setRPODefault.js index 0e1f604c2e..fd134b6f77 100644 --- a/storage/setRPODefault.js +++ b/storage/setRPODefault.js @@ -20,6 +20,8 @@ * at https://cloud.google.com/storage/docs. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_set_rpo_default] /** @@ -37,11 +39,15 @@ function main(bucketName = 'my-bucket') { // Disable turbo replication for the bucket by setting RPO to default. // The bucket must be a dual-region bucket. async function setRPODefault() { - await storage.bucket(bucketName).setMetadata({ - rpo: 'DEFAULT', - }); - - console.log(`Turbo replication disabled for ${bucketName}.`); + try { + await storage.bucket(bucketName).setMetadata({ + rpo: 'DEFAULT', + }); + + console.log(`Turbo replication disabled for ${bucketName}.`); + } catch (error) { + console.error('Error executing set RPO default:', error.message || error); + } } setRPODefault(); diff --git a/storage/setSoftDeletePolicy.js b/storage/setSoftDeletePolicy.js index 89f631604b..30f7da9d06 100644 --- a/storage/setSoftDeletePolicy.js +++ b/storage/setSoftDeletePolicy.js @@ -14,6 +14,8 @@ * limitations under the License. 
*/ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_set_soft_delete_policy] /** @@ -29,18 +31,25 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function setSoftDeletePolicy() { - const options = { - softDeletePolicy: { - retentionDurationSeconds: 604800, // 7 days (in seconds) - }, - }; - - const [metadata] = await storage.bucket(bucketName).setMetadata(options); - - console.log(`Bucket ${metadata.name} soft delete policy set to 7 days`); + try { + const options = { + softDeletePolicy: { + retentionDurationSeconds: 604800, // 7 days (in seconds) + }, + }; + + const [metadata] = await storage.bucket(bucketName).setMetadata(options); + + console.log(`Bucket ${metadata.name} soft delete policy set to 7 days`); + } catch (error) { + console.error( + 'Error executing set soft delete policy:', + error.message || error + ); + } } - setSoftDeletePolicy().catch(console.error); + setSoftDeletePolicy(); // [END storage_set_soft_delete_policy] } From 9bd5aedf904ba81ffefe215b46ae997aa178f33e Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 17:36:31 +0000 Subject: [PATCH 03/11] refactor(acl): standardize error handling and address PR feedback in ACL samples --- storage/addBucketDefaultOwnerAcl.js | 25 ++++++++++++++++------- storage/addBucketOwnerAcl.js | 25 ++++++++++++++++------- storage/addFileOwnerAcl.js | 21 +++++++++++++------ storage/printBucketAcl.js | 21 +++++++++++++------ storage/printBucketAclForUser.js | 25 +++++++++++++++-------- storage/printFileAcl.js | 20 ++++++++++++------- storage/printFileAclForUser.js | 31 +++++++++++++++++++---------- storage/removeBucketDefaultOwner.js | 23 ++++++++++++++------- storage/removeBucketOwnerAcl.js | 23 ++++++++++++++------- storage/removeFileOwnerAcl.js | 29 +++++++++++++++++---------- 10 files changed, 167 insertions(+), 76 deletions(-) diff --git a/storage/addBucketDefaultOwnerAcl.js b/storage/addBucketDefaultOwnerAcl.js index 
7e598e6c00..38431f62a0 100644 --- a/storage/addBucketDefaultOwnerAcl.js +++ b/storage/addBucketDefaultOwnerAcl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -38,16 +40,25 @@ function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { const storage = new Storage(); async function addBucketDefaultOwner() { - // Makes the user an owner in the default ACL of the bucket. You can use - // addAllUsers(), addDomain(), addProject(), addGroup(), and - // addAllAuthenticatedUsers() to grant access to different types of entities. - // You can also use "readers" and "writers" to grant different roles. - await storage.bucket(bucketName).acl.default.owners.addUser(userEmail); + try { + // Makes the user an owner in the default ACL of the bucket. You can use + // addAllUsers(), addDomain(), addProject(), addGroup(), and + // addAllAuthenticatedUsers() to grant access to different types of entities. + // You can also use "readers" and "writers" to grant different roles. 
+ await storage.bucket(bucketName).acl.default.owners.addUser(userEmail); - console.log(`Added user ${userEmail} as an owner on bucket ${bucketName}.`); + console.log( + `Added user ${userEmail} as an owner on bucket ${bucketName}.` + ); + } catch (error) { + console.error( + 'Error executing add bucket default owner ACL:', + error.message || error + ); + } } - addBucketDefaultOwner().catch(console.error); + addBucketDefaultOwner(); // [END storage_add_bucket_default_owner] } main(...process.argv.slice(2)); diff --git a/storage/addBucketOwnerAcl.js b/storage/addBucketOwnerAcl.js index 1600756d9d..b3ee2f0844 100644 --- a/storage/addBucketOwnerAcl.js +++ b/storage/addBucketOwnerAcl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -38,16 +40,25 @@ function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { const storage = new Storage(); async function addBucketOwner() { - // Makes the user an owner of the bucket. You can use addAllUsers(), - // addDomain(), addProject(), addGroup(), and addAllAuthenticatedUsers() - // to grant access to different types of entities. You can also use "readers" - // and "writers" to grant different roles. - await storage.bucket(bucketName).acl.owners.addUser(userEmail); + try { + // Makes the user an owner of the bucket. You can use addAllUsers(), + // addDomain(), addProject(), addGroup(), and addAllAuthenticatedUsers() + // to grant access to different types of entities. You can also use "readers" + // and "writers" to grant different roles. 
+ await storage.bucket(bucketName).acl.owners.addUser(userEmail); - console.log(`Added user ${userEmail} as an owner on bucket ${bucketName}.`); + console.log( + `Added user ${userEmail} as an owner on bucket ${bucketName}.` + ); + } catch (error) { + console.error( + 'Error executing add bucket owner ACL:', + error.message || error + ); + } } - addBucketOwner().catch(console.error); + addBucketOwner(); // [END storage_add_bucket_owner] } main(...process.argv.slice(2)); diff --git a/storage/addFileOwnerAcl.js b/storage/addFileOwnerAcl.js index 4029f56923..a908a03e9c 100644 --- a/storage/addFileOwnerAcl.js +++ b/storage/addFileOwnerAcl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -45,15 +47,22 @@ function main( const storage = new Storage(); async function addFileOwner() { - await storage - .bucket(bucketName) - .file(fileName) - .acl.owners.addUser(userEmail); + try { + await storage + .bucket(bucketName) + .file(fileName) + .acl.owners.addUser(userEmail); - console.log(`Added user ${userEmail} as an owner on file ${fileName}.`); + console.log(`Added user ${userEmail} as an owner on file ${fileName}.`); + } catch (error) { + console.error( + 'Error executing add file owner ACL:', + error.message || error + ); + } } - addFileOwner().catch(console.error); + addFileOwner(); // [END storage_add_file_owner] } main(...process.argv.slice(2)); diff --git a/storage/printBucketAcl.js b/storage/printBucketAcl.js index 9937fab68c..c14e682e16 100644 --- a/storage/printBucketAcl.js +++ b/storage/printBucketAcl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -35,14 +37,21 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function printBucketAcl() { - // Gets the ACL for the bucket - const [acls] = await storage.bucket(bucketName).acl.get(); + try { + // Gets the ACL for the bucket + const [acls] = await storage.bucket(bucketName).acl.get(); - acls.forEach(acl => { - console.log(`${acl.role}: ${acl.entity}`); - }); + acls.forEach(acl => { + console.log(`${acl.role}: ${acl.entity}`); + }); + } catch (error) { + console.error( + 'Error executing print bucket ACL:', + error.message || error + ); + } } - printBucketAcl().catch(console.error); + printBucketAcl(); // [END storage_print_bucket_acl] } diff --git a/storage/printBucketAclForUser.js b/storage/printBucketAclForUser.js index 09a6ed7c43..27420478fd 100644 --- a/storage/printBucketAclForUser.js +++ b/storage/printBucketAclForUser.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. 
@@ -38,18 +40,25 @@ function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { const storage = new Storage(); async function printBucketAclForUser() { - const options = { - // Specify the user - entity: `user-${userEmail}`, - }; + try { + const options = { + // Specify the user + entity: `user-${userEmail}`, + }; - // Gets the user's ACL for the bucket - const [aclObject] = await storage.bucket(bucketName).acl.get(options); + // Gets the user's ACL for the bucket + const [aclObject] = await storage.bucket(bucketName).acl.get(options); - console.log(`${aclObject.role}: ${aclObject.entity}`); + console.log(`${aclObject.role}: ${aclObject.entity}`); + } catch (error) { + console.error( + 'Error executing print bucket ACL for user:', + error.message || error + ); + } } - printBucketAclForUser().catch(console.error); + printBucketAclForUser(); // [END storage_print_bucket_acl_for_user] } diff --git a/storage/printFileAcl.js b/storage/printFileAcl.js index 8c14ddebfa..0d9ead9b18 100644 --- a/storage/printFileAcl.js +++ b/storage/printFileAcl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. 
@@ -38,15 +40,19 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function printFileAcl() { - // Gets the ACL for the file - const [acls] = await storage.bucket(bucketName).file(fileName).acl.get(); - - acls.forEach(acl => { - console.log(`${acl.role}: ${acl.entity}`); - }); + try { + // Gets the ACL for the file + const [acls] = await storage.bucket(bucketName).file(fileName).acl.get(); + + acls.forEach(acl => { + console.log(`${acl.role}: ${acl.entity}`); + }); + } catch (error) { + console.error('Error executing print file ACL:', error.message || error); + } } - printFileAcl().catch(console.error); + printFileAcl(); // [END storage_print_file_acl] } main(...process.argv.slice(2)); diff --git a/storage/printFileAclForUser.js b/storage/printFileAclForUser.js index f614749c73..396cb393ba 100644 --- a/storage/printFileAclForUser.js +++ b/storage/printFileAclForUser.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. 
@@ -45,21 +47,28 @@ function main( const storage = new Storage(); async function printFileAclForUser() { - const options = { - // Specify the user - entity: `user-${userEmail}`, - }; + try { + const options = { + // Specify the user + entity: `user-${userEmail}`, + }; - // Gets the user's ACL for the file - const [aclObject] = await storage - .bucket(bucketName) - .file(fileName) - .acl.get(options); + // Gets the user's ACL for the file + const [aclObject] = await storage + .bucket(bucketName) + .file(fileName) + .acl.get(options); - console.log(`${aclObject.role}: ${aclObject.entity}`); + console.log(`${aclObject.role}: ${aclObject.entity}`); + } catch (error) { + console.error( + 'Error executing print file ACL for user:', + error.message || error + ); + } } - printFileAclForUser().catch(console.error); + printFileAclForUser(); // [END storage_print_file_acl_for_user] } main(...process.argv.slice(2)); diff --git a/storage/removeBucketDefaultOwner.js b/storage/removeBucketDefaultOwner.js index 94fb8a9aa9..51087943ff 100644 --- a/storage/removeBucketDefaultOwner.js +++ b/storage/removeBucketDefaultOwner.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -38,15 +40,22 @@ function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { const storage = new Storage(); async function removeBucketDefaultOwner() { - // Removes the user from the access control list of the bucket. You can use - // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and - // deleteAllAuthenticatedUsers() to remove access for different types of entities. 
- await storage.bucket(bucketName).acl.default.owners.deleteUser(userEmail); - - console.log(`Removed user ${userEmail} from bucket ${bucketName}.`); + try { + // Removes the user from the access control list of the bucket. You can use + // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and + // deleteAllAuthenticatedUsers() to remove access for different types of entities. + await storage.bucket(bucketName).acl.default.owners.deleteUser(userEmail); + + console.log(`Removed user ${userEmail} from bucket ${bucketName}.`); + } catch (error) { + console.error( + 'Error executing remove bucket default owner ACL:', + error.message || error + ); + } } - removeBucketDefaultOwner().catch(console.error); + removeBucketDefaultOwner(); // [END storage_remove_bucket_default_owner] } diff --git a/storage/removeBucketOwnerAcl.js b/storage/removeBucketOwnerAcl.js index 5f3e8950bf..2ee60765a8 100644 --- a/storage/removeBucketOwnerAcl.js +++ b/storage/removeBucketOwnerAcl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -38,15 +40,22 @@ function main(bucketName = 'my-bucket', userEmail = 'jdobry@google.com') { const storage = new Storage(); async function removeBucketOwner() { - // Removes the user from the access control list of the bucket. You can use - // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and - // deleteAllAuthenticatedUsers() to remove access for different types of entities. - await storage.bucket(bucketName).acl.owners.deleteUser(userEmail); - - console.log(`Removed user ${userEmail} from bucket ${bucketName}.`); + try { + // Removes the user from the access control list of the bucket. 
You can use + // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and + // deleteAllAuthenticatedUsers() to remove access for different types of entities. + await storage.bucket(bucketName).acl.owners.deleteUser(userEmail); + + console.log(`Removed user ${userEmail} from bucket ${bucketName}.`); + } catch (error) { + console.error( + 'Error executing remove bucket owner ACL:', + error.message || error + ); + } } - removeBucketOwner().catch(console.error); + removeBucketOwner(); // [END storage_remove_bucket_owner] } diff --git a/storage/removeFileOwnerAcl.js b/storage/removeFileOwnerAcl.js index 2a72e1f7e0..ccd2fec08a 100644 --- a/storage/removeFileOwnerAcl.js +++ b/storage/removeFileOwnerAcl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -45,18 +47,25 @@ function main( const storage = new Storage(); async function removeFileOwner() { - // Removes the user from the access control list of the file. You can use - // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and - // deleteAllAuthenticatedUsers() to remove access for different types of entities. - await storage - .bucket(bucketName) - .file(fileName) - .acl.owners.deleteUser(userEmail); - - console.log(`Removed user ${userEmail} from file ${fileName}.`); + try { + // Removes the user from the access control list of the file. You can use + // deleteAllUsers(), deleteDomain(), deleteProject(), deleteGroup(), and + // deleteAllAuthenticatedUsers() to remove access for different types of entities. 
+ await storage + .bucket(bucketName) + .file(fileName) + .acl.owners.deleteUser(userEmail); + + console.log(`Removed user ${userEmail} from file ${fileName}.`); + } catch (error) { + console.error( + 'Error executing remove file owner ACL:', + error.message || error + ); + } } - removeFileOwner().catch(console.error); + removeFileOwner(); // [END storage_remove_file_owner] } From af43faa4ff3454ef0eda4d850fcbe15746959872 Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 17:58:00 +0000 Subject: [PATCH 04/11] refactor(lifecycle): standardize error handling in bucket lifecycle samples --- storage/disableBucketLifecycleManagement.js | 17 +++++++--- storage/enableBucketLifecycleManagement.js | 35 +++++++++++++-------- 2 files changed, 35 insertions(+), 17 deletions(-) diff --git a/storage/disableBucketLifecycleManagement.js b/storage/disableBucketLifecycleManagement.js index 8ef7077e8b..04569982f9 100644 --- a/storage/disableBucketLifecycleManagement.js +++ b/storage/disableBucketLifecycleManagement.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to disable Object Lifecycle Management for * a bucket. 
@@ -34,12 +36,19 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function disableBucketLifecycleManagement() { - await storage.bucket(bucketName).setMetadata({lifecycle: null}); - - console.log(`Lifecycle management is disabled for bucket ${bucketName}`); + try { + await storage.bucket(bucketName).setMetadata({lifecycle: null}); + + console.log(`Lifecycle management is disabled for bucket ${bucketName}`); + } catch (error) { + console.error( + 'Error executing disable bucket lifecycle management:', + error.message || error + ); + } } - disableBucketLifecycleManagement().catch(console.error); + disableBucketLifecycleManagement(); // [END storage_disable_bucket_lifecycle_management] } diff --git a/storage/enableBucketLifecycleManagement.js b/storage/enableBucketLifecycleManagement.js index ce11309860..a3157c93d3 100644 --- a/storage/enableBucketLifecycleManagement.js +++ b/storage/enableBucketLifecycleManagement.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to enable Object Lifecycle Management for * a bucket. 
@@ -34,21 +36,28 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function enableBucketLifecycleManagement() { - const [metadata] = await storage.bucket(bucketName).addLifecycleRule({ - action: { - type: 'Delete', - }, - condition: {age: 100}, - }); - - console.log( - `Lifecycle management is enabled for bucket ${bucketName} and the rules are:` - ); - - console.log(metadata.lifecycle.rule); + try { + const [metadata] = await storage.bucket(bucketName).addLifecycleRule({ + action: { + type: 'Delete', + }, + condition: {age: 100}, + }); + + console.log( + `Lifecycle management is enabled for bucket ${bucketName} and the rules are:` + ); + + console.log(metadata.lifecycle.rule); + } catch (error) { + console.error( + 'Error executing enable bucket lifecycle management:', + error.message || error + ); + } } - enableBucketLifecycleManagement().catch(console.error); + enableBucketLifecycleManagement(); // [END storage_enable_bucket_lifecycle_management] } From 1439c8126704368bd059da169546fbc12add1603 Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 18:50:24 +0000 Subject: [PATCH 05/11] refactor(bucket-lock): standardize error handling and remove dead code in bucket lock samples --- storage/disableDefaultEventBasedHold.js | 21 +++++++++---- storage/enableDefaultEventBasedHold.js | 23 ++++++++++----- storage/getDefaultEventBasedHold.js | 17 +++++++++-- storage/getRetentionPolicy.js | 31 +++++++++++++------- storage/lockRetentionPolicy.js | 39 +++++++++++++++---------- storage/releaseEventBasedHold.js | 37 ++++++++++++++--------- storage/releaseTemporaryHold.js | 37 ++++++++++++++--------- storage/removeRetentionPolicy.js | 27 ++++++++++------- storage/setEventBasedHold.js | 39 +++++++++++++++---------- storage/setRetentionPolicy.js | 23 ++++++++++----- storage/setTemporaryHold.js | 37 ++++++++++++++--------- 11 files changed, 214 insertions(+), 117 deletions(-) diff --git a/storage/disableDefaultEventBasedHold.js 
b/storage/disableDefaultEventBasedHold.js index 09310facd2..53237fe56a 100644 --- a/storage/disableDefaultEventBasedHold.js +++ b/storage/disableDefaultEventBasedHold.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. @@ -35,14 +37,21 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function disableDefaultEventBasedHold() { - // Disables a default event-based hold for a bucket. - await storage.bucket(bucketName).setMetadata({ - defaultEventBasedHold: false, - }); - console.log(`Default event-based hold was disabled for ${bucketName}.`); + try { + // Disables a default event-based hold for a bucket. + await storage.bucket(bucketName).setMetadata({ + defaultEventBasedHold: false, + }); + console.log(`Default event-based hold was disabled for ${bucketName}.`); + } catch (error) { + console.error( + 'Error executing disable default event-based hold:', + error.message || error + ); + } } - disableDefaultEventBasedHold().catch(console.error); + disableDefaultEventBasedHold(); // [END storage_disable_default_event_based_hold] } main(...process.argv.slice(2)); diff --git a/storage/enableDefaultEventBasedHold.js b/storage/enableDefaultEventBasedHold.js index 406970fdd9..16efcfcf94 100644 --- a/storage/enableDefaultEventBasedHold.js +++ b/storage/enableDefaultEventBasedHold.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. 
@@ -36,15 +38,22 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function enableDefaultEventBasedHold() { - // Enables a default event-based hold for the bucket. - await storage.bucket(bucketName).setMetadata({ - defaultEventBasedHold: true, - }); - - console.log(`Default event-based hold was enabled for ${bucketName}.`); + try { + // Enables a default event-based hold for the bucket. + await storage.bucket(bucketName).setMetadata({ + defaultEventBasedHold: true, + }); + + console.log(`Default event-based hold was enabled for ${bucketName}.`); + } catch (error) { + console.error( + 'Error executing enable default event-based hold:', + error.message || error + ); + } } - enableDefaultEventBasedHold().catch(console.error); + enableDefaultEventBasedHold(); // [END storage_enable_default_event_based_hold] } main(...process.argv.slice(2)); diff --git a/storage/getDefaultEventBasedHold.js b/storage/getDefaultEventBasedHold.js index daf9d42252..b9b3837cb5 100644 --- a/storage/getDefaultEventBasedHold.js +++ b/storage/getDefaultEventBasedHold.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. 
@@ -35,11 +37,20 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function getDefaultEventBasedHold() { - const [metadata] = await storage.bucket(bucketName).getMetadata(); - console.log(`Default event-based hold: ${metadata.defaultEventBasedHold}.`); + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + console.log( + `Default event-based hold: ${metadata.defaultEventBasedHold}.` + ); + } catch (error) { + console.error( + 'Error executing get default event-based hold:', + error.message || error + ); + } } - getDefaultEventBasedHold().catch(console.error); + getDefaultEventBasedHold(); // [END storage_get_default_event_based_hold] } main(...process.argv.slice(2)); diff --git a/storage/getRetentionPolicy.js b/storage/getRetentionPolicy.js index 0b74b7266d..6df1fdec3f 100644 --- a/storage/getRetentionPolicy.js +++ b/storage/getRetentionPolicy.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. 
@@ -35,21 +37,28 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function getRetentionPolicy() { - const [metadata] = await storage.bucket(bucketName).getMetadata(); - if (metadata.retentionPolicy) { - const retentionPolicy = metadata.retentionPolicy; - console.log('A retention policy exists!'); - console.log(`Period: ${retentionPolicy.retentionPeriod}`); - console.log(`Effective time: ${retentionPolicy.effectiveTime}`); - if (retentionPolicy.isLocked) { - console.log('Policy is locked'); - } else { - console.log('Policy is unlocked'); + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + if (metadata.retentionPolicy) { + const retentionPolicy = metadata.retentionPolicy; + console.log('A retention policy exists!'); + console.log(`Period: ${retentionPolicy.retentionPeriod}`); + console.log(`Effective time: ${retentionPolicy.effectiveTime}`); + if (retentionPolicy.isLocked) { + console.log('Policy is locked'); + } else { + console.log('Policy is unlocked'); + } } + } catch (error) { + console.error( + 'Error executing get bucket retention policy:', + error.message || error + ); } } - getRetentionPolicy().catch(console.error); + getRetentionPolicy(); // [END storage_get_retention_policy] } main(...process.argv.slice(2)); diff --git a/storage/lockRetentionPolicy.js b/storage/lockRetentionPolicy.js index b259fdf7ca..7323803579 100644 --- a/storage/lockRetentionPolicy.js +++ b/storage/lockRetentionPolicy.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. 
@@ -35,24 +37,29 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function lockRetentionPolicy() { - // Gets the current metageneration value for the bucket, required by - // lock_retention_policy - const [unlockedMetadata] = await storage.bucket(bucketName).getMetadata(); - - // Warning: Once a retention policy is locked, it cannot be unlocked. The - // retention period can only be increased - const [lockedMetadata] = await storage - .bucket(bucketName) - .lock(unlockedMetadata.metageneration); - console.log(`Retention policy for ${bucketName} is now locked`); - console.log( - `Retention policy effective as of ${lockedMetadata.retentionPolicy.effectiveTime}` - ); - - return lockedMetadata; + try { + // Gets the current metageneration value for the bucket, required by + // lock_retention_policy + const [unlockedMetadata] = await storage.bucket(bucketName).getMetadata(); + + // Warning: Once a retention policy is locked, it cannot be unlocked. The + // retention period can only be increased + const [lockedMetadata] = await storage + .bucket(bucketName) + .lock(unlockedMetadata.metageneration); + console.log(`Retention policy for ${bucketName} is now locked`); + console.log( + `Retention policy effective as of ${lockedMetadata.retentionPolicy.effectiveTime}` + ); + } catch (error) { + console.error( + 'Error executing lock bucket retention policy:', + error.message || error + ); + } } - lockRetentionPolicy().catch(console.error); + lockRetentionPolicy(); // [END storage_lock_retention_policy] } main(...process.argv.slice(2)); diff --git a/storage/releaseEventBasedHold.js b/storage/releaseEventBasedHold.js index cbce58d4be..caeb22a2b2 100644 --- a/storage/releaseEventBasedHold.js +++ b/storage/releaseEventBasedHold.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. @@ -42,23 +44,30 @@ function main( const storage = new Storage(); async function releaseEventBasedHold() { - // Optional: set a meta-generation-match precondition to avoid potential race - // conditions and data corruptions. The request to set metadata is aborted if the - // object's metageneration number does not match your precondition. - const options = { - ifMetagenerationMatch: metagenerationMatchPrecondition, - }; + try { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; - await storage.bucket(bucketName).file(fileName).setMetadata( - { - eventBasedHold: false, - }, - options - ); - console.log(`Event-based hold was released for ${fileName}.`); + await storage.bucket(bucketName).file(fileName).setMetadata( + { + eventBasedHold: false, + }, + options + ); + console.log(`Event-based hold was released for ${fileName}.`); + } catch (error) { + console.error( + 'Error executing release event-based hold:', + error.message || error + ); + } } - releaseEventBasedHold().catch(console.error); + releaseEventBasedHold(); // [END storage_release_event_based_hold] } main(...process.argv.slice(2)); diff --git a/storage/releaseTemporaryHold.js b/storage/releaseTemporaryHold.js index 91f6c15b30..585d295d9f 100644 --- a/storage/releaseTemporaryHold.js +++ b/storage/releaseTemporaryHold.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. 
@@ -42,23 +44,30 @@ function main( const storage = new Storage(); async function releaseTemporaryHold() { - // Optional: set a meta-generation-match precondition to avoid potential race - // conditions and data corruptions. The request to set metadata is aborted if the - // object's metageneration number does not match your precondition. - const options = { - ifMetagenerationMatch: metagenerationMatchPrecondition, - }; + try { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; - await storage.bucket(bucketName).file(fileName).setMetadata( - { - temporaryHold: false, - }, - options - ); - console.log(`Temporary hold was released for ${fileName}.`); + await storage.bucket(bucketName).file(fileName).setMetadata( + { + temporaryHold: false, + }, + options + ); + console.log(`Temporary hold was released for ${fileName}.`); + } catch (error) { + console.error( + 'Error executing release temporary hold:', + error.message || error + ); + } } - releaseTemporaryHold().catch(console.error); + releaseTemporaryHold(); // [END storage_release_temporary_hold] } main(...process.argv.slice(2)); diff --git a/storage/removeRetentionPolicy.js b/storage/removeRetentionPolicy.js index f7ec28c7d0..05938e18a9 100644 --- a/storage/removeRetentionPolicy.js +++ b/storage/removeRetentionPolicy.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. 
@@ -35,20 +37,25 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function removeRetentionPolicy() { - const [metadata] = await storage.bucket(bucketName).getMetadata(); - if (metadata.retentionPolicy && metadata.retentionPolicy.isLocked) { - console.log( - 'Unable to remove retention period as retention policy is locked.' + try { + const [metadata] = await storage.bucket(bucketName).getMetadata(); + if (metadata.retentionPolicy && metadata.retentionPolicy.isLocked) { + console.log( + 'Unable to remove retention period as retention policy is locked.' + ); + } else { + await storage.bucket(bucketName).removeRetentionPeriod(); + console.log(`Removed bucket ${bucketName} retention policy.`); + } + } catch (error) { + console.error( + 'Error executing remove bucket retention policy:', + error.message || error ); - return null; - } else { - const results = await storage.bucket(bucketName).removeRetentionPeriod(); - console.log(`Removed bucket ${bucketName} retention policy.`); - return results; } } - removeRetentionPolicy().catch(console.error); + removeRetentionPolicy(); // [END storage_remove_retention_policy] } main(...process.argv.slice(2)); diff --git a/storage/setEventBasedHold.js b/storage/setEventBasedHold.js index cda14b4c3a..50c73a424b 100644 --- a/storage/setEventBasedHold.js +++ b/storage/setEventBasedHold.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. @@ -42,24 +44,31 @@ function main( const storage = new Storage(); async function setEventBasedHold() { - // Optional: set a meta-generation-match precondition to avoid potential race - // conditions and data corruptions. The request to set metadata is aborted if the - // object's metageneration number does not match your precondition. 
- const options = { - ifMetagenerationMatch: metagenerationMatchPrecondition, - }; + try { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; - // Set event-based hold - await storage.bucket(bucketName).file(fileName).setMetadata( - { - eventBasedHold: true, - }, - options - ); - console.log(`Event-based hold was set for ${fileName}.`); + // Set event-based hold + await storage.bucket(bucketName).file(fileName).setMetadata( + { + eventBasedHold: true, + }, + options + ); + console.log(`Event-based hold was set for ${fileName}.`); + } catch (error) { + console.error( + 'Error executing set event-based hold:', + error.message || error + ); + } } - setEventBasedHold().catch(console.error); + setEventBasedHold(); // [END storage_set_event_based_hold] } main(...process.argv.slice(2)); diff --git a/storage/setRetentionPolicy.js b/storage/setRetentionPolicy.js index 21d4d22010..cb175a8f25 100644 --- a/storage/setRetentionPolicy.js +++ b/storage/setRetentionPolicy.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. 
@@ -38,15 +40,22 @@ function main(bucketName = 'my-bucket', retentionPeriod = 5) { const storage = new Storage(); async function setRetentionPolicy() { - const [metadata] = await storage - .bucket(bucketName) - .setRetentionPeriod(retentionPeriod); - console.log( - `Bucket ${bucketName} retention period set for ${metadata.retentionPolicy.retentionPeriod} seconds.` - ); + try { + const [metadata] = await storage + .bucket(bucketName) + .setRetentionPeriod(retentionPeriod); + console.log( + `Bucket ${bucketName} retention period set for ${metadata.retentionPolicy.retentionPeriod} seconds.` + ); + } catch (error) { + console.error( + 'Error executing set bucket retention policy:', + error.message || error + ); + } } - setRetentionPolicy().catch(console.error); + setRetentionPolicy(); // [END storage_set_retention_policy] } main(...process.argv.slice(2)); diff --git a/storage/setTemporaryHold.js b/storage/setTemporaryHold.js index db503748a0..573cad7c28 100644 --- a/storage/setTemporaryHold.js +++ b/storage/setTemporaryHold.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to use Bucket Lock operations on buckets * and objects using the Google Cloud Storage API. @@ -42,23 +44,30 @@ function main( const storage = new Storage(); async function setTemporaryHold() { - // Optional: set a meta-generation-match precondition to avoid potential race - // conditions and data corruptions. The request to set metadata is aborted if the - // object's metageneration number does not match your precondition. - const options = { - ifMetagenerationMatch: metagenerationMatchPrecondition, - }; + try { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. 
+ const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; - await storage.bucket(bucketName).file(fileName).setMetadata( - { - temporaryHold: true, - }, - options - ); - console.log(`Temporary hold was set for ${fileName}.`); + await storage.bucket(bucketName).file(fileName).setMetadata( + { + temporaryHold: true, + }, + options + ); + console.log(`Temporary hold was set for ${fileName}.`); + } catch (error) { + console.error( + 'Error executing set temporary hold:', + error.message || error + ); + } } - setTemporaryHold().catch(console.error); + setTemporaryHold(); // [END storage_set_temporary_hold] } main(...process.argv.slice(2)); From 969c66b7421562781da6fe43462090505baa2960 Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 19:20:18 +0000 Subject: [PATCH 06/11] refactor(encryption): standardize error handling in encryption samples --- storage/changeFileCSEKToCMEK.js | 55 +++++++++++++++++--------------- storage/downloadEncryptedFile.js | 33 ++++++++++++------- storage/generateEncryptionKey.js | 31 +++++++++++------- storage/rotateEncryptionKey.js | 47 ++++++++++++++++----------- storage/uploadEncryptedFile.js | 44 +++++++++++++++---------- 5 files changed, 125 insertions(+), 85 deletions(-) diff --git a/storage/changeFileCSEKToCMEK.js b/storage/changeFileCSEKToCMEK.js index 11c886ae5f..9b9902455d 100644 --- a/storage/changeFileCSEKToCMEK.js +++ b/storage/changeFileCSEKToCMEK.js @@ -50,37 +50,40 @@ function main( const storage = new Storage(); async function changeFileCSEKToCMEK() { - const rotateEncryptionKeyOptions = { - kmsKeyName, - // Optional: set a generation-match precondition to avoid potential race - // conditions and data corruptions. The request to copy is aborted if the - // object's generation number does not match your precondition. 
- preconditionOpts: { - ifGenerationMatch: generationMatchPrecondition, - }, - }; + try { + const rotateEncryptionKeyOptions = { + kmsKeyName, + // Optional: set a generation-match precondition to avoid potential race + // conditions and data corruptions. The request to copy is aborted if the + // object's generation number does not match your precondition. + preconditionOpts: { + ifGenerationMatch: generationMatchPrecondition, + }, + }; - console.log(rotateEncryptionKeyOptions); + console.log(rotateEncryptionKeyOptions); - await storage - .bucket(bucketName) - .file(fileName, { - encryptionKey: Buffer.from(encryptionKey, 'base64'), - }) - .rotateEncryptionKey({ - rotateEncryptionKeyOptions, - }); + await storage + .bucket(bucketName) + .file(fileName, { + encryptionKey: Buffer.from(encryptionKey, 'base64'), + }) + .rotateEncryptionKey({ + rotateEncryptionKeyOptions, + }); - console.log( - `file ${fileName} in bucket ${bucketName} is now managed by KMS key ${kmsKeyName} instead of customer-supplied encryption key` - ); + console.log( + `file ${fileName} in bucket ${bucketName} is now managed by KMS key ${kmsKeyName} instead of customer-supplied encryption key` + ); + } catch (error) { + console.error( + 'Error executing change file CSEK to CMEK:', + error.message || error + ); + } } - changeFileCSEKToCMEK().catch(console.error); + changeFileCSEKToCMEK(); // [END storage_object_csek_to_cmek] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); main(...process.argv.slice(2)); diff --git a/storage/downloadEncryptedFile.js b/storage/downloadEncryptedFile.js index 76ffa1b969..ebf3176dff 100644 --- a/storage/downloadEncryptedFile.js +++ b/storage/downloadEncryptedFile.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to perform basic operations on encrypted * files with the Google Cloud Storage API. @@ -52,22 +54,29 @@ function main( const storage = new Storage(); async function downloadEncryptedFile() { - const options = { - destination: destFileName, - }; + try { + const options = { + destination: destFileName, + }; - // Decrypts and downloads the file. This can only be done with the key used - // to encrypt and upload the file. - await storage - .bucket(bucketName) - .file(srcFileName) - .setEncryptionKey(Buffer.from(encryptionKey, 'base64')) - .download(options); + // Decrypts and downloads the file. This can only be done with the key used + // to encrypt and upload the file. + await storage + .bucket(bucketName) + .file(srcFileName) + .setEncryptionKey(Buffer.from(encryptionKey, 'base64')) + .download(options); - console.log(`File ${srcFileName} downloaded to ${destFileName}`); + console.log(`File ${srcFileName} downloaded to ${destFileName}`); + } catch (error) { + console.error( + 'Error executing download encrypted file:', + error.message || error + ); + } } - downloadEncryptedFile().catch(console.error); + downloadEncryptedFile(); // [END storage_download_encrypted_file] } main(...process.argv.slice(2)); diff --git a/storage/generateEncryptionKey.js b/storage/generateEncryptionKey.js index 2e3c331f65..5b41d905b8 100644 --- a/storage/generateEncryptionKey.js +++ b/storage/generateEncryptionKey.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on encrypted * files with the Google Cloud Storage API. @@ -25,17 +27,24 @@ function main() { const crypto = require('crypto'); function generateEncryptionKey() { - /** - * Generates a 256 bit (32 byte) AES encryption key and prints the base64 - * representation. - * - * This is included for demonstration purposes. 
You should generate your own - * key. Please remember that encryption keys should be handled with a - * comprehensive security policy. - */ - const buffer = crypto.randomBytes(32); - const encodedKey = buffer.toString('base64'); - console.log(`Base 64 encoded encryption key: ${encodedKey}`); + try { + /** + * Generates a 256 bit (32 byte) AES encryption key and prints the base64 + * representation. + * + * This is included for demonstration purposes. You should generate your own + * key. Please remember that encryption keys should be handled with a + * comprehensive security policy. + */ + const buffer = crypto.randomBytes(32); + const encodedKey = buffer.toString('base64'); + console.log(`Base 64 encoded encryption key: ${encodedKey}`); + } catch (error) { + console.error( + 'Error executing generate encryption key:', + error.message || error + ); + } } generateEncryptionKey(); // [END storage_generate_encryption_key] diff --git a/storage/rotateEncryptionKey.js b/storage/rotateEncryptionKey.js index 9bcbd45011..7de0e8e292 100644 --- a/storage/rotateEncryptionKey.js +++ b/storage/rotateEncryptionKey.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on encrypted * files with the Google Cloud Storage API. @@ -54,29 +56,36 @@ function main( const storage = new Storage(); async function rotateEncryptionKey() { - const rotateEncryptionKeyOptions = { - encryptionKey: Buffer.from(newKey, 'base64'), + try { + const rotateEncryptionKeyOptions = { + encryptionKey: Buffer.from(newKey, 'base64'), - // Optional: set a generation-match precondition to avoid potential race - // conditions and data corruptions. The request to copy is aborted if the - // object's generation number does not match your precondition. 
- preconditionOpts: { - ifGenerationMatch: generationMatchPrecondition, - }, - }; - await storage - .bucket(bucketName) - .file(fileName, { - encryptionKey: Buffer.from(oldKey, 'base64'), - }) - .rotateEncryptionKey({ - rotateEncryptionKeyOptions, - }); + // Optional: set a generation-match precondition to avoid potential race + // conditions and data corruptions. The request to copy is aborted if the + // object's generation number does not match your precondition. + preconditionOpts: { + ifGenerationMatch: generationMatchPrecondition, + }, + }; + await storage + .bucket(bucketName) + .file(fileName, { + encryptionKey: Buffer.from(oldKey, 'base64'), + }) + .rotateEncryptionKey({ + rotateEncryptionKeyOptions, + }); - console.log('Encryption key rotated successfully'); + console.log('Encryption key rotated successfully'); + } catch (error) { + console.error( + 'Error executing rotate encryption key:', + error.message || error + ); + } } - rotateEncryptionKey().catch(console.error); + rotateEncryptionKey(); // [END storage_rotate_encryption_key] } main(...process.argv.slice(2)); diff --git a/storage/uploadEncryptedFile.js b/storage/uploadEncryptedFile.js index 8dd156cc02..4f26d889c7 100644 --- a/storage/uploadEncryptedFile.js +++ b/storage/uploadEncryptedFile.js @@ -11,6 +11,9 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. + +'use strict'; + const path = require('path'); function main( @@ -43,28 +46,35 @@ function main( const storage = new Storage(); async function uploadEncryptedFile() { - const options = { - destination: destFileName, - encryptionKey: Buffer.from(key, 'base64'), + try { + const options = { + destination: destFileName, + encryptionKey: Buffer.from(key, 'base64'), - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. 
The request to upload is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, - }; + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to upload is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; - await storage.bucket(bucketName).upload(filePath, options); + await storage.bucket(bucketName).upload(filePath, options); - console.log( - `File ${filePath} uploaded to gs://${bucketName}/${destFileName}` - ); + console.log( + `File ${filePath} uploaded to gs://${bucketName}/${destFileName}` + ); + } catch (error) { + console.error( + 'Error executing upload encrypted file:', + error.message || error + ); + } } - uploadEncryptedFile().catch(console.error); + uploadEncryptedFile(); // [END storage_upload_encrypted_file] } main(...process.argv.slice(2)); From d3694dc0ad43728130c21133a16367ac70d52d9b Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 21:24:18 +0000 Subject: [PATCH 07/11] refactor(files): standardize error handling and remove dead code in file and versioning samples --- storage/composeFile.js | 42 ++++----- storage/configureRetries.js | 17 ++-- storage/copyFile.js | 50 ++++++----- storage/copyOldVersionOfFile.js | 53 ++++++----- storage/deleteFile.js | 12 ++- storage/deleteOldVersionOfFile.js | 
31 ++++--- storage/downloadByteRange.js | 34 ++++--- storage/downloadFile.js | 22 +++-- storage/downloadIntoMemory.js | 29 +++--- storage/downloadPublicFile.js | 25 ++++-- storage/fileChangeStorageClass.js | 44 +++++----- storage/fileSetMetadata.js | 73 ++++++++------- storage/generateSignedUrl.js | 39 ++++---- storage/generateV4ReadSignedUrl.js | 41 +++++---- storage/generateV4SignedPolicy.js | 55 +++++++----- storage/generateV4UploadSignedUrl.js | 51 ++++++----- storage/getMetadata.js | 88 ++++++++++--------- storage/listFiles.js | 20 +++-- storage/listFilesByPrefix.js | 73 ++++++++------- storage/listFilesPaginate.js | 40 ++++++--- storage/listFilesWithOldVersions.js | 25 ++++-- storage/listSoftDeletedObjectVersions.js | 35 +++++--- storage/listSoftDeletedObjects.js | 31 ++++--- storage/makePublic.js | 12 ++- storage/moveFile.js | 48 +++++----- storage/moveFileAtomic.js | 51 ++++++----- storage/renameFile.js | 21 +++-- storage/restoreSoftDeletedObject.js | 27 ++++-- storage/setObjectRetentionPolicy.js | 78 ++++++++-------- storage/streamFileDownload.js | 39 ++++---- storage/streamFileUpload.js | 17 +++- storage/uploadDirectory.js | 45 ++++++---- storage/uploadFile.js | 36 ++++---- storage/uploadFileWithKmsKey.js | 35 +++++--- storage/uploadFromMemory.js | 26 +++--- storage/uploadWithoutAuthentication.js | 58 ++++++------ .../uploadWithoutAuthenticationSignedUrl.js | 72 ++++++++------- 37 files changed, 879 insertions(+), 616 deletions(-) diff --git a/storage/composeFile.js b/storage/composeFile.js index 183385e951..817223dd15 100644 --- a/storage/composeFile.js +++ b/storage/composeFile.js @@ -49,31 +49,31 @@ function main( const storage = new Storage(); async function composeFile() { - const bucket = storage.bucket(bucketName); - const sources = [firstFileName, secondFileName]; + try { + const bucket = storage.bucket(bucketName); + const sources = [firstFileName, secondFileName]; - // Optional: - // Set a generation-match precondition to avoid potential race 
conditions - // and data corruptions. The request to compose is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - const combineOptions = { - ifGenerationMatch: destinationGenerationMatchPrecondition, - }; - await bucket.combine(sources, destinationFileName, combineOptions); + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to compose is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ const combineOptions = { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }; + await bucket.combine(sources, destinationFileName, combineOptions); - console.log( - `New composite file ${destinationFileName} was created by combining ${firstFileName} and ${secondFileName}` - ); + console.log( + `New composite file ${destinationFileName} was created by combining ${firstFileName} and ${secondFileName}` + ); + } catch (error) { + console.error('Error executing compose file:', error.message || error); + } } - composeFile().catch(console.error); + composeFile(); // [END storage_compose_file] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); main(...process.argv.slice(2)); diff --git a/storage/configureRetries.js b/storage/configureRetries.js index 201fda22a0..245a14b9ad 100644 --- a/storage/configureRetries.js +++ b/storage/configureRetries.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + const {IdempotencyStrategy} = require('@google-cloud/storage'); /** @@ -76,15 +78,18 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { ); async function deleteFileWithCustomizedRetrySetting() { - await storage.bucket(bucketName).file(fileName).delete(); - console.log(`File ${fileName} deleted with a customized retry strategy.`); + try { + await storage.bucket(bucketName).file(fileName).delete(); + console.log(`File ${fileName} deleted with a customized retry strategy.`); + } catch (error) { + console.error( + 'Error executing delete file with customized retry setting:', + error.message || error + ); + } } deleteFileWithCustomizedRetrySetting(); // [END storage_configure_retries] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); main(...process.argv.slice(2)); diff --git a/storage/copyFile.js b/storage/copyFile.js index bc41947d8a..e81b914c2f 100644 --- a/storage/copyFile.js +++ b/storage/copyFile.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -50,33 +52,37 @@ function main( const storage = new Storage(); async function copyFile() { - const copyDestination = storage.bucket(destBucketName).file(destFileName); + try { + const copyDestination = storage.bucket(destBucketName).file(destFileName); - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. The request to copy is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. 
- const copyOptions = { - preconditionOpts: { - ifGenerationMatch: destinationGenerationMatchPrecondition, - }, - }; + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + const copyOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; - // Copies the file to the other bucket - await storage - .bucket(srcBucketName) - .file(srcFilename) - .copy(copyDestination, copyOptions); + // Copies the file to the other bucket + await storage + .bucket(srcBucketName) + .file(srcFilename) + .copy(copyDestination, copyOptions); - console.log( - `gs://${srcBucketName}/${srcFilename} copied to gs://${destBucketName}/${destFileName}` - ); + console.log( + `gs://${srcBucketName}/${srcFilename} copied to gs://${destBucketName}/${destFileName}` + ); + } catch (error) { + console.error('Error executing copy file:', error.message || error); + } } - copyFile().catch(console.error); + copyFile(); // [END storage_copy_file] } main(...process.argv.slice(2)); diff --git a/storage/copyOldVersionOfFile.js b/storage/copyOldVersionOfFile.js index 780fe0f1a9..9353873d62 100644 --- a/storage/copyOldVersionOfFile.js +++ b/storage/copyOldVersionOfFile.js @@ -53,34 +53,41 @@ function main( const storage = new Storage(); async function copyOldVersionOfFile() { - // Copies the file to the other bucket + try { + // Copies the file to the other bucket - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. 
The request to copy is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - const copyOptions = { - preconditionOpts: { - ifGenerationMatch: destinationGenerationMatchPrecondition, - }, - }; + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + const copyOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; - await storage - .bucket(srcBucketName) - .file(srcFilename, { - generation, - }) - .copy(storage.bucket(destBucketName).file(destFileName), copyOptions); + await storage + .bucket(srcBucketName) + .file(srcFilename, { + generation, + }) + .copy(storage.bucket(destBucketName).file(destFileName), copyOptions); - console.log( - `Generation ${generation} of file ${srcFilename} in bucket ${srcBucketName} was copied to ${destFileName} in bucket ${destBucketName}` - ); + console.log( + `Generation ${generation} of file ${srcFilename} in bucket ${srcBucketName} was copied to ${destFileName} in bucket ${destBucketName}` + ); + } catch (error) { + console.error( + 'Error executing copy old version of file:', + error.message || error + ); + } } - copyOldVersionOfFile().catch(console.error); + copyOldVersionOfFile(); // [END storage_copy_file_archived_generation] } main(...process.argv.slice(2)); diff --git a/storage/deleteFile.js b/storage/deleteFile.js 
index 4747d3a322..9e9c702cb2 100644 --- a/storage/deleteFile.js +++ b/storage/deleteFile.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -52,12 +54,16 @@ function main( ifGenerationMatch: generationMatchPrecondition, }; async function deleteFile() { - await storage.bucket(bucketName).file(fileName).delete(deleteOptions); + try { + await storage.bucket(bucketName).file(fileName).delete(deleteOptions); - console.log(`gs://${bucketName}/${fileName} deleted`); + console.log(`gs://${bucketName}/${fileName} deleted`); + } catch (error) { + console.error('Error executing delete file:', error.message || error); + } } - deleteFile().catch(console.error); + deleteFile(); // [END storage_delete_file] } main(...process.argv.slice(2)); diff --git a/storage/deleteOldVersionOfFile.js b/storage/deleteOldVersionOfFile.js index 39434ecd14..45a430c6b6 100644 --- a/storage/deleteOldVersionOfFile.js +++ b/storage/deleteOldVersionOfFile.js @@ -40,20 +40,27 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt', generation = 1) { const storage = new Storage(); async function deleteOldVersionOfFile() { - // Deletes the file from the bucket with given version - await storage - .bucket(bucketName) - .file(fileName, { - generation, - }) - .delete(); - - console.log( - `Generation ${generation} of file ${fileName} was deleted from ${bucketName}` - ); + try { + // Deletes the file from the bucket with given version + await storage + .bucket(bucketName) + .file(fileName, { + generation, + }) + .delete(); + + console.log( + `Generation ${generation} of file ${fileName} was deleted from ${bucketName}` + ); + } catch (error) { + console.error( + 'Error executing delete old version of file:', + error.message || error + ); + } } - deleteOldVersionOfFile().catch(console.error); + 
deleteOldVersionOfFile(); // [END storage_delete_file_archived_generation] } main(...process.argv.slice(2)); diff --git a/storage/downloadByteRange.js b/storage/downloadByteRange.js index bace089b39..1bb286f970 100644 --- a/storage/downloadByteRange.js +++ b/storage/downloadByteRange.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + /** * This application demonstrates how to perform basic operations on buckets with * the Google Cloud Storage API. @@ -57,25 +59,29 @@ function main( const storage = new Storage(); async function downloadByteRange() { - const options = { - destination: destFileName, - start: startByte, - end: endByte, - }; + try { + const options = { + destination: destFileName, + start: startByte, + end: endByte, + }; - // Downloads the file from the starting byte to the ending byte specified in options - await storage.bucket(bucketName).file(fileName).download(options); + // Downloads the file from the starting byte to the ending byte specified in options + await storage.bucket(bucketName).file(fileName).download(options); - console.log( - `gs://${bucketName}/${fileName} downloaded to ${destFileName} from byte ${startByte} to byte ${endByte}.` - ); + console.log( + `gs://${bucketName}/${fileName} downloaded to ${destFileName} from byte ${startByte} to byte ${endByte}.` + ); + } catch (error) { + console.error( + 'Error executing download byte range:', + error.message || error + ); + } } downloadByteRange(); // [END storage_download_byte_range] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); + main(...process.argv.slice(2)); diff --git a/storage/downloadFile.js b/storage/downloadFile.js index 3ea9d8f314..1b9ccf3026 100644 --- a/storage/downloadFile.js +++ b/storage/downloadFile.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -47,16 +49,20 @@ function main( const storage = new Storage(); async function downloadFile() { - const options = { - destination: destFileName, - }; + try { + const options = { + destination: destFileName, + }; - // Downloads the file - await storage.bucket(bucketName).file(fileName).download(options); + // Downloads the file + await storage.bucket(bucketName).file(fileName).download(options); - console.log( - `gs://${bucketName}/${fileName} downloaded to ${destFileName}.` - ); + console.log( + `gs://${bucketName}/${fileName} downloaded to ${destFileName}.` + ); + } catch (error) { + console.error('Error executing download file:', error.message || error); + } } downloadFile().catch(console.error); diff --git a/storage/downloadIntoMemory.js b/storage/downloadIntoMemory.js index c80fd9a863..1f69fc161a 100644 --- a/storage/downloadIntoMemory.js +++ b/storage/downloadIntoMemory.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -38,19 +40,26 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function downloadIntoMemory() { - // Downloads the file into a buffer in memory. - const contents = await storage.bucket(bucketName).file(fileName).download(); + try { + // Downloads the file into a buffer in memory. 
+ const contents = await storage + .bucket(bucketName) + .file(fileName) + .download(); - console.log( - `Contents of gs://${bucketName}/${fileName} are ${contents.toString()}.` - ); + console.log( + `Contents of gs://${bucketName}/${fileName} are ${contents.toString()}.` + ); + } catch (error) { + console.error( + 'Error executing file download into memory:', + error.message || error + ); + } } - downloadIntoMemory().catch(console.error); + downloadIntoMemory(); // [END storage_file_download_into_memory] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); + main(...process.argv.slice(2)); diff --git a/storage/downloadPublicFile.js b/storage/downloadPublicFile.js index 426b9a7fbd..ba80658ed2 100644 --- a/storage/downloadPublicFile.js +++ b/storage/downloadPublicFile.js @@ -47,19 +47,26 @@ function main( const storage = new Storage(); async function downloadPublicFile() { - const options = { - destination: destFileName, - }; + try { + const options = { + destination: destFileName, + }; - // Download public file. - await storage.bucket(bucketName).file(srcFileName).download(options); + // Download public file. 
+ await storage.bucket(bucketName).file(srcFileName).download(options); - console.log( - `Downloaded public file ${srcFileName} from bucket name ${bucketName} to ${destFileName}` - ); + console.log( + `Downloaded public file ${srcFileName} from bucket name ${bucketName} to ${destFileName}` + ); + } catch (error) { + console.error( + 'Error executing download public file:', + error.message || error + ); + } } - downloadPublicFile().catch(console.error); + downloadPublicFile(); // [END storage_download_public_file] } main(...process.argv.slice(2)); diff --git a/storage/fileChangeStorageClass.js b/storage/fileChangeStorageClass.js index 4677c2a900..2aad38dac5 100644 --- a/storage/fileChangeStorageClass.js +++ b/storage/fileChangeStorageClass.js @@ -47,31 +47,33 @@ function main( // const storageClass = 'coldline'; async function fileChangeStorageClass() { - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. The request to copy is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - const setStorageClassOptions = { - ifGenerationMatch: generationMatchPrecondition, - }; + try { + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ const setStorageClassOptions = { + ifGenerationMatch: generationMatchPrecondition, + }; - await storage - .bucket(bucketName) - .file(fileName) - .setStorageClass(storageClass, setStorageClassOptions); + await storage + .bucket(bucketName) + .file(fileName) + .setStorageClass(storageClass, setStorageClassOptions); - console.log(`${fileName} has been set to ${storageClass}`); + console.log(`${fileName} has been set to ${storageClass}`); + } catch (error) { + console.error( + 'Error executing file change storage class:', + error.message || error + ); + } } - fileChangeStorageClass().catch(console.error); + fileChangeStorageClass(); // [END storage_change_file_storage_class] } - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); main(...process.argv.slice(2)); diff --git a/storage/fileSetMetadata.js b/storage/fileSetMetadata.js index d7f0adf13f..4e8111abbb 100644 --- a/storage/fileSetMetadata.js +++ b/storage/fileSetMetadata.js @@ -41,45 +41,52 @@ function main( // const fileName = 'your-file-name'; async function setFileMetadata() { - // Optional: set a meta-generation-match precondition to avoid potential race - // conditions and data corruptions. The request to set metadata is aborted if the - // object's metageneration number does not match your precondition. - const options = { - ifMetagenerationMatch: metagenerationMatchPrecondition, - }; + try { + // Optional: set a meta-generation-match precondition to avoid potential race + // conditions and data corruptions. The request to set metadata is aborted if the + // object's metageneration number does not match your precondition. + const options = { + ifMetagenerationMatch: metagenerationMatchPrecondition, + }; - // Set file metadata. - const [metadata] = await storage - .bucket(bucketName) - .file(fileName) - .setMetadata( - { - // Predefined metadata for server e.g. 
'cacheControl', 'contentDisposition', - // 'contentEncoding', 'contentLanguage', 'contentType' - contentDisposition: - 'attachment; filename*=utf-8\'\'"anotherImage.jpg"', - contentType: 'image/jpeg', + // Set file metadata. + const [metadata] = await storage + .bucket(bucketName) + .file(fileName) + .setMetadata( + { + // Predefined metadata for server e.g. 'cacheControl', 'contentDisposition', + // 'contentEncoding', 'contentLanguage', 'contentType' + contentDisposition: + 'attachment; filename*=utf-8\'\'"anotherImage.jpg"', + contentType: 'image/jpeg', - // A note or actionable items for user e.g. uniqueId, object description, - // or other useful information. - metadata: { - description: 'file description...', - modified: '1900-01-01', + // A note or actionable items for user e.g. uniqueId, object description, + // or other useful information. + metadata: { + description: 'file description...', + modified: '1900-01-01', + }, }, - }, - options - ); + options + ); - console.log( - 'Updated metadata for object', - fileName, - 'in bucket ', - bucketName - ); - console.log(metadata); + console.log( + 'Updated metadata for object', + fileName, + 'in bucket ', + bucketName + ); + console.log(metadata); + } catch (error) { + console.error( + 'Error executing set file metadata:', + error.message || error + ); + } } - setFileMetadata().catch(console.error); + setFileMetadata(); // [END storage_set_metadata] } diff --git a/storage/generateSignedUrl.js b/storage/generateSignedUrl.js index 3455d51bb3..68395acf44 100644 --- a/storage/generateSignedUrl.js +++ b/storage/generateSignedUrl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -38,23 +40,30 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function generateSignedUrl() { - // These options will allow temporary read access to the file - const options = { - version: 'v2', // defaults to 'v2' if missing. - action: 'read', - expires: Date.now() + 1000 * 60 * 60, // one hour - }; - - // Get a v2 signed URL for the file - const [url] = await storage - .bucket(bucketName) - .file(fileName) - .getSignedUrl(options); - - console.log(`The signed url for ${fileName} is ${url}.`); + try { + // These options will allow temporary read access to the file + const options = { + version: 'v2', // defaults to 'v2' if missing. + action: 'read', + expires: Date.now() + 1000 * 60 * 60, // one hour + }; + + // Get a v2 signed URL for the file + const [url] = await storage + .bucket(bucketName) + .file(fileName) + .getSignedUrl(options); + + console.log(`The signed url for ${fileName} is ${url}.`); + } catch (error) { + console.error( + 'Error executing generate signed url:', + error.message || error + ); + } } - generateSignedUrl().catch(console.error); + generateSignedUrl(); // [END storage_generate_signed_url] } main(...process.argv.slice(2)); diff --git a/storage/generateV4ReadSignedUrl.js b/storage/generateV4ReadSignedUrl.js index de55d6901b..81b6f059b7 100644 --- a/storage/generateV4ReadSignedUrl.js +++ b/storage/generateV4ReadSignedUrl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -40,26 +42,33 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function generateV4ReadSignedUrl() { - // These options will allow temporary read access to the file - const options = { - version: 'v4', - action: 'read', - expires: Date.now() + 15 * 60 * 1000, // 15 minutes - }; + try { + // These options will allow temporary read access to the file + const options = { + version: 'v4', + action: 'read', + expires: Date.now() + 15 * 60 * 1000, // 15 minutes + }; - // Get a v4 signed URL for reading the file - const [url] = await storage - .bucket(bucketName) - .file(fileName) - .getSignedUrl(options); + // Get a v4 signed URL for reading the file + const [url] = await storage + .bucket(bucketName) + .file(fileName) + .getSignedUrl(options); - console.log('Generated GET signed URL:'); - console.log(url); - console.log('You can use this URL with any user agent, for example:'); - console.log(`curl '${url}'`); + console.log('Generated GET signed URL:'); + console.log(url); + console.log('You can use this URL with any user agent, for example:'); + console.log(`curl '${url}'`); + } catch (error) { + console.error( + 'Error executing generate v4 read signed url:', + error.message || error + ); + } } - generateV4ReadSignedUrl().catch(console.error); + generateV4ReadSignedUrl(); // [END storage_generate_signed_url_v4] } main(...process.argv.slice(2)); diff --git a/storage/generateV4SignedPolicy.js b/storage/generateV4SignedPolicy.js index f52a5d4aab..a222d02cc8 100644 --- a/storage/generateV4SignedPolicy.js +++ b/storage/generateV4SignedPolicy.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -38,35 +40,42 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function generateV4SignedPolicy() { - const bucket = storage.bucket(bucketName); - const file = bucket.file(fileName); + try { + const bucket = storage.bucket(bucketName); + const file = bucket.file(fileName); - // These options will allow temporary uploading of a file - // through an HTML form. - const expires = Date.now() + 10 * 60 * 1000; // 10 minutes - const options = { - expires, - fields: {'x-goog-meta-test': 'data'}, - }; + // These options will allow temporary uploading of a file + // through an HTML form. + const expires = Date.now() + 10 * 60 * 1000; // 10 minutes + const options = { + expires, + fields: {'x-goog-meta-test': 'data'}, + }; - // Get a v4 signed policy for uploading file - const [response] = await file.generateSignedPostPolicyV4(options); + // Get a v4 signed policy for uploading file + const [response] = await file.generateSignedPostPolicyV4(options); - // Create an HTML form with the provided policy - let output = `\n`; - // Include all fields returned in the HTML form as they're required - for (const name of Object.keys(response.fields)) { - const value = response.fields[name]; - output += ` \n`; - } - output += '
\n'; - output += '
\n'; - output += ''; + // Create an HTML form with the provided policy + let output = `
\n`; + // Include all fields returned in the HTML form as they're required + for (const name of Object.keys(response.fields)) { + const value = response.fields[name]; + output += ` \n`; + } + output += '
\n'; + output += '
\n'; + output += '
'; - console.log(output); + console.log(output); + } catch (error) { + console.error( + 'Error executing generate v4 signed policy:', + error.message || error + ); + } } - generateV4SignedPolicy().catch(console.error); + generateV4SignedPolicy(); // [END storage_generate_signed_post_policy_v4] } main(...process.argv.slice(2)); diff --git a/storage/generateV4UploadSignedUrl.js b/storage/generateV4UploadSignedUrl.js index 4b2b4c0219..f6d8280170 100644 --- a/storage/generateV4UploadSignedUrl.js +++ b/storage/generateV4UploadSignedUrl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -38,31 +40,38 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function generateV4UploadSignedUrl() { - // These options will allow temporary uploading of the file with outgoing - // Content-Type: application/octet-stream header. - const options = { - version: 'v4', - action: 'write', - expires: Date.now() + 15 * 60 * 1000, // 15 minutes - contentType: 'application/octet-stream', - }; + try { + // These options will allow temporary uploading of the file with outgoing + // Content-Type: application/octet-stream header. 
+ const options = { + version: 'v4', + action: 'write', + expires: Date.now() + 15 * 60 * 1000, // 15 minutes + contentType: 'application/octet-stream', + }; - // Get a v4 signed URL for uploading file - const [url] = await storage - .bucket(bucketName) - .file(fileName) - .getSignedUrl(options); + // Get a v4 signed URL for uploading file + const [url] = await storage + .bucket(bucketName) + .file(fileName) + .getSignedUrl(options); - console.log('Generated PUT signed URL:'); - console.log(url); - console.log('You can use this URL with any user agent, for example:'); - console.log( - "curl -X PUT -H 'Content-Type: application/octet-stream' " + - `--upload-file my-file '${url}'` - ); + console.log('Generated PUT signed URL:'); + console.log(url); + console.log('You can use this URL with any user agent, for example:'); + console.log( + "curl -X PUT -H 'Content-Type: application/octet-stream' " + + `--upload-file my-file '${url}'` + ); + } catch (error) { + console.error( + 'Error executing generate v4 upload signed url:', + error.message || error + ); + } } - generateV4UploadSignedUrl().catch(console.error); + generateV4UploadSignedUrl(); // [END storage_generate_upload_signed_url_v4] } main(...process.argv.slice(2)); diff --git a/storage/getMetadata.js b/storage/getMetadata.js index ccaa6b8e3b..8959ff1a43 100644 --- a/storage/getMetadata.js +++ b/storage/getMetadata.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -38,54 +40,58 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function getMetadata() { - // Gets the metadata for the file - const [metadata] = await storage - .bucket(bucketName) - .file(fileName) - .getMetadata(); + try { + // Gets the metadata for the file + const [metadata] = await storage + .bucket(bucketName) + .file(fileName) + .getMetadata(); - console.log(`Bucket: ${metadata.bucket}`); - console.log(`CacheControl: ${metadata.cacheControl}`); - console.log(`ComponentCount: ${metadata.componentCount}`); - console.log(`ContentDisposition: ${metadata.contentDisposition}`); - console.log(`ContentEncoding: ${metadata.contentEncoding}`); - console.log(`ContentLanguage: ${metadata.contentLanguage}`); - console.log(`ContentType: ${metadata.contentType}`); - console.log(`CustomTime: ${metadata.customTime}`); - console.log(`Crc32c: ${metadata.crc32c}`); - console.log(`ETag: ${metadata.etag}`); - console.log(`Generation: ${metadata.generation}`); - console.log(`Id: ${metadata.id}`); - console.log(`KmsKeyName: ${metadata.kmsKeyName}`); - console.log(`Md5Hash: ${metadata.md5Hash}`); - console.log(`MediaLink: ${metadata.mediaLink}`); - console.log(`Metageneration: ${metadata.metageneration}`); - console.log(`Name: ${metadata.name}`); - console.log(`Size: ${metadata.size}`); - console.log(`StorageClass: ${metadata.storageClass}`); - console.log(`TimeCreated: ${new Date(metadata.timeCreated)}`); - console.log(`Last Metadata Update: ${new Date(metadata.updated)}`); - console.log(`TurboReplication: ${metadata.rpo}`); - console.log( - `temporaryHold: ${metadata.temporaryHold ? 'enabled' : 'disabled'}` - ); - console.log( - `eventBasedHold: ${metadata.eventBasedHold ? 
'enabled' : 'disabled'}` - ); - if (metadata.retentionExpirationTime) { + console.log(`Bucket: ${metadata.bucket}`); + console.log(`CacheControl: ${metadata.cacheControl}`); + console.log(`ComponentCount: ${metadata.componentCount}`); + console.log(`ContentDisposition: ${metadata.contentDisposition}`); + console.log(`ContentEncoding: ${metadata.contentEncoding}`); + console.log(`ContentLanguage: ${metadata.contentLanguage}`); + console.log(`ContentType: ${metadata.contentType}`); + console.log(`CustomTime: ${metadata.customTime}`); + console.log(`Crc32c: ${metadata.crc32c}`); + console.log(`ETag: ${metadata.etag}`); + console.log(`Generation: ${metadata.generation}`); + console.log(`Id: ${metadata.id}`); + console.log(`KmsKeyName: ${metadata.kmsKeyName}`); + console.log(`Md5Hash: ${metadata.md5Hash}`); + console.log(`MediaLink: ${metadata.mediaLink}`); + console.log(`Metageneration: ${metadata.metageneration}`); + console.log(`Name: ${metadata.name}`); + console.log(`Size: ${metadata.size}`); + console.log(`StorageClass: ${metadata.storageClass}`); + console.log(`TimeCreated: ${new Date(metadata.timeCreated)}`); + console.log(`Last Metadata Update: ${new Date(metadata.updated)}`); + console.log(`TurboReplication: ${metadata.rpo}`); console.log( - `retentionExpirationTime: ${new Date(metadata.retentionExpirationTime)}` + `temporaryHold: ${metadata.temporaryHold ? 'enabled' : 'disabled'}` ); - } - if (metadata.metadata) { - console.log('\n\n\nUser metadata:'); - for (const key in metadata.metadata) { - console.log(`${key}=${metadata.metadata[key]}`); + console.log( + `eventBasedHold: ${metadata.eventBasedHold ? 
'enabled' : 'disabled'}` + ); + if (metadata.retentionExpirationTime) { + console.log( + `retentionExpirationTime: ${new Date(metadata.retentionExpirationTime)}` + ); + } + if (metadata.metadata) { + console.log('\n\n\nUser metadata:'); + for (const key in metadata.metadata) { + console.log(`${key}=${metadata.metadata[key]}`); + } } + } catch (error) { + console.error('Error executing get metadata:', error.message || error); } } - getMetadata().catch(console.error); + getMetadata(); // [END storage_get_metadata] } main(...process.argv.slice(2)); diff --git a/storage/listFiles.js b/storage/listFiles.js index 79cdd2c8b6..8291cb1212 100644 --- a/storage/listFiles.js +++ b/storage/listFiles.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -35,16 +37,20 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function listFiles() { - // Lists files in the bucket - const [files] = await storage.bucket(bucketName).getFiles(); + try { + // Lists files in the bucket + const [files] = await storage.bucket(bucketName).getFiles(); - console.log('Files:'); - files.forEach(file => { - console.log(file.name); - }); + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + } catch (error) { + console.error('Error executing list files:', error.message || error); + } } - listFiles().catch(console.error); + listFiles(); // [END storage_list_files] } main(...process.argv.slice(2)); diff --git a/storage/listFilesByPrefix.js b/storage/listFilesByPrefix.js index 04fc988ae4..b79bd93d67 100644 --- a/storage/listFilesByPrefix.js +++ b/storage/listFilesByPrefix.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -41,43 +43,50 @@ function main(bucketName = 'my-bucket', prefix = 'test', delimiter = '/') { const storage = new Storage(); async function listFilesByPrefix() { - /** - * This can be used to list all blobs in a "folder", e.g. "public/". - * - * The delimiter argument can be used to restrict the results to only the - * "files" in the given "folder". Without the delimiter, the entire tree under - * the prefix is returned. For example, given these blobs: - * - * /a/1.txt - * /a/b/2.txt - * - * If you just specify prefix = 'a/', you'll get back: - * - * /a/1.txt - * /a/b/2.txt - * - * However, if you specify prefix='a/' and delimiter='/', you'll get back: - * - * /a/1.txt - */ - const options = { - prefix: prefix, - }; + try { + /** + * This can be used to list all blobs in a "folder", e.g. "public/". + * + * The delimiter argument can be used to restrict the results to only the + * "files" in the given "folder". Without the delimiter, the entire tree under + * the prefix is returned. 
For example, given these blobs: + * + * /a/1.txt + * /a/b/2.txt + * + * If you just specify prefix = 'a/', you'll get back: + * + * /a/1.txt + * /a/b/2.txt + * + * However, if you specify prefix='a/' and delimiter='/', you'll get back: + * + * /a/1.txt + */ + const options = { + prefix: prefix, + }; - if (delimiter) { - options.delimiter = delimiter; - } + if (delimiter) { + options.delimiter = delimiter; + } - // Lists files in the bucket, filtered by a prefix - const [files] = await storage.bucket(bucketName).getFiles(options); + // Lists files in the bucket, filtered by a prefix + const [files] = await storage.bucket(bucketName).getFiles(options); - console.log('Files:'); - files.forEach(file => { - console.log(file.name); - }); + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + } catch (error) { + console.error( + 'Error executing list files by prefix:', + error.message || error + ); + } } - listFilesByPrefix().catch(console.error); + listFilesByPrefix(); // [END storage_list_files_with_prefix] } main(...process.argv.slice(2)); diff --git a/storage/listFilesPaginate.js b/storage/listFilesPaginate.js index 694ac4c268..f2716aaa2f 100644 --- a/storage/listFilesPaginate.js +++ b/storage/listFilesPaginate.js @@ -11,7 +11,8 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-/* eslint-disable no-unused-vars */ + +'use strict'; function main(bucketName = 'my-bucket') { // [START storage_list_files_paginated] @@ -28,28 +29,39 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function listFilesPaginated() { - const bucket = storage.bucket(bucketName); - const [files, queryForPage2] = await bucket.getFiles({autoPaginate: false}); - - console.log('Files:'); - files.forEach(file => { - console.log(file.name); - }); - - // Page through the next set of results using "queryForPage2" - if (queryForPage2 !== null) { - const [files, queryForPage3] = await bucket.getFiles(queryForPage2); + try { + const bucket = storage.bucket(bucketName); + const [files, queryForPage2] = await bucket.getFiles({ + autoPaginate: false, + }); console.log('Files:'); files.forEach(file => { console.log(file.name); }); - // If necessary, continue cursoring using "queryForPage3" + // Page through the next set of results using "queryForPage2" + if (queryForPage2 !== null) { + // We only extract the files, but you could also extract the next query object + // const [files, queryForPage3] = await bucket.getFiles(queryForPage2); + const [files] = await bucket.getFiles(queryForPage2); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + + // If necessary, continue cursoring to subsequent pages + } + } catch (error) { + console.error( + 'Error executing list files paginated:', + error.message || error + ); } } - listFilesPaginated().catch(console.error); + listFilesPaginated(); // [END storage_list_files_paginated] } main(...process.argv.slice(2)); diff --git a/storage/listFilesWithOldVersions.js b/storage/listFilesWithOldVersions.js index c08f38cd40..269fcc2fb7 100644 --- a/storage/listFilesWithOldVersions.js +++ b/storage/listFilesWithOldVersions.js @@ -34,17 +34,24 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function listFilesWithOldVersions() { - const [files] = await 
storage.bucket(bucketName).getFiles({ - versions: true, - }); - - console.log('Files:'); - files.forEach(file => { - console.log(file.name, file.generation); - }); + try { + const [files] = await storage.bucket(bucketName).getFiles({ + versions: true, + }); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name, file.generation); + }); + } catch (error) { + console.error( + 'Error executing list files with old versions:', + error.message || error + ); + } } - listFilesWithOldVersions().catch(console.error); + listFilesWithOldVersions(); // [END storage_list_file_archived_generations] } main(...process.argv.slice(2)); diff --git a/storage/listSoftDeletedObjectVersions.js b/storage/listSoftDeletedObjectVersions.js index 0a7e711fff..5f5db8bb3f 100644 --- a/storage/listSoftDeletedObjectVersions.js +++ b/storage/listSoftDeletedObjectVersions.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + function main(bucketName = 'my-bucket', fileName = 'test.txt') { // [START storage_list_soft_deleted_object_versions] /** @@ -32,22 +34,29 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function listSoftDeletedObjectVersions() { - const options = { - softDeleted: true, - matchGlob: fileName, - }; - - const [files] = await storage.bucket(bucketName).getFiles(options); - - console.log('Files:'); - files.forEach(file => { - console.log( - `Name: ${file.name}, Generation: ${file.metadata.generation}` + try { + const options = { + softDeleted: true, + matchGlob: fileName, + }; + + const [files] = await storage.bucket(bucketName).getFiles(options); + + console.log('Files:'); + files.forEach(file => { + console.log( + `Name: ${file.name}, Generation: ${file.metadata.generation}` + ); + }); + } catch (error) { + console.error( + 'Error executing list soft deleted object versions:', + error.message || error ); - }); + } } - listSoftDeletedObjectVersions().catch(console.error); + 
listSoftDeletedObjectVersions(); // [END storage_list_soft_deleted_object_versions] } diff --git a/storage/listSoftDeletedObjects.js b/storage/listSoftDeletedObjects.js index 62014723f9..51707c56ad 100644 --- a/storage/listSoftDeletedObjects.js +++ b/storage/listSoftDeletedObjects.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_list_soft_deleted_objects] /** @@ -29,19 +31,26 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function listSoftDeletedObjects() { - const options = { - softDeleted: true, - }; - - const [files] = await storage.bucket(bucketName).getFiles(options); - - console.log('Files:'); - files.forEach(file => { - console.log(file.name); - }); + try { + const options = { + softDeleted: true, + }; + + const [files] = await storage.bucket(bucketName).getFiles(options); + + console.log('Files:'); + files.forEach(file => { + console.log(file.name); + }); + } catch (error) { + console.error( + 'Error executing list soft deleted objects:', + error.message || error + ); + } } - listSoftDeletedObjects().catch(console.error); + listSoftDeletedObjects(); // [END storage_list_soft_deleted_objects] } diff --git a/storage/makePublic.js b/storage/makePublic.js index fd17d8b010..504e9503b5 100644 --- a/storage/makePublic.js +++ b/storage/makePublic.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -38,12 +40,16 @@ function main(bucketName = 'my-bucket', fileName = 'test.txt') { const storage = new Storage(); async function makePublic() { - await storage.bucket(bucketName).file(fileName).makePublic(); + try { + await storage.bucket(bucketName).file(fileName).makePublic(); - console.log(`gs://${bucketName}/${fileName} is now public.`); + console.log(`gs://${bucketName}/${fileName} is now public.`); + } catch (error) { + console.error('Error executing make public:', error.message || error); + } } - makePublic().catch(console.error); + makePublic(); // [END storage_make_public] } main(...process.argv.slice(2)); diff --git a/storage/moveFile.js b/storage/moveFile.js index 29d62d4e68..530b13cea7 100644 --- a/storage/moveFile.js +++ b/storage/moveFile.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -46,31 +48,35 @@ function main( const storage = new Storage(); async function moveFile() { - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. The request to copy is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - const moveOptions = { - preconditionOpts: { - ifGenerationMatch: destinationGenerationMatchPrecondition, - }, - }; + try { + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. 
For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + const moveOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; - // Moves the file within the bucket - await storage - .bucket(bucketName) - .file(srcFileName) - .move(destFileName, moveOptions); + // Moves the file within the bucket + await storage + .bucket(bucketName) + .file(srcFileName) + .move(destFileName, moveOptions); - console.log( - `gs://${bucketName}/${srcFileName} moved to gs://${bucketName}/${destFileName}` - ); + console.log( + `gs://${bucketName}/${srcFileName} moved to gs://${bucketName}/${destFileName}` + ); + } catch (error) { + console.error('Error executing move file:', error.message || error); + } } - moveFile().catch(console.error); + moveFile(); // [END storage_move_file] } main(...process.argv.slice(2)); diff --git a/storage/moveFileAtomic.js b/storage/moveFileAtomic.js index 88047da8cf..79eea2d950 100644 --- a/storage/moveFileAtomic.js +++ b/storage/moveFileAtomic.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -46,31 +48,38 @@ function main( const storage = new Storage(); async function moveFileAtomic() { - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. The request to copy is aborted if the object's - // generation number does not match your precondition. 
For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - const moveOptions = { - preconditionOpts: { - ifGenerationMatch: destinationGenerationMatchPrecondition, - }, - }; + try { + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to copy is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + const moveOptions = { + preconditionOpts: { + ifGenerationMatch: destinationGenerationMatchPrecondition, + }, + }; - // Moves the file atomically within the bucket - await storage - .bucket(bucketName) - .file(srcFileName) - .moveFileAtomic(destFileName, moveOptions); + // Moves the file atomically within the bucket + await storage + .bucket(bucketName) + .file(srcFileName) + .moveFileAtomic(destFileName, moveOptions); - console.log( - `gs://${bucketName}/${srcFileName} moved to gs://${bucketName}/${destFileName}` - ); + console.log( + `gs://${bucketName}/${srcFileName} moved to gs://${bucketName}/${destFileName}` + ); + } catch (error) { + console.error( + 'Error executing move file atomic:', + error.message || error + ); + } } - moveFileAtomic().catch(console.error); + moveFileAtomic(); // [END storage_move_object] } main(...process.argv.slice(2)); diff --git a/storage/renameFile.js b/storage/renameFile.js index 0bd53108f1..fa352442af 100644 --- a/storage/renameFile.js +++ b/storage/renameFile.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -45,15 +47,22 @@ function main( const storage = new Storage(); async function renameFile() { - // renames the file - await storage.bucket(srcBucketName).file(srcFileName).rename(destFileName); + try { + // renames the file + await storage + .bucket(srcBucketName) + .file(srcFileName) + .rename(destFileName); - console.log( - `gs://${srcBucketName}/${srcFileName} renamed to gs://${srcBucketName}/${destFileName}.` - ); + console.log( + `gs://${srcBucketName}/${srcFileName} renamed to gs://${srcBucketName}/${destFileName}.` + ); + } catch (error) { + console.error('Error executing rename file:', error.message || error); + } } - renameFile().catch(console.error); + renameFile(); // [END storage_rename_file] } main(...process.argv.slice(2)); diff --git a/storage/restoreSoftDeletedObject.js b/storage/restoreSoftDeletedObject.js index 396ce52e78..9c44e2ef1c 100644 --- a/storage/restoreSoftDeletedObject.js +++ b/storage/restoreSoftDeletedObject.js @@ -14,6 +14,8 @@ * limitations under the License. 
*/ +'use strict'; + function main( bucketName = 'my-bucket', fileName = 'test.txt', @@ -39,19 +41,26 @@ function main( const storage = new Storage(); async function restoreSoftDeletedObject() { - const options = { - generation: generation, - }; + try { + const options = { + generation: generation, + }; - const restoredFile = await storage - .bucket(bucketName) - .file(fileName) - .restore(options); + const restoredFile = await storage + .bucket(bucketName) + .file(fileName) + .restore(options); - console.log(`Soft deleted object ${restoredFile.name} was restored`); + console.log(`Soft deleted object ${restoredFile.name} was restored`); + } catch (error) { + console.error( + 'Error executing restore soft deleted object:', + error.message || error + ); + } } - restoreSoftDeletedObject().catch(console.error); + restoreSoftDeletedObject(); // [END storage_restore_object] } diff --git a/storage/setObjectRetentionPolicy.js b/storage/setObjectRetentionPolicy.js index 9cfe3444ca..c7067b7673 100644 --- a/storage/setObjectRetentionPolicy.js +++ b/storage/setObjectRetentionPolicy.js @@ -14,6 +14,8 @@ * limitations under the License. */ +'use strict'; + // sample-metadata: // title: Set the object retention policy of a File. // description: Set the object retention policy of a File. 
@@ -46,51 +48,53 @@ function main( const storage = new Storage(); async function setObjectRetentionPolicy() { - // Get a reference to the bucket - const myBucket = storage.bucket(bucketName); + try { + // Get a reference to the bucket + const myBucket = storage.bucket(bucketName); - // Create a reference to a file object - const file = myBucket.file(destFileName); + // Create a reference to a file object + const file = myBucket.file(destFileName); - // Save the file data - await file.save(contents); + // Save the file data + await file.save(contents); - // Set the retention policy for the file - const retentionDate = new Date(); - retentionDate.setDate(retentionDate.getDate() + 10); - const [metadata] = await file.setMetadata({ - retention: { - mode: 'Unlocked', - retainUntilTime: retentionDate.toISOString(), - }, - }); + // Set the retention policy for the file + const retentionDate = new Date(); + retentionDate.setDate(retentionDate.getDate() + 10); + const [metadata] = await file.setMetadata({ + retention: { + mode: 'Unlocked', + retainUntilTime: retentionDate.toISOString(), + }, + }); - console.log( - `Retention policy for file ${file.name} was set to: ${metadata.retention.mode}` - ); + console.log( + `Retention policy for file ${file.name} was set to: ${metadata.retention.mode}` + ); - // To modify an existing policy on an unlocked file object, pass in the override parameter - const newRetentionDate = new Date(retentionDate.getDate()); - newRetentionDate.setDate(newRetentionDate.getDate() + 9); - const [newMetadata] = await file.setMetadata({ - retention: { - mode: 'Unlocked', - retainUntilTime: newRetentionDate, - }, - overrideUnlockedRetention: true, - }); + // To modify an existing policy on an unlocked file object, pass in the override parameter + const newRetentionDate = new Date(retentionDate.getDate()); + newRetentionDate.setDate(newRetentionDate.getDate() + 9); + const [newMetadata] = await file.setMetadata({ + retention: { + mode: 'Unlocked', + 
retainUntilTime: newRetentionDate, + }, + overrideUnlockedRetention: true, + }); - console.log( - `Retention policy for file ${file.name} was updated to: ${newMetadata.retention.retainUntilTime}` - ); + console.log( + `Retention policy for file ${file.name} was updated to: ${newMetadata.retention.retainUntilTime}` + ); + } catch (error) { + console.error( + 'Error executing set object retention policy:', + error.message || error + ); + } } - setObjectRetentionPolicy().catch(console.error); + setObjectRetentionPolicy(); // [END storage_set_object_retention_policy] } - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); main(...process.argv.slice(2)); diff --git a/storage/streamFileDownload.js b/storage/streamFileDownload.js index b25aa87f7d..32dbf0fd74 100644 --- a/storage/streamFileDownload.js +++ b/storage/streamFileDownload.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -48,24 +50,31 @@ function main( const storage = new Storage(); async function streamFileDownload() { - // The example below demonstrates how we can reference a remote file, then - // pipe its contents to a local file. - // Once the stream is created, the data can be piped anywhere (process, sdout, etc) - await storage - .bucket(bucketName) - .file(fileName) - .createReadStream() //stream is created - .pipe(fs.createWriteStream(destFileName)) - .on('finish', () => { - // The file download is complete - }); + try { + // The example below demonstrates how we can reference a remote file, then + // pipe its contents to a local file. 
+ // Once the stream is created, the data can be piped anywhere (process, sdout, etc) + await storage + .bucket(bucketName) + .file(fileName) + .createReadStream() //stream is created + .pipe(fs.createWriteStream(destFileName)) + .on('finish', () => { + // The file download is complete + }); - console.log( - `gs://${bucketName}/${fileName} downloaded to ${destFileName}.` - ); + console.log( + `gs://${bucketName}/${fileName} downloaded to ${destFileName}.` + ); + } catch (error) { + console.error( + 'Error executing stream file download:', + error.message || error + ); + } } - streamFileDownload().catch(console.error); + streamFileDownload(); // [END storage_stream_file_download] } main(...process.argv.slice(2)); diff --git a/storage/streamFileUpload.js b/storage/streamFileUpload.js index 19c240f603..a74297ca3a 100644 --- a/storage/streamFileUpload.js +++ b/storage/streamFileUpload.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -59,11 +61,18 @@ function main( passthroughStream.end(); async function streamFileUpload() { - passthroughStream.pipe(file.createWriteStream()).on('finish', () => { - // The file upload is complete - }); + try { + passthroughStream.pipe(file.createWriteStream()).on('finish', () => { + // The file upload is complete + }); - console.log(`${destFileName} uploaded to ${bucketName}`); + console.log(`${destFileName} uploaded to ${bucketName}`); + } catch (error) { + console.error( + 'Error executing stream file upload:', + error.message || error + ); + } } streamFileUpload().catch(console.error); diff --git a/storage/uploadDirectory.js b/storage/uploadDirectory.js index 62ff622795..e15d6e6746 100644 --- a/storage/uploadDirectory.js +++ b/storage/uploadDirectory.js @@ -62,29 +62,36 @@ function main( } async function uploadDirectory() { - const bucket = storage.bucket(bucketName); - let successfulUploads = 0; - - for await (const filePath of getFiles(directoryPath)) { - try { - const dirname = path.dirname(directoryPath); - const destination = path.relative(dirname, filePath); - - await bucket.upload(filePath, {destination}); - - console.log(`Successfully uploaded: ${filePath}`); - successfulUploads++; - } catch (e) { - console.error(`Error uploading ${filePath}:`, e); + try { + const bucket = storage.bucket(bucketName); + let successfulUploads = 0; + + for await (const filePath of getFiles(directoryPath)) { + try { + const dirname = path.dirname(directoryPath); + const destination = path.relative(dirname, filePath); + + await bucket.upload(filePath, {destination}); + + console.log(`Successfully uploaded: ${filePath}`); + successfulUploads++; + } catch (e) { + console.error(`Error uploading ${filePath}:`, e); + } } - } - console.log( - `${successfulUploads} files uploaded to ${bucketName} successfully.` - ); + console.log( + `${successfulUploads} files uploaded to ${bucketName} successfully.` + ); + } catch (error) { + console.error( + 'Error executing upload 
directory:', + error.message || error + ); + } } - uploadDirectory().catch(console.error); + uploadDirectory(); // [END upload_directory] } diff --git a/storage/uploadFile.js b/storage/uploadFile.js index 1538e4ff14..a54af5eb9b 100644 --- a/storage/uploadFile.js +++ b/storage/uploadFile.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + function main( bucketName = 'my-bucket', filePath = './local/path/to/file.txt', @@ -38,23 +40,27 @@ function main( const storage = new Storage(); async function uploadFile() { - const options = { - destination: destFileName, - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. The request to upload is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, - }; - - await storage.bucket(bucketName).upload(filePath, options); - console.log(`${filePath} uploaded to ${bucketName}`); + try { + const options = { + destination: destFileName, + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to upload is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; + + await storage.bucket(bucketName).upload(filePath, options); + console.log(`${filePath} uploaded to ${bucketName}`); + } catch (error) { + console.error('Error executing upload file:', error.message || error); + } } - uploadFile().catch(console.error); + uploadFile(); // [END storage_upload_file] } diff --git a/storage/uploadFileWithKmsKey.js b/storage/uploadFileWithKmsKey.js index 771638abca..b94d718bc4 100644 --- a/storage/uploadFileWithKmsKey.js +++ b/storage/uploadFileWithKmsKey.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -46,21 +48,28 @@ function main( const storage = new Storage(); async function uploadFileWithKmsKey() { - const options = { - kmsKeyName, - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. The request to upload is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, - }; + try { + const options = { + kmsKeyName, + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to upload is aborted if the object's + // generation number does not match your precondition. 
For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. + preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; - await storage.bucket(bucketName).upload(filePath, options); + await storage.bucket(bucketName).upload(filePath, options); - console.log(`${filePath} uploaded to ${bucketName} using ${kmsKeyName}.`); + console.log(`${filePath} uploaded to ${bucketName} using ${kmsKeyName}.`); + } catch (error) { + console.error( + 'Error executing upload file with kms key:', + error.message || error + ); + } } uploadFileWithKmsKey().catch(console.error); diff --git a/storage/uploadFromMemory.js b/storage/uploadFromMemory.js index cd6ec5166d..919439c882 100644 --- a/storage/uploadFromMemory.js +++ b/storage/uploadFromMemory.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + function main( bucketName = 'my-bucket', contents = 'these are my file contents', @@ -37,18 +39,22 @@ function main( const storage = new Storage(); async function uploadFromMemory() { - await storage.bucket(bucketName).file(destFileName).save(contents); - - console.log( - `${destFileName} with contents ${contents} uploaded to ${bucketName}.` - ); + try { + await storage.bucket(bucketName).file(destFileName).save(contents); + + console.log( + `${destFileName} with contents ${contents} uploaded to ${bucketName}.` + ); + } catch (error) { + console.error( + 'Error executing file upload from memory:', + error.message || error + ); + } } - uploadFromMemory().catch(console.error); + uploadFromMemory(); // [END storage_file_upload_from_memory] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); + main(...process.argv.slice(2)); diff --git a/storage/uploadWithoutAuthentication.js b/storage/uploadWithoutAuthentication.js index f51e569404..254509d33b 100644 --- a/storage/uploadWithoutAuthentication.js +++ b/storage/uploadWithoutAuthentication.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + function main( bucketName = 'my-bucket', contents = 'these are my file contents', @@ -38,39 +40,43 @@ function main( const storage = new Storage(); async function uploadWithoutAuthentication() { - const file = storage.bucket(bucketName).file(destFileName); + try { + const file = storage.bucket(bucketName).file(destFileName); - // Returns an authenticated endpoint to which - // you can make requests without credentials. - const [location] = await file.createResumableUpload(); //auth required + // Returns an authenticated endpoint to which + // you can make requests without credentials. 
+ const [location] = await file.createResumableUpload(); //auth required - const options = { - uri: location, - resumable: true, - validation: false, + const options = { + uri: location, + resumable: true, + validation: false, - // Optional: - // Set a generation-match precondition to avoid potential race conditions - // and data corruptions. The request to upload is aborted if the object's - // generation number does not match your precondition. For a destination - // object that does not yet exist, set the ifGenerationMatch precondition to 0 - // If the destination object already exists in your bucket, set instead a - // generation-match precondition using its generation number. - preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, - }; + // Optional: + // Set a generation-match precondition to avoid potential race conditions + // and data corruptions. The request to upload is aborted if the object's + // generation number does not match your precondition. For a destination + // object that does not yet exist, set the ifGenerationMatch precondition to 0 + // If the destination object already exists in your bucket, set instead a + // generation-match precondition using its generation number. 
+ preconditionOpts: {ifGenerationMatch: generationMatchPrecondition}, + }; - // Passes the location to file.save so you don't need to - // authenticate this call - await file.save(contents, options); + // Passes the location to file.save so you don't need to + // authenticate this call + await file.save(contents, options); - console.log(`${destFileName} uploaded to ${bucketName}`); + console.log(`${destFileName} uploaded to ${bucketName}`); + } catch (error) { + console.error( + 'Error executing upload without authentication:', + error.message || error + ); + } } - uploadWithoutAuthentication().catch(console.error); + uploadWithoutAuthentication(); // [END storage_upload_without_authentication] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); + main(...process.argv.slice(2)); diff --git a/storage/uploadWithoutAuthenticationSignedUrl.js b/storage/uploadWithoutAuthenticationSignedUrl.js index 4be8def3bf..61096b444a 100644 --- a/storage/uploadWithoutAuthenticationSignedUrl.js +++ b/storage/uploadWithoutAuthenticationSignedUrl.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + function main( bucketName = 'my-bucket', contents = 'these are my file contents', @@ -39,46 +41,50 @@ function main( const storage = new Storage(); async function uploadWithoutAuthenticationSignedUrlStrategy() { - const file = storage.bucket(bucketName).file(destFileName); + try { + const file = storage.bucket(bucketName).file(destFileName); - // Use signed URLs to manually start resumable uploads. - // Authenticating is required to get the signed URL, but isn't - // required to start the resumable upload - const options = { - version: 'v4', - action: 'resumable', - expires: Date.now() + 30 * 60 * 1000, // 30 mins - }; - //auth required - const [signedUrl] = await file.getSignedUrl(options); + // Use signed URLs to manually start resumable uploads. 
+ // Authenticating is required to get the signed URL, but isn't + // required to start the resumable upload + const options = { + version: 'v4', + action: 'resumable', + expires: Date.now() + 30 * 60 * 1000, // 30 mins + }; + //auth required + const [signedUrl] = await file.getSignedUrl(options); - // no auth required - const resumableSession = await fetch(signedUrl, { - method: 'POST', - headers: { - 'x-goog-resumable': 'start', - }, - }); + // no auth required + const resumableSession = await fetch(signedUrl, { + method: 'POST', + headers: { + 'x-goog-resumable': 'start', + }, + }); - // Endpoint to which we should upload the file - const location = resumableSession.headers.location; + // Endpoint to which we should upload the file + const location = resumableSession.headers.location; - // Passes the location to file.save so you don't need to - // authenticate this call - await file.save(contents, { - uri: location, - resumable: true, - validation: false, - }); + // Passes the location to file.save so you don't need to + // authenticate this call + await file.save(contents, { + uri: location, + resumable: true, + validation: false, + }); - console.log(`${destFileName} uploaded to ${bucketName}`); + console.log(`${destFileName} uploaded to ${bucketName}`); + } catch (error) { + console.error( + 'Error executing upload without authentication signed url strategy:', + error.message || error + ); + } } - uploadWithoutAuthenticationSignedUrlStrategy().catch(console.error); + uploadWithoutAuthenticationSignedUrlStrategy(); // [END storage_upload_without_authentication_signed_url] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); + main(...process.argv.slice(2)); From 3df66e63c9b6cd8d24d7258cf0df4ba1ee4eaae8 Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 22:05:33 +0000 Subject: [PATCH 08/11] refactor(hmac-keys): standardize error handling and fix region tags in hmac key samples --- 
storage/hmacKeyActivate.js | 22 +++++++++++++++------- storage/hmacKeyCreate.js | 27 +++++++++++++++++---------- storage/hmacKeyDeactivate.js | 22 +++++++++++++++------- storage/hmacKeyDelete.js | 19 ++++++++++++------- storage/hmacKeyGet.js | 23 ++++++++++++++--------- storage/hmacKeysList.js | 23 ++++++++++++++--------- storage/package.json | 2 +- 7 files changed, 88 insertions(+), 50 deletions(-) diff --git a/storage/hmacKeyActivate.js b/storage/hmacKeyActivate.js index 6ce8a2dd9a..29d90070c2 100644 --- a/storage/hmacKeyActivate.js +++ b/storage/hmacKeyActivate.js @@ -41,17 +41,25 @@ function main( // Activate HMAC SA Key async function activateHmacKey() { - const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); - const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'ACTIVE'}); + try { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'ACTIVE'}); - console.log('The HMAC key is now active.'); - console.log('The HMAC key metadata is:'); - for (const [key, value] of Object.entries(hmacKeyMetadata)) { - console.log(`${key}: ${value}`); + console.log('The HMAC key is now active.'); + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKeyMetadata)) { + console.log(`${key}: ${value}`); + } + } catch (error) { + console.error( + 'Error executing activate hmac key:', + error.message || error + ); } } + + activateHmacKey(); // [END storage_activate_hmac_key] - activateHmacKey().catch(console.error); } main(...process.argv.slice(2)); diff --git a/storage/hmacKeyCreate.js b/storage/hmacKeyCreate.js index 3fb3456801..d4f794fed3 100644 --- a/storage/hmacKeyCreate.js +++ b/storage/hmacKeyCreate.js @@ -41,19 +41,26 @@ function main( // Create HMAC SA Key async function createHmacKey() { - const [hmacKey, secret] = await storage.createHmacKey(serviceAccountEmail, { - projectId, - }); - - console.log(`The base64 encoded secret is: ${secret}`); - 
console.log('Do not miss that secret, there is no API to recover it.'); - console.log('The HMAC key metadata is:'); - for (const [key, value] of Object.entries(hmacKey.metadata)) { - console.log(`${key}: ${value}`); + try { + const [hmacKey, secret] = await storage.createHmacKey( + serviceAccountEmail, + { + projectId, + } + ); + + console.log(`The base64 encoded secret is: ${secret}`); + console.log('Do not miss that secret, there is no API to recover it.'); + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKey.metadata)) { + console.log(`${key}: ${value}`); + } + } catch (error) { + console.error('Error executing create hmac key:', error.message || error); } } // [END storage_create_hmac_key] - createHmacKey().catch(console.error); + createHmacKey(); } main(...process.argv.slice(2)); diff --git a/storage/hmacKeyDeactivate.js b/storage/hmacKeyDeactivate.js index 471bc84ca5..33c6919ffa 100644 --- a/storage/hmacKeyDeactivate.js +++ b/storage/hmacKeyDeactivate.js @@ -41,17 +41,25 @@ function main( // Deactivate HMAC SA Key async function deactivateHmacKey() { - const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); - const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'INACTIVE'}); + try { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'INACTIVE'}); - console.log('The HMAC key is now inactive.'); - console.log('The HMAC key metadata is:'); - for (const [key, value] of Object.entries(hmacKeyMetadata)) { - console.log(`${key}: ${value}`); + console.log('The HMAC key is now inactive.'); + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKeyMetadata)) { + console.log(`${key}: ${value}`); + } + } catch (error) { + console.error( + 'Error executing deactivate hmac key:', + error.message || error + ); } } + + deactivateHmacKey(); // [END storage_deactivate_hmac_key] - 
deactivateHmacKey().catch(console.error); } main(...process.argv.slice(2)); diff --git a/storage/hmacKeyDelete.js b/storage/hmacKeyDelete.js index 337a273b0f..6bc65c60e4 100644 --- a/storage/hmacKeyDelete.js +++ b/storage/hmacKeyDelete.js @@ -41,15 +41,20 @@ function main( // Delete HMAC SA Key async function deleteHmacKey() { - const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); - await hmacKey.delete(); - - console.log( - 'The key is deleted, though it may still appear in getHmacKeys() results.' - ); + try { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + await hmacKey.delete(); + + console.log( + 'The key is deleted, though it may still appear in getHmacKeys() results.' + ); + } catch (error) { + console.error('Error executing delete hmac key:', error.message || error); + } } + + deleteHmacKey(); // [END storage_delete_hmac_key] - deleteHmacKey().catch(console.error); } main(...process.argv.slice(2)); diff --git a/storage/hmacKeyGet.js b/storage/hmacKeyGet.js index 656036984b..445e85c526 100644 --- a/storage/hmacKeyGet.js +++ b/storage/hmacKeyGet.js @@ -41,18 +41,23 @@ function main( // Get HMAC SA Key Metadata async function getHmacKey() { - const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); - - // Populate the hmacKey object with metadata from server. - await hmacKey.getMetadata(); - - console.log('The HMAC key metadata is:'); - for (const [key, value] of Object.entries(hmacKey.metadata)) { - console.log(`${key}: ${value}`); + try { + const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId}); + + // Populate the hmacKey object with metadata from server. 
+ await hmacKey.getMetadata(); + + console.log('The HMAC key metadata is:'); + for (const [key, value] of Object.entries(hmacKey.metadata)) { + console.log(`${key}: ${value}`); + } + } catch (error) { + console.error('Error executing get hmac key:', error.message || error); } } + + getHmacKey(); // [END storage_get_hmac_key] - getHmacKey().catch(console.error); } main(...process.argv.slice(2)); diff --git a/storage/hmacKeysList.js b/storage/hmacKeysList.js index 6f6acf939d..896fb9d815 100644 --- a/storage/hmacKeysList.js +++ b/storage/hmacKeysList.js @@ -35,18 +35,23 @@ function main(projectId = 'serviceAccountProjectId') { // List HMAC SA Keys' Metadata async function listHmacKeys() { - const [hmacKeys] = await storage.getHmacKeys({projectId}); - - // hmacKeys is an array of HmacKey objects. - for (const hmacKey of hmacKeys) { - console.log( - `Service Account Email: ${hmacKey.metadata.serviceAccountEmail}` - ); - console.log(`Access Id: ${hmacKey.metadata.accessId}`); + try { + const [hmacKeys] = await storage.getHmacKeys({projectId}); + + // hmacKeys is an array of HmacKey objects. 
+ for (const hmacKey of hmacKeys) { + console.log( + `Service Account Email: ${hmacKey.metadata.serviceAccountEmail}` + ); + console.log(`Access Id: ${hmacKey.metadata.accessId}`); + } + } catch (error) { + console.error('Error executing list hmac keys:', error.message || error); } } + + listHmacKeys(); // [END storage_list_hmac_keys] - listHmacKeys().catch(console.error); } main(...process.argv.slice(2)); diff --git a/storage/package.json b/storage/package.json index 539149afb7..71acc18ab5 100644 --- a/storage/package.json +++ b/storage/package.json @@ -13,7 +13,7 @@ ], "scripts": { "cleanup": "node scripts/cleanup", - "test": "mocha system-test/*.js --timeout 600000" + "test": "mocha system-test/*.js --timeout 800000" }, "dependencies": { "@google-cloud/pubsub": "^4.0.0", From 2b99c0a9e31ff99bbe20fcc0b445162ba5df3b84 Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 22:19:45 +0000 Subject: [PATCH 09/11] refactor(iam): standardize error handling in IAM samples --- storage/addBucketConditionalBinding.js | 81 +++++++++++++---------- storage/addBucketIamMember.js | 47 +++++++------ storage/hmacKeyCreate.js | 3 +- storage/removeBucketConditionalBinding.js | 71 +++++++++++--------- storage/removeBucketIamMember.js | 71 +++++++++++--------- storage/viewBucketIamMembers.js | 8 ++- 6 files changed, 162 insertions(+), 119 deletions(-) diff --git a/storage/addBucketConditionalBinding.js b/storage/addBucketConditionalBinding.js index f5f6754a2b..db5aac9b4b 100644 --- a/storage/addBucketConditionalBinding.js +++ b/storage/addBucketConditionalBinding.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. 
@@ -57,44 +59,51 @@ function main( const storage = new Storage(); async function addBucketConditionalBinding() { - // Get a reference to a Google Cloud Storage bucket - const bucket = storage.bucket(bucketName); - - // Gets and updates the bucket's IAM policy - const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); - - // Set the policy's version to 3 to use condition in bindings. - policy.version = 3; - - // Adds the new roles to the bucket's IAM policy - policy.bindings.push({ - role: roleName, - members: members, - condition: { - title: title, - description: description, - expression: expression, - }, - }); - - // Updates the bucket's IAM policy - await bucket.iam.setPolicy(policy); - - console.log( - `Added the following member(s) with role ${roleName} to ${bucketName}:` - ); - - members.forEach(member => { - console.log(` ${member}`); - }); - - console.log('with condition:'); - console.log(` Title: ${title}`); - console.log(` Description: ${description}`); - console.log(` Expression: ${expression}`); + try { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); + + // Gets and updates the bucket's IAM policy + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + + // Set the policy's version to 3 to use condition in bindings. 
+ policy.version = 3; + + // Adds the new roles to the bucket's IAM policy + policy.bindings.push({ + role: roleName, + members: members, + condition: { + title: title, + description: description, + expression: expression, + }, + }); + + // Updates the bucket's IAM policy + await bucket.iam.setPolicy(policy); + + console.log( + `Added the following member(s) with role ${roleName} to ${bucketName}:` + ); + + members.forEach(member => { + console.log(` ${member}`); + }); + + console.log('with condition:'); + console.log(` Title: ${title}`); + console.log(` Description: ${description}`); + console.log(` Expression: ${expression}`); + } catch (error) { + console.error( + 'Error executing add bucket conditional binding:', + error.message || error + ); + } } - addBucketConditionalBinding().catch(console.error); + addBucketConditionalBinding(); // [END storage_add_bucket_conditional_iam_binding] } main(...process.argv.slice(2)); diff --git a/storage/addBucketIamMember.js b/storage/addBucketIamMember.js index 9ab6595de9..a3b25c5419 100644 --- a/storage/addBucketIamMember.js +++ b/storage/addBucketIamMember.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + function main( bucketName = 'my-bucket', roleName = 'roles/storage.objectViewer', @@ -42,32 +44,39 @@ function main( const storage = new Storage(); async function addBucketIamMember() { - // Get a reference to a Google Cloud Storage bucket - const bucket = storage.bucket(bucketName); + try { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); - // For more information please read: - // https://cloud.google.com/storage/docs/access-control/iam - const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + // For more information please read: + // https://cloud.google.com/storage/docs/access-control/iam + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); - // Adds the new roles to the bucket's IAM policy - policy.bindings.push({ - role: roleName, - members: members, - }); + // Adds the new roles to the bucket's IAM policy + policy.bindings.push({ + role: roleName, + members: members, + }); - // Updates the bucket's IAM policy - await bucket.iam.setPolicy(policy); + // Updates the bucket's IAM policy + await bucket.iam.setPolicy(policy); - console.log( - `Added the following member(s) with role ${roleName} to ${bucketName}:` - ); + console.log( + `Added the following member(s) with role ${roleName} to ${bucketName}:` + ); - members.forEach(member => { - console.log(` ${member}`); - }); + members.forEach(member => { + console.log(` ${member}`); + }); + } catch (error) { + console.error( + 'Error executing add bucket iam member:', + error.message || error + ); + } } - addBucketIamMember().catch(console.error); + addBucketIamMember(); // [END storage_add_bucket_iam_member] } main(...process.argv.slice(2)); diff --git a/storage/hmacKeyCreate.js b/storage/hmacKeyCreate.js index d4f794fed3..a61871cb9f 100644 --- a/storage/hmacKeyCreate.js +++ b/storage/hmacKeyCreate.js @@ -59,8 +59,9 @@ function main( console.error('Error executing create hmac key:', error.message || 
error); } } - // [END storage_create_hmac_key] + createHmacKey(); + // [END storage_create_hmac_key] } main(...process.argv.slice(2)); diff --git a/storage/removeBucketConditionalBinding.js b/storage/removeBucketConditionalBinding.js index 7ba888ea5d..12a8adf105 100644 --- a/storage/removeBucketConditionalBinding.js +++ b/storage/removeBucketConditionalBinding.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on bucket and * file Access Control Lists with the Google Cloud Storage API. @@ -55,40 +57,47 @@ function main( const storage = new Storage(); async function removeBucketConditionalBinding() { - // Get a reference to a Google Cloud Storage bucket - const bucket = storage.bucket(bucketName); - - // Gets and updates the bucket's IAM policy - const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); - - // Set the policy's version to 3 to use condition in bindings. - policy.version = 3; - - // Finds and removes the appropriate role-member group with specific condition. 
- const index = policy.bindings.findIndex( - binding => - binding.role === roleName && - binding.condition && - binding.condition.title === title && - binding.condition.description === description && - binding.condition.expression === expression - ); - - const binding = policy.bindings[index]; - if (binding) { - policy.bindings.splice(index, 1); - - // Updates the bucket's IAM policy - await bucket.iam.setPolicy(policy); - - console.log('Conditional Binding was removed.'); - } else { - // No matching role-member group with specific condition were found - throw new Error('No matching binding group found.'); + try { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); + + // Gets and updates the bucket's IAM policy + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + + // Set the policy's version to 3 to use condition in bindings. + policy.version = 3; + + // Finds and removes the appropriate role-member group with specific condition. 
+ const index = policy.bindings.findIndex( + binding => + binding.role === roleName && + binding.condition && + binding.condition.title === title && + binding.condition.description === description && + binding.condition.expression === expression + ); + + const binding = policy.bindings[index]; + if (binding) { + policy.bindings.splice(index, 1); + + // Updates the bucket's IAM policy + await bucket.iam.setPolicy(policy); + + console.log('Conditional Binding was removed.'); + } else { + // No matching role-member group with specific condition were found + throw new Error('No matching binding group found.'); + } + } catch (error) { + console.error( + 'Error executing remove bucket conditional binding:', + error.message || error + ); } } - removeBucketConditionalBinding().catch(console.error); + removeBucketConditionalBinding(); // [END storage_remove_bucket_conditional_iam_binding] } main(...process.argv.slice(2)); diff --git a/storage/removeBucketIamMember.js b/storage/removeBucketIamMember.js index fe333a2f48..d3a5a886e9 100644 --- a/storage/removeBucketIamMember.js +++ b/storage/removeBucketIamMember.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + function main( bucketName = 'my-bucket', roleName = 'roles/storage.objectViewer', @@ -41,47 +43,54 @@ function main( const storage = new Storage(); async function removeBucketIamMember() { - // Get a reference to a Google Cloud Storage bucket - const bucket = storage.bucket(bucketName); - - // For more information please read: - // https://cloud.google.com/storage/docs/access-control/iam - const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + try { + // Get a reference to a Google Cloud Storage bucket + const bucket = storage.bucket(bucketName); - // Finds and updates the appropriate role-member group, without a condition. 
- const index = policy.bindings.findIndex( - binding => binding.role === roleName && !binding.condition - ); + // For more information please read: + // https://cloud.google.com/storage/docs/access-control/iam + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); - const role = policy.bindings[index]; - if (role) { - role.members = role.members.filter( - member => members.indexOf(member) === -1 + // Finds and updates the appropriate role-member group, without a condition. + const index = policy.bindings.findIndex( + binding => binding.role === roleName && !binding.condition ); - // Updates the policy object with the new (or empty) role-member group - if (role.members.length === 0) { - policy.bindings.splice(index, 1); + const role = policy.bindings[index]; + if (role) { + role.members = role.members.filter( + member => members.indexOf(member) === -1 + ); + + // Updates the policy object with the new (or empty) role-member group + if (role.members.length === 0) { + policy.bindings.splice(index, 1); + } else { + policy.bindings.index = role; + } + + // Updates the bucket's IAM policy + await bucket.iam.setPolicy(policy); } else { - policy.bindings.index = role; + // No matching role-member group(s) were found + throw new Error('No matching role-member group(s) found.'); } - // Updates the bucket's IAM policy - await bucket.iam.setPolicy(policy); - } else { - // No matching role-member group(s) were found - throw new Error('No matching role-member group(s) found.'); + console.log( + `Removed the following member(s) with role ${roleName} from ${bucketName}:` + ); + members.forEach(member => { + console.log(` ${member}`); + }); + } catch (error) { + console.error( + 'Error executing remove bucket iam member:', + error.message || error + ); } - - console.log( - `Removed the following member(s) with role ${roleName} from ${bucketName}:` - ); - members.forEach(member => { - console.log(` ${member}`); - }); } - 
removeBucketIamMember().catch(console.error); + removeBucketIamMember(); // [END storage_remove_bucket_iam_member] } main(...process.argv.slice(2)); diff --git a/storage/viewBucketIamMembers.js b/storage/viewBucketIamMembers.js index 4f498ea449..e7af0e2607 100644 --- a/storage/viewBucketIamMembers.js +++ b/storage/viewBucketIamMembers.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + function main(bucketName = 'my-bucket') { // [START storage_view_bucket_iam_members] /** @@ -27,6 +29,7 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function viewBucketIamMembers() { + try{ // For more information please read: // https://cloud.google.com/storage/docs/access-control/iam const results = await storage @@ -53,9 +56,12 @@ function main(bucketName = 'my-bucket') { console.log(` Expression: ${condition.expression}`); } } + }catch(error){ + + } } - viewBucketIamMembers().catch(console.error); + viewBucketIamMembers(); // [END storage_view_bucket_iam_members] } main(...process.argv.slice(2)); From f220b4d1995175b6e9c31d67b4457137bc27b02f Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 22:30:51 +0000 Subject: [PATCH 10/11] refactor(notifications): standardize error handling, fix metadata destructuring bug, and align region tags --- storage/createNotification.js | 19 ++++++++--- storage/deleteNotification.js | 19 ++++++++--- storage/getMetadataNotifications.js | 38 ++++++++++++--------- storage/listNotifications.js | 25 ++++++++++---- storage/viewBucketIamMembers.js | 51 +++++++++++++++-------------- 5 files changed, 95 insertions(+), 57 deletions(-) diff --git a/storage/createNotification.js b/storage/createNotification.js index 97013ff534..447e0037c5 100644 --- a/storage/createNotification.js +++ b/storage/createNotification.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations 
under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -42,13 +44,20 @@ function main( const storage = new Storage(); async function createNotification() { - // Creates a notification - await storage.bucket(bucketName).createNotification(topic); - - console.log('Notification subscription created.'); + try { + // Creates a notification + await storage.bucket(bucketName).createNotification(topic); + + console.log('Notification subscription created.'); + } catch (error) { + console.error( + 'Error executing create notification:', + error.message || error + ); + } } - createNotification().catch(console.error); + createNotification(); // [END storage_create_bucket_notifications] } main(...process.argv.slice(2)); diff --git a/storage/deleteNotification.js b/storage/deleteNotification.js index 07db6b77a7..90ac6b18af 100644 --- a/storage/deleteNotification.js +++ b/storage/deleteNotification.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -38,13 +40,20 @@ function main(bucketName = 'my-bucket', notificationId = '1') { const storage = new Storage(); async function deleteNotification() { - // Deletes the notification from the bucket - await storage.bucket(bucketName).notification(notificationId).delete(); - - console.log(`Notification ${notificationId} deleted.`); + try { + // Deletes the notification from the bucket + await storage.bucket(bucketName).notification(notificationId).delete(); + + console.log(`Notification ${notificationId} deleted.`); + } catch (error) { + console.error( + 'Error executing delete notification:', + error.message || error + ); + } } - deleteNotification().catch(console.error); + deleteNotification(); // [END storage_delete_bucket_notification] } main(...process.argv.slice(2)); diff --git a/storage/getMetadataNotifications.js b/storage/getMetadataNotifications.js index 47801b79b4..c1719a840f 100644 --- a/storage/getMetadataNotifications.js +++ b/storage/getMetadataNotifications.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. 
@@ -38,24 +40,28 @@ function main(bucketName = 'my-bucket', notificationId = '1') { const storage = new Storage(); async function getMetadata() { - // Get the notification metadata - const [metadata] = await storage - .bucket(bucketName) - .notification(notificationId) - .getMetadata(); - - console.log(`ID: ${metadata.id}`); - console.log(`Topic: ${metadata.topic}`); - console.log(`Event Types: ${metadata.event_types}`); - console.log(`Custom Attributes: ${metadata.custom_attributes}`); - console.log(`Payload Format: ${metadata.payload_format}`); - console.log(`Object Name Prefix: ${metadata.object_name_prefix}`); - console.log(`Etag: ${metadata.etag}`); - console.log(`Self Link: ${metadata.selfLink}`); - console.log(`Kind: ${metadata.kind}`); + try { + // Get the notification metadata + const [metadata] = await storage + .bucket(bucketName) + .notification(notificationId) + .getMetadata(); + + console.log(`ID: ${metadata.id}`); + console.log(`Topic: ${metadata.topic}`); + console.log(`Event Types: ${metadata.event_types}`); + console.log(`Custom Attributes: ${metadata.custom_attributes}`); + console.log(`Payload Format: ${metadata.payload_format}`); + console.log(`Object Name Prefix: ${metadata.object_name_prefix}`); + console.log(`Etag: ${metadata.etag}`); + console.log(`Self Link: ${metadata.selfLink}`); + console.log(`Kind: ${metadata.kind}`); + } catch (error) { + console.error('Error executing get metadata:', error.message || error); + } } - getMetadata().catch(console.error); + getMetadata(); // [END storage_print_pubsub_bucket_notification] } main(...process.argv.slice(2)); diff --git a/storage/listNotifications.js b/storage/listNotifications.js index ba8a68e06f..c41a39a1b7 100644 --- a/storage/listNotifications.js +++ b/storage/listNotifications.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to perform basic operations on files with * the Google Cloud Storage API. @@ -35,16 +37,25 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function listNotifications() { - // Lists notifications in the bucket - const [notifications] = await storage.bucket(bucketName).getNotifications(); + try { + // Lists notifications in the bucket + const [notifications] = await storage + .bucket(bucketName) + .getNotifications(); - console.log('Notifications:'); - notifications.forEach(notification => { - console.log(notification.id); - }); + console.log('Notifications:'); + notifications.forEach(notification => { + console.log(notification.id); + }); + } catch (error) { + console.error( + 'Error executing list notifications:', + error.message || error + ); + } } - listNotifications().catch(console.error); + listNotifications(); // [END storage_list_bucket_notifications] } main(...process.argv.slice(2)); diff --git a/storage/viewBucketIamMembers.js b/storage/viewBucketIamMembers.js index e7af0e2607..21d0d8af23 100644 --- a/storage/viewBucketIamMembers.js +++ b/storage/viewBucketIamMembers.js @@ -29,36 +29,39 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function viewBucketIamMembers() { - try{ - // For more information please read: - // https://cloud.google.com/storage/docs/access-control/iam - const results = await storage - .bucket(bucketName) - .iam.getPolicy({requestedPolicyVersion: 3}); + try { + // For more information please read: + // https://cloud.google.com/storage/docs/access-control/iam + const results = await storage + .bucket(bucketName) + .iam.getPolicy({requestedPolicyVersion: 3}); - const bindings = results[0].bindings; + const bindings = results[0].bindings; - console.log(`Bindings for bucket ${bucketName}:`); - for (const binding of bindings) { - console.log(` Role: ${binding.role}`); - console.log(' Members:'); + console.log(`Bindings 
for bucket ${bucketName}:`); + for (const binding of bindings) { + console.log(` Role: ${binding.role}`); + console.log(' Members:'); - const members = binding.members; - for (const member of members) { - console.log(` ${member}`); - } + const members = binding.members; + for (const member of members) { + console.log(` ${member}`); + } - const condition = binding.condition; - if (condition) { - console.log(' Condition:'); - console.log(` Title: ${condition.title}`); - console.log(` Description: ${condition.description}`); - console.log(` Expression: ${condition.expression}`); + const condition = binding.condition; + if (condition) { + console.log(' Condition:'); + console.log(` Title: ${condition.title}`); + console.log(` Description: ${condition.description}`); + console.log(` Expression: ${condition.expression}`); + } } + } catch (error) { + console.error( + 'Error executing view bucket iam members:', + error.message || error + ); } - }catch(error){ - - } } viewBucketIamMembers(); From 1feea69cac5e7daf35e6b932d707f30dc8e5185c Mon Sep 17 00:00:00 2001 From: Angel Caamal Date: Fri, 13 Mar 2026 22:48:24 +0000 Subject: [PATCH 11/11] refactor(storage, quickstart, requesterpays): standardize error handling and set region tags --- storage/disableRequesterPays.js | 23 +++++++++++----- storage/downloadFileUsingRequesterPays.js | 29 +++++++++++++------- storage/enableRequesterPays.js | 19 +++++++++---- storage/getRequesterPaysStatus.js | 33 ++++++++++++++--------- storage/getServiceAccount.js | 21 ++++++++------- storage/quickstart.js | 12 ++++++--- storage/setClientEndpoint.js | 27 +++++++++++-------- 7 files changed, 106 insertions(+), 58 deletions(-) diff --git a/storage/disableRequesterPays.js b/storage/disableRequesterPays.js index c587615ed3..e051aa7fe5 100644 --- a/storage/disableRequesterPays.js +++ b/storage/disableRequesterPays.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+'use strict'; + /** * This application demonstrates how to perform basic operations on buckets with * the Google Cloud Storage API. @@ -36,15 +38,22 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function disableRequesterPays() { - // Disables requester-pays requests - await storage.bucket(bucketName).disableRequesterPays(); - - console.log( - `Requester-pays requests have been disabled for bucket ${bucketName}` - ); + try { + // Disables requester-pays requests + await storage.bucket(bucketName).disableRequesterPays(); + + console.log( + `Requester-pays requests have been disabled for bucket ${bucketName}` + ); + } catch (error) { + console.error( + 'Error executing disable requester pays:', + error.message || error + ); + } } - disableRequesterPays().catch(console.error); + disableRequesterPays(); // [END storage_disable_requester_pays] } main(...process.argv.slice(2)); diff --git a/storage/downloadFileUsingRequesterPays.js b/storage/downloadFileUsingRequesterPays.js index 8f449e075b..7e27c8fb06 100644 --- a/storage/downloadFileUsingRequesterPays.js +++ b/storage/downloadFileUsingRequesterPays.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on buckets with * the Google Cloud Storage API. 
@@ -53,20 +55,27 @@ function main( const storage = new Storage(); async function downloadFileUsingRequesterPays() { - const options = { - destination: destFileName, - userProject: projectId, - }; + try { + const options = { + destination: destFileName, + userProject: projectId, + }; - // Downloads the file - await storage.bucket(bucketName).file(srcFileName).download(options); + // Downloads the file + await storage.bucket(bucketName).file(srcFileName).download(options); - console.log( - `gs://${bucketName}/${srcFileName} downloaded to ${destFileName} using requester-pays requests` - ); + console.log( + `gs://${bucketName}/${srcFileName} downloaded to ${destFileName} using requester-pays requests` + ); + } catch (error) { + console.error( + 'Error executing download file using requester pays:', + error.message || error + ); + } } - downloadFileUsingRequesterPays().catch(console.error); + downloadFileUsingRequesterPays(); // [END storage_download_file_requester_pays] } main(...process.argv.slice(2)); diff --git a/storage/enableRequesterPays.js b/storage/enableRequesterPays.js index 4425a1a650..4763f75b92 100644 --- a/storage/enableRequesterPays.js +++ b/storage/enableRequesterPays.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on buckets with * the Google Cloud Storage API. 
@@ -35,14 +37,21 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function enableRequesterPays() { - await storage.bucket(bucketName).enableRequesterPays(); + try { + await storage.bucket(bucketName).enableRequesterPays(); - console.log( - `Requester-pays requests have been enabled for bucket ${bucketName}` - ); + console.log( + `Requester-pays requests have been enabled for bucket ${bucketName}` + ); + } catch (error) { + console.error( + 'Error executing enable requester pays:', + error.message || error + ); + } } - enableRequesterPays().catch(console.error); + enableRequesterPays(); // [END storage_enable_requester_pays] } main(...process.argv.slice(2)); diff --git a/storage/getRequesterPaysStatus.js b/storage/getRequesterPaysStatus.js index fea0ad4e75..bc530576e5 100644 --- a/storage/getRequesterPaysStatus.js +++ b/storage/getRequesterPaysStatus.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates how to perform basic operations on buckets with * the Google Cloud Storage API. 
@@ -35,21 +37,28 @@ function main(bucketName = 'my-bucket') { const storage = new Storage(); async function getRequesterPaysStatus() { - // Gets the requester-pays status of a bucket - const [metadata] = await storage.bucket(bucketName).getMetadata(); - - let status; - if (metadata && metadata.billing && metadata.billing.requesterPays) { - status = 'enabled'; - } else { - status = 'disabled'; + try { + // Gets the requester-pays status of a bucket + const [metadata] = await storage.bucket(bucketName).getMetadata(); + + let status; + if (metadata && metadata.billing && metadata.billing.requesterPays) { + status = 'enabled'; + } else { + status = 'disabled'; + } + console.log( + `Requester-pays requests are ${status} for bucket ${bucketName}.` + ); + } catch (error) { + console.error( + 'Error executing get requester pays status:', + error.message || error + ); } - console.log( - `Requester-pays requests are ${status} for bucket ${bucketName}.` - ); } - getRequesterPaysStatus().catch(console.error); + getRequesterPaysStatus(); // [END storage_get_requester_pays_status] } main(...process.argv.slice(2)); diff --git a/storage/getServiceAccount.js b/storage/getServiceAccount.js index 6d4a29ba32..e3acc72e43 100644 --- a/storage/getServiceAccount.js +++ b/storage/getServiceAccount.js @@ -36,17 +36,20 @@ function main(projectId = 'serviceAccountProjectId') { }); async function getServiceAccount() { - const [serviceAccount] = await storage.getServiceAccount(); - console.log( - `The GCS service account for project ${projectId} is: ${serviceAccount.emailAddress}` - ); + try { + const [serviceAccount] = await storage.getServiceAccount(); + console.log( + `The GCS service account for project ${projectId} is: ${serviceAccount.emailAddress}` + ); + } catch (error) { + console.error( + 'Error executing get service account:', + error.message || error + ); + } } - getServiceAccount().catch(console.error); + getServiceAccount(); // [END storage_get_service_account] } 
-process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); main(...process.argv.slice(2)); diff --git a/storage/quickstart.js b/storage/quickstart.js index 699924f852..6e329da9fa 100644 --- a/storage/quickstart.js +++ b/storage/quickstart.js @@ -35,12 +35,16 @@ function main(bucketName = 'my-new-bucket') { // const bucketName = 'your-unique-bucket-name'; async function createBucket() { - // Creates the new bucket - await storage.createBucket(bucketName); - console.log(`Bucket ${bucketName} created.`); + try { + // Creates the new bucket + await storage.createBucket(bucketName); + console.log(`Bucket ${bucketName} created.`); + } catch (error) { + console.error('Error executing create bucket:', error.message || error); + } } - createBucket().catch(console.error); + createBucket(); // [END storage_quickstart] } diff --git a/storage/setClientEndpoint.js b/storage/setClientEndpoint.js index 912d8ef77e..6d5ed079bf 100644 --- a/storage/setClientEndpoint.js +++ b/storage/setClientEndpoint.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +'use strict'; + /** * This application demonstrates set a custom endpoint with * the Google Cloud Storage API. 
@@ -30,19 +32,22 @@ function main(apiEndpoint = 'https://storage.googleapis.com') { // Imports the Google Cloud client library const {Storage} = require('@google-cloud/storage'); + try { + // Creates a client + const storage = new Storage({ + apiEndpoint: apiEndpoint, + useAuthWithCustomEndpoint: true, + }); - // Creates a client - const storage = new Storage({ - apiEndpoint: apiEndpoint, - useAuthWithCustomEndpoint: true, - }); - - console.log(`Client initiated with endpoint: ${storage.apiEndpoint}.`); + console.log(`Client initiated with endpoint: ${storage.apiEndpoint}.`); + } catch (error) { + console.error( + 'Error executing set client endpoint:', + error.message || error + ); + } // [END storage_set_client_endpoint] } -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); + main(...process.argv.slice(2));