Constructor
new Bucket(storage, name, optionsopt)
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
storage |
Storage |
A Storage instance. |
|||||||||
name |
string |
The name of the bucket. |
|||||||||
options |
object |
<optional> |
Configuration object. Properties
|
Members
acl
Cloud Storage uses access control lists (ACLs) to manage object and bucket access. ACLs are the mechanism you use to share objects with other users and allow other users to access your buckets and objects.
An ACL consists of one or more entries, where each entry grants permissions
to an entity. Permissions define the actions that can be performed against
an object or bucket (for example, READ
or WRITE
); the entity defines
who the permission applies to (for example, a specific user or group of
users).
The acl
object on a Bucket instance provides methods to get you a list of
the ACLs defined on your bucket, as well as set, update, and delete them.
Buckets also have default ACLs for all created files. Default ACLs specify permissions that all new objects added to the bucket will inherit by default. You can add, delete, get, and update entities and permissions for these as well with Bucket#acl.default.
Properties:
Name | Type | Description |
---|---|---|
default |
Acl |
Cloud Storage Buckets have
default ACLs
for all created files. You can add, delete, get, and update entities and
permissions for these as well. The method signatures and examples are all
the same, after only prefixing the method call with `default`. |
- Mixes In:
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
//-
// Make a bucket's contents publicly readable.
//-
const myBucket = storage.bucket('my-bucket');
const options = {
entity: 'allUsers',
role: storage.acl.READER_ROLE
};
myBucket.acl.add(options, function(err, aclObject) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
myBucket.acl.add(options).then(function(data) {
const aclObject = data[0];
const apiResponse = data[1];
});
```
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Prints every access-control entry defined on the bucket.
async function printBucketAcl() {
  // Fetch the bucket's ACL entries
  const [aclEntries] = await storage.bucket(bucketName).acl.get();
  for (const entry of aclEntries) {
    console.log(`${entry.role}: ${entry.entity}`);
  }
}
printBucketAcl().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The email address of the user to check
// const userEmail = 'user-email-to-check';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Prints the ACL entry for one specific user on the bucket.
async function printBucketAclForUser() {
  // Look up the ACL entry for the given user entity
  const [aclObject] = await storage
    .bucket(bucketName)
    .acl.get({entity: `user-${userEmail}`});
  console.log(`${aclObject.role}: ${aclObject.entity}`);
}
printBucketAclForUser().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The email address of the user to add
// const userEmail = 'user-email-to-add';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Grants a user the OWNER role on the bucket's ACL.
async function addBucketOwner() {
  // addUser() is one of several helpers: addAllUsers(), addDomain(),
  // addProject(), addGroup(), and addAllAuthenticatedUsers() target other
  // entity types, and the "readers"/"writers" collections grant other roles.
  const bucket = storage.bucket(bucketName);
  await bucket.acl.owners.addUser(userEmail);
  console.log(`Added user ${userEmail} as an owner on bucket ${bucketName}.`);
}
addBucketOwner().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The email address of the user to remove
// const userEmail = 'user-email-to-remove';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Revokes a user's OWNER entry from the bucket's ACL.
async function removeBucketOwner() {
  // deleteUser() is one of several helpers: deleteAllUsers(), deleteDomain(),
  // deleteProject(), deleteGroup(), and deleteAllAuthenticatedUsers() remove
  // access for other entity types.
  const bucket = storage.bucket(bucketName);
  await bucket.acl.owners.deleteUser(userEmail);
  console.log(`Removed user ${userEmail} from bucket ${bucketName}.`);
}
removeBucketOwner().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The email address of the user to add
// const userEmail = 'user-email-to-add';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Adds a user as an OWNER in the bucket's *default* ACL, which new
// objects inherit.
async function addBucketDefaultOwner() {
  // addUser() is one of several helpers: addAllUsers(), addDomain(),
  // addProject(), addGroup(), and addAllAuthenticatedUsers() target other
  // entity types, and the "readers"/"writers" collections grant other roles.
  const bucket = storage.bucket(bucketName);
  await bucket.acl.default.owners.addUser(userEmail);
  console.log(`Added user ${userEmail} as an owner on bucket ${bucketName}.`);
}
addBucketDefaultOwner().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The email address of the user to remove
// const userEmail = 'user-email-to-remove';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Removes a user's OWNER entry from the bucket's *default* ACL.
async function removeBucketDefaultOwner() {
  // deleteUser() is one of several helpers: deleteAllUsers(), deleteDomain(),
  // deleteProject(), deleteGroup(), and deleteAllAuthenticatedUsers() remove
  // access for other entity types.
  const bucket = storage.bucket(bucketName);
  await bucket.acl.default.owners.deleteUser(userEmail);
  console.log(`Removed user ${userEmail} from bucket ${bucketName}.`);
}
removeBucketDefaultOwner().catch(console.error);
cloudStorageURI
The bucket's Cloud Storage URI (`gs://`).
Example
```ts
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('my-bucket');
// `gs://my-bucket`
const href = bucket.cloudStorageURI.href;
```
crc32cGenerator
A function that generates a CRC32C Validator. Defaults to CRC32C
iam
Get and set IAM policies for your bucket.
- Mixes In:
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Get the IAM policy for your bucket.
//-
bucket.iam.getPolicy(function(err, policy) {
console.log(policy);
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.iam.getPolicy().then(function(data) {
const policy = data[0];
const apiResponse = data[1];
});
```
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Prints every IAM binding (role, members, optional condition) on the bucket.
async function viewBucketIamMembers() {
  // For more information please read:
  // https://cloud.google.com/storage/docs/access-control/iam
  const [policy] = await storage
    .bucket(bucketName)
    .iam.getPolicy({requestedPolicyVersion: 3});
  console.log(`Bindings for bucket ${bucketName}:`);
  for (const {role, members, condition} of policy.bindings) {
    console.log(` Role: ${role}`);
    console.log(' Members:');
    for (const member of members) {
      console.log(` ${member}`);
    }
    // Conditional bindings carry extra metadata; print it when present.
    if (condition) {
      console.log(' Condition:');
      console.log(` Title: ${condition.title}`);
      console.log(` Description: ${condition.description}`);
      console.log(` Expression: ${condition.expression}`);
    }
  }
}
viewBucketIamMembers().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The role to grant
// const roleName = 'roles/storage.objectViewer';
// The members to grant the new role to
// const members = [
//   'user:jdoe@example.com',
//   'group:admins@example.com',
// ];
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Appends a new role/members binding to the bucket's IAM policy.
async function addBucketIamMember() {
  // Get a reference to a Google Cloud Storage bucket
  const bucket = storage.bucket(bucketName);
  // For more information please read:
  // https://cloud.google.com/storage/docs/access-control/iam
  const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3});
  // Adds the new binding to the bucket's IAM policy
  policy.bindings.push({role: roleName, members});
  // Write the modified policy back to the bucket
  await bucket.iam.setPolicy(policy);
  console.log(
    `Added the following member(s) with role ${roleName} to ${bucketName}:`
  );
  for (const member of members) {
    console.log(` ${member}`);
  }
}
addBucketIamMember().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The role to revoke
// const roleName = 'roles/storage.objectViewer';
// The members to revoke the roles from
// const members = [
//   'user:jdoe@example.com',
//   'group:admins@example.com',
// ];
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Removes the given members from the unconditional binding for `roleName`
// in the bucket's IAM policy, deleting the binding entirely if it becomes
// empty.
async function removeBucketIamMember() {
  // Get a reference to a Google Cloud Storage bucket
  const bucket = storage.bucket(bucketName);
  // For more information please read:
  // https://cloud.google.com/storage/docs/access-control/iam
  const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3});
  // Finds the appropriate role-member group, without a condition.
  const index = policy.bindings.findIndex(
    binding => binding.role === roleName && !binding.condition
  );
  const role = policy.bindings[index];
  if (role) {
    // Keep only the members that are NOT being revoked.
    role.members = role.members.filter(member => !members.includes(member));
    // Updates the policy object with the new (or empty) role-member group.
    if (role.members.length === 0) {
      policy.bindings.splice(index, 1);
    } else {
      // BUGFIX: was `policy.bindings.index = role;`, which set a stray
      // `index` property on the array instead of replacing the binding.
      policy.bindings[index] = role;
    }
    // Updates the bucket's IAM policy
    await bucket.iam.setPolicy(policy);
  } else {
    // No matching role-member group(s) were found
    throw new Error('No matching role-member group(s) found.');
  }
  console.log(
    `Removed the following member(s) with role ${roleName} from ${bucketName}:`
  );
  members.forEach(member => {
    console.log(` ${member}`);
  });
}
removeBucketIamMember().catch(console.error);
metadata
The API-formatted resource description of the bucket.
Note: This is not guaranteed to be up-to-date when accessed. To get the
latest record, call the getMetadata()
method.
name
The bucket's name.
Methods
addLifecycleRule(rule, optionsopt, callbackopt) → {Promise.<SetBucketMetadataResponse>}
Add an object lifecycle management rule to the bucket.
By default, an Object Lifecycle Management rule provided to this method
will be included to the existing policy. To replace all existing rules,
supply the options
argument, setting append
to false
.
To add multiple rules, pass a list to the rule
parameter. Calling this
function multiple times asynchronously does not guarantee that all rules
are added correctly.
See: Object Lifecycle Management; Buckets: patch API Documentation.
Parameters:
Name | Type | Attributes | Description | ||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
rule |
LifecycleRule | Array.<LifecycleRule> |
The new lifecycle rule or rules to be added to objects in this bucket. Properties
|
|||||||||||||||||
options |
AddLifecycleRuleOptions |
<optional> |
Configuration object. Properties
|
||||||||||||||||
callback |
SetBucketMetadataCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<SetBucketMetadataResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Automatically have an object deleted from this bucket once it is 3 years
// of age.
//-
bucket.addLifecycleRule({
action: 'delete',
condition: {
age: 365 * 3 // Specified in days.
}
}, function(err, apiResponse) {
if (err) {
// Error handling omitted.
}
const lifecycleRules = bucket.metadata.lifecycle.rule;
// Iterate over the Object Lifecycle Management rules on this bucket.
lifecycleRules.forEach(lifecycleRule => {});
});
//-
// By default, the rule you provide will be added to the existing policy.
// Optionally, you can disable this behavior to replace all of the
// pre-existing rules.
//-
const options = {
append: false
};
bucket.addLifecycleRule({
action: 'delete',
condition: {
age: 365 * 3 // Specified in days.
}
}, options, function(err, apiResponse) {
if (err) {
// Error handling omitted.
}
// All rules have been replaced with the new "delete" rule.
// Iterate over the Object Lifecycle Management rules on this bucket.
lifecycleRules.forEach(lifecycleRule => {});
});
//-
// For objects created before 2018, "downgrade" the storage class.
//-
bucket.addLifecycleRule({
action: 'setStorageClass',
storageClass: 'COLDLINE',
condition: {
createdBefore: new Date('2018')
}
}, function(err, apiResponse) {});
//-
// Delete objects created before 2016 which have the Coldline storage
// class.
//-
bucket.addLifecycleRule({
action: 'delete',
condition: {
matchesStorageClass: [
'COLDLINE'
],
createdBefore: new Date('2016')
}
}, function(err, apiResponse) {});
//-
// Delete object that has a noncurrent timestamp that is at least 100 days.
//-
bucket.addLifecycleRule({
action: 'delete',
condition: {
daysSinceNoncurrentTime: 100
}
}, function(err, apiResponse) {});
//-
// Delete object that has a noncurrent timestamp before 2020-01-01.
//-
bucket.addLifecycleRule({
action: 'delete',
condition: {
noncurrentTimeBefore: new Date('2020-01-01')
}
}, function(err, apiResponse) {});
//-
// Delete object that has a customTime that is at least 100 days.
//-
bucket.addLifecycleRule({
action: 'delete',
condition: {
daysSinceCustomTime: 100
}
}, function(err, apiResponse) {});
//-
// Delete object that has a customTime before 2020-01-01.
//-
bucket.addLifecycleRule({
action: 'delete',
condition: {
customTimeBefore: new Date('2020-01-01')
}
}, function(err, apiResponse) {});
```
combine(sources, destination, optionsopt, callbackopt) → {Promise.<CombineResponse>}
Combine multiple files into one new file.
Parameters:
Name | Type | Attributes | Description | ||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
sources |
Array.<string> | Array.<File> |
The source files that will be combined. |
|||||||||||||
destination |
string | File |
The file you would like the source files combined into. |
|||||||||||||
options |
CombineOptions |
<optional> |
Configuration options. Properties
|
||||||||||||
callback |
CombineCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<CombineResponse> |
Throws:
-
-
if a non-array is provided as sources argument.
- Type
- Error
-
-
-
if no sources are provided.
- Type
- Error
-
-
-
if no destination is provided.
- Type
- Error
-
Example
```
const logBucket = storage.bucket('log-bucket');
const sources = [
logBucket.file('2013-logs.txt'),
logBucket.file('2014-logs.txt')
];
const allLogs = logBucket.file('all-logs.txt');
logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) {
// newFile === allLogs
});
//-
// If the callback is omitted, we'll return a Promise.
//-
logBucket.combine(sources, allLogs).then(function(data) {
const newFile = data[0];
const apiResponse = data[1];
});
```
create(metadataopt, callbackopt) → {Promise.<CreateBucketResponse>}
Create a bucket.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
metadata |
CreateBucketRequest |
<optional> |
Metadata to set for the bucket. |
callback |
CreateBucketCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<CreateBucketResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.create(function(err, bucket, apiResponse) {
if (!err) {
// The bucket was created successfully.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.create().then(function(data) {
const bucket = data[0];
const apiResponse = data[1];
});
```
createChannel(id, config, optionsopt, callbackopt) → {Promise.<CreateChannelResponse>}
Create a channel that will be notified when objects in this bucket changes.
Parameters:
Name | Type | Attributes | Description | |||||||||||||||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
id |
string |
The ID of the channel to create. |
||||||||||||||||||||||||||||||||||||||||||||||
config |
CreateChannelConfig |
Configuration for creating channel. Properties
|
||||||||||||||||||||||||||||||||||||||||||||||
options |
CreateChannelOptions |
<optional> |
Configuration options. Properties
|
|||||||||||||||||||||||||||||||||||||||||||||
callback |
CreateChannelCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<CreateChannelResponse> |
Throws:
-
-
If an ID is not provided.
- Type
- Error
-
-
-
If an address is not provided.
- Type
- Error
-
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
const id = 'new-channel-id';
const config = {
address: 'https://...'
};
bucket.createChannel(id, config, function(err, channel, apiResponse) {
if (!err) {
// Channel created successfully.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.createChannel(id, config).then(function(data) {
const channel = data[0];
const apiResponse = data[1];
});
```
createNotification(topic, optionsopt, callbackopt) → {Promise.<CreateNotificationResponse>}
Creates a notification subscription for the bucket.
Parameters:
Name | Type | Attributes | Description | ||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
topic |
Topic | string |
The Cloud PubSub topic to which this subscription publishes. If the project ID is omitted, the current project ID will be used. Acceptable formats are:
|
|||||||||||||||||||||||||
options |
CreateNotificationOptions |
<optional> |
Metadata to set for the notification. Properties
|
||||||||||||||||||||||||
callback |
CreateNotificationCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<CreateNotificationResponse> |
- See:
Throws:
-
If a valid topic is not provided.
- Type
- Error
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const callback = function(err, notification, apiResponse) {
if (!err) {
// The notification was created successfully.
}
};
myBucket.createNotification('my-topic', callback);
//-
// Configure the notification by providing Notification metadata.
//-
const metadata = {
objectNamePrefix: 'prefix-'
};
myBucket.createNotification('my-topic', metadata, callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
myBucket.createNotification('my-topic').then(function(data) {
const notification = data[0];
const apiResponse = data[1];
});
```
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The name of a topic
// const topic = 'my-topic';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Subscribes the given Pub/Sub topic to change notifications on the bucket.
async function createNotification() {
  // Creates a notification
  const bucket = storage.bucket(bucketName);
  await bucket.createNotification(topic);
  console.log('Notification subscription created.');
}
createNotification().catch(console.error);
delete(optionsopt, callbackopt) → {Promise.<DeleteBucketResponse>}
Delete the bucket.
Parameters:
Name | Type | Attributes | Description | |||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
options |
DeleteBucketOptions |
<optional> |
Configuration options. Properties
|
|||||||||||||||
callback |
DeleteBucketCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<DeleteBucketResponse> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.delete(function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.delete().then(function(data) {
const apiResponse = data[0];
});
```
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Deletes the bucket (the bucket must be empty for this to succeed).
async function deleteBucket() {
  const bucket = storage.bucket(bucketName);
  await bucket.delete();
  console.log(`Bucket ${bucketName} deleted`);
}
deleteBucket().catch(console.error);
deleteFiles(queryopt, callbackopt) → {Promise}
Iterate over the bucket's files, calling file.delete()
on each.
This is not an atomic request. A delete attempt will be made for each file individually. Any one can fail, in which case only a portion of the files you intended to delete would have been deleted.
Operations are performed in parallel, up to 10 at once. The first error
breaks the loop and will execute the provided callback with it. Specify
{ force: true }
to suppress the errors until all files have had a chance
to be processed.
File preconditions cannot be passed to this function. It will not retry unless the idempotency strategy is set to retry always.
The query
object passed as the first argument will also be passed to
Bucket#getFiles.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
query |
DeleteFilesOptions |
<optional> |
Query object. See Bucket#getFiles Properties
|
||||||||
callback |
DeleteFilesCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Delete all of the files in the bucket.
//-
bucket.deleteFiles(function(err) {});
//-
// By default, if a file cannot be deleted, this method will stop deleting
// files from your bucket. You can override this setting with `force:
// true`.
//-
bucket.deleteFiles({
force: true
}, function(errors) {
// `errors`:
// Array of errors if any occurred, otherwise null.
});
//-
// The first argument to this method acts as a query to
// Bucket#getFiles. As an example, you can delete files
// which match a prefix.
//-
bucket.deleteFiles({
prefix: 'images/'
}, function(err) {
if (!err) {
// All files in the `images` directory have been deleted.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.deleteFiles().then(function() {});
```
deleteLabels(labelsopt, callbackopt, optionsopt) → {Promise.<DeleteLabelsResponse>}
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
labels |
string | Array.<string> |
<optional> |
The labels to delete. If no labels are provided, all of the labels are removed. |
callback |
DeleteLabelsCallback |
<optional> |
Callback function. |
options |
DeleteLabelsOptions |
<optional> |
Options, including precondition options |
Returns:
Type | Description |
---|---|
Promise.<DeleteLabelsResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Delete all of the labels from this bucket.
//-
bucket.deleteLabels(function(err, apiResponse) {});
//-
// Delete a single label.
//-
bucket.deleteLabels('labelone', function(err, apiResponse) {});
//-
// Delete a specific set of labels.
//-
bucket.deleteLabels([
'labelone',
'labeltwo'
], function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.deleteLabels().then(function(data) {
const apiResponse = data[0];
});
```
disableRequesterPays(callbackopt, optionsopt) → {Promise.<DisableRequesterPaysCallback>}
This feature is not yet widely-available.
Disable requesterPays
functionality from this bucket.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
callback |
DisableRequesterPaysCallback |
<optional> |
Callback function. |
options |
DisableRequesterPaysOptions |
<optional> |
Options, including precondition options |
Returns:
Type | Description |
---|---|
Promise.<DisableRequesterPaysCallback> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.disableRequesterPays(function(err, apiResponse) {
if (!err) {
// requesterPays functionality disabled successfully.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.disableRequesterPays().then(function(data) {
const apiResponse = data[0];
});
```
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Turns off requester-pays billing for the bucket.
async function disableRequesterPays() {
  // Disables requester-pays requests
  const bucket = storage.bucket(bucketName);
  await bucket.disableRequesterPays();
  console.log(
    `Requester-pays requests have been disabled for bucket ${bucketName}`
  );
}
disableRequesterPays().catch(console.error);
enableLogging(config, callbackopt) → {Promise.<SetBucketMetadataResponse>}
Enable logging functionality for this bucket. This will make two API requests, first to grant Cloud Storage WRITE permission to the bucket, then to set the appropriate configuration on the Bucket's metadata.
Parameters:
Name | Type | Attributes | Description | ||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
config |
EnableLoggingOptions |
Configuration options. Properties
|
|||||||||||||
callback |
SetBucketMetadataCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<SetBucketMetadataResponse> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
const config = {
prefix: 'log'
};
bucket.enableLogging(config, function(err, apiResponse) {
if (!err) {
// Logging functionality enabled successfully.
}
});
```
Optionally, provide a destination bucket.
```
const config = {
prefix: 'log',
bucket: 'destination-bucket'
};
bucket.enableLogging(config, function(err, apiResponse) {});
```
If the callback is omitted, we'll return a Promise.
```
bucket.enableLogging(config).then(function(data) {
const apiResponse = data[0];
});
```
enableRequesterPays(optionsOrCallbackopt) → {Promise.<EnableRequesterPaysResponse>}
This feature is not yet widely-available.
Enable requesterPays
functionality for this bucket. This enables you, the
bucket owner, to have the requesting user assume the charges for the access
to your bucket and its contents.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
optionsOrCallback |
EnableRequesterPaysCallback | EnableRequesterPaysOptions |
<optional> |
Callback function or precondition options. |
Returns:
Type | Description |
---|---|
Promise.<EnableRequesterPaysResponse> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.enableRequesterPays(function(err, apiResponse) {
if (!err) {
// requesterPays functionality enabled successfully.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.enableRequesterPays().then(function(data) {
const apiResponse = data[0];
});
```
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Turns on requester-pays billing for the bucket, so requesters are
// charged for access.
async function enableRequesterPays() {
  const bucket = storage.bucket(bucketName);
  await bucket.enableRequesterPays();
  console.log(
    `Requester-pays requests have been enabled for bucket ${bucketName}`
  );
}
enableRequesterPays().catch(console.error);
exists(optionsopt, callbackopt) → {Promise.<BucketExistsResponse>}
Check if the bucket exists.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
BucketExistsOptions |
<optional> |
Configuration options. Properties
|
||||||||
callback |
BucketExistsCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<BucketExistsResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.exists(function(err, exists) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.exists().then(function(data) {
const exists = data[0];
});
```
file(name, optionsopt) → {File}
Parameters:
Name | Type | Attributes | Description | ||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
name |
string |
The name of the file in this bucket. |
|||||||||||||||||||||
options |
FileOptions |
<optional> |
Configuration options. Properties
|
Returns:
Type | Description |
---|---|
File |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
const file = bucket.file('my-existing-file.png');
```
get(optionsopt, callbackopt) → {Promise.<GetBucketResponse>}
Get a bucket if it exists.
You may optionally use this to "get or create" an object by providing
an object with autoCreate
set to true
. Any extra configuration that
is normally required for the create
method must be contained within
this object as well.
Parameters:
Name | Type | Attributes | Description | ||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
options |
GetBucketOptions |
<optional> |
Configuration options. Properties
|
||||||||||||
callback |
GetBucketCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<GetBucketResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.get(function(err, bucket, apiResponse) {
// `bucket.metadata` has been populated.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.get().then(function(data) {
const bucket = data[0];
const apiResponse = data[1];
});
```
getFiles(queryopt, callbackopt) → {Promise.<GetFilesResponse>}
Get File objects for the files currently in the bucket.
Parameters:
Name | Type | Attributes | Description | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
query |
GetFilesOptions |
<optional> |
Query object for listing files. Properties
|
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
callback |
GetFilesCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<GetFilesResponse> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.getFiles(function(err, files) {
if (!err) {
// files is an array of File objects.
}
});
//-
// If your bucket has versioning enabled, you can get all of your files
// scoped to their generation.
//-
bucket.getFiles({
versions: true
}, function(err, files) {
// Each file is scoped to its generation.
});
//-
// To control how many API requests are made and page through the results
// manually, set `autoPaginate` to `false`.
//-
const callback = function(err, files, nextQuery, apiResponse) {
if (nextQuery) {
// More results exist.
bucket.getFiles(nextQuery, callback);
}
// The `metadata` property is populated for you with the metadata at the
// time of fetching.
files[0].metadata;
// However, in cases where you are concerned the metadata could have
// changed, use the `getMetadata` method.
files[0].getMetadata(function(err, metadata) {});
};
bucket.getFiles({
autoPaginate: false
}, callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.getFiles().then(function(data) {
const files = data[0];
});
```
<h6>Simulating a File System</h6><p>With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.</p><p>Consider the following remote objects:</p><ol><li>"a"</li><li>"a/b/c/d"</li><li>"b/d/e"</li></ol><p>Using a delimiter of `/` will return a single file, "a".</p><p>`apiResponse.prefixes` will return the "sub-directories" that were found:</p><ol><li>"a/"</li><li>"b/"</li></ol>
```
bucket.getFiles({
autoPaginate: false,
delimiter: '/'
}, function(err, files, nextQuery, apiResponse) {
// files = [
// {File} // File object for file "a"
// ]
// apiResponse.prefixes = [
// 'a/',
// 'b/'
// ]
});
```
Using prefixes, it's now possible to simulate a file system with follow-up requests.
```
bucket.getFiles({
autoPaginate: false,
delimiter: '/',
prefix: 'a/'
}, function(err, files, nextQuery, apiResponse) {
// No files found within "directory" a.
// files = []
// However, a "sub-directory" was found.
// This prefix can be used to continue traversing the "file system".
// apiResponse.prefixes = [
// 'a/b/'
// ]
});
```
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function listFiles() {
// Lists files in the bucket
const [files] = await storage.bucket(bucketName).getFiles();
console.log('Files:');
files.forEach(file => {
console.log(file.name);
});
}
listFiles().catch(console.error);
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The directory prefix to search for
// const prefix = 'myDirectory/';
// The delimiter to use
// const delimiter = '/';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function listFilesByPrefix() {
/**
* This can be used to list all blobs in a "folder", e.g. "public/".
*
* The delimiter argument can be used to restrict the results to only the
* "files" in the given "folder". Without the delimiter, the entire tree under
* the prefix is returned. For example, given these blobs:
*
* /a/1.txt
* /a/b/2.txt
*
* If you just specify prefix = 'a/', you'll get back:
*
* /a/1.txt
* /a/b/2.txt
*
* However, if you specify prefix='a/' and delimiter='/', you'll get back:
*
* /a/1.txt
*/
const options = {
prefix: prefix,
};
if (delimiter) {
options.delimiter = delimiter;
}
// Lists files in the bucket, filtered by a prefix
const [files] = await storage.bucket(bucketName).getFiles(options);
console.log('Files:');
files.forEach(file => {
console.log(file.name);
});
}
listFilesByPrefix().catch(console.error);
getFilesStream(queryopt) → {ReadableStream}
Get File objects for the files currently in the bucket as a readable object stream.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
query |
GetFilesOptions |
<optional> |
Query object for listing files. |
Returns:
Type | Description |
---|---|
ReadableStream |
A readable stream that emits File instances. |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.getFilesStream()
.on('error', console.error)
.on('data', function(file) {
// file is a File object.
})
.on('end', function() {
// All files retrieved.
});
//-
// If you anticipate many results, you can end a stream early to prevent
// unnecessary processing and API requests.
//-
bucket.getFilesStream()
.on('data', function(file) {
this.end();
});
//-
// If you're filtering files with a delimiter, you should use
// Bucket#getFiles and set `autoPaginate: false` in order to
// preserve the `apiResponse` argument.
//-
let prefixes = [];
function callback(err, files, nextQuery, apiResponse) {
prefixes = prefixes.concat(apiResponse.prefixes);
if (nextQuery) {
bucket.getFiles(nextQuery, callback);
} else {
// prefixes = The finished array of prefixes.
}
}
bucket.getFiles({
autoPaginate: false,
delimiter: '/'
}, callback);
```
getLabels(optionsopt, callbackopt) → {Promise.<GetLabelsResponse>}
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
object |
<optional> |
Configuration options. Properties
|
||||||||
callback |
GetLabelsCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<GetLabelsResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.getLabels(function(err, labels) {
if (err) {
// Error handling omitted.
}
// labels = {
// label: 'labelValue',
// ...
// }
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.getLabels().then(function(data) {
const labels = data[0];
});
```
getMetadata(optionsopt, callbackopt) → {Promise.<GetBucketMetadataResponse>}
Get the bucket's metadata.
To set metadata, see Bucket#setMetadata.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
GetBucketMetadataOptions |
<optional> |
Configuration options. Properties
|
||||||||
callback |
GetBucketMetadataCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<GetBucketMetadataResponse> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.getMetadata(function(err, metadata, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.getMetadata().then(function(data) {
const metadata = data[0];
const apiResponse = data[1];
});
```
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function getRequesterPaysStatus() {
// Gets the requester-pays status of a bucket
const [metadata] = await storage.bucket(bucketName).getMetadata();
let status;
if (metadata && metadata.billing && metadata.billing.requesterPays) {
status = 'enabled';
} else {
status = 'disabled';
}
console.log(
`Requester-pays requests are ${status} for bucket ${bucketName}.`
);
}
getRequesterPaysStatus().catch(console.error);
getNotifications(optionsopt, callbackopt) → {Promise.<GetNotificationsResponse>}
Retrieves a list of notification subscriptions for a given bucket.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
GetNotificationsOptions |
<optional> |
Configuration options. Properties
|
||||||||
callback |
GetNotificationsCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<GetNotificationsResponse> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('my-bucket');
bucket.getNotifications(function(err, notifications, apiResponse) {
if (!err) {
// notifications is an array of Notification objects.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.getNotifications().then(function(data) {
const notifications = data[0];
const apiResponse = data[1];
});
```
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function listNotifications() {
// Lists notifications in the bucket
const [notifications] = await storage.bucket(bucketName).getNotifications();
console.log('Notifications:');
notifications.forEach(notification => {
console.log(notification.id);
});
}
listNotifications().catch(console.error);
getSignedUrl(config, callbackopt) → {Promise.<GetSignedUrlResponse>}
Get a signed URL to allow limited time access to a bucket.
In Google Cloud Platform environments, such as Cloud Functions and App
Engine, you usually don't provide a keyFilename
or credentials
during
instantiation. In those environments, we call the
signBlob API
to create a signed URL. That API requires either the
https://www.googleapis.com/auth/iam
or
https://www.googleapis.com/auth/cloud-platform
scope, so be sure they are
enabled.
Parameters:
Name | Type | Attributes | Description | |||||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
config |
GetBucketSignedUrlConfig |
Configuration object. Properties
|
||||||||||||||||||||||||||||||||||||
callback |
GetSignedUrlCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<GetSignedUrlResponse> |
Properties:
Name | Type | Attributes | Description |
---|---|---|---|
config.queryParams |
object |
<optional> |
Additional query parameters to include in the signed URL. |
Throws:
-
if an expiration timestamp from the past is given.
- Type
- Error
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('my-bucket');
//-
// Generate a URL that allows temporary access to list files in a bucket.
//-
const request = require('request');
const config = {
action: 'list',
expires: '03-17-2025'
};
bucket.getSignedUrl(config, function(err, url) {
if (err) {
console.error(err);
return;
}
// The bucket is now available to be listed from this URL.
request(url, function(err, resp) {
// resp.statusCode = 200
});
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.getSignedUrl(config).then(function(data) {
const url = data[0];
});
```
lock(metageneration, callbackopt) → {Promise.<BucketLockResponse>}
Lock a previously-defined retention policy. This will prevent changes to the policy.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
metageneration |
number | string |
The bucket's metageneration. This is accessible from calling Bucket#getMetadata. |
|
callback |
BucketLockCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<BucketLockResponse> |
Throws:
-
if a metageneration is not provided.
- Type
- Error
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
const metageneration = 2;
bucket.lock(metageneration, function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.lock(metageneration).then(function(data) {
const apiResponse = data[0];
});
```
makePrivate(optionsopt, callbackopt) → {Promise.<MakeBucketPrivateResponse>}
Make the bucket listing private.
You may also choose to make the contents of the bucket private by
specifying includeFiles: true
. This will automatically run
File#makePrivate for every file in the bucket.
When specifying includeFiles: true
, use force: true
to delay execution
of your callback until all files have been processed. By default, the
callback is executed after the first error. Use force
to queue such
errors until all files have been processed, after which they will be
returned as an array as the first argument to your callback.
NOTE: This may cause the process to be long-running and use a high number of requests. Use with caution.
Parameters:
Name | Type | Attributes | Description | |||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
options |
MakeBucketPrivateOptions |
<optional> |
Configuration options. Properties
|
|||||||||||||||||||||||||
callback |
MakeBucketPrivateCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<MakeBucketPrivateResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Make the bucket private.
//-
bucket.makePrivate(function(err) {});
//-
// Make the bucket and its contents private.
//-
const opts = {
includeFiles: true
};
bucket.makePrivate(opts, function(err, files) {
// `err`:
// The first error to occur, otherwise null.
//
// `files`:
// Array of files successfully made private in the bucket.
});
//-
// Make the bucket and its contents private, using force to suppress errors
// until all files have been processed.
//-
const opts = {
includeFiles: true,
force: true
};
bucket.makePrivate(opts, function(errors, files) {
// `errors`:
// Array of errors if any occurred, otherwise null.
//
// `files`:
// Array of files successfully made private in the bucket.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.makePrivate(opts).then(function(data) {
const files = data[0];
});
```
makePublic(optionsopt, callbackopt) → {Promise.<MakeBucketPublicResponse>}
Make the bucket publicly readable.
You may also choose to make the contents of the bucket publicly readable by
specifying includeFiles: true
. This will automatically run
File#makePublic for every file in the bucket.
When specifying includeFiles: true
, use force: true
to delay execution
of your callback until all files have been processed. By default, the
callback is executed after the first error. Use force
to queue such
errors until all files have been processed, after which they will be
returned as an array as the first argument to your callback.
NOTE: This may cause the process to be long-running and use a high number of requests. Use with caution.
Parameters:
Name | Type | Attributes | Description | |||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
options |
MakeBucketPublicOptions |
<optional> |
Configuration options. Properties
|
|||||||||||||||
callback |
MakeBucketPublicCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<MakeBucketPublicResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Make the bucket publicly readable.
//-
bucket.makePublic(function(err) {});
//-
// Make the bucket and its contents publicly readable.
//-
const opts = {
includeFiles: true
};
bucket.makePublic(opts, function(err, files) {
// `err`:
// The first error to occur, otherwise null.
//
// `files`:
// Array of files successfully made public in the bucket.
});
//-
// Make the bucket and its contents publicly readable, using force to
// suppress errors until all files have been processed.
//-
const opts = {
includeFiles: true,
force: true
};
bucket.makePublic(opts, function(errors, files) {
// `errors`:
// Array of errors if any occurred, otherwise null.
//
// `files`:
// Array of files successfully made public in the bucket.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.makePublic(opts).then(function(data) {
const files = data[0];
});
```
notification(id) → {Notification}
Get a reference to a Cloud Pub/Sub Notification.
Parameters:
Name | Type | Description |
---|---|---|
id |
string |
ID of notification. |
Returns:
Type | Description |
---|---|
Notification |
- See:
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('my-bucket');
const notification = bucket.notification('1');
```
removeRetentionPeriod(callbackopt, optionsopt) → {Promise.<SetBucketMetadataResponse>}
Remove an already-existing retention policy from this bucket, if it is not locked.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
callback |
SetBucketMetadataCallback |
<optional> |
Callback function. |
options |
SetBucketMetadataOptions |
<optional> |
Options, including precondition options |
Returns:
Type | Description |
---|---|
Promise.<SetBucketMetadataResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.removeRetentionPeriod(function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.removeRetentionPeriod().then(function(data) {
const apiResponse = data[0];
});
```
setCorsConfiguration(corsConfiguration, callbackopt, optionsopt) → {Promise.<SetBucketMetadataResponse>}
This can be used to set the CORS configuration on the bucket.
The configuration will be overwritten with the value passed into this.
Parameters:
Name | Type | Attributes | Description | ||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
corsConfiguration |
Array.<Cors> |
The new CORS configuration to set Properties
|
|||||||||||||||||||||
callback |
SetBucketMetadataCallback |
<optional> |
Callback function. |
||||||||||||||||||||
options |
SetBucketMetadataOptions |
<optional> |
Options, including precondition options. |
Returns:
Type | Description |
---|---|
Promise.<SetBucketMetadataResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour
bucket.setCorsConfiguration(corsConfiguration);
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.setCorsConfiguration(corsConfiguration).then(function(data) {
const apiResponse = data[0];
});
```
setLabels(labels, optionsopt, callbackopt) → {Promise.<SetLabelsResponse>}
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
labels |
object.<string, string> |
Labels to set on the bucket. |
|||||||||
options |
SetLabelsOptions |
<optional> |
Configuration options. Properties
|
||||||||
callback |
SetLabelsCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<SetLabelsResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
const labels = {
labelone: 'labelonevalue',
labeltwo: 'labeltwovalue'
};
bucket.setLabels(labels, function(err, metadata) {
if (!err) {
// Labels set successfully.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.setLabels(labels).then(function(data) {
const metadata = data[0];
});
```
setMetadata(metadata, optionsopt, callbackopt) → {Promise.<SetBucketMetadataResponse>}
Set the bucket's metadata.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
metadata |
object.<string, *> |
The metadata you wish to set. |
|||||||||
options |
SetBucketMetadataOptions |
<optional> |
Configuration options. Properties
|
||||||||
callback |
SetBucketMetadataCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<SetBucketMetadataResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Set website metadata field on the bucket.
//-
const metadata = {
website: {
mainPageSuffix: 'http://example.com',
notFoundPage: 'http://example.com/404.html'
}
};
bucket.setMetadata(metadata, function(err, apiResponse) {});
//-
// Enable versioning for your bucket.
//-
bucket.setMetadata({
versioning: {
enabled: true
}
}, function(err, apiResponse) {});
//-
// Enable KMS encryption for objects within this bucket.
//-
bucket.setMetadata({
encryption: {
defaultKmsKeyName: 'projects/grape-spaceship-123/...'
}
}, function(err, apiResponse) {});
//-
// Set the default event-based hold value for new objects in this
// bucket.
//-
bucket.setMetadata({
defaultEventBasedHold: true
}, function(err, apiResponse) {});
//-
// Remove object lifecycle rules.
//-
bucket.setMetadata({
lifecycle: null
}, function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.setMetadata(metadata).then(function(data) {
const apiResponse = data[0];
});
```
setRetentionPeriod(duration, callbackopt, optionsopt) → {Promise.<SetBucketMetadataResponse>}
Lock all objects contained in the bucket, based on their creation time. Any
attempt to overwrite or delete objects younger than the retention period
will result in a PERMISSION_DENIED
error.
An unlocked retention policy can be modified or removed from the bucket via
Bucket#removeRetentionPeriod and Bucket#setRetentionPeriod. A
locked retention policy cannot be removed or shortened in duration for the
lifetime of the bucket. Attempting to remove or decrease the period of a locked
retention policy will result in a PERMISSION_DENIED
error. You can still
increase the policy.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
duration |
* |
In seconds, the minimum retention time for all objects contained in this bucket. |
|
callback |
SetBucketMetadataCallback |
<optional> |
Callback function. |
options |
SetBucketMetadataOptions |
<optional> |
Options, including precondition options. |
Returns:
Type | Description |
---|---|
Promise.<SetBucketMetadataResponse> |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
const DURATION_SECONDS = 15780000; // 6 months.
//-
// Lock the objects in this bucket for 6 months.
//-
bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) {
const apiResponse = data[0];
});
```
setStorageClass(storageClass, optionsopt, callbackopt) → {Promise}
Set the default storage class for new files in this bucket.
See Storage Classes
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
storageClass |
string |
The new storage class. ( |
|||||||||
options |
object |
<optional> |
Configuration options. Properties
|
||||||||
callback |
SetStorageClassCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.setStorageClass('nearline', function(err, apiResponse) {
if (err) {
// Error handling omitted.
}
// The storage class was updated successfully.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.setStorageClass('nearline').then(function() {});
```
setUserProject(userProject)
Set a user project to be billed for all requests made from this Bucket object and any files referenced from this Bucket object.
Parameters:
Name | Type | Description |
---|---|---|
userProject |
string |
The user project. |
Example
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
bucket.setUserProject('grape-spaceship-123');
```
upload(pathString, optionsopt, callbackopt) → {Promise.<UploadResponse>}
Upload a file to the bucket. This is a convenience method that wraps File#createWriteStream.
Resumable uploads are enabled by default
See Upload Options (Simple or Resumable) See Objects: insert API Documentation
Parameters:
Name | Type | Attributes | Description | |||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
pathString |
string |
The fully qualified path to the file you wish to upload to your bucket. |
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
options |
UploadOptions |
<optional> |
Configuration options. Properties
|
|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
callback |
UploadCallback |
<optional> |
Callback function. |
Returns:
Type | Description |
---|---|
Promise.<UploadResponse> |
Examples
```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums');
//-
// Upload a file from a local path.
//-
bucket.upload('/local/path/image.png', function(err, file, apiResponse) {
// Your bucket now contains:
// - "image.png" (with the contents of `/local/path/image.png')
// `file` is an instance of a File object that refers to your new file.
});
//-
// It's not always that easy. You will likely want to specify the filename
// used when your new file lands in your bucket.
//
// You may also want to set metadata or customize other options.
//-
const options = {
destination: 'new-image.png',
validation: 'crc32c',
metadata: {
metadata: {
event: 'Fall trip to the zoo'
}
}
};
bucket.upload('local-image.png', options, function(err, file) {
// Your bucket now contains:
// - "new-image.png" (with the contents of `local-image.png')
// `file` is an instance of a File object that refers to your new file.
});
//-
// You can also have a file gzip'd on the fly.
//-
bucket.upload('index.html', { gzip: true }, function(err, file) {
// Your bucket now contains:
// - "index.html" (automatically compressed with gzip)
// Downloading the file with `file.download` will automatically decode the
// file.
});
//-
// You may also re-use a File object, {File}, that references
// the file you wish to create or overwrite.
//-
const options = {
destination: bucket.file('existing-file.png'),
resumable: false
};
bucket.upload('local-img.png', options, function(err, newFile) {
// Your bucket now contains:
// - "existing-file.png" (with the contents of `local-img.png')
// Note:
// The `newFile` parameter is equal to `file`.
});
//-
// To use
// <a href="https://cloud.google.com/storage/docs/encryption#customer-supplied">
// Customer-supplied Encryption Keys</a>, provide the `encryptionKey`
// option.
//-
const crypto = require('crypto');
const encryptionKey = crypto.randomBytes(32);
bucket.upload('img.png', {
encryptionKey: encryptionKey
}, function(err, newFile) {
// `img.png` was uploaded with your custom encryption key.
// `newFile` is already configured to use the encryption key when making
// operations on the remote object.
// However, to use your encryption key later, you must create a `File`
// instance with the `key` supplied:
const file = bucket.file('img.png', {
encryptionKey: encryptionKey
});
// Or with `file#setEncryptionKey`:
const file = bucket.file('img.png');
file.setEncryptionKey(encryptionKey);
});
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.upload('local-image.png').then(function(data) {
const file = data[0];
});
```
To upload a file from a URL, use File#createWriteStream.
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The path to your file to upload
// const filePath = 'path/to/your/file';
// The new ID for your GCS file
// const destFileName = 'your-new-file-name';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function uploadFile() {
const options = {
destination: destFileName,
// Optional:
// Set a generation-match precondition to avoid potential race conditions
// and data corruptions. The request to upload is aborted if the object's
// generation number does not match your precondition. For a destination
// object that does not yet exist, set the ifGenerationMatch precondition to 0
// If the destination object already exists in your bucket, set instead a
// generation-match precondition using its generation number.
preconditionOpts: {ifGenerationMatch: generationMatchPrecondition},
};
await storage.bucket(bucketName).upload(filePath, options);
console.log(`${filePath} uploaded to ${bucketName}`);
}
uploadFile().catch(console.error);
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// The ID of your GCS bucket
// const bucketName = 'your-unique-bucket-name';
// The path to your file to upload
// const filePath = 'path/to/your/file';
// The new ID for your GCS file
// const destFileName = 'your-new-file-name';
// The key to encrypt the object with
// const key = 'TIbv/fjexq+VmtXzAlc63J4z5kFmWJ6NdAPQulQBT7g=';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function uploadEncryptedFile() {
const options = {
destination: destFileName,
encryptionKey: Buffer.from(key, 'base64'),
// Optional:
// Set a generation-match precondition to avoid potential race conditions
// and data corruptions. The request to upload is aborted if the object's
// generation number does not match your precondition. For a destination
// object that does not yet exist, set the ifGenerationMatch precondition to 0
// If the destination object already exists in your bucket, set instead a
// generation-match precondition using its generation number.
preconditionOpts: {ifGenerationMatch: generationMatchPrecondition},
};
await storage.bucket(bucketName).upload(filePath, options);
console.log(
`File ${filePath} uploaded to gs://${bucketName}/${destFileName}`
);
}
uploadEncryptedFile().catch(console.error);