Constructor
new File(bucket, name, optionsopt)
Constructs a file object.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
bucket |
Bucket |
The Bucket instance this file is attached to. |
|
name |
string |
The name of the remote file. |
|
options |
FileOptions |
<optional> |
Configuration options. |
Methods
copy(destination, optionsopt, callbackopt) → {Promise.<CopyResponse>}
Copy this file to another file. By default, this will copy the file to the same bucket, but you can choose to copy it to another Bucket by providing a Bucket or File object or a URL starting with "gs://".
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
destination |
string | Bucket | File |
Destination file. |
|
options |
CopyOptions |
<optional> |
Configuration options. |
callback |
CopyCallback |
<optional> |
Callback function. |
Throws:
-
If the destination file is not provided.
- Type
- Error
Examples
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
//-
// You can pass in a variety of types for the destination.
//
// For all of the below examples, assume we are working with the following
// Bucket and File objects.
//-
const bucket = storage.bucket('my-bucket');
const file = bucket.file('my-image.png');
//-
// If you pass in a string for the destination, the file is copied to its
// current bucket, under the new name provided.
//-
file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) {
// `my-bucket` now contains:
// - "my-image.png"
// - "my-image-copy.png"
// `copiedFile` is an instance of a File object that refers to your new
// file.
});
//-
// If you pass in a string starting with "gs://" for the destination, the
// file is copied to the other bucket and under the new name provided.
//-
const newLocation = 'gs://another-bucket/my-image-copy.png';
file.copy(newLocation, function(err, copiedFile, apiResponse) {
// `my-bucket` still contains:
// - "my-image.png"
//
// `another-bucket` now contains:
// - "my-image-copy.png"
// `copiedFile` is an instance of a File object that refers to your new
// file.
});
//-
// If you pass in a Bucket object, the file will be copied to that bucket
// using the same name.
//-
const anotherBucket = storage.bucket('another-bucket');
file.copy(anotherBucket, function(err, copiedFile, apiResponse) {
// `my-bucket` still contains:
// - "my-image.png"
//
// `another-bucket` now contains:
// - "my-image.png"
// `copiedFile` is an instance of a File object that refers to your new
// file.
});
//-
// If you pass in a File object, you have complete control over the new
// bucket and filename.
//-
const anotherFile = anotherBucket.file('my-awesome-image.png');
file.copy(anotherFile, function(err, copiedFile, apiResponse) {
// `my-bucket` still contains:
// - "my-image.png"
//
// `another-bucket` now contains:
// - "my-awesome-image.png"
// Note:
// The `copiedFile` parameter is equal to `anotherFile`.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.copy(newLocation).then(function(data) {
const newFile = data[0];
const apiResponse = data[1];
});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const srcBucketName = 'Name of the source bucket, e.g. my-bucket';
// const srcFilename = 'Name of the source file, e.g. file.txt';
// const destBucketName = 'Name of the destination bucket, e.g. my-other-bucket';
// const destFilename = 'Destination name of file, e.g. file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Copies gs://<srcBucketName>/<srcFilename> to
// gs://<destBucketName>/<destFilename>.
async function copyFile() {
  const source = storage.bucket(srcBucketName).file(srcFilename);
  const destination = storage.bucket(destBucketName).file(destFilename);
  await source.copy(destination);
  console.log(
    `gs://${srcBucketName}/${srcFilename} copied to gs://${destBucketName}/${destFilename}.`
  );
}
copyFile().catch(console.error);
createReadStream(optionsopt) → {ReadableStream}
Create a readable stream to read the contents of the remote file. It can be piped to a writable stream or listened to for 'data' events to read a file's contents.
In the unlikely event there is a mismatch between what you downloaded and the version in your Bucket, your error handler will receive an error with code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best recourse is to try downloading the file again.
For faster crc32c computation, you must manually install
fast-crc32c
:
$ npm install --save fast-crc32c
NOTE: Readable streams will emit the end
event when the file is fully
downloaded.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
options |
CreateReadStreamOptions |
<optional> |
Configuration options. |
Example
//-
// <h4>Downloading a File</h4>
//
// The example below demonstrates how we can reference a remote file, then
// pipe its contents to a local file. This is effectively creating a local
// backup of your remote data.
//-
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('my-bucket');
const fs = require('fs');
const remoteFile = bucket.file('image.png');
const localFilename = '/Users/stephen/Photos/image.png';
remoteFile.createReadStream()
.on('error', function(err) {})
.on('response', function(response) {
// Server connected and responded with the specified status and headers.
})
.on('end', function() {
// The file is fully downloaded.
})
.pipe(fs.createWriteStream(localFilename));
//-
// To limit the downloaded data to only a byte range, pass an options
// object.
//-
const logFile = myBucket.file('access_log');
logFile.createReadStream({
start: 10000,
end: 20000
})
.on('error', function(err) {})
.pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
//-
// To read a tail byte range, specify only `options.end` as a negative
// number.
//-
const logFile = myBucket.file('access_log');
logFile.createReadStream({
end: -100
})
.on('error', function(err) {})
.pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
createResumableUpload(optionsopt, callbackopt) → {Promise.<CreateResumableUploadResponse>}
Create a unique resumable upload session URI. This is the first step when performing a resumable upload.
See the Resumable upload guide for more on how the entire process works.
Note
If you are just looking to perform a resumable upload without worrying about any of the details, see File#createWriteStream. Resumable uploads are performed by default.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
options |
CreateResumableUploadOptions |
<optional> |
Configuration options. |
callback |
CreateResumableUploadCallback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
file.createResumableUpload(function(err, uri) {
if (!err) {
// `uri` can be used to PUT data to.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.createResumableUpload().then(function(data) {
const uri = data[0];
});
createWriteStream(optionsopt) → {WritableStream}
Create a writable stream to overwrite the contents of the file in your bucket.
A File object can also be used to create files for the first time.
Resumable uploads are automatically enabled and must be shut off explicitly
by setting options.resumable
to false
.
Resumable uploads require write access to the $HOME directory. Through
config-store
, some metadata
is stored. By default, if the directory is not writable, we will fall back
to a simple upload. However, if you explicitly request a resumable upload,
and we cannot write to the config directory, we will return a
ResumableUploadError
.
There is some overhead when using a resumable upload that can cause noticeable performance degradation while uploading a series of small files. When uploading files less than 10MB, it is recommended that the resumable feature is disabled.
For faster crc32c computation, you must manually install
fast-crc32c
:
$ npm install --save fast-crc32c
NOTE: Writable streams will emit the finish
event when the file is fully
uploaded.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
options |
CreateWriteStreamOptions |
<optional> |
Configuration options. |
Example
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
//-
// <h4>Uploading a File</h4>
//
// Now, consider a case where we want to upload a file to your bucket. You
// have the option of using Bucket#upload, but that is just
// a convenience method which will do the following.
//-
fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
.pipe(file.createWriteStream())
.on('error', function(err) {})
.on('finish', function() {
// The file upload is complete.
});
//-
// <h4>Uploading a File with gzip compression</h4>
//-
fs.createReadStream('/Users/stephen/site/index.html')
.pipe(file.createWriteStream({ gzip: true }))
.on('error', function(err) {})
.on('finish', function() {
// The file upload is complete.
});
//-
// Downloading the file with `createReadStream` will automatically decode
// the file.
//-
//-
// <h4>Uploading a File with Metadata</h4>
//
// One last case you may run into is when you want to upload a file to your
// bucket and set its metadata at the same time. Like above, you can use
// Bucket#upload to do this, which is just a wrapper around
// the following.
//-
fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
.pipe(file.createWriteStream({
metadata: {
contentType: 'image/jpeg',
metadata: {
custom: 'metadata'
}
}
}))
.on('error', function(err) {})
.on('finish', function() {
// The file upload is complete.
});
delete(optionsopt, callbackopt) → {Promise.<DeleteFileResponse>}
Delete the file.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
object |
<optional> |
Configuration options. Properties
|
||||||||
callback |
DeleteFileCallback |
<optional> |
Callback function. |
Examples
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
file.delete(function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.delete().then(function(data) {
const apiResponse = data[0];
});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const filename = 'File to delete, e.g. file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Deletes gs://<bucketName>/<filename> and logs the result.
async function deleteFile() {
  // Deletes the file from the bucket
  await storage.bucket(bucketName).file(filename).delete();
  // Fix: the log line previously contained the mangled text `$(unknown)`
  // instead of the `${filename}` interpolation.
  console.log(`gs://${bucketName}/${filename} deleted.`);
}
deleteFile().catch(console.error);
deleteResumableCache()
Delete failed resumable upload file cache.
Resumable file uploads cache a config file so the upload can be restarted after a failure. In certain scenarios the resumable upload will not work and the upload file cache needs to be deleted before uploading the same file again.
Following are some of the scenarios.
The resumable file upload failed even though the file was successfully saved to Google Cloud Storage, and the resumable file cache needs to be cleaned up in order to update the same file.
The resumable file upload failed due to a precondition (i.e. the generation number did not match) and you want to upload the same file with a new generation number.
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file', { generation: 0 });
const contents = 'This is the contents of the file.';
file.save(contents, function(err) {
if (err) {
file.deleteResumableCache();
}
});
download(optionsopt, callbackopt) → {Promise.<DownloadResponse>}
Convenience method to download a file into memory or to a local destination.
Parameters:
Name | Type | Attributes | Description | ||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
options |
object |
<optional> |
Configuration options. The arguments match those passed to File#createReadStream. Properties
|
||||||||||||
callback |
DownloadCallback |
<optional> |
Callback function. |
Examples
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
//-
// Download a file into memory. The contents will be available as the
// second argument in the demonstration below, `contents`.
//-
file.download(function(err, contents) {});
//-
// Download a file to a local destination.
//-
file.download({
destination: '/Users/me/Desktop/file-backup.txt'
}, function(err) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.download().then(function(data) {
const contents = data[0];
});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const srcFilename = 'Remote file to download, e.g. file.txt';
// const destFilename = 'Local destination for file, e.g. ./local/path/to/file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Downloads gs://<bucketName>/<srcFilename> to the local path <destFilename>.
async function downloadFile() {
  const file = storage.bucket(bucketName).file(srcFilename);
  // The path to which the file should be downloaded, e.g. "./file.txt"
  await file.download({destination: destFilename});
  console.log(
    `gs://${bucketName}/${srcFilename} downloaded to ${destFilename}.`
  );
}
downloadFile().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const srcFilename = 'File to download, e.g. file_encrypted.txt';
// const destFilename = 'Local destination for file, e.g. ./file.txt';
// const key = 'Base64-encoded encryption key used when the file was uploaded';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Downloads an encrypted file using the customer-supplied key (`key`).
async function downloadEncryptedFile() {
  const options = {
    // The path to which the file should be downloaded, e.g. "./file.txt"
    destination: destFilename,
  };
  // Decrypts and downloads the file. This can only be done with the key used
  // to encrypt and upload the file.
  await storage
    .bucket(bucketName)
    .file(srcFilename)
    .setEncryptionKey(Buffer.from(key, 'base64'))
    .download(options);
  console.log(`File ${srcFilename} downloaded to ${destFilename}.`);
}
downloadEncryptedFile().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const projectId = 'The project ID to bill from, e.g. some-project-id';
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const srcFilename = 'Name of file to download, e.g. file.txt';
// const destFilename = 'Local destination of file, e.g. ./local/path/to/file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Downloads a file, billing the request to `projectId` (requester pays).
async function downloadFileUsingRequesterPays() {
  const options = {
    // The path to which the file should be downloaded, e.g. "./file.txt"
    destination: destFilename,
    // The project to bill from, if requester-pays requests are enabled
    userProject: projectId,
  };
  const file = storage.bucket(bucketName).file(srcFilename);
  // Downloads the file
  await file.download(options);
  console.log(
    `gs://${bucketName}/${srcFilename} downloaded to ${destFilename} using requester-pays requests.`
  );
}
downloadFileUsingRequesterPays().catch(console.error);
exists(optionsopt, callbackopt) → {Promise.<FileExistsResponse>}
Check if the file exists.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
options |
<optional> |
Configuration options. Properties
|
||||||||
callback |
FileExistsCallback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
file.exists(function(err, exists) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.exists().then(function(data) {
const exists = data[0];
});
generateSignedPostPolicyV2(options, callbackopt) → {Promise.<GenerateSignedPostPolicyV2Response>}
Get a signed policy document to allow a user to upload data with a POST request.
In Google Cloud Platform environments, such as Cloud Functions and App
Engine, you usually don't provide a keyFilename
or credentials
during
instantiation. In those environments, we call the
signBlob
API
to create a signed policy. That API requires either the
https://www.googleapis.com/auth/iam
or
https://www.googleapis.com/auth/cloud-platform
scope, so be sure they are
enabled.
Parameters:
Name | Type | Attributes | Description | ||||||||||||||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
options |
object |
Configuration options. Properties
|
|||||||||||||||||||||||||||||||||||||||||||||
callback |
GenerateSignedPostPolicyV2Callback |
<optional> |
Callback function. |
Throws:
-
-
If an expiration timestamp from the past is given.
- Type
- Error
-
-
-
If options.equals has an array with less or more than two members.
- Type
- Error
-
-
-
If options.startsWith has an array with less or more than two members.
- Type
- Error
-
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
const options = {
equals: ['$Content-Type', 'image/jpeg'],
expires: '10-25-2022',
contentLengthRange: {
min: 0,
max: 1024
}
};
file.generateSignedPostPolicyV2(options, function(err, policy) {
// policy.string: the policy document in plain text.
// policy.base64: the policy document in base64.
// policy.signature: the policy signature in base64.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.generateSignedPostPolicyV2(options).then(function(data) {
const policy = data[0];
});
generateSignedPostPolicyV4(options, callbackopt) → {Promise.<GenerateSignedPostPolicyV4Response>}
Get a v4 signed policy document to allow a user to upload data with a POST request.
In Google Cloud Platform environments, such as Cloud Functions and App
Engine, you usually don't provide a keyFilename
or credentials
during
instantiation. In those environments, we call the
signBlob
API
to create a signed policy. That API requires either the
https://www.googleapis.com/auth/iam
or
https://www.googleapis.com/auth/cloud-platform
scope, so be sure they are
enabled.
Parameters:
Name | Type | Attributes | Default | Description | ||||||
---|---|---|---|---|---|---|---|---|---|---|
options |
object |
Configuration options. Properties
|
||||||||
config.virtualHostedStyle |
boolean |
<optional> |
false |
Use virtual hosted-style
URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style
('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs
should generally be preferred instead of path-style URLs.
Currently defaults to |
||||||
config.bucketBoundHostname |
string |
<optional> |
The bucket-bound hostname to return in the result, e.g. "https://cdn.example.com". |
|||||||
config.fields |
object |
<optional> |
Form fields to include in the signed policy. Any fields with key beginning with 'x-ignore-' will not be included in the policy to be signed. |
|||||||
config.conditions |
Array.<object> |
<optional> |
Conditions
to include in the signed policy. All fields given in |
|||||||
callback |
GenerateSignedPostPolicyV4Callback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
const options = {
expires: '10-25-2022',
conditions: [
['eq', '$Content-Type', 'image/jpeg'],
['content-length-range', 0, 1024],
],
fields: {
acl: 'public-read',
'x-goog-meta-foo': 'bar',
'x-ignore-mykey': 'data'
}
};
file.generateSignedPostPolicyV4(options, function(err, response) {
// response.url The request URL
// response.fields The form fields (including the signature) to include
// to be used to upload objects by HTML forms.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.generateSignedPostPolicyV4(options).then(function(data) {
const response = data[0];
// response.url The request URL
// response.fields The form fields (including the signature) to include
// to be used to upload objects by HTML forms.
});
get(optionsopt, callbackopt) → {Promise.<GetFileResponse>}
Get a file object and its metadata if it exists.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
options |
<optional> |
Configuration options. Properties
|
||||||||
callback |
GetFileCallback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
file.get(function(err, file, apiResponse) {
// `file.metadata` has been populated.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.get().then(function(data) {
const file = data[0];
const apiResponse = data[1];
});
getExpirationDate(callbackopt) → {Promise.<GetExpirationDateResponse>}
If this bucket has a retention policy defined, use this method to get a Date object representing the earliest time this file will expire.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
callback |
GetExpirationDateCallback |
<optional> |
Callback function. |
Example
// Use the modern client instantiation (consistent with the other examples
// in this reference; the `require(...)()` invocation style is deprecated).
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
file.getExpirationDate(function(err, expirationDate) {
  // expirationDate is a Date object.
});
getMetadata(optionsopt, callbackopt) → {Promise.<GetFileMetadataResponse>}
Get the file's metadata.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
options |
object |
<optional> |
Configuration options. Properties
|
||||||||
callback |
GetFileMetadataCallback |
<optional> |
Callback function. |
Examples
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
file.getMetadata(function(err, metadata, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.getMetadata().then(function(data) {
const metadata = data[0];
const apiResponse = data[1];
});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const filename = 'File to access, e.g. file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Fetches the file's metadata and prints each field as `Label: value`.
async function getMetadata() {
  // Gets the metadata for the file
  const [metadata] = await storage
    .bucket(bucketName)
    .file(filename)
    .getMetadata();
  // [label, value] pairs, printed in order.
  const fields = [
    ['File', metadata.name],
    ['Bucket', metadata.bucket],
    ['Storage class', metadata.storageClass],
    ['Self link', metadata.selfLink],
    ['ID', metadata.id],
    ['Size', metadata.size],
    ['Updated', metadata.updated],
    ['Generation', metadata.generation],
    ['Metageneration', metadata.metageneration],
    ['Etag', metadata.etag],
    ['Owner', metadata.owner],
    ['Component count', metadata.component_count],
    ['Crc32c', metadata.crc32c],
    ['md5Hash', metadata.md5Hash],
    ['Cache-control', metadata.cacheControl],
    ['Content-type', metadata.contentType],
    ['Content-disposition', metadata.contentDisposition],
    ['Content-encoding', metadata.contentEncoding],
    ['Content-language', metadata.contentLanguage],
    ['Media link', metadata.mediaLink],
    ['KMS Key Name', metadata.kmsKeyName],
    ['Temporary Hold', metadata.temporaryHold],
    ['Event-based hold', metadata.eventBasedHold],
    ['Effective Expiration Time', metadata.effectiveExpirationTime],
    ['Metadata', metadata.metadata],
  ];
  for (const [label, value] of fields) {
    console.log(`${label}: ${value}`);
  }
}
getMetadata().catch(console.error);
getSignedPolicy(options, callbackopt) → {Promise.<GetSignedPolicyResponse>}
Get a v2 signed policy document to allow a user to upload data with a POST request.
In Google Cloud Platform environments, such as Cloud Functions and App
Engine, you usually don't provide a keyFilename
or credentials
during
instantiation. In those environments, we call the
signBlob
API
to create a signed policy. That API requires either the
https://www.googleapis.com/auth/iam
or
https://www.googleapis.com/auth/cloud-platform
scope, so be sure they are
enabled.
Parameters:
Name | Type | Attributes | Description | ||||||||||||||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
options |
object |
Configuration options. Properties
|
|||||||||||||||||||||||||||||||||||||||||||||
callback |
GetSignedPolicyCallback |
<optional> |
Callback function. |
- Deprecated:
-
- `getSignedPolicy()` is deprecated in favor of `generateSignedPostPolicyV2()` and `generateSignedPostPolicyV4()`. Currently, this method is an alias to `getSignedPolicyV2()`, and will be removed in a future major release. We recommend signing new policies using v4.
- See:
Throws:
-
-
If an expiration timestamp from the past is given.
- Type
- Error
-
-
-
If options.equals has an array with less or more than two members.
- Type
- Error
-
-
-
If options.startsWith has an array with less or more than two members.
- Type
- Error
-
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
const options = {
equals: ['$Content-Type', 'image/jpeg'],
expires: '10-25-2022',
contentLengthRange: {
min: 0,
max: 1024
}
};
file.getSignedPolicy(options, function(err, policy) {
// policy.string: the policy document in plain text.
// policy.base64: the policy document in base64.
// policy.signature: the policy signature in base64.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.getSignedPolicy(options).then(function(data) {
const policy = data[0];
});
getSignedUrl(config, callbackopt) → {Promise.<GetSignedUrlResponse>}
Get a signed URL to allow limited time access to the file.
In Google Cloud Platform environments, such as Cloud Functions and App
Engine, you usually don't provide a keyFilename
or credentials
during
instantiation. In those environments, we call the
signBlob
API
to create a signed URL. That API requires either the
https://www.googleapis.com/auth/iam
or
https://www.googleapis.com/auth/cloud-platform
scope, so be sure they are
enabled.
Parameters:
Name | Type | Attributes | Description | |||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
config |
object |
Configuration object. Properties
|
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
callback |
GetSignedUrlCallback |
<optional> |
Callback function. |
Throws:
-
if an expiration timestamp from the past is given.
- Type
- Error
Examples
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
//-
// Generate a URL that allows temporary access to download your file.
//-
const request = require('request');
const config = {
action: 'read',
expires: '03-17-2025'
};
file.getSignedUrl(config, function(err, url) {
if (err) {
console.error(err);
return;
}
// The file is now available to read from this URL.
request(url, function(err, resp) {
// resp.statusCode = 200
});
});
//-
// Generate a URL to allow write permissions. This means anyone with this
// URL can send a POST request with new data that will overwrite the file.
//-
file.getSignedUrl({
action: 'write',
expires: '03-17-2025'
}, function(err, url) {
if (err) {
console.error(err);
return;
}
// The file is now available to be written to.
const writeStream = request.put(url);
writeStream.end('New data');
writeStream.on('complete', function(resp) {
// Confirm the new content was saved.
file.download(function(err, fileContents) {
console.log('Contents:', fileContents.toString());
// Contents: New data
});
});
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.getSignedUrl(config).then(function(data) {
const url = data[0];
});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const filename = 'File to access, e.g. file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Generates a time-limited v2 signed URL granting read access to the file.
async function generateSignedUrl() {
  // These options will allow temporary read access to the file
  const options = {
    version: 'v2', // defaults to 'v2' if missing.
    action: 'read',
    expires: Date.now() + 1000 * 60 * 60, // one hour
  };
  // Get a v2 signed URL for the file
  const [url] = await storage
    .bucket(bucketName)
    .file(filename)
    .getSignedUrl(options);
  // Fix: the log line previously contained the mangled text `$(unknown)`
  // instead of the `${filename}` interpolation.
  console.log(`The signed url for ${filename} is ${url}.`);
}
generateSignedUrl().catch(console.error);
isPublic(callbackopt) → {Promise.<IsPublicResponse>}
Check whether this file is public or not by sending a HEAD request without credentials. No errors from the server indicates that the current file is public. A 403-Forbidden error https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden indicates that file is private. Any other non 403 error is propagated to user.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
callback |
IsPublicCallback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
//-
// Check whether the file is publicly accessible.
//-
file.isPublic(function(err, resp) {
if (err) {
console.error(err);
return;
}
console.log(`the file ${file.id} is public: ${resp}`) ;
})
//-
// If the callback is omitted, we'll return a Promise.
//-
file.isPublic().then(function(data) {
const resp = data[0];
});
makePrivate(optionsopt, callbackopt) → {Promise.<MakeFilePrivateResponse>}
Make a file private to the project and remove all other permissions.
Set options.strict
to true to make the file private to only the owner.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
options |
MakeFilePrivateOptions |
<optional> |
Configuration options. |
callback |
MakeFilePrivateCallback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
//-
// Set the file private so only project maintainers can see and modify it.
//-
file.makePrivate((err) => {});
//-
// Set the file private so only the owner can see and modify it.
//-
file.makePrivate({ strict: true }, (err) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.makePrivate().then(([apiResponse]) => {});
makePublic(callbackopt) → {Promise.<MakeFilePublicResponse>}
Set a file to be publicly readable and maintain all previous permissions.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
callback |
MakeFilePublicCallback |
<optional> |
Callback function. |
Examples
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
file.makePublic((err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.makePublic().then(([apiResponse]) => {});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const filename = 'File to make public, e.g. file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Makes the remote file publicly readable and logs its gs:// path.
async function makePublic() {
  // Makes the file public
  await storage.bucket(bucketName).file(filename).makePublic();
  // `$(unknown)` was not valid template-literal syntax; interpolate `${filename}`.
  console.log(`gs://${bucketName}/${filename} is now public.`);
}
makePublic().catch(console.error);
move(destination, callbackopt) → {Promise.<MoveResponse>}
Move this file to another location. By default, this will rename the file and keep it in the same bucket, but you can choose to move it to another Bucket by providing a Bucket or File object or a URL beginning with "gs://".
Warning:
There is currently no atomic move
method in the Cloud Storage API,
so this method is a composition of File#copy (to the new
location) and File#delete (from the old location). While
unlikely, it is possible that an error returned to your callback could be
triggered from either one of these API calls failing, which could leave a
duplicate file lingering. The error message will indicate what operation
has failed.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
destination |
string | Bucket | File |
Destination file. |
|
callback |
MoveCallback |
<optional> |
Callback function. |
Throws:
-
If the destination file is not provided.
- Type
- Error
Examples
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
//-
// You can pass in a variety of types for the destination.
//
// For all of the below examples, assume we are working with the following
// Bucket and File objects.
//-
const bucket = storage.bucket('my-bucket');
const file = bucket.file('my-image.png');
//-
// If you pass in a string for the destination, the file is moved to its
// current bucket, under the new name provided.
//-
file.move('my-image-new.png', function(err, destinationFile, apiResponse) {
  // `my-bucket` no longer contains:
  // - "my-image.png"
  // but contains instead:
  // - "my-image-new.png"
  // `destinationFile` is an instance of a File object that refers to your
  // new file.
});
//-
// If you pass in a string starting with "gs://" for the destination, the
// file is moved to the other bucket and under the new name provided.
//-
const newLocation = 'gs://another-bucket/my-image-new.png';
file.move(newLocation, function(err, destinationFile, apiResponse) {
  // `my-bucket` no longer contains:
  // - "my-image.png"
  //
  // `another-bucket` now contains:
  // - "my-image-new.png"
  // `destinationFile` is an instance of a File object that refers to your
  // new file.
});
//-
// If you pass in a Bucket object, the file will be moved to that bucket
// using the same name.
//-
// Fixed: the original referenced an undefined `gcs`; the client created
// above is named `storage`.
const anotherBucket = storage.bucket('another-bucket');
file.move(anotherBucket, function(err, destinationFile, apiResponse) {
  // `my-bucket` no longer contains:
  // - "my-image.png"
  //
  // `another-bucket` now contains:
  // - "my-image.png"
  // `destinationFile` is an instance of a File object that refers to your
  // new file.
});
//-
// If you pass in a File object, you have complete control over the new
// bucket and filename.
//-
const anotherFile = anotherBucket.file('my-awesome-image.png');
file.move(anotherFile, function(err, destinationFile, apiResponse) {
  // `my-bucket` no longer contains:
  // - "my-image.png"
  //
  // `another-bucket` now contains:
  // - "my-awesome-image.png"
  // Note:
  // The `destinationFile` parameter is equal to `anotherFile`.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.move('my-image-new.png').then(function(data) {
  const destinationFile = data[0];
  const apiResponse = data[1];
});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const srcFilename = 'File to move, e.g. file.txt';
// const destFilename = 'Destination for file, e.g. moved.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Renames the file within its bucket and logs the old and new locations.
async function moveFile() {
  const srcFile = storage.bucket(bucketName).file(srcFilename);
  await srcFile.move(destFilename);
  console.log(
    `gs://${bucketName}/${srcFilename} moved to gs://${bucketName}/${destFilename}.`
  );
}
moveFile().catch(console.error);
rotateEncryptionKey(optionsopt, callbackopt) → {Promise.<File>}
This method allows you to update the encryption key associated with this file.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
options |
RotateEncryptionKeyOptions |
<optional> |
Configuration options. |
callback |
RotateEncryptionKeyCallback |
<optional> |
Example
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const fileName = 'Name of a file in the bucket, e.g. my-file';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Rotates the customer-supplied encryption key for the file: opens it with
// the current key (`oldKey`) and rewrites it under `newKey`.
async function rotateEncryptionKey() {
  // See the "Generating your own encryption key" section above.
  // const oldKey = 'The current base64 encoded customer-supplied encryption key';
  // const newKey = 'A new base64 encoded customer-supplied encryption key';
  await storage
    .bucket(bucketName)
    .file(fileName, {
      encryptionKey: Buffer.from(oldKey, 'base64'),
    })
    .rotateEncryptionKey({
      encryptionKey: Buffer.from(newKey, 'base64'),
    });
  console.log('Encryption key rotated successfully.');
}
rotateEncryptionKey().catch(console.error);
save(data, optionsopt, callbackopt) → {Promise}
Write arbitrary data to a file.
This is a convenience method which wraps File#createWriteStream.
Resumable uploads are automatically enabled and must be shut off explicitly by setting `options.resumable` to `false`.
There is some overhead when using a resumable upload that can cause noticeable performance degradation while uploading a series of small files. When uploading files less than 10MB, it is recommended that the resumable feature is disabled.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
data |
* |
The data to write to a file. |
|
options |
SaveOptions |
<optional> |
See File#createWriteStream's |
callback |
SaveCallback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
const contents = 'This is the contents of the file.';
file.save(contents, (err) => {
  if (!err) {
    // File written successfully.
  }
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.save(contents).then(() => {});
setEncryptionKey(encryptionKey) → {File}
The Storage API allows you to use a custom key for server-side encryption.
Parameters:
Name | Type | Description |
---|---|---|
encryptionKey |
string | buffer |
An AES-256 encryption key. |
Examples
const crypto = require('crypto');
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
// Generate a random 32-byte (AES-256) key and attach it to one File instance.
const encryptionKey = crypto.randomBytes(32);
const fileWithCustomEncryption = myBucket.file('my-file');
fileWithCustomEncryption.setEncryptionKey(encryptionKey);
// A second File object for the same remote file, without the key.
const fileWithoutCustomEncryption = myBucket.file('my-file');
fileWithCustomEncryption.save('data', (err) => {
  // Try to download with the File object that hasn't had
  // `setEncryptionKey()` called:
  fileWithoutCustomEncryption.download((err) => {
    // We will receive an error:
    // err.message === 'Bad Request'
    // Try again with the File object we called `setEncryptionKey()` on:
    fileWithCustomEncryption.download((err, contents) => {
      // contents.toString() === 'data'
    });
  });
});
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const srcFilename = 'Local file to upload, e.g. ./local/path/to/file.txt';
// const destFilename = 'Remote destination for file, e.g. file_encrypted.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Encrypts and uploads a local file, e.g. "./local/path/to/file.txt".
// The file will only be retrievable using the key used to upload it.
async function uploadEncryptedFile() {
  await storage.bucket(bucketName).upload(srcFilename, {
    // The path to which the file should be uploaded, e.g. "file_encrypted.txt"
    destination: destFilename,
    // Encrypt the file with a customer-supplied key.
    // See the "Generating your own encryption key" section above.
    encryptionKey: Buffer.from(key, 'base64'),
  });
  console.log(
    `File ${srcFilename} uploaded to gs://${bucketName}/${destFilename}.`
  );
}
uploadEncryptedFile().catch(console.error);
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const srcFilename = 'File to download, e.g. file_encrypted.txt';
// const destFilename = 'Local destination for file, e.g. ./file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
// Downloads a customer-encrypted file using the same key (`key`, base64)
// that was used to upload it.
async function downloadEncryptedFile() {
  const options = {
    // The path to which the file should be downloaded, e.g. "./file.txt"
    destination: destFilename,
  };
  // Decrypts and downloads the file. This can only be done with the key used
  // to encrypt and upload the file.
  await storage
    .bucket(bucketName)
    .file(srcFilename)
    .setEncryptionKey(Buffer.from(key, 'base64'))
    .download(options);
  console.log(`File ${srcFilename} downloaded to ${destFilename}.`);
}
downloadEncryptedFile().catch(console.error);
setMetadata(metadataopt, optionsopt, callbackopt) → {Promise.<SetFileMetadataResponse>}
Merge the given metadata with the current remote file's metadata. This will set metadata if it was previously unset or update previously set metadata. To unset previously set metadata, set its value to null.
You can set custom key/value pairs in the metadata key of the given object, however the other properties outside of this object must adhere to the official API documentation.
NOTE: multiple calls to setMetadata in parallel might result in unpredictable results. See issue.
See the examples below for more information.
Parameters:
Name | Type | Attributes | Description |
---|---|---|---|
metadata |
object |
<optional> |
The metadata you wish to update. |
options |
SetFileMetadataOptions |
<optional> |
Configuration options. |
callback |
SetFileMetadataCallback |
<optional> |
Callback function. |
Example
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file');
const metadata = {
  contentType: 'application/x-font-ttf',
  metadata: {
    my: 'custom',
    properties: 'go here'
  }
};
file.setMetadata(metadata, function(err, apiResponse) {});
// Assuming current metadata = { hello: 'world', unsetMe: 'will do' }
file.setMetadata({
  metadata: {
    abc: '123', // will be set.
    unsetMe: null, // will be unset (deleted).
    hello: 'goodbye' // will be updated from 'world' to 'goodbye'.
  }
}, function(err, apiResponse) {
  // metadata should now be { abc: '123', hello: 'goodbye' }
});
//-
// Set a temporary hold on this file (see the Cloud Storage object-holds
// documentation for hold semantics).
//-
file.setMetadata({
  temporaryHold: true
}, function(err, apiResponse) {});
//-
// Alternatively, set an event-based hold. NOTE(review): the original
// comment here said "temporary hold", but the code sets `eventBasedHold`;
// per the Cloud Storage holds documentation, releasing an event-based hold
// restarts the bucket's retention period for this file — confirm there.
//-
file.setMetadata({
  eventBasedHold: true
}, function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.setMetadata(metadata).then(function(data) {
  const apiResponse = data[0];
});
setStorageClass(storageClass, optionsopt, callbackopt) → {Promise.<SetStorageClassResponse>}
Set the storage class for this file.
Parameters:
Name | Type | Attributes | Description | ||||||||
---|---|---|---|---|---|---|---|---|---|---|---|
storageClass |
string |
The new storage class. ( |
|||||||||
options |
SetStorageClassOptions |
<optional> |
Configuration options. Properties
|
||||||||
callback |
SetStorageClassCallback |
<optional> |
Callback function. |
Example
file.setStorageClass('nearline', (err, apiResponse) => {
  if (err) {
    // Error handling omitted.
  }
  // The storage class was updated successfully.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.setStorageClass('nearline').then(() => {});
setUserProject(userProject)
Set a user project to be billed for all requests made from this File object.
Parameters:
Name | Type | Description |
---|---|---|
userProject |
string |
The user project. |