Constructor
new Table(dataset, id[, options])
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| dataset | Dataset |  | Dataset instance. |
| id | string |  | The ID of the table. |
| options | object | <optional> | Table options. |
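Tables are typically obtained through Dataset#table rather than by calling this constructor directly; a minimal sketch, with placeholder dataset and table IDs:
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
// Equivalent to `new Table(dataset, 'my-table')`.
const table = dataset.table('my-table');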
Members
createReadStream
Create a readable stream of the rows of data in your table. This method is simply a wrapper around Table#getRows.
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
table.createReadStream(options)
.on('error', console.error)
.on('data', row => {})
.on('end', function() {
// All rows have been retrieved.
});
//-
// If you anticipate many results, you can end a stream early to prevent
// unnecessary processing and API requests.
//-
table.createReadStream()
.on('data', function(row) {
this.end();
});
Methods
copy(destination[, metadata][, callback]) → {Promise}
Copy data from one table to another, optionally creating that table.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| destination | Table |  | The destination table. |
| metadata | object | <optional> | Metadata to set with the copy operation. The metadata object should be in the format of the [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If a destination other than a Table object is provided.
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');
table.copy(yourTable, (err, apiResponse) => {});
//-
// See the <a href="http://goo.gl/dKWIyS">`configuration.copy`</a> object for
// all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.copy(yourTable, metadata, (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.copy(yourTable, metadata).then((data) => {
const apiResponse = data[0];
});
copyFrom(sourceTables[, metadata][, callback]) → {Promise}
Copy data from multiple tables into this table.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| sourceTables | Table \| Array.<Table> |  | The source table(s) to copy data from. |
| metadata | object | <optional> | Metadata to set with the copy operation. The metadata object should be in the format of the [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If a source other than a Table object is provided.
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const sourceTables = [
dataset.table('your-table'),
dataset.table('your-second-table')
];
table.copyFrom(sourceTables, (err, apiResponse) => {});
//-
// See the <a href="http://goo.gl/dKWIyS">`configuration.copy`</a> object for
// all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.copyFrom(sourceTables, metadata, (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.copyFrom(sourceTables, metadata).then((data) => {
const apiResponse = data[0];
});
create([options][, callback]) → {Promise}
Create a table.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| options | object | <optional> | See Dataset#createTable. |
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
table.create((err, table, apiResponse) => {
if (!err) {
// The table was created successfully.
}
});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.create().then((data) => {
const table = data[0];
const apiResponse = data[1];
});
createCopyFromJob(sourceTables[, metadata][, callback]) → {Promise}
Copy data from multiple tables into this table.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| sourceTables | Table \| Array.<Table> |  | The source table(s) to copy data from. |
| metadata | object | <optional> | Metadata to set with the copy operation. The metadata object should be in the format of the [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If a source other than a Table object is provided.
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const sourceTables = [
dataset.table('your-table'),
dataset.table('your-second-table')
];
const callback = (err, job, apiResponse) => {
// `job` is a Job object that can be used to check the status of the
// request.
};
table.createCopyFromJob(sourceTables, callback);
//-
// See the <a href="http://goo.gl/dKWIyS">`configuration.copy`</a> object for
// all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.createCopyFromJob(sourceTables, metadata, callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyFromJob(sourceTables, metadata).then((data) => {
const job = data[0];
const apiResponse = data[1];
});
createCopyJob(destination[, metadata][, callback]) → {Promise}
Copy data from one table to another, optionally creating that table.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| destination | Table |  | The destination table. |
| metadata | object | <optional> | Metadata to set with the copy operation. The metadata object should be in the format of the [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If a destination other than a Table object is provided.
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');
table.createCopyJob(yourTable, (err, job, apiResponse) => {
// `job` is a Job object that can be used to check the status of the
// request.
});
//-
// See the <a href="http://goo.gl/dKWIyS">`configuration.copy`</a> object for
// all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyJob(yourTable, metadata).then((data) => {
const job = data[0];
const apiResponse = data[1];
});
createExtractJob(destination[, options][, callback]) → {Promise}
Export table to Cloud Storage.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| destination | string \| File |  | Where the file should be exported to. A string or a File object. |
| options | object | <optional> | The configuration object. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If destination isn't a File object.
- Error: If destination format isn't recognized.
Example
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');
function callback(err, job, apiResponse) {
// `job` is a Job object that can be used to check the status of the
// request.
}
//-
// To use the default options, just pass a File object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.createExtractJob(extractedFile, callback);
//-
// If you need more customization, pass an `options` object.
//-
const options = {
format: 'json',
gzip: true
};
table.createExtractJob(extractedFile, options, callback);
//-
// You can also specify multiple destination files.
//-
table.createExtractJob([
storage.bucket('institutions').file('2014.json'),
storage.bucket('institutions-copy').file('2014.json')
], options, callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createExtractJob(extractedFile, options).then((data) => {
const job = data[0];
const apiResponse = data[1];
});
createLoadJob(source[, metadata][, callback]) → {Promise}
Load data from a local file or Storage File.
By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.
Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide `metadata.format`.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| source | string \| File |  | The source file to load. A string or a File object. |
| metadata | object | <optional> | Metadata to set with the load operation. The metadata object should be in the format of the [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If the source isn't a string file name or a File instance.
Example
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Load data from a local file.
//-
const callback = (err, job, apiResponse) => {
// `job` is a Job object that can be used to check the status of the
// request.
};
table.createLoadJob('./institutions.csv', callback);
//-
// You may also pass in metadata in the format of a Jobs resource. See
// (http://goo.gl/BVcXk4) for a full list of supported values.
//-
const metadata = {
encoding: 'ISO-8859-1',
sourceFormat: 'NEWLINE_DELIMITED_JSON'
};
table.createLoadJob('./my-data.csv', metadata, callback);
//-
// Load data from a file in your Cloud Storage bucket.
//-
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const data = storage.bucket('institutions').file('data.csv');
table.createLoadJob(data, callback);
//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.createLoadJob([
storage.bucket('institutions').file('2011.csv'),
storage.bucket('institutions').file('2012.csv')
], callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createLoadJob(data).then((data) => {
const job = data[0];
const apiResponse = data[1];
});
createQueryJob()
Run a query as a job. No results are immediately returned. Instead, your callback will be executed with a Job object that you must ping for the results. See the Job documentation for explanations of how to check on the status of the job.
See BigQuery#createQueryJob for full documentation of this method.
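A minimal sketch, assuming a placeholder query; see BigQuery#createQueryJob for all supported options:
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
// The query below is a placeholder; adjust it to your table's schema.
table.createQueryJob({query: 'SELECT * FROM `my-dataset.my-table` LIMIT 10'}, (err, job) => {
  if (err) {
    // Handle the error.
    return;
  }
  // `job` is a Job object; poll it for the query results.
  job.getQueryResults((err, rows) => {});
});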
createQueryStream(query) → {stream}
Run a query scoped to your dataset as a readable object stream.
See BigQuery#createQueryStream for full documentation of this method.
Parameters:
| Name | Type | Description |
|---|---|---|
| query | object | See BigQuery#createQueryStream for full documentation of this method. |
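A minimal sketch, assuming a placeholder query; see BigQuery#createQueryStream for all supported options:
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
// The query below is a placeholder; adjust it to your table's schema.
table.createQueryStream({query: 'SELECT * FROM `my-dataset.my-table`'})
  .on('error', console.error)
  .on('data', (row) => {})
  .on('end', () => {
    // All rows have been retrieved.
  });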
createWriteStream([metadata]) → {WritableStream}
Load data into your table from a readable stream of AVRO, CSV, JSON, ORC, or PARQUET data.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| metadata | string \| object | <optional> | Metadata to set with the load operation. The metadata object should be in the format of the [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource. If a string is given, it is used as the file type (e.g. 'json'). |
Throws:
- Error: If source format isn't recognized.
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Load data from a CSV file.
//-
const request = require('request');
const csvUrl = 'http://goo.gl/kSE7z6';
const metadata = {
allowJaggedRows: true,
skipLeadingRows: 1
};
request.get(csvUrl)
.pipe(table.createWriteStream(metadata))
.on('job', (job) => {
// `job` is a Job object that can be used to check the status of the
// request.
})
.on('complete', (job) => {
// The job has completed successfully.
});
//-
// Load data from a JSON file.
//-
const fs = require('fs');
fs.createReadStream('./test/testdata/testfile.json')
.pipe(table.createWriteStream('json'))
.on('job', (job) => {
// `job` is a Job object that can be used to check the status of the
// request.
})
.on('complete', (job) => {
// The job has completed successfully.
});
delete([callback]) → {Promise}
Delete a table and all its data.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
table.delete((err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.delete().then((data) => {
const apiResponse = data[0];
});
exists([callback]) → {Promise}
Check if the table exists.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
table.exists((err, exists) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.exists().then((data) => {
const exists = data[0];
});
extract(destination[, options][, callback]) → {Promise}
Export table to Cloud Storage.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| destination | string \| File |  | Where the file should be exported to. A string or a File object. |
| options | object | <optional> | The configuration object. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If destination isn't a File object.
- Error: If destination format isn't recognized.
Example
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');
//-
// To use the default options, just pass a File object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.extract(extractedFile, (err, apiResponse) => {});
//-
// If you need more customization, pass an `options` object.
//-
const options = {
format: 'json',
gzip: true
};
table.extract(extractedFile, options, (err, apiResponse) => {});
//-
// You can also specify multiple destination files.
//-
table.extract([
storage.bucket('institutions').file('2014.json'),
storage.bucket('institutions-copy').file('2014.json')
], options, (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.extract(extractedFile, options).then((data) => {
const apiResponse = data[0];
});
get([options][, callback]) → {Promise}
Get a table if it exists.
You may optionally use this to "get or create" an object by providing an object with `autoCreate` set to `true`. Any extra configuration that is normally required for the `create` method must be contained within this object as well.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| options | object | <optional> | Configuration object. |
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
table.get((err, table, apiResponse) => {
// `table.metadata` has been populated.
});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.get().then((data) => {
const table = data[0];
const apiResponse = data[1];
});
getMetadata([callback]) → {Promise}
Return the metadata associated with the Table.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
table.getMetadata((err, metadata, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.getMetadata().then((data) => {
const metadata = data[0];
const apiResponse = data[1];
});
getRows([options][, callback]) → {Promise}
Retrieves table data from a specified set of rows. The rows are returned to your callback as an array of objects matching your table's schema.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| options | object | <optional> | The configuration object. |
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
table.getRows((err, rows) => {
if (!err) {
// rows is an array of results.
}
});
//-
// To control how many API requests are made and page through the results
// manually, set `autoPaginate` to `false`.
//-
function manualPaginationCallback(err, rows, nextQuery, apiResponse) {
if (nextQuery) {
// More results exist.
table.getRows(nextQuery, manualPaginationCallback);
}
}
table.getRows({
autoPaginate: false
}, manualPaginationCallback);
//-
// If the callback is omitted, we'll return a Promise.
//-
table.getRows().then((data) => {
const rows = data[0];
});
insert(rows[, options][, callback]) → {Promise}
Stream data into BigQuery one record at a time without running a load job.
If you need to create an entire table from a file, consider using Table#load instead.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| rows | object \| Array.<object> |  | The rows to insert into the table. |
| options | object | <optional> | Configuration object. |
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Insert a single row.
//-
table.insert({
INSTNM: 'Motion Picture Institute of Michigan',
CITY: 'Troy',
STABBR: 'MI'
}, insertHandler);
//-
// Insert multiple rows at a time.
//-
const rows = [
{
INSTNM: 'Motion Picture Institute of Michigan',
CITY: 'Troy',
STABBR: 'MI'
},
// ...
];
table.insert(rows, insertHandler);
//-
// Insert a row according to the <a href="https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll">specification</a>.
//-
const row = {
insertId: '1',
json: {
INSTNM: 'Motion Picture Institute of Michigan',
CITY: 'Troy',
STABBR: 'MI'
}
};
const options = {
raw: true
};
table.insert(row, options, insertHandler);
//-
// Handling the response. See <a href="https://developers.google.com/bigquery/troubleshooting-errors">Troubleshooting Errors</a> for best practices on how to handle errors.
//-
function insertHandler(err, apiResponse) {
if (err) {
// An API error or partial failure occurred.
if (err.name === 'PartialFailureError') {
// Some rows failed to insert, while others may have succeeded.
// err.errors (object[]):
// err.errors[].row (original row object passed to `insert`)
// err.errors[].errors[].reason
// err.errors[].errors[].message
}
}
}
//-
// If the callback is omitted, we'll return a Promise.
//-
table.insert(rows)
.then((data) => {
const apiResponse = data[0];
})
.catch((err) => {
// An API error or partial failure occurred.
if (err.name === 'PartialFailureError') {
// Some rows failed to insert, while others may have succeeded.
// err.errors (object[]):
// err.errors[].row (original row object passed to `insert`)
// err.errors[].errors[].reason
// err.errors[].errors[].message
}
});
load(source[, metadata][, callback]) → {Promise}
Load data from a local file or Storage File.
By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.
Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide `metadata.format`.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| source | string \| File |  | The source file to load. A filepath as a string or a File object. |
| metadata | object | <optional> | Metadata to set with the load operation. The metadata object should be in the format of the [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource. |
| callback | function | <optional> | The callback function. |
Throws:
- Error: If the source isn't a string file name or a File instance.
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Load data from a local file.
//-
table.load('./institutions.csv', (err, apiResponse) => {});
//-
// You may also pass in metadata in the format of a Jobs resource. See
// (http://goo.gl/BVcXk4) for a full list of supported values.
//-
const metadata = {
encoding: 'ISO-8859-1',
sourceFormat: 'NEWLINE_DELIMITED_JSON'
};
table.load('./my-data.csv', metadata, (err, apiResponse) => {});
//-
// Load data from a file in your Cloud Storage bucket.
//-
const {Storage} = require('@google-cloud/storage');
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const data = storage.bucket('institutions').file('data.csv');
table.load(data, (err, apiResponse) => {});
//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.load([
storage.bucket('institutions').file('2011.csv'),
storage.bucket('institutions').file('2012.csv')
], (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.load(data).then((data) => {
const apiResponse = data[0];
});
query(query[, callback]) → {Promise}
Run a query scoped to your dataset.
See BigQuery#query for full documentation of this method.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| query | object |  | See BigQuery#query for full documentation of this method. |
| callback | function | <optional> | See BigQuery#query for full documentation of this method. |
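A minimal sketch, assuming a placeholder query; see BigQuery#query for all supported options:
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
// The query below is a placeholder; adjust it to your table's schema.
table.query({query: 'SELECT * FROM `my-dataset.my-table`'}, (err, rows) => {
  if (!err) {
    // rows is an array of results.
  }
});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.query({query: 'SELECT * FROM `my-dataset.my-table`'}).then((data) => {
  const rows = data[0];
});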
setMetadata(metadata[, callback]) → {Promise}
Set the metadata on the table.
Parameters:
| Name | Type | Attributes | Description |
|---|---|---|---|
| metadata | object |  | The metadata key/value object to set. |
| callback | function | <optional> | The callback function. |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const metadata = {
name: 'My recipes',
description: 'A table for storing my recipes.',
schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
};
table.setMetadata(metadata, (err, metadata, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.setMetadata(metadata).then((data) => {
const metadata = data[0];
const apiResponse = data[1];
});