Table objects are returned by methods such as Dataset#table, Dataset#createTable, and Dataset#getTables.
Package: @google-cloud/bigquery
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
```
Constructors

(constructor)(dataset, id, options)

`constructor(dataset: Dataset, id: string, options?: TableOptions);`

Constructs a new instance of the Table class.
Parameters
Properties
bigQuery
dataset
location
rowQueue
Methods
_createLoadJob(source, metadata)

`_createLoadJob(source: string | File | File[], metadata: JobLoadMetadata): Promise<JobResponse>;`
Parameters
Returns
copy(destination, metadata)

`copy(destination: Table, metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;`

Copy data from one table to another, optionally creating that table.
Parameters
Returns
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');

table.copy(yourTable, (err, apiResponse) => {});

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};
table.copy(yourTable, metadata, (err, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.copy(yourTable, metadata).then((data) => {
  const apiResponse = data[0];
});
```
copy(destination, metadata, callback)

`copy(destination: Table, metadata: CopyTableMetadata, callback: JobMetadataCallback): void;`
Parameters
Returns
copy(destination, callback)

`copy(destination: Table, callback: JobMetadataCallback): void;`
Parameters
Returns
copyFrom(sourceTables, metadata)

`copyFrom(sourceTables: Table | Table[], metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;`

Copy data from multiple tables into this table.
Parameters

| Name | Description |
| --- | --- |
| sourceTables | `Table \| Table[]`. The source table(s) to copy data from. |
| metadata | `CopyTableMetadata`. Metadata to set with the copy operation. The metadata object should be in the format of a `JobConfigurationTableCopy` object. |
Returns
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const sourceTables = [
  dataset.table('your-table'),
  dataset.table('your-second-table')
];

table.copyFrom(sourceTables, (err, apiResponse) => {});

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};
table.copyFrom(sourceTables, metadata, (err, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.copyFrom(sourceTables, metadata).then((data) => {
  const apiResponse = data[0];
});
```
copyFrom(sourceTables, metadata, callback)

`copyFrom(sourceTables: Table | Table[], metadata: CopyTableMetadata, callback: JobMetadataCallback): void;`
Parameters
Returns
copyFrom(sourceTables, callback)

`copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void;`
Parameters
Returns
createCopyFromJob(source, metadata)

`createCopyFromJob(source: Table | Table[], metadata?: CopyTableMetadata): Promise<JobResponse>;`
Parameters
Returns
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const sourceTables = [
  dataset.table('your-table'),
  dataset.table('your-second-table')
];

const callback = (err, job, apiResponse) => {
  // `job` is a Job object that can be used to check the status of the
  // request.
};

table.createCopyFromJob(sourceTables, callback);

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};
table.createCopyFromJob(sourceTables, metadata, callback);

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyFromJob(sourceTables, metadata).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
```
createCopyFromJob(source, metadata, callback)

`createCopyFromJob(source: Table | Table[], metadata: CopyTableMetadata, callback: JobCallback): void;`
Parameters
Returns
createCopyFromJob(source, callback)

`createCopyFromJob(source: Table | Table[], callback: JobCallback): void;`

Parameters

| Name | Description |
| --- | --- |
| source | `Table \| Table[]` |
| callback | `JobCallback` |
Returns
createCopyJob(destination, metadata)

`createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): Promise<JobResponse>;`
Parameters
Returns
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');

table.createCopyJob(yourTable, (err, job, apiResponse) => {
  // `job` is a Job object that can be used to check the status of the
  // request.
});

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};
table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyJob(yourTable, metadata).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
```
createCopyJob(destination, metadata, callback)

`createCopyJob(destination: Table, metadata: CreateCopyJobMetadata, callback: JobCallback): void;`
Parameters
Returns
createCopyJob(destination, callback)

`createCopyJob(destination: Table, callback: JobCallback): void;`
Parameters
Returns
createExtractJob(destination, options)

`createExtractJob(destination: File, options?: CreateExtractJobOptions): Promise<JobResponse>;`

Parameters

| Name | Description |
| --- | --- |
| destination | `File`. Where the file should be exported to. A string or a File object. |
| options | `CreateExtractJobOptions`. The configuration object. |
Returns
Example

```js
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const storage = new Storage({
  projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');

function callback(err, job, apiResponse) {
  // `job` is a Job object that can be used to check the status of the
  // request.
}

//-
// To use the default options, just pass a
// {@link https://googleapis.dev/nodejs/storage/latest/File.html File} object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.createExtractJob(extractedFile, callback);

//-
// If you need more customization, pass an `options` object.
//-
const options = {
  format: 'json',
  gzip: true
};
table.createExtractJob(extractedFile, options, callback);

//-
// You can also specify multiple destination files.
//-
table.createExtractJob([
  storage.bucket('institutions').file('2014.json'),
  storage.bucket('institutions-copy').file('2014.json')
], options, callback);

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createExtractJob(extractedFile, options).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
```
createExtractJob(destination, options, callback)

`createExtractJob(destination: File, options: CreateExtractJobOptions, callback: JobCallback): void;`
Parameters
Returns
createExtractJob(destination, callback)

`createExtractJob(destination: File, callback: JobCallback): void;`
Parameters
Returns
createInsertStream(options)

`createInsertStream(options?: InsertStreamOptions): Writable;`
Parameter
Returns
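No example accompanies this method in this reference; here is a minimal sketch, assuming the returned Writable operates in object mode and flushes each written row as a streaming insert:

```js
// Reuses the `table` object from the examples above.
const insertStream = table.createInsertStream();
insertStream.on('error', (err) => {
  // Handle rows that failed to insert.
});
insertStream.write({INSTNM: 'Motion Picture Institute of Michigan'});
insertStream.end();
```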
createLoadJob(source, metadata)

`createLoadJob(source: string | File, metadata?: JobLoadMetadata): Promise<JobResponse>;`

Load data from a local file or a Storage File.

By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.

Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide `metadata.format`.
See Jobs: insert API Documentation
Parameters

| Name | Description |
| --- | --- |
| source | `string \| File`. The source file to load. A string (path) to a local file, or one or more File objects. |
| metadata | `JobLoadMetadata`. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. |
Returns
Example

```js
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Load data from a local file.
//-
const callback = (err, job, apiResponse) => {
  // `job` is a Job object that can be used to check the status of the
  // request.
};
table.createLoadJob('./institutions.csv', callback);

//-
// You may also pass in metadata in the format of a Jobs resource. See
// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
// for a full list of supported values.
//-
const metadata = {
  encoding: 'ISO-8859-1',
  sourceFormat: 'NEWLINE_DELIMITED_JSON'
};
table.createLoadJob('./my-data.csv', metadata, callback);

//-
// Load data from a file in your Cloud Storage bucket.
//-
const storage = new Storage({
  projectId: 'grape-spaceship-123'
});
const data = storage.bucket('institutions').file('data.csv');
table.createLoadJob(data, callback);

//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.createLoadJob([
  storage.bucket('institutions').file('2011.csv'),
  storage.bucket('institutions').file('2012.csv')
], callback);

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createLoadJob(data).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
```
createLoadJob(source, metadata, callback)

`createLoadJob(source: string | File, metadata: JobLoadMetadata, callback: JobCallback): void;`
;
Parameters
Returns
createLoadJob(source, callback)

`createLoadJob(source: string | File, callback: JobCallback): void;`
;
Parameters
Returns
createQueryJob(options)

`createQueryJob(options: Query): Promise<JobResponse>;`

Run a query as a job. No results are immediately returned. Instead, your callback will be executed with a Job object that you must ping for the results. See the Job documentation for explanations of how to check on the status of the job.

See BigQuery#createQueryJob for full documentation of this method.
Parameter
Returns
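No example is given for this overload; the following is a minimal sketch of creating a query job and fetching its results, reusing the `table` object from the examples above:

```js
table
  .createQueryJob({query: 'SELECT * FROM `my-dataset.my-table` LIMIT 10'})
  .then(([job]) => {
    // Poll the job until the query completes, then fetch the rows.
    return job.getQueryResults();
  })
  .then(([rows]) => {
    rows.forEach((row) => console.log(row));
  });
```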
createQueryJob(options, callback)

`createQueryJob(options: Query, callback: JobCallback): void;`
Parameters
Returns
createQueryStream(query)

`createQueryStream(query: Query): Duplex;`

Run a query scoped to your dataset as a readable object stream.

See BigQuery#createQueryStream for full documentation of this method.

Parameter

| Name | Description |
| --- | --- |
| query | `Query`. See BigQuery#createQueryStream for full documentation of this method. |

Returns

| Type | Description |
| --- | --- |
| Duplex | {stream} See BigQuery#createQueryStream for full documentation of this method. |
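A short usage sketch, reusing the `table` object from the examples above:

```js
table
  .createQueryStream({query: 'SELECT * FROM `my-dataset.my-table` LIMIT 10'})
  .on('error', console.error)
  .on('data', (row) => {
    // Each 'data' event emits one row object.
  })
  .on('end', () => {
    // All query results have been consumed.
  });
```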
createReadStream(options)

`createReadStream(options?: GetRowsOptions): ResourceStream<RowMetadata>;`
Parameter
Returns
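No example accompanies this method; here is a minimal sketch of streaming the table's rows instead of buffering them all in memory, reusing `table` from above:

```js
table
  .createReadStream()
  .on('error', console.error)
  .on('data', (row) => {
    // `row` is one RowMetadata object.
  })
  .on('end', () => {
    // All rows have been retrieved.
  });
```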
createSchemaFromString_(str)

`static createSchemaFromString_(str: string): TableSchema;`

Convert a comma-separated name:type string to a table schema object.

Parameter

| Name | Description |
| --- | --- |
| str | `string`. Comma-separated schema string. |

Returns

| Type | Description |
| --- | --- |
| TableSchema | {object} Table schema in the format the API expects. |
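For illustration, a sketch of the conversion this helper performs; the commented output is approximate, as the exact field objects are an implementation detail of the library:

```js
const {Table} = require('@google-cloud/bigquery');

const schema = Table.createSchemaFromString_(
  'name:string, servings:integer, quick:boolean'
);
// `schema` is roughly:
// {
//   fields: [
//     {name: 'name', type: 'STRING'},
//     {name: 'servings', type: 'INTEGER'},
//     {name: 'quick', type: 'BOOLEAN'}
//   ]
// }
```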
createWriteStream_(metadata)

`createWriteStream_(metadata: JobLoadMetadata | string): Writable;`

Creates a write stream. Unlike the public version, this will not automatically poll the underlying job.

Parameter

| Name | Description |
| --- | --- |
| metadata | `JobLoadMetadata \| string`. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype. |
Returns
createWriteStream(metadata)

`createWriteStream(metadata: JobLoadMetadata | string): Writable;`

Parameter

| Name | Description |
| --- | --- |
| metadata | `JobLoadMetadata \| string`. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype. |
Returns
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Load data from a CSV file.
//-
const request = require('request');

const csvUrl = 'http://goo.gl/kSE7z6';

const metadata = {
  allowJaggedRows: true,
  skipLeadingRows: 1
};

request.get(csvUrl)
  .pipe(table.createWriteStream(metadata))
  .on('job', (job) => {
    // `job` is a Job object that can be used to check the status of the
    // request.
  })
  .on('complete', (job) => {
    // The job has completed successfully.
  });

//-
// Load data from a JSON file.
//-
const fs = require('fs');

fs.createReadStream('./test/testdata/testfile.json')
  .pipe(table.createWriteStream('json'))
  .on('job', (job) => {
    // `job` is a Job object that can be used to check the status of the
    // request.
  })
  .on('complete', (job) => {
    // The job has completed successfully.
  });
```
encodeValue_(value)

`static encodeValue_(value?: {} | null): {} | null;`

Convert a row entry from native types to their encoded types that the API expects.

Parameter

| Name | Description |
| --- | --- |
| value | `{} \| null`. The value to be converted. |
Returns
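For illustration, two conversions this helper performs (Buffers are base64-encoded and Dates serialized via `toJSON`); treat the exact mapping for other native types as an implementation detail of the library:

```js
const {Table} = require('@google-cloud/bigquery');

Table.encodeValue_(Buffer.from('hello'));   // => 'aGVsbG8='
Table.encodeValue_(new Date('2020-01-01')); // => '2020-01-01T00:00:00.000Z'
```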
extract(destination, options)

`extract(destination: File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>;`

Export table to Cloud Storage.

Parameters

| Name | Description |
| --- | --- |
| destination | `File`. Where the file should be exported to. A string or a File object. |
| options | `CreateExtractJobOptions`. The configuration object. |
Returns
Example

```js
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const storage = new Storage({
  projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');

//-
// To use the default options, just pass a
// {@link https://googleapis.dev/nodejs/storage/latest/File.html File} object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.extract(extractedFile, (err, apiResponse) => {});

//-
// If you need more customization, pass an `options` object.
//-
const options = {
  format: 'json',
  gzip: true
};
table.extract(extractedFile, options, (err, apiResponse) => {});

//-
// You can also specify multiple destination files.
//-
table.extract([
  storage.bucket('institutions').file('2014.json'),
  storage.bucket('institutions-copy').file('2014.json')
], options, (err, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.extract(extractedFile, options).then((data) => {
  const apiResponse = data[0];
});
```
extract(destination, options, callback)

`extract(destination: File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void;`
Parameters
Returns
extract(destination, callback)

`extract(destination: File, callback?: JobMetadataCallback): void;`
Parameters
Returns
formatMetadata_(options)

`static formatMetadata_(options: TableMetadata): FormattedMetadata;`
Parameter
Returns
getIamPolicy(optionsOrCallback)

`getIamPolicy(optionsOrCallback?: GetPolicyOptions | PolicyCallback): Promise<PolicyResponse>;`

Gets the IAM access control policy for this table.
Parameter
Returns
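No example is given here; a minimal sketch, reusing the `table` object from the examples above:

```js
table.getIamPolicy().then(([policy]) => {
  // `policy.bindings` lists the role-to-member grants on this table.
  console.log(policy.bindings);
});
```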
getIamPolicy(options, callback)

`getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void;`
Parameters
Returns
getRows(options)

`getRows(options?: GetRowsOptions): Promise<RowsResponse>;`

Retrieves table data. The resolved RowsResponse is an array whose element 0 contains the rows.
Parameter
Returns
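A minimal sketch, reusing `table` from the examples above:

```js
table.getRows().then(([rows]) => {
  // `rows` is element 0 of the RowsResponse array.
  rows.forEach((row) => console.log(row));
});
```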
getRows(options, callback)

`getRows(options: GetRowsOptions, callback: RowsCallback): void;`
Parameters
Returns
getRows(callback)

`getRows(callback: RowsCallback): void;`
Parameter
Returns
insert(rows, options)

`insert(rows: RowMetadata | RowMetadata[], options?: InsertRowsOptions): Promise<InsertRowsResponse>;`

Stream data into BigQuery one record at a time without running a load job.

If you need to create an entire table from a file, consider using Table#load instead.

Note, if a table was recently created, inserts may fail until the table is consistent within BigQuery. If a schema is supplied, this method will automatically retry those failed inserts, and it will even create the table with the provided schema if it does not exist.
See Tabledata: insertAll API Documentation
See Streaming Insert Limits
See Troubleshooting Errors
Parameters
Returns
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Insert a single row.
//-
table.insert({
  INSTNM: 'Motion Picture Institute of Michigan',
  CITY: 'Troy',
  STABBR: 'MI'
}, insertHandler);

//-
// Insert multiple rows at a time.
//-
const rows = [
  {
    INSTNM: 'Motion Picture Institute of Michigan',
    CITY: 'Troy',
    STABBR: 'MI'
  },
  // ...
];
table.insert(rows, insertHandler);

//-
// Insert a row following the raw `insertAll` row specification.
//-
const row = {
  insertId: '1',
  json: {
    INSTNM: 'Motion Picture Institute of Michigan',
    CITY: 'Troy',
    STABBR: 'MI'
  }
};
const options = {
  raw: true
};
table.insert(row, options, insertHandler);

//-
// Handling the response. See Troubleshooting Errors for best practices on
// how to handle errors.
//-
function insertHandler(err, apiResponse) {
  if (err) {
    // An API error or partial failure occurred.
    if (err.name === 'PartialFailureError') {
      // Some rows failed to insert, while others may have succeeded.
      // err.errors (object[]):
      // err.errors[].row (original row object passed to `insert`)
      // err.errors[].errors[].reason
      // err.errors[].errors[].message
    }
  }
}

//-
// If the callback is omitted, we'll return a Promise.
//-
table.insert(rows)
  .then((data) => {
    const apiResponse = data[0];
  })
  .catch((err) => {
    // An API error or partial failure occurred.
    if (err.name === 'PartialFailureError') {
      // Some rows failed to insert, while others may have succeeded.
      // err.errors (object[]):
      // err.errors[].row (original row object passed to `insert`)
      // err.errors[].errors[].reason
      // err.errors[].errors[].message
    }
  });
```
insert(rows, options, callback)

`insert(rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, callback: InsertRowsCallback): void;`
Parameters
Returns
insert(rows, callback)

`insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void;`
Parameters
Returns
load(source, metadata)

`load(source: string | File, metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;`
Parameters
Returns
load(source, metadata, callback)

`load(source: string | File, metadata: JobLoadMetadata, callback: JobMetadataCallback): void;`
Parameters
Returns
load(source, callback)

`load(source: string | File, callback: JobMetadataCallback): void;`
Parameters
Returns
load(source, metadata)

`load(source: string | File, metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;`

Load data from a local file or a Storage File.

By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.

Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide `metadata.format`.
Parameters

| Name | Description |
| --- | --- |
| source | `string \| File`. The source file to load. A filepath as a string or a File object. |
| metadata | `JobLoadMetadata`. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. |
Returns
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Load data from a local file.
//-
table.load('./institutions.csv', (err, apiResponse) => {});

//-
// You may also pass in metadata in the format of a Jobs resource. See
// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
// for a full list of supported values.
//-
const metadata = {
  encoding: 'ISO-8859-1',
  sourceFormat: 'NEWLINE_DELIMITED_JSON'
};
table.load('./my-data.csv', metadata, (err, apiResponse) => {});

//-
// Load data from a file in your Cloud Storage bucket.
//-
const {Storage} = require('@google-cloud/storage');
const gcs = new Storage({
  projectId: 'grape-spaceship-123'
});
const data = gcs.bucket('institutions').file('data.csv');
table.load(data, (err, apiResponse) => {});

//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.load([
  gcs.bucket('institutions').file('2011.csv'),
  gcs.bucket('institutions').file('2012.csv')
], function (err, apiResponse) {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.load(data).then(function (data) {
  const apiResponse = data[0];
});
```
load(source, metadata, callback)

`load(source: string | File, metadata: JobLoadMetadata, callback: JobMetadataCallback): void;`
;
Parameters
Returns
load(source, callback)

`load(source: string | File, callback: JobMetadataCallback): void;`
;
Parameters
Returns
query(query)

`query(query: Query): Promise<SimpleQueryRowsResponse>;`

Run a query scoped to your dataset.

See BigQuery#query for full documentation of this method.

Parameter

| Name | Description |
| --- | --- |
| query | `Query`. See BigQuery#query for full documentation of this method. |
Returns
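A minimal sketch, reusing the `table` object from the examples above:

```js
table
  .query({query: 'SELECT * FROM `my-dataset.my-table` LIMIT 10'})
  .then(([rows]) => {
    rows.forEach((row) => console.log(row));
  });
```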
query(query)

`query(query: string): Promise<SimpleQueryRowsResponse>;`
Parameter
Returns
query(query, callback)

`query(query: Query, callback: SimpleQueryRowsCallback): void;`
Parameters
Returns
setIamPolicy(policy, options)

`setIamPolicy(policy: Policy, options?: SetPolicyOptions): Promise<PolicyResponse>;`

Sets the IAM access control policy for this table.
Parameters
Returns
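No example is given; here is a sketch of the usual read-modify-write pattern for IAM policies, with an illustrative role and member:

```js
table.getIamPolicy().then(([policy]) => {
  policy.bindings = policy.bindings || [];
  // Illustrative grant; substitute a real role and member.
  policy.bindings.push({
    role: 'roles/bigquery.dataViewer',
    members: ['user:jane@example.com']
  });
  return table.setIamPolicy(policy);
});
```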
setIamPolicy(policy, options, callback)

`setIamPolicy(policy: Policy, options: SetPolicyOptions, callback: PolicyCallback): void;`
Parameters
Returns
setIamPolicy(policy, callback)

`setIamPolicy(policy: Policy, callback: PolicyCallback): void;`
Parameters
Returns
setMetadata(metadata)

`setMetadata(metadata: SetTableMetadataOptions): Promise<SetMetadataResponse>;`

Parameter

Returns

| Type | Description |
| --- | --- |
| Promise<SetMetadataResponse> | {Promise<common.SetMetadataResponse>} |
Example

```js
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const metadata = {
  name: 'My recipes',
  description: 'A table for storing my recipes.',
  schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
};

table.setMetadata(metadata, (err, metadata, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.setMetadata(metadata).then((data) => {
  const metadata = data[0];
  const apiResponse = data[1];
});
```
setMetadata(metadata, callback)

`setMetadata(metadata: SetTableMetadataOptions, callback: ResponseCallback): void;`
Parameters
Returns
testIamPermissions(permissions)

`testIamPermissions(permissions: string | string[]): Promise<PermissionsResponse>;`

Tests the specified permissions against the IAM access control policy for this table.

Parameter

| Name | Description |
| --- | --- |
| permissions | `string \| string[]` |
Returns
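A minimal sketch, reusing `table` from the examples above:

```js
table
  .testIamPermissions(['bigquery.tables.get', 'bigquery.tables.updateData'])
  .then(([response]) => {
    // `response.permissions` echoes back the subset the caller holds.
    console.log(response.permissions);
  });
```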
testIamPermissions(permissions, callback)

`testIamPermissions(permissions: string | string[], callback: PermissionsCallback): void;`
Parameters
Returns