Table objects are returned by methods such as Dataset#table, Dataset#createTable, and Dataset#getTables.
Package
@google-cloud/bigquery

Example

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
Constructors
(constructor)(dataset, id, options)
constructor(dataset: Dataset, id: string, options?: TableOptions);

Constructs a new instance of the Table class.
Properties
bigQuery
bigQuery: BigQuery;

dataset
dataset: Dataset;

location
location?: string;

rowQueue
rowQueue?: RowQueue;
Methods
_createLoadJob(source, metadata)
_createLoadJob(source: string | File | File[], metadata: JobLoadMetadata): Promise<JobResponse>;

Returns: Promise<JobResponse>
copy(destination, metadata)
copy(destination: Table, metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;

Copy data from one table to another, optionally creating that table.

Parameters:
destination (Table): The destination table.
metadata (CopyTableMetadata): Metadata to set with the copy operation. The metadata object should be in the format of a `JobConfigurationTableCopy` object.

Returns: Promise<JobMetadataResponse>
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');

table.copy(yourTable, (err, apiResponse) => {});

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};

table.copy(yourTable, metadata, (err, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.copy(yourTable, metadata).then((data) => {
  const apiResponse = data[0];
});
copy(destination, metadata, callback)
copy(destination: Table, metadata: CopyTableMetadata, callback: JobMetadataCallback): void;

Parameters:
destination (Table)
metadata (CopyTableMetadata)
callback (JobMetadataCallback)

Returns: void
copy(destination, callback)
copy(destination: Table, callback: JobMetadataCallback): void;

Returns: void
copyFrom(sourceTables, metadata)
copyFrom(sourceTables: Table | Table[], metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;

Copy data from multiple tables into this table.

Parameters:
sourceTables (Table | Table[]): The tables to copy data from.
metadata (CopyTableMetadata): Metadata to set with the copy operation. The metadata object should be in the format of a `JobConfigurationTableCopy` object.

Returns: Promise<JobMetadataResponse>
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const sourceTables = [
  dataset.table('your-table'),
  dataset.table('your-second-table')
];

table.copyFrom(sourceTables, (err, apiResponse) => {});

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};

table.copyFrom(sourceTables, metadata, (err, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.copyFrom(sourceTables, metadata).then((data) => {
  const apiResponse = data[0];
});
copyFrom(sourceTables, metadata, callback)
copyFrom(sourceTables: Table | Table[], metadata: CopyTableMetadata, callback: JobMetadataCallback): void;

Parameters:
sourceTables (Table | Table[])
metadata (CopyTableMetadata)
callback (JobMetadataCallback)

Returns: void
copyFrom(sourceTables, callback)
copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void;

Returns: void
createCopyFromJob(source, metadata)
createCopyFromJob(source: Table | Table[], metadata?: CopyTableMetadata): Promise<JobResponse>;

Copy data from multiple tables into this table.

Parameters:
source (Table | Table[]): The table(s) to copy data from.
metadata (CopyTableMetadata): Metadata to set with the copy operation. The metadata object should be in the format of a `JobConfigurationTableCopy` object.

Returns: Promise<JobResponse>
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const sourceTables = [
  dataset.table('your-table'),
  dataset.table('your-second-table')
];

const callback = (err, job, apiResponse) => {
  // `job` is a Job object that can be used to check the status of the
  // request.
};

table.createCopyFromJob(sourceTables, callback);

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};

table.createCopyFromJob(sourceTables, metadata, callback);

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyFromJob(sourceTables, metadata).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
createCopyFromJob(source, metadata, callback)
createCopyFromJob(source: Table | Table[], metadata: CopyTableMetadata, callback: JobCallback): void;

Returns: void
createCopyFromJob(source, callback)
createCopyFromJob(source: Table | Table[], callback: JobCallback): void;

Returns: void
createCopyJob(destination, metadata)
createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): Promise<JobResponse>;

Copy data from one table to another, optionally creating that table.

Parameters:
destination (Table): The destination table.
metadata (CreateCopyJobMetadata): Metadata to set with the copy operation. The metadata object should be in the format of a `JobConfigurationTableCopy` object.

Returns: Promise<JobResponse>
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');

table.createCopyJob(yourTable, (err, job, apiResponse) => {
  // `job` is a Job object that can be used to check the status of the
  // request.
});

//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
  createDisposition: 'CREATE_NEVER',
  writeDisposition: 'WRITE_TRUNCATE'
};

table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyJob(yourTable, metadata).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
createCopyJob(destination, metadata, callback)
createCopyJob(destination: Table, metadata: CreateCopyJobMetadata, callback: JobCallback): void;

Returns: void
createCopyJob(destination, callback)
createCopyJob(destination: Table, callback: JobCallback): void;

Returns: void
createExtractJob(destination, options)
createExtractJob(destination: File, options?: CreateExtractJobOptions): Promise<JobResponse>;

Export table to Cloud Storage.

Parameters:
destination (File): Where the file should be exported to. A string or a File object.
options (CreateExtractJobOptions): The configuration object.

Returns: Promise<JobResponse>
Example:

const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const storage = new Storage({
  projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');

function callback(err, job, apiResponse) {
  // `job` is a Job object that can be used to check the status of the
  // request.
}

//-
// To use the default options, just pass a {@link
// https://googleapis.dev/nodejs/storage/latest/File.html File} object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.createExtractJob(extractedFile, callback);

//-
// If you need more customization, pass an `options` object.
//-
const options = {
  format: 'json',
  gzip: true
};

table.createExtractJob(extractedFile, options, callback);

//-
// You can also specify multiple destination files.
//-
table.createExtractJob([
  storage.bucket('institutions').file('2014.json'),
  storage.bucket('institutions-copy').file('2014.json')
], options, callback);

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createExtractJob(extractedFile, options).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
createExtractJob(destination, options, callback)
createExtractJob(destination: File, options: CreateExtractJobOptions, callback: JobCallback): void;

Parameters:
destination (File)
options (CreateExtractJobOptions)
callback (JobCallback)

Returns: void
createExtractJob(destination, callback)
createExtractJob(destination: File, callback: JobCallback): void;

Parameters:
destination (File)
callback (JobCallback)

Returns: void
createInsertStream(options)
createInsertStream(options?: InsertStreamOptions): Writable;

Returns: Writable
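No example accompanies this method here. The sketch below is an assumption-laden illustration: it assumes the returned Writable accepts row objects in the same shape as Table#insert, and the row contents are illustrative.

// Minimal sketch (see assumptions above): stream rows into the table.
const insertStream = table.createInsertStream();

insertStream.on('error', (err) => {
  // Handle insert errors here.
});

insertStream.write({INSTNM: 'Motion Picture Institute of Michigan'}); // illustrative row
insertStream.end();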
createLoadJob(source, metadata)
createLoadJob(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobResponse>;

Load data from a local file or a Cloud Storage File.

By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.

Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide `metadata.format`.

Parameters:
source (string | File | File[]): The source file to load. A string (path) to a local file, or one or more File objects.
metadata (JobLoadMetadata): Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource.

Returns: Promise<JobResponse>
Example:

const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Load data from a local file.
//-
const callback = (err, job, apiResponse) => {
  // `job` is a Job object that can be used to check the status of the
  // request.
};

table.createLoadJob('./institutions.csv', callback);

//-
// You may also pass in metadata in the format of a Jobs resource. See
// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
// for a full list of supported values.
//-
const metadata = {
  encoding: 'ISO-8859-1',
  sourceFormat: 'NEWLINE_DELIMITED_JSON'
};

table.createLoadJob('./my-data.csv', metadata, callback);

//-
// Load data from a file in your Cloud Storage bucket.
//-
const storage = new Storage({
  projectId: 'grape-spaceship-123'
});
const data = storage.bucket('institutions').file('data.csv');
table.createLoadJob(data, callback);

//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.createLoadJob([
  storage.bucket('institutions').file('2011.csv'),
  storage.bucket('institutions').file('2012.csv')
], callback);

//-
// If the callback is omitted, we'll return a Promise.
//-
table.createLoadJob(data).then((data) => {
  const job = data[0];
  const apiResponse = data[1];
});
createLoadJob(source, metadata, callback)
createLoadJob(source: string | File | File[], metadata: JobLoadMetadata, callback: JobCallback): void;

Parameters:
source (string | File | File[])
metadata (JobLoadMetadata)
callback (JobCallback)

Returns: void
createLoadJob(source, callback)
createLoadJob(source: string | File | File[], callback: JobCallback): void;

Parameters:
source (string | File | File[])
callback (JobCallback)

Returns: void
createQueryJob(options)
createQueryJob(options: Query): Promise<JobResponse>;

Run a query as a job. No results are immediately returned. Instead, your callback will be executed with a Job object that you must ping for the results. See the Job documentation for explanations of how to check on the status of the job.

See BigQuery#createQueryJob for full documentation of this method.

Returns: Promise<JobResponse>
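No example is given for this overload; a minimal sketch follows. The query string is illustrative, and the job is polled for results via Job#getQueryResults.

table.createQueryJob({
  query: 'SELECT * FROM `my-dataset.my-table` LIMIT 10' // illustrative query
}).then((data) => {
  const job = data[0];
  // Poll the job for the query results.
  return job.getQueryResults();
}).then((results) => {
  const rows = results[0];
  // `rows` contains the query results.
});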
createQueryJob(options, callback)
createQueryJob(options: Query, callback: JobCallback): void;

Returns: void
createQueryStream(query)
createQueryStream(query: Query): Duplex;

Run a query scoped to your dataset as a readable object stream.

See BigQuery#createQueryStream for full documentation of this method.

Returns: Duplex (stream)
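No example is given; a minimal sketch, assuming a plain query string is accepted here as it is by BigQuery#createQueryStream (the query text is illustrative):

table.createQueryStream('SELECT * FROM `my-dataset.my-table`')
  .on('error', console.error)
  .on('data', (row) => {
    // `row` is a result row object.
  })
  .on('end', () => {
    // All rows have been retrieved.
  });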
createReadStream(options)
createReadStream(options?: GetRowsOptions): ResourceStream<RowMetadata>;
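No description or example is given for this method; a minimal sketch of streaming the table's rows as objects (row contents illustrative):

table.createReadStream()
  .on('error', console.error)
  .on('data', (row) => {
    // `row` is a row object, e.g. {INSTNM: '...', CITY: '...'} (illustrative).
  })
  .on('end', () => {
    // All rows have been retrieved.
  });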
createSchemaFromString_(str)
static createSchemaFromString_(str: string): TableSchema;

Convert a comma-separated name:type string to a table schema object.

Parameters:
str (string): Comma-separated schema string.

Returns: TableSchema
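A sketch of the conversion, reusing the schema string from the setMetadata example below. The exact output shape is an assumption based on the `TableSchema` format, and it assumes the package exports the Table class.

const {Table} = require('@google-cloud/bigquery'); // assumes Table is exported
const schema = Table.createSchemaFromString_(
  'name:string, servings:integer, cookingTime:float, quick:boolean'
);
// Expected to be roughly:
// {fields: [{name: 'name', type: 'STRING'}, {name: 'servings', type: 'INTEGER'}, ...]}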
createWriteStream_(metadata)
createWriteStream_(metadata: JobLoadMetadata | string): Writable;

Creates a write stream. Unlike the public version, this will not automatically poll the underlying job.

Parameters:
metadata (JobLoadMetadata | string): Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype.

Returns: Writable (WritableStream)
createWriteStream(metadata)
createWriteStream(metadata: JobLoadMetadata | string): Writable;

Load data into your table from a readable stream of AVRO, CSV, JSON, ORC, or PARQUET data.

Parameters:
metadata (JobLoadMetadata | string): Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype.

Returns: Writable (WritableStream)
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Load data from a CSV file.
//-
const request = require('request');

const csvUrl = 'http://goo.gl/kSE7z6';

const metadata = {
  allowJaggedRows: true,
  skipLeadingRows: 1
};

request.get(csvUrl)
  .pipe(table.createWriteStream(metadata))
  .on('job', (job) => {
    // `job` is a Job object that can be used to check the status of the
    // request.
  })
  .on('complete', (job) => {
    // The job has completed successfully.
  });

//-
// Load data from a JSON file.
//-
const fs = require('fs');

fs.createReadStream('./test/testdata/testfile.json')
  .pipe(table.createWriteStream('json'))
  .on('job', (job) => {
    // `job` is a Job object that can be used to check the status of the
    // request.
  })
  .on('complete', (job) => {
    // The job has completed successfully.
  });
encodeValue_(value)
static encodeValue_(value?: {} | null): {} | null;

Convert a row entry from native types to their encoded types that the API expects.

Parameters:
value ({} | null): The value to be converted.

Returns: {} | null. The converted value.
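A sketch of a few conversions. The encodings shown in the comments are assumptions based on what the BigQuery REST API accepts (base64 for bytes, ISO 8601 for timestamps), and it assumes the package exports the Table class.

const {Table} = require('@google-cloud/bigquery'); // assumes Table is exported
Table.encodeValue_(Buffer.from('abc'));     // expected: a base64-encoded string
Table.encodeValue_(new Date('2020-01-01')); // expected: an ISO 8601 timestamp string
Table.encodeValue_({a: [1, 2]});            // expected: same structure, values encoded recursively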
extract(destination, options)
extract(destination: File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>;

Export table to Cloud Storage.

Parameters:
destination (File): Where the file should be exported to. A string or a File object.
options (CreateExtractJobOptions): The configuration object.

Returns: Promise<JobMetadataResponse>
Example:

const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const storage = new Storage({
  projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');

//-
// To use the default options, just pass a {@link
// https://googleapis.dev/nodejs/storage/latest/File.html File} object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.extract(extractedFile, (err, apiResponse) => {});

//-
// If you need more customization, pass an `options` object.
//-
const options = {
  format: 'json',
  gzip: true
};

table.extract(extractedFile, options, (err, apiResponse) => {});

//-
// You can also specify multiple destination files.
//-
table.extract([
  storage.bucket('institutions').file('2014.json'),
  storage.bucket('institutions-copy').file('2014.json')
], options, (err, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.extract(extractedFile, options).then((data) => {
  const apiResponse = data[0];
});
extract(destination, options, callback)
extract(destination: File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void;

Parameters:
destination (File)
options (CreateExtractJobOptions)
callback (JobMetadataCallback)

Returns: void
extract(destination, callback)
extract(destination: File, callback?: JobMetadataCallback): void;

Parameters:
destination (File)
callback (JobMetadataCallback)

Returns: void
formatMetadata_(options)
static formatMetadata_(options: TableMetadata): FormattedMetadata;
getIamPolicy(optionsOrCallback)
getIamPolicy(optionsOrCallback?: GetPolicyOptions | PolicyCallback): Promise<PolicyResponse>;

Get the IAM access control policy for the table.
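No example is given; a minimal sketch, assuming the callback receives the policy as its second argument in the usual PolicyCallback shape:

table.getIamPolicy((err, policy) => {
  // `policy` is the table's IAM policy (bindings, etag, ...).
});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.getIamPolicy().then((data) => {
  const policy = data[0];
});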
getIamPolicy(options, callback)
getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void;

Returns: void
getRows(options)
getRows(options?: GetRowsOptions): Promise<RowsResponse>;

Returns: Promise<RowsResponse>. The first element of the response array contains the rows.
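No example is given; a minimal sketch of the callback and Promise forms:

table.getRows((err, rows) => {
  if (!err) {
    // `rows` is an array of row objects.
  }
});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.getRows().then((data) => {
  const rows = data[0];
});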
getRows(options, callback)
getRows(options: GetRowsOptions, callback: RowsCallback): void;

Returns: void
getRows(callback)
getRows(callback: RowsCallback): void;

Returns: void
insert(rows, options)
insert(rows: RowMetadata | RowMetadata[], options?: InsertRowsOptions): Promise<InsertRowsResponse>;

Stream data into BigQuery one record at a time without running a load job.

If you need to create an entire table from a file, consider using Table#load instead.

Note: if a table was recently created, inserts may fail until the table is consistent within BigQuery. If a `schema` is supplied, this method will automatically retry those failed inserts, and it will even create the table with the provided schema if it does not exist.

See Tabledata: insertAll API Documentation
See Streaming Insert Limits
See Troubleshooting Errors

Parameters:
rows (RowMetadata | RowMetadata[])
options (InsertRowsOptions)

Returns: Promise<InsertRowsResponse>
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Insert a single row.
//-
table.insert({
  INSTNM: 'Motion Picture Institute of Michigan',
  CITY: 'Troy',
  STABBR: 'MI'
}, insertHandler);

//-
// Insert multiple rows at a time.
//-
const rows = [
  {
    INSTNM: 'Motion Picture Institute of Michigan',
    CITY: 'Troy',
    STABBR: 'MI'
  },
  // ...
];

table.insert(rows, insertHandler);

//-
// Insert a row according to the raw insert specification.
//-
const row = {
  insertId: '1',
  json: {
    INSTNM: 'Motion Picture Institute of Michigan',
    CITY: 'Troy',
    STABBR: 'MI'
  }
};

const options = {
  raw: true
};

table.insert(row, options, insertHandler);

//-
// Handling the response. See Troubleshooting Errors for best practices on
// how to handle errors.
//-
function insertHandler(err, apiResponse) {
  if (err) {
    // An API error or partial failure occurred.
    if (err.name === 'PartialFailureError') {
      // Some rows failed to insert, while others may have succeeded.
      // err.errors (object[]):
      // err.errors[].row (original row object passed to `insert`)
      // err.errors[].errors[].reason
      // err.errors[].errors[].message
    }
  }
}

//-
// If the callback is omitted, we'll return a Promise.
//-
table.insert(rows)
  .then((data) => {
    const apiResponse = data[0];
  })
  .catch((err) => {
    // An API error or partial failure occurred.
    if (err.name === 'PartialFailureError') {
      // Some rows failed to insert, while others may have succeeded.
      // err.errors (object[]):
      // err.errors[].row (original row object passed to `insert`)
      // err.errors[].errors[].reason
      // err.errors[].errors[].message
    }
  });
insert(rows, options, callback)
insert(rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, callback: InsertRowsCallback): void;

Returns: void
insert(rows, callback)
insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void;

Returns: void
load(source, metadata)
load(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;

Returns: Promise<JobMetadataResponse>
load(source, metadata, callback)
load(source: string | File | File[], metadata: JobLoadMetadata, callback: JobMetadataCallback): void;

Parameters:
source (string | File | File[])
metadata (JobLoadMetadata)
callback (JobMetadataCallback)

Returns: void
load(source, callback)
load(source: string | File | File[], callback: JobMetadataCallback): void;

Parameters:
source (string | File | File[])
callback (JobMetadataCallback)

Returns: void
load(source, metadata)
load(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;

Load data from a local file or a Cloud Storage File.

By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.

Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide `metadata.format`.

Parameters:
source (string | File | File[]): The source file to load. A filepath as a string or a File object.
metadata (JobLoadMetadata): Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource.

Returns: Promise<JobMetadataResponse>
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

//-
// Load data from a local file.
//-
table.load('./institutions.csv', (err, apiResponse) => {});

//-
// You may also pass in metadata in the format of a Jobs resource. See
// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
// for a full list of supported values.
//-
const metadata = {
  encoding: 'ISO-8859-1',
  sourceFormat: 'NEWLINE_DELIMITED_JSON'
};

table.load('./my-data.csv', metadata, (err, apiResponse) => {});

//-
// Load data from a file in your Cloud Storage bucket.
//-
const {Storage} = require('@google-cloud/storage');
const gcs = new Storage({
  projectId: 'grape-spaceship-123'
});
const data = gcs.bucket('institutions').file('data.csv');
table.load(data, (err, apiResponse) => {});

//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.load([
  gcs.bucket('institutions').file('2011.csv'),
  gcs.bucket('institutions').file('2012.csv')
], function(err, apiResponse) {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.load(data).then(function(data) {
  const apiResponse = data[0];
});
load(source, metadata, callback)
load(source: string | File | File[], metadata: JobLoadMetadata, callback: JobMetadataCallback): void;

Parameters:
source (string | File | File[])
metadata (JobLoadMetadata)
callback (JobMetadataCallback)

Returns: void
load(source, callback)
load(source: string | File | File[], callback: JobMetadataCallback): void;

Parameters:
source (string | File | File[])
callback (JobMetadataCallback)

Returns: void
query(query)
query(query: Query): Promise<SimpleQueryRowsResponse>;

Run a query scoped to your dataset.

See BigQuery#query for full documentation of this method.
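No example is given; a minimal sketch, using the string overload below (the query text is illustrative):

table.query('SELECT * FROM `my-dataset.my-table` LIMIT 10').then((data) => {
  const rows = data[0];
  // `rows` is an array of result row objects.
});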
query(query)
query(query: string): Promise<SimpleQueryRowsResponse>;

Parameters:
query (string)
query(query, callback)
query(query: Query, callback: SimpleQueryRowsCallback): void;

Returns: void
setIamPolicy(policy, options)
setIamPolicy(policy: Policy, options?: SetPolicyOptions): Promise<PolicyResponse>;

Set the IAM access control policy on the table.
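No example is given; the sketch below assumes a policy in the standard IAM format, with an illustrative role and member:

const policy = {
  bindings: [
    {
      role: 'roles/bigquery.dataViewer',  // illustrative role
      members: ['user:jane@example.com']  // illustrative member
    }
  ]
};

table.setIamPolicy(policy, (err, updatedPolicy) => {
  // `updatedPolicy` is the policy as stored by the API.
});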
setIamPolicy(policy, options, callback)
setIamPolicy(policy: Policy, options: SetPolicyOptions, callback: PolicyCallback): void;

Returns: void
setIamPolicy(policy, callback)
setIamPolicy(policy: Policy, callback: PolicyCallback): void;

Returns: void
setMetadata(metadata)
setMetadata(metadata: SetTableMetadataOptions): Promise<SetMetadataResponse>;

Set the metadata on the table.

Returns: Promise<common.SetMetadataResponse>
Example:

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

const metadata = {
  name: 'My recipes',
  description: 'A table for storing my recipes.',
  schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
};

table.setMetadata(metadata, (err, metadata, apiResponse) => {});

//-
// If the callback is omitted, we'll return a Promise.
//-
table.setMetadata(metadata).then((data) => {
  const metadata = data[0];
  const apiResponse = data[1];
});
setMetadata(metadata, callback)
setMetadata(metadata: SetTableMetadataOptions, callback: ResponseCallback): void;

Returns: void
testIamPermissions(permissions)
testIamPermissions(permissions: string | string[]): Promise<PermissionsResponse>;

Test the IAM access control permissions for the table.

Parameters:
permissions (string | string[]): The permission(s) to test for.
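No example is given; the sketch below assumes the response carries a `permissions` array listing the subset the caller holds, as in the IAM `testIamPermissions` REST method:

const permissions = [
  'bigquery.tables.get',
  'bigquery.tables.updateData'
];

table.testIamPermissions(permissions, (err, response) => {
  // `response.permissions` lists which of the requested permissions the
  // caller actually has (assumption noted above).
});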
testIamPermissions(permissions, callback)
testIamPermissions(permissions: string | string[], callback: PermissionsCallback): void;

Returns: void