Export a table to a CSV file

Exports a table to a CSV file in a Cloud Storage bucket.

Explore further

For detailed documentation that includes this code sample, see the following:

Exporting table data: https://cloud.google.com/bigquery/docs/exporting-data

Code sample

C#

Before trying this sample, follow the C# setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery C# API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

using Google.Cloud.BigQuery.V2;
using System;

public class BigQueryExtractTable
{
    public void ExtractTable(
        string projectId = "your-project-id",
        string bucketName = "your-bucket-name")
    {
        BigQueryClient client = BigQueryClient.Create(projectId);

        // Define a destination URI. Use a single wildcard URI if you think
        // your exported data will be larger than the 1 GB maximum value.
        string destinationUri = $"gs://{bucketName}/shakespeare-*.csv";

        BigQueryJob job = client.CreateExtractJob(
            projectId: "bigquery-public-data",
            datasetId: "samples",
            tableId: "shakespeare",
            destinationUri: destinationUri
        );
        job = job.PollUntilCompleted().ThrowOnAnyError();  // Waits for the job to complete.
        Console.Write($"Exported table to {destinationUri}.");
    }
}

Go

Before trying this sample, follow the Go setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Go API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import (
	"context"
	"fmt"

	"cloud.google.com/go/bigquery"
)

// exportTableAsCSV demonstrates using an export job to
// write the contents of a table into Cloud Storage as CSV.
func exportTableAsCSV(projectID, gcsURI string) error {
	// projectID := "my-project-id"
	// gcsURI := "gs://mybucket/shakespeare.csv"
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, projectID)
	if err != nil {
		return fmt.Errorf("bigquery.NewClient: %w", err)
	}
	defer client.Close()

	srcProject := "bigquery-public-data"
	srcDataset := "samples"
	srcTable := "shakespeare"

	gcsRef := bigquery.NewGCSReference(gcsURI)
	gcsRef.FieldDelimiter = ","

	extractor := client.DatasetInProject(srcProject, srcDataset).Table(srcTable).ExtractorTo(gcsRef)
	extractor.DisableHeader = true
	// You can choose to run the job in a specific location for more complex data locality scenarios.
	// Ex: In this example, source dataset and GCS bucket are in the US.
	extractor.Location = "US"

	job, err := extractor.Run(ctx)
	if err != nil {
		return err
	}
	status, err := job.Wait(ctx)
	if err != nil {
		return err
	}
	if err := status.Err(); err != nil {
		return err
	}
	return nil
}

Java

Before trying this sample, follow the Java setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Java API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import com.google.cloud.RetryOption;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.TableId;
import org.threeten.bp.Duration;

public class ExtractTableToCsv {

  public static void main(String[] args) {
    // TODO(developer): Replace these variables before running the sample.
    String projectId = "bigquery-public-data";
    String datasetName = "samples";
    String tableName = "shakespeare";
    String bucketName = "my-bucket";
    String destinationUri = "gs://" + bucketName + "/path/to/file";
    // For more information on export formats available see:
    // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types
    // For more information on Job see:
    // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html
    String dataFormat = "CSV";
    extractTableToCsv(projectId, datasetName, tableName, destinationUri, dataFormat);
  }

  // Exports datasetName:tableName to destinationUri as raw CSV
  public static void extractTableToCsv(
      String projectId,
      String datasetName,
      String tableName,
      String destinationUri,
      String dataFormat) {
    try {
      // Initialize client that will be used to send requests. This client only needs to be created
      // once, and can be reused for multiple requests.
      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

      TableId tableId = TableId.of(projectId, datasetName, tableName);
      Table table = bigquery.getTable(tableId);

      Job job = table.extract(dataFormat, destinationUri);

      // Blocks until this job completes its execution, either failing or succeeding.
      Job completedJob =
          job.waitFor(
              RetryOption.initialRetryDelay(Duration.ofSeconds(1)),
              RetryOption.totalTimeout(Duration.ofMinutes(3)));
      if (completedJob == null) {
        System.out.println("Job not executed since it no longer exists.");
        return;
      } else if (completedJob.getStatus().getError() != null) {
        System.out.println(
            "BigQuery was unable to extract due to an error: \n" + job.getStatus().getError());
        return;
      }
      System.out.println(
          "Table export successful. Check in GCS bucket for the " + dataFormat + " file.");
    } catch (BigQueryException | InterruptedException e) {
      System.out.println("Table extraction job was interrupted. \n" + e.toString());
    }
  }
}

Node.js

Before trying this sample, follow the Node.js setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Node.js API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

// Import the Google Cloud client libraries
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');

const bigquery = new BigQuery();
const storage = new Storage();

async function extractTableToGCS() {
  // Exports my_dataset:my_table to gcs://my-bucket/my-file as raw CSV.

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const datasetId = "my_dataset";
  // const tableId = "my_table";
  // const bucketName = "my-bucket";
  // const filename = "file.csv";

  // Location must match that of the source table.
  const options = {
    location: 'US',
  };

  // Export data from the table into a Google Cloud Storage file
  const [job] = await bigquery
    .dataset(datasetId)
    .table(tableId)
    .extract(storage.bucket(bucketName).file(filename), options);

  console.log(`Job ${job.id} created.`);

  // Check the job's status for errors
  const errors = job.status.errors;
  if (errors && errors.length > 0) {
    throw errors;
  }
}

PHP

Before trying this sample, follow the PHP setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery PHP API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

use Google\Cloud\BigQuery\BigQueryClient;

/**
 * Extracts the given table as json to given GCS bucket.
 *
 * @param string $projectId The project Id of your Google Cloud Project.
 * @param string $datasetId The BigQuery dataset ID.
 * @param string $tableId The BigQuery table ID.
 * @param string $bucketName Bucket name in Google Cloud Storage
 */
function extract_table(
    string $projectId,
    string $datasetId,
    string $tableId,
    string $bucketName
): void {
    $bigQuery = new BigQueryClient([
        'projectId' => $projectId,
    ]);
    $dataset = $bigQuery->dataset($datasetId);
    $table = $dataset->table($tableId);
    $destinationUri = "gs://{$bucketName}/{$tableId}.json";
    // Define the format to use. If the format is not specified, 'CSV' will be used.
    $format = 'NEWLINE_DELIMITED_JSON';
    // Create the extract job
    $extractConfig = $table->extract($destinationUri)->destinationFormat($format);
    // Run the job
    $job = $table->runJob($extractConfig);  // Waits for the job to complete
    printf('Exported %s to %s' . PHP_EOL, $table->id(), $destinationUri);
}

Python

Before trying this sample, follow the Python setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Python API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

# from google.cloud import bigquery
# client = bigquery.Client()
# bucket_name = 'my-bucket'

project = "bigquery-public-data"
dataset_id = "samples"
table_id = "shakespeare"

destination_uri = "gs://{}/{}".format(bucket_name, "shakespeare.csv")
dataset_ref = bigquery.DatasetReference(project, dataset_id)
table_ref = dataset_ref.table(table_id)

extract_job = client.extract_table(
    table_ref,
    destination_uri,
    # Location must match that of the source table.
    location="US",
)  # API request
extract_job.result()  # Waits for job to complete.

print(
    "Exported {}:{}.{} to {}".format(project, dataset_id, table_id, destination_uri)
)
 
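The samples on this page export uncompressed output. The Python client can also compress the export and shard it across multiple files. The following is a minimal sketch, not part of the official sample: it assumes the same public shakespeare table and a hypothetical my-bucket bucket, and uses ExtractJobConfig, Compression, and DestinationFormat from the google-cloud-bigquery library.

from google.cloud import bigquery

client = bigquery.Client()
bucket_name = "my-bucket"  # Hypothetical bucket; replace with your own.

dataset_ref = bigquery.DatasetReference("bigquery-public-data", "samples")
table_ref = dataset_ref.table("shakespeare")

# A wildcard URI lets BigQuery shard the export across multiple files,
# which is required when the exported data exceeds the 1 GB per-file maximum.
destination_uri = "gs://{}/shakespeare-*.csv.gz".format(bucket_name)

job_config = bigquery.ExtractJobConfig()
job_config.destination_format = bigquery.DestinationFormat.CSV
job_config.compression = bigquery.Compression.GZIP

extract_job = client.extract_table(
    table_ref,
    destination_uri,
    job_config=job_config,
    location="US",  # Location must match that of the source table.
)
extract_job.result()  # Waits for the job to complete.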

Ruby

Before trying this sample, follow the Ruby setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Ruby API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

require "google/cloud/bigquery"

def extract_table bucket_name = "my-bucket",
                  dataset_id  = "my_dataset_id",
                  table_id    = "my_table_id"

  bigquery = Google::Cloud::Bigquery.new
  dataset  = bigquery.dataset dataset_id
  table    = dataset.table table_id

  # Define a destination URI. Use a single wildcard URI if you think
  # your exported data will be larger than the 1 GB maximum value.
  destination_uri = "gs://#{bucket_name}/output-*.csv"

  extract_job = table.extract_job destination_uri do |config|
    # Location must match that of the source table.
    config.location = "US"
  end
  extract_job.wait_until_done! # Waits for the job to complete

  puts "Exported #{table.id} to #{destination_uri}"
end

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.
