Import a local file

Import a local file into a table.

Explore further

For detailed documentation that includes this code sample, see the BigQuery guide to loading data from local files.

Code sample

C#

Before trying this sample, follow the C# setup instructions in the BigQuery quickstart using client libraries . For more information, see the BigQuery C# API reference documentation .

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries .

  using 
  
  Google.Cloud.BigQuery.V2 
 
 ; 
 using 
  
 System 
 ; 
 using 
  
 System.IO 
 ; 
 public 
  
 class 
  
 BigQueryLoadFromFile 
 { 
  
 public 
  
 void 
  
 LoadFromFile 
 ( 
  
 string 
  
 projectId 
  
 = 
  
 "your-project-id" 
 , 
  
 string 
  
 datasetId 
  
 = 
  
 "your_dataset_id" 
 , 
  
 string 
  
 tableId 
  
 = 
  
 "your_table_id" 
 , 
  
 string 
  
 filePath 
  
 = 
  
 "path/to/file.csv" 
  
 ) 
  
 { 
  
  BigQueryClient 
 
  
 client 
  
 = 
  
  BigQueryClient 
 
 . 
  Create 
 
 ( 
 projectId 
 ); 
  
 // Create job configuration 
  
 var 
  
 uploadCsvOptions 
  
 = 
  
 new 
  
  UploadCsvOptions 
 
 () 
  
 { 
  
 SkipLeadingRows 
  
 = 
  
 1 
 , 
  
 // Skips the file headers 
  
 Autodetect 
  
 = 
  
 true 
  
 }; 
  
 using 
  
 ( 
 FileStream 
  
 stream 
  
 = 
  
 File 
 . 
 Open 
 ( 
 filePath 
 , 
  
 FileMode 
 . 
 Open 
 )) 
  
 { 
  
 // Create and run job 
  
 // Note that there are methods available for formats other than CSV 
  
  BigQueryJob 
 
  
 job 
  
 = 
  
 client 
 . 
  UploadCsv 
 
 ( 
  
 datasetId 
 , 
  
 tableId 
 , 
  
 null 
 , 
  
 stream 
 , 
  
 uploadCsvOptions 
 ); 
  
 job 
  
 = 
  
 job 
 . 
  PollUntilCompleted 
 
 (). 
 ThrowOnAnyError 
 (); 
  
 // Waits for the job to complete. 
  
 // Display the number of rows uploaded 
  
  BigQueryTable 
 
  
 table 
  
 = 
  
 client 
 . 
  GetTable 
 
 ( 
 datasetId 
 , 
  
 tableId 
 ); 
  
 Console 
 . 
 WriteLine 
 ( 
  
 $"Loaded {table. Resource 
.NumRows} rows to {table. FullyQualifiedId 
}" 
 ); 
  
 } 
  
 } 
 } 
 

Go

Before trying this sample, follow the Go setup instructions in the BigQuery quickstart using client libraries . For more information, see the BigQuery Go API reference documentation .

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries .

  import 
  
 ( 
  
 "context" 
  
 "fmt" 
  
 "os" 
  
 "cloud.google.com/go/bigquery" 
 ) 
 // importCSVFromFile demonstrates loading data into a BigQuery table using a file on the local filesystem. 
 func 
  
 importCSVFromFile 
 ( 
 projectID 
 , 
  
 datasetID 
 , 
  
 tableID 
 , 
  
 filename 
  
 string 
 ) 
  
 error 
  
 { 
  
 // projectID := "my-project-id" 
  
 // datasetID := "mydataset" 
  
 // tableID := "mytable" 
  
 ctx 
  
 := 
  
 context 
 . 
 Background 
 () 
  
 client 
 , 
  
 err 
  
 := 
  
 bigquery 
 . 
 NewClient 
 ( 
 ctx 
 , 
  
 projectID 
 ) 
  
 if 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 fmt 
 . 
 Errorf 
 ( 
 "bigquery.NewClient: %w" 
 , 
  
 err 
 ) 
  
 } 
  
 defer 
  
 client 
 . 
 Close 
 () 
  
 f 
 , 
  
 err 
  
 := 
  
 os 
 . 
 Open 
 ( 
 filename 
 ) 
  
 if 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 err 
  
 } 
  
 source 
  
 := 
  
 bigquery 
 . 
  NewReaderSource 
 
 ( 
 f 
 ) 
  
 source 
 . 
 AutoDetect 
  
 = 
  
 true 
  
 // Allow BigQuery to determine schema. 
  
 source 
 . 
 SkipLeadingRows 
  
 = 
  
 1 
  
 // CSV has a single header line. 
  
 loader 
  
 := 
  
 client 
 . 
 Dataset 
 ( 
 datasetID 
 ). 
 Table 
 ( 
 tableID 
 ). 
  LoaderFrom 
 
 ( 
 source 
 ) 
  
 job 
 , 
  
 err 
  
 := 
  
 loader 
 . 
 Run 
 ( 
 ctx 
 ) 
  
 if 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 err 
  
 } 
  
 status 
 , 
  
 err 
  
 := 
  
 job 
 . 
 Wait 
 ( 
 ctx 
 ) 
  
 if 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 err 
  
 } 
  
 if 
  
 err 
  
 := 
  
 status 
 . 
  Err 
 
 (); 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 err 
  
 } 
  
 return 
  
 nil 
 } 
 

Java

Before trying this sample, follow the Java setup instructions in the BigQuery quickstart using client libraries . For more information, see the BigQuery Java API reference documentation .

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries .

  import 
  
 com.google.cloud.bigquery. BigQuery 
 
 ; 
 import 
  
 com.google.cloud.bigquery. BigQueryException 
 
 ; 
 import 
  
 com.google.cloud.bigquery. BigQueryOptions 
 
 ; 
 import 
  
 com.google.cloud.bigquery. FormatOptions 
 
 ; 
 import 
  
 com.google.cloud.bigquery. Job 
 
 ; 
 import 
  
 com.google.cloud.bigquery. JobId 
 
 ; 
 import 
  
 com.google.cloud.bigquery. JobStatistics 
. LoadStatistics 
 
 ; 
 import 
  
 com.google.cloud.bigquery. TableDataWriteChannel 
 
 ; 
 import 
  
 com.google.cloud.bigquery. TableId 
 
 ; 
 import 
  
 com.google.cloud.bigquery. WriteChannelConfiguration 
 
 ; 
 import 
  
 java.io.IOException 
 ; 
 import 
  
 java.io.OutputStream 
 ; 
 import 
  
 java.nio.channels.Channels 
 ; 
 import 
  
 java.nio.file.FileSystems 
 ; 
 import 
  
 java.nio.file.Files 
 ; 
 import 
  
 java.nio.file.Path 
 ; 
 import 
  
 java.util.UUID 
 ; 
 public 
  
 class 
 LoadLocalFile 
  
 { 
  
 public 
  
 static 
  
 void 
  
 main 
 ( 
 String 
 [] 
  
 args 
 ) 
  
 throws 
  
 IOException 
 , 
  
 InterruptedException 
  
 { 
  
 String 
  
 datasetName 
  
 = 
  
 "MY_DATASET_NAME" 
 ; 
  
 String 
  
 tableName 
  
 = 
  
 "MY_TABLE_NAME" 
 ; 
  
 Path 
  
 csvPath 
  
 = 
  
 FileSystems 
 . 
 getDefault 
 (). 
 getPath 
 ( 
 "." 
 , 
  
 "my-data.csv" 
 ); 
  
 loadLocalFile 
 ( 
 datasetName 
 , 
  
 tableName 
 , 
  
 csvPath 
 , 
  
  FormatOptions 
 
 . 
  csv 
 
 ()); 
  
 } 
  
 public 
  
 static 
  
 void 
  
 loadLocalFile 
 ( 
  
 String 
  
 datasetName 
 , 
  
 String 
  
 tableName 
 , 
  
 Path 
  
 csvPath 
 , 
  
  FormatOptions 
 
  
 formatOptions 
 ) 
  
 throws 
  
 IOException 
 , 
  
 InterruptedException 
  
 { 
  
 try 
  
 { 
  
 // Initialize client that will be used to send requests. This client only needs to be created 
  
 // once, and can be reused for multiple requests. 
  
  BigQuery 
 
  
 bigquery 
  
 = 
  
  BigQueryOptions 
 
 . 
 getDefaultInstance 
 (). 
 getService 
 (); 
  
  TableId 
 
  
 tableId 
  
 = 
  
  TableId 
 
 . 
 of 
 ( 
 datasetName 
 , 
  
 tableName 
 ); 
  
  WriteChannelConfiguration 
 
  
 writeChannelConfiguration 
  
 = 
  
  WriteChannelConfiguration 
 
 . 
 newBuilder 
 ( 
 tableId 
 ). 
 setFormatOptions 
 ( 
 formatOptions 
 ). 
 build 
 (); 
  
 // The location and JobName must be specified; other fields can be auto-detected. 
  
 String 
  
 jobName 
  
 = 
  
 "jobId_" 
  
 + 
  
 UUID 
 . 
 randomUUID 
 (). 
 toString 
 (); 
  
  JobId 
 
  
 jobId 
  
 = 
  
  JobId 
 
 . 
 newBuilder 
 (). 
 setLocation 
 ( 
 "us" 
 ). 
  setJob 
 
 ( 
 jobName 
 ). 
 build 
 (); 
  
 // Imports a local file into a table. 
  
 try 
  
 ( 
  TableDataWriteChannel 
 
  
 writer 
  
 = 
  
 bigquery 
 . 
  writer 
 
 ( 
 jobId 
 , 
  
 writeChannelConfiguration 
 ); 
  
 OutputStream 
  
 stream 
  
 = 
  
 Channels 
 . 
 newOutputStream 
 ( 
 writer 
 )) 
  
 { 
  
 Files 
 . 
  copy 
 
 ( 
 csvPath 
 , 
  
 stream 
 ); 
  
 } 
  
 // Get the Job created by the TableDataWriteChannel and wait for it to complete. 
  
  Job 
 
  
 job 
  
 = 
  
 bigquery 
 . 
  getJob 
 
 ( 
 jobId 
 ); 
  
  Job 
 
  
 completedJob 
  
 = 
  
 job 
 . 
  waitFor 
 
 (); 
  
 if 
  
 ( 
 completedJob 
  
 == 
  
 null 
 ) 
  
 { 
  
 System 
 . 
 out 
 . 
 println 
 ( 
 "Job not executed since it no longer exists." 
 ); 
  
 return 
 ; 
  
 } 
  
 else 
  
 if 
  
 ( 
 completedJob 
 . 
 getStatus 
 (). 
 getError 
 () 
  
 != 
  
 null 
 ) 
  
 { 
  
 System 
 . 
 out 
 . 
 println 
 ( 
  
 "BigQuery was unable to load local file to the table due to an error: \n" 
  
 + 
  
 job 
 . 
 getStatus 
 (). 
 getError 
 ()); 
  
 return 
 ; 
  
 } 
  
 // Get output status 
  
  LoadStatistics 
 
  
 stats 
  
 = 
  
 job 
 . 
 getStatistics 
 (); 
  
 System 
 . 
 out 
 . 
 printf 
 ( 
 "Successfully loaded %d rows. \n" 
 , 
  
 stats 
 . 
  getOutputRows 
 
 ()); 
  
 } 
  
 catch 
  
 ( 
  BigQueryException 
 
  
 e 
 ) 
  
 { 
  
 System 
 . 
 out 
 . 
 println 
 ( 
 "Local file not loaded. \n" 
  
 + 
  
 e 
 . 
 toString 
 ()); 
  
 } 
  
 } 
 } 
 

Node.js

Before trying this sample, follow the Node.js setup instructions in the BigQuery quickstart using client libraries . For more information, see the BigQuery Node.js API reference documentation .

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries .

  // Imports the Google Cloud client library 
 const 
  
 { 
 BigQuery 
 } 
  
 = 
  
 require 
 ( 
 ' @google-cloud/bigquery 
' 
 ); 
 const 
  
 bigquery 
  
 = 
  
 new 
  
  BigQuery 
 
 (); 
 async 
  
 function 
  
 loadLocalFile 
 () 
  
 { 
  
 // Imports a local file into a table. 
  
 /** 
 * TODO(developer): Uncomment the following lines before running the sample. 
 */ 
  
 // const filename = '/path/to/file.csv'; 
  
 // const datasetId = 'my_dataset'; 
  
 // const tableId = 'my_table'; 
  
 // Load data from a local file into the table 
  
 const 
  
 [ 
 job 
 ] 
  
 = 
  
 await 
  
 bigquery 
  
 . 
 dataset 
 ( 
 datasetId 
 ) 
  
 . 
 table 
 ( 
 tableId 
 ) 
  
 . 
  load 
 
 ( 
 filename 
 ); 
  
 console 
 . 
 log 
 ( 
 `Job 
 ${ 
  job 
 
 . 
 id 
 } 
 completed.` 
 ); 
 } 
 

PHP

Before trying this sample, follow the PHP setup instructions in the BigQuery quickstart using client libraries . For more information, see the BigQuery PHP API reference documentation .

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries .

  use Google\Cloud\BigQuery\BigQueryClient; 
 /** 
 * Imports data to the given table from given csv 
 * 
 * @param string $projectId The project Id of your Google Cloud Project. 
 * @param string $datasetId The BigQuery dataset ID. 
 * @param string $tableId The BigQuery table ID. 
 * @param string $source The path to the CSV source file to import. 
 */ 
 function import_from_local_csv( 
 string $projectId, 
 string $datasetId, 
 string $tableId, 
 string $source 
 ): void { 
 // instantiate the bigquery table service 
 $bigQuery = new BigQueryClient([ 
 'projectId' => $projectId, 
 ]); 
 $dataset = $bigQuery->dataset($datasetId); 
 $table = $dataset->table($tableId); 
 // create the import job 
 $loadConfig = $table->load(fopen($source, 'r'))->sourceFormat('CSV'); 
 $job = $table->runJob($loadConfig); 
 // check if the job is complete 
 $job->reload(); 
 if (!$job->isComplete()) { 
 throw new \Exception('Job has not yet completed', 500); 
 } 
 // check if the job has errors 
 if (isset($job->info()['status']['errorResult'])) { 
 $error = $job->info()['status']['errorResult']['message']; 
 printf('Error running job: %s' . PHP_EOL, $error); 
 } else { 
 print('Data imported successfully' . PHP_EOL); 
 } 
 } 
 

Python

Before trying this sample, follow the Python setup instructions in the BigQuery quickstart using client libraries . For more information, see the BigQuery Python API reference documentation .

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries .

  from 
  
 google.cloud 
  
 import 
  bigquery 
 
 # Construct a BigQuery client object. 
 client 
 = 
  bigquery 
 
 . 
  Client 
 
 () 
 # TODO(developer): Set table_id to the ID of the table to create. 
 # table_id = "your-project.your_dataset.your_table_name" 
 job_config 
 = 
  bigquery 
 
 . 
  LoadJobConfig 
 
 ( 
 source_format 
 = 
  bigquery 
 
 . 
  SourceFormat 
 
 . 
 CSV 
 , 
 skip_leading_rows 
 = 
 1 
 , 
 autodetect 
 = 
 True 
 , 
 ) 
 with 
 open 
 ( 
 file_path 
 , 
 "rb" 
 ) 
 as 
 source_file 
 : 
 job 
 = 
 client 
 . 
  load_table_from_file 
 
 ( 
 source_file 
 , 
 table_id 
 , 
 job_config 
 = 
 job_config 
 ) 
  job 
 
 . 
 result 
 () 
 # Waits for the job to complete. 
 table 
 = 
 client 
 . 
  get_table 
 
 ( 
 table_id 
 ) 
 # Make an API request. 
 print 
 ( 
 "Loaded 
 {} 
 rows and 
 {} 
 columns to 
 {} 
 " 
 . 
 format 
 ( 
 table 
 . 
  num_rows 
 
 , 
 len 
 ( 
 table 
 . 
 schema 
 ), 
 table_id 
 ) 
 ) 
 

Ruby

Before trying this sample, follow the Ruby setup instructions in the BigQuery quickstart using client libraries . For more information, see the BigQuery Ruby API reference documentation .

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries .

  require 
  
 "google/cloud/bigquery" 
 def 
  
 load_from_file 
  
 dataset_id 
  
 = 
  
 "your_dataset_id" 
 , 
  
 file_path 
  
 = 
  
 "path/to/file.csv" 
  
 bigquery 
  
 = 
  
 Google 
 :: 
 Cloud 
 :: 
  Bigquery 
 
 . 
  new 
 
  
 dataset 
  
 = 
  
 bigquery 
 . 
 dataset 
  
 dataset_id 
  
 table_id 
  
 = 
  
 "new_table_id" 
  
 # Infer the config.location based on the location of the referenced dataset. 
  
 load_job 
  
 = 
  
 dataset 
 . 
 load_job 
  
 table_id 
 , 
  
 file_path 
  
 do 
  
 | 
 config 
 | 
  
 config 
 . 
 skip_leading 
  
 = 
  
 1 
  
 config 
 . 
  autodetect 
 
  
 = 
  
 true 
  
 end 
  
 load_job 
 . 
 wait_until_done! 
  
 # Waits for table load to complete. 
  
 table 
  
 = 
  
 dataset 
 . 
 table 
  
 table_id 
  
 puts 
  
 "Loaded 
 #{ 
 table 
 . 
  rows_count 
 
 } 
 rows into 
 #{ 
 table 
 . 
 id 
 } 
 " 
 end 
 

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser .

Design a Mobile Site
View Site in Mobile | Classic
Share by: