Streaming insert

Inserts simple rows into a table using the streaming API (insertAll).

Code sample

C#

Before trying this sample, follow the C# setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery C# API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
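
For local development, one common way to set up Application Default Credentials is the gcloud CLI (this note applies to every language sample on this page):

gcloud auth application-default login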

using Google.Cloud.BigQuery.V2;

public class BigQueryTableInsertRows
{
    public void TableInsertRows(
        string projectId = "your-project-id",
        string datasetId = "your_dataset_id",
        string tableId = "your_table_id"
    )
    {
        BigQueryClient client = BigQueryClient.Create(projectId);
        BigQueryInsertRow[] rows = new BigQueryInsertRow[]
        {
            // The insert ID is optional, but can avoid duplicate data
            // when retrying inserts.
            new BigQueryInsertRow(insertId: "row1")
            {
                { "name", "Washington" },
                { "post_abbr", "WA" }
            },
            new BigQueryInsertRow(insertId: "row2")
            {
                { "name", "Colorado" },
                { "post_abbr", "CO" }
            }
        };
        client.InsertRows(datasetId, tableId, rows);
    }
}
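
As a minimal usage sketch, the method above could be invoked like this (the project, dataset, and table IDs are placeholders to replace with your own):

var sample = new BigQueryTableInsertRows();
sample.TableInsertRows("your-project-id", "your_dataset_id", "your_table_id");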
 

Go

Before trying this sample, follow the Go setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Go API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import (
    "context"
    "fmt"

    "cloud.google.com/go/bigquery"
)

// Item represents a row item.
type Item struct {
    Name string
    Age  int
}

// Save implements the ValueSaver interface.
// This example disables best-effort de-duplication, which allows for higher throughput.
func (i *Item) Save() (map[string]bigquery.Value, string, error) {
    return map[string]bigquery.Value{
        "full_name": i.Name,
        "age":       i.Age,
    }, bigquery.NoDedupeID, nil
}

// insertRows demonstrates inserting data into a table using the streaming insert mechanism.
func insertRows(projectID, datasetID, tableID string) error {
    // projectID := "my-project-id"
    // datasetID := "mydataset"
    // tableID := "mytable"
    ctx := context.Background()
    client, err := bigquery.NewClient(ctx, projectID)
    if err != nil {
        return fmt.Errorf("bigquery.NewClient: %w", err)
    }
    defer client.Close()

    inserter := client.Dataset(datasetID).Table(tableID).Inserter()
    items := []*Item{
        // Item implements the ValueSaver interface.
        {Name: "Phred Phlyntstone", Age: 32},
        {Name: "Wylma Phlyntstone", Age: 29},
    }
    if err := inserter.Put(ctx, items); err != nil {
        return err
    }
    return nil
}
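
Returning bigquery.NoDedupeID from Save disables best-effort de-duplication in exchange for higher throughput, as the comment notes. Returning a unique per-row string instead would let BigQuery discard duplicates when a Put call is retried, mirroring the insertId pattern used in the other samples on this page.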
 

Java

Before trying this sample, follow the Java setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Java API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryError;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.InsertAllRequest;
import com.google.cloud.bigquery.InsertAllResponse;
import com.google.cloud.bigquery.TableId;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sample showing how to insert rows into a table without running a load job.
public class TableInsertRows {

  public static void main(String[] args) {
    // TODO(developer): Replace these variables before running the sample.
    String datasetName = "MY_DATASET_NAME";
    String tableName = "MY_TABLE_NAME";
    // Create a row to insert
    Map<String, Object> rowContent = new HashMap<>();
    rowContent.put("booleanField", true);
    rowContent.put("numericField", "3.14");
    // TODO(developer): Replace the row id with a unique value for each row.
    String rowId = "ROW_ID";
    tableInsertRows(datasetName, tableName, rowId, rowContent);
  }

  public static void tableInsertRows(
      String datasetName, String tableName, String rowId, Map<String, Object> rowContent) {
    try {
      // Initialize client that will be used to send requests. This client only needs to be created
      // once, and can be reused for multiple requests.
      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

      // Get table
      TableId tableId = TableId.of(datasetName, tableName);

      // Inserts rowContent into datasetName:tableId.
      InsertAllResponse response =
          bigquery.insertAll(
              InsertAllRequest.newBuilder(tableId)
                  // More rows can be added in the same RPC by invoking .addRow() on the builder.
                  // You can omit the unique row ids to disable de-duplication.
                  .addRow(rowId, rowContent)
                  .build());

      if (response.hasErrors()) {
        // If any of the insertions failed, this lets you inspect the errors
        for (Map.Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
          System.out.println("Response error: \n" + entry.getValue());
        }
        // Don't report success if any rows failed.
        return;
      }
      System.out.println("Rows successfully inserted into table");
    } catch (BigQueryException e) {
      System.out.println("Insert operation not performed \n" + e.toString());
    }
  }
}
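
Note that insertAll can succeed as an RPC while individual rows are rejected, so the sample checks response.hasErrors() and walks getInsertErrors() rather than relying on the catch block alone; the BigQueryException path covers request-level failures such as a nonexistent table.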
 

Node.js

Before trying this sample, follow the Node.js setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Node.js API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

// Import the Google Cloud client library
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function insertRowsAsStream() {
  // Inserts the JSON objects into my_dataset:my_table.

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const datasetId = 'my_dataset';
  // const tableId = 'my_table';

  const rows = [
    {name: 'Tom', age: 30},
    {name: 'Jane', age: 32},
  ];

  // Insert data into a table
  await bigquery.dataset(datasetId).table(tableId).insert(rows);
  console.log(`Inserted ${rows.length} rows`);
}
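
A minimal invocation of the function above, once datasetId and tableId are uncommented and set (the error handling here is only a sketch):

insertRowsAsStream().catch(console.error);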
 

PHP

Before trying this sample, follow the PHP setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery PHP API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

use Google\Cloud\BigQuery\BigQueryClient;

/**
 * Stream data into BigQuery.
 *
 * @param string $projectId The project ID of your Google Cloud project.
 * @param string $datasetId The BigQuery dataset ID.
 * @param string $tableId The BigQuery table ID.
 * @param string $data JSON-encoded data, for example:
 *     $data = json_encode([
 *         "field1" => "value1",
 *         "field2" => "value2",
 *     ]);
 */
function stream_row(
    string $projectId,
    string $datasetId,
    string $tableId,
    string $data
): void {
    // Instantiate the BigQuery table service.
    $bigQuery = new BigQueryClient([
        'projectId' => $projectId,
    ]);
    $dataset = $bigQuery->dataset($datasetId);
    $table = $dataset->table($tableId);
    $data = json_decode($data, true);
    $insertResponse = $table->insertRows([
        ['data' => $data],
        // additional rows can go here
    ]);
    if ($insertResponse->isSuccessful()) {
        print('Data streamed into BigQuery successfully' . PHP_EOL);
    } else {
        foreach ($insertResponse->failedRows() as $row) {
            foreach ($row['errors'] as $error) {
                printf('%s: %s' . PHP_EOL, $error['reason'], $error['message']);
            }
        }
    }
}
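
A rough usage sketch with placeholder arguments (the field names must match the destination table's schema):

stream_row('your-project-id', 'your_dataset_id', 'your_table_id', json_encode([
    'field1' => 'value1',
    'field2' => 'value2',
]));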
 

Python

Before trying this sample, follow the Python setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Python API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

from google.cloud import bigquery

# Construct a BigQuery client object.
client = bigquery.Client()

# TODO(developer): Set table_id to the ID of table to append to.
# table_id = "your-project.your_dataset.your_table"

rows_to_insert = [
    {"full_name": "Phred Phlyntstone", "age": 32},
    {"full_name": "Wylma Phlyntstone", "age": 29},
]

errors = client.insert_rows_json(table_id, rows_to_insert)  # Make an API request.
if errors == []:
    print("New rows have been added.")
else:
    print("Encountered errors while inserting rows: {}".format(errors))
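
The Python client can also attach insert IDs for best-effort de-duplication, mirroring the insertId pattern in the other samples: insert_rows_json accepts an optional row_ids argument (the ID values below are illustrative placeholders):

errors = client.insert_rows_json(
    table_id,
    rows_to_insert,
    row_ids=["row1", "row2"],  # one stable ID per row enables de-duplication on retry
)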
 

Ruby

Before trying this sample, follow the Ruby setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Ruby API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

require "google/cloud/bigquery"

def table_insert_rows dataset_id = "your_dataset_id", table_id = "your_table_id"
  bigquery = Google::Cloud::Bigquery.new
  dataset  = bigquery.dataset dataset_id
  table    = dataset.table table_id

  row_data = [
    { name: "Alice", value: 5 },
    { name: "Bob",   value: 10 }
  ]
  response = table.insert row_data

  if response.success?
    puts "Inserted rows successfully"
  else
    puts "Failed to insert #{response.error_rows.count} rows"
  end
end
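
Calling the method above with explicit arguments rather than its placeholder defaults (both IDs must refer to an existing dataset and table):

table_insert_rows "your_dataset_id", "your_table_id"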
 

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.
