Run a query with batch priority

Run a query job using batch priority. BigQuery queues batch queries and starts them when idle resources are available; as the samples below note, batch queries don't count toward your concurrent rate limit.

Explore further

For detailed documentation that includes this code sample, see the BigQuery documentation on running queries with batch priority.

Code sample

Go

Before trying this sample, follow the Go setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Go API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import (
    "context"
    "fmt"
    "io"
    "time"

    "cloud.google.com/go/bigquery"
)

// queryBatch demonstrates issuing a query job using batch priority.
func queryBatch(w io.Writer, projectID, dstDatasetID, dstTableID string) error {
    // projectID := "my-project-id"
    // dstDatasetID := "mydataset"
    // dstTableID := "mytable"
    ctx := context.Background()
    client, err := bigquery.NewClient(ctx, projectID)
    if err != nil {
        return fmt.Errorf("bigquery.NewClient: %w", err)
    }
    defer client.Close()

    // Build an aggregate table.
    q := client.Query(`
        SELECT
            corpus,
            SUM(word_count) as total_words,
            COUNT(1) as unique_words
        FROM ` + "`bigquery-public-data.samples.shakespeare`" + `
        GROUP BY corpus;`)
    q.Priority = bigquery.BatchPriority
    q.QueryConfig.Dst = client.Dataset(dstDatasetID).Table(dstTableID)

    // Start the job.
    job, err := q.Run(ctx)
    if err != nil {
        return err
    }
    // Job is started and will progress without interaction.
    // To simulate other work being done, sleep a few seconds.
    time.Sleep(5 * time.Second)
    status, err := job.Status(ctx)
    if err != nil {
        return err
    }

    state := "Unknown"
    switch status.State {
    case bigquery.Pending:
        state = "Pending"
    case bigquery.Running:
        state = "Running"
    case bigquery.Done:
        state = "Done"
    }
    // You can continue to monitor job progress until it reaches
    // the Done state by polling periodically. In this example,
    // we print the latest status.
    fmt.Fprintf(w, "Job %s in Location %s currently in state: %s\n", job.ID(), job.Location(), state)

    return nil
}

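The Go sample prints only a snapshot of the job's state after a short sleep. If you prefer to block until the batch job finishes, the Go client library's Job.Wait method polls the job for you. The following sketch is not part of the original sample; the waitForBatch helper name is illustrative, and it assumes a *bigquery.Job obtained from q.Run(ctx) as in the code above.

import (
    "context"
    "fmt"

    "cloud.google.com/go/bigquery"
)

// waitForBatch is a minimal sketch of blocking until a batch query job
// completes. It polls via Job.Wait and then surfaces any job-level error.
// Assumes `job` was returned by q.Run(ctx) as in the sample above.
func waitForBatch(ctx context.Context, job *bigquery.Job) error {
    status, err := job.Wait(ctx) // polls until the job reaches the Done state
    if err != nil {
        return fmt.Errorf("job.Wait: %w", err)
    }
    if err := status.Err(); err != nil {
        return fmt.Errorf("batch query finished with error: %w", err)
    }
    return nil
}

Because BigQuery queues batch jobs until idle resources are available, Wait can block considerably longer than it would for an interactive query; the sleep-and-check pattern in the sample above is the non-blocking alternative.
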
Java

Before trying this sample, follow the Java setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Java API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;

// Sample to query batch in a table
public class QueryBatch {

  public static void main(String[] args) {
    // TODO(developer): Replace these variables before running the sample.
    String projectId = "MY_PROJECT_ID";
    String datasetName = "MY_DATASET_NAME";
    String tableName = "MY_TABLE_NAME";
    String query =
        "SELECT corpus"
            + " FROM `"
            + projectId
            + "."
            + datasetName
            + "."
            + tableName
            + "` GROUP BY corpus;";
    queryBatch(query);
  }

  public static void queryBatch(String query) {
    try {
      // Initialize client that will be used to send requests. This client only needs to be created
      // once, and can be reused for multiple requests.
      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

      QueryJobConfiguration queryConfig =
          QueryJobConfiguration.newBuilder(query)
              // Run at batch priority, which won't count toward concurrent rate limit.
              .setPriority(QueryJobConfiguration.Priority.BATCH)
              .build();

      TableResult results = bigquery.query(queryConfig);

      results
          .iterateAll()
          .forEach(row -> row.forEach(val -> System.out.printf("%s,", val.toString())));

      System.out.println("Query batch performed successfully.");
    } catch (BigQueryException | InterruptedException e) {
      System.out.println("Query batch not performed \n" + e.toString());
    }
  }
}

Node.js

Before trying this sample, follow the Node.js setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Node.js API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

// Import the Google Cloud client library and create a client
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function queryBatch() {
  // Runs a query at batch priority.

  // Create query job configuration. For all options, see
  // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationquery
  const queryJobConfig = {
    query: `SELECT corpus
            FROM \`bigquery-public-data.samples.shakespeare\`
            LIMIT 10`,
    useLegacySql: false,
    priority: 'BATCH',
  };

  // Create job configuration. For all options, see
  // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfiguration
  const jobConfig = {
    // Specify a job configuration to set optional job resource properties.
    configuration: {
      query: queryJobConfig,
    },
  };

  // Make API request.
  const [job] = await bigquery.createJob(jobConfig);

  const jobId = job.metadata.id;
  const state = job.metadata.status.state;
  console.log(`Job ${jobId} is currently in state ${state}`);
}

Python

Before trying this sample, follow the Python setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Python API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import typing

from google.cloud import bigquery

# Construct a BigQuery client object.
client = bigquery.Client()

job_config = bigquery.QueryJobConfig(
    # Run at batch priority, which won't count toward concurrent rate limit.
    priority=bigquery.QueryPriority.BATCH
)

sql = """
    SELECT corpus
    FROM `bigquery-public-data.samples.shakespeare`
    GROUP BY corpus;
"""

# Start the query, passing in the extra configuration.
query_job = client.query(sql, job_config=job_config)  # Make an API request.

# Check on the progress by getting the job's updated state. Once the state
# is `DONE`, the results are ready.
query_job = typing.cast(
    "bigquery.QueryJob",
    client.get_job(
        query_job.job_id, location=query_job.location
    ),  # Make an API request.
)

print(
    "Job {} is currently in state {}".format(query_job.job_id, query_job.state)
)

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.
