Schedule recurring load jobs from Amazon Redshift into BigQuery.
Code sample
Java
Before trying this sample, follow the Java setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Java API reference documentation.
To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
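If you want to confirm that Application Default Credentials are discoverable before running the sample, a minimal check like the following sketch can help. It assumes com.google.auth:google-auth-library-oauth2-http is on the classpath (typically pulled in transitively by the client library); GoogleCredentials.getApplicationDefault() throws an IOException when no credentials are found.

import com.google.auth.oauth2.GoogleCredentials;
import java.io.IOException;

public class CheckApplicationDefaultCredentials {
  public static void main(String[] args) {
    try {
      // Throws IOException if no Application Default Credentials are found, for
      // example when `gcloud auth application-default login` has not been run
      // and GOOGLE_APPLICATION_CREDENTIALS is not set.
      GoogleCredentials credentials = GoogleCredentials.getApplicationDefault();
      System.out.println("Found Application Default Credentials: " + credentials);
    } catch (IOException ex) {
      System.out.println("No Application Default Credentials found: " + ex.getMessage());
    }
  }
}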
import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Sample to create a Redshift transfer config
public class CreateRedshiftTransfer {

  public static void main(String[] args) throws IOException {
    // TODO(developer): Replace these variables before running the sample.
    final String projectId = "MY_PROJECT_ID";
    String datasetId = "MY_DATASET_ID";
    String datasetRegion = "US";
    String jdbcUrl = "MY_JDBC_URL_CONNECTION_REDSHIFT";
    String dbUserName = "MY_USERNAME";
    String dbPassword = "MY_PASSWORD";
    String accessKeyId = "MY_AWS_ACCESS_KEY_ID";
    String secretAccessId = "MY_AWS_SECRET_ACCESS_ID";
    String s3Bucket = "MY_S3_BUCKET_URI";
    String redShiftSchema = "MY_REDSHIFT_SCHEMA";
    String tableNamePatterns = "*";
    String vpcAndReserveIpRange = "MY_VPC_AND_IP_RANGE";

    // Data-source parameters for the "redshift" connector, passed as a protobuf Struct.
    Map<String, Value> params = new HashMap<>();
    params.put("jdbc_url", Value.newBuilder().setStringValue(jdbcUrl).build());
    params.put("database_username", Value.newBuilder().setStringValue(dbUserName).build());
    params.put("database_password", Value.newBuilder().setStringValue(dbPassword).build());
    params.put("access_key_id", Value.newBuilder().setStringValue(accessKeyId).build());
    params.put("secret_access_key", Value.newBuilder().setStringValue(secretAccessId).build());
    params.put("s3_bucket", Value.newBuilder().setStringValue(s3Bucket).build());
    params.put("redshift_schema", Value.newBuilder().setStringValue(redShiftSchema).build());
    params.put(
        "table_name_patterns", Value.newBuilder().setStringValue(tableNamePatterns).build());
    params.put(
        "migration_infra_cidr", Value.newBuilder().setStringValue(vpcAndReserveIpRange).build());

    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetId)
            .setDatasetRegion(datasetRegion)
            .setDisplayName("Your Redshift Config Name")
            .setDataSourceId("redshift")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    createRedshiftTransfer(projectId, transferConfig);
  }

  public static void createRedshiftTransfer(String projectId, TransferConfig transferConfig)
      throws IOException {
    // The client is AutoCloseable; try-with-resources shuts it down when done.
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      ProjectName parent = ProjectName.of(projectId);
      CreateTransferConfigRequest request =
          CreateTransferConfigRequest.newBuilder()
              .setParent(parent.toString())
              .setTransferConfig(transferConfig)
              .build();
      TransferConfig config = client.createTransferConfig(request);
      System.out.println("Cloud Redshift transfer created successfully: " + config.getName());
    } catch (ApiException ex) {
      System.out.print("Cloud Redshift transfer was not created. " + ex.toString());
    }
  }
}
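The config above runs on the schedule passed to setSchedule ("every 24 hours"). If you want to kick off a one-off run immediately instead of waiting for the next scheduled time, the same client exposes startManualTransferRuns. The sketch below is a hedged companion to the sample, not part of it: MY_TRANSFER_CONFIG_NAME is a placeholder for the full resource name returned by createTransferConfig (config.getName() above).

import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest;
import com.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse;
import com.google.protobuf.Timestamp;
import java.io.IOException;

// Sketch: trigger an immediate run of an existing transfer config.
public class StartManualRedshiftTransfer {
  public static void main(String[] args) throws IOException {
    // TODO(developer): Replace with the name returned by createTransferConfig, in the form
    // "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}".
    String configName = "MY_TRANSFER_CONFIG_NAME";
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      // Request a run scheduled for "now".
      Timestamp now =
          Timestamp.newBuilder().setSeconds(System.currentTimeMillis() / 1000L).build();
      StartManualTransferRunsRequest request =
          StartManualTransferRunsRequest.newBuilder()
              .setParent(configName)
              .setRequestedRunTime(now)
              .build();
      StartManualTransferRunsResponse response = client.startManualTransferRuns(request);
      response
          .getRunsList()
          .forEach(run -> System.out.println("Started transfer run: " + run.getName()));
    }
  }
}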
What's next
To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.