This repository was archived by the owner on Sep 27, 2023. It is now read-only.

Commit 52ac0a0

Author: Praful Makani
docs(samples): add create amazon s3 transfer (#416)
1 parent 4eb4530 commit 52ac0a0

2 files changed: +231 additions, -0 deletions

CreateAmazonS3Transfer.java: 83 additions & 0 deletions
@@ -0,0 +1,83 @@
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

// [START bigquerydatatransfer_create_amazons3_transfer]
import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Sample to create an Amazon S3 transfer config.
public class CreateAmazonS3Transfer {

  public static void main(String[] args) throws IOException {
    // TODO(developer): Replace these variables before running the sample.
    final String projectId = "MY_PROJECT_ID";
    String datasetId = "MY_DATASET_ID";
    String tableId = "MY_TABLE_ID";
    // Amazon S3 bucket URI; the AWS credentials below need read access to it.
    String sourceUri = "s3://your-bucket-name/*";
    String awsAccessKeyId = "MY_AWS_ACCESS_KEY_ID";
    String awsSecretAccessId = "AWS_SECRET_ACCESS_ID";
    String fileFormat = "CSV";
    String fieldDelimiter = ",";
    String skipLeadingRows = "1";
    Map<String, Value> params = new HashMap<>();
    params.put(
        "destination_table_name_template", Value.newBuilder().setStringValue(tableId).build());
    params.put("data_path", Value.newBuilder().setStringValue(sourceUri).build());
    params.put("access_key_id", Value.newBuilder().setStringValue(awsAccessKeyId).build());
    params.put("secret_access_key", Value.newBuilder().setStringValue(awsSecretAccessId).build());
    params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build());
    params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
    params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetId)
            .setDisplayName("Your Aws S3 Config Name")
            .setDataSourceId("amazon_s3")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    createAmazonS3Transfer(projectId, transferConfig);
  }

  public static void createAmazonS3Transfer(String projectId, TransferConfig transferConfig)
      throws IOException {
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      ProjectName parent = ProjectName.of(projectId);
      CreateTransferConfigRequest request =
          CreateTransferConfigRequest.newBuilder()
              .setParent(parent.toString())
              .setTransferConfig(transferConfig)
              .build();
      TransferConfig config = client.createTransferConfig(request);
      System.out.println("Amazon s3 transfer created successfully :" + config.getName());
    } catch (ApiException ex) {
      System.out.print("Amazon s3 transfer was not created." + ex.toString());
    }
  }
}
// [END bigquerydatatransfer_create_amazons3_transfer]
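
The config.getName() printed above is the transfer config's full resource name. As a minimal sketch (not part of this commit; the class name and placeholder resource name are illustrative), the same client can read the config back with getTransferConfig to confirm it was created:

package com.example.bigquerydatatransfer;

import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import java.io.IOException;

// Hypothetical helper, not part of this commit: fetches a transfer config by its
// full resource name to verify that CreateAmazonS3Transfer actually created it.
public class GetAmazonS3TransferSketch {

  public static void main(String[] args) throws IOException {
    // Assumption: replace with the name printed by CreateAmazonS3Transfer, e.g.
    // projects/{project_id}/locations/{location}/transferConfigs/{config_id}.
    String configName = "projects/MY_PROJECT_ID/locations/us/transferConfigs/MY_CONFIG_ID";
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      TransferConfig config = client.getTransferConfig(configName);
      System.out.println(
          "Found config: " + config.getDisplayName()
              + ", data source: " + config.getDataSourceId()
              + ", schedule: " + config.getSchedule());
    }
  }
}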
CreateAmazonS3TransferIT.java: 148 additions & 0 deletions
@@ -0,0 +1,148 @@
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

public class CreateAmazonS3TransferIT {

  private static final Logger LOG = Logger.getLogger(CreateAmazonS3TransferIT.class.getName());
  private static final String ID = UUID.randomUUID().toString().substring(0, 8);
  private BigQuery bigquery;
  private ByteArrayOutputStream bout;
  private String name;
  private String displayName;
  private String datasetName;
  private String tableName;
  private PrintStream out;
  private PrintStream originalPrintStream;

  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");
  private static final String AWS_ACCESS_KEY_ID = requireEnvVar("AWS_ACCESS_KEY_ID");
  private static final String AWS_SECRET_ACCESS_KEY = requireEnvVar("AWS_SECRET_ACCESS_KEY");
  private static final String AWS_BUCKET = requireEnvVar("AWS_BUCKET");

  private static String requireEnvVar(String varName) {
    String value = System.getenv(varName);
    assertNotNull(
        "Environment variable " + varName + " is required to perform these tests.",
        System.getenv(varName));
    return value;
  }

  @BeforeClass
  public static void checkRequirements() {
    requireEnvVar("GOOGLE_CLOUD_PROJECT");
    requireEnvVar("AWS_ACCESS_KEY_ID");
    requireEnvVar("AWS_SECRET_ACCESS_KEY");
    requireEnvVar("AWS_BUCKET");
  }

  @Before
  public void setUp() {
    displayName = "MY_SCHEDULE_NAME_TEST_" + ID;
    datasetName = "MY_DATASET_NAME_TEST_" + ID;
    tableName = "MY_TABLE_NAME_TEST_" + ID;
    // Create a temporary dataset.
    bigquery = BigQueryOptions.getDefaultInstance().getService();
    bigquery.create(DatasetInfo.of(datasetName));
    // Create a temporary table.
    Schema schema =
        Schema.of(
            Field.of("name", StandardSQLTypeName.STRING),
            Field.of("post_abbr", StandardSQLTypeName.STRING));
    TableDefinition tableDefinition = StandardTableDefinition.of(schema);
    TableInfo tableInfo = TableInfo.of(TableId.of(datasetName, tableName), tableDefinition);
    bigquery.create(tableInfo);

    bout = new ByteArrayOutputStream();
    out = new PrintStream(bout);
    originalPrintStream = System.out;
    System.setOut(out);
  }

  @After
  public void tearDown() throws IOException {
    // Clean up: delete the transfer config created by the test.
    DeleteScheduledQuery.deleteScheduledQuery(name);
    // Delete the temporary table.
    bigquery.delete(TableId.of(datasetName, tableName));
    // Delete the temporary dataset.
    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());
    // Restore the original stdout and log the captured output.
    System.out.flush();
    System.setOut(originalPrintStream);
    LOG.log(Level.INFO, bout.toString());
  }

  @Test
  public void testCreateAmazonS3Transfer() throws IOException {
    String sourceUri = String.format("s3://%s/*", AWS_BUCKET);
    String fileFormat = "CSV";
    String fieldDelimiter = ",";
    String skipLeadingRows = "1";
    Map<String, Value> params = new HashMap<>();
    params.put(
        "destination_table_name_template", Value.newBuilder().setStringValue(tableName).build());
    params.put("data_path", Value.newBuilder().setStringValue(sourceUri).build());
    params.put("access_key_id", Value.newBuilder().setStringValue(AWS_ACCESS_KEY_ID).build());
    params.put(
        "secret_access_key", Value.newBuilder().setStringValue(AWS_SECRET_ACCESS_KEY).build());
    params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build());
    params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
    params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetName)
            .setDisplayName(displayName)
            .setDataSourceId("amazon_s3")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    CreateAmazonS3Transfer.createAmazonS3Transfer(PROJECT_ID, transferConfig);
    String result = bout.toString();
    name = result.substring(result.indexOf(":") + 1, result.length() - 1);
    assertThat(result).contains("Amazon s3 transfer created successfully :");
  }
}
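
The tearDown() above delegates cleanup to DeleteScheduledQuery.deleteScheduledQuery(name), a helper class that lives elsewhere in this repository and is not shown in this diff. As a minimal sketch of what such a cleanup helper could look like, assuming deletion by full resource name through the same client (illustrative, not the repository's actual implementation):

package com.example.bigquerydatatransfer;

import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import java.io.IOException;

// Hypothetical cleanup helper with the same shape as the call used in tearDown();
// the real DeleteScheduledQuery class in the repository may differ.
public class DeleteTransferConfigSketch {

  public static void deleteTransferConfig(String configName) throws IOException {
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      // configName is the full resource name returned when the config was created,
      // e.g. projects/{project_id}/locations/{location}/transferConfigs/{config_id}.
      client.deleteTransferConfig(configName);
      System.out.println("Transfer config deleted successfully: " + configName);
    } catch (ApiException ex) {
      System.out.println("Transfer config was not deleted. " + ex.toString());
    }
  }
}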

0 commit comments
