/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

public class CreateAmazonS3TransferIT {

  private static final Logger LOG = Logger.getLogger(CreateAmazonS3TransferIT.class.getName());
  private static final String ID = UUID.randomUUID().toString().substring(0, 8);
  private BigQuery bigquery;
  private ByteArrayOutputStream bout;
  private String name;
  private String displayName;
  private String datasetName;
  private String tableName;
  private PrintStream out;
  private PrintStream originalPrintStream;

  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");
  private static final String AWS_ACCESS_KEY_ID = requireEnvVar("AWS_ACCESS_KEY_ID");
  private static final String AWS_SECRET_ACCESS_KEY = requireEnvVar("AWS_SECRET_ACCESS_KEY");
  private static final String AWS_BUCKET = requireEnvVar("AWS_BUCKET");

  private static String requireEnvVar(String varName) {
    String value = System.getenv(varName);
    assertNotNull(
        "Environment variable " + varName + " is required to perform these tests.", value);
    return value;
  }

  @BeforeClass
  public static void checkRequirements() {
    requireEnvVar("GOOGLE_CLOUD_PROJECT");
    requireEnvVar("AWS_ACCESS_KEY_ID");
    requireEnvVar("AWS_SECRET_ACCESS_KEY");
    requireEnvVar("AWS_BUCKET");
  }

  @Before
  public void setUp() {
    displayName = "MY_SCHEDULE_NAME_TEST_" + ID;
    datasetName = "MY_DATASET_NAME_TEST_" + ID;
    tableName = "MY_TABLE_NAME_TEST_" + ID;
    // create a temporary dataset
    bigquery = BigQueryOptions.getDefaultInstance().getService();
    bigquery.create(DatasetInfo.of(datasetName));
    // create a temporary table
    Schema schema =
        Schema.of(
            Field.of("name", StandardSQLTypeName.STRING),
            Field.of("post_abbr", StandardSQLTypeName.STRING));
    TableDefinition tableDefinition = StandardTableDefinition.of(schema);
    TableInfo tableInfo = TableInfo.of(TableId.of(datasetName, tableName), tableDefinition);
    bigquery.create(tableInfo);

    bout = new ByteArrayOutputStream();
    out = new PrintStream(bout);
    originalPrintStream = System.out;
    System.setOut(out);
  }

  @After
  public void tearDown() throws IOException {
    // clean up the transfer config created by the test
    DeleteScheduledQuery.deleteScheduledQuery(name);
    // delete the temporary table
    bigquery.delete(TableId.of(datasetName, tableName));
    // delete the temporary dataset
    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());
    // restore System.out to its original stream and log the captured output
    System.out.flush();
    System.setOut(originalPrintStream);
    LOG.log(Level.INFO, bout.toString());
  }

  @Test
  public void testCreateAmazonS3Transfer() throws IOException {
    String sourceUri = String.format("s3://%s/*", AWS_BUCKET);
    String fileFormat = "CSV";
    String fieldDelimiter = ",";
    String skipLeadingRows = "1";
    // parameters expected by the "amazon_s3" data source
    Map<String, Value> params = new HashMap<>();
    params.put(
        "destination_table_name_template", Value.newBuilder().setStringValue(tableName).build());
    params.put("data_path", Value.newBuilder().setStringValue(sourceUri).build());
    params.put("access_key_id", Value.newBuilder().setStringValue(AWS_ACCESS_KEY_ID).build());
    params.put(
        "secret_access_key", Value.newBuilder().setStringValue(AWS_SECRET_ACCESS_KEY).build());
    params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build());
    params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
    params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetName)
            .setDisplayName(displayName)
            .setDataSourceId("amazon_s3")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    CreateAmazonS3Transfer.createAmazonS3Transfer(PROJECT_ID, transferConfig);
    String result = bout.toString();
    // capture the transfer config's resource name from the sample's output so that
    // tearDown() can delete the config
    name = result.substring(result.indexOf(":") + 1, result.length() - 1);
    assertThat(result).contains("Amazon s3 transfer created successfully :");
  }
}
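
Note: the CreateAmazonS3Transfer sample exercised by this test is not included in the commit. The sketch below is an assumption of what it might look like, based only on how the test calls it and asserts on its output, and on the public DataTransferServiceClient API in com.google.cloud.bigquery.datatransfer.v1; the actual sample may differ.

/*
 * Hypothetical sketch of the sample under test; not part of this commit.
 */
package com.example.bigquerydatatransfer;

import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import java.io.IOException;

public class CreateAmazonS3Transfer {

  public static void createAmazonS3Transfer(String projectId, TransferConfig transferConfig)
      throws IOException {
    // The client is auto-closeable and picks up application default credentials.
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      CreateTransferConfigRequest request =
          CreateTransferConfigRequest.newBuilder()
              .setParent(ProjectName.of(projectId).toString())
              .setTransferConfig(transferConfig)
              .build();
      TransferConfig config = client.createTransferConfig(request);
      // The integration test parses everything after the first ':' on this line
      // as the transfer config's resource name.
      System.out.println("Amazon s3 transfer created successfully :" + config.getName());
    }
  }
}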
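
Likewise, the DeleteScheduledQuery helper used for cleanup in tearDown() is not shown here. It is assumed to delete a transfer config by its full resource name (the same RPC works for any transfer config, not just scheduled queries); a minimal sketch under that assumption:

/*
 * Hypothetical sketch of the cleanup helper; not part of this commit.
 */
package com.example.bigquerydatatransfer;

import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest;
import java.io.IOException;

public class DeleteScheduledQuery {

  public static void deleteScheduledQuery(String configName) throws IOException {
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      // Delete the transfer config identified by its full resource name,
      // e.g. projects/{project}/locations/{location}/transferConfigs/{config}.
      DeleteTransferConfigRequest request =
          DeleteTransferConfigRequest.newBuilder().setName(configName).build();
      client.deleteTransferConfig(request);
      System.out.println("Transfer config deleted successfully");
    }
  }
}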