/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
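/**
 * Integration test for the GetTransferConfigInfo sample: creates a temporary dataset and a
 * scheduled-query transfer config, verifies that the sample can fetch and print the config's
 * details, and cleans up both resources afterwards.
 */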
public class GetTransferConfigInfoIT {

  private BigQuery bigquery;
  private ByteArrayOutputStream bout;
  private String name;
  private String displayName;
  private String datasetName;
  private PrintStream out;
  private PrintStream originalPrintStream;

  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");

  private static String requireEnvVar(String varName) {
    String value = System.getenv(varName);
    assertNotNull(
        "Environment variable " + varName + " is required to perform these tests.", value);
    return value;
  }

  @BeforeClass
  public static void checkRequirements() {
    requireEnvVar("GOOGLE_CLOUD_PROJECT");
  }

  @Before
  public void setUp() {
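    // Redirect System.out so output printed by the samples can be captured and asserted on.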
    bout = new ByteArrayOutputStream();
    out = new PrintStream(bout);
    originalPrintStream = System.out;
    System.setOut(out);

    displayName = "MY_SCHEDULE_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);
    datasetName = "MY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);

    // create a temporary dataset
    bigquery = BigQueryOptions.getDefaultInstance().getService();
    bigquery.create(DatasetInfo.of(datasetName));

    // create a scheduled query
    String query =
        "SELECT CURRENT_TIMESTAMP() as current_time, @run_time as intended_run_time, "
            + "@run_date as intended_run_date, 17 as some_integer";
    String destinationTableName =
        "MY_DESTINATION_TABLE_" + UUID.randomUUID().toString().substring(0, 8) + "_{run_date}";
    Map<String, Value> params = new HashMap<>();
    params.put("query", Value.newBuilder().setStringValue(query).build());
    params.put(
        "destination_table_name_template",
        Value.newBuilder().setStringValue(destinationTableName).build());
    params.put("write_disposition", Value.newBuilder().setStringValue("WRITE_TRUNCATE").build());
    params.put("partitioning_field", Value.newBuilder().setStringValue("").build());
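    // Configure a scheduled query that writes into the temporary dataset every 24 hours.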
    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetName)
            .setDisplayName(displayName)
            .setDataSourceId("scheduled_query")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    CreateScheduledQuery.createScheduledQuery(PROJECT_ID, transferConfig);
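    // CreateScheduledQuery prints the new config's resource name; extract it from the captured
    // output so it can be fetched in the test and deleted in tearDown.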
    String result = bout.toString();
    name = result.substring(result.indexOf(".") + 1);
  }

  @After
  public void tearDown() {
    // Delete the scheduled query created in setUp.
    DeleteScheduledQuery.deleteScheduledQuery(name);
    // Delete the temporary dataset and its contents.
    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());

    // Restore the original System.out and replay the captured output.
    System.setOut(originalPrintStream);
    String output = new String(bout.toByteArray());
    System.out.println(output);
  }

  @Test
  public void testGetTransferConfigInfo() throws IOException {
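    // Fetching the config by resource name should print a confirmation message.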
    GetTransferConfigInfo.getTransferConfigInfo(name);
    assertThat(bout.toString()).contains("Config info retrieved successfully.");
  }
}