This repository was archived by the owner on Sep 27, 2023. It is now read-only.

Commit ddf567a

Authored by Praful Makani
docs(samples): add run history (#333)
1 parent 5a09efb commit ddf567a

File tree

2 files changed: 183 additions, 0 deletions
Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

// [START bigquerydatatransfer_get_run_history]
import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest;
import java.io.IOException;

// Sample to get run history from a transfer config.
public class RunHistory {

  public static void main(String[] args) throws IOException {
    // TODO(developer): Replace these variables before running the sample.
    String configId = "MY_CONFIG_ID";
    // i.e. `projects/{project_id}/transferConfigs/{config_id}` or
    // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`
    runHistory(configId);
  }

  public static void runHistory(String configId) throws IOException {
    try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) {
      ListTransferRunsRequest request =
          ListTransferRunsRequest.newBuilder().setParent(configId).build();
      dataTransferServiceClient
          .listTransferRuns(request)
          .iterateAll()
          .forEach(run -> System.out.print("Success! Run ID :" + run.getName() + "\n"));
    } catch (ApiException ex) {
      System.out.println("Run history not found due to error." + ex.toString());
    }
  }
}
// [END bigquerydatatransfer_get_run_history]
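
The sample above lists every run recorded under a transfer config. As a point of comparison, here is a minimal, hypothetical variation (not part of this commit) that narrows the history to successful runs with the states filter on ListTransferRunsRequest; the class name and the resource IDs in it are placeholders assumed only for illustration.

package com.example.bigquerydatatransfer;

import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest;
import com.google.cloud.bigquery.datatransfer.v1.TransferState;
import java.io.IOException;

// Hypothetical variation: list only runs that finished successfully.
public class RunHistorySucceededOnly {

  public static void main(String[] args) throws IOException {
    // Placeholder resource name; replace with a real transfer config before running.
    String configName = "projects/MY_PROJECT_ID/locations/us/transferConfigs/MY_CONFIG_ID";
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      ListTransferRunsRequest request =
          ListTransferRunsRequest.newBuilder()
              .setParent(configName)
              // Keep only runs in the SUCCEEDED state.
              .addStates(TransferState.SUCCEEDED)
              .build();
      client
          .listTransferRuns(request)
          .iterateAll()
          .forEach(run -> System.out.println(run.getName() + " (state: " + run.getState() + ")"));
    }
  }
}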
Lines changed: 134 additions & 0 deletions
@@ -0,0 +1,134 @@
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

public class RunHistoryIT {

  private static final Logger LOG = Logger.getLogger(RunHistoryIT.class.getName());
  private static final String ID = UUID.randomUUID().toString().substring(0, 8);
  private BigQuery bigquery;
  private ByteArrayOutputStream bout;
  private String name;
  private String displayName;
  private String datasetName;
  private PrintStream out;
  private PrintStream originalPrintStream;

  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");

  private static String requireEnvVar(String varName) {
    String value = System.getenv(varName);
    assertNotNull(
        "Environment variable " + varName + " is required to perform these tests.",
        System.getenv(varName));
    return value;
  }

  @BeforeClass
  public static void checkRequirements() {
    requireEnvVar("GOOGLE_CLOUD_PROJECT");
  }

  @Before
  public void setUp() throws IOException {
    bout = new ByteArrayOutputStream();
    out = new PrintStream(bout);
    originalPrintStream = System.out;
    System.setOut(out);

    displayName = "MY_SCHEDULE_NAME_TEST_" + ID;
    datasetName = "MY_DATASET_NAME_TEST_" + ID;
    // Create a temporary dataset.
    bigquery = BigQueryOptions.getDefaultInstance().getService();
    bigquery.create(DatasetInfo.of(datasetName));
    // Create a scheduled query.
    String query =
        "SELECT CURRENT_TIMESTAMP() as current_time, @run_time as intended_run_time, "
            + "@run_date as intended_run_date, 17 as some_integer";
    String destinationTableName =
        "MY_DESTINATION_TABLE_" + UUID.randomUUID().toString().substring(0, 8) + "_{run_date}";
    Map<String, Value> params = new HashMap<>();
    params.put("query", Value.newBuilder().setStringValue(query).build());
    params.put(
        "destination_table_name_template",
        Value.newBuilder().setStringValue(destinationTableName).build());
    params.put("write_disposition", Value.newBuilder().setStringValue("WRITE_TRUNCATE").build());
    params.put("partitioning_field", Value.newBuilder().setStringValue("").build());
    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetName)
            .setDisplayName(displayName)
            .setDataSourceId("scheduled_query")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) {
      ProjectName parent = ProjectName.of(PROJECT_ID);
      CreateTransferConfigRequest request =
          CreateTransferConfigRequest.newBuilder()
              .setParent(parent.toString())
              .setTransferConfig(transferConfig)
              .build();
      name = dataTransferServiceClient.createTransferConfig(request).getName();
      System.out.println("\nScheduled query created successfully :" + name);
    }
  }

  @After
  public void tearDown() throws IOException {
    // Delete the scheduled query that was just created.
    DeleteScheduledQuery.deleteScheduledQuery(name);
    // Delete the temporary dataset.
    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());

    // Restore the original stdout and log the captured output.
    System.out.flush();
    System.setOut(originalPrintStream);
    LOG.log(Level.INFO, bout.toString());
  }

  @Test
  public void testRunHistory() throws IOException {
    RunHistory.runHistory(name);
    assertThat(bout.toString()).contains("Success! Run ID :");
  }
}
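
The assertion above expects the freshly created scheduled query to already have at least one run in its history when the test executes. If a guaranteed run were ever needed, a hypothetical helper along these lines (not part of this commit) could kick one off with the v1 client's startManualTransferRuns call before invoking RunHistory.runHistory; the class and method names here are placeholder assumptions.

package com.example.bigquerydatatransfer;

import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest;
import com.google.protobuf.Timestamp;
import java.io.IOException;
import java.time.Instant;

// Hypothetical helper: trigger one manual run for a transfer config so its history is non-empty.
public class TriggerManualRun {

  public static void triggerRun(String configName) throws IOException {
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      // Request a run "as of now".
      Instant now = Instant.now();
      Timestamp requestedRunTime =
          Timestamp.newBuilder().setSeconds(now.getEpochSecond()).setNanos(now.getNano()).build();
      StartManualTransferRunsRequest request =
          StartManualTransferRunsRequest.newBuilder()
              .setParent(configName)
              .setRequestedRunTime(requestedRunTime)
              .build();
      client
          .startManualTransferRuns(request)
          .getRunsList()
          .forEach(run -> System.out.println("Triggered run: " + run.getName()));
    }
  }
}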

0 commit comments
