This repository was archived by the owner on Sep 27, 2023. It is now read-only.

Commit 37097f8
Author: Praful Makani
docs(samples): add copy dataset (#389)
Parent: 5ad1751

2 files changed: 183 additions, 0 deletions

CopyDataset.java (new file) — 70 additions, 0 deletions
@@ -0,0 +1,70 @@
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

// [START bigquerydatatransfer_copy_dataset]
import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Sample to copy a dataset from another GCP project.
public class CopyDataset {

  public static void main(String[] args) throws IOException {
    // TODO(developer): Replace these variables before running the sample.
    final String destinationProjectId = "MY_DESTINATION_PROJECT_ID";
    final String destinationDatasetId = "MY_DESTINATION_DATASET_ID";
    final String sourceProjectId = "MY_SOURCE_PROJECT_ID";
    final String sourceDatasetId = "MY_SOURCE_DATASET_ID";
    Map<String, Value> params = new HashMap<>();
    params.put("source_project_id", Value.newBuilder().setStringValue(sourceProjectId).build());
    params.put("source_dataset_id", Value.newBuilder().setStringValue(sourceDatasetId).build());
    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(destinationDatasetId)
            .setDisplayName("Your Dataset Copy Name")
            .setDataSourceId("cross_region_copy")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    copyDataset(destinationProjectId, transferConfig);
  }

  public static void copyDataset(String projectId, TransferConfig transferConfig)
      throws IOException {
    // Initialize the client; it is closed automatically when the try block exits.
    try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) {
      ProjectName parent = ProjectName.of(projectId);
      CreateTransferConfigRequest request =
          CreateTransferConfigRequest.newBuilder()
              .setParent(parent.toString())
              .setTransferConfig(transferConfig)
              .build();
      TransferConfig config = dataTransferServiceClient.createTransferConfig(request);
      System.out.println("Copy dataset created successfully :" + config.getName());
    } catch (ApiException ex) {
      System.out.println("Copy dataset was not created: " + ex.toString());
    }
  }
}
// [END bigquerydatatransfer_copy_dataset]
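
The sample above only creates the copy configuration; the copy itself then runs on the configured schedule ("every 24 hours"). If you want to kick off a copy immediately after creating the config, the same DataTransferServiceClient also exposes startManualTransferRuns. The sketch below is not part of this commit; the class name TriggerCopyRun and the way the config name is passed in are assumptions for illustration.

package com.example.bigquerydatatransfer;

import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest;
import com.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse;
import com.google.protobuf.Timestamp;
import java.io.IOException;
import java.time.Instant;

// Hypothetical helper (not part of this commit): trigger an immediate run of an
// existing transfer config, e.g. the one whose name is printed by CopyDataset above.
public class TriggerCopyRun {

  public static void triggerCopyRun(String transferConfigName) throws IOException {
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      // Request a run "now" instead of waiting for the configured schedule.
      Timestamp now =
          Timestamp.newBuilder().setSeconds(Instant.now().getEpochSecond()).build();
      StartManualTransferRunsRequest request =
          StartManualTransferRunsRequest.newBuilder()
              .setParent(transferConfigName) // e.g. projects/{id}/transferConfigs/{config_id}
              .setRequestedRunTime(now)
              .build();
      StartManualTransferRunsResponse response = client.startManualTransferRuns(request);
      System.out.println("Triggered " + response.getRunsCount() + " manual transfer run(s).");
    }
  }
}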
CopyDatasetIT.java (new file) — 113 additions, 0 deletions
@@ -0,0 +1,113 @@
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.bigquerydatatransfer;

import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

public class CopyDatasetIT {

  private static final Logger LOG = Logger.getLogger(CopyDatasetIT.class.getName());
  private BigQuery bigquery;
  private ByteArrayOutputStream bout;
  private String name;
  private String displayName;
  private String datasetName;
  private PrintStream out;
  private PrintStream originalPrintStream;

  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");

  private static String requireEnvVar(String varName) {
    String value = System.getenv(varName);
    assertNotNull(
        "Environment variable " + varName + " is required to perform these tests.",
        System.getenv(varName));
    return value;
  }

  @BeforeClass
  public static void checkRequirements() {
    requireEnvVar("GOOGLE_CLOUD_PROJECT");
  }

  @Before
  public void setUp() {
    displayName = "MY_COPY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);
    datasetName = "MY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);
    // Create a temporary destination dataset.
    bigquery = BigQueryOptions.getDefaultInstance().getService();
    bigquery.create(DatasetInfo.of(datasetName));

    // Capture System.out so the created config name can be read back from the sample's output.
    bout = new ByteArrayOutputStream();
    out = new PrintStream(bout);
    originalPrintStream = System.out;
    System.setOut(out);
  }

  @After
  public void tearDown() throws IOException {
    // TODO(pmakani): replace DeleteScheduledQuery with DeleteTransferConfig once PR merged.
    // Clean up the transfer config created by the test.
    DeleteScheduledQuery.deleteScheduledQuery(name);
    // Delete the temporary dataset and its contents.
    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());
    // Restore the original stdout stream.
    System.out.flush();
    System.setOut(originalPrintStream);
    LOG.log(Level.INFO, bout.toString());
  }

  @Test
  public void testCopyDataset() throws IOException {
    Map<String, Value> params = new HashMap<>();
    params.put(
        "source_project_id", Value.newBuilder().setStringValue("bigquery-public-data").build());
    params.put("source_dataset_id", Value.newBuilder().setStringValue("usa_names").build());
    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetName)
            .setDisplayName(displayName)
            .setDataSourceId("cross_region_copy")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();
    CopyDataset.copyDataset(PROJECT_ID, transferConfig);
    String result = bout.toString();
    // The sample prints "Copy dataset created successfully :<config name>"; extract the
    // config name so tearDown can delete it.
    name = result.substring(result.indexOf(":") + 1, result.length() - 1);
    assertThat(result).contains("Copy dataset created successfully :");
  }
}
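
The tearDown above reuses DeleteScheduledQuery from a sibling sample and carries a TODO to swap in a DeleteTransferConfig helper once the corresponding PR merges. As a rough illustration only, and assuming the class and method names (this code is not part of the commit), such a helper could look like:

package com.example.bigquerydatatransfer;

import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest;
import java.io.IOException;

// Hypothetical helper (assumed names): delete a transfer config by its full resource name.
public class DeleteTransferConfig {

  public static void deleteTransferConfig(String configName) throws IOException {
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      DeleteTransferConfigRequest request =
          DeleteTransferConfigRequest.newBuilder().setName(configName).build();
      client.deleteTransferConfig(request);
      System.out.println("Transfer config deleted successfully: " + configName);
    }
  }
}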
