diff --git a/.github/readme/synth.metadata/synth.metadata b/.github/readme/synth.metadata/synth.metadata index b3cbc5caa..ce8feb672 100644 --- a/.github/readme/synth.metadata/synth.metadata +++ b/.github/readme/synth.metadata/synth.metadata @@ -4,14 +4,14 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/java-aiplatform.git", - "sha": "43e69a4025d88eeaa80fb0341b76465baeb53367" + "sha": "beaa1f046abb15de9b1979314aa0f2910af6e311" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "8c5628b86cfa8386de7b8fc1675e6b528b552d57" + "sha": "d0bdade9a962042dc0f770cf631086f3db59b5b0" } } ] diff --git a/.github/workflows/auto-release.yaml b/.github/workflows/auto-release.yaml index 7c8816a7d..9b4fd4d83 100644 --- a/.github/workflows/auto-release.yaml +++ b/.github/workflows/auto-release.yaml @@ -16,8 +16,8 @@ jobs: return; } - // only approve PRs like "chore(master): release " - if ( !context.payload.pull_request.title.startsWith("chore(master): release") ) { + // only approve PRs like "chore: release " + if ( !context.payload.pull_request.title.startsWith("chore: release") ) { return; } diff --git a/.kokoro/release/publish_javadoc.cfg b/.kokoro/release/publish_javadoc.cfg index ac7264203..c5f59fec6 100644 --- a/.kokoro/release/publish_javadoc.cfg +++ b/.kokoro/release/publish_javadoc.cfg @@ -7,12 +7,6 @@ env_vars: { value: "docs-staging" } -# cloud-rad staging -env_vars: { - key: "STAGING_BUCKET_V2" - value: "docs-staging-v2-staging" -} - env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/java-aiplatform/.kokoro/release/publish_javadoc.sh" @@ -26,4 +20,4 @@ before_action { keyname: "docuploader_service_account" } } -} \ No newline at end of file +} diff --git a/.kokoro/release/publish_javadoc.sh b/.kokoro/release/publish_javadoc.sh index d8ad859f3..b502d6e79 100755 --- a/.kokoro/release/publish_javadoc.sh +++ 
b/.kokoro/release/publish_javadoc.sh @@ -56,22 +56,3 @@ python3 -m docuploader create-metadata \ python3 -m docuploader upload . \ --credentials ${CREDENTIALS} \ --staging-bucket ${STAGING_BUCKET} - -popd - -# V2 due to problems w/ the released javadoc plugin doclava, Java 8 is required. Beware of accidental updates. - -mvn clean site -B -q -Ddevsite.template="${KOKORO_GFILE_DIR}/java/" - -pushd target/devsite/reference - -# create metadata -python3 -m docuploader create-metadata \ - --name ${NAME} \ - --version ${VERSION} \ - --language java - -# upload docs to staging bucket -python3 -m docuploader upload . \ - --credentials ${CREDENTIALS} \ - --staging-bucket ${STAGING_BUCKET_V2} diff --git a/.kokoro/release/publish_javadoc11.sh b/.kokoro/release/publish_javadoc11.sh index 5905726d8..c8c307e0f 100755 --- a/.kokoro/release/publish_javadoc11.sh +++ b/.kokoro/release/publish_javadoc11.sh @@ -40,6 +40,9 @@ export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3) # generate yml mvn clean site -B -q -P docFX +# copy README to docfx-yml dir and rename index.md +cp README.md target/docfx-yml/index.md + pushd target/docfx-yml # create metadata @@ -52,4 +55,4 @@ python3 -m docuploader create-metadata \ python3 -m docuploader upload . 
\ --credentials ${CREDENTIALS} \ --staging-bucket ${STAGING_BUCKET_V2} \ - --destination-prefix docfx- + --destination-prefix docfx diff --git a/CHANGELOG.md b/CHANGELOG.md index fb859d939..078c81339 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.4.0](https://www.github.com/googleapis/java-aiplatform/compare/v0.3.0...v0.4.0) (2021-03-12) + + +### Features + +* add encryption_spec to aiplatform v1beta1 ([#176](https://www.github.com/googleapis/java-aiplatform/issues/176)) ([faff19b](https://www.github.com/googleapis/java-aiplatform/commit/faff19b2820146d6cf49525f49ae187449171641)) + + +### Dependencies + +* update dependency com.google.api.grpc:proto-google-cloud-aiplatform-v1beta1 to v0.3.0 ([#189](https://www.github.com/googleapis/java-aiplatform/issues/189)) ([5bdf3d0](https://www.github.com/googleapis/java-aiplatform/commit/5bdf3d04f365cb27446456e6df2a691ce1441434)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.20.1 ([#203](https://www.github.com/googleapis/java-aiplatform/issues/203)) ([761e878](https://www.github.com/googleapis/java-aiplatform/commit/761e878b02eb0268b16d1325749d399e66a487df)) +* update dependency com.google.cloud:google-cloud-storage to v1.113.12 ([#186](https://www.github.com/googleapis/java-aiplatform/issues/186)) ([3fae117](https://www.github.com/googleapis/java-aiplatform/commit/3fae117a6c9d73d292b879c05ba849f0846af73d)) +* update dependency com.google.cloud:google-cloud-storage to v1.113.14 
([#202](https://www.github.com/googleapis/java-aiplatform/issues/202)) ([767cf1d](https://www.github.com/googleapis/java-aiplatform/commit/767cf1d9624e65970f30094484bbc571bd8c858b)) + ## [0.3.0](https://www.github.com/googleapis/java-aiplatform/compare/v0.2.0...v0.3.0) (2021-02-26) diff --git a/README.md b/README.md index e2017d681..846a70ed9 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file com.google.cloud google-cloud-aiplatform - 0.2.0 + 0.3.0 ``` @@ -30,25 +30,25 @@ If you are using Maven without BOM, add this to your dependencies: com.google.cloud google-cloud-aiplatform - 0.2.0 + 0.3.0 ``` If you are using Gradle 5.x or later, add this to your dependencies ```Groovy -implementation platform('com.google.cloud:libraries-bom:18.0.0') +implementation platform('com.google.cloud:libraries-bom:19.0.0') compile 'com.google.cloud:google-cloud-aiplatform' ``` If you are using Gradle without BOM, add this to your dependencies ```Groovy -compile 'com.google.cloud:google-cloud-aiplatform:0.2.0' +compile 'com.google.cloud:google-cloud-aiplatform:0.3.0' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.cloud" % "google-cloud-aiplatform" % "0.2.0" +libraryDependencies += "com.google.cloud" % "google-cloud-aiplatform" % "0.3.0" ``` ## Authentication diff --git a/google-cloud-aiplatform-bom/pom.xml b/google-cloud-aiplatform-bom/pom.xml index 23ea97d79..3c00f5cdf 100644 --- a/google-cloud-aiplatform-bom/pom.xml +++ b/google-cloud-aiplatform-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-aiplatform-bom - 0.3.0 + 0.4.0 pom com.google.cloud @@ -68,27 +68,27 @@ com.google.cloud google-cloud-aiplatform - 0.3.0 + 0.4.0 com.google.api.grpc proto-google-cloud-aiplatform-v1beta1 - 0.3.0 + 
0.4.0 com.google.api.grpc proto-google-cloud-aiplatform-v1 - 0.3.0 + 0.4.0 com.google.api.grpc grpc-google-cloud-aiplatform-v1beta1 - 0.3.0 + 0.4.0 com.google.api.grpc grpc-google-cloud-aiplatform-v1 - 0.3.0 + 0.4.0 diff --git a/google-cloud-aiplatform/pom.xml b/google-cloud-aiplatform/pom.xml index b0f8852fc..64c2067dd 100644 --- a/google-cloud-aiplatform/pom.xml +++ b/google-cloud-aiplatform/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-aiplatform - 0.3.0 + 0.4.0 jar Google AI Platform https://github.com/googleapis/java-aiplatform @@ -11,7 +11,7 @@ com.google.cloud google-cloud-aiplatform-parent - 0.3.0 + 0.4.0 google-cloud-aiplatform diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/DatasetServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/DatasetServiceClient.java index 7811aa7ab..a2453f430 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/DatasetServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/DatasetServiceClient.java @@ -45,6 +45,13 @@ * This class provides the ability to make remote calls to the backing service through method calls * that map to API methods. Sample code to get started: * + *
{@code
+ * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+ *   DatasetName name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]");
+ *   Dataset response = datasetServiceClient.getDataset(name);
+ * }
+ * }
+ * *

Note: close() needs to be called on the DatasetServiceClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * @@ -159,6 +166,16 @@ public final OperationsClient getOperationsClient() { /** * Creates a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   Dataset dataset = Dataset.newBuilder().build();
+   *   Dataset response = datasetServiceClient.createDatasetAsync(parent, dataset).get();
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the Dataset in. Format: * `projects/{project}/locations/{location}` * @param dataset Required. The Dataset to create. @@ -178,6 +195,16 @@ public final OperationFuture createData /** * Creates a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   Dataset dataset = Dataset.newBuilder().build();
+   *   Dataset response = datasetServiceClient.createDatasetAsync(parent, dataset).get();
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the Dataset in. Format: * `projects/{project}/locations/{location}` * @param dataset Required. The Dataset to create. @@ -194,6 +221,19 @@ public final OperationFuture createData /** * Creates a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   CreateDatasetRequest request =
+   *       CreateDatasetRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setDataset(Dataset.newBuilder().build())
+   *           .build();
+   *   Dataset response = datasetServiceClient.createDatasetAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -207,6 +247,20 @@ public final OperationFuture createData * Creates a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   CreateDatasetRequest request =
+   *       CreateDatasetRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setDataset(Dataset.newBuilder().build())
+   *           .build();
+   *   OperationFuture future =
+   *       datasetServiceClient.createDatasetOperationCallable().futureCall(request);
+   *   // Do something.
+   *   Dataset response = future.get();
+   * }
+   * }
*/ public final OperationCallable createDatasetOperationCallable() { @@ -218,6 +272,20 @@ public final OperationFuture createData * Creates a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   CreateDatasetRequest request =
+   *       CreateDatasetRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setDataset(Dataset.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       datasetServiceClient.createDatasetCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createDatasetCallable() { return stub.createDatasetCallable(); @@ -227,6 +295,15 @@ public final UnaryCallable createDatasetCallabl /** * Gets a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DatasetName name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]");
+   *   Dataset response = datasetServiceClient.getDataset(name);
+   * }
+   * }
+ * * @param name Required. The name of the Dataset resource. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -240,6 +317,15 @@ public final Dataset getDataset(DatasetName name) { /** * Gets a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString();
+   *   Dataset response = datasetServiceClient.getDataset(name);
+   * }
+   * }
+ * * @param name Required. The name of the Dataset resource. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -252,6 +338,19 @@ public final Dataset getDataset(String name) { /** * Gets a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   GetDatasetRequest request =
+   *       GetDatasetRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   Dataset response = datasetServiceClient.getDataset(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -264,6 +363,19 @@ public final Dataset getDataset(GetDatasetRequest request) { * Gets a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   GetDatasetRequest request =
+   *       GetDatasetRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future = datasetServiceClient.getDatasetCallable().futureCall(request);
+   *   // Do something.
+   *   Dataset response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getDatasetCallable() { return stub.getDatasetCallable(); @@ -273,6 +385,16 @@ public final UnaryCallable getDatasetCallable() { /** * Updates a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   Dataset dataset = Dataset.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   Dataset response = datasetServiceClient.updateDataset(dataset, updateMask);
+   * }
+   * }
+ * * @param dataset Required. The Dataset which replaces the resource on the server. * @param updateMask Required. The update mask applies to the resource. For the `FieldMask` * definition, see [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). @@ -290,6 +412,19 @@ public final Dataset updateDataset(Dataset dataset, FieldMask updateMask) { /** * Updates a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   UpdateDatasetRequest request =
+   *       UpdateDatasetRequest.newBuilder()
+   *           .setDataset(Dataset.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   Dataset response = datasetServiceClient.updateDataset(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -302,6 +437,19 @@ public final Dataset updateDataset(UpdateDatasetRequest request) { * Updates a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   UpdateDatasetRequest request =
+   *       UpdateDatasetRequest.newBuilder()
+   *           .setDataset(Dataset.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future = datasetServiceClient.updateDatasetCallable().futureCall(request);
+   *   // Do something.
+   *   Dataset response = future.get();
+   * }
+   * }
*/ public final UnaryCallable updateDatasetCallable() { return stub.updateDatasetCallable(); @@ -311,6 +459,17 @@ public final UnaryCallable updateDatasetCallable( /** * Lists Datasets in a Location. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (Dataset element : datasetServiceClient.listDatasets(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The name of the Dataset's parent resource. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -327,6 +486,17 @@ public final ListDatasetsPagedResponse listDatasets(LocationName parent) { /** * Lists Datasets in a Location. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (Dataset element : datasetServiceClient.listDatasets(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The name of the Dataset's parent resource. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -340,6 +510,25 @@ public final ListDatasetsPagedResponse listDatasets(String parent) { /** * Lists Datasets in a Location. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ListDatasetsRequest request =
+   *       ListDatasetsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   for (Dataset element : datasetServiceClient.listDatasets(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -352,6 +541,26 @@ public final ListDatasetsPagedResponse listDatasets(ListDatasetsRequest request) * Lists Datasets in a Location. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ListDatasetsRequest request =
+   *       ListDatasetsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   ApiFuture future =
+   *       datasetServiceClient.listDatasetsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Dataset element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listDatasetsPagedCallable() { @@ -363,6 +572,23 @@ public final ListDatasetsPagedResponse listDatasets(ListDatasetsRequest request) * Lists Datasets in a Location. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   while (true) {
+   *     ListDatasetsResponse response = datasetServiceClient.listDatasetsCallable().call(request);
+   *     for (Dataset element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listDatasetsCallable() { return stub.listDatasetsCallable(); @@ -372,6 +598,15 @@ public final UnaryCallable listDatase /** * Deletes a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DatasetName name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]");
+   *   datasetServiceClient.deleteDatasetAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The resource name of the Dataset to delete. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -387,6 +622,15 @@ public final OperationFuture deleteDatasetAsync( /** * Deletes a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString();
+   *   datasetServiceClient.deleteDatasetAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The resource name of the Dataset to delete. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -400,6 +644,18 @@ public final OperationFuture deleteDatasetAsync( /** * Deletes a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DeleteDatasetRequest request =
+   *       DeleteDatasetRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .build();
+   *   datasetServiceClient.deleteDatasetAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -413,6 +669,19 @@ public final OperationFuture deleteDatasetAsync( * Deletes a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DeleteDatasetRequest request =
+   *       DeleteDatasetRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .build();
+   *   OperationFuture future =
+   *       datasetServiceClient.deleteDatasetOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteDatasetOperationCallable() { @@ -424,6 +693,19 @@ public final OperationFuture deleteDatasetAsync( * Deletes a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DeleteDatasetRequest request =
+   *       DeleteDatasetRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .build();
+   *   ApiFuture future =
+   *       datasetServiceClient.deleteDatasetCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteDatasetCallable() { return stub.deleteDatasetCallable(); @@ -433,6 +715,16 @@ public final UnaryCallable deleteDatasetCallabl /** * Imports data into a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DatasetName name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]");
+   *   List importConfigs = new ArrayList<>();
+   *   ImportDataResponse response = datasetServiceClient.importDataAsync(name, importConfigs).get();
+   * }
+   * }
+ * * @param name Required. The name of the Dataset resource. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @param importConfigs Required. The desired input locations. The contents of all input locations @@ -453,6 +745,16 @@ public final OperationFuture im /** * Imports data into a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString();
+   *   List importConfigs = new ArrayList<>();
+   *   ImportDataResponse response = datasetServiceClient.importDataAsync(name, importConfigs).get();
+   * }
+   * }
+ * * @param name Required. The name of the Dataset resource. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @param importConfigs Required. The desired input locations. The contents of all input locations @@ -470,6 +772,19 @@ public final OperationFuture im /** * Imports data into a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ImportDataRequest request =
+   *       ImportDataRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .addAllImportConfigs(new ArrayList())
+   *           .build();
+   *   ImportDataResponse response = datasetServiceClient.importDataAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -483,6 +798,20 @@ public final OperationFuture im * Imports data into a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ImportDataRequest request =
+   *       ImportDataRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .addAllImportConfigs(new ArrayList())
+   *           .build();
+   *   OperationFuture future =
+   *       datasetServiceClient.importDataOperationCallable().futureCall(request);
+   *   // Do something.
+   *   ImportDataResponse response = future.get();
+   * }
+   * }
*/ public final OperationCallable importDataOperationCallable() { @@ -494,6 +823,19 @@ public final OperationFuture im * Imports data into a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ImportDataRequest request =
+   *       ImportDataRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .addAllImportConfigs(new ArrayList())
+   *           .build();
+   *   ApiFuture future = datasetServiceClient.importDataCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable importDataCallable() { return stub.importDataCallable(); @@ -503,6 +845,16 @@ public final UnaryCallable importDataCallable() { /** * Exports data from a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DatasetName name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]");
+   *   ExportDataConfig exportConfig = ExportDataConfig.newBuilder().build();
+   *   ExportDataResponse response = datasetServiceClient.exportDataAsync(name, exportConfig).get();
+   * }
+   * }
+ * * @param name Required. The name of the Dataset resource. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @param exportConfig Required. The desired output location. @@ -522,6 +874,16 @@ public final OperationFuture ex /** * Exports data from a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString();
+   *   ExportDataConfig exportConfig = ExportDataConfig.newBuilder().build();
+   *   ExportDataResponse response = datasetServiceClient.exportDataAsync(name, exportConfig).get();
+   * }
+   * }
+ * * @param name Required. The name of the Dataset resource. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @param exportConfig Required. The desired output location. @@ -538,6 +900,19 @@ public final OperationFuture ex /** * Exports data from a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ExportDataRequest request =
+   *       ExportDataRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .setExportConfig(ExportDataConfig.newBuilder().build())
+   *           .build();
+   *   ExportDataResponse response = datasetServiceClient.exportDataAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -551,6 +926,20 @@ public final OperationFuture ex * Exports data from a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ExportDataRequest request =
+   *       ExportDataRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .setExportConfig(ExportDataConfig.newBuilder().build())
+   *           .build();
+   *   OperationFuture future =
+   *       datasetServiceClient.exportDataOperationCallable().futureCall(request);
+   *   // Do something.
+   *   ExportDataResponse response = future.get();
+   * }
+   * }
*/ public final OperationCallable exportDataOperationCallable() { @@ -562,6 +951,19 @@ public final OperationFuture ex * Exports data from a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ExportDataRequest request =
+   *       ExportDataRequest.newBuilder()
+   *           .setName(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .setExportConfig(ExportDataConfig.newBuilder().build())
+   *           .build();
+   *   ApiFuture future = datasetServiceClient.exportDataCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable exportDataCallable() { return stub.exportDataCallable(); @@ -571,6 +973,17 @@ public final UnaryCallable exportDataCallable() { /** * Lists DataItems in a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DatasetName parent = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]");
+   *   for (DataItem element : datasetServiceClient.listDataItems(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Dataset to list DataItems from. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -587,6 +1000,17 @@ public final ListDataItemsPagedResponse listDataItems(DatasetName parent) { /** * Lists DataItems in a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String parent = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString();
+   *   for (DataItem element : datasetServiceClient.listDataItems(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Dataset to list DataItems from. Format: * `projects/{project}/locations/{location}/datasets/{dataset}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -600,6 +1024,25 @@ public final ListDataItemsPagedResponse listDataItems(String parent) { /** * Lists DataItems in a Dataset. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ListDataItemsRequest request =
+   *       ListDataItemsRequest.newBuilder()
+   *           .setParent(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   for (DataItem element : datasetServiceClient.listDataItems(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -612,6 +1055,26 @@ public final ListDataItemsPagedResponse listDataItems(ListDataItemsRequest reque * Lists DataItems in a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ListDataItemsRequest request =
+   *       ListDataItemsRequest.newBuilder()
+   *           .setParent(DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   ApiFuture future =
+   *       datasetServiceClient.listDataItemsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (DataItem element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listDataItemsPagedCallable() { @@ -623,6 +1086,23 @@ public final ListDataItemsPagedResponse listDataItems(ListDataItemsRequest reque * Lists DataItems in a Dataset. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   while (true) {
+   *     ListDataItemsResponse response = datasetServiceClient.listDataItemsCallable().call(request);
+   *     for (DataItem element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listDataItemsCallable() { return stub.listDataItemsCallable(); @@ -632,6 +1112,16 @@ public final UnaryCallable listData /** * Gets an AnnotationSpec. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   AnnotationSpecName name =
+   *       AnnotationSpecName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[ANNOTATION_SPEC]");
+   *   AnnotationSpec response = datasetServiceClient.getAnnotationSpec(name);
+   * }
+   * }
+ * * @param name Required. The name of the AnnotationSpec resource. Format: *

`projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -648,6 +1138,17 @@ public final AnnotationSpec getAnnotationSpec(AnnotationSpecName name) { /** * Gets an AnnotationSpec. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String name =
+   *       AnnotationSpecName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[ANNOTATION_SPEC]")
+   *           .toString();
+   *   AnnotationSpec response = datasetServiceClient.getAnnotationSpec(name);
+   * }
+   * }
+ * * @param name Required. The name of the AnnotationSpec resource. Format: *

`projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -661,6 +1162,21 @@ public final AnnotationSpec getAnnotationSpec(String name) { /** * Gets an AnnotationSpec. * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   GetAnnotationSpecRequest request =
+   *       GetAnnotationSpecRequest.newBuilder()
+   *           .setName(
+   *               AnnotationSpecName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[ANNOTATION_SPEC]")
+   *                   .toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   AnnotationSpec response = datasetServiceClient.getAnnotationSpec(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -673,6 +1189,22 @@ public final AnnotationSpec getAnnotationSpec(GetAnnotationSpecRequest request) * Gets an AnnotationSpec. * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   GetAnnotationSpecRequest request =
+   *       GetAnnotationSpecRequest.newBuilder()
+   *           .setName(
+   *               AnnotationSpecName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[ANNOTATION_SPEC]")
+   *                   .toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       datasetServiceClient.getAnnotationSpecCallable().futureCall(request);
+   *   // Do something.
+   *   AnnotationSpec response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getAnnotationSpecCallable() { return stub.getAnnotationSpecCallable(); @@ -682,6 +1214,17 @@ public final UnaryCallable getAnnotati /** * Lists Annotations belongs to a dataitem * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   DataItemName parent = DataItemName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[DATA_ITEM]");
+   *   for (Annotation element : datasetServiceClient.listAnnotations(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the DataItem to list Annotations from. Format: *

`projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -698,6 +1241,18 @@ public final ListAnnotationsPagedResponse listAnnotations(DataItemName parent) { /** * Lists Annotations belongs to a dataitem * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   String parent =
+   *       DataItemName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[DATA_ITEM]").toString();
+   *   for (Annotation element : datasetServiceClient.listAnnotations(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the DataItem to list Annotations from. Format: *

`projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -711,6 +1266,26 @@ public final ListAnnotationsPagedResponse listAnnotations(String parent) { /** * Lists Annotations belongs to a dataitem * + *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ListAnnotationsRequest request =
+   *       ListAnnotationsRequest.newBuilder()
+   *           .setParent(
+   *               DataItemName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[DATA_ITEM]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   for (Annotation element : datasetServiceClient.listAnnotations(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -723,6 +1298,27 @@ public final ListAnnotationsPagedResponse listAnnotations(ListAnnotationsRequest * Lists Annotations belongs to a dataitem * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   ListAnnotationsRequest request =
+   *       ListAnnotationsRequest.newBuilder()
+   *           .setParent(
+   *               DataItemName.of("[PROJECT]", "[LOCATION]", "[DATASET]", "[DATA_ITEM]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   ApiFuture future =
+   *       datasetServiceClient.listAnnotationsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Annotation element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listAnnotationsPagedCallable() { @@ -734,6 +1330,24 @@ public final ListAnnotationsPagedResponse listAnnotations(ListAnnotationsRequest * Lists Annotations belongs to a dataitem * *

Sample code: + * + *

{@code
+   * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+   *   while (true) {
+   *     ListAnnotationsResponse response =
+   *         datasetServiceClient.listAnnotationsCallable().call(request);
+   *     for (Annotation element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listAnnotationsCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/EndpointServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/EndpointServiceClient.java index 32a714218..b677f6e99 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/EndpointServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/EndpointServiceClient.java @@ -46,6 +46,13 @@ * This class provides the ability to make remote calls to the backing service through method calls * that map to API methods. Sample code to get started: * + *
{@code
+ * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+ *   EndpointName name = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+ *   Endpoint response = endpointServiceClient.getEndpoint(name);
+ * }
+ * }
+ * *

Note: close() needs to be called on the EndpointServiceClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). @@ -163,6 +170,16 @@ public final OperationsClient getOperationsClient() { /** * Creates an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   Endpoint endpoint = Endpoint.newBuilder().build();
+   *   Endpoint response = endpointServiceClient.createEndpointAsync(parent, endpoint).get();
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the Endpoint in. Format: * `projects/{project}/locations/{location}` * @param endpoint Required. The Endpoint to create. @@ -182,6 +199,16 @@ public final OperationFuture createEn /** * Creates an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   Endpoint endpoint = Endpoint.newBuilder().build();
+   *   Endpoint response = endpointServiceClient.createEndpointAsync(parent, endpoint).get();
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the Endpoint in. Format: * `projects/{project}/locations/{location}` * @param endpoint Required. The Endpoint to create. @@ -198,6 +225,19 @@ public final OperationFuture createEn /** * Creates an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   CreateEndpointRequest request =
+   *       CreateEndpointRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setEndpoint(Endpoint.newBuilder().build())
+   *           .build();
+   *   Endpoint response = endpointServiceClient.createEndpointAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -211,6 +251,20 @@ public final OperationFuture createEn * Creates an Endpoint. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   CreateEndpointRequest request =
+   *       CreateEndpointRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setEndpoint(Endpoint.newBuilder().build())
+   *           .build();
+   *   OperationFuture future =
+   *       endpointServiceClient.createEndpointOperationCallable().futureCall(request);
+   *   // Do something.
+   *   Endpoint response = future.get();
+   * }
+   * }
*/ public final OperationCallable createEndpointOperationCallable() { @@ -222,6 +276,20 @@ public final OperationFuture createEn * Creates an Endpoint. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   CreateEndpointRequest request =
+   *       CreateEndpointRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setEndpoint(Endpoint.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       endpointServiceClient.createEndpointCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createEndpointCallable() { return stub.createEndpointCallable(); @@ -231,6 +299,15 @@ public final UnaryCallable createEndpointCalla /** * Gets an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   EndpointName name = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+   *   Endpoint response = endpointServiceClient.getEndpoint(name);
+   * }
+   * }
+ * * @param name Required. The name of the Endpoint resource. Format: * `projects/{project}/locations/{location}/endpoints/{endpoint}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -245,6 +322,15 @@ public final Endpoint getEndpoint(EndpointName name) { /** * Gets an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   String name = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString();
+   *   Endpoint response = endpointServiceClient.getEndpoint(name);
+   * }
+   * }
+ * * @param name Required. The name of the Endpoint resource. Format: * `projects/{project}/locations/{location}/endpoints/{endpoint}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -258,6 +344,18 @@ public final Endpoint getEndpoint(String name) { /** * Gets an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   GetEndpointRequest request =
+   *       GetEndpointRequest.newBuilder()
+   *           .setName(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .build();
+   *   Endpoint response = endpointServiceClient.getEndpoint(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -270,6 +368,18 @@ public final Endpoint getEndpoint(GetEndpointRequest request) { * Gets an Endpoint. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   GetEndpointRequest request =
+   *       GetEndpointRequest.newBuilder()
+   *           .setName(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .build();
+   *   ApiFuture future = endpointServiceClient.getEndpointCallable().futureCall(request);
+   *   // Do something.
+   *   Endpoint response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getEndpointCallable() { return stub.getEndpointCallable(); @@ -279,6 +389,17 @@ public final UnaryCallable getEndpointCallable() { /** * Lists Endpoints in a Location. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (Endpoint element : endpointServiceClient.listEndpoints(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location from which to list the Endpoints. * Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -295,6 +416,17 @@ public final ListEndpointsPagedResponse listEndpoints(LocationName parent) { /** * Lists Endpoints in a Location. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (Endpoint element : endpointServiceClient.listEndpoints(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location from which to list the Endpoints. * Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -308,6 +440,25 @@ public final ListEndpointsPagedResponse listEndpoints(String parent) { /** * Lists Endpoints in a Location. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   ListEndpointsRequest request =
+   *       ListEndpointsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   for (Endpoint element : endpointServiceClient.listEndpoints(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -320,6 +471,26 @@ public final ListEndpointsPagedResponse listEndpoints(ListEndpointsRequest reque * Lists Endpoints in a Location. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   ListEndpointsRequest request =
+   *       ListEndpointsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   ApiFuture future =
+   *       endpointServiceClient.listEndpointsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Endpoint element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listEndpointsPagedCallable() { @@ -331,6 +502,24 @@ public final ListEndpointsPagedResponse listEndpoints(ListEndpointsRequest reque * Lists Endpoints in a Location. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   while (true) {
+   *     ListEndpointsResponse response =
+   *         endpointServiceClient.listEndpointsCallable().call(request);
+   *     for (Endpoint element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listEndpointsCallable() { return stub.listEndpointsCallable(); @@ -340,6 +529,16 @@ public final UnaryCallable listEndp /** * Updates an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   Endpoint endpoint = Endpoint.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   Endpoint response = endpointServiceClient.updateEndpoint(endpoint, updateMask);
+   * }
+   * }
+ * * @param endpoint Required. The Endpoint which replaces the resource on the server. * @param updateMask Required. The update mask applies to the resource. See * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). @@ -355,6 +554,19 @@ public final Endpoint updateEndpoint(Endpoint endpoint, FieldMask updateMask) { /** * Updates an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   UpdateEndpointRequest request =
+   *       UpdateEndpointRequest.newBuilder()
+   *           .setEndpoint(Endpoint.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   Endpoint response = endpointServiceClient.updateEndpoint(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -367,6 +579,20 @@ public final Endpoint updateEndpoint(UpdateEndpointRequest request) { * Updates an Endpoint. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   UpdateEndpointRequest request =
+   *       UpdateEndpointRequest.newBuilder()
+   *           .setEndpoint(Endpoint.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       endpointServiceClient.updateEndpointCallable().futureCall(request);
+   *   // Do something.
+   *   Endpoint response = future.get();
+   * }
+   * }
*/ public final UnaryCallable updateEndpointCallable() { return stub.updateEndpointCallable(); @@ -376,6 +602,15 @@ public final UnaryCallable updateEndpointCallab /** * Deletes an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   EndpointName name = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+   *   endpointServiceClient.deleteEndpointAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the Endpoint resource to be deleted. Format: * `projects/{project}/locations/{location}/endpoints/{endpoint}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -391,6 +626,15 @@ public final OperationFuture deleteEndpointAsync /** * Deletes an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   String name = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString();
+   *   endpointServiceClient.deleteEndpointAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the Endpoint resource to be deleted. Format: * `projects/{project}/locations/{location}/endpoints/{endpoint}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -404,6 +648,18 @@ public final OperationFuture deleteEndpointAsync /** * Deletes an Endpoint. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   DeleteEndpointRequest request =
+   *       DeleteEndpointRequest.newBuilder()
+   *           .setName(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .build();
+   *   endpointServiceClient.deleteEndpointAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -417,6 +673,19 @@ public final OperationFuture deleteEndpointAsync * Deletes an Endpoint. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   DeleteEndpointRequest request =
+   *       DeleteEndpointRequest.newBuilder()
+   *           .setName(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .build();
+   *   OperationFuture future =
+   *       endpointServiceClient.deleteEndpointOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteEndpointOperationCallable() { @@ -428,6 +697,19 @@ public final OperationFuture deleteEndpointAsync * Deletes an Endpoint. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   DeleteEndpointRequest request =
+   *       DeleteEndpointRequest.newBuilder()
+   *           .setName(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .build();
+   *   ApiFuture future =
+   *       endpointServiceClient.deleteEndpointCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteEndpointCallable() { return stub.deleteEndpointCallable(); @@ -437,6 +719,18 @@ public final UnaryCallable deleteEndpointCalla /** * Deploys a Model into this Endpoint, creating a DeployedModel within it. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   EndpointName endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+   *   DeployedModel deployedModel = DeployedModel.newBuilder().build();
+   *   Map trafficSplit = new HashMap<>();
+   *   DeployModelResponse response =
+   *       endpointServiceClient.deployModelAsync(endpoint, deployedModel, trafficSplit).get();
+   * }
+   * }
+ * * @param endpoint Required. The name of the Endpoint resource into which to deploy a Model. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * @param deployedModel Required. The DeployedModel to be created within the Endpoint. Note that @@ -469,6 +763,18 @@ public final OperationFuture /** * Deploys a Model into this Endpoint, creating a DeployedModel within it. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   String endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString();
+   *   DeployedModel deployedModel = DeployedModel.newBuilder().build();
+   *   Map trafficSplit = new HashMap<>();
+   *   DeployModelResponse response =
+   *       endpointServiceClient.deployModelAsync(endpoint, deployedModel, trafficSplit).get();
+   * }
+   * }
+ * * @param endpoint Required. The name of the Endpoint resource into which to deploy a Model. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * @param deployedModel Required. The DeployedModel to be created within the Endpoint. Note that @@ -501,6 +807,20 @@ public final OperationFuture /** * Deploys a Model into this Endpoint, creating a DeployedModel within it. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   DeployModelRequest request =
+   *       DeployModelRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .setDeployedModel(DeployedModel.newBuilder().build())
+   *           .putAllTrafficSplit(new HashMap())
+   *           .build();
+   *   DeployModelResponse response = endpointServiceClient.deployModelAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -514,6 +834,21 @@ public final OperationFuture * Deploys a Model into this Endpoint, creating a DeployedModel within it. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   DeployModelRequest request =
+   *       DeployModelRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .setDeployedModel(DeployedModel.newBuilder().build())
+   *           .putAllTrafficSplit(new HashMap())
+   *           .build();
+   *   OperationFuture future =
+   *       endpointServiceClient.deployModelOperationCallable().futureCall(request);
+   *   // Do something.
+   *   DeployModelResponse response = future.get();
+   * }
+   * }
*/ public final OperationCallable< DeployModelRequest, DeployModelResponse, DeployModelOperationMetadata> @@ -526,6 +861,20 @@ public final OperationFuture * Deploys a Model into this Endpoint, creating a DeployedModel within it. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   DeployModelRequest request =
+   *       DeployModelRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .setDeployedModel(DeployedModel.newBuilder().build())
+   *           .putAllTrafficSplit(new HashMap())
+   *           .build();
+   *   ApiFuture future = endpointServiceClient.deployModelCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable deployModelCallable() { return stub.deployModelCallable(); @@ -536,6 +885,18 @@ public final UnaryCallable deployModelCallable() * Undeploys a Model from an Endpoint, removing a DeployedModel from it, and freeing all resources * it's using. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   EndpointName endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+   *   String deployedModelId = "deployedModelId-1817547906";
+   *   Map trafficSplit = new HashMap<>();
+   *   UndeployModelResponse response =
+   *       endpointServiceClient.undeployModelAsync(endpoint, deployedModelId, trafficSplit).get();
+   * }
+   * }
+ * * @param endpoint Required. The name of the Endpoint resource from which to undeploy a Model. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * @param deployedModelId Required. The ID of the DeployedModel to be undeployed from the @@ -565,6 +926,18 @@ public final UnaryCallable deployModelCallable() * Undeploys a Model from an Endpoint, removing a DeployedModel from it, and freeing all resources * it's using. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   String endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString();
+   *   String deployedModelId = "deployedModelId-1817547906";
+   *   Map trafficSplit = new HashMap<>();
+   *   UndeployModelResponse response =
+   *       endpointServiceClient.undeployModelAsync(endpoint, deployedModelId, trafficSplit).get();
+   * }
+   * }
+ * * @param endpoint Required. The name of the Endpoint resource from which to undeploy a Model. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * @param deployedModelId Required. The ID of the DeployedModel to be undeployed from the @@ -594,6 +967,20 @@ public final UnaryCallable deployModelCallable() * Undeploys a Model from an Endpoint, removing a DeployedModel from it, and freeing all resources * it's using. * + *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   UndeployModelRequest request =
+   *       UndeployModelRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .setDeployedModelId("deployedModelId-1817547906")
+   *           .putAllTrafficSplit(new HashMap())
+   *           .build();
+   *   UndeployModelResponse response = endpointServiceClient.undeployModelAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -608,6 +995,21 @@ public final UnaryCallable deployModelCallable() * it's using. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   UndeployModelRequest request =
+   *       UndeployModelRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .setDeployedModelId("deployedModelId-1817547906")
+   *           .putAllTrafficSplit(new HashMap())
+   *           .build();
+   *   OperationFuture future =
+   *       endpointServiceClient.undeployModelOperationCallable().futureCall(request);
+   *   // Do something.
+   *   UndeployModelResponse response = future.get();
+   * }
+   * }
*/ public final OperationCallable< UndeployModelRequest, UndeployModelResponse, UndeployModelOperationMetadata> @@ -621,6 +1023,21 @@ public final UnaryCallable deployModelCallable() * it's using. * *

Sample code: + * + *

{@code
+   * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+   *   UndeployModelRequest request =
+   *       UndeployModelRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .setDeployedModelId("deployedModelId-1817547906")
+   *           .putAllTrafficSplit(new HashMap())
+   *           .build();
+   *   ApiFuture future =
+   *       endpointServiceClient.undeployModelCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable undeployModelCallable() { return stub.undeployModelCallable(); diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/JobServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/JobServiceClient.java index 28ea8877f..77f96ee3d 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/JobServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/JobServiceClient.java @@ -46,6 +46,14 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   CustomJob customJob = CustomJob.newBuilder().build();
+ *   CustomJob response = jobServiceClient.createCustomJob(parent, customJob);
+ * }
+ * }
+ * *

Note: close() needs to be called on the JobServiceClient object to clean up resources such as * threads. In the example above, try-with-resources is used, which automatically calls close(). * @@ -158,6 +166,16 @@ public final OperationsClient getOperationsClient() { /** * Creates a CustomJob. A created CustomJob right away will be attempted to be run. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   CustomJob customJob = CustomJob.newBuilder().build();
+   *   CustomJob response = jobServiceClient.createCustomJob(parent, customJob);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the CustomJob in. Format: * `projects/{project}/locations/{location}` * @param customJob Required. The CustomJob to create. @@ -176,6 +194,16 @@ public final CustomJob createCustomJob(LocationName parent, CustomJob customJob) /** * Creates a CustomJob. A created CustomJob right away will be attempted to be run. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   CustomJob customJob = CustomJob.newBuilder().build();
+   *   CustomJob response = jobServiceClient.createCustomJob(parent, customJob);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the CustomJob in. Format: * `projects/{project}/locations/{location}` * @param customJob Required. The CustomJob to create. @@ -191,6 +219,19 @@ public final CustomJob createCustomJob(String parent, CustomJob customJob) { /** * Creates a CustomJob. A created CustomJob right away will be attempted to be run. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateCustomJobRequest request =
+   *       CreateCustomJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setCustomJob(CustomJob.newBuilder().build())
+   *           .build();
+   *   CustomJob response = jobServiceClient.createCustomJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -203,6 +244,19 @@ public final CustomJob createCustomJob(CreateCustomJobRequest request) { * Creates a CustomJob. A created CustomJob right away will be attempted to be run. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateCustomJobRequest request =
+   *       CreateCustomJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setCustomJob(CustomJob.newBuilder().build())
+   *           .build();
+   *   ApiFuture future = jobServiceClient.createCustomJobCallable().futureCall(request);
+   *   // Do something.
+   *   CustomJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createCustomJobCallable() { return stub.createCustomJobCallable(); @@ -212,6 +266,15 @@ public final UnaryCallable createCustomJobCal /** * Gets a CustomJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CustomJobName name = CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]");
+   *   CustomJob response = jobServiceClient.getCustomJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the CustomJob resource. Format: * `projects/{project}/locations/{location}/customJobs/{custom_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -226,6 +289,15 @@ public final CustomJob getCustomJob(CustomJobName name) { /** * Gets a CustomJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name = CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString();
+   *   CustomJob response = jobServiceClient.getCustomJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the CustomJob resource. Format: * `projects/{project}/locations/{location}/customJobs/{custom_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -239,6 +311,18 @@ public final CustomJob getCustomJob(String name) { /** * Gets a CustomJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetCustomJobRequest request =
+   *       GetCustomJobRequest.newBuilder()
+   *           .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString())
+   *           .build();
+   *   CustomJob response = jobServiceClient.getCustomJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -251,6 +335,18 @@ public final CustomJob getCustomJob(GetCustomJobRequest request) { * Gets a CustomJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetCustomJobRequest request =
+   *       GetCustomJobRequest.newBuilder()
+   *           .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString())
+   *           .build();
+   *   ApiFuture future = jobServiceClient.getCustomJobCallable().futureCall(request);
+   *   // Do something.
+   *   CustomJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getCustomJobCallable() { return stub.getCustomJobCallable(); @@ -260,6 +356,17 @@ public final UnaryCallable getCustomJobCallable( /** * Lists CustomJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (CustomJob element : jobServiceClient.listCustomJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the CustomJobs from. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -276,6 +383,17 @@ public final ListCustomJobsPagedResponse listCustomJobs(LocationName parent) { /** * Lists CustomJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (CustomJob element : jobServiceClient.listCustomJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the CustomJobs from. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -289,6 +407,24 @@ public final ListCustomJobsPagedResponse listCustomJobs(String parent) { /** * Lists CustomJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListCustomJobsRequest request =
+   *       ListCustomJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   for (CustomJob element : jobServiceClient.listCustomJobs(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -301,6 +437,25 @@ public final ListCustomJobsPagedResponse listCustomJobs(ListCustomJobsRequest re * Lists CustomJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListCustomJobsRequest request =
+   *       ListCustomJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.listCustomJobsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (CustomJob element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listCustomJobsPagedCallable() { @@ -312,6 +467,23 @@ public final ListCustomJobsPagedResponse listCustomJobs(ListCustomJobsRequest re * Lists CustomJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   while (true) {
+   *     ListCustomJobsResponse response = jobServiceClient.listCustomJobsCallable().call(request);
+   *     for (CustomJob element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listCustomJobsCallable() { @@ -322,6 +494,15 @@ public final ListCustomJobsPagedResponse listCustomJobs(ListCustomJobsRequest re /** * Deletes a CustomJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CustomJobName name = CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]");
+   *   jobServiceClient.deleteCustomJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the CustomJob resource to be deleted. Format: * `projects/{project}/locations/{location}/customJobs/{custom_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -337,6 +518,15 @@ public final OperationFuture deleteCustomJobAsyn /** * Deletes a CustomJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name = CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString();
+   *   jobServiceClient.deleteCustomJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the CustomJob resource to be deleted. Format: * `projects/{project}/locations/{location}/customJobs/{custom_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -350,6 +540,18 @@ public final OperationFuture deleteCustomJobAsyn /** * Deletes a CustomJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteCustomJobRequest request =
+   *       DeleteCustomJobRequest.newBuilder()
+   *           .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString())
+   *           .build();
+   *   jobServiceClient.deleteCustomJobAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -363,6 +565,19 @@ public final OperationFuture deleteCustomJobAsyn * Deletes a CustomJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteCustomJobRequest request =
+   *       DeleteCustomJobRequest.newBuilder()
+   *           .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString())
+   *           .build();
+   *   OperationFuture future =
+   *       jobServiceClient.deleteCustomJobOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteCustomJobOperationCallable() { @@ -374,6 +589,18 @@ public final OperationFuture deleteCustomJobAsyn * Deletes a CustomJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteCustomJobRequest request =
+   *       DeleteCustomJobRequest.newBuilder()
+   *           .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString())
+   *           .build();
+   *   ApiFuture future = jobServiceClient.deleteCustomJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteCustomJobCallable() { return stub.deleteCustomJobCallable(); @@ -390,6 +617,15 @@ public final UnaryCallable deleteCustomJobCal * [google.rpc.Status.code][google.rpc.Status.code] of 1, corresponding to `Code.CANCELLED`, and * [CustomJob.state][google.cloud.aiplatform.v1.CustomJob.state] is set to `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CustomJobName name = CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]");
+   *   jobServiceClient.cancelCustomJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the CustomJob to cancel. Format: * `projects/{project}/locations/{location}/customJobs/{custom_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -411,6 +647,15 @@ public final void cancelCustomJob(CustomJobName name) { * [google.rpc.Status.code][google.rpc.Status.code] of 1, corresponding to `Code.CANCELLED`, and * [CustomJob.state][google.cloud.aiplatform.v1.CustomJob.state] is set to `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name = CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString();
+   *   jobServiceClient.cancelCustomJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the CustomJob to cancel. Format: * `projects/{project}/locations/{location}/customJobs/{custom_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -431,6 +676,18 @@ public final void cancelCustomJob(String name) { * [google.rpc.Status.code][google.rpc.Status.code] of 1, corresponding to `Code.CANCELLED`, and * [CustomJob.state][google.cloud.aiplatform.v1.CustomJob.state] is set to `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelCustomJobRequest request =
+   *       CancelCustomJobRequest.newBuilder()
+   *           .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString())
+   *           .build();
+   *   jobServiceClient.cancelCustomJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -450,6 +707,18 @@ public final void cancelCustomJob(CancelCustomJobRequest request) { * [CustomJob.state][google.cloud.aiplatform.v1.CustomJob.state] is set to `CANCELLED`. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelCustomJobRequest request =
+   *       CancelCustomJobRequest.newBuilder()
+   *           .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString())
+   *           .build();
+   *   ApiFuture future = jobServiceClient.cancelCustomJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable cancelCustomJobCallable() { return stub.cancelCustomJobCallable(); @@ -459,6 +728,16 @@ public final UnaryCallable cancelCustomJobCallabl /** * Creates a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   DataLabelingJob dataLabelingJob = DataLabelingJob.newBuilder().build();
+   *   DataLabelingJob response = jobServiceClient.createDataLabelingJob(parent, dataLabelingJob);
+   * }
+   * }
+ * * @param parent Required. The parent of the DataLabelingJob. Format: * `projects/{project}/locations/{location}` * @param dataLabelingJob Required. The DataLabelingJob to create. @@ -478,6 +757,16 @@ public final DataLabelingJob createDataLabelingJob( /** * Creates a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   DataLabelingJob dataLabelingJob = DataLabelingJob.newBuilder().build();
+   *   DataLabelingJob response = jobServiceClient.createDataLabelingJob(parent, dataLabelingJob);
+   * }
+   * }
+ * * @param parent Required. The parent of the DataLabelingJob. Format: * `projects/{project}/locations/{location}` * @param dataLabelingJob Required. The DataLabelingJob to create. @@ -497,6 +786,19 @@ public final DataLabelingJob createDataLabelingJob( /** * Creates a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateDataLabelingJobRequest request =
+   *       CreateDataLabelingJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setDataLabelingJob(DataLabelingJob.newBuilder().build())
+   *           .build();
+   *   DataLabelingJob response = jobServiceClient.createDataLabelingJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -509,6 +811,20 @@ public final DataLabelingJob createDataLabelingJob(CreateDataLabelingJobRequest * Creates a DataLabelingJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateDataLabelingJobRequest request =
+   *       CreateDataLabelingJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setDataLabelingJob(DataLabelingJob.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.createDataLabelingJobCallable().futureCall(request);
+   *   // Do something.
+   *   DataLabelingJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createDataLabelingJobCallable() { @@ -519,6 +835,16 @@ public final DataLabelingJob createDataLabelingJob(CreateDataLabelingJobRequest /** * Gets a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DataLabelingJobName name =
+   *       DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]");
+   *   DataLabelingJob response = jobServiceClient.getDataLabelingJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the DataLabelingJob. Format: *

`projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -535,6 +861,16 @@ public final DataLabelingJob getDataLabelingJob(DataLabelingJobName name) { /** * Gets a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]").toString();
+   *   DataLabelingJob response = jobServiceClient.getDataLabelingJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the DataLabelingJob. Format: *

`projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -549,6 +885,20 @@ public final DataLabelingJob getDataLabelingJob(String name) { /** * Gets a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetDataLabelingJobRequest request =
+   *       GetDataLabelingJobRequest.newBuilder()
+   *           .setName(
+   *               DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   DataLabelingJob response = jobServiceClient.getDataLabelingJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -561,6 +911,21 @@ public final DataLabelingJob getDataLabelingJob(GetDataLabelingJobRequest reques * Gets a DataLabelingJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetDataLabelingJobRequest request =
+   *       GetDataLabelingJobRequest.newBuilder()
+   *           .setName(
+   *               DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.getDataLabelingJobCallable().futureCall(request);
+   *   // Do something.
+   *   DataLabelingJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getDataLabelingJobCallable() { @@ -571,6 +936,17 @@ public final DataLabelingJob getDataLabelingJob(GetDataLabelingJobRequest reques /** * Lists DataLabelingJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (DataLabelingJob element : jobServiceClient.listDataLabelingJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The parent of the DataLabelingJob. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -587,6 +963,17 @@ public final ListDataLabelingJobsPagedResponse listDataLabelingJobs(LocationName /** * Lists DataLabelingJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (DataLabelingJob element : jobServiceClient.listDataLabelingJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The parent of the DataLabelingJob. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -601,6 +988,25 @@ public final ListDataLabelingJobsPagedResponse listDataLabelingJobs(String paren /** * Lists DataLabelingJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListDataLabelingJobsRequest request =
+   *       ListDataLabelingJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   for (DataLabelingJob element : jobServiceClient.listDataLabelingJobs(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -614,6 +1020,26 @@ public final ListDataLabelingJobsPagedResponse listDataLabelingJobs( * Lists DataLabelingJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListDataLabelingJobsRequest request =
+   *       ListDataLabelingJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.listDataLabelingJobsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (DataLabelingJob element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listDataLabelingJobsPagedCallable() { @@ -625,6 +1051,24 @@ public final ListDataLabelingJobsPagedResponse listDataLabelingJobs( * Lists DataLabelingJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   while (true) {
+   *     ListDataLabelingJobsResponse response =
+   *         jobServiceClient.listDataLabelingJobsCallable().call(request);
+   *     for (DataLabelingJob element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listDataLabelingJobsCallable() { @@ -635,6 +1079,16 @@ public final ListDataLabelingJobsPagedResponse listDataLabelingJobs( /** * Deletes a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DataLabelingJobName name =
+   *       DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]");
+   *   jobServiceClient.deleteDataLabelingJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the DataLabelingJob to be deleted. Format: *

`projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -652,6 +1106,16 @@ public final OperationFuture deleteDataLabelingJ /** * Deletes a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]").toString();
+   *   jobServiceClient.deleteDataLabelingJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the DataLabelingJob to be deleted. Format: *

`projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -667,6 +1131,20 @@ public final OperationFuture deleteDataLabelingJ /** * Deletes a DataLabelingJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteDataLabelingJobRequest request =
+   *       DeleteDataLabelingJobRequest.newBuilder()
+   *           .setName(
+   *               DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   jobServiceClient.deleteDataLabelingJobAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -680,6 +1158,21 @@ public final OperationFuture deleteDataLabelingJ * Deletes a DataLabelingJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteDataLabelingJobRequest request =
+   *       DeleteDataLabelingJobRequest.newBuilder()
+   *           .setName(
+   *               DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   OperationFuture future =
+   *       jobServiceClient.deleteDataLabelingJobOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteDataLabelingJobOperationCallable() { @@ -691,6 +1184,21 @@ public final OperationFuture deleteDataLabelingJ * Deletes a DataLabelingJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteDataLabelingJobRequest request =
+   *       DeleteDataLabelingJobRequest.newBuilder()
+   *           .setName(
+   *               DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.deleteDataLabelingJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteDataLabelingJobCallable() { @@ -701,6 +1209,16 @@ public final OperationFuture deleteDataLabelingJ /** * Cancels a DataLabelingJob. Success of cancellation is not guaranteed. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DataLabelingJobName name =
+   *       DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]");
+   *   jobServiceClient.cancelDataLabelingJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the DataLabelingJob. Format: *

`projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -717,6 +1235,16 @@ public final void cancelDataLabelingJob(DataLabelingJobName name) { /** * Cancels a DataLabelingJob. Success of cancellation is not guaranteed. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]").toString();
+   *   jobServiceClient.cancelDataLabelingJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the DataLabelingJob. Format: *

`projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -731,6 +1259,20 @@ public final void cancelDataLabelingJob(String name) { /** * Cancels a DataLabelingJob. Success of cancellation is not guaranteed. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelDataLabelingJobRequest request =
+   *       CancelDataLabelingJobRequest.newBuilder()
+   *           .setName(
+   *               DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   jobServiceClient.cancelDataLabelingJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -743,6 +1285,21 @@ public final void cancelDataLabelingJob(CancelDataLabelingJobRequest request) { * Cancels a DataLabelingJob. Success of cancellation is not guaranteed. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelDataLabelingJobRequest request =
+   *       CancelDataLabelingJobRequest.newBuilder()
+   *           .setName(
+   *               DataLabelingJobName.of("[PROJECT]", "[LOCATION]", "[DATA_LABELING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.cancelDataLabelingJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable cancelDataLabelingJobCallable() { return stub.cancelDataLabelingJobCallable(); @@ -752,6 +1309,18 @@ public final UnaryCallable cancelDataLabeli /** * Creates a HyperparameterTuningJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   HyperparameterTuningJob hyperparameterTuningJob =
+   *       HyperparameterTuningJob.newBuilder().build();
+   *   HyperparameterTuningJob response =
+   *       jobServiceClient.createHyperparameterTuningJob(parent, hyperparameterTuningJob);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the HyperparameterTuningJob * in. Format: `projects/{project}/locations/{location}` * @param hyperparameterTuningJob Required. The HyperparameterTuningJob to create. @@ -771,6 +1340,18 @@ public final HyperparameterTuningJob createHyperparameterTuningJob( /** * Creates a HyperparameterTuningJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   HyperparameterTuningJob hyperparameterTuningJob =
+   *       HyperparameterTuningJob.newBuilder().build();
+   *   HyperparameterTuningJob response =
+   *       jobServiceClient.createHyperparameterTuningJob(parent, hyperparameterTuningJob);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the HyperparameterTuningJob * in. Format: `projects/{project}/locations/{location}` * @param hyperparameterTuningJob Required. The HyperparameterTuningJob to create. @@ -790,6 +1371,19 @@ public final HyperparameterTuningJob createHyperparameterTuningJob( /** * Creates a HyperparameterTuningJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateHyperparameterTuningJobRequest request =
+   *       CreateHyperparameterTuningJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setHyperparameterTuningJob(HyperparameterTuningJob.newBuilder().build())
+   *           .build();
+   *   HyperparameterTuningJob response = jobServiceClient.createHyperparameterTuningJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -803,6 +1397,20 @@ public final HyperparameterTuningJob createHyperparameterTuningJob( * Creates a HyperparameterTuningJob * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateHyperparameterTuningJobRequest request =
+   *       CreateHyperparameterTuningJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setHyperparameterTuningJob(HyperparameterTuningJob.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.createHyperparameterTuningJobCallable().futureCall(request);
+   *   // Do something.
+   *   HyperparameterTuningJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createHyperparameterTuningJobCallable() { @@ -813,6 +1421,16 @@ public final HyperparameterTuningJob createHyperparameterTuningJob( /** * Gets a HyperparameterTuningJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   HyperparameterTuningJobName name =
+   *       HyperparameterTuningJobName.of("[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]");
+   *   HyperparameterTuningJob response = jobServiceClient.getHyperparameterTuningJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the HyperparameterTuningJob resource. Format: *

`projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -830,6 +1448,17 @@ public final HyperparameterTuningJob getHyperparameterTuningJob( /** * Gets a HyperparameterTuningJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       HyperparameterTuningJobName.of("[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *           .toString();
+   *   HyperparameterTuningJob response = jobServiceClient.getHyperparameterTuningJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the HyperparameterTuningJob resource. Format: *

`projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -844,6 +1473,21 @@ public final HyperparameterTuningJob getHyperparameterTuningJob(String name) { /** * Gets a HyperparameterTuningJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetHyperparameterTuningJobRequest request =
+   *       GetHyperparameterTuningJobRequest.newBuilder()
+   *           .setName(
+   *               HyperparameterTuningJobName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   HyperparameterTuningJob response = jobServiceClient.getHyperparameterTuningJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -857,6 +1501,22 @@ public final HyperparameterTuningJob getHyperparameterTuningJob( * Gets a HyperparameterTuningJob * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetHyperparameterTuningJobRequest request =
+   *       GetHyperparameterTuningJobRequest.newBuilder()
+   *           .setName(
+   *               HyperparameterTuningJobName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.getHyperparameterTuningJobCallable().futureCall(request);
+   *   // Do something.
+   *   HyperparameterTuningJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getHyperparameterTuningJobCallable() { @@ -867,6 +1527,18 @@ public final HyperparameterTuningJob getHyperparameterTuningJob( /** * Lists HyperparameterTuningJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (HyperparameterTuningJob element :
+   *       jobServiceClient.listHyperparameterTuningJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the HyperparameterTuningJobs * from. Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -884,6 +1556,18 @@ public final ListHyperparameterTuningJobsPagedResponse listHyperparameterTuningJ /** * Lists HyperparameterTuningJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (HyperparameterTuningJob element :
+   *       jobServiceClient.listHyperparameterTuningJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the HyperparameterTuningJobs * from. Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -899,6 +1583,25 @@ public final ListHyperparameterTuningJobsPagedResponse listHyperparameterTuningJ /** * Lists HyperparameterTuningJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListHyperparameterTuningJobsRequest request =
+   *       ListHyperparameterTuningJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   for (HyperparameterTuningJob element :
+   *       jobServiceClient.listHyperparameterTuningJobs(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -912,6 +1615,25 @@ public final ListHyperparameterTuningJobsPagedResponse listHyperparameterTuningJ * Lists HyperparameterTuningJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListHyperparameterTuningJobsRequest request =
+   *       ListHyperparameterTuningJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.listHyperparameterTuningJobsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (HyperparameterTuningJob element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable< ListHyperparameterTuningJobsRequest, ListHyperparameterTuningJobsPagedResponse> @@ -924,6 +1646,24 @@ public final ListHyperparameterTuningJobsPagedResponse listHyperparameterTuningJ * Lists HyperparameterTuningJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   while (true) {
+   *     ListHyperparameterTuningJobsResponse response =
+   *         jobServiceClient.listHyperparameterTuningJobsCallable().call(request);
+   *     for (HyperparameterTuningJob element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable< ListHyperparameterTuningJobsRequest, ListHyperparameterTuningJobsResponse> @@ -935,6 +1675,16 @@ public final ListHyperparameterTuningJobsPagedResponse listHyperparameterTuningJ /** * Deletes a HyperparameterTuningJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   HyperparameterTuningJobName name =
+   *       HyperparameterTuningJobName.of("[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]");
+   *   jobServiceClient.deleteHyperparameterTuningJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the HyperparameterTuningJob resource to be deleted. Format: *

`projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -952,6 +1702,17 @@ public final OperationFuture deleteHyperparamete /** * Deletes a HyperparameterTuningJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       HyperparameterTuningJobName.of("[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *           .toString();
+   *   jobServiceClient.deleteHyperparameterTuningJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the HyperparameterTuningJob resource to be deleted. Format: *

`projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -967,6 +1728,21 @@ public final OperationFuture deleteHyperparamete /** * Deletes a HyperparameterTuningJob. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteHyperparameterTuningJobRequest request =
+   *       DeleteHyperparameterTuningJobRequest.newBuilder()
+   *           .setName(
+   *               HyperparameterTuningJobName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   jobServiceClient.deleteHyperparameterTuningJobAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -980,6 +1756,22 @@ public final OperationFuture deleteHyperparamete * Deletes a HyperparameterTuningJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteHyperparameterTuningJobRequest request =
+   *       DeleteHyperparameterTuningJobRequest.newBuilder()
+   *           .setName(
+   *               HyperparameterTuningJobName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   OperationFuture future =
+   *       jobServiceClient.deleteHyperparameterTuningJobOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable< DeleteHyperparameterTuningJobRequest, Empty, DeleteOperationMetadata> @@ -992,6 +1784,22 @@ public final OperationFuture deleteHyperparamete * Deletes a HyperparameterTuningJob. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteHyperparameterTuningJobRequest request =
+   *       DeleteHyperparameterTuningJobRequest.newBuilder()
+   *           .setName(
+   *               HyperparameterTuningJobName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.deleteHyperparameterTuningJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteHyperparameterTuningJobCallable() { @@ -1013,6 +1821,16 @@ public final OperationFuture deleteHyperparamete * [HyperparameterTuningJob.state][google.cloud.aiplatform.v1.HyperparameterTuningJob.state] is * set to `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   HyperparameterTuningJobName name =
+   *       HyperparameterTuningJobName.of("[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]");
+   *   jobServiceClient.cancelHyperparameterTuningJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the HyperparameterTuningJob to cancel. Format: *

`projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1040,6 +1858,17 @@ public final void cancelHyperparameterTuningJob(HyperparameterTuningJobName name * [HyperparameterTuningJob.state][google.cloud.aiplatform.v1.HyperparameterTuningJob.state] is * set to `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       HyperparameterTuningJobName.of("[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *           .toString();
+   *   jobServiceClient.cancelHyperparameterTuningJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the HyperparameterTuningJob to cancel. Format: *

`projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1065,6 +1894,21 @@ public final void cancelHyperparameterTuningJob(String name) { * [HyperparameterTuningJob.state][google.cloud.aiplatform.v1.HyperparameterTuningJob.state] is * set to `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelHyperparameterTuningJobRequest request =
+   *       CancelHyperparameterTuningJobRequest.newBuilder()
+   *           .setName(
+   *               HyperparameterTuningJobName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   jobServiceClient.cancelHyperparameterTuningJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1088,6 +1932,22 @@ public final void cancelHyperparameterTuningJob(CancelHyperparameterTuningJobReq * set to `CANCELLED`. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelHyperparameterTuningJobRequest request =
+   *       CancelHyperparameterTuningJobRequest.newBuilder()
+   *           .setName(
+   *               HyperparameterTuningJobName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[HYPERPARAMETER_TUNING_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.cancelHyperparameterTuningJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable cancelHyperparameterTuningJobCallable() { @@ -1099,6 +1959,17 @@ public final void cancelHyperparameterTuningJob(CancelHyperparameterTuningJobReq * Creates a BatchPredictionJob. A BatchPredictionJob once created will right away be attempted to * start. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   BatchPredictionJob batchPredictionJob = BatchPredictionJob.newBuilder().build();
+   *   BatchPredictionJob response =
+   *       jobServiceClient.createBatchPredictionJob(parent, batchPredictionJob);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the BatchPredictionJob in. * Format: `projects/{project}/locations/{location}` * @param batchPredictionJob Required. The BatchPredictionJob to create. @@ -1119,6 +1990,17 @@ public final BatchPredictionJob createBatchPredictionJob( * Creates a BatchPredictionJob. A BatchPredictionJob once created will right away be attempted to * start. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   BatchPredictionJob batchPredictionJob = BatchPredictionJob.newBuilder().build();
+   *   BatchPredictionJob response =
+   *       jobServiceClient.createBatchPredictionJob(parent, batchPredictionJob);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the BatchPredictionJob in. * Format: `projects/{project}/locations/{location}` * @param batchPredictionJob Required. The BatchPredictionJob to create. @@ -1139,6 +2021,19 @@ public final BatchPredictionJob createBatchPredictionJob( * Creates a BatchPredictionJob. A BatchPredictionJob once created will right away be attempted to * start. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateBatchPredictionJobRequest request =
+   *       CreateBatchPredictionJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setBatchPredictionJob(BatchPredictionJob.newBuilder().build())
+   *           .build();
+   *   BatchPredictionJob response = jobServiceClient.createBatchPredictionJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1153,6 +2048,20 @@ public final BatchPredictionJob createBatchPredictionJob( * start. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CreateBatchPredictionJobRequest request =
+   *       CreateBatchPredictionJobRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setBatchPredictionJob(BatchPredictionJob.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.createBatchPredictionJobCallable().futureCall(request);
+   *   // Do something.
+   *   BatchPredictionJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createBatchPredictionJobCallable() { @@ -1163,6 +2072,16 @@ public final BatchPredictionJob createBatchPredictionJob( /** * Gets a BatchPredictionJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   BatchPredictionJobName name =
+   *       BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]");
+   *   BatchPredictionJob response = jobServiceClient.getBatchPredictionJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the BatchPredictionJob resource. Format: *

`projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1179,6 +2098,16 @@ public final BatchPredictionJob getBatchPredictionJob(BatchPredictionJobName nam /** * Gets a BatchPredictionJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]").toString();
+   *   BatchPredictionJob response = jobServiceClient.getBatchPredictionJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the BatchPredictionJob resource. Format: *

`projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1193,6 +2122,20 @@ public final BatchPredictionJob getBatchPredictionJob(String name) { /** * Gets a BatchPredictionJob * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetBatchPredictionJobRequest request =
+   *       GetBatchPredictionJobRequest.newBuilder()
+   *           .setName(
+   *               BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]")
+   *                   .toString())
+   *           .build();
+   *   BatchPredictionJob response = jobServiceClient.getBatchPredictionJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1205,6 +2148,21 @@ public final BatchPredictionJob getBatchPredictionJob(GetBatchPredictionJobReque * Gets a BatchPredictionJob * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   GetBatchPredictionJobRequest request =
+   *       GetBatchPredictionJobRequest.newBuilder()
+   *           .setName(
+   *               BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.getBatchPredictionJobCallable().futureCall(request);
+   *   // Do something.
+   *   BatchPredictionJob response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getBatchPredictionJobCallable() { @@ -1215,6 +2173,18 @@ public final BatchPredictionJob getBatchPredictionJob(GetBatchPredictionJobReque /** * Lists BatchPredictionJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (BatchPredictionJob element :
+   *       jobServiceClient.listBatchPredictionJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the BatchPredictionJobs from. * Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1231,6 +2201,18 @@ public final ListBatchPredictionJobsPagedResponse listBatchPredictionJobs(Locati /** * Lists BatchPredictionJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (BatchPredictionJob element :
+   *       jobServiceClient.listBatchPredictionJobs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the BatchPredictionJobs from. * Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1245,6 +2227,25 @@ public final ListBatchPredictionJobsPagedResponse listBatchPredictionJobs(String /** * Lists BatchPredictionJobs in a Location. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListBatchPredictionJobsRequest request =
+   *       ListBatchPredictionJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   for (BatchPredictionJob element :
+   *       jobServiceClient.listBatchPredictionJobs(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1258,6 +2259,25 @@ public final ListBatchPredictionJobsPagedResponse listBatchPredictionJobs( * Lists BatchPredictionJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   ListBatchPredictionJobsRequest request =
+   *       ListBatchPredictionJobsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.listBatchPredictionJobsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (BatchPredictionJob element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listBatchPredictionJobsPagedCallable() { @@ -1269,6 +2289,24 @@ public final ListBatchPredictionJobsPagedResponse listBatchPredictionJobs( * Lists BatchPredictionJobs in a Location. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   while (true) {
+   *     ListBatchPredictionJobsResponse response =
+   *         jobServiceClient.listBatchPredictionJobsCallable().call(request);
+   *     for (BatchPredictionJob element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listBatchPredictionJobsCallable() { @@ -1279,6 +2317,16 @@ public final ListBatchPredictionJobsPagedResponse listBatchPredictionJobs( /** * Deletes a BatchPredictionJob. Can only be called on jobs that already finished. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   BatchPredictionJobName name =
+   *       BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]");
+   *   jobServiceClient.deleteBatchPredictionJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the BatchPredictionJob resource to be deleted. Format: *

`projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1296,6 +2344,16 @@ public final OperationFuture deleteBatchPredicti /** * Deletes a BatchPredictionJob. Can only be called on jobs that already finished. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]").toString();
+   *   jobServiceClient.deleteBatchPredictionJobAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the BatchPredictionJob resource to be deleted. Format: *

`projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1311,6 +2369,20 @@ public final OperationFuture deleteBatchPredicti /** * Deletes a BatchPredictionJob. Can only be called on jobs that already finished. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteBatchPredictionJobRequest request =
+   *       DeleteBatchPredictionJobRequest.newBuilder()
+   *           .setName(
+   *               BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]")
+   *                   .toString())
+   *           .build();
+   *   jobServiceClient.deleteBatchPredictionJobAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1324,6 +2396,21 @@ public final OperationFuture deleteBatchPredicti * Deletes a BatchPredictionJob. Can only be called on jobs that already finished. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteBatchPredictionJobRequest request =
+   *       DeleteBatchPredictionJobRequest.newBuilder()
+   *           .setName(
+   *               BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]")
+   *                   .toString())
+   *           .build();
+   *   OperationFuture future =
+   *       jobServiceClient.deleteBatchPredictionJobOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteBatchPredictionJobOperationCallable() { @@ -1335,6 +2422,21 @@ public final OperationFuture deleteBatchPredicti * Deletes a BatchPredictionJob. Can only be called on jobs that already finished. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   DeleteBatchPredictionJobRequest request =
+   *       DeleteBatchPredictionJobRequest.newBuilder()
+   *           .setName(
+   *               BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.deleteBatchPredictionJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteBatchPredictionJobCallable() { @@ -1354,6 +2456,16 @@ public final OperationFuture deleteBatchPredicti * [BatchPredictionJob.state][google.cloud.aiplatform.v1.BatchPredictionJob.state] is set to * `CANCELLED`. Any files already outputted by the job are not deleted. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   BatchPredictionJobName name =
+   *       BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]");
+   *   jobServiceClient.cancelBatchPredictionJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the BatchPredictionJob to cancel. Format: *

`projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1379,6 +2491,16 @@ public final void cancelBatchPredictionJob(BatchPredictionJobName name) { * [BatchPredictionJob.state][google.cloud.aiplatform.v1.BatchPredictionJob.state] is set to * `CANCELLED`. Any files already outputted by the job are not deleted. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   String name =
+   *       BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]").toString();
+   *   jobServiceClient.cancelBatchPredictionJob(name);
+   * }
+   * }
+ * * @param name Required. The name of the BatchPredictionJob to cancel. Format: *

`projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1402,6 +2524,20 @@ public final void cancelBatchPredictionJob(String name) { * [BatchPredictionJob.state][google.cloud.aiplatform.v1.BatchPredictionJob.state] is set to * `CANCELLED`. Any files already outputted by the job are not deleted. * + *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelBatchPredictionJobRequest request =
+   *       CancelBatchPredictionJobRequest.newBuilder()
+   *           .setName(
+   *               BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]")
+   *                   .toString())
+   *           .build();
+   *   jobServiceClient.cancelBatchPredictionJob(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1423,6 +2559,21 @@ public final void cancelBatchPredictionJob(CancelBatchPredictionJobRequest reque * `CANCELLED`. Any files already outputted by the job are not deleted. * *

Sample code: + * + *

{@code
+   * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+   *   CancelBatchPredictionJobRequest request =
+   *       CancelBatchPredictionJobRequest.newBuilder()
+   *           .setName(
+   *               BatchPredictionJobName.of("[PROJECT]", "[LOCATION]", "[BATCH_PREDICTION_JOB]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       jobServiceClient.cancelBatchPredictionJobCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable cancelBatchPredictionJobCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/MigrationServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/MigrationServiceClient.java index 646448a57..2fe2ed227 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/MigrationServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/MigrationServiceClient.java @@ -46,6 +46,16 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   for (MigratableResource element :
+ *       migrationServiceClient.searchMigratableResources(parent).iterateAll()) {
+ *     // doThingsWith(element);
+ *   }
+ * }
+ * }
+ * *

Note: close() needs to be called on the MigrationServiceClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). @@ -164,6 +174,18 @@ public final OperationsClient getOperationsClient() { * Searches all of the resources in automl.googleapis.com, datalabeling.googleapis.com and * ml.googleapis.com that can be migrated to AI Platform's given location. * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (MigratableResource element :
+   *       migrationServiceClient.searchMigratableResources(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The location that the migratable resources should be searched from. * It's the AI Platform location that the resources can be migrated to, not the resources' * original location. Format: `projects/{project}/locations/{location}` @@ -183,6 +205,18 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * Searches all of the resources in automl.googleapis.com, datalabeling.googleapis.com and * ml.googleapis.com that can be migrated to AI Platform's given location. * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (MigratableResource element :
+   *       migrationServiceClient.searchMigratableResources(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The location that the migratable resources should be searched from. * It's the AI Platform location that the resources can be migrated to, not the resources' * original location. Format: `projects/{project}/locations/{location}` @@ -199,6 +233,24 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources(St * Searches all of the resources in automl.googleapis.com, datalabeling.googleapis.com and * ml.googleapis.com that can be migrated to AI Platform's given location. * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   SearchMigratableResourcesRequest request =
+   *       SearchMigratableResourcesRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setFilter("filter-1274492040")
+   *           .build();
+   *   for (MigratableResource element :
+   *       migrationServiceClient.searchMigratableResources(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -213,6 +265,24 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * ml.googleapis.com that can be migrated to AI Platform's given location. * *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   SearchMigratableResourcesRequest request =
+   *       SearchMigratableResourcesRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setFilter("filter-1274492040")
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.searchMigratableResourcesPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (MigratableResource element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable< SearchMigratableResourcesRequest, SearchMigratableResourcesPagedResponse> @@ -226,6 +296,24 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * ml.googleapis.com that can be migrated to AI Platform's given location. * *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   while (true) {
+   *     SearchMigratableResourcesResponse response =
+   *         migrationServiceClient.searchMigratableResourcesCallable().call(request);
+   *     for (MigratableResource element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable searchMigratableResourcesCallable() { @@ -237,6 +325,17 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * Batch migrates resources from ml.googleapis.com, automl.googleapis.com, and * datalabeling.googleapis.com to AI Platform (Unified). * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   List migrateResourceRequests = new ArrayList<>();
+   *   BatchMigrateResourcesResponse response =
+   *       migrationServiceClient.batchMigrateResourcesAsync(parent, migrateResourceRequests).get();
+   * }
+   * }
+ * * @param parent Required. The location of the migrated resource will live in. Format: * `projects/{project}/locations/{location}` * @param migrateResourceRequests Required. The request messages specifying the resources to @@ -261,6 +360,17 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * Batch migrates resources from ml.googleapis.com, automl.googleapis.com, and * datalabeling.googleapis.com to AI Platform (Unified). * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   List migrateResourceRequests = new ArrayList<>();
+   *   BatchMigrateResourcesResponse response =
+   *       migrationServiceClient.batchMigrateResourcesAsync(parent, migrateResourceRequests).get();
+   * }
+   * }
+ * * @param parent Required. The location of the migrated resource will live in. Format: * `projects/{project}/locations/{location}` * @param migrateResourceRequests Required. The request messages specifying the resources to @@ -285,6 +395,20 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * Batch migrates resources from ml.googleapis.com, automl.googleapis.com, and * datalabeling.googleapis.com to AI Platform (Unified). * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   BatchMigrateResourcesRequest request =
+   *       BatchMigrateResourcesRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .addAllMigrateResourceRequests(new ArrayList())
+   *           .build();
+   *   BatchMigrateResourcesResponse response =
+   *       migrationServiceClient.batchMigrateResourcesAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -300,6 +424,21 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * datalabeling.googleapis.com to AI Platform (Unified). * *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   BatchMigrateResourcesRequest request =
+   *       BatchMigrateResourcesRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .addAllMigrateResourceRequests(new ArrayList())
+   *           .build();
+   *   OperationFuture
+   *       future =
+   *           migrationServiceClient.batchMigrateResourcesOperationCallable().futureCall(request);
+   *   // Do something.
+   *   BatchMigrateResourcesResponse response = future.get();
+   * }
+   * }
*/ public final OperationCallable< BatchMigrateResourcesRequest, @@ -315,6 +454,20 @@ public final SearchMigratableResourcesPagedResponse searchMigratableResources( * datalabeling.googleapis.com to AI Platform (Unified). * *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   BatchMigrateResourcesRequest request =
+   *       BatchMigrateResourcesRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .addAllMigrateResourceRequests(new ArrayList())
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.batchMigrateResourcesCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable batchMigrateResourcesCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/ModelServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/ModelServiceClient.java index ea2d1cc0b..9fb1ca376 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/ModelServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/ModelServiceClient.java @@ -47,6 +47,13 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+ *   ModelName name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]");
+ *   Model response = modelServiceClient.getModel(name);
+ * }
+ * }
+ * *

Note: close() needs to be called on the ModelServiceClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * @@ -160,6 +167,16 @@ public final OperationsClient getOperationsClient() { /** * Uploads a Model artifact into AI Platform. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   Model model = Model.newBuilder().build();
+   *   UploadModelResponse response = modelServiceClient.uploadModelAsync(parent, model).get();
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location into which to upload the Model. * Format: `projects/{project}/locations/{location}` * @param model Required. The Model to create. @@ -179,6 +196,16 @@ public final OperationFuture /** * Uploads a Model artifact into AI Platform. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   Model model = Model.newBuilder().build();
+   *   UploadModelResponse response = modelServiceClient.uploadModelAsync(parent, model).get();
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location into which to upload the Model. * Format: `projects/{project}/locations/{location}` * @param model Required. The Model to create. @@ -195,6 +222,19 @@ public final OperationFuture /** * Uploads a Model artifact into AI Platform. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   UploadModelRequest request =
+   *       UploadModelRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setModel(Model.newBuilder().build())
+   *           .build();
+   *   UploadModelResponse response = modelServiceClient.uploadModelAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -208,6 +248,20 @@ public final OperationFuture * Uploads a Model artifact into AI Platform. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   UploadModelRequest request =
+   *       UploadModelRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setModel(Model.newBuilder().build())
+   *           .build();
+   *   OperationFuture future =
+   *       modelServiceClient.uploadModelOperationCallable().futureCall(request);
+   *   // Do something.
+   *   UploadModelResponse response = future.get();
+   * }
+   * }
*/ public final OperationCallable< UploadModelRequest, UploadModelResponse, UploadModelOperationMetadata> @@ -220,6 +274,19 @@ public final OperationFuture * Uploads a Model artifact into AI Platform. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   UploadModelRequest request =
+   *       UploadModelRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setModel(Model.newBuilder().build())
+   *           .build();
+   *   ApiFuture future = modelServiceClient.uploadModelCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable uploadModelCallable() { return stub.uploadModelCallable(); @@ -229,6 +296,15 @@ public final UnaryCallable uploadModelCallable() /** * Gets a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ModelName name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]");
+   *   Model response = modelServiceClient.getModel(name);
+   * }
+   * }
+ * * @param name Required. The name of the Model resource. Format: * `projects/{project}/locations/{location}/models/{model}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -243,6 +319,15 @@ public final Model getModel(ModelName name) { /** * Gets a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString();
+   *   Model response = modelServiceClient.getModel(name);
+   * }
+   * }
+ * * @param name Required. The name of the Model resource. Format: * `projects/{project}/locations/{location}/models/{model}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -256,6 +341,18 @@ public final Model getModel(String name) { /** * Gets a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   GetModelRequest request =
+   *       GetModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .build();
+   *   Model response = modelServiceClient.getModel(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -268,6 +365,18 @@ public final Model getModel(GetModelRequest request) { * Gets a Model. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   GetModelRequest request =
+   *       GetModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .build();
+   *   ApiFuture future = modelServiceClient.getModelCallable().futureCall(request);
+   *   // Do something.
+   *   Model response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getModelCallable() { return stub.getModelCallable(); @@ -277,6 +386,17 @@ public final UnaryCallable getModelCallable() { /** * Lists Models in a Location. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (Model element : modelServiceClient.listModels(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the Models from. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -291,6 +411,17 @@ public final ListModelsPagedResponse listModels(LocationName parent) { /** * Lists Models in a Location. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (Model element : modelServiceClient.listModels(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the Models from. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -304,6 +435,25 @@ public final ListModelsPagedResponse listModels(String parent) { /** * Lists Models in a Location. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ListModelsRequest request =
+   *       ListModelsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   for (Model element : modelServiceClient.listModels(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -316,6 +466,25 @@ public final ListModelsPagedResponse listModels(ListModelsRequest request) { * Lists Models in a Location. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ListModelsRequest request =
+   *       ListModelsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setOrderBy("orderBy-1207110587")
+   *           .build();
+   *   ApiFuture future = modelServiceClient.listModelsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Model element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listModelsPagedCallable() { return stub.listModelsPagedCallable(); @@ -326,6 +495,23 @@ public final UnaryCallable listModel * Lists Models in a Location. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   while (true) {
+   *     ListModelsResponse response = modelServiceClient.listModelsCallable().call(request);
+   *     for (Model element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listModelsCallable() { return stub.listModelsCallable(); @@ -335,6 +521,16 @@ public final UnaryCallable listModelsCall /** * Updates a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   Model model = Model.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   Model response = modelServiceClient.updateModel(model, updateMask);
+   * }
+   * }
+ * * @param model Required. The Model which replaces the resource on the server. * @param updateMask Required. The update mask applies to the resource. For the `FieldMask` * definition, see [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). @@ -350,6 +546,19 @@ public final Model updateModel(Model model, FieldMask updateMask) { /** * Updates a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   UpdateModelRequest request =
+   *       UpdateModelRequest.newBuilder()
+   *           .setModel(Model.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   Model response = modelServiceClient.updateModel(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -362,6 +571,19 @@ public final Model updateModel(UpdateModelRequest request) { * Updates a Model. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   UpdateModelRequest request =
+   *       UpdateModelRequest.newBuilder()
+   *           .setModel(Model.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future = modelServiceClient.updateModelCallable().futureCall(request);
+   *   // Do something.
+   *   Model response = future.get();
+   * }
+   * }
*/ public final UnaryCallable updateModelCallable() { return stub.updateModelCallable(); @@ -372,6 +594,15 @@ public final UnaryCallable updateModelCallable() { * Deletes a Model. Note: Model can only be deleted if there are no DeployedModels created from * it. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ModelName name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]");
+   *   modelServiceClient.deleteModelAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the Model resource to be deleted. Format: * `projects/{project}/locations/{location}/models/{model}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -387,6 +618,15 @@ public final OperationFuture deleteModelAsync(Mo * Deletes a Model. Note: Model can only be deleted if there are no DeployedModels created from * it. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString();
+   *   modelServiceClient.deleteModelAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the Model resource to be deleted. Format: * `projects/{project}/locations/{location}/models/{model}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -401,6 +641,18 @@ public final OperationFuture deleteModelAsync(St * Deletes a Model. Note: Model can only be deleted if there are no DeployedModels created from * it. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   DeleteModelRequest request =
+   *       DeleteModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .build();
+   *   modelServiceClient.deleteModelAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -415,6 +667,19 @@ public final OperationFuture deleteModelAsync( * it. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   DeleteModelRequest request =
+   *       DeleteModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .build();
+   *   OperationFuture future =
+   *       modelServiceClient.deleteModelOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteModelOperationCallable() { @@ -427,6 +692,18 @@ public final OperationFuture deleteModelAsync( * it. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   DeleteModelRequest request =
+   *       DeleteModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .build();
+   *   ApiFuture future = modelServiceClient.deleteModelCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteModelCallable() { return stub.deleteModelCallable(); @@ -438,6 +715,17 @@ public final UnaryCallable deleteModelCallable() * to be exportable if it has at least one [supported export * format][google.cloud.aiplatform.v1.Model.supported_export_formats]. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ModelName name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]");
+   *   ExportModelRequest.OutputConfig outputConfig =
+   *       ExportModelRequest.OutputConfig.newBuilder().build();
+   *   ExportModelResponse response = modelServiceClient.exportModelAsync(name, outputConfig).get();
+   * }
+   * }
+ * * @param name Required. The resource name of the Model to export. Format: * `projects/{project}/locations/{location}/models/{model}` * @param outputConfig Required. The desired output location and configuration. @@ -459,6 +747,17 @@ public final OperationFuture * to be exportable if it has at least one [supported export * format][google.cloud.aiplatform.v1.Model.supported_export_formats]. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString();
+   *   ExportModelRequest.OutputConfig outputConfig =
+   *       ExportModelRequest.OutputConfig.newBuilder().build();
+   *   ExportModelResponse response = modelServiceClient.exportModelAsync(name, outputConfig).get();
+   * }
+   * }
+ * * @param name Required. The resource name of the Model to export. Format: * `projects/{project}/locations/{location}/models/{model}` * @param outputConfig Required. The desired output location and configuration. @@ -477,6 +776,19 @@ public final OperationFuture * to be exportable if it has at least one [supported export * format][google.cloud.aiplatform.v1.Model.supported_export_formats]. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ExportModelRequest request =
+   *       ExportModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .setOutputConfig(ExportModelRequest.OutputConfig.newBuilder().build())
+   *           .build();
+   *   ExportModelResponse response = modelServiceClient.exportModelAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -492,6 +804,20 @@ public final OperationFuture * format][google.cloud.aiplatform.v1.Model.supported_export_formats]. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ExportModelRequest request =
+   *       ExportModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .setOutputConfig(ExportModelRequest.OutputConfig.newBuilder().build())
+   *           .build();
+   *   OperationFuture future =
+   *       modelServiceClient.exportModelOperationCallable().futureCall(request);
+   *   // Do something.
+   *   ExportModelResponse response = future.get();
+   * }
+   * }
*/ public final OperationCallable< ExportModelRequest, ExportModelResponse, ExportModelOperationMetadata> @@ -506,6 +832,19 @@ public final OperationFuture * format][google.cloud.aiplatform.v1.Model.supported_export_formats]. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ExportModelRequest request =
+   *       ExportModelRequest.newBuilder()
+   *           .setName(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .setOutputConfig(ExportModelRequest.OutputConfig.newBuilder().build())
+   *           .build();
+   *   ApiFuture future = modelServiceClient.exportModelCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable exportModelCallable() { return stub.exportModelCallable(); @@ -515,6 +854,16 @@ public final UnaryCallable exportModelCallable() /** * Gets a ModelEvaluation. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ModelEvaluationName name =
+   *       ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]");
+   *   ModelEvaluation response = modelServiceClient.getModelEvaluation(name);
+   * }
+   * }
+ * * @param name Required. The name of the ModelEvaluation resource. Format: *

`projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -531,6 +880,16 @@ public final ModelEvaluation getModelEvaluation(ModelEvaluationName name) { /** * Gets a ModelEvaluation. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String name =
+   *       ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]").toString();
+   *   ModelEvaluation response = modelServiceClient.getModelEvaluation(name);
+   * }
+   * }
+ * * @param name Required. The name of the ModelEvaluation resource. Format: *

`projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -545,6 +904,20 @@ public final ModelEvaluation getModelEvaluation(String name) { /** * Gets a ModelEvaluation. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   GetModelEvaluationRequest request =
+   *       GetModelEvaluationRequest.newBuilder()
+   *           .setName(
+   *               ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]")
+   *                   .toString())
+   *           .build();
+   *   ModelEvaluation response = modelServiceClient.getModelEvaluation(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -557,6 +930,21 @@ public final ModelEvaluation getModelEvaluation(GetModelEvaluationRequest reques * Gets a ModelEvaluation. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   GetModelEvaluationRequest request =
+   *       GetModelEvaluationRequest.newBuilder()
+   *           .setName(
+   *               ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       modelServiceClient.getModelEvaluationCallable().futureCall(request);
+   *   // Do something.
+   *   ModelEvaluation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getModelEvaluationCallable() { @@ -567,6 +955,17 @@ public final ModelEvaluation getModelEvaluation(GetModelEvaluationRequest reques /** * Lists ModelEvaluations in a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ModelName parent = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]");
+   *   for (ModelEvaluation element : modelServiceClient.listModelEvaluations(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Model to list the ModelEvaluations from. * Format: `projects/{project}/locations/{location}/models/{model}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -583,6 +982,17 @@ public final ListModelEvaluationsPagedResponse listModelEvaluations(ModelName pa /** * Lists ModelEvaluations in a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String parent = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString();
+   *   for (ModelEvaluation element : modelServiceClient.listModelEvaluations(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Model to list the ModelEvaluations from. * Format: `projects/{project}/locations/{location}/models/{model}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -597,6 +1007,25 @@ public final ListModelEvaluationsPagedResponse listModelEvaluations(String paren /** * Lists ModelEvaluations in a Model. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ListModelEvaluationsRequest request =
+   *       ListModelEvaluationsRequest.newBuilder()
+   *           .setParent(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   for (ModelEvaluation element :
+   *       modelServiceClient.listModelEvaluations(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -610,6 +1039,25 @@ public final ListModelEvaluationsPagedResponse listModelEvaluations( * Lists ModelEvaluations in a Model. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ListModelEvaluationsRequest request =
+   *       ListModelEvaluationsRequest.newBuilder()
+   *           .setParent(ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       modelServiceClient.listModelEvaluationsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (ModelEvaluation element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listModelEvaluationsPagedCallable() { @@ -621,6 +1069,24 @@ public final ListModelEvaluationsPagedResponse listModelEvaluations( * Lists ModelEvaluations in a Model. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   while (true) {
+   *     ListModelEvaluationsResponse response =
+   *         modelServiceClient.listModelEvaluationsCallable().call(request);
+   *     for (ModelEvaluation element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listModelEvaluationsCallable() { @@ -631,6 +1097,17 @@ public final ListModelEvaluationsPagedResponse listModelEvaluations( /** * Gets a ModelEvaluationSlice. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ModelEvaluationSliceName name =
+   *       ModelEvaluationSliceName.of(
+   *           "[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]");
+   *   ModelEvaluationSlice response = modelServiceClient.getModelEvaluationSlice(name);
+   * }
+   * }
+ * * @param name Required. The name of the ModelEvaluationSlice resource. Format: *

`projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}/slices/{slice}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -647,6 +1124,18 @@ public final ModelEvaluationSlice getModelEvaluationSlice(ModelEvaluationSliceNa /** * Gets a ModelEvaluationSlice. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String name =
+   *       ModelEvaluationSliceName.of(
+   *               "[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]")
+   *           .toString();
+   *   ModelEvaluationSlice response = modelServiceClient.getModelEvaluationSlice(name);
+   * }
+   * }
+ * * @param name Required. The name of the ModelEvaluationSlice resource. Format: *

`projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}/slices/{slice}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -661,6 +1150,21 @@ public final ModelEvaluationSlice getModelEvaluationSlice(String name) { /** * Gets a ModelEvaluationSlice. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   GetModelEvaluationSliceRequest request =
+   *       GetModelEvaluationSliceRequest.newBuilder()
+   *           .setName(
+   *               ModelEvaluationSliceName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]")
+   *                   .toString())
+   *           .build();
+   *   ModelEvaluationSlice response = modelServiceClient.getModelEvaluationSlice(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -674,6 +1178,22 @@ public final ModelEvaluationSlice getModelEvaluationSlice( * Gets a ModelEvaluationSlice. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   GetModelEvaluationSliceRequest request =
+   *       GetModelEvaluationSliceRequest.newBuilder()
+   *           .setName(
+   *               ModelEvaluationSliceName.of(
+   *                       "[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       modelServiceClient.getModelEvaluationSliceCallable().futureCall(request);
+   *   // Do something.
+   *   ModelEvaluationSlice response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getModelEvaluationSliceCallable() { @@ -684,6 +1204,19 @@ public final ModelEvaluationSlice getModelEvaluationSlice( /** * Lists ModelEvaluationSlices in a ModelEvaluation. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ModelEvaluationName parent =
+   *       ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]");
+   *   for (ModelEvaluationSlice element :
+   *       modelServiceClient.listModelEvaluationSlices(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the ModelEvaluation to list the * ModelEvaluationSlices from. Format: *

`projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}` @@ -702,6 +1235,19 @@ public final ListModelEvaluationSlicesPagedResponse listModelEvaluationSlices( /** * Lists ModelEvaluationSlices in a ModelEvaluation. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   String parent =
+   *       ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]").toString();
+   *   for (ModelEvaluationSlice element :
+   *       modelServiceClient.listModelEvaluationSlices(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the ModelEvaluation to list the * ModelEvaluationSlices from. Format: *

`projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}` @@ -717,6 +1263,27 @@ public final ListModelEvaluationSlicesPagedResponse listModelEvaluationSlices(St /** * Lists ModelEvaluationSlices in a ModelEvaluation. * + *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ListModelEvaluationSlicesRequest request =
+   *       ListModelEvaluationSlicesRequest.newBuilder()
+   *           .setParent(
+   *               ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]")
+   *                   .toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   for (ModelEvaluationSlice element :
+   *       modelServiceClient.listModelEvaluationSlices(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -730,6 +1297,27 @@ public final ListModelEvaluationSlicesPagedResponse listModelEvaluationSlices( * Lists ModelEvaluationSlices in a ModelEvaluation. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   ListModelEvaluationSlicesRequest request =
+   *       ListModelEvaluationSlicesRequest.newBuilder()
+   *           .setParent(
+   *               ModelEvaluationName.of("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]")
+   *                   .toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       modelServiceClient.listModelEvaluationSlicesPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (ModelEvaluationSlice element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable< ListModelEvaluationSlicesRequest, ListModelEvaluationSlicesPagedResponse> @@ -742,6 +1330,24 @@ public final ListModelEvaluationSlicesPagedResponse listModelEvaluationSlices( * Lists ModelEvaluationSlices in a ModelEvaluation. * *

Sample code: + * + *

{@code
+   * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+   *   while (true) {
+   *     ListModelEvaluationSlicesResponse response =
+   *         modelServiceClient.listModelEvaluationSlicesCallable().call(request);
+   *     for (ModelEvaluationSlice element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listModelEvaluationSlicesCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PipelineServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PipelineServiceClient.java index b95b13edb..c9b3a0ae8 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PipelineServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PipelineServiceClient.java @@ -46,6 +46,15 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().build();
+ *   TrainingPipeline response =
+ *       pipelineServiceClient.createTrainingPipeline(parent, trainingPipeline);
+ * }
+ * }
+ * *

Note: close() needs to be called on the PipelineServiceClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). @@ -163,6 +172,17 @@ public final OperationsClient getOperationsClient() { /** * Creates a TrainingPipeline. A created TrainingPipeline right away will be attempted to be run. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().build();
+   *   TrainingPipeline response =
+   *       pipelineServiceClient.createTrainingPipeline(parent, trainingPipeline);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the TrainingPipeline in. * Format: `projects/{project}/locations/{location}` * @param trainingPipeline Required. The TrainingPipeline to create. @@ -182,6 +202,17 @@ public final TrainingPipeline createTrainingPipeline( /** * Creates a TrainingPipeline. A created TrainingPipeline right away will be attempted to be run. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().build();
+   *   TrainingPipeline response =
+   *       pipelineServiceClient.createTrainingPipeline(parent, trainingPipeline);
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to create the TrainingPipeline in. * Format: `projects/{project}/locations/{location}` * @param trainingPipeline Required. The TrainingPipeline to create. @@ -201,6 +232,19 @@ public final TrainingPipeline createTrainingPipeline( /** * Creates a TrainingPipeline. A created TrainingPipeline right away will be attempted to be run. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   CreateTrainingPipelineRequest request =
+   *       CreateTrainingPipelineRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setTrainingPipeline(TrainingPipeline.newBuilder().build())
+   *           .build();
+   *   TrainingPipeline response = pipelineServiceClient.createTrainingPipeline(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -213,6 +257,20 @@ public final TrainingPipeline createTrainingPipeline(CreateTrainingPipelineReque * Creates a TrainingPipeline. A created TrainingPipeline right away will be attempted to be run. * *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   CreateTrainingPipelineRequest request =
+   *       CreateTrainingPipelineRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setTrainingPipeline(TrainingPipeline.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       pipelineServiceClient.createTrainingPipelineCallable().futureCall(request);
+   *   // Do something.
+   *   TrainingPipeline response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createTrainingPipelineCallable() { @@ -223,6 +281,16 @@ public final TrainingPipeline createTrainingPipeline(CreateTrainingPipelineReque /** * Gets a TrainingPipeline. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   TrainingPipelineName name =
+   *       TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]");
+   *   TrainingPipeline response = pipelineServiceClient.getTrainingPipeline(name);
+   * }
+   * }
+ * * @param name Required. The name of the TrainingPipeline resource. Format: *

`projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -239,6 +307,16 @@ public final TrainingPipeline getTrainingPipeline(TrainingPipelineName name) { /** * Gets a TrainingPipeline. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   String name =
+   *       TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]").toString();
+   *   TrainingPipeline response = pipelineServiceClient.getTrainingPipeline(name);
+   * }
+   * }
+ * * @param name Required. The name of the TrainingPipeline resource. Format: *

`projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -253,6 +331,20 @@ public final TrainingPipeline getTrainingPipeline(String name) { /** * Gets a TrainingPipeline. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   GetTrainingPipelineRequest request =
+   *       GetTrainingPipelineRequest.newBuilder()
+   *           .setName(
+   *               TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]")
+   *                   .toString())
+   *           .build();
+   *   TrainingPipeline response = pipelineServiceClient.getTrainingPipeline(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -265,6 +357,21 @@ public final TrainingPipeline getTrainingPipeline(GetTrainingPipelineRequest req * Gets a TrainingPipeline. * *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   GetTrainingPipelineRequest request =
+   *       GetTrainingPipelineRequest.newBuilder()
+   *           .setName(
+   *               TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       pipelineServiceClient.getTrainingPipelineCallable().futureCall(request);
+   *   // Do something.
+   *   TrainingPipeline response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getTrainingPipelineCallable() { @@ -275,6 +382,18 @@ public final TrainingPipeline getTrainingPipeline(GetTrainingPipelineRequest req /** * Lists TrainingPipelines in a Location. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (TrainingPipeline element :
+   *       pipelineServiceClient.listTrainingPipelines(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the TrainingPipelines from. * Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -291,6 +410,18 @@ public final ListTrainingPipelinesPagedResponse listTrainingPipelines(LocationNa /** * Lists TrainingPipelines in a Location. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (TrainingPipeline element :
+   *       pipelineServiceClient.listTrainingPipelines(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The resource name of the Location to list the TrainingPipelines from. * Format: `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -305,6 +436,25 @@ public final ListTrainingPipelinesPagedResponse listTrainingPipelines(String par /** * Lists TrainingPipelines in a Location. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   ListTrainingPipelinesRequest request =
+   *       ListTrainingPipelinesRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   for (TrainingPipeline element :
+   *       pipelineServiceClient.listTrainingPipelines(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -318,6 +468,25 @@ public final ListTrainingPipelinesPagedResponse listTrainingPipelines( * Lists TrainingPipelines in a Location. * *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   ListTrainingPipelinesRequest request =
+   *       ListTrainingPipelinesRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setFilter("filter-1274492040")
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       pipelineServiceClient.listTrainingPipelinesPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (TrainingPipeline element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listTrainingPipelinesPagedCallable() { @@ -329,6 +498,24 @@ public final ListTrainingPipelinesPagedResponse listTrainingPipelines( * Lists TrainingPipelines in a Location. * *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   while (true) {
+   *     ListTrainingPipelinesResponse response =
+   *         pipelineServiceClient.listTrainingPipelinesCallable().call(request);
+   *     for (TrainingPipeline element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listTrainingPipelinesCallable() { @@ -339,6 +526,16 @@ public final ListTrainingPipelinesPagedResponse listTrainingPipelines( /** * Deletes a TrainingPipeline. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   TrainingPipelineName name =
+   *       TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]");
+   *   pipelineServiceClient.deleteTrainingPipelineAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the TrainingPipeline resource to be deleted. Format: *

`projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -356,6 +553,16 @@ public final OperationFuture deleteTrainingPipel /** * Deletes a TrainingPipeline. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   String name =
+   *       TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]").toString();
+   *   pipelineServiceClient.deleteTrainingPipelineAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The name of the TrainingPipeline resource to be deleted. Format: *

`projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -371,6 +578,20 @@ public final OperationFuture deleteTrainingPipel /** * Deletes a TrainingPipeline. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   DeleteTrainingPipelineRequest request =
+   *       DeleteTrainingPipelineRequest.newBuilder()
+   *           .setName(
+   *               TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]")
+   *                   .toString())
+   *           .build();
+   *   pipelineServiceClient.deleteTrainingPipelineAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -384,6 +605,21 @@ public final OperationFuture deleteTrainingPipel * Deletes a TrainingPipeline. * *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   DeleteTrainingPipelineRequest request =
+   *       DeleteTrainingPipelineRequest.newBuilder()
+   *           .setName(
+   *               TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]")
+   *                   .toString())
+   *           .build();
+   *   OperationFuture future =
+   *       pipelineServiceClient.deleteTrainingPipelineOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteTrainingPipelineOperationCallable() { @@ -395,6 +631,21 @@ public final OperationFuture deleteTrainingPipel * Deletes a TrainingPipeline. * *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   DeleteTrainingPipelineRequest request =
+   *       DeleteTrainingPipelineRequest.newBuilder()
+   *           .setName(
+   *               TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       pipelineServiceClient.deleteTrainingPipelineCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteTrainingPipelineCallable() { @@ -415,6 +666,16 @@ public final OperationFuture deleteTrainingPipel * [TrainingPipeline.state][google.cloud.aiplatform.v1.TrainingPipeline.state] is set to * `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   TrainingPipelineName name =
+   *       TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]");
+   *   pipelineServiceClient.cancelTrainingPipeline(name);
+   * }
+   * }
+ * * @param name Required. The name of the TrainingPipeline to cancel. Format: *

`projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -441,6 +702,16 @@ public final void cancelTrainingPipeline(TrainingPipelineName name) { * [TrainingPipeline.state][google.cloud.aiplatform.v1.TrainingPipeline.state] is set to * `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   String name =
+   *       TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]").toString();
+   *   pipelineServiceClient.cancelTrainingPipeline(name);
+   * }
+   * }
+ * * @param name Required. The name of the TrainingPipeline to cancel. Format: *

`projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -465,6 +736,20 @@ public final void cancelTrainingPipeline(String name) { * [TrainingPipeline.state][google.cloud.aiplatform.v1.TrainingPipeline.state] is set to * `CANCELLED`. * + *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   CancelTrainingPipelineRequest request =
+   *       CancelTrainingPipelineRequest.newBuilder()
+   *           .setName(
+   *               TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]")
+   *                   .toString())
+   *           .build();
+   *   pipelineServiceClient.cancelTrainingPipeline(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -487,6 +772,21 @@ public final void cancelTrainingPipeline(CancelTrainingPipelineRequest request) * `CANCELLED`. * *

Sample code: + * + *

{@code
+   * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+   *   CancelTrainingPipelineRequest request =
+   *       CancelTrainingPipelineRequest.newBuilder()
+   *           .setName(
+   *               TrainingPipelineName.of("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture future =
+   *       pipelineServiceClient.cancelTrainingPipelineCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable cancelTrainingPipelineCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PredictionServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PredictionServiceClient.java index e08a935f6..75fd35e55 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PredictionServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/PredictionServiceClient.java @@ -34,6 +34,15 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (PredictionServiceClient predictionServiceClient = PredictionServiceClient.create()) {
+ *   EndpointName endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+ *   List instances = new ArrayList<>();
+ *   Value parameters = Value.newBuilder().build();
+ *   PredictResponse response = predictionServiceClient.predict(endpoint, instances, parameters);
+ * }
+ * }
+ * *

Note: close() needs to be called on the PredictionServiceClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). @@ -140,6 +149,17 @@ public PredictionServiceStub getStub() { /** * Perform an online prediction. * + *

Sample code: + * + *

{@code
+   * try (PredictionServiceClient predictionServiceClient = PredictionServiceClient.create()) {
+   *   EndpointName endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+   *   List instances = new ArrayList<>();
+   *   Value parameters = Value.newBuilder().build();
+   *   PredictResponse response = predictionServiceClient.predict(endpoint, instances, parameters);
+   * }
+   * }
+ * * @param endpoint Required. The name of the Endpoint requested to serve the prediction. Format: * `projects/{project}/locations/{location}/endpoints/{endpoint}` * @param instances Required. The instances that are the input to the prediction call. A @@ -172,6 +192,17 @@ public final PredictResponse predict( /** * Perform an online prediction. * + *

Sample code: + * + *

{@code
+   * try (PredictionServiceClient predictionServiceClient = PredictionServiceClient.create()) {
+   *   String endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString();
+   *   List instances = new ArrayList<>();
+   *   Value parameters = Value.newBuilder().build();
+   *   PredictResponse response = predictionServiceClient.predict(endpoint, instances, parameters);
+   * }
+   * }
+ * * @param endpoint Required. The name of the Endpoint requested to serve the prediction. Format: * `projects/{project}/locations/{location}/endpoints/{endpoint}` * @param instances Required. The instances that are the input to the prediction call. A @@ -203,6 +234,20 @@ public final PredictResponse predict(String endpoint, List instances, Val /** * Perform an online prediction. * + *

Sample code: + * + *

{@code
+   * try (PredictionServiceClient predictionServiceClient = PredictionServiceClient.create()) {
+   *   PredictRequest request =
+   *       PredictRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .addAllInstances(new ArrayList())
+   *           .setParameters(Value.newBuilder().build())
+   *           .build();
+   *   PredictResponse response = predictionServiceClient.predict(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -215,6 +260,21 @@ public final PredictResponse predict(PredictRequest request) { * Perform an online prediction. * *

Sample code: + * + *

{@code
+   * try (PredictionServiceClient predictionServiceClient = PredictionServiceClient.create()) {
+   *   PredictRequest request =
+   *       PredictRequest.newBuilder()
+   *           .setEndpoint(EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]").toString())
+   *           .addAllInstances(new ArrayList())
+   *           .setParameters(Value.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       predictionServiceClient.predictCallable().futureCall(request);
+   *   // Do something.
+   *   PredictResponse response = future.get();
+   * }
+   * }
*/ public final UnaryCallable predictCallable() { return stub.predictCallable(); diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/SpecialistPoolServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/SpecialistPoolServiceClient.java index d0a5235b4..3e340b129 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/SpecialistPoolServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/SpecialistPoolServiceClient.java @@ -51,6 +51,15 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+ *     SpecialistPoolServiceClient.create()) {
+ *   SpecialistPoolName name =
+ *       SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]");
+ *   SpecialistPool response = specialistPoolServiceClient.getSpecialistPool(name);
+ * }
+ * }
+ * *

Note: close() needs to be called on the SpecialistPoolServiceClient object to clean up * resources such as threads. In the example above, try-with-resources is used, which automatically * calls close(). @@ -168,6 +177,18 @@ public final OperationsClient getOperationsClient() { /** * Creates a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   SpecialistPool specialistPool = SpecialistPool.newBuilder().build();
+   *   SpecialistPool response =
+   *       specialistPoolServiceClient.createSpecialistPoolAsync(parent, specialistPool).get();
+   * }
+   * }
+ * * @param parent Required. The parent Project name for the new SpecialistPool. The form is * `projects/{project}/locations/{location}`. * @param specialistPool Required. The SpecialistPool to create. @@ -187,6 +208,18 @@ public final OperationsClient getOperationsClient() { /** * Creates a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   SpecialistPool specialistPool = SpecialistPool.newBuilder().build();
+   *   SpecialistPool response =
+   *       specialistPoolServiceClient.createSpecialistPoolAsync(parent, specialistPool).get();
+   * }
+   * }
+ * * @param parent Required. The parent Project name for the new SpecialistPool. The form is * `projects/{project}/locations/{location}`. * @param specialistPool Required. The SpecialistPool to create. @@ -206,6 +239,21 @@ public final OperationsClient getOperationsClient() { /** * Creates a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   CreateSpecialistPoolRequest request =
+   *       CreateSpecialistPoolRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setSpecialistPool(SpecialistPool.newBuilder().build())
+   *           .build();
+   *   SpecialistPool response =
+   *       specialistPoolServiceClient.createSpecialistPoolAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -219,6 +267,21 @@ public final OperationsClient getOperationsClient() { * Creates a SpecialistPool. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   CreateSpecialistPoolRequest request =
+   *       CreateSpecialistPoolRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setSpecialistPool(SpecialistPool.newBuilder().build())
+   *           .build();
+   *   OperationFuture future =
+   *       specialistPoolServiceClient.createSpecialistPoolOperationCallable().futureCall(request);
+   *   // Do something.
+   *   SpecialistPool response = future.get();
+   * }
+   * }
*/ public final OperationCallable< CreateSpecialistPoolRequest, SpecialistPool, CreateSpecialistPoolOperationMetadata> @@ -231,6 +294,21 @@ public final OperationsClient getOperationsClient() { * Creates a SpecialistPool. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   CreateSpecialistPoolRequest request =
+   *       CreateSpecialistPoolRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setSpecialistPool(SpecialistPool.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       specialistPoolServiceClient.createSpecialistPoolCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createSpecialistPoolCallable() { @@ -241,6 +319,17 @@ public final OperationsClient getOperationsClient() { /** * Gets a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   SpecialistPoolName name =
+   *       SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]");
+   *   SpecialistPool response = specialistPoolServiceClient.getSpecialistPool(name);
+   * }
+   * }
+ * * @param name Required. The name of the SpecialistPool resource. The form is *

`projects/{project}/locations/{location}/specialistPools/{specialist_pool}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -257,6 +346,17 @@ public final SpecialistPool getSpecialistPool(SpecialistPoolName name) { /** * Gets a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   String name =
+   *       SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]").toString();
+   *   SpecialistPool response = specialistPoolServiceClient.getSpecialistPool(name);
+   * }
+   * }
+ * * @param name Required. The name of the SpecialistPool resource. The form is *

`projects/{project}/locations/{location}/specialistPools/{specialist_pool}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -270,6 +370,20 @@ public final SpecialistPool getSpecialistPool(String name) { /** * Gets a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   GetSpecialistPoolRequest request =
+   *       GetSpecialistPoolRequest.newBuilder()
+   *           .setName(
+   *               SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]").toString())
+   *           .build();
+   *   SpecialistPool response = specialistPoolServiceClient.getSpecialistPool(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -282,6 +396,21 @@ public final SpecialistPool getSpecialistPool(GetSpecialistPoolRequest request) * Gets a SpecialistPool. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   GetSpecialistPoolRequest request =
+   *       GetSpecialistPoolRequest.newBuilder()
+   *           .setName(
+   *               SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]").toString())
+   *           .build();
+   *   ApiFuture future =
+   *       specialistPoolServiceClient.getSpecialistPoolCallable().futureCall(request);
+   *   // Do something.
+   *   SpecialistPool response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getSpecialistPoolCallable() { return stub.getSpecialistPoolCallable(); @@ -291,6 +420,19 @@ public final UnaryCallable getSpeciali /** * Lists SpecialistPools in a Location. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (SpecialistPool element :
+   *       specialistPoolServiceClient.listSpecialistPools(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The name of the SpecialistPool's parent resource. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -307,6 +449,19 @@ public final ListSpecialistPoolsPagedResponse listSpecialistPools(LocationName p /** * Lists SpecialistPools in a Location. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (SpecialistPool element :
+   *       specialistPoolServiceClient.listSpecialistPools(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param parent Required. The name of the SpecialistPool's parent resource. Format: * `projects/{project}/locations/{location}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -321,6 +476,25 @@ public final ListSpecialistPoolsPagedResponse listSpecialistPools(String parent) /** * Lists SpecialistPools in a Location. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   ListSpecialistPoolsRequest request =
+   *       ListSpecialistPoolsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   for (SpecialistPool element :
+   *       specialistPoolServiceClient.listSpecialistPools(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -334,6 +508,25 @@ public final ListSpecialistPoolsPagedResponse listSpecialistPools( * Lists SpecialistPools in a Location. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   ListSpecialistPoolsRequest request =
+   *       ListSpecialistPoolsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       specialistPoolServiceClient.listSpecialistPoolsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (SpecialistPool element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listSpecialistPoolsPagedCallable() { @@ -345,6 +538,25 @@ public final ListSpecialistPoolsPagedResponse listSpecialistPools( * Lists SpecialistPools in a Location. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   while (true) {
+   *     ListSpecialistPoolsResponse response =
+   *         specialistPoolServiceClient.listSpecialistPoolsCallable().call(request);
+   *     for (SpecialistPool element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
*/ public final UnaryCallable listSpecialistPoolsCallable() { @@ -355,6 +567,17 @@ public final ListSpecialistPoolsPagedResponse listSpecialistPools( /** * Deletes a SpecialistPool as well as all Specialists in the pool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   SpecialistPoolName name =
+   *       SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]");
+   *   specialistPoolServiceClient.deleteSpecialistPoolAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The resource name of the SpecialistPool to delete. Format: * `projects/{project}/locations/{location}/specialistPools/{specialist_pool}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -372,6 +595,17 @@ public final OperationFuture deleteSpecialistPoo /** * Deletes a SpecialistPool as well as all Specialists in the pool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   String name =
+   *       SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]").toString();
+   *   specialistPoolServiceClient.deleteSpecialistPoolAsync(name).get();
+   * }
+   * }
+ * * @param name Required. The resource name of the SpecialistPool to delete. Format: * `projects/{project}/locations/{location}/specialistPools/{specialist_pool}` * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -387,6 +621,21 @@ public final OperationFuture deleteSpecialistPoo /** * Deletes a SpecialistPool as well as all Specialists in the pool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   DeleteSpecialistPoolRequest request =
+   *       DeleteSpecialistPoolRequest.newBuilder()
+   *           .setName(
+   *               SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]").toString())
+   *           .setForce(true)
+   *           .build();
+   *   specialistPoolServiceClient.deleteSpecialistPoolAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -400,6 +649,22 @@ public final OperationFuture deleteSpecialistPoo * Deletes a SpecialistPool as well as all Specialists in the pool. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   DeleteSpecialistPoolRequest request =
+   *       DeleteSpecialistPoolRequest.newBuilder()
+   *           .setName(
+   *               SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]").toString())
+   *           .setForce(true)
+   *           .build();
+   *   OperationFuture future =
+   *       specialistPoolServiceClient.deleteSpecialistPoolOperationCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final OperationCallable deleteSpecialistPoolOperationCallable() { @@ -411,6 +676,22 @@ public final OperationFuture deleteSpecialistPoo * Deletes a SpecialistPool as well as all Specialists in the pool. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   DeleteSpecialistPoolRequest request =
+   *       DeleteSpecialistPoolRequest.newBuilder()
+   *           .setName(
+   *               SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]").toString())
+   *           .setForce(true)
+   *           .build();
+   *   ApiFuture future =
+   *       specialistPoolServiceClient.deleteSpecialistPoolCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable deleteSpecialistPoolCallable() { @@ -421,6 +702,18 @@ public final OperationFuture deleteSpecialistPoo /** * Updates a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   SpecialistPool specialistPool = SpecialistPool.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   SpecialistPool response =
+   *       specialistPoolServiceClient.updateSpecialistPoolAsync(specialistPool, updateMask).get();
+   * }
+   * }
+ * * @param specialistPool Required. The SpecialistPool which replaces the resource on the server. * @param updateMask Required. The update mask applies to the resource. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -439,6 +732,21 @@ public final OperationFuture deleteSpecialistPoo /** * Updates a SpecialistPool. * + *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   UpdateSpecialistPoolRequest request =
+   *       UpdateSpecialistPoolRequest.newBuilder()
+   *           .setSpecialistPool(SpecialistPool.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   SpecialistPool response =
+   *       specialistPoolServiceClient.updateSpecialistPoolAsync(request).get();
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -452,6 +760,21 @@ public final OperationFuture deleteSpecialistPoo * Updates a SpecialistPool. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   UpdateSpecialistPoolRequest request =
+   *       UpdateSpecialistPoolRequest.newBuilder()
+   *           .setSpecialistPool(SpecialistPool.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   OperationFuture future =
+   *       specialistPoolServiceClient.updateSpecialistPoolOperationCallable().futureCall(request);
+   *   // Do something.
+   *   SpecialistPool response = future.get();
+   * }
+   * }
*/ public final OperationCallable< UpdateSpecialistPoolRequest, SpecialistPool, UpdateSpecialistPoolOperationMetadata> @@ -464,6 +787,21 @@ public final OperationFuture deleteSpecialistPoo * Updates a SpecialistPool. * *

Sample code: + * + *

{@code
+   * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+   *     SpecialistPoolServiceClient.create()) {
+   *   UpdateSpecialistPoolRequest request =
+   *       UpdateSpecialistPoolRequest.newBuilder()
+   *           .setSpecialistPool(SpecialistPool.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       specialistPoolServiceClient.updateSpecialistPoolCallable().futureCall(request);
+   *   // Do something.
+   *   Operation response = future.get();
+   * }
+   * }
*/ public final UnaryCallable updateSpecialistPoolCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/package-info.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/package-info.java index 1065b05de..8d67171cc 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/package-info.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/package-info.java @@ -21,16 +21,38 @@ * *

Sample for DatasetServiceClient: * + *

{@code
+ * try (DatasetServiceClient datasetServiceClient = DatasetServiceClient.create()) {
+ *   DatasetName name = DatasetName.of("[PROJECT]", "[LOCATION]", "[DATASET]");
+ *   Dataset response = datasetServiceClient.getDataset(name);
+ * }
+ * }
+ * *

======================= EndpointServiceClient ======================= * *

Sample for EndpointServiceClient: * + *

{@code
+ * try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
+ *   EndpointName name = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+ *   Endpoint response = endpointServiceClient.getEndpoint(name);
+ * }
+ * }
+ * *

======================= JobServiceClient ======================= * *

Service Description: A service for creating and managing AI Platform's jobs. * *

Sample for JobServiceClient: * + *

{@code
+ * try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   CustomJob customJob = CustomJob.newBuilder().build();
+ *   CustomJob response = jobServiceClient.createCustomJob(parent, customJob);
+ * }
+ * }
+ * *

======================= MigrationServiceClient ======================= * *

Service Description: A service that migrates resources from automl.googleapis.com, @@ -38,24 +60,59 @@ * *

Sample for MigrationServiceClient: * + *

{@code
+ * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   for (MigratableResource element :
+ *       migrationServiceClient.searchMigratableResources(parent).iterateAll()) {
+ *     // doThingsWith(element);
+ *   }
+ * }
+ * }
+ * *

======================= ModelServiceClient ======================= * *

Service Description: A service for managing AI Platform's machine learning Models. * *

Sample for ModelServiceClient: * + *

{@code
+ * try (ModelServiceClient modelServiceClient = ModelServiceClient.create()) {
+ *   ModelName name = ModelName.of("[PROJECT]", "[LOCATION]", "[MODEL]");
+ *   Model response = modelServiceClient.getModel(name);
+ * }
+ * }
+ * *

======================= PipelineServiceClient ======================= * *

Service Description: A service for creating and managing AI Platform's pipelines. * *

Sample for PipelineServiceClient: * + *

{@code
+ * try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().build();
+ *   TrainingPipeline response =
+ *       pipelineServiceClient.createTrainingPipeline(parent, trainingPipeline);
+ * }
+ * }
+ * *

======================= PredictionServiceClient ======================= * *

Service Description: A service for online predictions and explanations. * *

Sample for PredictionServiceClient: * + *

{@code
+ * try (PredictionServiceClient predictionServiceClient = PredictionServiceClient.create()) {
+ *   EndpointName endpoint = EndpointName.of("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
+ *   List instances = new ArrayList<>();
+ *   Value parameters = Value.newBuilder().build();
+ *   PredictResponse response = predictionServiceClient.predict(endpoint, instances, parameters);
+ * }
+ * }
+ * *

======================= SpecialistPoolServiceClient ======================= * *

Service Description: A service for creating and managing Customer SpecialistPools. When @@ -65,6 +122,15 @@ * CrowdCompute console. * *

Sample for SpecialistPoolServiceClient: + * + *

{@code
+ * try (SpecialistPoolServiceClient specialistPoolServiceClient =
+ *     SpecialistPoolServiceClient.create()) {
+ *   SpecialistPoolName name =
+ *       SpecialistPoolName.of("[PROJECT]", "[LOCATION]", "[SPECIALIST_POOL]");
+ *   SpecialistPool response = specialistPoolServiceClient.getSpecialistPool(name);
+ * }
+ * }
*/ @Generated("by gapic-generator-java") package com.google.cloud.aiplatform.v1; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcDatasetServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcDatasetServiceStub.java index f24e1ce12..c7ec877b0 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcDatasetServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcDatasetServiceStub.java @@ -442,75 +442,92 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable createDatasetCallable() { return createDatasetCallable; } + @Override public OperationCallable createDatasetOperationCallable() { return createDatasetOperationCallable; } + @Override public UnaryCallable getDatasetCallable() { return getDatasetCallable; } + @Override public UnaryCallable updateDatasetCallable() { return updateDatasetCallable; } + @Override public UnaryCallable listDatasetsCallable() { return listDatasetsCallable; } + @Override public UnaryCallable listDatasetsPagedCallable() { return listDatasetsPagedCallable; } + @Override public UnaryCallable deleteDatasetCallable() { return deleteDatasetCallable; } + @Override public OperationCallable deleteDatasetOperationCallable() { return deleteDatasetOperationCallable; } + @Override public UnaryCallable importDataCallable() { return importDataCallable; } + @Override public OperationCallable importDataOperationCallable() { return importDataOperationCallable; } + @Override public UnaryCallable exportDataCallable() { return exportDataCallable; } + @Override public OperationCallable exportDataOperationCallable() { return exportDataOperationCallable; } + @Override public UnaryCallable listDataItemsCallable() { return listDataItemsCallable; } + @Override public UnaryCallable listDataItemsPagedCallable() { return listDataItemsPagedCallable; } + 
@Override public UnaryCallable getAnnotationSpecCallable() { return getAnnotationSpecCallable; } + @Override public UnaryCallable listAnnotationsCallable() { return listAnnotationsCallable; } + @Override public UnaryCallable listAnnotationsPagedCallable() { return listAnnotationsPagedCallable; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcEndpointServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcEndpointServiceStub.java index c6c4098ec..43f103873 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcEndpointServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcEndpointServiceStub.java @@ -340,54 +340,66 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable createEndpointCallable() { return createEndpointCallable; } + @Override public OperationCallable createEndpointOperationCallable() { return createEndpointOperationCallable; } + @Override public UnaryCallable getEndpointCallable() { return getEndpointCallable; } + @Override public UnaryCallable listEndpointsCallable() { return listEndpointsCallable; } + @Override public UnaryCallable listEndpointsPagedCallable() { return listEndpointsPagedCallable; } + @Override public UnaryCallable updateEndpointCallable() { return updateEndpointCallable; } + @Override public UnaryCallable deleteEndpointCallable() { return deleteEndpointCallable; } + @Override public OperationCallable deleteEndpointOperationCallable() { return deleteEndpointOperationCallable; } + @Override public UnaryCallable deployModelCallable() { return deployModelCallable; } + @Override public OperationCallable deployModelOperationCallable() { return deployModelOperationCallable; } + @Override public UnaryCallable undeployModelCallable() { return undeployModelCallable; } + @Override public OperationCallable< UndeployModelRequest, 
UndeployModelResponse, UndeployModelOperationMetadata> undeployModelOperationCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcJobServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcJobServiceStub.java index 8f5a1b98c..f42279d82 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcJobServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcJobServiceStub.java @@ -816,134 +816,162 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable createCustomJobCallable() { return createCustomJobCallable; } + @Override public UnaryCallable getCustomJobCallable() { return getCustomJobCallable; } + @Override public UnaryCallable listCustomJobsCallable() { return listCustomJobsCallable; } + @Override public UnaryCallable listCustomJobsPagedCallable() { return listCustomJobsPagedCallable; } + @Override public UnaryCallable deleteCustomJobCallable() { return deleteCustomJobCallable; } + @Override public OperationCallable deleteCustomJobOperationCallable() { return deleteCustomJobOperationCallable; } + @Override public UnaryCallable cancelCustomJobCallable() { return cancelCustomJobCallable; } + @Override public UnaryCallable createDataLabelingJobCallable() { return createDataLabelingJobCallable; } + @Override public UnaryCallable getDataLabelingJobCallable() { return getDataLabelingJobCallable; } + @Override public UnaryCallable listDataLabelingJobsCallable() { return listDataLabelingJobsCallable; } + @Override public UnaryCallable listDataLabelingJobsPagedCallable() { return listDataLabelingJobsPagedCallable; } + @Override public UnaryCallable deleteDataLabelingJobCallable() { return deleteDataLabelingJobCallable; } + @Override public OperationCallable deleteDataLabelingJobOperationCallable() { return deleteDataLabelingJobOperationCallable; } + 
@Override public UnaryCallable cancelDataLabelingJobCallable() { return cancelDataLabelingJobCallable; } + @Override public UnaryCallable createHyperparameterTuningJobCallable() { return createHyperparameterTuningJobCallable; } + @Override public UnaryCallable getHyperparameterTuningJobCallable() { return getHyperparameterTuningJobCallable; } + @Override public UnaryCallable listHyperparameterTuningJobsCallable() { return listHyperparameterTuningJobsCallable; } + @Override public UnaryCallable< ListHyperparameterTuningJobsRequest, ListHyperparameterTuningJobsPagedResponse> listHyperparameterTuningJobsPagedCallable() { return listHyperparameterTuningJobsPagedCallable; } + @Override public UnaryCallable deleteHyperparameterTuningJobCallable() { return deleteHyperparameterTuningJobCallable; } + @Override public OperationCallable deleteHyperparameterTuningJobOperationCallable() { return deleteHyperparameterTuningJobOperationCallable; } + @Override public UnaryCallable cancelHyperparameterTuningJobCallable() { return cancelHyperparameterTuningJobCallable; } + @Override public UnaryCallable createBatchPredictionJobCallable() { return createBatchPredictionJobCallable; } + @Override public UnaryCallable getBatchPredictionJobCallable() { return getBatchPredictionJobCallable; } + @Override public UnaryCallable listBatchPredictionJobsCallable() { return listBatchPredictionJobsCallable; } + @Override public UnaryCallable listBatchPredictionJobsPagedCallable() { return listBatchPredictionJobsPagedCallable; } + @Override public UnaryCallable deleteBatchPredictionJobCallable() { return deleteBatchPredictionJobCallable; } + @Override public OperationCallable deleteBatchPredictionJobOperationCallable() { return deleteBatchPredictionJobOperationCallable; } + @Override public UnaryCallable cancelBatchPredictionJobCallable() { return cancelBatchPredictionJobCallable; } diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcMigrationServiceStub.java 
b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcMigrationServiceStub.java index 16ebb249e..95c837a57 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcMigrationServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcMigrationServiceStub.java @@ -191,20 +191,24 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable searchMigratableResourcesCallable() { return searchMigratableResourcesCallable; } + @Override public UnaryCallable searchMigratableResourcesPagedCallable() { return searchMigratableResourcesPagedCallable; } + @Override public UnaryCallable batchMigrateResourcesCallable() { return batchMigrateResourcesCallable; } + @Override public OperationCallable< BatchMigrateResourcesRequest, BatchMigrateResourcesResponse, diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcModelServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcModelServiceStub.java index df0595958..0661fc459 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcModelServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcModelServiceStub.java @@ -448,73 +448,89 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable uploadModelCallable() { return uploadModelCallable; } + @Override public OperationCallable uploadModelOperationCallable() { return uploadModelOperationCallable; } + @Override public UnaryCallable getModelCallable() { return getModelCallable; } + @Override public UnaryCallable listModelsCallable() { return listModelsCallable; } + @Override public UnaryCallable listModelsPagedCallable() { return listModelsPagedCallable; } + @Override public UnaryCallable updateModelCallable() { return updateModelCallable; 
} + @Override public UnaryCallable deleteModelCallable() { return deleteModelCallable; } + @Override public OperationCallable deleteModelOperationCallable() { return deleteModelOperationCallable; } + @Override public UnaryCallable exportModelCallable() { return exportModelCallable; } + @Override public OperationCallable exportModelOperationCallable() { return exportModelOperationCallable; } + @Override public UnaryCallable getModelEvaluationCallable() { return getModelEvaluationCallable; } + @Override public UnaryCallable listModelEvaluationsCallable() { return listModelEvaluationsCallable; } + @Override public UnaryCallable listModelEvaluationsPagedCallable() { return listModelEvaluationsPagedCallable; } + @Override public UnaryCallable getModelEvaluationSliceCallable() { return getModelEvaluationSliceCallable; } + @Override public UnaryCallable listModelEvaluationSlicesCallable() { return listModelEvaluationSlicesCallable; } + @Override public UnaryCallable listModelEvaluationSlicesPagedCallable() { return listModelEvaluationSlicesPagedCallable; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPipelineServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPipelineServiceStub.java index 75c253dea..a557d1a79 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPipelineServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPipelineServiceStub.java @@ -281,34 +281,41 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable createTrainingPipelineCallable() { return createTrainingPipelineCallable; } + @Override public UnaryCallable getTrainingPipelineCallable() { return getTrainingPipelineCallable; } + @Override public UnaryCallable listTrainingPipelinesCallable() { return listTrainingPipelinesCallable; } + @Override public UnaryCallable 
listTrainingPipelinesPagedCallable() { return listTrainingPipelinesPagedCallable; } + @Override public UnaryCallable deleteTrainingPipelineCallable() { return deleteTrainingPipelineCallable; } + @Override public OperationCallable deleteTrainingPipelineOperationCallable() { return deleteTrainingPipelineOperationCallable; } + @Override public UnaryCallable cancelTrainingPipelineCallable() { return cancelTrainingPipelineCallable; } diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPredictionServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPredictionServiceStub.java index b38c6f40a..81945ce07 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPredictionServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcPredictionServiceStub.java @@ -122,6 +122,7 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable predictCallable() { return predictCallable; } diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcSpecialistPoolServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcSpecialistPoolServiceStub.java index 853f40f91..5b0b80664 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcSpecialistPoolServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/stub/GrpcSpecialistPoolServiceStub.java @@ -298,43 +298,52 @@ public GrpcOperationsStub getOperationsStub() { return operationsStub; } + @Override public UnaryCallable createSpecialistPoolCallable() { return createSpecialistPoolCallable; } + @Override public OperationCallable< CreateSpecialistPoolRequest, SpecialistPool, CreateSpecialistPoolOperationMetadata> createSpecialistPoolOperationCallable() { return createSpecialistPoolOperationCallable; } + 
@Override public UnaryCallable getSpecialistPoolCallable() { return getSpecialistPoolCallable; } + @Override public UnaryCallable listSpecialistPoolsCallable() { return listSpecialistPoolsCallable; } + @Override public UnaryCallable listSpecialistPoolsPagedCallable() { return listSpecialistPoolsPagedCallable; } + @Override public UnaryCallable deleteSpecialistPoolCallable() { return deleteSpecialistPoolCallable; } + @Override public OperationCallable deleteSpecialistPoolOperationCallable() { return deleteSpecialistPoolOperationCallable; } + @Override public UnaryCallable updateSpecialistPoolCallable() { return updateSpecialistPoolCallable; } + @Override public OperationCallable< UpdateSpecialistPoolRequest, SpecialistPool, UpdateSpecialistPoolOperationMetadata> updateSpecialistPoolOperationCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java index 5b8927965..f23b81c29 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java @@ -99,7 +99,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class DatasetServiceClient implements BackgroundResource { private final DatasetServiceSettings settings; private final DatasetServiceStub stub; @@ -398,8 +398,7 @@ public final UnaryCallable getDatasetCallable() { * * @param dataset Required. The Dataset which replaces the resource on the server. * @param updateMask Required. The update mask applies to the resource. For the `FieldMask` - * definition, see - *

[FieldMask](https: //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask). + * definition, see [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). * Updatable fields: *

* `display_name` * `description` * `labels` * @throws com.google.api.gax.rpc.ApiException if the remote call fails diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java index dfc367edc..1f03ae479 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java @@ -71,6 +71,7 @@ * DatasetServiceSettings datasetServiceSettings = datasetServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class DatasetServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClient.java index eb1647f5e..5b5723f92 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClient.java @@ -103,7 +103,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class EndpointServiceClient implements BackgroundResource { private final EndpointServiceSettings settings; private final EndpointServiceStub stub; @@ -539,7 +539,8 @@ public final UnaryCallable listEndp * } * * @param endpoint Required. The Endpoint which replaces the resource on the server. - * @param updateMask Required. The update mask applies to the resource. + * @param updateMask Required. The update mask applies to the resource. See + * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Endpoint updateEndpoint(Endpoint endpoint, FieldMask updateMask) { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceSettings.java index 0d18654ac..2a6a8584a 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceSettings.java @@ -69,6 +69,7 @@ * EndpointServiceSettings endpointServiceSettings = endpointServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class EndpointServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceClient.java index 37dc51504..3512c29cc 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceClient.java +++ 
b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceClient.java @@ -101,7 +101,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class JobServiceClient implements BackgroundResource { private final JobServiceSettings settings; private final JobServiceStub stub; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java index 3887fa8f6..643c8a2db 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java @@ -71,6 +71,7 @@ * JobServiceSettings jobServiceSettings = jobServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class JobServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceClient.java index 736f83a56..b71de9648 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceClient.java @@ -106,7 +106,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class MigrationServiceClient implements BackgroundResource { private final MigrationServiceSettings settings; private final MigrationServiceStub stub; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java index 37d770f56..8f084e1e6 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java @@ -68,6 +68,7 @@ * MigrationServiceSettings migrationServiceSettings = migrationServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class MigrationServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClient.java index 9cd80ed85..b64a4d615 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClient.java @@ -101,7 +101,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class ModelServiceClient implements BackgroundResource { private final ModelServiceSettings settings; private final ModelServiceStub stub; @@ -532,9 +532,7 @@ public final UnaryCallable listModelsCall * * @param model Required. The Model which replaces the resource on the server. * @param updateMask Required. The update mask applies to the resource. For the `FieldMask` - * definition, see - *

[FieldMask](https: //developers.google.com/protocol-buffers // - * /docs/reference/google.protobuf#fieldmask). + * definition, see [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Model updateModel(Model model, FieldMask updateMask) { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java index 333a9ee9b..8c7dbb2c3 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java @@ -70,6 +70,7 @@ * ModelServiceSettings modelServiceSettings = modelServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class ModelServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClient.java index 6e5afd9d9..c4e20c14e 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClient.java @@ -105,7 +105,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class PipelineServiceClient implements BackgroundResource { private final PipelineServiceSettings settings; private final PipelineServiceStub stub; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java index 9672468ae..4f1e9d135 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java @@ -69,6 +69,7 @@ * PipelineServiceSettings pipelineServiceSettings = pipelineServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class PipelineServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceClient.java index 2ed582342..fd0edc1ad 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceClient.java @@ -93,7 +93,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class PredictionServiceClient implements BackgroundResource { private final PredictionServiceSettings settings; private final PredictionServiceStub stub; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java index d094cbc0c..403cd46ce 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java @@ -63,6 +63,7 @@ * PredictionServiceSettings predictionServiceSettings = predictionServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class PredictionServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceClient.java index 5e04a15ba..be259ff39 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceClient.java @@ -110,7 +110,7 @@ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") public class SpecialistPoolServiceClient implements BackgroundResource { private final SpecialistPoolServiceSettings settings; private final SpecialistPoolServiceStub stub; diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java index 0f1f1fecc..6ff9a5274 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java @@ -70,6 +70,7 @@ * specialistPoolServiceSettingsBuilder.build(); * } */ +@BetaApi @Generated("by gapic-generator-java") public class SpecialistPoolServiceSettings extends ClientSettings { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/DatasetServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/DatasetServiceStub.java index 43e242b74..45de98a21 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/DatasetServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/DatasetServiceStub.java @@ -20,6 +20,7 @@ import static com.google.cloud.aiplatform.v1beta1.DatasetServiceClient.ListDataItemsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.DatasetServiceClient.ListDatasetsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -55,7 +56,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class DatasetServiceStub implements BackgroundResource { public OperationsStub getOperationsStub() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java index 72d4c8e7e..77a61562b 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.EndpointServiceClient.ListEndpointsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -47,7 +48,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class EndpointServiceStub implements BackgroundResource { public OperationsStub getOperationsStub() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java index 867fbc8b4..131004953 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcDatasetServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java index 4ad34026c..a7b221de8 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java @@ -20,6 +20,7 @@ import static com.google.cloud.aiplatform.v1beta1.DatasetServiceClient.ListDataItemsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.DatasetServiceClient.ListDatasetsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -66,6 +67,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcDatasetServiceStub extends DatasetServiceStub { private static final MethodDescriptor diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceCallableFactory.java index 67f8d8310..1433e3b21 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcEndpointServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java index b47c67867..b72344e8d 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.EndpointServiceClient.ListEndpointsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -58,6 +59,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcEndpointServiceStub extends EndpointServiceStub { private static final MethodDescriptor diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceCallableFactory.java index d1dcbdaa7..6d7e9dbde 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcJobServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java index 867fd331a..def0c8d7f 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java @@ -21,6 +21,7 @@ import static com.google.cloud.aiplatform.v1beta1.JobServiceClient.ListDataLabelingJobsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.JobServiceClient.ListHyperparameterTuningJobsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -75,6 +76,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcJobServiceStub extends JobServiceStub { private static final MethodDescriptor diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceCallableFactory.java index 3f9dd12b2..035e9aa32 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcMigrationServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java index f70ed800b..445fe48ab 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.MigrationServiceClient.SearchMigratableResourcesPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -47,6 +48,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcMigrationServiceStub extends MigrationServiceStub { private static final MethodDescriptor< diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java index f56cc2f5f..688783f6f 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcModelServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java index 69d8b1d13..194d17378 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java @@ -20,6 +20,7 @@ import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelEvaluationsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -66,6 +67,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcModelServiceStub extends ModelServiceStub { private static final MethodDescriptor uploadModelMethodDescriptor = diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceCallableFactory.java index c9289b973..a46d4d76b 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcPipelineServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java index 742b70296..0038ba7b0 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.PipelineServiceClient.ListTrainingPipelinesPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -51,6 +52,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcPipelineServiceStub extends PipelineServiceStub { private static final MethodDescriptor diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceCallableFactory.java index 62f166cdd..11d610333 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcPredictionServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java index 49d493512..cdfb2302b 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -42,6 +43,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcPredictionServiceStub extends PredictionServiceStub { private static final MethodDescriptor predictMethodDescriptor = diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceCallableFactory.java index ec9ec56bc..c639234c2 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceCallableFactory.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceCallableFactory.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -41,7 +42,8 @@ * *

This class is for advanced usage. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public class GrpcSpecialistPoolServiceCallableFactory implements GrpcStubCallableFactory { @Override diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java index 08011b735..dc69b6170 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceClient.ListSpecialistPoolsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -53,6 +54,7 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ +@BetaApi @Generated("by gapic-generator-java") public class GrpcSpecialistPoolServiceStub extends SpecialistPoolServiceStub { private static final MethodDescriptor diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/JobServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/JobServiceStub.java index 2d43a0e8b..35779a161 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/JobServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/JobServiceStub.java @@ -21,6 +21,7 @@ import static com.google.cloud.aiplatform.v1beta1.JobServiceClient.ListDataLabelingJobsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.JobServiceClient.ListHyperparameterTuningJobsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -64,7 +65,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class JobServiceStub implements BackgroundResource { public OperationsStub getOperationsStub() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java index 18d58132f..0838a35d4 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.MigrationServiceClient.SearchMigratableResourcesPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -36,7 +37,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class MigrationServiceStub implements BackgroundResource { public OperationsStub getOperationsStub() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java index 04ee41e41..a0496db40 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java @@ -20,6 +20,7 @@ import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelEvaluationsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -55,7 +56,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class ModelServiceStub implements BackgroundResource { public OperationsStub getOperationsStub() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java index f0520c87a..3cba40f35 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.PipelineServiceClient.ListTrainingPipelinesPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -40,7 +41,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class PipelineServiceStub implements BackgroundResource { public OperationsStub getOperationsStub() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java index 8839170a4..0dc82229b 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java @@ -16,6 +16,7 @@ package com.google.cloud.aiplatform.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.aiplatform.v1beta1.ExplainRequest; @@ -30,7 +31,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class PredictionServiceStub implements BackgroundResource { public UnaryCallable predictCallable() { diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/SpecialistPoolServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/SpecialistPoolServiceStub.java index 03588d0b4..3a30e42df 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/SpecialistPoolServiceStub.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/SpecialistPoolServiceStub.java @@ -18,6 +18,7 @@ import static com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceClient.ListSpecialistPoolsPagedResponse; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -42,7 +43,8 @@ * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") +@BetaApi +@Generated("by gapic-generator-java") public abstract class SpecialistPoolServiceStub implements BackgroundResource { public OperationsStub getOperationsStub() { diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/JobServiceClientTest.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/JobServiceClientTest.java index 698734265..f432bc0c5 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/JobServiceClientTest.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/JobServiceClientTest.java @@ -99,6 +99,7 @@ public void createCustomJobTest() throws Exception { .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString()) .setDisplayName("displayName1714148973") .setJobSpec(CustomJobSpec.newBuilder().build()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -149,6 +150,7 @@ public void createCustomJobTest2() throws Exception { .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString()) .setDisplayName("displayName1714148973") .setJobSpec(CustomJobSpec.newBuilder().build()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -199,6 +201,7 @@ public void getCustomJobTest() throws Exception { .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString()) .setDisplayName("displayName1714148973") .setJobSpec(CustomJobSpec.newBuilder().build()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -246,6 +249,7 @@ public void 
getCustomJobTest2() throws Exception { .setName(CustomJobName.of("[PROJECT]", "[LOCATION]", "[CUSTOM_JOB]").toString()) .setDisplayName("displayName1714148973") .setJobSpec(CustomJobSpec.newBuilder().build()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -539,6 +543,7 @@ public void createDataLabelingJobTest() throws Exception { .setInstructionUri("instructionUri1989242366") .setInputsSchemaUri("inputsSchemaUri1757461538") .setInputs(Value.newBuilder().build()) + .setState(JobState.forNumber(0)) .setLabelingProgress(-685978914) .setCurrentSpend(Money.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) @@ -598,6 +603,7 @@ public void createDataLabelingJobTest2() throws Exception { .setInstructionUri("instructionUri1989242366") .setInputsSchemaUri("inputsSchemaUri1757461538") .setInputs(Value.newBuilder().build()) + .setState(JobState.forNumber(0)) .setLabelingProgress(-685978914) .setCurrentSpend(Money.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) @@ -657,6 +663,7 @@ public void getDataLabelingJobTest() throws Exception { .setInstructionUri("instructionUri1989242366") .setInputsSchemaUri("inputsSchemaUri1757461538") .setInputs(Value.newBuilder().build()) + .setState(JobState.forNumber(0)) .setLabelingProgress(-685978914) .setCurrentSpend(Money.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) @@ -714,6 +721,7 @@ public void getDataLabelingJobTest2() throws Exception { .setInstructionUri("instructionUri1989242366") .setInputsSchemaUri("inputsSchemaUri1757461538") .setInputs(Value.newBuilder().build()) + .setState(JobState.forNumber(0)) .setLabelingProgress(-685978914) .setCurrentSpend(Money.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) @@ -1021,6 +1029,7 @@ public void createHyperparameterTuningJobTest() throws Exception { .setMaxFailedTrialCount(-887662497) 
.setTrialJobSpec(CustomJobSpec.newBuilder().build()) .addAllTrials(new ArrayList()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -1082,6 +1091,7 @@ public void createHyperparameterTuningJobTest2() throws Exception { .setMaxFailedTrialCount(-887662497) .setTrialJobSpec(CustomJobSpec.newBuilder().build()) .addAllTrials(new ArrayList()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -1143,6 +1153,7 @@ public void getHyperparameterTuningJobTest() throws Exception { .setMaxFailedTrialCount(-887662497) .setTrialJobSpec(CustomJobSpec.newBuilder().build()) .addAllTrials(new ArrayList()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -1201,6 +1212,7 @@ public void getHyperparameterTuningJobTest2() throws Exception { .setMaxFailedTrialCount(-887662497) .setTrialJobSpec(CustomJobSpec.newBuilder().build()) .addAllTrials(new ArrayList()) + .setState(JobState.forNumber(0)) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) .setEndTime(Timestamp.newBuilder().build()) @@ -1511,6 +1523,7 @@ public void createBatchPredictionJobTest() throws Exception { .setDedicatedResources(BatchDedicatedResources.newBuilder().build()) .setManualBatchTuningParameters(ManualBatchTuningParameters.newBuilder().build()) .setOutputInfo(BatchPredictionJob.OutputInfo.newBuilder().build()) + .setState(JobState.forNumber(0)) .setError(Status.newBuilder().build()) .addAllPartialFailures(new ArrayList()) .setResourcesConsumed(ResourcesConsumed.newBuilder().build()) @@ -1573,6 +1586,7 @@ public void createBatchPredictionJobTest2() throws Exception { 
.setDedicatedResources(BatchDedicatedResources.newBuilder().build()) .setManualBatchTuningParameters(ManualBatchTuningParameters.newBuilder().build()) .setOutputInfo(BatchPredictionJob.OutputInfo.newBuilder().build()) + .setState(JobState.forNumber(0)) .setError(Status.newBuilder().build()) .addAllPartialFailures(new ArrayList()) .setResourcesConsumed(ResourcesConsumed.newBuilder().build()) @@ -1635,6 +1649,7 @@ public void getBatchPredictionJobTest() throws Exception { .setDedicatedResources(BatchDedicatedResources.newBuilder().build()) .setManualBatchTuningParameters(ManualBatchTuningParameters.newBuilder().build()) .setOutputInfo(BatchPredictionJob.OutputInfo.newBuilder().build()) + .setState(JobState.forNumber(0)) .setError(Status.newBuilder().build()) .addAllPartialFailures(new ArrayList()) .setResourcesConsumed(ResourcesConsumed.newBuilder().build()) @@ -1696,6 +1711,7 @@ public void getBatchPredictionJobTest2() throws Exception { .setDedicatedResources(BatchDedicatedResources.newBuilder().build()) .setManualBatchTuningParameters(ManualBatchTuningParameters.newBuilder().build()) .setOutputInfo(BatchPredictionJob.OutputInfo.newBuilder().build()) + .setState(JobState.forNumber(0)) .setError(Status.newBuilder().build()) .addAllPartialFailures(new ArrayList()) .setResourcesConsumed(ResourcesConsumed.newBuilder().build()) diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockDatasetServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockDatasetServiceImpl.java index 44effe1fc..cf3f355a1 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockDatasetServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockDatasetServiceImpl.java @@ -70,7 +70,13 @@ public void createDataset( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new 
IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateDataset, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -84,7 +90,13 @@ public void getDataset(GetDatasetRequest request, StreamObserver respon } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetDataset, expected %s or %s", + response.getClass().getName(), + Dataset.class.getName(), + Exception.class.getName()))); } } @@ -99,7 +111,13 @@ public void updateDataset( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UpdateDataset, expected %s or %s", + response.getClass().getName(), + Dataset.class.getName(), + Exception.class.getName()))); } } @@ -114,7 +132,13 @@ public void listDatasets( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListDatasets, expected %s or %s", + response.getClass().getName(), + ListDatasetsResponse.class.getName(), + Exception.class.getName()))); } } @@ -129,7 +153,13 @@ public void deleteDataset( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - 
responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteDataset, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -143,7 +173,13 @@ public void importData(ImportDataRequest request, StreamObserver resp } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ImportData, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -157,7 +193,13 @@ public void exportData(ExportDataRequest request, StreamObserver resp } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ExportData, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -172,7 +214,13 @@ public void listDataItems( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListDataItems, expected %s or %s", + response.getClass().getName(), + ListDataItemsResponse.class.getName(), + Exception.class.getName()))); } } @@ -187,7 +235,13 @@ public void getAnnotationSpec( } else if (response instanceof 
Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetAnnotationSpec, expected %s or %s", + response.getClass().getName(), + AnnotationSpec.class.getName(), + Exception.class.getName()))); } } @@ -202,7 +256,13 @@ public void listAnnotations( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListAnnotations, expected %s or %s", + response.getClass().getName(), + ListAnnotationsResponse.class.getName(), + Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockEndpointServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockEndpointServiceImpl.java index ccc761e64..2ce8ad432 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockEndpointServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockEndpointServiceImpl.java @@ -70,7 +70,13 @@ public void createEndpoint( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateEndpoint, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -84,7 +90,13 @@ public void getEndpoint(GetEndpointRequest request, StreamObserver res } else if (response 
instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetEndpoint, expected %s or %s", + response.getClass().getName(), + Endpoint.class.getName(), + Exception.class.getName()))); } } @@ -99,7 +111,13 @@ public void listEndpoints( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListEndpoints, expected %s or %s", + response.getClass().getName(), + ListEndpointsResponse.class.getName(), + Exception.class.getName()))); } } @@ -114,7 +132,13 @@ public void updateEndpoint( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UpdateEndpoint, expected %s or %s", + response.getClass().getName(), + Endpoint.class.getName(), + Exception.class.getName()))); } } @@ -129,7 +153,13 @@ public void deleteEndpoint( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteEndpoint, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -143,7 +173,13 @@ public void deployModel(DeployModelRequest request, 
StreamObserver re } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeployModel, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -158,7 +194,13 @@ public void undeployModel( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UndeployModel, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockJobServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockJobServiceImpl.java index 81e8c05b9..c5a04be14 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockJobServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockJobServiceImpl.java @@ -71,7 +71,13 @@ public void createCustomJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateCustomJob, expected %s or %s", + response.getClass().getName(), + CustomJob.class.getName(), + Exception.class.getName()))); } } @@ -86,7 +92,13 @@ public void getCustomJob( } else if (response instanceof Exception) { 
responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetCustomJob, expected %s or %s", + response.getClass().getName(), + CustomJob.class.getName(), + Exception.class.getName()))); } } @@ -101,7 +113,13 @@ public void listCustomJobs( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListCustomJobs, expected %s or %s", + response.getClass().getName(), + ListCustomJobsResponse.class.getName(), + Exception.class.getName()))); } } @@ -116,7 +134,13 @@ public void deleteCustomJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteCustomJob, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -131,7 +155,13 @@ public void cancelCustomJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CancelCustomJob, expected %s or %s", + response.getClass().getName(), + Empty.class.getName(), + Exception.class.getName()))); } } @@ -146,7 +176,13 @@ public void createDataLabelingJob( } else if (response instanceof Exception) { 
responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateDataLabelingJob, expected %s or %s", + response.getClass().getName(), + DataLabelingJob.class.getName(), + Exception.class.getName()))); } } @@ -161,7 +197,13 @@ public void getDataLabelingJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetDataLabelingJob, expected %s or %s", + response.getClass().getName(), + DataLabelingJob.class.getName(), + Exception.class.getName()))); } } @@ -177,7 +219,13 @@ public void listDataLabelingJobs( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListDataLabelingJobs, expected %s or %s", + response.getClass().getName(), + ListDataLabelingJobsResponse.class.getName(), + Exception.class.getName()))); } } @@ -192,7 +240,13 @@ public void deleteDataLabelingJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteDataLabelingJob, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -207,7 +261,13 @@ public void 
cancelDataLabelingJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CancelDataLabelingJob, expected %s or %s", + response.getClass().getName(), + Empty.class.getName(), + Exception.class.getName()))); } } @@ -223,7 +283,13 @@ public void createHyperparameterTuningJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateHyperparameterTuningJob, expected %s or %s", + response.getClass().getName(), + HyperparameterTuningJob.class.getName(), + Exception.class.getName()))); } } @@ -239,7 +305,13 @@ public void getHyperparameterTuningJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetHyperparameterTuningJob, expected %s or %s", + response.getClass().getName(), + HyperparameterTuningJob.class.getName(), + Exception.class.getName()))); } } @@ -255,7 +327,13 @@ public void listHyperparameterTuningJobs( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListHyperparameterTuningJobs, expected %s or %s", + 
response.getClass().getName(), + ListHyperparameterTuningJobsResponse.class.getName(), + Exception.class.getName()))); } } @@ -270,7 +348,13 @@ public void deleteHyperparameterTuningJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteHyperparameterTuningJob, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -285,7 +369,13 @@ public void cancelHyperparameterTuningJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CancelHyperparameterTuningJob, expected %s or %s", + response.getClass().getName(), + Empty.class.getName(), + Exception.class.getName()))); } } @@ -301,7 +391,13 @@ public void createBatchPredictionJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateBatchPredictionJob, expected %s or %s", + response.getClass().getName(), + BatchPredictionJob.class.getName(), + Exception.class.getName()))); } } @@ -316,7 +412,13 @@ public void getBatchPredictionJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( 
+ String.format( + "Unrecognized response type %s for method GetBatchPredictionJob, expected %s or %s", + response.getClass().getName(), + BatchPredictionJob.class.getName(), + Exception.class.getName()))); } } @@ -332,7 +434,13 @@ public void listBatchPredictionJobs( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListBatchPredictionJobs, expected %s or %s", + response.getClass().getName(), + ListBatchPredictionJobsResponse.class.getName(), + Exception.class.getName()))); } } @@ -347,7 +455,13 @@ public void deleteBatchPredictionJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteBatchPredictionJob, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -362,7 +476,13 @@ public void cancelBatchPredictionJob( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CancelBatchPredictionJob, expected %s or %s", + response.getClass().getName(), + Empty.class.getName(), + Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockMigrationServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockMigrationServiceImpl.java index 
e2935c8ad..0cac9a70a 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockMigrationServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockMigrationServiceImpl.java @@ -71,7 +71,13 @@ public void searchMigratableResources( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method SearchMigratableResources, expected %s or %s", + response.getClass().getName(), + SearchMigratableResourcesResponse.class.getName(), + Exception.class.getName()))); } } @@ -86,7 +92,13 @@ public void batchMigrateResources( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method BatchMigrateResources, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockModelServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockModelServiceImpl.java index 250f9ac78..316657379 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockModelServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockModelServiceImpl.java @@ -69,7 +69,13 @@ public void uploadModel(UploadModelRequest request, StreamObserver re } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response 
type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UploadModel, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -83,7 +89,13 @@ public void getModel(GetModelRequest request, StreamObserver responseObse } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetModel, expected %s or %s", + response.getClass().getName(), + Model.class.getName(), + Exception.class.getName()))); } } @@ -98,7 +110,13 @@ public void listModels( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListModels, expected %s or %s", + response.getClass().getName(), + ListModelsResponse.class.getName(), + Exception.class.getName()))); } } @@ -112,7 +130,13 @@ public void updateModel(UpdateModelRequest request, StreamObserver respon } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UpdateModel, expected %s or %s", + response.getClass().getName(), + Model.class.getName(), + Exception.class.getName()))); } } @@ -126,7 +150,13 @@ public void deleteModel(DeleteModelRequest request, StreamObserver re } else if (response instanceof Exception) { responseObserver.onError(((Exception) 
response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteModel, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -140,7 +170,13 @@ public void exportModel(ExportModelRequest request, StreamObserver re } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ExportModel, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -155,7 +191,13 @@ public void getModelEvaluation( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetModelEvaluation, expected %s or %s", + response.getClass().getName(), + ModelEvaluation.class.getName(), + Exception.class.getName()))); } } @@ -171,7 +213,13 @@ public void listModelEvaluations( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListModelEvaluations, expected %s or %s", + response.getClass().getName(), + ListModelEvaluationsResponse.class.getName(), + Exception.class.getName()))); } } @@ -187,7 +235,13 @@ public void getModelEvaluationSlice( } else if 
(response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetModelEvaluationSlice, expected %s or %s", + response.getClass().getName(), + ModelEvaluationSlice.class.getName(), + Exception.class.getName()))); } } @@ -203,7 +257,13 @@ public void listModelEvaluationSlices( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListModelEvaluationSlices, expected %s or %s", + response.getClass().getName(), + ListModelEvaluationSlicesResponse.class.getName(), + Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPipelineServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPipelineServiceImpl.java index ab1f6eef3..30489c6cd 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPipelineServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPipelineServiceImpl.java @@ -71,7 +71,13 @@ public void createTrainingPipeline( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateTrainingPipeline, expected %s or %s", + response.getClass().getName(), + TrainingPipeline.class.getName(), + Exception.class.getName()))); } } @@ -86,7 +92,13 @@ public void 
getTrainingPipeline( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetTrainingPipeline, expected %s or %s", + response.getClass().getName(), + TrainingPipeline.class.getName(), + Exception.class.getName()))); } } @@ -102,7 +114,13 @@ public void listTrainingPipelines( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListTrainingPipelines, expected %s or %s", + response.getClass().getName(), + ListTrainingPipelinesResponse.class.getName(), + Exception.class.getName()))); } } @@ -117,7 +135,13 @@ public void deleteTrainingPipeline( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteTrainingPipeline, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -132,7 +156,13 @@ public void cancelTrainingPipeline( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CancelTrainingPipeline, expected %s or %s", + response.getClass().getName(), + Empty.class.getName(), + 
Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPredictionServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPredictionServiceImpl.java index d12a7188b..297b7319e 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPredictionServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockPredictionServiceImpl.java @@ -68,7 +68,13 @@ public void predict(PredictRequest request, StreamObserver resp } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method Predict, expected %s or %s", + response.getClass().getName(), + PredictResponse.class.getName(), + Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockSpecialistPoolServiceImpl.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockSpecialistPoolServiceImpl.java index 44e33f3f7..5dc613ff9 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockSpecialistPoolServiceImpl.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/MockSpecialistPoolServiceImpl.java @@ -70,7 +70,13 @@ public void createSpecialistPool( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateSpecialistPool, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + 
Exception.class.getName()))); } } @@ -85,7 +91,13 @@ public void getSpecialistPool( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetSpecialistPool, expected %s or %s", + response.getClass().getName(), + SpecialistPool.class.getName(), + Exception.class.getName()))); } } @@ -101,7 +113,13 @@ public void listSpecialistPools( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListSpecialistPools, expected %s or %s", + response.getClass().getName(), + ListSpecialistPoolsResponse.class.getName(), + Exception.class.getName()))); } } @@ -116,7 +134,13 @@ public void deleteSpecialistPool( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteSpecialistPool, expected %s or %s", + response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } @@ -131,7 +155,13 @@ public void updateSpecialistPool( } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { - responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UpdateSpecialistPool, expected %s or %s", + 
response.getClass().getName(), + Operation.class.getName(), + Exception.class.getName()))); } } } diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/PipelineServiceClientTest.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/PipelineServiceClientTest.java index 63f06af3a..4d63feb29 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/PipelineServiceClientTest.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/PipelineServiceClientTest.java @@ -100,6 +100,7 @@ public void createTrainingPipelineTest() throws Exception { .setTrainingTaskInputs(Value.newBuilder().build()) .setTrainingTaskMetadata(Value.newBuilder().build()) .setModelToUpload(Model.newBuilder().build()) + .setState(PipelineState.forNumber(0)) .setError(Status.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) @@ -157,6 +158,7 @@ public void createTrainingPipelineTest2() throws Exception { .setTrainingTaskInputs(Value.newBuilder().build()) .setTrainingTaskMetadata(Value.newBuilder().build()) .setModelToUpload(Model.newBuilder().build()) + .setState(PipelineState.forNumber(0)) .setError(Status.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) @@ -214,6 +216,7 @@ public void getTrainingPipelineTest() throws Exception { .setTrainingTaskInputs(Value.newBuilder().build()) .setTrainingTaskMetadata(Value.newBuilder().build()) .setModelToUpload(Model.newBuilder().build()) + .setState(PipelineState.forNumber(0)) .setError(Status.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) @@ -269,6 +272,7 @@ public void getTrainingPipelineTest2() throws Exception { .setTrainingTaskInputs(Value.newBuilder().build()) .setTrainingTaskMetadata(Value.newBuilder().build()) .setModelToUpload(Model.newBuilder().build()) + 
.setState(PipelineState.forNumber(0)) .setError(Status.newBuilder().build()) .setCreateTime(Timestamp.newBuilder().build()) .setStartTime(Timestamp.newBuilder().build()) diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java index 3d923f8af..9a2f6acfe 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java @@ -102,6 +102,7 @@ public void createDatasetTest() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -158,6 +159,7 @@ public void createDatasetTest2() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -214,6 +216,7 @@ public void getDatasetTest() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockDatasetService.addResponse(expectedResponse); @@ -259,6 +262,7 @@ public void getDatasetTest2() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockDatasetService.addResponse(expectedResponse); @@ -304,6 +308,7 @@ public void updateDatasetTest() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + 
.setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockDatasetService.addResponse(expectedResponse); diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClientTest.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClientTest.java index 1e6103e42..fe3690851 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClientTest.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/EndpointServiceClientTest.java @@ -101,6 +101,7 @@ public void createEndpointTest() throws Exception { .putAllLabels(new HashMap()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -158,6 +159,7 @@ public void createEndpointTest2() throws Exception { .putAllLabels(new HashMap()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -215,6 +217,7 @@ public void getEndpointTest() throws Exception { .putAllLabels(new HashMap()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockEndpointService.addResponse(expectedResponse); @@ -261,6 +264,7 @@ public void getEndpointTest2() throws Exception { .putAllLabels(new HashMap()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockEndpointService.addResponse(expectedResponse); @@ -395,6 +399,7 @@ public void updateEndpointTest() throws Exception { .putAllLabels(new HashMap()) .setCreateTime(Timestamp.newBuilder().build()) 
.setUpdateTime(Timestamp.newBuilder().build()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockEndpointService.addResponse(expectedResponse); diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java index f591a0058..223e8d0b2 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/JobServiceClientTest.java @@ -106,6 +106,7 @@ public void createCustomJobTest() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -156,6 +157,7 @@ public void createCustomJobTest2() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -206,6 +208,7 @@ public void getCustomJobTest() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -253,6 +256,7 @@ public void getCustomJobTest2() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -547,6 +551,7 @@ public void createDataLabelingJobTest() throws Exception { .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) 
.addAllSpecialistPools(new ArrayList()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setActiveLearningConfig(ActiveLearningConfig.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -606,6 +611,7 @@ public void createDataLabelingJobTest2() throws Exception { .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) .addAllSpecialistPools(new ArrayList()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setActiveLearningConfig(ActiveLearningConfig.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -665,6 +671,7 @@ public void getDataLabelingJobTest() throws Exception { .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) .addAllSpecialistPools(new ArrayList()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setActiveLearningConfig(ActiveLearningConfig.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -722,6 +729,7 @@ public void getDataLabelingJobTest2() throws Exception { .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) .addAllSpecialistPools(new ArrayList()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setActiveLearningConfig(ActiveLearningConfig.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1028,6 +1036,7 @@ public void createHyperparameterTuningJobTest() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1089,6 +1098,7 @@ public void createHyperparameterTuningJobTest2() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1150,6 +1160,7 @@ public void 
getHyperparameterTuningJobTest() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1208,6 +1219,7 @@ public void getHyperparameterTuningJobTest2() throws Exception { .setUpdateTime(Timestamp.newBuilder().build()) .setError(Status.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1523,6 +1535,7 @@ public void createBatchPredictionJobTest() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1587,6 +1600,7 @@ public void createBatchPredictionJobTest2() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1651,6 +1665,7 @@ public void getBatchPredictionJobTest() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); @@ -1714,6 +1729,7 @@ public void getBatchPredictionJobTest2() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockJobService.addResponse(expectedResponse); diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClientTest.java 
b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClientTest.java index 51d21228c..3669fffb5 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClientTest.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/ModelServiceClientTest.java @@ -213,6 +213,7 @@ public void getModelTest() throws Exception { .setExplanationSpec(ExplanationSpec.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockModelService.addResponse(expectedResponse); @@ -271,6 +272,7 @@ public void getModelTest2() throws Exception { .setExplanationSpec(ExplanationSpec.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockModelService.addResponse(expectedResponse); @@ -417,6 +419,7 @@ public void updateModelTest() throws Exception { .setExplanationSpec(ExplanationSpec.newBuilder().build()) .setEtag("etag3123477") .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockModelService.addResponse(expectedResponse); @@ -645,6 +648,7 @@ public void getModelEvaluationTest() throws Exception { .setCreateTime(Timestamp.newBuilder().build()) .addAllSliceDimensions(new ArrayList()) .setModelExplanation(ModelExplanation.newBuilder().build()) + .addAllExplanationSpecs(new ArrayList()) .build(); mockModelService.addResponse(expectedResponse); @@ -692,6 +696,7 @@ public void getModelEvaluationTest2() throws Exception { .setCreateTime(Timestamp.newBuilder().build()) .addAllSliceDimensions(new ArrayList()) .setModelExplanation(ModelExplanation.newBuilder().build()) + .addAllExplanationSpecs(new ArrayList()) .build(); mockModelService.addResponse(expectedResponse); diff --git 
a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClientTest.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClientTest.java index 17a9d66f6..4da35842e 100644 --- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClientTest.java +++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceClientTest.java @@ -107,6 +107,7 @@ public void createTrainingPipelineTest() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockPipelineService.addResponse(expectedResponse); @@ -164,6 +165,7 @@ public void createTrainingPipelineTest2() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockPipelineService.addResponse(expectedResponse); @@ -221,6 +223,7 @@ public void getTrainingPipelineTest() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockPipelineService.addResponse(expectedResponse); @@ -276,6 +279,7 @@ public void getTrainingPipelineTest2() throws Exception { .setEndTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap()) + .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .build(); mockPipelineService.addResponse(expectedResponse); diff --git a/grpc-google-cloud-aiplatform-v1/pom.xml b/grpc-google-cloud-aiplatform-v1/pom.xml index b8bca425c..8196044a7 100644 --- a/grpc-google-cloud-aiplatform-v1/pom.xml +++ b/grpc-google-cloud-aiplatform-v1/pom.xml @@ -4,13 +4,13 @@ 4.0.0 
com.google.api.grpc grpc-google-cloud-aiplatform-v1 - 0.3.0 + 0.4.0 grpc-google-cloud-aiplatform-v1 GRPC library for google-cloud-aiplatform com.google.cloud google-cloud-aiplatform-parent - 0.3.0 + 0.4.0 diff --git a/grpc-google-cloud-aiplatform-v1beta1/pom.xml b/grpc-google-cloud-aiplatform-v1beta1/pom.xml index 3c9738c5b..4d3f4aba7 100644 --- a/grpc-google-cloud-aiplatform-v1beta1/pom.xml +++ b/grpc-google-cloud-aiplatform-v1beta1/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc grpc-google-cloud-aiplatform-v1beta1 - 0.3.0 + 0.4.0 grpc-google-cloud-aiplatform-v1beta1 GRPC library for google-cloud-aiplatform com.google.cloud google-cloud-aiplatform-parent - 0.3.0 + 0.4.0 diff --git a/pom.xml b/pom.xml index 0b71b5176..6044cb43e 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-aiplatform-parent pom - 0.3.0 + 0.4.0 Google AI Platform Parent https://github.com/googleapis/java- @@ -70,32 +70,32 @@ com.google.cloud google-cloud-aiplatform - 0.3.0 + 0.4.0 com.google.api.grpc proto-google-cloud-aiplatform-v1 - 0.3.0 + 0.4.0 com.google.api.grpc proto-google-cloud-aiplatform-v1beta1 - 0.3.0 + 0.4.0 com.google.api.grpc grpc-google-cloud-aiplatform-v1 - 0.3.0 + 0.4.0 com.google.api.grpc grpc-google-cloud-aiplatform-v1beta1 - 0.3.0 + 0.4.0 com.google.cloud google-cloud-shared-dependencies - 0.20.0 + 0.20.1 pom import diff --git a/proto-google-cloud-aiplatform-v1/pom.xml b/proto-google-cloud-aiplatform-v1/pom.xml index 9714764a4..5a6a8400f 100644 --- a/proto-google-cloud-aiplatform-v1/pom.xml +++ b/proto-google-cloud-aiplatform-v1/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc proto-google-cloud-aiplatform-v1 - 0.3.0 + 0.4.0 proto-google-cloud-aiplatform-v1 Proto library for google-cloud-aiplatform com.google.cloud google-cloud-aiplatform-parent - 0.3.0 + 0.4.0 diff --git a/proto-google-cloud-aiplatform-v1beta1/pom.xml b/proto-google-cloud-aiplatform-v1beta1/pom.xml index 
5a41e4de9..e5985515e 100644 --- a/proto-google-cloud-aiplatform-v1beta1/pom.xml +++ b/proto-google-cloud-aiplatform-v1beta1/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc proto-google-cloud-aiplatform-v1beta1 - 0.3.0 + 0.4.0 proto-google-cloud-aiplatform-v1beta1 Proto library for google-cloud-aiplatform com.google.cloud google-cloud-aiplatform-parent - 0.3.0 + 0.4.0 diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ActiveLearningConfig.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ActiveLearningConfig.java index b3d49aff4..843b0bf65 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ActiveLearningConfig.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ActiveLearningConfig.java @@ -22,7 +22,7 @@ * * *

- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
  *  label the data incrementally by several iterations. For every iteration, it
  *  will select a batch of data based on the sampling strategy.
  * 
@@ -563,7 +563,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * * *
-   * Paramaters that configure active learning pipeline. Active learning will
+   * Parameters that configure active learning pipeline. Active learning will
    *  label the data incrementally by several iterations. For every iteration, it
    *  will select a batch of data based on the sampling strategy.
    * 
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java index f09ee1f08..cd3cf1562 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java @@ -404,8 +404,8 @@ public int getOutputIndex(int index) { * * *
-   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-   * predicted class name by a multi-classification Model.
+   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+   * the predicted class name by a multi-classification Model.
    * This field is only populated iff the Model predicts display names as a
    * separate field along with the explained output. The predicted display name
    * must has the same shape of the explained output, and can be located using
@@ -432,8 +432,8 @@ public java.lang.String getOutputDisplayName() {
    *
    *
    * 
-   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-   * predicted class name by a multi-classification Model.
+   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+   * the predicted class name by a multi-classification Model.
    * This field is only populated iff the Model predicts display names as a
    * separate field along with the explained output. The predicted display name
    * must has the same shape of the explained output, and can be located using
@@ -465,20 +465,19 @@ public com.google.protobuf.ByteString getOutputDisplayNameBytes() {
    * 
    * Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
    * explanation method. Lower value means more precise attributions.
-   * * For [Sampled Shapley
-   * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
-   * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
-   * * For [Integrated Gradients
-   * attribution][ExplanationParameters.integrated_gradients_attribution],
-   * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+   * * For Sampled Shapley
+   * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+   * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+   * the error.
+   * * For Integrated Gradients
+   * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+   * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
    * reduce the error.
-   * * For [XRAI
-   * attribution][ExplanationParameters.xrai_attribution], increasing
-   * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
-   * Refer to  AI Explanations Whitepaper for more details:
-   * https:
-   * //storage.googleapis.com/cloud-ai-whitep
-   * // apers/AI%20Explainability%20Whitepaper.pdf
+   * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+   * increasing
+   * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+   * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+   * for more information.
    * 
* * double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; @@ -1679,8 +1678,8 @@ public Builder clearOutputIndex() { * * *
-     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-     * predicted class name by a multi-classification Model.
+     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+     * the predicted class name by a multi-classification Model.
      * This field is only populated iff the Model predicts display names as a
      * separate field along with the explained output. The predicted display name
      * must has the same shape of the explained output, and can be located using
@@ -1706,8 +1705,8 @@ public java.lang.String getOutputDisplayName() {
      *
      *
      * 
-     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-     * predicted class name by a multi-classification Model.
+     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+     * the predicted class name by a multi-classification Model.
      * This field is only populated iff the Model predicts display names as a
      * separate field along with the explained output. The predicted display name
      * must has the same shape of the explained output, and can be located using
@@ -1733,8 +1732,8 @@ public com.google.protobuf.ByteString getOutputDisplayNameBytes() {
      *
      *
      * 
-     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-     * predicted class name by a multi-classification Model.
+     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+     * the predicted class name by a multi-classification Model.
      * This field is only populated iff the Model predicts display names as a
      * separate field along with the explained output. The predicted display name
      * must has the same shape of the explained output, and can be located using
@@ -1759,8 +1758,8 @@ public Builder setOutputDisplayName(java.lang.String value) {
      *
      *
      * 
-     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-     * predicted class name by a multi-classification Model.
+     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+     * the predicted class name by a multi-classification Model.
      * This field is only populated iff the Model predicts display names as a
      * separate field along with the explained output. The predicted display name
      * must has the same shape of the explained output, and can be located using
@@ -1781,8 +1780,8 @@ public Builder clearOutputDisplayName() {
      *
      *
      * 
-     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-     * predicted class name by a multi-classification Model.
+     * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+     * the predicted class name by a multi-classification Model.
      * This field is only populated iff the Model predicts display names as a
      * separate field along with the explained output. The predicted display name
      * must has the same shape of the explained output, and can be located using
@@ -1812,20 +1811,19 @@ public Builder setOutputDisplayNameBytes(com.google.protobuf.ByteString value) {
      * 
      * Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
      * explanation method. Lower value means more precise attributions.
-     * * For [Sampled Shapley
-     * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
-     * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
-     * * For [Integrated Gradients
-     * attribution][ExplanationParameters.integrated_gradients_attribution],
-     * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+     * * For Sampled Shapley
+     * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+     * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+     * the error.
+     * * For Integrated Gradients
+     * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+     * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
      * reduce the error.
-     * * For [XRAI
-     * attribution][ExplanationParameters.xrai_attribution], increasing
-     * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
-     * Refer to  AI Explanations Whitepaper for more details:
-     * https:
-     * //storage.googleapis.com/cloud-ai-whitep
-     * // apers/AI%20Explainability%20Whitepaper.pdf
+     * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+     * increasing
+     * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+     * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+     * for more information.
      * 
* * double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; @@ -1842,20 +1840,19 @@ public double getApproximationError() { *
      * Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
      * explanation method. Lower value means more precise attributions.
-     * * For [Sampled Shapley
-     * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
-     * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
-     * * For [Integrated Gradients
-     * attribution][ExplanationParameters.integrated_gradients_attribution],
-     * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+     * * For Sampled Shapley
+     * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+     * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+     * the error.
+     * * For Integrated Gradients
+     * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+     * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
      * reduce the error.
-     * * For [XRAI
-     * attribution][ExplanationParameters.xrai_attribution], increasing
-     * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
-     * Refer to  AI Explanations Whitepaper for more details:
-     * https:
-     * //storage.googleapis.com/cloud-ai-whitep
-     * // apers/AI%20Explainability%20Whitepaper.pdf
+     * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+     * increasing
+     * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+     * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+     * for more information.
      * 
* * double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; @@ -1875,20 +1872,19 @@ public Builder setApproximationError(double value) { *
      * Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
      * explanation method. Lower value means more precise attributions.
-     * * For [Sampled Shapley
-     * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
-     * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
-     * * For [Integrated Gradients
-     * attribution][ExplanationParameters.integrated_gradients_attribution],
-     * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+     * * For Sampled Shapley
+     * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+     * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+     * the error.
+     * * For Integrated Gradients
+     * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+     * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
      * reduce the error.
-     * * For [XRAI
-     * attribution][ExplanationParameters.xrai_attribution], increasing
-     * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
-     * Refer to  AI Explanations Whitepaper for more details:
-     * https:
-     * //storage.googleapis.com/cloud-ai-whitep
-     * // apers/AI%20Explainability%20Whitepaper.pdf
+     * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+     * increasing
+     * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+     * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+     * for more information.
      * 
* * double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java index 48bc07ced..3dc3f1e63 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java @@ -214,8 +214,8 @@ public interface AttributionOrBuilder * * *
-   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-   * predicted class name by a multi-classification Model.
+   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+   * the predicted class name by a multi-classification Model.
    * This field is only populated iff the Model predicts display names as a
    * separate field along with the explained output. The predicted display name
    * must has the same shape of the explained output, and can be located using
@@ -231,8 +231,8 @@ public interface AttributionOrBuilder
    *
    *
    * 
-   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
-   * predicted class name by a multi-classification Model.
+   * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+   * the predicted class name by a multi-classification Model.
    * This field is only populated iff the Model predicts display names as a
    * separate field along with the explained output. The predicted display name
    * must has the same shape of the explained output, and can be located using
@@ -251,20 +251,19 @@ public interface AttributionOrBuilder
    * 
    * Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
    * explanation method. Lower value means more precise attributions.
-   * * For [Sampled Shapley
-   * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
-   * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
-   * * For [Integrated Gradients
-   * attribution][ExplanationParameters.integrated_gradients_attribution],
-   * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+   * * For Sampled Shapley
+   * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+   * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+   * the error.
+   * * For Integrated Gradients
+   * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+   * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
    * reduce the error.
-   * * For [XRAI
-   * attribution][ExplanationParameters.xrai_attribution], increasing
-   * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
-   * Refer to  AI Explanations Whitepaper for more details:
-   * https:
-   * //storage.googleapis.com/cloud-ai-whitep
-   * // apers/AI%20Explainability%20Whitepaper.pdf
+   * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+   * increasing
+   * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+   * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+   * for more information.
    * 
* * double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java index e8c8039c3..dbfb12ec4 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java @@ -39,7 +39,9 @@ private BatchMigrateResourcesOperationMetadata( super(builder); } - private BatchMigrateResourcesOperationMetadata() {} + private BatchMigrateResourcesOperationMetadata() { + partialResults_ = java.util.Collections.emptyList(); + } @java.lang.Override @SuppressWarnings({"unused"}) @@ -60,6 +62,7 @@ private BatchMigrateResourcesOperationMetadata( if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } + int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -88,6 +91,22 @@ private BatchMigrateResourcesOperationMetadata( break; } + case 18: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + partialResults_ = + new java.util.ArrayList< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult>(); + mutable_bitField0_ |= 0x00000001; + } + partialResults_.add( + input.readMessage( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.parser(), + extensionRegistry)); + break; + } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { @@ -102,6 +121,9 @@ private BatchMigrateResourcesOperationMetadata( } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + partialResults_ = java.util.Collections.unmodifiableList(partialResults_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } @@ -123,53 +145,1861 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { .class); } - public static final int GENERIC_METADATA_FIELD_NUMBER = 1; - private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_; + public interface PartialResultOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+     * The error result of the migration request in case of failure.
+     * 
+ * + * .google.rpc.Status error = 2; + * + * @return Whether the error field is set. + */ + boolean hasError(); + /** + * + * + *
+     * The error result of the migration request in case of failure.
+     * 
+ * + * .google.rpc.Status error = 2; + * + * @return The error. + */ + com.google.rpc.Status getError(); + /** + * + * + *
+     * The error result of the migration request in case of failure.
+     * 
+ * + * .google.rpc.Status error = 2; + */ + com.google.rpc.StatusOrBuilder getErrorOrBuilder(); + + /** + * + * + *
+     * Migrated model resource name.
+     * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @return The model. + */ + java.lang.String getModel(); + /** + * + * + *
+     * Migrated model resource name.
+     * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @return The bytes for model. + */ + com.google.protobuf.ByteString getModelBytes(); + + /** + * + * + *
+     * Migrated dataset resource name.
+     * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @return The dataset. + */ + java.lang.String getDataset(); + /** + * + * + *
+     * Migrated dataset resource name.
+     * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @return The bytes for dataset. + */ + com.google.protobuf.ByteString getDatasetBytes(); + + /** + * + * + *
+     * It's the same as the value in
+     * [MigrateResourceRequest.migrate_resource_requests][].
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + * + * @return Whether the request field is set. + */ + boolean hasRequest(); + /** + * + * + *
+     * It's the same as the value in
+     * [MigrateResourceRequest.migrate_resource_requests][].
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + * + * @return The request. + */ + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest getRequest(); + /** + * + * + *
+     * It's the same as the value in
+     * [MigrateResourceRequest.migrate_resource_requests][].
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder getRequestOrBuilder(); + + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .ResultCase + getResultCase(); + } + /** + * + * + *
+   * Represents a partial result in batch migration operation for one
+   * [MigrateResourceRequest][google.cloud.aiplatform.v1beta1.MigrateResourceRequest].
+   * 
+ * + * Protobuf type {@code + * google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult} + */ + public static final class PartialResult extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult) + PartialResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use PartialResult.newBuilder() to construct. + private PartialResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private PartialResult() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new PartialResult(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private PartialResult( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder subBuilder = + null; + if (request_ != null) { + subBuilder = request_.toBuilder(); + } + request_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(request_); + request_ = subBuilder.buildPartial(); + } + + break; + } + case 18: + { + com.google.rpc.Status.Builder subBuilder = null; + if (resultCase_ == 2) { + subBuilder = 
((com.google.rpc.Status) result_).toBuilder(); + } + result_ = input.readMessage(com.google.rpc.Status.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.rpc.Status) result_); + result_ = subBuilder.buildPartial(); + } + resultCase_ = 2; + break; + } + case 26: + { + java.lang.String s = input.readStringRequireUtf8(); + resultCase_ = 3; + result_ = s; + break; + } + case 34: + { + java.lang.String s = input.readStringRequireUtf8(); + resultCase_ = 4; + result_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.MigrationServiceProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.MigrationServiceProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.class, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.Builder.class); + } + + private int resultCase_ = 0; + private java.lang.Object result_; + + public enum ResultCase + implements + com.google.protobuf.Internal.EnumLite, + 
com.google.protobuf.AbstractMessage.InternalOneOfEnum { + ERROR(2), + MODEL(3), + DATASET(4), + RESULT_NOT_SET(0); + private final int value; + + private ResultCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static ResultCase valueOf(int value) { + return forNumber(value); + } + + public static ResultCase forNumber(int value) { + switch (value) { + case 2: + return ERROR; + case 3: + return MODEL; + case 4: + return DATASET; + case 0: + return RESULT_NOT_SET; + default: + return null; + } + } + + public int getNumber() { + return this.value; + } + }; + + public ResultCase getResultCase() { + return ResultCase.forNumber(resultCase_); + } + + public static final int ERROR_FIELD_NUMBER = 2; + /** + * + * + *
+     * The error result of the migration request in case of failure.
+     * 
+ * + * .google.rpc.Status error = 2; + * + * @return Whether the error field is set. + */ + @java.lang.Override + public boolean hasError() { + return resultCase_ == 2; + } + /** + * + * + *
+     * The error result of the migration request in case of failure.
+     * 
+ * + * .google.rpc.Status error = 2; + * + * @return The error. + */ + @java.lang.Override + public com.google.rpc.Status getError() { + if (resultCase_ == 2) { + return (com.google.rpc.Status) result_; + } + return com.google.rpc.Status.getDefaultInstance(); + } + /** + * + * + *
+     * The error result of the migration request in case of failure.
+     * 
+ * + * .google.rpc.Status error = 2; + */ + @java.lang.Override + public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { + if (resultCase_ == 2) { + return (com.google.rpc.Status) result_; + } + return com.google.rpc.Status.getDefaultInstance(); + } + + public static final int MODEL_FIELD_NUMBER = 3; + /** + * + * + *
+     * Migrated model resource name.
+     * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @return The model. + */ + public java.lang.String getModel() { + java.lang.Object ref = ""; + if (resultCase_ == 3) { + ref = result_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (resultCase_ == 3) { + result_ = s; + } + return s; + } + } + /** + * + * + *
+     * Migrated model resource name.
+     * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @return The bytes for model. + */ + public com.google.protobuf.ByteString getModelBytes() { + java.lang.Object ref = ""; + if (resultCase_ == 3) { + ref = result_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (resultCase_ == 3) { + result_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DATASET_FIELD_NUMBER = 4; + /** + * + * + *
+     * Migrated dataset resource name.
+     * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @return The dataset. + */ + public java.lang.String getDataset() { + java.lang.Object ref = ""; + if (resultCase_ == 4) { + ref = result_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (resultCase_ == 4) { + result_ = s; + } + return s; + } + } + /** + * + * + *
+     * Migrated dataset resource name.
+     * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @return The bytes for dataset. + */ + public com.google.protobuf.ByteString getDatasetBytes() { + java.lang.Object ref = ""; + if (resultCase_ == 4) { + ref = result_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (resultCase_ == 4) { + result_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int REQUEST_FIELD_NUMBER = 1; + private com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest request_; + /** + * + * + *
+     * It's the same as the value in
+     * [MigrateResourceRequest.migrate_resource_requests][].
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + * + * @return Whether the request field is set. + */ + @java.lang.Override + public boolean hasRequest() { + return request_ != null; + } + /** + * + * + *
+     * It's the same as the value in
+     * [MigrateResourceRequest.migrate_resource_requests][].
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + * + * @return The request. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest getRequest() { + return request_ == null + ? com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.getDefaultInstance() + : request_; + } + /** + * + * + *
+     * It's the same as the value in
+     * [MigrateResourceRequest.migrate_resource_requests][].
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder + getRequestOrBuilder() { + return getRequest(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (request_ != null) { + output.writeMessage(1, getRequest()); + } + if (resultCase_ == 2) { + output.writeMessage(2, (com.google.rpc.Status) result_); + } + if (resultCase_ == 3) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, result_); + } + if (resultCase_ == 4) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, result_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (request_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRequest()); + } + if (resultCase_ == 2) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize( + 2, (com.google.rpc.Status) result_); + } + if (resultCase_ == 3) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, result_); + } + if (resultCase_ == 4) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, result_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult)) { + return 
super.equals(obj); + } + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + other = + (com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult) + obj; + + if (hasRequest() != other.hasRequest()) return false; + if (hasRequest()) { + if (!getRequest().equals(other.getRequest())) return false; + } + if (!getResultCase().equals(other.getResultCase())) return false; + switch (resultCase_) { + case 2: + if (!getError().equals(other.getError())) return false; + break; + case 3: + if (!getModel().equals(other.getModel())) return false; + break; + case 4: + if (!getDataset().equals(other.getDataset())) return false; + break; + case 0: + default: + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasRequest()) { + hash = (37 * hash) + REQUEST_FIELD_NUMBER; + hash = (53 * hash) + getRequest().hashCode(); + } + switch (resultCase_) { + case 2: + hash = (37 * hash) + ERROR_FIELD_NUMBER; + hash = (53 * hash) + getError().hashCode(); + break; + case 3: + hash = (37 * hash) + MODEL_FIELD_NUMBER; + hash = (53 * hash) + getModel().hashCode(); + break; + case 4: + hash = (37 * hash) + DATASET_FIELD_NUMBER; + hash = (53 * hash) + getDataset().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom( + java.nio.ByteBuffer data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static 
com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+     * Represents a partial result in batch migration operation for one
+     * [MigrateResourceRequest][google.cloud.aiplatform.v1beta1.MigrateResourceRequest].
+     * 
+ * + * Protobuf type {@code + * google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult} + */ + public static final class Builder + extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult) + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.MigrationServiceProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.MigrationServiceProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.class, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.Builder.class); + } + + // Construct using + // com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (requestBuilder_ == null) { + request_ = null; + } else { + request_ = null; + requestBuilder_ = null; + } + resultCase_ = 0; + result_ = null; + return 
this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.aiplatform.v1beta1.MigrationServiceProto + .internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + getDefaultInstanceForType() { + return com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + build() { + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + buildPartial() { + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + result = + new com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult(this); + if (resultCase_ == 2) { + if (errorBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = errorBuilder_.build(); + } + } + if (resultCase_ == 3) { + result.result_ = result_; + } + if (resultCase_ == 4) { + result.result_ = result_; + } + if (requestBuilder_ == null) { + result.request_ = request_; + } else { + result.request_ = requestBuilder_.build(); + } + result.resultCase_ = resultCase_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, 
value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, + java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult) { + return mergeFrom( + (com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult) + other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + other) { + if (other + == com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.getDefaultInstance()) return this; + if (other.hasRequest()) { + mergeRequest(other.getRequest()); + } + switch (other.getResultCase()) { + case ERROR: + { + mergeError(other.getError()); + break; + } + case MODEL: + { + resultCase_ = 3; + result_ = other.result_; + onChanged(); + break; + } + case DATASET: + { + resultCase_ = 4; + result_ = other.result_; + onChanged(); + break; + } + case RESULT_NOT_SET: + { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( 
+ com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int resultCase_ = 0; + private java.lang.Object result_; + + public ResultCase getResultCase() { + return ResultCase.forNumber(resultCase_); + } + + public Builder clearResult() { + resultCase_ = 0; + result_ = null; + onChanged(); + return this; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> + errorBuilder_; + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + * + * @return Whether the error field is set. + */ + @java.lang.Override + public boolean hasError() { + return resultCase_ == 2; + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + * + * @return The error. + */ + @java.lang.Override + public com.google.rpc.Status getError() { + if (errorBuilder_ == null) { + if (resultCase_ == 2) { + return (com.google.rpc.Status) result_; + } + return com.google.rpc.Status.getDefaultInstance(); + } else { + if (resultCase_ == 2) { + return errorBuilder_.getMessage(); + } + return com.google.rpc.Status.getDefaultInstance(); + } + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + */ + public Builder setError(com.google.rpc.Status value) { + if (errorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + errorBuilder_.setMessage(value); + } + resultCase_ = 2; + return this; + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + */ + public Builder setError(com.google.rpc.Status.Builder builderForValue) { + if (errorBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + errorBuilder_.setMessage(builderForValue.build()); + } + resultCase_ = 2; + return this; + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + */ + public Builder mergeError(com.google.rpc.Status value) { + if (errorBuilder_ == null) { + if (resultCase_ == 2 && result_ != com.google.rpc.Status.getDefaultInstance()) { + result_ = + com.google.rpc.Status.newBuilder((com.google.rpc.Status) result_) + .mergeFrom(value) + .buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + if (resultCase_ == 2) { + errorBuilder_.mergeFrom(value); + } + errorBuilder_.setMessage(value); + } + resultCase_ = 2; + return this; + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + */ + public Builder clearError() { + if (errorBuilder_ == null) { + if (resultCase_ == 2) { + resultCase_ = 0; + result_ = null; + onChanged(); + } + } else { + if (resultCase_ == 2) { + resultCase_ = 0; + result_ = null; + } + errorBuilder_.clear(); + } + return this; + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + */ + public com.google.rpc.Status.Builder getErrorBuilder() { + return getErrorFieldBuilder().getBuilder(); + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + */ + @java.lang.Override + public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { + if ((resultCase_ == 2) && (errorBuilder_ != null)) { + return errorBuilder_.getMessageOrBuilder(); + } else { + if (resultCase_ == 2) { + return (com.google.rpc.Status) result_; + } + return com.google.rpc.Status.getDefaultInstance(); + } + } + /** + * + * + *
+       * The error result of the migration request in case of failure.
+       * 
+ * + * .google.rpc.Status error = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> + getErrorFieldBuilder() { + if (errorBuilder_ == null) { + if (!(resultCase_ == 2)) { + result_ = com.google.rpc.Status.getDefaultInstance(); + } + errorBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.Status, + com.google.rpc.Status.Builder, + com.google.rpc.StatusOrBuilder>( + (com.google.rpc.Status) result_, getParentForChildren(), isClean()); + result_ = null; + } + resultCase_ = 2; + onChanged(); + ; + return errorBuilder_; + } + + /** + * + * + *
+       * Migrated model resource name.
+       * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @return The model. + */ + @java.lang.Override + public java.lang.String getModel() { + java.lang.Object ref = ""; + if (resultCase_ == 3) { + ref = result_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (resultCase_ == 3) { + result_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * Migrated model resource name.
+       * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @return The bytes for model. + */ + @java.lang.Override + public com.google.protobuf.ByteString getModelBytes() { + java.lang.Object ref = ""; + if (resultCase_ == 3) { + ref = result_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (resultCase_ == 3) { + result_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * Migrated model resource name.
+       * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @param value The model to set. + * @return This builder for chaining. + */ + public Builder setModel(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + resultCase_ = 3; + result_ = value; + onChanged(); + return this; + } + /** + * + * + *
+       * Migrated model resource name.
+       * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @return This builder for chaining. + */ + public Builder clearModel() { + if (resultCase_ == 3) { + resultCase_ = 0; + result_ = null; + onChanged(); + } + return this; + } + /** + * + * + *
+       * Migrated model resource name.
+       * 
+ * + * string model = 3 [(.google.api.resource_reference) = { ... } + * + * @param value The bytes for model to set. + * @return This builder for chaining. + */ + public Builder setModelBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + resultCase_ = 3; + result_ = value; + onChanged(); + return this; + } + + /** + * + * + *
+       * Migrated dataset resource name.
+       * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @return The dataset. + */ + @java.lang.Override + public java.lang.String getDataset() { + java.lang.Object ref = ""; + if (resultCase_ == 4) { + ref = result_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (resultCase_ == 4) { + result_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * Migrated dataset resource name.
+       * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @return The bytes for dataset. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDatasetBytes() { + java.lang.Object ref = ""; + if (resultCase_ == 4) { + ref = result_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (resultCase_ == 4) { + result_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * Migrated dataset resource name.
+       * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @param value The dataset to set. + * @return This builder for chaining. + */ + public Builder setDataset(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + resultCase_ = 4; + result_ = value; + onChanged(); + return this; + } + /** + * + * + *
+       * Migrated dataset resource name.
+       * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @return This builder for chaining. + */ + public Builder clearDataset() { + if (resultCase_ == 4) { + resultCase_ = 0; + result_ = null; + onChanged(); + } + return this; + } + /** + * + * + *
+       * Migrated dataset resource name.
+       * 
+ * + * string dataset = 4 [(.google.api.resource_reference) = { ... } + * + * @param value The bytes for dataset to set. + * @return This builder for chaining. + */ + public Builder setDatasetBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + resultCase_ = 4; + result_ = value; + onChanged(); + return this; + } + + private com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest request_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest, + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder, + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder> + requestBuilder_; + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + * + * @return Whether the request field is set. + */ + public boolean hasRequest() { + return requestBuilder_ != null || request_ != null; + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + * + * @return The request. + */ + public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest getRequest() { + if (requestBuilder_ == null) { + return request_ == null + ? com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.getDefaultInstance() + : request_; + } else { + return requestBuilder_.getMessage(); + } + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + public Builder setRequest(com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest value) { + if (requestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + request_ = value; + onChanged(); + } else { + requestBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + public Builder setRequest( + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder builderForValue) { + if (requestBuilder_ == null) { + request_ = builderForValue.build(); + onChanged(); + } else { + requestBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + public Builder mergeRequest( + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest value) { + if (requestBuilder_ == null) { + if (request_ != null) { + request_ = + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.newBuilder(request_) + .mergeFrom(value) + .buildPartial(); + } else { + request_ = value; + } + onChanged(); + } else { + requestBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + public Builder clearRequest() { + if (requestBuilder_ == null) { + request_ = null; + onChanged(); + } else { + request_ = null; + requestBuilder_ = null; + } + + return this; + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder + getRequestBuilder() { + + onChanged(); + return getRequestFieldBuilder().getBuilder(); + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder + getRequestOrBuilder() { + if (requestBuilder_ != null) { + return requestBuilder_.getMessageOrBuilder(); + } else { + return request_ == null + ? com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.getDefaultInstance() + : request_; + } + } + /** + * + * + *
+       * It's the same as the value in
+       * [MigrateResourceRequest.migrate_resource_requests][].
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest, + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder, + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder> + getRequestFieldBuilder() { + if (requestBuilder_ == null) { + requestBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest, + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder, + com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder>( + getRequest(), getParentForChildren(), isClean()); + request_ = null; + } + return requestBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult) + } + + // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult) + private static final com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult(); + } + + public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public PartialResult 
parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PartialResult(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + } + + public static final int GENERIC_METADATA_FIELD_NUMBER = 1; + private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_; + /** + * + * + *
+   * The common part of the operation metadata.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1; + * + * @return Whether the genericMetadata field is set. + */ + @java.lang.Override + public boolean hasGenericMetadata() { + return genericMetadata_ != null; + } + /** + * + * + *
+   * The common part of the operation metadata.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1; + * + * @return The genericMetadata. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() { + return genericMetadata_ == null + ? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance() + : genericMetadata_; + } + /** + * + * + *
+   * The common part of the operation metadata.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder + getGenericMetadataOrBuilder() { + return getGenericMetadata(); + } + + public static final int PARTIAL_RESULTS_FIELD_NUMBER = 2; + private java.util.List< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult> + partialResults_; /** * * *
-   * The common part of the operation metadata.
+   * Partial results that reflects the latest migration operation progress.
    * 
* - * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1; + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + @java.lang.Override + public java.util.List< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult> + getPartialResultsList() { + return partialResults_; + } + /** * - * @return Whether the genericMetadata field is set. + * + *
+   * Partial results that reflects the latest migration operation progress.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * */ @java.lang.Override - public boolean hasGenericMetadata() { - return genericMetadata_ != null; + public java.util.List< + ? extends + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder> + getPartialResultsOrBuilderList() { + return partialResults_; } /** * * *
-   * The common part of the operation metadata.
+   * Partial results that reflects the latest migration operation progress.
    * 
* - * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1; + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + @java.lang.Override + public int getPartialResultsCount() { + return partialResults_.size(); + } + /** * - * @return The genericMetadata. + * + *
+   * Partial results that reflects the latest migration operation progress.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * */ @java.lang.Override - public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() { - return genericMetadata_ == null - ? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance() - : genericMetadata_; + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + getPartialResults(int index) { + return partialResults_.get(index); } /** * * *
-   * The common part of the operation metadata.
+   * Partial results that reflects the latest migration operation progress.
    * 
* - * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1; + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * */ @java.lang.Override - public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder - getGenericMetadataOrBuilder() { - return getGenericMetadata(); + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder + getPartialResultsOrBuilder(int index) { + return partialResults_.get(index); } private byte memoizedIsInitialized = -1; @@ -189,6 +2019,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (genericMetadata_ != null) { output.writeMessage(1, getGenericMetadata()); } + for (int i = 0; i < partialResults_.size(); i++) { + output.writeMessage(2, partialResults_.get(i)); + } unknownFields.writeTo(output); } @@ -201,6 +2034,9 @@ public int getSerializedSize() { if (genericMetadata_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata()); } + for (int i = 0; i < partialResults_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, partialResults_.get(i)); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -222,6 +2058,7 @@ public boolean equals(final java.lang.Object obj) { if (hasGenericMetadata()) { if (!getGenericMetadata().equals(other.getGenericMetadata())) return false; } + if (!getPartialResultsList().equals(other.getPartialResultsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -237,6 +2074,10 @@ public int hashCode() { hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER; hash = (53 * hash) + getGenericMetadata().hashCode(); } + if (getPartialResultsCount() > 0) { + hash = (37 * hash) + PARTIAL_RESULTS_FIELD_NUMBER; + hash = (53 * hash) + getPartialResultsList().hashCode(); + } hash = 
(29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -385,7 +2226,9 @@ private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getPartialResultsFieldBuilder(); + } } @java.lang.Override @@ -397,6 +2240,12 @@ public Builder clear() { genericMetadata_ = null; genericMetadataBuilder_ = null; } + if (partialResultsBuilder_ == null) { + partialResults_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + partialResultsBuilder_.clear(); + } return this; } @@ -428,11 +2277,21 @@ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadat buildPartial() { com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata result = new com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata(this); + int from_bitField0_ = bitField0_; if (genericMetadataBuilder_ == null) { result.genericMetadata_ = genericMetadata_; } else { result.genericMetadata_ = genericMetadataBuilder_.build(); } + if (partialResultsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + partialResults_ = java.util.Collections.unmodifiableList(partialResults_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.partialResults_ = partialResults_; + } else { + result.partialResults_ = partialResultsBuilder_.build(); + } onBuilt(); return result; } @@ -490,6 +2349,33 @@ public Builder mergeFrom( if (other.hasGenericMetadata()) { mergeGenericMetadata(other.getGenericMetadata()); } + if (partialResultsBuilder_ == null) { + if (!other.partialResults_.isEmpty()) { + if (partialResults_.isEmpty()) { + partialResults_ = other.partialResults_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePartialResultsIsMutable(); + 
partialResults_.addAll(other.partialResults_); + } + onChanged(); + } + } else { + if (!other.partialResults_.isEmpty()) { + if (partialResultsBuilder_.isEmpty()) { + partialResultsBuilder_.dispose(); + partialResultsBuilder_ = null; + partialResults_ = other.partialResults_; + bitField0_ = (bitField0_ & ~0x00000001); + partialResultsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? getPartialResultsFieldBuilder() + : null; + } else { + partialResultsBuilder_.addAllMessages(other.partialResults_); + } + } + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -522,6 +2408,8 @@ public Builder mergeFrom( return this; } + private int bitField0_; + private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata, @@ -713,6 +2601,451 @@ public Builder clearGenericMetadata() { return genericMetadataBuilder_; } + private java.util.List< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult> + partialResults_ = java.util.Collections.emptyList(); + + private void ensurePartialResultsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + partialResults_ = + new java.util.ArrayList< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult>(partialResults_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder> + partialResultsBuilder_; + + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public java.util.List< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult> + getPartialResultsList() { + if (partialResultsBuilder_ == null) { + return java.util.Collections.unmodifiableList(partialResults_); + } else { + return partialResultsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public int getPartialResultsCount() { + if (partialResultsBuilder_ == null) { + return partialResults_.size(); + } else { + return partialResultsBuilder_.getCount(); + } + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + getPartialResults(int index) { + if (partialResultsBuilder_ == null) { + return partialResults_.get(index); + } else { + return partialResultsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder setPartialResults( + int index, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + value) { + if (partialResultsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePartialResultsIsMutable(); + partialResults_.set(index, value); + onChanged(); + } else { + partialResultsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder setPartialResults( + int index, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder + builderForValue) { + if (partialResultsBuilder_ == null) { + ensurePartialResultsIsMutable(); + partialResults_.set(index, builderForValue.build()); + onChanged(); + } else { + partialResultsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder addPartialResults( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + value) { + if (partialResultsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePartialResultsIsMutable(); + partialResults_.add(value); + onChanged(); + } else { + partialResultsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder addPartialResults( + int index, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + value) { + if (partialResultsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePartialResultsIsMutable(); + partialResults_.add(index, value); + onChanged(); + } else { + partialResultsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder addPartialResults( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder + builderForValue) { + if (partialResultsBuilder_ == null) { + ensurePartialResultsIsMutable(); + partialResults_.add(builderForValue.build()); + onChanged(); + } else { + partialResultsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder addPartialResults( + int index, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder + builderForValue) { + if (partialResultsBuilder_ == null) { + ensurePartialResultsIsMutable(); + partialResults_.add(index, builderForValue.build()); + onChanged(); + } else { + partialResultsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder addAllPartialResults( + java.lang.Iterable< + ? extends + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult> + values) { + if (partialResultsBuilder_ == null) { + ensurePartialResultsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, partialResults_); + onChanged(); + } else { + partialResultsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder clearPartialResults() { + if (partialResultsBuilder_ == null) { + partialResults_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + partialResultsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public Builder removePartialResults(int index) { + if (partialResultsBuilder_ == null) { + ensurePartialResultsIsMutable(); + partialResults_.remove(index); + onChanged(); + } else { + partialResultsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder + getPartialResultsBuilder(int index) { + return getPartialResultsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder + getPartialResultsOrBuilder(int index) { + if (partialResultsBuilder_ == null) { + return partialResults_.get(index); + } else { + return partialResultsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public java.util.List< + ? extends + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder> + getPartialResultsOrBuilderList() { + if (partialResultsBuilder_ != null) { + return partialResultsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(partialResults_); + } + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder + addPartialResultsBuilder() { + return getPartialResultsFieldBuilder() + .addBuilder( + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.getDefaultInstance()); + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder + addPartialResultsBuilder(int index) { + return getPartialResultsFieldBuilder() + .addBuilder( + index, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.getDefaultInstance()); + } + /** + * + * + *
+     * Partial results that reflects the latest migration operation progress.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + public java.util.List< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder> + getPartialResultsBuilderList() { + return getPartialResultsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + .Builder, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder> + getPartialResultsFieldBuilder() { + if (partialResultsBuilder_ == null) { + partialResultsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResult.Builder, + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder>( + partialResults_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + partialResults_ = null; + } + return partialResultsBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java index 27c36f4e1..b613cd4c8 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java +++ 
b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java @@ -58,4 +58,73 @@ public interface BatchMigrateResourcesOperationMetadataOrBuilder */ com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder getGenericMetadataOrBuilder(); + + /** + * + * + *
+   * Partial results that reflects the latest migration operation progress.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + java.util.List< + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult> + getPartialResultsList(); + /** + * + * + *
+   * Partial results that reflects the latest migration operation progress.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult + getPartialResults(int index); + /** + * + * + *
+   * Partial results that reflects the latest migration operation progress.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + int getPartialResultsCount(); + /** + * + * + *
+   * Partial results that reflects the latest migration operation progress.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + java.util.List< + ? extends + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata + .PartialResultOrBuilder> + getPartialResultsOrBuilderList(); + /** + * + * + *
+   * Partial results that reflects the latest migration operation progress.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2; + * + */ + com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResultOrBuilder + getPartialResultsOrBuilder(int index); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java index 82a4704aa..ce4996373 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java @@ -344,6 +344,23 @@ private BatchPredictionJob( case 184: { generateExplanation_ = input.readBool(); + break; + } + case 194: + { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null; + if (encryptionSpec_ != null) { + subBuilder = encryptionSpec_.toBuilder(); + } + encryptionSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionSpec_); + encryptionSpec_ = subBuilder.buildPartial(); + } + break; } case 202: @@ -420,7 +437,7 @@ public interface InputConfigOrBuilder * * *
-     * The Google Cloud Storage location for the input instances.
+     * The Cloud Storage location for the input instances.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -432,7 +449,7 @@ public interface InputConfigOrBuilder * * *
-     * The Google Cloud Storage location for the input instances.
+     * The Cloud Storage location for the input instances.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -444,7 +461,7 @@ public interface InputConfigOrBuilder * * *
-     * The Google Cloud Storage location for the input instances.
+     * The Cloud Storage location for the input instances.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -713,7 +730,7 @@ public SourceCase getSourceCase() { * * *
-     * The Google Cloud Storage location for the input instances.
+     * The Cloud Storage location for the input instances.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -728,7 +745,7 @@ public boolean hasGcsSource() { * * *
-     * The Google Cloud Storage location for the input instances.
+     * The Cloud Storage location for the input instances.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -746,7 +763,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSource getGcsSource() { * * *
-     * The Google Cloud Storage location for the input instances.
+     * The Cloud Storage location for the input instances.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1304,7 +1321,7 @@ public Builder clearSource() { * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1319,7 +1336,7 @@ public boolean hasGcsSource() { * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1344,7 +1361,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSource getGcsSource() { * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1366,7 +1383,7 @@ public Builder setGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource value) * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1386,7 +1403,7 @@ public Builder setGcsSource( * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1417,7 +1434,7 @@ public Builder mergeGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource valu * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1442,7 +1459,7 @@ public Builder clearGcsSource() { * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1454,7 +1471,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSource.Builder getGcsSourceBuilder * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1474,7 +1491,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder getGcsSourceOrBuil * * *
-       * The Google Cloud Storage location for the input instances.
+       * The Cloud Storage location for the input instances.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2; @@ -1931,7 +1948,7 @@ public interface OutputConfigOrBuilder * * *
-     * The Google Cloud Storage location of the directory where the output is
+     * The Cloud Storage location of the directory where the output is
      * to be written to. In the given directory a new directory is created.
      * Its name is `prediction-<model-display-name>-<job-create-time>`,
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -1963,7 +1980,7 @@ public interface OutputConfigOrBuilder
      *
      *
      * 
-     * The Google Cloud Storage location of the directory where the output is
+     * The Cloud Storage location of the directory where the output is
      * to be written to. In the given directory a new directory is created.
      * Its name is `prediction-<model-display-name>-<job-create-time>`,
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -1995,7 +2012,7 @@ public interface OutputConfigOrBuilder
      *
      *
      * 
-     * The Google Cloud Storage location of the directory where the output is
+     * The Cloud Storage location of the directory where the output is
      * to be written to. In the given directory a new directory is created.
      * Its name is `prediction-<model-display-name>-<job-create-time>`,
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -2328,7 +2345,7 @@ public DestinationCase getDestinationCase() {
      *
      *
      * 
-     * The Google Cloud Storage location of the directory where the output is
+     * The Cloud Storage location of the directory where the output is
      * to be written to. In the given directory a new directory is created.
      * Its name is `prediction-<model-display-name>-<job-create-time>`,
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -2363,7 +2380,7 @@ public boolean hasGcsDestination() {
      *
      *
      * 
-     * The Google Cloud Storage location of the directory where the output is
+     * The Cloud Storage location of the directory where the output is
      * to be written to. In the given directory a new directory is created.
      * Its name is `prediction-<model-display-name>-<job-create-time>`,
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -2401,7 +2418,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
      *
      *
      * 
-     * The Google Cloud Storage location of the directory where the output is
+     * The Cloud Storage location of the directory where the output is
      * to be written to. In the given directory a new directory is created.
      * Its name is `prediction-<model-display-name>-<job-create-time>`,
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3020,7 +3037,7 @@ public Builder clearDestination() {
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3055,7 +3072,7 @@ public boolean hasGcsDestination() {
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3100,7 +3117,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3142,7 +3159,7 @@ public Builder setGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestinat
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3182,7 +3199,7 @@ public Builder setGcsDestination(
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3234,7 +3251,7 @@ public Builder mergeGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestin
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3279,7 +3296,7 @@ public Builder clearGcsDestination() {
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3311,7 +3328,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3352,7 +3369,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
        *
        *
        * 
-       * The Google Cloud Storage location of the directory where the output is
+       * The Cloud Storage location of the directory where the output is
        * to be written to. In the given directory a new directory is created.
        * Its name is `prediction-<model-display-name>-<job-create-time>`,
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3950,7 +3967,7 @@ public interface OutputInfoOrBuilder
      *
      *
      * 
-     * Output only. The full path of the Google Cloud Storage directory created, into which
+     * Output only. The full path of the Cloud Storage directory created, into which
      * the prediction output is written.
      * 
* @@ -3963,7 +3980,7 @@ public interface OutputInfoOrBuilder * * *
-     * Output only. The full path of the Google Cloud Storage directory created, into which
+     * Output only. The full path of the Cloud Storage directory created, into which
      * the prediction output is written.
      * 
* @@ -4156,7 +4173,7 @@ public OutputLocationCase getOutputLocationCase() { * * *
-     * Output only. The full path of the Google Cloud Storage directory created, into which
+     * Output only. The full path of the Cloud Storage directory created, into which
      * the prediction output is written.
      * 
* @@ -4184,7 +4201,7 @@ public java.lang.String getGcsOutputDirectory() { * * *
-     * Output only. The full path of the Google Cloud Storage directory created, into which
+     * Output only. The full path of the Cloud Storage directory created, into which
      * the prediction output is written.
      * 
* @@ -4667,7 +4684,7 @@ public Builder clearOutputLocation() { * * *
-       * Output only. The full path of the Google Cloud Storage directory created, into which
+       * Output only. The full path of the Cloud Storage directory created, into which
        * the prediction output is written.
        * 
* @@ -4696,7 +4713,7 @@ public java.lang.String getGcsOutputDirectory() { * * *
-       * Output only. The full path of the Google Cloud Storage directory created, into which
+       * Output only. The full path of the Cloud Storage directory created, into which
        * the prediction output is written.
        * 
* @@ -4725,7 +4742,7 @@ public com.google.protobuf.ByteString getGcsOutputDirectoryBytes() { * * *
-       * Output only. The full path of the Google Cloud Storage directory created, into which
+       * Output only. The full path of the Cloud Storage directory created, into which
        * the prediction output is written.
        * 
* @@ -4747,7 +4764,7 @@ public Builder setGcsOutputDirectory(java.lang.String value) { * * *
-       * Output only. The full path of the Google Cloud Storage directory created, into which
+       * Output only. The full path of the Cloud Storage directory created, into which
        * the prediction output is written.
        * 
* @@ -4767,7 +4784,7 @@ public Builder clearGcsOutputDirectory() { * * *
-       * Output only. The full path of the Google Cloud Storage directory created, into which
+       * Output only. The full path of the Cloud Storage directory created, into which
        * the prediction output is written.
        * 
* @@ -5463,15 +5480,18 @@ public boolean hasManualBatchTuningParameters() { * * *
-   * Generate explanation along with the batch prediction results.
-   * When it's true, the batch prediction output will change based on the
-   * [output format][BatchPredictionJob.output_config.predictions_format]:
-   *  * `bigquery`: output will include a column named `explanation`. The value
+   * Generate explanation with the batch prediction results.
+   * When set to `true`, the batch prediction output changes based on the
+   * `predictions_format` field of the
+   * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+   *  * `bigquery`: output includes a column named `explanation`. The value
    *    is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
-   *  * `jsonl`: The JSON objects on each line will include an additional entry
+   *  * `jsonl`: The JSON objects on each line include an additional entry
    *    keyed `explanation`. The value of the entry is a JSON object that
    *    conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
    *  * `csv`: Generating explanations for CSV format is not supported.
+   * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+   * populated.
    * 
* * bool generate_explanation = 23; @@ -5489,15 +5509,12 @@ public boolean getGenerateExplanation() { * * *
-   * Explanation configuration for this BatchPredictionJob. Can only be
-   * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-   * specified it with generate_explanation set to false or unset.
+   * Explanation configuration for this BatchPredictionJob. Can be
+   * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
    * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-   * this Model is not allowed.
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+   * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
    * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -5512,15 +5529,12 @@ public boolean hasExplanationSpec() { * * *
-   * Explanation configuration for this BatchPredictionJob. Can only be
-   * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-   * specified it with generate_explanation set to false or unset.
+   * Explanation configuration for this BatchPredictionJob. Can be
+   * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
    * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-   * this Model is not allowed.
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+   * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
    * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -5537,15 +5551,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() * * *
-   * Explanation configuration for this BatchPredictionJob. Can only be
-   * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-   * specified it with generate_explanation set to false or unset.
+   * Explanation configuration for this BatchPredictionJob. Can be
+   * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
    * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-   * this Model is not allowed.
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+   * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
    * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -6232,6 +6243,60 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) { return map.get(key); } + public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 24; + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + /** + * + * + *
+   * Customer-managed encryption key options for a BatchPredictionJob. If this
+   * is set, then all resources created by the BatchPredictionJob will be
+   * encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key options for a BatchPredictionJob. If this
+   * is set, then all resources created by the BatchPredictionJob will be
+   * encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key options for a BatchPredictionJob. If this
+   * is set, then all resources created by the BatchPredictionJob will be
+   * encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -6305,6 +6370,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (generateExplanation_ != false) { output.writeBool(23, generateExplanation_); } + if (encryptionSpec_ != null) { + output.writeMessage(24, getEncryptionSpec()); + } if (explanationSpec_ != null) { output.writeMessage(25, getExplanationSpec()); } @@ -6386,6 +6454,9 @@ public int getSerializedSize() { if (generateExplanation_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(23, generateExplanation_); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(24, getEncryptionSpec()); + } if (explanationSpec_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(25, getExplanationSpec()); } @@ -6469,6 +6540,10 @@ public boolean equals(final java.lang.Object obj) { if (!getUpdateTime().equals(other.getUpdateTime())) return false; } if (!internalGetLabels().equals(other.internalGetLabels())) return false; + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -6554,6 +6629,10 @@ public int hashCode() { hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + internalGetLabels().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -6826,6 +6905,12 @@ public Builder clear() { 
updateTimeBuilder_ = null; } internalGetMutableLabels().clear(); + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } return this; } @@ -6940,6 +7025,11 @@ public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob buildPartial() { } result.labels_ = internalGetLabels(); result.labels_.makeImmutable(); + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } onBuilt(); return result; } @@ -7078,6 +7168,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.BatchPredictionJob mergeUpdateTime(other.getUpdateTime()); } internalGetMutableLabels().mergeFrom(other.internalGetLabels()); + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -8658,15 +8751,18 @@ public Builder clearManualBatchTuningParameters() { * * *
-     * Generate explanation along with the batch prediction results.
-     * When it's true, the batch prediction output will change based on the
-     * [output format][BatchPredictionJob.output_config.predictions_format]:
-     *  * `bigquery`: output will include a column named `explanation`. The value
+     * Generate explanation with the batch prediction results.
+     * When set to `true`, the batch prediction output changes based on the
+     * `predictions_format` field of the
+     * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+     *  * `bigquery`: output includes a column named `explanation`. The value
      *    is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
-     *  * `jsonl`: The JSON objects on each line will include an additional entry
+     *  * `jsonl`: The JSON objects on each line include an additional entry
      *    keyed `explanation`. The value of the entry is a JSON object that
      *    conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
      *  * `csv`: Generating explanations for CSV format is not supported.
+     * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+     * populated.
      * 
* * bool generate_explanation = 23; @@ -8681,15 +8777,18 @@ public boolean getGenerateExplanation() { * * *
-     * Generate explanation along with the batch prediction results.
-     * When it's true, the batch prediction output will change based on the
-     * [output format][BatchPredictionJob.output_config.predictions_format]:
-     *  * `bigquery`: output will include a column named `explanation`. The value
+     * Generate explanation with the batch prediction results.
+     * When set to `true`, the batch prediction output changes based on the
+     * `predictions_format` field of the
+     * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+     *  * `bigquery`: output includes a column named `explanation`. The value
      *    is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
-     *  * `jsonl`: The JSON objects on each line will include an additional entry
+     *  * `jsonl`: The JSON objects on each line include an additional entry
      *    keyed `explanation`. The value of the entry is a JSON object that
      *    conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
      *  * `csv`: Generating explanations for CSV format is not supported.
+     * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+     * populated.
      * 
* * bool generate_explanation = 23; @@ -8707,15 +8806,18 @@ public Builder setGenerateExplanation(boolean value) { * * *
-     * Generate explanation along with the batch prediction results.
-     * When it's true, the batch prediction output will change based on the
-     * [output format][BatchPredictionJob.output_config.predictions_format]:
-     *  * `bigquery`: output will include a column named `explanation`. The value
+     * Generate explanation with the batch prediction results.
+     * When set to `true`, the batch prediction output changes based on the
+     * `predictions_format` field of the
+     * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+     *  * `bigquery`: output includes a column named `explanation`. The value
      *    is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
-     *  * `jsonl`: The JSON objects on each line will include an additional entry
+     *  * `jsonl`: The JSON objects on each line include an additional entry
      *    keyed `explanation`. The value of the entry is a JSON object that
      *    conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
      *  * `csv`: Generating explanations for CSV format is not supported.
+     * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+     * populated.
      * 
* * bool generate_explanation = 23; @@ -8739,15 +8841,12 @@ public Builder clearGenerateExplanation() { * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8761,15 +8860,12 @@ public boolean hasExplanationSpec() { * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8789,15 +8885,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8819,15 +8912,12 @@ public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanatio * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8847,15 +8937,12 @@ public Builder setExplanationSpec( * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8881,15 +8968,12 @@ public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanat * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8909,15 +8993,12 @@ public Builder clearExplanationSpec() { * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8931,15 +9012,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -8958,15 +9036,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio * * *
-     * Explanation configuration for this BatchPredictionJob. Can only be
-     * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-     * specified it with generate_explanation set to false or unset.
+     * Explanation configuration for this BatchPredictionJob. Can be
+     * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
      * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-     * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-     * this Model is not allowed.
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+     * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
      * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -11354,6 +11429,211 @@ public Builder putAllLabels(java.util.Map va return this; } + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + encryptionSpecBuilder_; + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key options for a BatchPredictionJob. If this
+     * is set, then all resources created by the BatchPredictionJob will be
+     * encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java index 98a1d0819..bb249963f 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java @@ -369,15 +369,18 @@ public interface BatchPredictionJobOrBuilder * * *
-   * Generate explanation along with the batch prediction results.
-   * When it's true, the batch prediction output will change based on the
-   * [output format][BatchPredictionJob.output_config.predictions_format]:
-   *  * `bigquery`: output will include a column named `explanation`. The value
+   * Generate explanation with the batch prediction results.
+   * When set to `true`, the batch prediction output changes based on the
+   * `predictions_format` field of the
+   * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+   *  * `bigquery`: output includes a column named `explanation`. The value
    *    is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
-   *  * `jsonl`: The JSON objects on each line will include an additional entry
+   *  * `jsonl`: The JSON objects on each line include an additional entry
    *    keyed `explanation`. The value of the entry is a JSON object that
    *    conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
    *  * `csv`: Generating explanations for CSV format is not supported.
+   * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+   * populated.
    * 
* * bool generate_explanation = 23; @@ -390,15 +393,12 @@ public interface BatchPredictionJobOrBuilder * * *
-   * Explanation configuration for this BatchPredictionJob. Can only be
-   * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-   * specified it with generate_explanation set to false or unset.
+   * Explanation configuration for this BatchPredictionJob. Can be
+   * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
    * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-   * this Model is not allowed.
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+   * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
    * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -410,15 +410,12 @@ public interface BatchPredictionJobOrBuilder * * *
-   * Explanation configuration for this BatchPredictionJob. Can only be
-   * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-   * specified it with generate_explanation set to false or unset.
+   * Explanation configuration for this BatchPredictionJob. Can be
+   * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
    * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-   * this Model is not allowed.
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+   * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
    * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -430,15 +427,12 @@ public interface BatchPredictionJobOrBuilder * * *
-   * Explanation configuration for this BatchPredictionJob. Can only be
-   * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
-   * specified it with generate_explanation set to false or unset.
+   * Explanation configuration for this BatchPredictionJob. Can be
+   * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
    * This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
-   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
-   * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
-   * this Model is not allowed.
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+   * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
    * 
* * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25; @@ -955,4 +949,45 @@ public interface BatchPredictionJobOrBuilder * map<string, string> labels = 19; */ java.lang.String getLabelsOrThrow(java.lang.String key); + + /** + * + * + *
+   * Customer-managed encryption key options for a BatchPredictionJob. If this
+   * is set, then all resources created by the BatchPredictionJob will be
+   * encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key options for a BatchPredictionJob. If this
+   * is set, then all resources created by the BatchPredictionJob will be
+   * encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key options for a BatchPredictionJob. If this
+   * is set, then all resources created by the BatchPredictionJob will be
+   * encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java index fb08c35ec..afffd8030 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java @@ -61,72 +61,75 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "tform.v1beta1\032\037google/api/field_behavior" + ".proto\032\031google/api/resource.proto\0326googl" + "e/cloud/aiplatform/v1beta1/completion_st" - + "ats.proto\0321google/cloud/aiplatform/v1bet" - + "a1/explanation.proto\032(google/cloud/aipla" - + "tform/v1beta1/io.proto\032/google/cloud/aip" - + "latform/v1beta1/job_state.proto\0327google/" - + "cloud/aiplatform/v1beta1/machine_resourc" - + "es.proto\032Dgoogle/cloud/aiplatform/v1beta" - + "1/manual_batch_tuning_parameters.proto\032\034" - + "google/protobuf/struct.proto\032\037google/pro" - + "tobuf/timestamp.proto\032\027google/rpc/status" - + ".proto\032\034google/api/annotations.proto\"\241\020\n" - + "\022BatchPredictionJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031" - + "\n\014display_name\030\002 \001(\tB\003\340A\002\0226\n\005model\030\003 \001(\t" - + "B\'\340A\002\372A!\n\037aiplatform.googleapis.com/Mode" - + "l\022Z\n\014input_config\030\004 \001(\0132?.google.cloud.a" - + "iplatform.v1beta1.BatchPredictionJob.Inp" - + "utConfigB\003\340A\002\0220\n\020model_parameters\030\005 \001(\0132" - + "\026.google.protobuf.Value\022\\\n\routput_config" - + "\030\006 
\001(\0132@.google.cloud.aiplatform.v1beta1" - + ".BatchPredictionJob.OutputConfigB\003\340A\002\022U\n" - + "\023dedicated_resources\030\007 \001(\01328.google.clou" - + "d.aiplatform.v1beta1.BatchDedicatedResou" - + "rces\022i\n\036manual_batch_tuning_parameters\030\010" - + " \001(\0132<.google.cloud.aiplatform.v1beta1.M" - + "anualBatchTuningParametersB\003\340A\005\022\034\n\024gener" - + "ate_explanation\030\027 \001(\010\022J\n\020explanation_spe" - + "c\030\031 \001(\01320.google.cloud.aiplatform.v1beta" - + "1.ExplanationSpec\022X\n\013output_info\030\t \001(\0132>" + + "ats.proto\0325google/cloud/aiplatform/v1bet" + + "a1/encryption_spec.proto\0321google/cloud/a" + + "iplatform/v1beta1/explanation.proto\032(goo" + + "gle/cloud/aiplatform/v1beta1/io.proto\032/g" + + "oogle/cloud/aiplatform/v1beta1/job_state" + + ".proto\0327google/cloud/aiplatform/v1beta1/" + + "machine_resources.proto\032Dgoogle/cloud/ai" + + "platform/v1beta1/manual_batch_tuning_par" + + "ameters.proto\032\034google/protobuf/struct.pr" + + "oto\032\037google/protobuf/timestamp.proto\032\027go" + + "ogle/rpc/status.proto\032\034google/api/annota" + + "tions.proto\"\353\020\n\022BatchPredictionJob\022\021\n\004na" + + "me\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002" + + "\0226\n\005model\030\003 \001(\tB\'\340A\002\372A!\n\037aiplatform.goog" + + "leapis.com/Model\022Z\n\014input_config\030\004 \001(\0132?" 
+ ".google.cloud.aiplatform.v1beta1.BatchPr" - + "edictionJob.OutputInfoB\003\340A\003\022=\n\005state\030\n \001" - + "(\0162).google.cloud.aiplatform.v1beta1.Job" - + "StateB\003\340A\003\022&\n\005error\030\013 \001(\0132\022.google.rpc.S" - + "tatusB\003\340A\003\0221\n\020partial_failures\030\014 \003(\0132\022.g" - + "oogle.rpc.StatusB\003\340A\003\022S\n\022resources_consu" - + "med\030\r \001(\01322.google.cloud.aiplatform.v1be" - + "ta1.ResourcesConsumedB\003\340A\003\022O\n\020completion" - + "_stats\030\016 \001(\01320.google.cloud.aiplatform.v" - + "1beta1.CompletionStatsB\003\340A\003\0224\n\013create_ti" - + "me\030\017 \001(\0132\032.google.protobuf.TimestampB\003\340A" - + "\003\0223\n\nstart_time\030\020 \001(\0132\032.google.protobuf." - + "TimestampB\003\340A\003\0221\n\010end_time\030\021 \001(\0132\032.googl" - + "e.protobuf.TimestampB\003\340A\003\0224\n\013update_time" - + "\030\022 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022" - + "O\n\006labels\030\023 \003(\0132?.google.cloud.aiplatfor" - + "m.v1beta1.BatchPredictionJob.LabelsEntry" - + "\032\304\001\n\013InputConfig\022@\n\ngcs_source\030\002 \001(\0132*.g" - + "oogle.cloud.aiplatform.v1beta1.GcsSource" - + "H\000\022J\n\017bigquery_source\030\003 \001(\0132/.google.clo" - + "ud.aiplatform.v1beta1.BigQuerySourceH\000\022\035" - + "\n\020instances_format\030\001 \001(\tB\003\340A\002B\010\n\006source\032" - + "\340\001\n\014OutputConfig\022J\n\017gcs_destination\030\002 \001(" - + "\0132/.google.cloud.aiplatform.v1beta1.GcsD" - + "estinationH\000\022T\n\024bigquery_destination\030\003 \001" - + "(\01324.google.cloud.aiplatform.v1beta1.Big" - + "QueryDestinationH\000\022\037\n\022predictions_format" - + "\030\001 \001(\tB\003\340A\002B\r\n\013destination\032l\n\nOutputInfo" - + "\022#\n\024gcs_output_directory\030\001 \001(\tB\003\340A\003H\000\022&\n" - + "\027bigquery_output_dataset\030\002 \001(\tB\003\340A\003H\000B\021\n" - + 
"\017output_location\032-\n\013LabelsEntry\022\013\n\003key\030\001" - + " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\206\001\352A\202\001\n,aiplatfo" - + "rm.googleapis.com/BatchPredictionJob\022Rpr" - + "ojects/{project}/locations/{location}/ba" - + "tchPredictionJobs/{batch_prediction_job}" - + "B\213\001\n#com.google.cloud.aiplatform.v1beta1" - + "B\027BatchPredictionJobProtoP\001ZIgoogle.gola" - + "ng.org/genproto/googleapis/cloud/aiplatf" - + "orm/v1beta1;aiplatformb\006proto3" + + "edictionJob.InputConfigB\003\340A\002\0220\n\020model_pa" + + "rameters\030\005 \001(\0132\026.google.protobuf.Value\022\\" + + "\n\routput_config\030\006 \001(\0132@.google.cloud.aip" + + "latform.v1beta1.BatchPredictionJob.Outpu" + + "tConfigB\003\340A\002\022U\n\023dedicated_resources\030\007 \001(" + + "\01328.google.cloud.aiplatform.v1beta1.Batc" + + "hDedicatedResources\022i\n\036manual_batch_tuni" + + "ng_parameters\030\010 \001(\0132<.google.cloud.aipla" + + "tform.v1beta1.ManualBatchTuningParameter" + + "sB\003\340A\005\022\034\n\024generate_explanation\030\027 \001(\010\022J\n\020" + + "explanation_spec\030\031 \001(\01320.google.cloud.ai" + + "platform.v1beta1.ExplanationSpec\022X\n\013outp" + + "ut_info\030\t \001(\0132>.google.cloud.aiplatform." + + "v1beta1.BatchPredictionJob.OutputInfoB\003\340" + + "A\003\022=\n\005state\030\n \001(\0162).google.cloud.aiplatf" + + "orm.v1beta1.JobStateB\003\340A\003\022&\n\005error\030\013 \001(\013" + + "2\022.google.rpc.StatusB\003\340A\003\0221\n\020partial_fai" + + "lures\030\014 \003(\0132\022.google.rpc.StatusB\003\340A\003\022S\n\022" + + "resources_consumed\030\r \001(\01322.google.cloud." 
+ + "aiplatform.v1beta1.ResourcesConsumedB\003\340A" + + "\003\022O\n\020completion_stats\030\016 \001(\01320.google.clo" + + "ud.aiplatform.v1beta1.CompletionStatsB\003\340" + + "A\003\0224\n\013create_time\030\017 \001(\0132\032.google.protobu" + + "f.TimestampB\003\340A\003\0223\n\nstart_time\030\020 \001(\0132\032.g" + + "oogle.protobuf.TimestampB\003\340A\003\0221\n\010end_tim" + + "e\030\021 \001(\0132\032.google.protobuf.TimestampB\003\340A\003" + + "\0224\n\013update_time\030\022 \001(\0132\032.google.protobuf." + + "TimestampB\003\340A\003\022O\n\006labels\030\023 \003(\0132?.google." + + "cloud.aiplatform.v1beta1.BatchPrediction" + + "Job.LabelsEntry\022H\n\017encryption_spec\030\030 \001(\013" + + "2/.google.cloud.aiplatform.v1beta1.Encry" + + "ptionSpec\032\304\001\n\013InputConfig\022@\n\ngcs_source\030" + + "\002 \001(\0132*.google.cloud.aiplatform.v1beta1." + + "GcsSourceH\000\022J\n\017bigquery_source\030\003 \001(\0132/.g" + + "oogle.cloud.aiplatform.v1beta1.BigQueryS" + + "ourceH\000\022\035\n\020instances_format\030\001 \001(\tB\003\340A\002B\010" + + "\n\006source\032\340\001\n\014OutputConfig\022J\n\017gcs_destina" + + "tion\030\002 \001(\0132/.google.cloud.aiplatform.v1b" + + "eta1.GcsDestinationH\000\022T\n\024bigquery_destin" + + "ation\030\003 \001(\01324.google.cloud.aiplatform.v1" + + "beta1.BigQueryDestinationH\000\022\037\n\022predictio" + + "ns_format\030\001 \001(\tB\003\340A\002B\r\n\013destination\032l\n\nO" + + "utputInfo\022#\n\024gcs_output_directory\030\001 \001(\tB" + + "\003\340A\003H\000\022&\n\027bigquery_output_dataset\030\002 \001(\tB" + + "\003\340A\003H\000B\021\n\017output_location\032-\n\013LabelsEntry" + + "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\206\001\352A\202\001\n" + + ",aiplatform.googleapis.com/BatchPredicti" + + "onJob\022Rprojects/{project}/locations/{loc" + + "ation}/batchPredictionJobs/{batch_predic" + + "tion_job}B\213\001\n#com.google.cloud.aiplatfor" + + 
"m.v1beta1B\027BatchPredictionJobProtoP\001ZIgo" + + "ogle.golang.org/genproto/googleapis/clou" + + "d/aiplatform/v1beta1;aiplatformb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -135,6 +138,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.CompletionStatsProto.getDescriptor(), + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(), @@ -172,6 +176,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "EndTime", "UpdateTime", "Labels", + "EncryptionSpec", }); internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_descriptor = internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor @@ -223,6 +228,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.CompletionStatsProto.getDescriptor(); + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java index 21da5207e..7f47accd5 100644 --- 
a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java @@ -117,9 +117,13 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { * * *
-   * Required. BigQuery URI to a project, up to 2000 characters long.
+   * Required. BigQuery URI to a project or table, up to 2000 characters long.
+   * When only project is specified, Dataset and Table is created.
+   * When full table reference is specified, Dataset must exist and table must
+   * not exist.
    * Accepted forms:
-   * *  BigQuery path. For example: `bq://projectId`.
+   * *  BigQuery path. For example:
+   * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
    * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; @@ -142,9 +146,13 @@ public java.lang.String getOutputUri() { * * *
-   * Required. BigQuery URI to a project, up to 2000 characters long.
+   * Required. BigQuery URI to a project or table, up to 2000 characters long.
+   * When only project is specified, Dataset and Table is created.
+   * When full table reference is specified, Dataset must exist and table must
+   * not exist.
    * Accepted forms:
-   * *  BigQuery path. For example: `bq://projectId`.
+   * *  BigQuery path. For example:
+   * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
    * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; @@ -488,9 +496,13 @@ public Builder mergeFrom( * * *
-     * Required. BigQuery URI to a project, up to 2000 characters long.
+     * Required. BigQuery URI to a project or table, up to 2000 characters long.
+     * When only project is specified, Dataset and Table is created.
+     * When full table reference is specified, Dataset must exist and table must
+     * not exist.
      * Accepted forms:
-     * *  BigQuery path. For example: `bq://projectId`.
+     * *  BigQuery path. For example:
+     * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
      * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; @@ -512,9 +524,13 @@ public java.lang.String getOutputUri() { * * *
-     * Required. BigQuery URI to a project, up to 2000 characters long.
+     * Required. BigQuery URI to a project or table, up to 2000 characters long.
+     * When only project is specified, Dataset and Table is created.
+     * When full table reference is specified, Dataset must exist and table must
+     * not exist.
      * Accepted forms:
-     * *  BigQuery path. For example: `bq://projectId`.
+     * *  BigQuery path. For example:
+     * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
      * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; @@ -536,9 +552,13 @@ public com.google.protobuf.ByteString getOutputUriBytes() { * * *
-     * Required. BigQuery URI to a project, up to 2000 characters long.
+     * Required. BigQuery URI to a project or table, up to 2000 characters long.
+     * When only project is specified, Dataset and Table is created.
+     * When full table reference is specified, Dataset must exist and table must
+     * not exist.
      * Accepted forms:
-     * *  BigQuery path. For example: `bq://projectId`.
+     * *  BigQuery path. For example:
+     * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
      * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; @@ -559,9 +579,13 @@ public Builder setOutputUri(java.lang.String value) { * * *
-     * Required. BigQuery URI to a project, up to 2000 characters long.
+     * Required. BigQuery URI to a project or table, up to 2000 characters long.
+     * When only project is specified, Dataset and Table is created.
+     * When full table reference is specified, Dataset must exist and table must
+     * not exist.
      * Accepted forms:
-     * *  BigQuery path. For example: `bq://projectId`.
+     * *  BigQuery path. For example:
+     * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
      * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; @@ -578,9 +602,13 @@ public Builder clearOutputUri() { * * *
-     * Required. BigQuery URI to a project, up to 2000 characters long.
+     * Required. BigQuery URI to a project or table, up to 2000 characters long.
+     * When only project is specified, Dataset and Table is created.
+     * When full table reference is specified, Dataset must exist and table must
+     * not exist.
      * Accepted forms:
-     * *  BigQuery path. For example: `bq://projectId`.
+     * *  BigQuery path. For example:
+     * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
      * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java index aff64073c..0ff99b258 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java @@ -27,9 +27,13 @@ public interface BigQueryDestinationOrBuilder * * *
-   * Required. BigQuery URI to a project, up to 2000 characters long.
+   * Required. BigQuery URI to a project or table, up to 2000 characters long.
+   * When only project is specified, Dataset and Table is created.
+   * When full table reference is specified, Dataset must exist and table must
+   * not exist.
    * Accepted forms:
-   * *  BigQuery path. For example: `bq://projectId`.
+   * *  BigQuery path. For example:
+   * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
    * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; @@ -41,9 +45,13 @@ public interface BigQueryDestinationOrBuilder * * *
-   * Required. BigQuery URI to a project, up to 2000 characters long.
+   * Required. BigQuery URI to a project or table, up to 2000 characters long.
+   * When only project is specified, Dataset and Table is created.
+   * When full table reference is specified, Dataset must exist and table must
+   * not exist.
    * Accepted forms:
-   * *  BigQuery path. For example: `bq://projectId`.
+   * *  BigQuery path. For example:
+   * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
    * 
* * string output_uri = 1 [(.google.api.field_behavior) = REQUIRED]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java index 469374d81..058277cd5 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java @@ -199,6 +199,23 @@ private CustomJob( input.readMessage( LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); labels_.getMutableMap().put(labels__.getKey(), labels__.getValue()); + break; + } + case 98: + { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null; + if (encryptionSpec_ != null) { + subBuilder = encryptionSpec_.toBuilder(); + } + encryptionSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionSpec_); + encryptionSpec_ = subBuilder.buildPartial(); + } + break; } default: @@ -807,6 +824,60 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) { return map.get(key); } + public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 12; + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + /** + * + * + *
+   * Customer-managed encryption key options for a CustomJob. If this is set,
+   * then all resources created by the CustomJob will be encrypted with the
+   * provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key options for a CustomJob. If this is set,
+   * then all resources created by the CustomJob will be encrypted with the
+   * provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key options for a CustomJob. If this is set,
+   * then all resources created by the CustomJob will be encrypted with the
+   * provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -850,6 +921,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io } com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 11); + if (encryptionSpec_ != null) { + output.writeMessage(12, getEncryptionSpec()); + } unknownFields.writeTo(output); } @@ -896,6 +970,9 @@ public int getSerializedSize() { .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, labels__); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(12, getEncryptionSpec()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -940,6 +1017,10 @@ public boolean equals(final java.lang.Object obj) { if (!getError().equals(other.getError())) return false; } if (!internalGetLabels().equals(other.internalGetLabels())) return false; + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -985,6 +1066,10 @@ public int hashCode() { hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + internalGetLabels().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1196,6 +1281,12 @@ public Builder clear() { errorBuilder_ = null; } internalGetMutableLabels().clear(); + if (encryptionSpecBuilder_ == null) { + 
encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } return this; } @@ -1259,6 +1350,11 @@ public com.google.cloud.aiplatform.v1beta1.CustomJob buildPartial() { } result.labels_ = internalGetLabels(); result.labels_.makeImmutable(); + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } onBuilt(); return result; } @@ -1338,6 +1434,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.CustomJob other) { mergeError(other.getError()); } internalGetMutableLabels().mergeFrom(other.internalGetLabels()); + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -3059,6 +3158,211 @@ public Builder putAllLabels(java.util.Map va return this; } + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + encryptionSpecBuilder_; + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key options for a CustomJob. If this is set,
+     * then all resources created by the CustomJob will be encrypted with the
+     * provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java index 23e21ff9d..516d9aa0a 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java @@ -416,4 +416,45 @@ public interface CustomJobOrBuilder * map<string, string> labels = 11; */ java.lang.String getLabelsOrThrow(java.lang.String key); + + /** + * + * + *
+   * Customer-managed encryption key options for a CustomJob. If this is set,
+   * then all resources created by the CustomJob will be encrypted with the
+   * provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key options for a CustomJob. If this is set,
+   * then all resources created by the CustomJob will be encrypted with the
+   * provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key options for a CustomJob. If this is set,
+   * then all resources created by the CustomJob will be encrypted with the
+   * provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java index 0261623f3..78f43fce9 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java @@ -67,58 +67,62 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "\n0google/cloud/aiplatform/v1beta1/custom" + "_job.proto\022\037google.cloud.aiplatform.v1be" + "ta1\032\037google/api/field_behavior.proto\032\031go" - + "ogle/api/resource.proto\032-google/cloud/ai" - + "platform/v1beta1/env_var.proto\032(google/c" - + "loud/aiplatform/v1beta1/io.proto\032/google" - + "/cloud/aiplatform/v1beta1/job_state.prot" - + "o\0327google/cloud/aiplatform/v1beta1/machi" - + "ne_resources.proto\032\036google/protobuf/dura" - + "tion.proto\032\037google/protobuf/timestamp.pr" - + "oto\032\027google/rpc/status.proto\032\034google/api" - + "/annotations.proto\"\235\005\n\tCustomJob\022\021\n\004name" - + "\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022E" - + "\n\010job_spec\030\004 \001(\0132..google.cloud.aiplatfo" - + "rm.v1beta1.CustomJobSpecB\003\340A\002\022=\n\005state\030\005" - + " \001(\0162).google.cloud.aiplatform.v1beta1.J" - + "obStateB\003\340A\003\0224\n\013create_time\030\006 \001(\0132\032.goog" - + "le.protobuf.TimestampB\003\340A\003\0223\n\nstart_time" - + "\030\007 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022" - + "1\n\010end_time\030\010 \001(\0132\032.google.protobuf.Time" - + 
"stampB\003\340A\003\0224\n\013update_time\030\t \001(\0132\032.google" - + ".protobuf.TimestampB\003\340A\003\022&\n\005error\030\n \001(\0132" - + "\022.google.rpc.StatusB\003\340A\003\022F\n\006labels\030\013 \003(\013" - + "26.google.cloud.aiplatform.v1beta1.Custo" - + "mJob.LabelsEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001" - + " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:i\352Af\n#aiplatform" - + ".googleapis.com/CustomJob\022?projects/{pro" - + "ject}/locations/{location}/customJobs/{c" - + "ustom_job}\"\233\002\n\rCustomJobSpec\022O\n\021worker_p" - + "ool_specs\030\001 \003(\0132/.google.cloud.aiplatfor" - + "m.v1beta1.WorkerPoolSpecB\003\340A\002\022?\n\nschedul" - + "ing\030\003 \001(\0132+.google.cloud.aiplatform.v1be" - + "ta1.Scheduling\022\027\n\017service_account\030\004 \001(\t\022" - + "\017\n\007network\030\005 \001(\t\022N\n\025base_output_director" - + "y\030\006 \001(\0132/.google.cloud.aiplatform.v1beta" - + "1.GcsDestination\"\333\002\n\016WorkerPoolSpec\022H\n\016c" - + "ontainer_spec\030\006 \001(\0132..google.cloud.aipla" - + "tform.v1beta1.ContainerSpecH\000\022Q\n\023python_" - + "package_spec\030\007 \001(\01322.google.cloud.aiplat" - + "form.v1beta1.PythonPackageSpecH\000\022J\n\014mach" - + "ine_spec\030\001 \001(\0132,.google.cloud.aiplatform" - + ".v1beta1.MachineSpecB\006\340A\002\340A\005\022\032\n\rreplica_" - + "count\030\002 \001(\003B\003\340A\002\022<\n\tdisk_spec\030\005 \001(\0132).go" - + "ogle.cloud.aiplatform.v1beta1.DiskSpecB\006" - + "\n\004task\"F\n\rContainerSpec\022\026\n\timage_uri\030\001 \001" - + "(\tB\003\340A\002\022\017\n\007command\030\002 \003(\t\022\014\n\004args\030\003 \003(\t\"y" - + "\n\021PythonPackageSpec\022\037\n\022executor_image_ur" - + "i\030\001 \001(\tB\003\340A\002\022\031\n\014package_uris\030\002 \003(\tB\003\340A\002\022" - + "\032\n\rpython_module\030\003 \001(\tB\003\340A\002\022\014\n\004args\030\004 \003(" - + "\t\"_\n\nScheduling\022*\n\007timeout\030\001 
\001(\0132\031.googl" - + "e.protobuf.Duration\022%\n\035restart_job_on_wo" - + "rker_restart\030\003 \001(\010B\202\001\n#com.google.cloud." - + "aiplatform.v1beta1B\016CustomJobProtoP\001ZIgo" - + "ogle.golang.org/genproto/googleapis/clou" - + "d/aiplatform/v1beta1;aiplatformb\006proto3" + + "ogle/api/resource.proto\0325google/cloud/ai" + + "platform/v1beta1/encryption_spec.proto\032-" + + "google/cloud/aiplatform/v1beta1/env_var." + + "proto\032(google/cloud/aiplatform/v1beta1/i" + + "o.proto\032/google/cloud/aiplatform/v1beta1" + + "/job_state.proto\0327google/cloud/aiplatfor" + + "m/v1beta1/machine_resources.proto\032\036googl" + + "e/protobuf/duration.proto\032\037google/protob" + + "uf/timestamp.proto\032\027google/rpc/status.pr" + + "oto\032\034google/api/annotations.proto\"\347\005\n\tCu" + + "stomJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_na" + + "me\030\002 \001(\tB\003\340A\002\022E\n\010job_spec\030\004 \001(\0132..google" + + ".cloud.aiplatform.v1beta1.CustomJobSpecB" + + "\003\340A\002\022=\n\005state\030\005 \001(\0162).google.cloud.aipla" + + "tform.v1beta1.JobStateB\003\340A\003\0224\n\013create_ti" + + "me\030\006 \001(\0132\032.google.protobuf.TimestampB\003\340A" + + "\003\0223\n\nstart_time\030\007 \001(\0132\032.google.protobuf." 
+ + "TimestampB\003\340A\003\0221\n\010end_time\030\010 \001(\0132\032.googl" + + "e.protobuf.TimestampB\003\340A\003\0224\n\013update_time" + + "\030\t \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022" + + "&\n\005error\030\n \001(\0132\022.google.rpc.StatusB\003\340A\003\022" + + "F\n\006labels\030\013 \003(\01326.google.cloud.aiplatfor" + + "m.v1beta1.CustomJob.LabelsEntry\022H\n\017encry" + + "ption_spec\030\014 \001(\0132/.google.cloud.aiplatfo" + + "rm.v1beta1.EncryptionSpec\032-\n\013LabelsEntry" + + "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:i\352Af\n#a" + + "iplatform.googleapis.com/CustomJob\022?proj" + + "ects/{project}/locations/{location}/cust" + + "omJobs/{custom_job}\"\233\002\n\rCustomJobSpec\022O\n" + + "\021worker_pool_specs\030\001 \003(\0132/.google.cloud." + + "aiplatform.v1beta1.WorkerPoolSpecB\003\340A\002\022?" + + "\n\nscheduling\030\003 \001(\0132+.google.cloud.aiplat" + + "form.v1beta1.Scheduling\022\027\n\017service_accou" + + "nt\030\004 \001(\t\022\017\n\007network\030\005 \001(\t\022N\n\025base_output" + + "_directory\030\006 \001(\0132/.google.cloud.aiplatfo" + + "rm.v1beta1.GcsDestination\"\333\002\n\016WorkerPool" + + "Spec\022H\n\016container_spec\030\006 \001(\0132..google.cl" + + "oud.aiplatform.v1beta1.ContainerSpecH\000\022Q" + + "\n\023python_package_spec\030\007 \001(\01322.google.clo" + + "ud.aiplatform.v1beta1.PythonPackageSpecH" + + "\000\022J\n\014machine_spec\030\001 \001(\0132,.google.cloud.a" + + "iplatform.v1beta1.MachineSpecB\006\340A\001\340A\005\022\032\n" + + "\rreplica_count\030\002 \001(\003B\003\340A\001\022<\n\tdisk_spec\030\005" + + " \001(\0132).google.cloud.aiplatform.v1beta1.D" + + "iskSpecB\006\n\004task\"F\n\rContainerSpec\022\026\n\timag" + + "e_uri\030\001 \001(\tB\003\340A\002\022\017\n\007command\030\002 \003(\t\022\014\n\004arg" + + "s\030\003 \003(\t\"y\n\021PythonPackageSpec\022\037\n\022executor" + + "_image_uri\030\001 
\001(\tB\003\340A\002\022\031\n\014package_uris\030\002 " + + "\003(\tB\003\340A\002\022\032\n\rpython_module\030\003 \001(\tB\003\340A\002\022\014\n\004" + + "args\030\004 \003(\t\"_\n\nScheduling\022*\n\007timeout\030\001 \001(" + + "\0132\031.google.protobuf.Duration\022%\n\035restart_" + + "job_on_worker_restart\030\003 \001(\010B\202\001\n#com.goog" + + "le.cloud.aiplatform.v1beta1B\016CustomJobPr" + + "otoP\001ZIgoogle.golang.org/genproto/google" + + "apis/cloud/aiplatform/v1beta1;aiplatform" + + "b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -126,6 +130,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(), @@ -151,6 +156,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "UpdateTime", "Error", "Labels", + "EncryptionSpec", }); internal_static_google_cloud_aiplatform_v1beta1_CustomJob_LabelsEntry_descriptor = internal_static_google_cloud_aiplatform_v1beta1_CustomJob_descriptor @@ -215,6 +221,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { descriptor, registry); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(); diff --git 
a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java index 137e2116c..b0c020694 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java @@ -307,6 +307,8 @@ public com.google.cloud.aiplatform.v1beta1.SchedulingOrBuilder getSchedulingOrBu *
    * Specifies the service account for workload run-as account.
    * Users submitting jobs must have act-as permission on this run-as account.
+   * If unspecified, the AI Platform Custom Code Service Agent for the
+   * CustomJob's project is used.
    * 
* * string service_account = 4; @@ -331,6 +333,8 @@ public java.lang.String getServiceAccount() { *
    * Specifies the service account for workload run-as account.
    * Users submitting jobs must have act-as permission on this run-as account.
+   * If unspecified, the AI Platform Custom Code Service Agent for the
+   * CustomJob's project is used.
    * 
* * string service_account = 4; @@ -358,11 +362,10 @@ public com.google.protobuf.ByteString getServiceAccountBytes() { *
    * The full name of the Compute Engine
    * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-   * should be peered. For example, projects/12345/global/networks/myVPC.
-   * [Format](https:
-   * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-   * is of the form projects/{project}/global/networks/{network}.
-   * Where {project} is a project number, as in '12345', and {network} is
+   * should be peered. For example, `projects/12345/global/networks/myVPC`.
+   * [Format](/compute/docs/reference/rest/v1/networks/insert)
+   * is of the form `projects/{project}/global/networks/{network}`.
+   * Where {project} is a project number, as in `12345`, and {network} is a
    * network name.
    * Private services access must already be configured for the network. If left
    * unspecified, the job is not peered with any network.
@@ -390,11 +393,10 @@ public java.lang.String getNetwork() {
    * 
    * The full name of the Compute Engine
    * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-   * should be peered. For example, projects/12345/global/networks/myVPC.
-   * [Format](https:
-   * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-   * is of the form projects/{project}/global/networks/{network}.
-   * Where {project} is a project number, as in '12345', and {network} is
+   * should be peered. For example, `projects/12345/global/networks/myVPC`.
+   * [Format](/compute/docs/reference/rest/v1/networks/insert)
+   * is of the form `projects/{project}/global/networks/{network}`.
+   * Where {project} is a project number, as in `12345`, and {network} is a
    * network name.
    * Private services access must already be configured for the network. If left
    * unspecified, the job is not peered with any network.
@@ -423,14 +425,14 @@ public com.google.protobuf.ByteString getNetworkBytes() {
    *
    *
    * 
-   * The Google Cloud Storage location to store the output of this CustomJob or
+   * The Cloud Storage location to store the output of this CustomJob or
    * HyperparameterTuningJob. For HyperparameterTuningJob,
-   * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+   * the baseOutputDirectory of
    * each child CustomJob backing a Trial is set to a subdirectory of name
-   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-   * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-   * Following AI Platform environment variables will be passed to containers or
-   * python modules when this field is set:
+   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+   * baseOutputDirectory.
+   * The following AI Platform environment variables will be passed to
+   * containers or python modules when this field is set:
    *   For CustomJob:
    *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
    *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -453,14 +455,14 @@ public boolean hasBaseOutputDirectory() {
    *
    *
    * 
-   * The Google Cloud Storage location to store the output of this CustomJob or
+   * The Cloud Storage location to store the output of this CustomJob or
    * HyperparameterTuningJob. For HyperparameterTuningJob,
-   * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+   * the baseOutputDirectory of
    * each child CustomJob backing a Trial is set to a subdirectory of name
-   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-   * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-   * Following AI Platform environment variables will be passed to containers or
-   * python modules when this field is set:
+   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+   * baseOutputDirectory.
+   * The following AI Platform environment variables will be passed to
+   * containers or python modules when this field is set:
    *   For CustomJob:
    *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
    *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -485,14 +487,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getBaseOutputDirectory
    *
    *
    * 
-   * The Google Cloud Storage location to store the output of this CustomJob or
+   * The Cloud Storage location to store the output of this CustomJob or
    * HyperparameterTuningJob. For HyperparameterTuningJob,
-   * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+   * the baseOutputDirectory of
    * each child CustomJob backing a Trial is set to a subdirectory of name
-   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-   * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-   * Following AI Platform environment variables will be passed to containers or
-   * python modules when this field is set:
+   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+   * baseOutputDirectory.
+   * The following AI Platform environment variables will be passed to
+   * containers or python modules when this field is set:
    *   For CustomJob:
    *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
    *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1550,6 +1552,8 @@ public com.google.cloud.aiplatform.v1beta1.SchedulingOrBuilder getSchedulingOrBu
      * 
      * Specifies the service account for workload run-as account.
      * Users submitting jobs must have act-as permission on this run-as account.
+     * If unspecified, the AI Platform Custom Code Service Agent for the
+     * CustomJob's project is used.
      * 
* * string service_account = 4; @@ -1573,6 +1577,8 @@ public java.lang.String getServiceAccount() { *
      * Specifies the service account for workload run-as account.
      * Users submitting jobs must have act-as permission on this run-as account.
+     * If unspecified, the AI Platform Custom Code Service Agent for the
+     * CustomJob's project is used.
      * 
* * string service_account = 4; @@ -1596,6 +1602,8 @@ public com.google.protobuf.ByteString getServiceAccountBytes() { *
      * Specifies the service account for workload run-as account.
      * Users submitting jobs must have act-as permission on this run-as account.
+     * If unspecified, the AI Platform Custom Code Service Agent for the
+     * CustomJob's project is used.
      * 
* * string service_account = 4; @@ -1618,6 +1626,8 @@ public Builder setServiceAccount(java.lang.String value) { *
      * Specifies the service account for workload run-as account.
      * Users submitting jobs must have act-as permission on this run-as account.
+     * If unspecified, the AI Platform Custom Code Service Agent for the
+     * CustomJob's project is used.
      * 
* * string service_account = 4; @@ -1636,6 +1646,8 @@ public Builder clearServiceAccount() { *
      * Specifies the service account for workload run-as account.
      * Users submitting jobs must have act-as permission on this run-as account.
+     * If unspecified, the AI Platform Custom Code Service Agent for the
+     * CustomJob's project is used.
      * 
* * string service_account = 4; @@ -1661,11 +1673,10 @@ public Builder setServiceAccountBytes(com.google.protobuf.ByteString value) { *
      * The full name of the Compute Engine
      * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-     * should be peered. For example, projects/12345/global/networks/myVPC.
-     * [Format](https:
-     * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-     * is of the form projects/{project}/global/networks/{network}.
-     * Where {project} is a project number, as in '12345', and {network} is
+     * should be peered. For example, `projects/12345/global/networks/myVPC`.
+     * [Format](/compute/docs/reference/rest/v1/networks/insert)
+     * is of the form `projects/{project}/global/networks/{network}`.
+     * Where {project} is a project number, as in `12345`, and {network} is a
      * network name.
      * Private services access must already be configured for the network. If left
      * unspecified, the job is not peered with any network.
@@ -1692,11 +1703,10 @@ public java.lang.String getNetwork() {
      * 
      * The full name of the Compute Engine
      * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-     * should be peered. For example, projects/12345/global/networks/myVPC.
-     * [Format](https:
-     * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-     * is of the form projects/{project}/global/networks/{network}.
-     * Where {project} is a project number, as in '12345', and {network} is
+     * should be peered. For example, `projects/12345/global/networks/myVPC`.
+     * [Format](/compute/docs/reference/rest/v1/networks/insert)
+     * is of the form `projects/{project}/global/networks/{network}`.
+     * Where {project} is a project number, as in `12345`, and {network} is a
      * network name.
      * Private services access must already be configured for the network. If left
      * unspecified, the job is not peered with any network.
@@ -1723,11 +1733,10 @@ public com.google.protobuf.ByteString getNetworkBytes() {
      * 
      * The full name of the Compute Engine
      * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-     * should be peered. For example, projects/12345/global/networks/myVPC.
-     * [Format](https:
-     * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-     * is of the form projects/{project}/global/networks/{network}.
-     * Where {project} is a project number, as in '12345', and {network} is
+     * should be peered. For example, `projects/12345/global/networks/myVPC`.
+     * [Format](/compute/docs/reference/rest/v1/networks/insert)
+     * is of the form `projects/{project}/global/networks/{network}`.
+     * Where {project} is a project number, as in `12345`, and {network} is a
      * network name.
      * Private services access must already be configured for the network. If left
      * unspecified, the job is not peered with any network.
@@ -1753,11 +1762,10 @@ public Builder setNetwork(java.lang.String value) {
      * 
      * The full name of the Compute Engine
      * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-     * should be peered. For example, projects/12345/global/networks/myVPC.
-     * [Format](https:
-     * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-     * is of the form projects/{project}/global/networks/{network}.
-     * Where {project} is a project number, as in '12345', and {network} is
+     * should be peered. For example, `projects/12345/global/networks/myVPC`.
+     * [Format](/compute/docs/reference/rest/v1/networks/insert)
+     * is of the form `projects/{project}/global/networks/{network}`.
+     * Where {project} is a project number, as in `12345`, and {network} is a
      * network name.
      * Private services access must already be configured for the network. If left
      * unspecified, the job is not peered with any network.
@@ -1779,11 +1787,10 @@ public Builder clearNetwork() {
      * 
      * The full name of the Compute Engine
      * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-     * should be peered. For example, projects/12345/global/networks/myVPC.
-     * [Format](https:
-     * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-     * is of the form projects/{project}/global/networks/{network}.
-     * Where {project} is a project number, as in '12345', and {network} is
+     * should be peered. For example, `projects/12345/global/networks/myVPC`.
+     * [Format](/compute/docs/reference/rest/v1/networks/insert)
+     * is of the form `projects/{project}/global/networks/{network}`.
+     * Where {project} is a project number, as in `12345`, and {network} is a
      * network name.
      * Private services access must already be configured for the network. If left
      * unspecified, the job is not peered with any network.
@@ -1815,14 +1822,14 @@ public Builder setNetworkBytes(com.google.protobuf.ByteString value) {
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1844,14 +1851,14 @@ public boolean hasBaseOutputDirectory() {
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1879,14 +1886,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getBaseOutputDirectory
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1917,14 +1924,14 @@ public Builder setBaseOutputDirectory(
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1952,14 +1959,14 @@ public Builder setBaseOutputDirectory(
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1994,14 +2001,14 @@ public Builder mergeBaseOutputDirectory(
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -2029,14 +2036,14 @@ public Builder clearBaseOutputDirectory() {
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -2059,14 +2066,14 @@ public Builder clearBaseOutputDirectory() {
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -2093,14 +2100,14 @@ public Builder clearBaseOutputDirectory() {
      *
      *
      * 
-     * The Google Cloud Storage location to store the output of this CustomJob or
+     * The Cloud Storage location to store the output of this CustomJob or
      * HyperparameterTuningJob. For HyperparameterTuningJob,
-     * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+     * the baseOutputDirectory of
      * each child CustomJob backing a Trial is set to a subdirectory of name
-     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-     * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-     * Following AI Platform environment variables will be passed to containers or
-     * python modules when this field is set:
+     * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+     * baseOutputDirectory.
+     * The following AI Platform environment variables will be passed to
+     * containers or python modules when this field is set:
      *   For CustomJob:
      *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
      *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java
index a6c089ef2..483100384 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java
@@ -127,6 +127,8 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
    * 
    * Specifies the service account for workload run-as account.
    * Users submitting jobs must have act-as permission on this run-as account.
+   * If unspecified, the AI Platform Custom Code Service Agent for the
+   * CustomJob's project is used.
    * 
* * string service_account = 4; @@ -140,6 +142,8 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr *
    * Specifies the service account for workload run-as account.
    * Users submitting jobs must have act-as permission on this run-as account.
+   * If unspecified, the AI Platform Custom Code Service Agent for the
+   * CustomJob's project is used.
    * 
* * string service_account = 4; @@ -154,11 +158,10 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr *
    * The full name of the Compute Engine
    * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-   * should be peered. For example, projects/12345/global/networks/myVPC.
-   * [Format](https:
-   * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-   * is of the form projects/{project}/global/networks/{network}.
-   * Where {project} is a project number, as in '12345', and {network} is
+   * should be peered. For example, `projects/12345/global/networks/myVPC`.
+   * [Format](/compute/docs/reference/rest/v1/networks/insert)
+   * is of the form `projects/{project}/global/networks/{network}`.
+   * Where {project} is a project number, as in `12345`, and {network} is a
    * network name.
    * Private services access must already be configured for the network. If left
    * unspecified, the job is not peered with any network.
@@ -175,11 +178,10 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
    * 
    * The full name of the Compute Engine
    * [network](/compute/docs/networks-and-firewalls#networks) to which the Job
-   * should be peered. For example, projects/12345/global/networks/myVPC.
-   * [Format](https:
-   * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
-   * is of the form projects/{project}/global/networks/{network}.
-   * Where {project} is a project number, as in '12345', and {network} is
+   * should be peered. For example, `projects/12345/global/networks/myVPC`.
+   * [Format](/compute/docs/reference/rest/v1/networks/insert)
+   * is of the form `projects/{project}/global/networks/{network}`.
+   * Where {project} is a project number, as in `12345`, and {network} is a
    * network name.
    * Private services access must already be configured for the network. If left
    * unspecified, the job is not peered with any network.
@@ -195,14 +197,14 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
    *
    *
    * 
-   * The Google Cloud Storage location to store the output of this CustomJob or
+   * The Cloud Storage location to store the output of this CustomJob or
    * HyperparameterTuningJob. For HyperparameterTuningJob,
-   * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+   * the baseOutputDirectory of
    * each child CustomJob backing a Trial is set to a subdirectory of name
-   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-   * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-   * Following AI Platform environment variables will be passed to containers or
-   * python modules when this field is set:
+   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+   * baseOutputDirectory.
+   * The following AI Platform environment variables will be passed to
+   * containers or python modules when this field is set:
    *   For CustomJob:
    *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
    *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -222,14 +224,14 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
    *
    *
    * 
-   * The Google Cloud Storage location to store the output of this CustomJob or
+   * The Cloud Storage location to store the output of this CustomJob or
    * HyperparameterTuningJob. For HyperparameterTuningJob,
-   * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+   * the baseOutputDirectory of
    * each child CustomJob backing a Trial is set to a subdirectory of name
-   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-   * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-   * Following AI Platform environment variables will be passed to containers or
-   * python modules when this field is set:
+   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+   * baseOutputDirectory.
+   * The following AI Platform environment variables will be passed to
+   * containers or python modules when this field is set:
    *   For CustomJob:
    *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
    *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -249,14 +251,14 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
    *
    *
    * 
-   * The Google Cloud Storage location to store the output of this CustomJob or
+   * The Cloud Storage location to store the output of this CustomJob or
    * HyperparameterTuningJob. For HyperparameterTuningJob,
-   * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+   * the baseOutputDirectory of
    * each child CustomJob backing a Trial is set to a subdirectory of name
-   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
-   * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
-   * Following AI Platform environment variables will be passed to containers or
-   * python modules when this field is set:
+   * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+   * baseOutputDirectory.
+   * The following AI Platform environment variables will be passed to
+   * containers or python modules when this field is set:
    *   For CustomJob:
    *   * AIP_MODEL_DIR = `<base_output_directory>/model/`
    *   * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java
index 6e0fe044c..2df6ab022 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java
@@ -229,6 +229,23 @@ private DataLabelingJob(
                 mutable_bitField0_ |= 0x00000008;
               }
               specialistPools_.add(s);
+              break;
+            }
+          case 162:
+            {
+              com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+              if (encryptionSpec_ != null) {
+                subBuilder = encryptionSpec_.toBuilder();
+              }
+              encryptionSpec_ =
+                  input.readMessage(
+                      com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+                      extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(encryptionSpec_);
+                encryptionSpec_ = subBuilder.buildPartial();
+              }
+
               break;
             }
           case 170:
@@ -1248,13 +1265,70 @@ public com.google.protobuf.ByteString getSpecialistPoolsBytes(int index) {
     return specialistPools_.getByteString(index);
   }
 
+  public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 20;
+  private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+  /**
+   *
+   *
+   * 
+   * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+   * DataLabelingJob will be secured by this key.
+   * Note: Annotations created in the DataLabelingJob are associated with
+   * the EncryptionSpec of the Dataset they are exported to.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+   * DataLabelingJob will be secured by this key.
+   * Note: Annotations created in the DataLabelingJob are associated with
+   * the EncryptionSpec of the Dataset they are exported to.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+   * DataLabelingJob will be secured by this key.
+   * Note: Annotations created in the DataLabelingJob are associated with
+   * the EncryptionSpec of the Dataset they are exported to.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + public static final int ACTIVE_LEARNING_CONFIG_FIELD_NUMBER = 21; private com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig activeLearningConfig_; /** * * *
-   * Paramaters that configure active learning pipeline. Active learning will
+   * Parameters that configure active learning pipeline. Active learning will
    * label the data incrementally via several iterations. For every iteration,
    * it will select a batch of data based on the sampling strategy.
    * 
@@ -1271,7 +1345,7 @@ public boolean hasActiveLearningConfig() { * * *
-   * Paramaters that configure active learning pipeline. Active learning will
+   * Parameters that configure active learning pipeline. Active learning will
    * label the data incrementally via several iterations. For every iteration,
    * it will select a batch of data based on the sampling strategy.
    * 
@@ -1290,7 +1364,7 @@ public com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig getActiveLearnin * * *
-   * Paramaters that configure active learning pipeline. Active learning will
+   * Parameters that configure active learning pipeline. Active learning will
    * label the data incrementally via several iterations. For every iteration,
    * it will select a batch of data based on the sampling strategy.
    * 
@@ -1360,6 +1434,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io for (int i = 0; i < specialistPools_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 16, specialistPools_.getRaw(i)); } + if (encryptionSpec_ != null) { + output.writeMessage(20, getEncryptionSpec()); + } if (activeLearningConfig_ != null) { output.writeMessage(21, getActiveLearningConfig()); } @@ -1444,6 +1521,9 @@ public int getSerializedSize() { size += dataSize; size += 2 * getSpecialistPoolsList().size(); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(20, getEncryptionSpec()); + } if (activeLearningConfig_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(21, getActiveLearningConfig()); @@ -1498,6 +1578,10 @@ public boolean equals(final java.lang.Object obj) { } if (!internalGetLabels().equals(other.internalGetLabels())) return false; if (!getSpecialistPoolsList().equals(other.getSpecialistPoolsList())) return false; + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (hasActiveLearningConfig() != other.hasActiveLearningConfig()) return false; if (hasActiveLearningConfig()) { if (!getActiveLearningConfig().equals(other.getActiveLearningConfig())) return false; @@ -1563,6 +1647,10 @@ public int hashCode() { hash = (37 * hash) + SPECIALIST_POOLS_FIELD_NUMBER; hash = (53 * hash) + getSpecialistPoolsList().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } if (hasActiveLearningConfig()) { hash = (37 * hash) + ACTIVE_LEARNING_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getActiveLearningConfig().hashCode(); @@ -1787,6 +1875,12 @@ public Builder clear() { internalGetMutableLabels().clear(); specialistPools_ = 
com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } if (activeLearningConfigBuilder_ == null) { activeLearningConfig_ = null; } else { @@ -1867,6 +1961,11 @@ public com.google.cloud.aiplatform.v1beta1.DataLabelingJob buildPartial() { bitField0_ = (bitField0_ & ~0x00000008); } result.specialistPools_ = specialistPools_; + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } if (activeLearningConfigBuilder_ == null) { result.activeLearningConfig_ = activeLearningConfig_; } else { @@ -1984,6 +2083,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.DataLabelingJob oth } onChanged(); } + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } if (other.hasActiveLearningConfig()) { mergeActiveLearningConfig(other.getActiveLearningConfig()); } @@ -4450,6 +4552,220 @@ public Builder addSpecialistPoolsBytes(com.google.protobuf.ByteString value) { return this; } + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + encryptionSpecBuilder_; + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+     * DataLabelingJob will be secured by this key.
+     * Note: Annotations created in the DataLabelingJob are associated with
+     * the EncryptionSpec of the Dataset they are exported to.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + private com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig activeLearningConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig, @@ -4460,7 +4776,7 @@ public Builder addSpecialistPoolsBytes(com.google.protobuf.ByteString value) { * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4477,7 +4793,7 @@ public boolean hasActiveLearningConfig() { * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4500,7 +4816,7 @@ public com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig getActiveLearnin * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4526,7 +4842,7 @@ public Builder setActiveLearningConfig( * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4549,7 +4865,7 @@ public Builder setActiveLearningConfig( * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4580,7 +4896,7 @@ public Builder mergeActiveLearningConfig( * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4603,7 +4919,7 @@ public Builder clearActiveLearningConfig() { * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4621,7 +4937,7 @@ public Builder clearActiveLearningConfig() { * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
@@ -4643,7 +4959,7 @@ public Builder clearActiveLearningConfig() { * * *
-     * Paramaters that configure active learning pipeline. Active learning will
+     * Parameters that configure active learning pipeline. Active learning will
      * label the data incrementally via several iterations. For every iteration,
      * it will select a batch of data based on the sampling strategy.
      * 
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java index d14e83ae7..5e4ffea31 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java @@ -692,7 +692,51 @@ java.lang.String getAnnotationLabelsOrDefault( * * *
-   * Paramaters that configure active learning pipeline. Active learning will
+   * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+   * DataLabelingJob will be secured by this key.
+   * Note: Annotations created in the DataLabelingJob are associated with
+   * the EncryptionSpec of the Dataset they are exported to.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+   * DataLabelingJob will be secured by this key.
+   * Note: Annotations created in the DataLabelingJob are associated with
+   * the EncryptionSpec of the Dataset they are exported to.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+   * DataLabelingJob will be secured by this key.
+   * Note: Annotations created in the DataLabelingJob are associated with
+   * the EncryptionSpec of the Dataset they are exported to.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); + + /** + * + * + *
+   * Parameters that configure active learning pipeline. Active learning will
    * label the data incrementally via several iterations. For every iteration,
    * it will select a batch of data based on the sampling strategy.
    * 
@@ -706,7 +750,7 @@ java.lang.String getAnnotationLabelsOrDefault( * * *
-   * Paramaters that configure active learning pipeline. Active learning will
+   * Parameters that configure active learning pipeline. Active learning will
    * label the data incrementally via several iterations. For every iteration,
    * it will select a batch of data based on the sampling strategy.
    * 
@@ -720,7 +764,7 @@ java.lang.String getAnnotationLabelsOrDefault( * * *
-   * Paramaters that configure active learning pipeline. Active learning will
+   * Parameters that configure active learning pipeline. Active learning will
    * label the data incrementally via several iterations. For every iteration,
    * it will select a batch of data based on the sampling strategy.
    * 
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java index ab991290d..3fb48edbb 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java @@ -65,59 +65,62 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "rm.v1beta1\032\037google/api/field_behavior.pr" + "oto\032\031google/api/resource.proto\0326google/c" + "loud/aiplatform/v1beta1/accelerator_type" - + ".proto\032/google/cloud/aiplatform/v1beta1/" - + "job_state.proto\0325google/cloud/aiplatform" - + "/v1beta1/specialist_pool.proto\032\034google/p" - + "rotobuf/struct.proto\032\037google/protobuf/ti" - + "mestamp.proto\032\027google/rpc/status.proto\032\027" - + "google/type/money.proto\032\034google/api/anno" - + "tations.proto\"\256\010\n\017DataLabelingJob\022\021\n\004nam" - + "e\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022" - + ";\n\010datasets\030\003 \003(\tB)\340A\002\372A#\n!aiplatform.go" - + "ogleapis.com/Dataset\022a\n\021annotation_label" - + "s\030\014 \003(\0132F.google.cloud.aiplatform.v1beta" - + "1.DataLabelingJob.AnnotationLabelsEntry\022" - + "\032\n\rlabeler_count\030\004 \001(\005B\003\340A\002\022\034\n\017instructi" - + "on_uri\030\005 \001(\tB\003\340A\002\022\036\n\021inputs_schema_uri\030\006" - + " \001(\tB\003\340A\002\022+\n\006inputs\030\007 \001(\0132\026.google.proto" - + "buf.ValueB\003\340A\002\022=\n\005state\030\010 \001(\0162).google.c" - + "loud.aiplatform.v1beta1.JobStateB\003\340A\003\022\036\n" - + "\021labeling_progress\030\r \001(\005B\003\340A\003\022.\n\rcurrent" - + "_spend\030\016 
\001(\0132\022.google.type.MoneyB\003\340A\003\0224\n" - + "\013create_time\030\t \001(\0132\032.google.protobuf.Tim" - + "estampB\003\340A\003\0224\n\013update_time\030\n \001(\0132\032.googl" - + "e.protobuf.TimestampB\003\340A\003\022&\n\005error\030\026 \001(\013" - + "2\022.google.rpc.StatusB\003\340A\003\022L\n\006labels\030\013 \003(" - + "\0132<.google.cloud.aiplatform.v1beta1.Data" - + "LabelingJob.LabelsEntry\022\030\n\020specialist_po" - + "ols\030\020 \003(\t\022U\n\026active_learning_config\030\025 \001(" - + "\01325.google.cloud.aiplatform.v1beta1.Acti" - + "veLearningConfig\0327\n\025AnnotationLabelsEntr" - + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\032-\n\013Lab" - + "elsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001" - + ":|\352Ay\n)aiplatform.googleapis.com/DataLab" - + "elingJob\022Lprojects/{project}/locations/{" - + "location}/dataLabelingJobs/{data_labelin" - + "g_job}\"\202\002\n\024ActiveLearningConfig\022\035\n\023max_d" - + "ata_item_count\030\001 \001(\003H\000\022\"\n\030max_data_item_" - + "percentage\030\002 \001(\005H\000\022D\n\rsample_config\030\003 \001(" - + "\0132-.google.cloud.aiplatform.v1beta1.Samp" - + "leConfig\022H\n\017training_config\030\004 \001(\0132/.goog" - + "le.cloud.aiplatform.v1beta1.TrainingConf" - + "igB\027\n\025human_labeling_budget\"\275\002\n\014SampleCo" - + "nfig\022)\n\037initial_batch_sample_percentage\030" - + "\001 \001(\005H\000\022+\n!following_batch_sample_percen" - + "tage\030\003 \001(\005H\001\022U\n\017sample_strategy\030\005 \001(\0162<." 
- + "google.cloud.aiplatform.v1beta1.SampleCo" - + "nfig.SampleStrategy\"B\n\016SampleStrategy\022\037\n" - + "\033SAMPLE_STRATEGY_UNSPECIFIED\020\000\022\017\n\013UNCERT" - + "AINTY\020\001B\033\n\031initial_batch_sample_sizeB\035\n\033" - + "following_batch_sample_size\"6\n\016TrainingC" - + "onfig\022$\n\034timeout_training_milli_hours\030\001 " - + "\001(\003B\210\001\n#com.google.cloud.aiplatform.v1be" - + "ta1B\024DataLabelingJobProtoP\001ZIgoogle.gola" - + "ng.org/genproto/googleapis/cloud/aiplatf" - + "orm/v1beta1;aiplatformb\006proto3" + + ".proto\0325google/cloud/aiplatform/v1beta1/" + + "encryption_spec.proto\032/google/cloud/aipl" + + "atform/v1beta1/job_state.proto\0325google/c" + + "loud/aiplatform/v1beta1/specialist_pool." + + "proto\032\034google/protobuf/struct.proto\032\037goo" + + "gle/protobuf/timestamp.proto\032\027google/rpc" + + "/status.proto\032\027google/type/money.proto\032\034" + + "google/api/annotations.proto\"\370\010\n\017DataLab" + + "elingJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_n" + + "ame\030\002 \001(\tB\003\340A\002\022;\n\010datasets\030\003 \003(\tB)\340A\002\372A#" + + "\n!aiplatform.googleapis.com/Dataset\022a\n\021a" + + "nnotation_labels\030\014 \003(\0132F.google.cloud.ai" + + "platform.v1beta1.DataLabelingJob.Annotat" + + "ionLabelsEntry\022\032\n\rlabeler_count\030\004 \001(\005B\003\340" + + "A\002\022\034\n\017instruction_uri\030\005 \001(\tB\003\340A\002\022\036\n\021inpu" + + "ts_schema_uri\030\006 \001(\tB\003\340A\002\022+\n\006inputs\030\007 \001(\013" + + "2\026.google.protobuf.ValueB\003\340A\002\022=\n\005state\030\010" + + " \001(\0162).google.cloud.aiplatform.v1beta1.J" + + "obStateB\003\340A\003\022\036\n\021labeling_progress\030\r \001(\005B" + + "\003\340A\003\022.\n\rcurrent_spend\030\016 \001(\0132\022.google.typ" + + "e.MoneyB\003\340A\003\0224\n\013create_time\030\t \001(\0132\032.goog" + + "le.protobuf.TimestampB\003\340A\003\0224\n\013update_tim" + + "e\030\n 
\001(\0132\032.google.protobuf.TimestampB\003\340A\003" + + "\022&\n\005error\030\026 \001(\0132\022.google.rpc.StatusB\003\340A\003" + + "\022L\n\006labels\030\013 \003(\0132<.google.cloud.aiplatfo" + + "rm.v1beta1.DataLabelingJob.LabelsEntry\022\030" + + "\n\020specialist_pools\030\020 \003(\t\022H\n\017encryption_s" + + "pec\030\024 \001(\0132/.google.cloud.aiplatform.v1be" + + "ta1.EncryptionSpec\022U\n\026active_learning_co" + + "nfig\030\025 \001(\01325.google.cloud.aiplatform.v1b" + + "eta1.ActiveLearningConfig\0327\n\025AnnotationL" + + "abelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\002" + + "8\001\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030" + + "\002 \001(\t:\0028\001:|\352Ay\n)aiplatform.googleapis.co" + + "m/DataLabelingJob\022Lprojects/{project}/lo" + + "cations/{location}/dataLabelingJobs/{dat" + + "a_labeling_job}\"\202\002\n\024ActiveLearningConfig" + + "\022\035\n\023max_data_item_count\030\001 \001(\003H\000\022\"\n\030max_d" + + "ata_item_percentage\030\002 \001(\005H\000\022D\n\rsample_co" + + "nfig\030\003 \001(\0132-.google.cloud.aiplatform.v1b" + + "eta1.SampleConfig\022H\n\017training_config\030\004 \001" + + "(\0132/.google.cloud.aiplatform.v1beta1.Tra" + + "iningConfigB\027\n\025human_labeling_budget\"\275\002\n" + + "\014SampleConfig\022)\n\037initial_batch_sample_pe" + + "rcentage\030\001 \001(\005H\000\022+\n!following_batch_samp" + + "le_percentage\030\003 \001(\005H\001\022U\n\017sample_strategy" + + "\030\005 \001(\0162<.google.cloud.aiplatform.v1beta1" + + ".SampleConfig.SampleStrategy\"B\n\016SampleSt" + + "rategy\022\037\n\033SAMPLE_STRATEGY_UNSPECIFIED\020\000\022" + + "\017\n\013UNCERTAINTY\020\001B\033\n\031initial_batch_sample" + + "_sizeB\035\n\033following_batch_sample_size\"6\n\016" + + "TrainingConfig\022$\n\034timeout_training_milli" + + "_hours\030\001 \001(\003B\210\001\n#com.google.cloud.aiplat" + + "form.v1beta1B\024DataLabelingJobProtoP\001ZIgo" + + 
"ogle.golang.org/genproto/googleapis/clou" + + "d/aiplatform/v1beta1;aiplatformb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -126,6 +129,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.AcceleratorTypeProto.getDescriptor(), + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.SpecialistPoolProto.getDescriptor(), com.google.protobuf.StructProto.getDescriptor(), @@ -156,6 +160,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "Error", "Labels", "SpecialistPools", + "EncryptionSpec", "ActiveLearningConfig", }); internal_static_google_cloud_aiplatform_v1beta1_DataLabelingJob_AnnotationLabelsEntry_descriptor = @@ -220,6 +225,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.AcceleratorTypeProto.getDescriptor(); + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.SpecialistPoolProto.getDescriptor(); com.google.protobuf.StructProto.getDescriptor(); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java index 01df6018b..839cb7af2 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java +++ 
b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java @@ -157,6 +157,23 @@ private Dataset( metadata_ = subBuilder.buildPartial(); } + break; + } + case 90: + { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null; + if (encryptionSpec_ != null) { + subBuilder = encryptionSpec_.toBuilder(); + } + encryptionSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionSpec_); + encryptionSpec_ = subBuilder.buildPartial(); + } + break; } default: @@ -696,6 +713,57 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) { return map.get(key); } + public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 11; + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + /** + * + * + *
+   * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+   * and all sub-resources of this Dataset will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+   * and all sub-resources of this Dataset will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+   * and all sub-resources of this Dataset will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -733,6 +801,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (metadata_ != null) { output.writeMessage(8, getMetadata()); } + if (encryptionSpec_ != null) { + output.writeMessage(11, getEncryptionSpec()); + } unknownFields.writeTo(output); } @@ -773,6 +844,9 @@ public int getSerializedSize() { if (metadata_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getMetadata()); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, getEncryptionSpec()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -806,6 +880,10 @@ public boolean equals(final java.lang.Object obj) { } if (!getEtag().equals(other.getEtag())) return false; if (!internalGetLabels().equals(other.internalGetLabels())) return false; + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -841,6 +919,10 @@ public int hashCode() { hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + internalGetLabels().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1033,6 +1115,12 @@ public Builder clear() { etag_ = ""; internalGetMutableLabels().clear(); + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + 
encryptionSpecBuilder_ = null; + } return this; } @@ -1082,6 +1170,11 @@ public com.google.cloud.aiplatform.v1beta1.Dataset buildPartial() { result.etag_ = etag_; result.labels_ = internalGetLabels(); result.labels_.makeImmutable(); + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } onBuilt(); return result; } @@ -1157,6 +1250,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Dataset other) { onChanged(); } internalGetMutableLabels().mergeFrom(other.internalGetLabels()); + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2455,6 +2551,202 @@ public Builder putAllLabels(java.util.Map va return this; } + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + encryptionSpecBuilder_; + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+     * and all sub-resources of this Dataset will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java index f0196cae4..54bee561d 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java @@ -351,4 +351,42 @@ public interface DatasetOrBuilder * map<string, string> labels = 7; */ java.lang.String getLabelsOrThrow(java.lang.String key); + + /** + * + * + *
+   * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+   * and all sub-resources of this Dataset will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+   * and all sub-resources of this Dataset will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+   * and all sub-resources of this Dataset will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java index 8958051f7..bc261f981 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java @@ -59,36 +59,40 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "\n-google/cloud/aiplatform/v1beta1/datase" + "t.proto\022\037google.cloud.aiplatform.v1beta1" + "\032\037google/api/field_behavior.proto\032\031googl" - + "e/api/resource.proto\032(google/cloud/aipla" - + "tform/v1beta1/io.proto\032\034google/protobuf/" - + "struct.proto\032\037google/protobuf/timestamp." 
- + "proto\032\034google/api/annotations.proto\"\333\003\n\007" - + "Dataset\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_na" - + "me\030\002 \001(\tB\003\340A\002\022 \n\023metadata_schema_uri\030\003 \001" - + "(\tB\003\340A\002\022-\n\010metadata\030\010 \001(\0132\026.google.proto" - + "buf.ValueB\003\340A\002\0224\n\013create_time\030\004 \001(\0132\032.go" - + "ogle.protobuf.TimestampB\003\340A\003\0224\n\013update_t" - + "ime\030\005 \001(\0132\032.google.protobuf.TimestampB\003\340" - + "A\003\022\014\n\004etag\030\006 \001(\t\022D\n\006labels\030\007 \003(\01324.googl" - + "e.cloud.aiplatform.v1beta1.Dataset.Label" - + "sEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005va" - + "lue\030\002 \001(\t:\0028\001:b\352A_\n!aiplatform.googleapi" - + "s.com/Dataset\022:projects/{project}/locati" - + "ons/{location}/datasets/{dataset}\"\226\002\n\020Im" - + "portDataConfig\022@\n\ngcs_source\030\001 \001(\0132*.goo" - + "gle.cloud.aiplatform.v1beta1.GcsSourceH\000" - + "\022_\n\020data_item_labels\030\002 \003(\0132E.google.clou" - + "d.aiplatform.v1beta1.ImportDataConfig.Da" - + "taItemLabelsEntry\022\036\n\021import_schema_uri\030\004" - + " \001(\tB\003\340A\002\0325\n\023DataItemLabelsEntry\022\013\n\003key\030" - + "\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\010\n\006source\"\211\001\n\020E" - + "xportDataConfig\022J\n\017gcs_destination\030\001 \001(\013" - + "2/.google.cloud.aiplatform.v1beta1.GcsDe" - + "stinationH\000\022\032\n\022annotations_filter\030\002 \001(\tB" - + "\r\n\013destinationB\200\001\n#com.google.cloud.aipl" - + "atform.v1beta1B\014DatasetProtoP\001ZIgoogle.g" - + "olang.org/genproto/googleapis/cloud/aipl" - + "atform/v1beta1;aiplatformb\006proto3" + + "e/api/resource.proto\0325google/cloud/aipla" + + "tform/v1beta1/encryption_spec.proto\032(goo" + + "gle/cloud/aiplatform/v1beta1/io.proto\032\034g" + + "oogle/protobuf/struct.proto\032\037google/prot" + + 
"obuf/timestamp.proto\032\034google/api/annotat" + + "ions.proto\"\245\004\n\007Dataset\022\021\n\004name\030\001 \001(\tB\003\340A" + + "\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022 \n\023metadata" + + "_schema_uri\030\003 \001(\tB\003\340A\002\022-\n\010metadata\030\010 \001(\013" + + "2\026.google.protobuf.ValueB\003\340A\002\0224\n\013create_" + + "time\030\004 \001(\0132\032.google.protobuf.TimestampB\003" + + "\340A\003\0224\n\013update_time\030\005 \001(\0132\032.google.protob" + + "uf.TimestampB\003\340A\003\022\014\n\004etag\030\006 \001(\t\022D\n\006label" + + "s\030\007 \003(\01324.google.cloud.aiplatform.v1beta" + + "1.Dataset.LabelsEntry\022H\n\017encryption_spec" + + "\030\013 \001(\0132/.google.cloud.aiplatform.v1beta1" + + ".EncryptionSpec\032-\n\013LabelsEntry\022\013\n\003key\030\001 " + + "\001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:b\352A_\n!aiplatform." + + "googleapis.com/Dataset\022:projects/{projec" + + "t}/locations/{location}/datasets/{datase" + + "t}\"\226\002\n\020ImportDataConfig\022@\n\ngcs_source\030\001 " + + "\001(\0132*.google.cloud.aiplatform.v1beta1.Gc" + + "sSourceH\000\022_\n\020data_item_labels\030\002 \003(\0132E.go" + + "ogle.cloud.aiplatform.v1beta1.ImportData" + + "Config.DataItemLabelsEntry\022\036\n\021import_sch" + + "ema_uri\030\004 \001(\tB\003\340A\002\0325\n\023DataItemLabelsEntr" + + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\010\n\006sou" + + "rce\"\211\001\n\020ExportDataConfig\022J\n\017gcs_destinat" + + "ion\030\001 \001(\0132/.google.cloud.aiplatform.v1be" + + "ta1.GcsDestinationH\000\022\032\n\022annotations_filt" + + "er\030\002 \001(\tB\r\n\013destinationB\200\001\n#com.google.c" + + "loud.aiplatform.v1beta1B\014DatasetProtoP\001Z" + + "Igoogle.golang.org/genproto/googleapis/c" + + "loud/aiplatform/v1beta1;aiplatformb\006prot" + + "o3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -96,6 +100,7 
@@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(), com.google.protobuf.StructProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), @@ -115,6 +120,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "UpdateTime", "Etag", "Labels", + "EncryptionSpec", }); internal_static_google_cloud_aiplatform_v1beta1_Dataset_LabelsEntry_descriptor = internal_static_google_cloud_aiplatform_v1beta1_Dataset_descriptor.getNestedTypes().get(0); @@ -158,6 +164,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { descriptor, registry); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(); com.google.protobuf.StructProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java index b6068bca9..de3249d36 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java @@ -715,7 +715,7 @@ public com.google.protobuf.ByteString getServiceAccountBytes() { *
    * If true, the container of the DeployedModel instances will send `stderr`
    * and `stdout` streams to Stackdriver Logging.
-   * Only supported for custom-trained Models and AutoML Tables Models.
+   * Only supported for custom-trained Models and AutoML Tabular Models.
    * 
* * bool enable_container_logging = 12; @@ -2664,7 +2664,7 @@ public Builder setServiceAccountBytes(com.google.protobuf.ByteString value) { *
      * If true, the container of the DeployedModel instances will send `stderr`
      * and `stdout` streams to Stackdriver Logging.
-     * Only supported for custom-trained Models and AutoML Tables Models.
+     * Only supported for custom-trained Models and AutoML Tabular Models.
      * 
* * bool enable_container_logging = 12; @@ -2681,7 +2681,7 @@ public boolean getEnableContainerLogging() { *
      * If true, the container of the DeployedModel instances will send `stderr`
      * and `stdout` streams to Stackdriver Logging.
-     * Only supported for custom-trained Models and AutoML Tables Models.
+     * Only supported for custom-trained Models and AutoML Tabular Models.
      * 
* * bool enable_container_logging = 12; @@ -2701,7 +2701,7 @@ public Builder setEnableContainerLogging(boolean value) { *
      * If true, the container of the DeployedModel instances will send `stderr`
      * and `stdout` streams to Stackdriver Logging.
-     * Only supported for custom-trained Models and AutoML Tables Models.
+     * Only supported for custom-trained Models and AutoML Tabular Models.
      * 
* * bool enable_container_logging = 12; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java index dc2507765..0410f13c2 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java @@ -317,7 +317,7 @@ public interface DeployedModelOrBuilder *
    * If true, the container of the DeployedModel instances will send `stderr`
    * and `stdout` streams to Stackdriver Logging.
-   * Only supported for custom-trained Models and AutoML Tables Models.
+   * Only supported for custom-trained Models and AutoML Tabular Models.
    * 
* * bool enable_container_logging = 12; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java index a7d7a5dc7..f1ca46c51 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java @@ -122,7 +122,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { * * *
-   * Type of the boot disk (default is "pd-standard").
+   * Type of the boot disk (default is "pd-ssd").
    * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
    * "pd-standard" (Persistent Disk Hard Disk Drive).
    * 
@@ -147,7 +147,7 @@ public java.lang.String getBootDiskType() { * * *
-   * Type of the boot disk (default is "pd-standard").
+   * Type of the boot disk (default is "pd-ssd").
    * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
    * "pd-standard" (Persistent Disk Hard Disk Drive).
    * 
@@ -523,7 +523,7 @@ public Builder mergeFrom( * * *
-     * Type of the boot disk (default is "pd-standard").
+     * Type of the boot disk (default is "pd-ssd").
      * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
      * "pd-standard" (Persistent Disk Hard Disk Drive).
      * 
@@ -547,7 +547,7 @@ public java.lang.String getBootDiskType() { * * *
-     * Type of the boot disk (default is "pd-standard").
+     * Type of the boot disk (default is "pd-ssd").
      * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
      * "pd-standard" (Persistent Disk Hard Disk Drive).
      * 
@@ -571,7 +571,7 @@ public com.google.protobuf.ByteString getBootDiskTypeBytes() { * * *
-     * Type of the boot disk (default is "pd-standard").
+     * Type of the boot disk (default is "pd-ssd").
      * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
      * "pd-standard" (Persistent Disk Hard Disk Drive).
      * 
@@ -594,7 +594,7 @@ public Builder setBootDiskType(java.lang.String value) { * * *
-     * Type of the boot disk (default is "pd-standard").
+     * Type of the boot disk (default is "pd-ssd").
      * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
      * "pd-standard" (Persistent Disk Hard Disk Drive).
      * 
@@ -613,7 +613,7 @@ public Builder clearBootDiskType() { * * *
-     * Type of the boot disk (default is "pd-standard").
+     * Type of the boot disk (default is "pd-ssd").
      * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
      * "pd-standard" (Persistent Disk Hard Disk Drive).
      * 
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java index 54c1c5b64..288fd2baf 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java @@ -27,7 +27,7 @@ public interface DiskSpecOrBuilder * * *
-   * Type of the boot disk (default is "pd-standard").
+   * Type of the boot disk (default is "pd-ssd").
    * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
    * "pd-standard" (Persistent Disk Hard Disk Drive).
    * 
@@ -41,7 +41,7 @@ public interface DiskSpecOrBuilder * * *
-   * Type of the boot disk (default is "pd-standard").
+   * Type of the boot disk (default is "pd-ssd").
    * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
    * "pd-standard" (Persistent Disk Hard Disk Drive).
    * 
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpec.java new file mode 100644 index 000000000..388bb0d27 --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpec.java @@ -0,0 +1,666 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/aiplatform/v1beta1/encryption_spec.proto + +package com.google.cloud.aiplatform.v1beta1; + +/** + * + * + *
+ * Represents a customer-managed encryption key spec that can be applied to
+ * a top-level resource.
+ * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.EncryptionSpec} + */ +public final class EncryptionSpec extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.EncryptionSpec) + EncryptionSpecOrBuilder { + private static final long serialVersionUID = 0L; + // Use EncryptionSpec.newBuilder() to construct. + private EncryptionSpec(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private EncryptionSpec() { + kmsKeyName_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new EncryptionSpec(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private EncryptionSpec( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + kmsKeyName_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto + .internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto + .internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.class, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder.class); + } + + public static final int KMS_KEY_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object kmsKeyName_; + /** + * + * + *
+   * Required. The Cloud KMS resource identifier of the customer managed encryption key
+   * used to protect a resource. Has the form:
+   * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+   * The key needs to be in the same region as where the compute resource is
+   * created.
+   * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The kmsKeyName. + */ + @java.lang.Override + public java.lang.String getKmsKeyName() { + java.lang.Object ref = kmsKeyName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + kmsKeyName_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The Cloud KMS resource identifier of the customer managed encryption key
+   * used to protect a resource. Has the form:
+   * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+   * The key needs to be in the same region as where the compute resource is
+   * created.
+   * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for kmsKeyName. + */ + @java.lang.Override + public com.google.protobuf.ByteString getKmsKeyNameBytes() { + java.lang.Object ref = kmsKeyName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + kmsKeyName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getKmsKeyNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kmsKeyName_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getKmsKeyNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kmsKeyName_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.EncryptionSpec)) { + return super.equals(obj); + } + com.google.cloud.aiplatform.v1beta1.EncryptionSpec other = + (com.google.cloud.aiplatform.v1beta1.EncryptionSpec) obj; + + if (!getKmsKeyName().equals(other.getKmsKeyName())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return 
memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + KMS_KEY_NAME_FIELD_NUMBER; + hash = (53 * hash) + getKmsKeyName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public 
static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.EncryptionSpec prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Represents a customer-managed encryption key spec that can be applied to
+   * a top-level resource.
+   * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.EncryptionSpec} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.EncryptionSpec) + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto + .internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto + .internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.class, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder.class); + } + + // Construct using com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + kmsKeyName_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto + .internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getDefaultInstanceForType() { + return com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance(); 
+ } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec build() { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec buildPartial() { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec result = + new com.google.cloud.aiplatform.v1beta1.EncryptionSpec(this); + result.kmsKeyName_ = kmsKeyName_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.aiplatform.v1beta1.EncryptionSpec) { + return mergeFrom((com.google.cloud.aiplatform.v1beta1.EncryptionSpec) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.EncryptionSpec other) { + if (other == 
com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()) + return this; + if (!other.getKmsKeyName().isEmpty()) { + kmsKeyName_ = other.kmsKeyName_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.aiplatform.v1beta1.EncryptionSpec) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object kmsKeyName_ = ""; + /** + * + * + *
+     * Required. The Cloud KMS resource identifier of the customer managed encryption key
+     * used to protect a resource. Has the form:
+     * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+     * The key needs to be in the same region as where the compute resource is
+     * created.
+     * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The kmsKeyName. + */ + public java.lang.String getKmsKeyName() { + java.lang.Object ref = kmsKeyName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + kmsKeyName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The Cloud KMS resource identifier of the customer managed encryption key
+     * used to protect a resource. Has the form:
+     * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+     * The key needs to be in the same region as where the compute resource is
+     * created.
+     * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for kmsKeyName. + */ + public com.google.protobuf.ByteString getKmsKeyNameBytes() { + java.lang.Object ref = kmsKeyName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + kmsKeyName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The Cloud KMS resource identifier of the customer managed encryption key
+     * used to protect a resource. Has the form:
+     * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+     * The key needs to be in the same region as where the compute resource is
+     * created.
+     * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The kmsKeyName to set. + * @return This builder for chaining. + */ + public Builder setKmsKeyName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + kmsKeyName_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The Cloud KMS resource identifier of the customer managed encryption key
+     * used to protect a resource. Has the form:
+     * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+     * The key needs to be in the same region as where the compute resource is
+     * created.
+     * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return This builder for chaining. + */ + public Builder clearKmsKeyName() { + + kmsKeyName_ = getDefaultInstance().getKmsKeyName(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The Cloud KMS resource identifier of the customer managed encryption key
+     * used to protect a resource. Has the form:
+     * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+     * The key needs to be in the same region as where the compute resource is
+     * created.
+     * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The bytes for kmsKeyName to set. + * @return This builder for chaining. + */ + public Builder setKmsKeyNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + kmsKeyName_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.EncryptionSpec) + } + + // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.EncryptionSpec) + private static final com.google.cloud.aiplatform.v1beta1.EncryptionSpec DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.EncryptionSpec(); + } + + public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public EncryptionSpec parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EncryptionSpec(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff 
--git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecOrBuilder.java new file mode 100644 index 000000000..b5ce0eb31 --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/aiplatform/v1beta1/encryption_spec.proto + +package com.google.cloud.aiplatform.v1beta1; + +public interface EncryptionSpecOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.EncryptionSpec) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The Cloud KMS resource identifier of the customer managed encryption key
+   * used to protect a resource. Has the form:
+   * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+   * The key needs to be in the same region as where the compute resource is
+   * created.
+   * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The kmsKeyName. + */ + java.lang.String getKmsKeyName(); + /** + * + * + *
+   * Required. The Cloud KMS resource identifier of the customer managed encryption key
+   * used to protect a resource. Has the form:
+   * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+   * The key needs to be in the same region as where the compute resource is
+   * created.
+   * 
+ * + * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for kmsKeyName. + */ + com.google.protobuf.ByteString getKmsKeyNameBytes(); +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecProto.java new file mode 100644 index 000000000..097eddefb --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecProto.java @@ -0,0 +1,78 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/cloud/aiplatform/v1beta1/encryption_spec.proto + +package com.google.cloud.aiplatform.v1beta1; + +public final class EncryptionSpecProto { + private EncryptionSpecProto() {} + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); + } + + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + return descriptor; + } + + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; + + static { + java.lang.String[] descriptorData = { + "\n5google/cloud/aiplatform/v1beta1/encryp" + + "tion_spec.proto\022\037google.cloud.aiplatform" + + ".v1beta1\032\037google/api/field_behavior.prot" + + "o\032\034google/api/annotations.proto\"+\n\016Encry" + + "ptionSpec\022\031\n\014kms_key_name\030\001 \001(\tB\003\340A\002B\207\001\n" + + "#com.google.cloud.aiplatform.v1beta1B\023En" + + "cryptionSpecProtoP\001ZIgoogle.golang.org/g" + + "enproto/googleapis/cloud/aiplatform/v1be" + + "ta1;aiplatformb\006proto3" + }; + descriptor = + com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( + descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.api.FieldBehaviorProto.getDescriptor(), + com.google.api.AnnotationsProto.getDescriptor(), + }); + internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_fieldAccessorTable = + new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor, + new java.lang.String[] { + "KmsKeyName", + }); + com.google.protobuf.ExtensionRegistry registry = + com.google.protobuf.ExtensionRegistry.newInstance(); + registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); + com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( + descriptor, registry); + com.google.api.FieldBehaviorProto.getDescriptor(); + com.google.api.AnnotationsProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java index 157c5ff3c..9ee75e4a9 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java @@ -173,6 +173,23 @@ private Endpoint( updateTime_ = subBuilder.buildPartial(); } + break; + } + case 82: + { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null; + if (encryptionSpec_ != null) { + subBuilder = encryptionSpec_.toBuilder(); + } + encryptionSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionSpec_); + encryptionSpec_ = subBuilder.buildPartial(); + } + break; } default: @@ -849,6 +866,60 @@ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { return getUpdateTime(); } + public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 10; + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + /** + * + * + *
+   * Customer-managed encryption key spec for an Endpoint. If set, this
+   * Endpoint and all sub-resources of this Endpoint will be secured by
+   * this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key spec for an Endpoint. If set, this
+   * Endpoint and all sub-resources of this Endpoint will be secured by
+   * this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key spec for an Endpoint. If set, this
+   * Endpoint and all sub-resources of this Endpoint will be secured by
+   * this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -888,6 +959,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (updateTime_ != null) { output.writeMessage(9, getUpdateTime()); } + if (encryptionSpec_ != null) { + output.writeMessage(10, getEncryptionSpec()); + } unknownFields.writeTo(output); } @@ -938,6 +1012,9 @@ public int getSerializedSize() { if (updateTime_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, getUpdateTime()); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(10, getEncryptionSpec()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -969,6 +1046,10 @@ public boolean equals(final java.lang.Object obj) { if (hasUpdateTime()) { if (!getUpdateTime().equals(other.getUpdateTime())) return false; } + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1008,6 +1089,10 @@ public int hashCode() { hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getUpdateTime().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1208,6 +1293,12 @@ public Builder clear() { updateTime_ = null; updateTimeBuilder_ = null; } + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } 
return this; } @@ -1263,6 +1354,11 @@ public com.google.cloud.aiplatform.v1beta1.Endpoint buildPartial() { } else { result.updateTime_ = updateTimeBuilder_.build(); } + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } onBuilt(); return result; } @@ -1363,6 +1459,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Endpoint other) { if (other.hasUpdateTime()) { mergeUpdateTime(other.getUpdateTime()); } + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -3046,6 +3145,211 @@ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { return updateTimeBuilder_; } + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + encryptionSpecBuilder_; + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for an Endpoint. If set, this
+     * Endpoint and all sub-resources of this Endpoint will be secured by
+     * this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java index d23033be3..83d69a594 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java @@ -429,4 +429,45 @@ public interface EndpointOrBuilder * */ com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); + + /** + * + * + *
+   * Customer-managed encryption key spec for an Endpoint. If set, this
+   * Endpoint and all sub-resources of this Endpoint will be secured by
+   * this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for an Endpoint. If set, this
+   * Endpoint and all sub-resources of this Endpoint will be secured by
+   * this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for an Endpoint. If set, this
+   * Endpoint and all sub-resources of this Endpoint will be secured by
+   * this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java index 2b47ee4c4..47c9a8e84 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java @@ -55,44 +55,47 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "\n.google/cloud/aiplatform/v1beta1/endpoi" + "nt.proto\022\037google.cloud.aiplatform.v1beta" + "1\032\037google/api/field_behavior.proto\032\031goog" - + "le/api/resource.proto\0321google/cloud/aipl" - + "atform/v1beta1/explanation.proto\0327google" - + "/cloud/aiplatform/v1beta1/machine_resour" - + "ces.proto\032\037google/protobuf/timestamp.pro" - + "to\032\034google/api/annotations.proto\"\373\004\n\010End" - + "point\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_name" - + "\030\002 \001(\tB\003\340A\002\022\023\n\013description\030\003 \001(\t\022L\n\017depl" - + "oyed_models\030\004 \003(\0132..google.cloud.aiplatf" - + "orm.v1beta1.DeployedModelB\003\340A\003\022R\n\rtraffi" - + "c_split\030\005 \003(\0132;.google.cloud.aiplatform." - + "v1beta1.Endpoint.TrafficSplitEntry\022\014\n\004et" - + "ag\030\006 \001(\t\022E\n\006labels\030\007 \003(\01325.google.cloud." 
- + "aiplatform.v1beta1.Endpoint.LabelsEntry\022" - + "4\n\013create_time\030\010 \001(\0132\032.google.protobuf.T" - + "imestampB\003\340A\003\0224\n\013update_time\030\t \001(\0132\032.goo" - + "gle.protobuf.TimestampB\003\340A\003\0323\n\021TrafficSp" - + "litEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\005:\0028\001" - + "\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 " - + "\001(\t:\0028\001:e\352Ab\n\"aiplatform.googleapis.com/" - + "Endpoint\022 - * Variables that reference a $(VAR_NAME) are expanded + * Required. Variables that reference a $(VAR_NAME) are expanded * using the previous defined environment variables in the container and * any service environment variables. If a variable cannot be resolved, * the reference in the input string will be unchanged. The $(VAR_NAME) @@ -183,7 +183,7 @@ public com.google.protobuf.ByteString getNameBytes() { * exists or not. *
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @return The value. */ @@ -203,7 +203,7 @@ public java.lang.String getValue() { * * *
-   * Variables that reference a $(VAR_NAME) are expanded
+   * Required. Variables that reference a $(VAR_NAME) are expanded
    * using the previous defined environment variables in the container and
    * any service environment variables. If a variable cannot be resolved,
    * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -212,7 +212,7 @@ public java.lang.String getValue() {
    * exists or not.
    * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @return The bytes for value. */ @@ -672,7 +672,7 @@ public Builder setNameBytes(com.google.protobuf.ByteString value) { * * *
-     * Variables that reference a $(VAR_NAME) are expanded
+     * Required. Variables that reference a $(VAR_NAME) are expanded
      * using the previous defined environment variables in the container and
      * any service environment variables. If a variable cannot be resolved,
      * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -681,7 +681,7 @@ public Builder setNameBytes(com.google.protobuf.ByteString value) {
      * exists or not.
      * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @return The value. */ @@ -700,7 +700,7 @@ public java.lang.String getValue() { * * *
-     * Variables that reference a $(VAR_NAME) are expanded
+     * Required. Variables that reference a $(VAR_NAME) are expanded
      * using the previous defined environment variables in the container and
      * any service environment variables. If a variable cannot be resolved,
      * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -709,7 +709,7 @@ public java.lang.String getValue() {
      * exists or not.
      * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @return The bytes for value. */ @@ -728,7 +728,7 @@ public com.google.protobuf.ByteString getValueBytes() { * * *
-     * Variables that reference a $(VAR_NAME) are expanded
+     * Required. Variables that reference a $(VAR_NAME) are expanded
      * using the previous defined environment variables in the container and
      * any service environment variables. If a variable cannot be resolved,
      * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -737,7 +737,7 @@ public com.google.protobuf.ByteString getValueBytes() {
      * exists or not.
      * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @param value The value to set. * @return This builder for chaining. @@ -755,7 +755,7 @@ public Builder setValue(java.lang.String value) { * * *
-     * Variables that reference a $(VAR_NAME) are expanded
+     * Required. Variables that reference a $(VAR_NAME) are expanded
      * using the previous defined environment variables in the container and
      * any service environment variables. If a variable cannot be resolved,
      * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -764,7 +764,7 @@ public Builder setValue(java.lang.String value) {
      * exists or not.
      * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @return This builder for chaining. */ @@ -778,7 +778,7 @@ public Builder clearValue() { * * *
-     * Variables that reference a $(VAR_NAME) are expanded
+     * Required. Variables that reference a $(VAR_NAME) are expanded
      * using the previous defined environment variables in the container and
      * any service environment variables. If a variable cannot be resolved,
      * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -787,7 +787,7 @@ public Builder clearValue() {
      * exists or not.
      * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @param value The bytes for value to set. * @return This builder for chaining. diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java index e160feabf..2317b8c6d 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java @@ -52,7 +52,7 @@ public interface EnvVarOrBuilder * * *
-   * Variables that reference a $(VAR_NAME) are expanded
+   * Required. Variables that reference a $(VAR_NAME) are expanded
    * using the previous defined environment variables in the container and
    * any service environment variables. If a variable cannot be resolved,
    * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -61,7 +61,7 @@ public interface EnvVarOrBuilder
    * exists or not.
    * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @return The value. */ @@ -70,7 +70,7 @@ public interface EnvVarOrBuilder * * *
-   * Variables that reference a $(VAR_NAME) are expanded
+   * Required. Variables that reference a $(VAR_NAME) are expanded
    * using the previous defined environment variables in the container and
    * any service environment variables. If a variable cannot be resolved,
    * the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -79,7 +79,7 @@ public interface EnvVarOrBuilder
    * exists or not.
    * 
* - * string value = 2; + * string value = 2 [(.google.api.field_behavior) = REQUIRED]; * * @return The bytes for value. */ diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java index ca406038d..9a6720956 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java @@ -43,12 +43,12 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "\n-google/cloud/aiplatform/v1beta1/env_va" + "r.proto\022\037google.cloud.aiplatform.v1beta1" + "\032\037google/api/field_behavior.proto\032\034googl" - + "e/api/annotations.proto\"*\n\006EnvVar\022\021\n\004nam" - + "e\030\001 \001(\tB\003\340A\002\022\r\n\005value\030\002 \001(\tB\177\n#com.googl" - + "e.cloud.aiplatform.v1beta1B\013EnvVarProtoP" - + "\001ZIgoogle.golang.org/genproto/googleapis" - + "/cloud/aiplatform/v1beta1;aiplatformb\006pr" - + "oto3" + + "e/api/annotations.proto\"/\n\006EnvVar\022\021\n\004nam" + + "e\030\001 \001(\tB\003\340A\002\022\022\n\005value\030\002 \001(\tB\003\340A\002B\177\n#com." 
+ + "google.cloud.aiplatform.v1beta1B\013EnvVarP" + + "rotoP\001ZIgoogle.golang.org/genproto/googl" + + "eapis/cloud/aiplatform/v1beta1;aiplatfor" + + "mb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java index 4c06cea68..f0985dbd5 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java @@ -110,6 +110,23 @@ private ExplainRequest( parameters_ = subBuilder.buildPartial(); } + break; + } + case 42: + { + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder subBuilder = null; + if (explanationSpecOverride_ != null) { + subBuilder = explanationSpecOverride_.toBuilder(); + } + explanationSpecOverride_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(explanationSpecOverride_); + explanationSpecOverride_ = subBuilder.buildPartial(); + } + break; } default: @@ -374,6 +391,79 @@ public com.google.protobuf.ValueOrBuilder getParametersOrBuilder() { return getParameters(); } + public static final int EXPLANATION_SPEC_OVERRIDE_FIELD_NUMBER = 5; + private com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanationSpecOverride_; + /** + * + * + *
+   * If specified, overrides the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+   * Can be used for explaining prediction results with different
+   * configurations, such as:
+   *  - Explaining top-5 predictions results as opposed to top-1;
+   *  - Increasing path count or step count of the attribution methods to reduce
+   *    approximate errors;
+   *  - Using different baselines for explaining the prediction results.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + * + * @return Whether the explanationSpecOverride field is set. + */ + @java.lang.Override + public boolean hasExplanationSpecOverride() { + return explanationSpecOverride_ != null; + } + /** + * + * + *
+   * If specified, overrides the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+   * Can be used for explaining prediction results with different
+   * configurations, such as:
+   *  - Explaining top-5 predictions results as opposed to top-1;
+   *  - Increasing path count or step count of the attribution methods to reduce
+   *    approximate errors;
+   *  - Using different baselines for explaining the prediction results.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + * + * @return The explanationSpecOverride. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getExplanationSpecOverride() { + return explanationSpecOverride_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance() + : explanationSpecOverride_; + } + /** + * + * + *
+   * If specified, overrides the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+   * Can be used for explaining prediction results with different
+   * configurations, such as:
+   *  - Explaining top-5 predictions results as opposed to top-1;
+   *  - Increasing path count or step count of the attribution methods to reduce
+   *    approximate errors;
+   *  - Using different baselines for explaining the prediction results.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder + getExplanationSpecOverrideOrBuilder() { + return getExplanationSpecOverride(); + } + public static final int DEPLOYED_MODEL_ID_FIELD_NUMBER = 3; private volatile java.lang.Object deployedModelId_; /** @@ -451,6 +541,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (parameters_ != null) { output.writeMessage(4, getParameters()); } + if (explanationSpecOverride_ != null) { + output.writeMessage(5, getExplanationSpecOverride()); + } unknownFields.writeTo(output); } @@ -472,6 +565,10 @@ public int getSerializedSize() { if (parameters_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getParameters()); } + if (explanationSpecOverride_ != null) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize(5, getExplanationSpecOverride()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -494,6 +591,10 @@ public boolean equals(final java.lang.Object obj) { if (hasParameters()) { if (!getParameters().equals(other.getParameters())) return false; } + if (hasExplanationSpecOverride() != other.hasExplanationSpecOverride()) return false; + if (hasExplanationSpecOverride()) { + if (!getExplanationSpecOverride().equals(other.getExplanationSpecOverride())) return false; + } if (!getDeployedModelId().equals(other.getDeployedModelId())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -516,6 +617,10 @@ public int hashCode() { hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + getParameters().hashCode(); } + if (hasExplanationSpecOverride()) { + hash = (37 * hash) + EXPLANATION_SPEC_OVERRIDE_FIELD_NUMBER; + hash = (53 * hash) + getExplanationSpecOverride().hashCode(); + } hash = (37 * hash) + 
DEPLOYED_MODEL_ID_FIELD_NUMBER; hash = (53 * hash) + getDeployedModelId().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); @@ -679,6 +784,12 @@ public Builder clear() { parameters_ = null; parametersBuilder_ = null; } + if (explanationSpecOverrideBuilder_ == null) { + explanationSpecOverride_ = null; + } else { + explanationSpecOverride_ = null; + explanationSpecOverrideBuilder_ = null; + } deployedModelId_ = ""; return this; @@ -724,6 +835,11 @@ public com.google.cloud.aiplatform.v1beta1.ExplainRequest buildPartial() { } else { result.parameters_ = parametersBuilder_.build(); } + if (explanationSpecOverrideBuilder_ == null) { + result.explanationSpecOverride_ = explanationSpecOverride_; + } else { + result.explanationSpecOverride_ = explanationSpecOverrideBuilder_.build(); + } result.deployedModelId_ = deployedModelId_; onBuilt(); return result; @@ -809,6 +925,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ExplainRequest othe if (other.hasParameters()) { mergeParameters(other.getParameters()); } + if (other.hasExplanationSpecOverride()) { + mergeExplanationSpecOverride(other.getExplanationSpecOverride()); + } if (!other.getDeployedModelId().isEmpty()) { deployedModelId_ = other.deployedModelId_; onChanged(); @@ -1701,6 +1820,270 @@ public com.google.protobuf.ValueOrBuilder getParametersOrBuilder() { return parametersBuilder_; } + private com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanationSpecOverride_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder> + explanationSpecOverrideBuilder_; + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + * + * @return Whether the explanationSpecOverride field is set. + */ + public boolean hasExplanationSpecOverride() { + return explanationSpecOverrideBuilder_ != null || explanationSpecOverride_ != null; + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + * + * @return The explanationSpecOverride. + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride + getExplanationSpecOverride() { + if (explanationSpecOverrideBuilder_ == null) { + return explanationSpecOverride_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance() + : explanationSpecOverride_; + } else { + return explanationSpecOverrideBuilder_.getMessage(); + } + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + public Builder setExplanationSpecOverride( + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride value) { + if (explanationSpecOverrideBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + explanationSpecOverride_ = value; + onChanged(); + } else { + explanationSpecOverrideBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + public Builder setExplanationSpecOverride( + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder builderForValue) { + if (explanationSpecOverrideBuilder_ == null) { + explanationSpecOverride_ = builderForValue.build(); + onChanged(); + } else { + explanationSpecOverrideBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + public Builder mergeExplanationSpecOverride( + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride value) { + if (explanationSpecOverrideBuilder_ == null) { + if (explanationSpecOverride_ != null) { + explanationSpecOverride_ = + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.newBuilder( + explanationSpecOverride_) + .mergeFrom(value) + .buildPartial(); + } else { + explanationSpecOverride_ = value; + } + onChanged(); + } else { + explanationSpecOverrideBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + public Builder clearExplanationSpecOverride() { + if (explanationSpecOverrideBuilder_ == null) { + explanationSpecOverride_ = null; + onChanged(); + } else { + explanationSpecOverride_ = null; + explanationSpecOverrideBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder + getExplanationSpecOverrideBuilder() { + + onChanged(); + return getExplanationSpecOverrideFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder + getExplanationSpecOverrideOrBuilder() { + if (explanationSpecOverrideBuilder_ != null) { + return explanationSpecOverrideBuilder_.getMessageOrBuilder(); + } else { + return explanationSpecOverride_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance() + : explanationSpecOverride_; + } + } + /** + * + * + *
+     * If specified, overrides the
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+     * Can be used for explaining prediction results with different
+     * configurations, such as:
+     *  - Explaining top-5 predictions results as opposed to top-1;
+     *  - Increasing path count or step count of the attribution methods to reduce
+     *    approximate errors;
+     *  - Using different baselines for explaining the prediction results.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder> + getExplanationSpecOverrideFieldBuilder() { + if (explanationSpecOverrideBuilder_ == null) { + explanationSpecOverrideBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder>( + getExplanationSpecOverride(), getParentForChildren(), isClean()); + explanationSpecOverride_ = null; + } + return explanationSpecOverrideBuilder_; + } + private java.lang.Object deployedModelId_ = ""; /** * diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java index 49654405e..05f8740af 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java @@ -196,6 +196,66 @@ public interface ExplainRequestOrBuilder */ com.google.protobuf.ValueOrBuilder getParametersOrBuilder(); + /** + * + * + *
+   * If specified, overrides the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+   * Can be used for explaining prediction results with different
+   * configurations, such as:
+   *  - Explaining top-5 predictions results as opposed to top-1;
+   *  - Increasing path count or step count of the attribution methods to reduce
+   *    approximate errors;
+   *  - Using different baselines for explaining the prediction results.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + * + * @return Whether the explanationSpecOverride field is set. + */ + boolean hasExplanationSpecOverride(); + /** + * + * + *
+   * If specified, overrides the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+   * Can be used for explaining prediction results with different
+   * configurations, such as:
+   *  - Explaining top-5 predictions results as opposed to top-1;
+   *  - Increasing path count or step count of the attribution methods to reduce
+   *    approximate errors;
+   *  - Using different baselines for explaining the prediction results.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + * + * @return The explanationSpecOverride. + */ + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getExplanationSpecOverride(); + /** + * + * + *
+   * If specified, overrides the
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+   * Can be used for explaining prediction results with different
+   * configurations, such as:
+   *  - Explaining top-5 predictions results as opposed to top-1;
+   *  - Increasing path count or step count of the attribution methods to reduce
+   *    approximate errors;
+   *  - Using different baselines for explaining the prediction results.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5; + * + */ + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder + getExplanationSpecOverrideOrBuilder(); + /** * * diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java index fa4e9660d..511c22b5e 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java @@ -10318,8 +10318,8 @@ public int getInputsCount() { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -10354,8 +10354,8 @@ public boolean containsInputs(java.lang.String key) { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. 
Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -10381,8 +10381,8 @@ public boolean containsInputs(java.lang.String key) { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -10414,8 +10414,8 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata get * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. 
@@ -11093,8 +11093,8 @@ public int getInputsCount() { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -11129,8 +11129,8 @@ public boolean containsInputs(java.lang.String key) { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -11156,8 +11156,8 @@ public boolean containsInputs(java.lang.String key) { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . 
Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -11190,8 +11190,8 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata get * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -11231,8 +11231,8 @@ public Builder clearInputs() { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. 
@@ -11266,8 +11266,8 @@ public Builder removeInputs(java.lang.String key) { * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -11299,8 +11299,8 @@ public Builder putInputs( * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. 
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java index fc2ca5493..e543a0496 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java @@ -33,8 +33,8 @@ public interface ExplanationMetadataOrBuilder * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -55,8 +55,8 @@ public interface ExplanationMetadataOrBuilder * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). 
* For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -82,8 +82,8 @@ public interface ExplanationMetadataOrBuilder * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -106,8 +106,8 @@ public interface ExplanationMetadataOrBuilder * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. * For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. @@ -130,8 +130,8 @@ com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata getInputsO * name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline * of the empty feature is chosen by AI Platform. 
* For AI Platform provided Tensorflow images, the key can be any friendly - * name of the feature . Once specified, [ - * featureAttributions][Attribution.feature_attributions] will be keyed by + * name of the feature. Once specified, + * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by * this key (if not grouped with another feature). * For custom images, the key must match with the key in * [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances]. diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverride.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverride.java new file mode 100644 index 000000000..e1ed5ec8d --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverride.java @@ -0,0 +1,2094 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/aiplatform/v1beta1/explanation.proto + +package com.google.cloud.aiplatform.v1beta1; + +/** + * + * + *
+ * The [ExplanationMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata] entries that can be overridden at
+ * [online explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+ * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride} + */ +public final class ExplanationMetadataOverride extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) + ExplanationMetadataOverrideOrBuilder { + private static final long serialVersionUID = 0L; + // Use ExplanationMetadataOverride.newBuilder() to construct. + private ExplanationMetadataOverride(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ExplanationMetadataOverride() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ExplanationMetadataOverride(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ExplanationMetadataOverride( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + inputs_ = + com.google.protobuf.MapField.newMapField(InputsDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000001; + } + com.google.protobuf.MapEntry< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride> + inputs__ = + input.readMessage( + InputsDefaultEntryHolder.defaultEntry.getParserForType(), + extensionRegistry); + 
inputs_.getMutableMap().put(inputs__.getKey(), inputs__.getValue()); + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 1: + return internalGetInputs(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.class, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder.class); + } + + public interface InputMetadataOverrideOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + java.util.List getInputBaselinesList(); + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + com.google.protobuf.Value getInputBaselines(int index); + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + int getInputBaselinesCount(); + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + java.util.List getInputBaselinesOrBuilderList(); + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + com.google.protobuf.ValueOrBuilder getInputBaselinesOrBuilder(int index); + } + /** + * + * + *
+   * The [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata] entries to be
+   * overridden.
+   * 
+ * + * Protobuf type {@code + * google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride} + */ + public static final class InputMetadataOverride extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride) + InputMetadataOverrideOrBuilder { + private static final long serialVersionUID = 0L; + // Use InputMetadataOverride.newBuilder() to construct. + private InputMetadataOverride(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private InputMetadataOverride() { + inputBaselines_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new InputMetadataOverride(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private InputMetadataOverride( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + inputBaselines_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + inputBaselines_.add( + input.readMessage(com.google.protobuf.Value.parser(), extensionRegistry)); + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + inputBaselines_ = java.util.Collections.unmodifiableList(inputBaselines_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + .class, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + .Builder.class); + } + + public static final int INPUT_BASELINES_FIELD_NUMBER = 1; + private java.util.List inputBaselines_; + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + @java.lang.Override + public java.util.List getInputBaselinesList() { + return inputBaselines_; + } + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + @java.lang.Override + public java.util.List + getInputBaselinesOrBuilderList() { + return inputBaselines_; + } + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + @java.lang.Override + public int getInputBaselinesCount() { + return inputBaselines_.size(); + } + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + @java.lang.Override + public com.google.protobuf.Value getInputBaselines(int index) { + return inputBaselines_.get(index); + } + /** + * + * + *
+     * Baseline inputs for this feature.
+     * This overrides the `input_baseline` field of the
+     * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+     * object of the corresponding feature's input metadata. If it's not
+     * specified, the original baselines are not overridden.
+     * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + @java.lang.Override + public com.google.protobuf.ValueOrBuilder getInputBaselinesOrBuilder(int index) { + return inputBaselines_.get(index); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + for (int i = 0; i < inputBaselines_.size(); i++) { + output.writeMessage(1, inputBaselines_.get(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < inputBaselines_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, inputBaselines_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride)) { + return super.equals(obj); + } + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride other = + (com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride) + obj; + + if (!getInputBaselinesList().equals(other.getInputBaselinesList())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getInputBaselinesCount() > 0) { + hash = (37 * 
hash) + INPUT_BASELINES_FIELD_NUMBER; + hash = (53 * hash) + getInputBaselinesList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + 
.InputMetadataOverride + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+     * The [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata] entries to be
+     * overridden.
+     * 
+ * + * Protobuf type {@code + * google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride} + */ + public static final class Builder + extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride) + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverrideOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride.class, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride.Builder.class); + } + + // Construct using + // com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getInputBaselinesFieldBuilder(); + } + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (inputBaselinesBuilder_ == null) { + inputBaselines_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + 
inputBaselinesBuilder_.clear(); + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getDefaultInstanceForType() { + return com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + build() { + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + buildPartial() { + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + result = + new com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride(this); + int from_bitField0_ = bitField0_; + if (inputBaselinesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + inputBaselines_ = java.util.Collections.unmodifiableList(inputBaselines_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.inputBaselines_ = inputBaselines_; + } else { + result.inputBaselines_ = inputBaselinesBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, + java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride) { + return mergeFrom( + (com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride) + other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + other) { + if (other + == com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + .getDefaultInstance()) return this; + if (inputBaselinesBuilder_ == null) { + if (!other.inputBaselines_.isEmpty()) { + if (inputBaselines_.isEmpty()) { + inputBaselines_ = other.inputBaselines_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureInputBaselinesIsMutable(); + inputBaselines_.addAll(other.inputBaselines_); + } + onChanged(); + } + } else { + if (!other.inputBaselines_.isEmpty()) { + if (inputBaselinesBuilder_.isEmpty()) { + inputBaselinesBuilder_.dispose(); + inputBaselinesBuilder_ = null; + inputBaselines_ = other.inputBaselines_; + bitField0_ = (bitField0_ & ~0x00000001); + inputBaselinesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getInputBaselinesFieldBuilder() + : null; + } else { + inputBaselinesBuilder_.addAllMessages(other.inputBaselines_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private java.util.List inputBaselines_ = + java.util.Collections.emptyList(); + + private void ensureInputBaselinesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + inputBaselines_ = new java.util.ArrayList(inputBaselines_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.protobuf.Value, + com.google.protobuf.Value.Builder, + com.google.protobuf.ValueOrBuilder> + inputBaselinesBuilder_; + + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public java.util.List getInputBaselinesList() { + if (inputBaselinesBuilder_ == null) { + return java.util.Collections.unmodifiableList(inputBaselines_); + } else { + return inputBaselinesBuilder_.getMessageList(); + } + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public int getInputBaselinesCount() { + if (inputBaselinesBuilder_ == null) { + return inputBaselines_.size(); + } else { + return inputBaselinesBuilder_.getCount(); + } + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public com.google.protobuf.Value getInputBaselines(int index) { + if (inputBaselinesBuilder_ == null) { + return inputBaselines_.get(index); + } else { + return inputBaselinesBuilder_.getMessage(index); + } + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder setInputBaselines(int index, com.google.protobuf.Value value) { + if (inputBaselinesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputBaselinesIsMutable(); + inputBaselines_.set(index, value); + onChanged(); + } else { + inputBaselinesBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder setInputBaselines( + int index, com.google.protobuf.Value.Builder builderForValue) { + if (inputBaselinesBuilder_ == null) { + ensureInputBaselinesIsMutable(); + inputBaselines_.set(index, builderForValue.build()); + onChanged(); + } else { + inputBaselinesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder addInputBaselines(com.google.protobuf.Value value) { + if (inputBaselinesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputBaselinesIsMutable(); + inputBaselines_.add(value); + onChanged(); + } else { + inputBaselinesBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder addInputBaselines(int index, com.google.protobuf.Value value) { + if (inputBaselinesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputBaselinesIsMutable(); + inputBaselines_.add(index, value); + onChanged(); + } else { + inputBaselinesBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder addInputBaselines(com.google.protobuf.Value.Builder builderForValue) { + if (inputBaselinesBuilder_ == null) { + ensureInputBaselinesIsMutable(); + inputBaselines_.add(builderForValue.build()); + onChanged(); + } else { + inputBaselinesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder addInputBaselines( + int index, com.google.protobuf.Value.Builder builderForValue) { + if (inputBaselinesBuilder_ == null) { + ensureInputBaselinesIsMutable(); + inputBaselines_.add(index, builderForValue.build()); + onChanged(); + } else { + inputBaselinesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder addAllInputBaselines( + java.lang.Iterable values) { + if (inputBaselinesBuilder_ == null) { + ensureInputBaselinesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, inputBaselines_); + onChanged(); + } else { + inputBaselinesBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder clearInputBaselines() { + if (inputBaselinesBuilder_ == null) { + inputBaselines_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + inputBaselinesBuilder_.clear(); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public Builder removeInputBaselines(int index) { + if (inputBaselinesBuilder_ == null) { + ensureInputBaselinesIsMutable(); + inputBaselines_.remove(index); + onChanged(); + } else { + inputBaselinesBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public com.google.protobuf.Value.Builder getInputBaselinesBuilder(int index) { + return getInputBaselinesFieldBuilder().getBuilder(index); + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public com.google.protobuf.ValueOrBuilder getInputBaselinesOrBuilder(int index) { + if (inputBaselinesBuilder_ == null) { + return inputBaselines_.get(index); + } else { + return inputBaselinesBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public java.util.List + getInputBaselinesOrBuilderList() { + if (inputBaselinesBuilder_ != null) { + return inputBaselinesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(inputBaselines_); + } + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public com.google.protobuf.Value.Builder addInputBaselinesBuilder() { + return getInputBaselinesFieldBuilder() + .addBuilder(com.google.protobuf.Value.getDefaultInstance()); + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public com.google.protobuf.Value.Builder addInputBaselinesBuilder(int index) { + return getInputBaselinesFieldBuilder() + .addBuilder(index, com.google.protobuf.Value.getDefaultInstance()); + } + /** + * + * + *
+       * Baseline inputs for this feature.
+       * This overrides the `input_baseline` field of the
+       * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+       * object of the corresponding feature's input metadata. If it's not
+       * specified, the original baselines are not overridden.
+       * 
+ * + * repeated .google.protobuf.Value input_baselines = 1; + */ + public java.util.List getInputBaselinesBuilderList() { + return getInputBaselinesFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.protobuf.Value, + com.google.protobuf.Value.Builder, + com.google.protobuf.ValueOrBuilder> + getInputBaselinesFieldBuilder() { + if (inputBaselinesBuilder_ == null) { + inputBaselinesBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.protobuf.Value, + com.google.protobuf.Value.Builder, + com.google.protobuf.ValueOrBuilder>( + inputBaselines_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + inputBaselines_ = null; + } + return inputBaselinesBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride) + } + + // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride) + private static final com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride(); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public InputMetadataOverride parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InputMetadataOverride(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + } + + public static final int INPUTS_FIELD_NUMBER = 1; + + private static final class InputsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + defaultEntry = + com.google.protobuf.MapEntry + . + newDefaultInstance( + com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride.getDefaultInstance()); + } + + private com.google.protobuf.MapField< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + inputs_; + + private com.google.protobuf.MapField< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + internalGetInputs() { + if (inputs_ == null) { + return com.google.protobuf.MapField.emptyMapField(InputsDefaultEntryHolder.defaultEntry); + } + return inputs_; + } + + public int getInputsCount() { + return internalGetInputs().getMap().size(); + } + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public boolean containsInputs(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + return internalGetInputs().getMap().containsKey(key); + } + /** Use {@link #getInputsMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + getInputs() { + return getInputsMap(); + } + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + getInputsMap() { + return internalGetInputs().getMap(); + } + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getInputsOrDefault( + java.lang.String key, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + defaultValue) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + map = internalGetInputs().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getInputsOrThrow(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + map = internalGetInputs().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + output, internalGetInputs(), InputsDefaultEntryHolder.defaultEntry, 1); + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (java.util.Map.Entry< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + entry : internalGetInputs().getMap().entrySet()) { + com.google.protobuf.MapEntry< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + inputs__ = + InputsDefaultEntryHolder.defaultEntry + .newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, inputs__); + } + size += unknownFields.getSerializedSize(); + 
memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride)) { + return super.equals(obj); + } + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride other = + (com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) obj; + + if (!internalGetInputs().equals(other.internalGetInputs())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (!internalGetInputs().getMap().isEmpty()) { + hash = (37 * hash) + INPUTS_FIELD_NUMBER; + hash = (53 * hash) + internalGetInputs().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * The [ExplanationMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata] entries that can be overridden at
+   * [online explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+   * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 1: + return internalGetInputs(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + switch (number) { + case 1: + return internalGetMutableInputs(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.class, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder.class); + } + + // Construct using com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { 
+ if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + internalGetMutableInputs().clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + getDefaultInstanceForType() { + return com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride build() { + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride buildPartial() { + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride result = + new com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride(this); + int from_bitField0_ = bitField0_; + result.inputs_ = internalGetInputs(); + result.inputs_.makeImmutable(); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder 
setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) { + return mergeFrom((com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride other) { + if (other + == com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance()) + return this; + internalGetMutableInputs().mergeFrom(other.internalGetInputs()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private com.google.protobuf.MapField< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + 
inputs_; + + private com.google.protobuf.MapField< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + internalGetInputs() { + if (inputs_ == null) { + return com.google.protobuf.MapField.emptyMapField(InputsDefaultEntryHolder.defaultEntry); + } + return inputs_; + } + + private com.google.protobuf.MapField< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + internalGetMutableInputs() { + onChanged(); + ; + if (inputs_ == null) { + inputs_ = com.google.protobuf.MapField.newMapField(InputsDefaultEntryHolder.defaultEntry); + } + if (!inputs_.isMutable()) { + inputs_ = inputs_.copy(); + } + return inputs_; + } + + public int getInputsCount() { + return internalGetInputs().getMap().size(); + } + /** + * + * + *
+     * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+     * The key is the name of the feature to be overridden. The keys specified
+     * here must exist in the input metadata to be overridden. If a feature is
+     * not specified here, the corresponding feature's input metadata is not
+     * overridden.
+     * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public boolean containsInputs(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + return internalGetInputs().getMap().containsKey(key); + } + /** Use {@link #getInputsMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + getInputs() { + return getInputsMap(); + } + /** + * + * + *
+     * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+     * The key is the name of the feature to be overridden. The keys specified
+     * here must exist in the input metadata to be overridden. If a feature is
+     * not specified here, the corresponding feature's input metadata is not
+     * overridden.
+     * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + getInputsMap() { + return internalGetInputs().getMap(); + } + /** + * + * + *
+     * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+     * The key is the name of the feature to be overridden. The keys specified
+     * here must exist in the input metadata to be overridden. If a feature is
+     * not specified here, the corresponding feature's input metadata is not
+     * overridden.
+     * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getInputsOrDefault( + java.lang.String key, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + defaultValue) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + map = internalGetInputs().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+     * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+     * The key is the name of the feature to be overridden. The keys specified
+     * here must exist in the input metadata to be overridden. If a feature is
+     * not specified here, the corresponding feature's input metadata is not
+     * overridden.
+     * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getInputsOrThrow(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + map = internalGetInputs().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearInputs() { + internalGetMutableInputs().getMutableMap().clear(); + return this; + } + /** + * + * + *
+     * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+     * The key is the name of the feature to be overridden. The keys specified
+     * here must exist in the input metadata to be overridden. If a feature is
+     * not specified here, the corresponding feature's input metadata is not
+     * overridden.
+     * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder removeInputs(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + internalGetMutableInputs().getMutableMap().remove(key); + return this; + } + /** Use alternate mutation accessors instead. */ + @java.lang.Deprecated + public java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + getMutableInputs() { + return internalGetMutableInputs().getMutableMap(); + } + /** + * + * + *
+     * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+     * The key is the name of the feature to be overridden. The keys specified
+     * here must exist in the input metadata to be overridden. If a feature is
+     * not specified here, the corresponding feature's input metadata is not
+     * overridden.
+     * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder putInputs( + java.lang.String key, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + value) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + if (value == null) { + throw new java.lang.NullPointerException(); + } + internalGetMutableInputs().getMutableMap().put(key, value); + return this; + } + /** + * + * + *
+     * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+     * The key is the name of the feature to be overridden. The keys specified
+     * here must exist in the input metadata to be overridden. If a feature is
+     * not specified here, the corresponding feature's input metadata is not
+     * overridden.
+     * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder putAllInputs( + java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + .InputMetadataOverride> + values) { + internalGetMutableInputs().getMutableMap().putAll(values); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) + } + + // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) + private static final com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride(); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ExplanationMetadataOverride parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExplanationMetadataOverride(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public 
com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverrideOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverrideOrBuilder.java new file mode 100644 index 000000000..d668b24b2 --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverrideOrBuilder.java @@ -0,0 +1,120 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/aiplatform/v1beta1/explanation.proto + +package com.google.cloud.aiplatform.v1beta1; + +public interface ExplanationMetadataOverrideOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + int getInputsCount(); + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + boolean containsInputs(java.lang.String key); + /** Use {@link #getInputsMap()} instead. */ + @java.lang.Deprecated + java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + getInputs(); + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + java.util.Map< + java.lang.String, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> + getInputsMap(); + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getInputsOrDefault( + java.lang.String key, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + defaultValue); + /** + * + * + *
+   * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+   * The key is the name of the feature to be overridden. The keys specified
+   * here must exist in the input metadata to be overridden. If a feature is
+   * not specified here, the corresponding feature's input metadata is not
+   * overridden.
+   * 
+ * + * + * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride + getInputsOrThrow(java.lang.String key); +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java index e4612aeb9..eb75dc2a5 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java @@ -466,7 +466,7 @@ public int getTopK() { * *
    * If populated, only returns attributions that have
-   * [output_index][Attributions.output_index] contained in output_indices. It
+   * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
    * must be an ndarray of integers, with the same shape of the output it's
    * explaining.
    * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -489,7 +489,7 @@ public boolean hasOutputIndices() {
    *
    * 
    * If populated, only returns attributions that have
-   * [output_index][Attributions.output_index] contained in output_indices. It
+   * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
    * must be an ndarray of integers, with the same shape of the output it's
    * explaining.
    * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -514,7 +514,7 @@ public com.google.protobuf.ListValue getOutputIndices() {
    *
    * 
    * If populated, only returns attributions that have
-   * [output_index][Attributions.output_index] contained in output_indices. It
+   * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
    * must be an ndarray of integers, with the same shape of the output it's
    * explaining.
    * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1860,7 +1860,7 @@ public Builder clearTopK() {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1882,7 +1882,7 @@ public boolean hasOutputIndices() {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1910,7 +1910,7 @@ public com.google.protobuf.ListValue getOutputIndices() {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1940,7 +1940,7 @@ public Builder setOutputIndices(com.google.protobuf.ListValue value) {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1967,7 +1967,7 @@ public Builder setOutputIndices(com.google.protobuf.ListValue.Builder builderFor
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2001,7 +2001,7 @@ public Builder mergeOutputIndices(com.google.protobuf.ListValue value) {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2029,7 +2029,7 @@ public Builder clearOutputIndices() {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2051,7 +2051,7 @@ public com.google.protobuf.ListValue.Builder getOutputIndicesBuilder() {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2077,7 +2077,7 @@ public com.google.protobuf.ListValueOrBuilder getOutputIndicesOrBuilder() {
      *
      * 
      * If populated, only returns attributions that have
-     * [output_index][Attributions.output_index] contained in output_indices. It
+     * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
      * must be an ndarray of integers, with the same shape of the output it's
      * explaining.
      * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java
index 49e328159..481e2c415 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java
@@ -200,7 +200,7 @@ public interface ExplanationParametersOrBuilder
    *
    * 
    * If populated, only returns attributions that have
-   * [output_index][Attributions.output_index] contained in output_indices. It
+   * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
    * must be an ndarray of integers, with the same shape of the output it's
    * explaining.
    * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -220,7 +220,7 @@ public interface ExplanationParametersOrBuilder
    *
    * 
    * If populated, only returns attributions that have
-   * [output_index][Attributions.output_index] contained in output_indices. It
+   * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
    * must be an ndarray of integers, with the same shape of the output it's
    * explaining.
    * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -240,7 +240,7 @@ public interface ExplanationParametersOrBuilder
    *
    * 
    * If populated, only returns attributions that have
-   * [output_index][Attributions.output_index] contained in output_indices. It
+   * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
    * must be an ndarray of integers, with the same shape of the output it's
    * explaining.
    * If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java
index 024a4c76c..5dba6213f 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java
@@ -71,6 +71,22 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r
       internal_static_google_cloud_aiplatform_v1beta1_FeatureNoiseSigma_NoiseSigmaForFeature_descriptor;
   static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
       internal_static_google_cloud_aiplatform_v1beta1_FeatureNoiseSigma_NoiseSigmaForFeature_fieldAccessorTable;
+  static final com.google.protobuf.Descriptors.Descriptor
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor;
+  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_fieldAccessorTable;
+  static final com.google.protobuf.Descriptors.Descriptor
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor;
+  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_fieldAccessorTable;
+  static final com.google.protobuf.Descriptors.Descriptor
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor;
+  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_fieldAccessorTable;
+  static final com.google.protobuf.Descriptors.Descriptor
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_descriptor;
+  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
     return descriptor;
@@ -128,10 +144,23 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
           + "gle.cloud.aiplatform.v1beta1.FeatureNois"
           + "eSigma.NoiseSigmaForFeature\0323\n\024NoiseSigm"
           + "aForFeature\022\014\n\004name\030\001 \001(\t\022\r\n\005sigma\030\002 \001(\002"
-          + "B\204\001\n#com.google.cloud.aiplatform.v1beta1"
-          + "B\020ExplanationProtoP\001ZIgoogle.golang.org/"
-          + "genproto/googleapis/cloud/aiplatform/v1b"
-          + "eta1;aiplatformb\006proto3"
+          + "\"\265\001\n\027ExplanationSpecOverride\022J\n\nparamete"
+          + "rs\030\001 \001(\01326.google.cloud.aiplatform.v1bet"
+          + "a1.ExplanationParameters\022N\n\010metadata\030\002 \001"
+          + "(\0132<.google.cloud.aiplatform.v1beta1.Exp"
+          + "lanationMetadataOverride\"\312\002\n\033Explanation"
+          + "MetadataOverride\022]\n\006inputs\030\001 \003(\0132H.googl"
+          + "e.cloud.aiplatform.v1beta1.ExplanationMe"
+          + "tadataOverride.InputsEntryB\003\340A\002\032H\n\025Input"
+          + "MetadataOverride\022/\n\017input_baselines\030\001 \003("
+          + "\0132\026.google.protobuf.Value\032\201\001\n\013InputsEntr"
+          + "y\022\013\n\003key\030\001 \001(\t\022a\n\005value\030\002 \001(\0132R.google.c"
+          + "loud.aiplatform.v1beta1.ExplanationMetad"
+          + "ataOverride.InputMetadataOverride:\0028\001B\204\001"
+          + "\n#com.google.cloud.aiplatform.v1beta1B\020E"
+          + "xplanationProtoP\001ZIgoogle.golang.org/gen"
+          + "proto/googleapis/cloud/aiplatform/v1beta"
+          + "1;aiplatformb\006proto3"
     };
     descriptor =
         com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -243,6 +272,42 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
             new java.lang.String[] {
               "Name", "Sigma",
             });
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor =
+        getDescriptor().getMessageTypes().get(10);
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_fieldAccessorTable =
+        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+            internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor,
+            new java.lang.String[] {
+              "Parameters", "Metadata",
+            });
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor =
+        getDescriptor().getMessageTypes().get(11);
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_fieldAccessorTable =
+        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+            internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor,
+            new java.lang.String[] {
+              "Inputs",
+            });
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor =
+        internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor
+            .getNestedTypes()
+            .get(0);
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_fieldAccessorTable =
+        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+            internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor,
+            new java.lang.String[] {
+              "InputBaselines",
+            });
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_descriptor =
+        internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor
+            .getNestedTypes()
+            .get(1);
+    internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_fieldAccessorTable =
+        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+            internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_descriptor,
+            new java.lang.String[] {
+              "Key", "Value",
+            });
     com.google.protobuf.ExtensionRegistry registry =
         com.google.protobuf.ExtensionRegistry.newInstance();
     registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverride.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverride.java
new file mode 100644
index 000000000..0ceb1bee8
--- /dev/null
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverride.java
@@ -0,0 +1,1051 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/cloud/aiplatform/v1beta1/explanation.proto
+
+package com.google.cloud.aiplatform.v1beta1;
+
+/**
+ *
+ *
+ * 
+ * The [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] entries that can be overridden at [online
+ * explanation][PredictionService.Explain][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+ * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationSpecOverride} + */ +public final class ExplanationSpecOverride extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) + ExplanationSpecOverrideOrBuilder { + private static final long serialVersionUID = 0L; + // Use ExplanationSpecOverride.newBuilder() to construct. + private ExplanationSpecOverride(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ExplanationSpecOverride() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ExplanationSpecOverride(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ExplanationSpecOverride( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder subBuilder = null; + if (parameters_ != null) { + subBuilder = parameters_.toBuilder(); + } + parameters_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.ExplanationParameters.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(parameters_); + parameters_ = subBuilder.buildPartial(); + } + + break; + } + case 18: + { + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder subBuilder = + null; + if 
(metadata_ != null) { + subBuilder = metadata_.toBuilder(); + } + metadata_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(metadata_); + metadata_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.class, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder.class); + } + + public static final int PARAMETERS_FIELD_NUMBER = 1; + private com.google.cloud.aiplatform.v1beta1.ExplanationParameters parameters_; + /** + * + * + *
+   * The parameters to be overridden. Note that the
+   * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+   * no parameter is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + * + * @return Whether the parameters field is set. + */ + @java.lang.Override + public boolean hasParameters() { + return parameters_ != null; + } + /** + * + * + *
+   * The parameters to be overridden. Note that the
+   * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+   * no parameter is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + * + * @return The parameters. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters() { + return parameters_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance() + : parameters_; + } + /** + * + * + *
+   * The parameters to be overridden. Note that the
+   * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+   * no parameter is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder + getParametersOrBuilder() { + return getParameters(); + } + + public static final int METADATA_FIELD_NUMBER = 2; + private com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata_; + /** + * + * + *
+   * The metadata to be overridden. If not specified, no metadata is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + * + * @return Whether the metadata field is set. + */ + @java.lang.Override + public boolean hasMetadata() { + return metadata_ != null; + } + /** + * + * + *
+   * The metadata to be overridden. If not specified, no metadata is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + * + * @return The metadata. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride getMetadata() { + return metadata_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance() + : metadata_; + } + /** + * + * + *
+   * The metadata to be overridden. If not specified, no metadata is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder + getMetadataOrBuilder() { + return getMetadata(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (parameters_ != null) { + output.writeMessage(1, getParameters()); + } + if (metadata_ != null) { + output.writeMessage(2, getMetadata()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (parameters_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getParameters()); + } + if (metadata_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMetadata()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride)) { + return super.equals(obj); + } + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride other = + (com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) obj; + + if (hasParameters() != other.hasParameters()) return false; + if (hasParameters()) { + if (!getParameters().equals(other.getParameters())) return false; + } + if (hasMetadata() != other.hasMetadata()) return false; + if (hasMetadata()) { + if (!getMetadata().equals(other.getMetadata())) return false; + } + if 
(!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasParameters()) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + getParameters().hashCode(); + } + if (hasMetadata()) { + hash = (37 * hash) + METADATA_FIELD_NUMBER; + hash = (53 * hash) + getMetadata().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder 
newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * The [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] entries that can be overridden at [online
+   * explanation][PredictionService.Explain][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+   * 
+ * + * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationSpecOverride} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.class, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder.class); + } + + // Construct using com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (parametersBuilder_ == null) { + parameters_ = null; + } else { + parameters_ = null; + parametersBuilder_ = null; + } + if (metadataBuilder_ == null) { + metadata_ = null; + } else { + metadata_ = null; + metadataBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return 
com.google.cloud.aiplatform.v1beta1.ExplanationProto + .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getDefaultInstanceForType() { + return com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride build() { + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride buildPartial() { + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride result = + new com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride(this); + if (parametersBuilder_ == null) { + result.parameters_ = parameters_; + } else { + result.parameters_ = parametersBuilder_.build(); + } + if (metadataBuilder_ == null) { + result.metadata_ = metadata_; + } else { + result.metadata_ = metadataBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override 
+ public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) { + return mergeFrom((com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride other) { + if (other == com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance()) + return this; + if (other.hasParameters()) { + mergeParameters(other.getParameters()); + } + if (other.hasMetadata()) { + mergeMetadata(other.getMetadata()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private com.google.cloud.aiplatform.v1beta1.ExplanationParameters parameters_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationParameters, + com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder, + 
com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder> + parametersBuilder_; + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + * + * @return Whether the parameters field is set. + */ + public boolean hasParameters() { + return parametersBuilder_ != null || parameters_ != null; + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + * + * @return The parameters. + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters() { + if (parametersBuilder_ == null) { + return parameters_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance() + : parameters_; + } else { + return parametersBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + public Builder setParameters(com.google.cloud.aiplatform.v1beta1.ExplanationParameters value) { + if (parametersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + parameters_ = value; + onChanged(); + } else { + parametersBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + public Builder setParameters( + com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder builderForValue) { + if (parametersBuilder_ == null) { + parameters_ = builderForValue.build(); + onChanged(); + } else { + parametersBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + public Builder mergeParameters( + com.google.cloud.aiplatform.v1beta1.ExplanationParameters value) { + if (parametersBuilder_ == null) { + if (parameters_ != null) { + parameters_ = + com.google.cloud.aiplatform.v1beta1.ExplanationParameters.newBuilder(parameters_) + .mergeFrom(value) + .buildPartial(); + } else { + parameters_ = value; + } + onChanged(); + } else { + parametersBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + public Builder clearParameters() { + if (parametersBuilder_ == null) { + parameters_ = null; + onChanged(); + } else { + parameters_ = null; + parametersBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder + getParametersBuilder() { + + onChanged(); + return getParametersFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder + getParametersOrBuilder() { + if (parametersBuilder_ != null) { + return parametersBuilder_.getMessageOrBuilder(); + } else { + return parameters_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance() + : parameters_; + } + } + /** + * + * + *
+     * The parameters to be overridden. Note that the
+     * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+     * no parameter is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationParameters, + com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder> + getParametersFieldBuilder() { + if (parametersBuilder_ == null) { + parametersBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationParameters, + com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder>( + getParameters(), getParentForChildren(), isClean()); + parameters_ = null; + } + return parametersBuilder_; + } + + private com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder> + metadataBuilder_; + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + * + * @return Whether the metadata field is set. + */ + public boolean hasMetadata() { + return metadataBuilder_ != null || metadata_ != null; + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + * + * @return The metadata. + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride getMetadata() { + if (metadataBuilder_ == null) { + return metadata_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance() + : metadata_; + } else { + return metadataBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + public Builder setMetadata( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride value) { + if (metadataBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + metadata_ = value; + onChanged(); + } else { + metadataBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + public Builder setMetadata( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder builderForValue) { + if (metadataBuilder_ == null) { + metadata_ = builderForValue.build(); + onChanged(); + } else { + metadataBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + public Builder mergeMetadata( + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride value) { + if (metadataBuilder_ == null) { + if (metadata_ != null) { + metadata_ = + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.newBuilder(metadata_) + .mergeFrom(value) + .buildPartial(); + } else { + metadata_ = value; + } + onChanged(); + } else { + metadataBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + public Builder clearMetadata() { + if (metadataBuilder_ == null) { + metadata_ = null; + onChanged(); + } else { + metadata_ = null; + metadataBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder + getMetadataBuilder() { + + onChanged(); + return getMetadataFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder + getMetadataOrBuilder() { + if (metadataBuilder_ != null) { + return metadataBuilder_.getMessageOrBuilder(); + } else { + return metadata_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance() + : metadata_; + } + } + /** + * + * + *
+     * The metadata to be overridden. If not specified, no metadata is overridden.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder> + getMetadataFieldBuilder() { + if (metadataBuilder_ == null) { + metadataBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder>( + getMetadata(), getParentForChildren(), isClean()); + metadata_ = null; + } + return metadataBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) + } + + // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) + private static final com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride(); + } + + public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ExplanationSpecOverride parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExplanationSpecOverride(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverrideOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverrideOrBuilder.java new file mode 100644 index 000000000..390082aab --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverrideOrBuilder.java @@ -0,0 +1,101 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/cloud/aiplatform/v1beta1/explanation.proto + +package com.google.cloud.aiplatform.v1beta1; + +public interface ExplanationSpecOverrideOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The parameters to be overridden. Note that the
+   * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+   * no parameter is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + * + * @return Whether the parameters field is set. + */ + boolean hasParameters(); + /** + * + * + *
+   * The parameters to be overridden. Note that the
+   * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+   * no parameter is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + * + * @return The parameters. + */ + com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters(); + /** + * + * + *
+   * The parameters to be overridden. Note that the
+   * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+   * no parameter is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1; + */ + com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder getParametersOrBuilder(); + + /** + * + * + *
+   * The metadata to be overridden. If not specified, no metadata is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + * + * @return Whether the metadata field is set. + */ + boolean hasMetadata(); + /** + * + * + *
+   * The metadata to be overridden. If not specified, no metadata is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + * + * @return The metadata. + */ + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride getMetadata(); + /** + * + * + *
+   * The metadata to be overridden. If not specified, no metadata is overridden.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2; + */ + com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder getMetadataOrBuilder(); +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java index 12a7d312f..15577f997 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java @@ -169,14 +169,14 @@ public interface OutputConfigOrBuilder * * *
-     * The Google Cloud Storage location where the Model artifact is to be
+     * The Cloud Storage location where the Model artifact is to be
      * written to. Under the directory given as the destination a new one with
      * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
      * will be created. Inside, the Model and any of its supporting files
      * will be written.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `ARTIFACT`.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -188,14 +188,14 @@ public interface OutputConfigOrBuilder * * *
-     * The Google Cloud Storage location where the Model artifact is to be
+     * The Cloud Storage location where the Model artifact is to be
      * written to. Under the directory given as the destination a new one with
      * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
      * will be created. Inside, the Model and any of its supporting files
      * will be written.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `ARTIFACT`.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -207,14 +207,14 @@ public interface OutputConfigOrBuilder * * *
-     * The Google Cloud Storage location where the Model artifact is to be
+     * The Cloud Storage location where the Model artifact is to be
      * written to. Under the directory given as the destination a new one with
      * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
      * will be created. Inside, the Model and any of its supporting files
      * will be written.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `ARTIFACT`.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -227,8 +227,8 @@ public interface OutputConfigOrBuilder *
      * The Google Container Registry or Artifact Registry uri where the
      * Model container image will be copied to.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `IMAGE`.
      * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -243,8 +243,8 @@ public interface OutputConfigOrBuilder *
      * The Google Container Registry or Artifact Registry uri where the
      * Model container image will be copied to.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `IMAGE`.
      * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -259,8 +259,8 @@ public interface OutputConfigOrBuilder *
      * The Google Container Registry or Artifact Registry uri where the
      * Model container image will be copied to.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `IMAGE`.
      * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -458,14 +458,14 @@ public com.google.protobuf.ByteString getExportFormatIdBytes() { * * *
-     * The Google Cloud Storage location where the Model artifact is to be
+     * The Cloud Storage location where the Model artifact is to be
      * written to. Under the directory given as the destination a new one with
      * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
      * will be created. Inside, the Model and any of its supporting files
      * will be written.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `ARTIFACT`.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -480,14 +480,14 @@ public boolean hasArtifactDestination() { * * *
-     * The Google Cloud Storage location where the Model artifact is to be
+     * The Cloud Storage location where the Model artifact is to be
      * written to. Under the directory given as the destination a new one with
      * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
      * will be created. Inside, the Model and any of its supporting files
      * will be written.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `ARTIFACT`.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -504,14 +504,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getArtifactDestination * * *
-     * The Google Cloud Storage location where the Model artifact is to be
+     * The Cloud Storage location where the Model artifact is to be
      * written to. Under the directory given as the destination a new one with
      * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
      * will be created. Inside, the Model and any of its supporting files
      * will be written.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `ARTIFACT`.
      * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -530,8 +530,8 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getArtifactDestination *
      * The Google Container Registry or Artifact Registry uri where the
      * Model container image will be copied to.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `IMAGE`.
      * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -549,8 +549,8 @@ public boolean hasImageDestination() { *
      * The Google Container Registry or Artifact Registry uri where the
      * Model container image will be copied to.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `IMAGE`.
      * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -570,8 +570,8 @@ public com.google.cloud.aiplatform.v1beta1.ContainerRegistryDestination getImage *
      * The Google Container Registry or Artifact Registry uri where the
      * Model container image will be copied to.
-     * This field should only be set when
-     * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+     * This field should only be set when the `exportableContent` field of the
+     * [Model.supported_export_formats] object contains `IMAGE`.
      * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1102,14 +1102,14 @@ public Builder setExportFormatIdBytes(com.google.protobuf.ByteString value) { * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1123,14 +1123,14 @@ public boolean hasArtifactDestination() { * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1150,14 +1150,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getArtifactDestination * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1180,14 +1180,14 @@ public Builder setArtifactDestination( * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1207,14 +1207,14 @@ public Builder setArtifactDestination( * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1241,14 +1241,14 @@ public Builder mergeArtifactDestination( * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1268,14 +1268,14 @@ public Builder clearArtifactDestination() { * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1290,14 +1290,14 @@ public Builder clearArtifactDestination() { * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1316,14 +1316,14 @@ public Builder clearArtifactDestination() { * * *
-       * The Google Cloud Storage location where the Model artifact is to be
+       * The Cloud Storage location where the Model artifact is to be
        * written to. Under the directory given as the destination a new one with
        * name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        * will be created. Inside, the Model and any of its supporting files
        * will be written.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `ARTIFACT`.
        * 
* * .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3; @@ -1357,8 +1357,8 @@ public Builder clearArtifactDestination() { *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1375,8 +1375,8 @@ public boolean hasImageDestination() { *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1401,8 +1401,8 @@ public boolean hasImageDestination() { *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1428,8 +1428,8 @@ public Builder setImageDestination( *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1453,8 +1453,8 @@ public Builder setImageDestination( *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1485,8 +1485,8 @@ public Builder mergeImageDestination( *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1509,8 +1509,8 @@ public Builder clearImageDestination() { *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1528,8 +1528,8 @@ public Builder clearImageDestination() { *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; @@ -1552,8 +1552,8 @@ public Builder clearImageDestination() { *
        * The Google Container Registry or Artifact Registry uri where the
        * Model container image will be copied to.
-       * This field should only be set when
-       * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+       * This field should only be set when the `exportableContent` field of the
+       * [Model.supported_export_formats] object contains `IMAGE`.
        * 
* * .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java index ec7a1316b..b76ec6200 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java @@ -27,6 +27,7 @@ * supported for Datasets containing DataItems. * If any of the filters in this message are to match nothing, then they can be * set as '-' (the minus sign). + * Supported only for unstructured Datasets. *
* * Protobuf type {@code google.cloud.aiplatform.v1beta1.FilterSplit} @@ -141,7 +142,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -169,7 +170,7 @@ public java.lang.String getTrainingFilter() { * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -200,7 +201,7 @@ public com.google.protobuf.ByteString getTrainingFilterBytes() { * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -228,7 +229,7 @@ public java.lang.String getValidationFilter() { * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -259,7 +260,7 @@ public com.google.protobuf.ByteString getValidationFilterBytes() { * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -287,7 +288,7 @@ public java.lang.String getTestFilter() { * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -494,6 +495,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * supported for Datasets containing DataItems. * If any of the filters in this message are to match nothing, then they can be * set as '-' (the minus sign). + * Supported only for unstructured Datasets. *
* * Protobuf type {@code google.cloud.aiplatform.v1beta1.FilterSplit} @@ -670,7 +672,7 @@ public Builder mergeFrom( * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -697,7 +699,7 @@ public java.lang.String getTrainingFilter() { * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -724,7 +726,7 @@ public com.google.protobuf.ByteString getTrainingFilterBytes() { * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -750,7 +752,7 @@ public Builder setTrainingFilter(java.lang.String value) { * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -772,7 +774,7 @@ public Builder clearTrainingFilter() { * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -801,7 +803,7 @@ public Builder setTrainingFilterBytes(com.google.protobuf.ByteString value) { * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -828,7 +830,7 @@ public java.lang.String getValidationFilter() { * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -855,7 +857,7 @@ public com.google.protobuf.ByteString getValidationFilterBytes() { * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -881,7 +883,7 @@ public Builder setValidationFilter(java.lang.String value) { * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -903,7 +905,7 @@ public Builder clearValidationFilter() { * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -932,7 +934,7 @@ public Builder setValidationFilterBytes(com.google.protobuf.ByteString value) { * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -959,7 +961,7 @@ public java.lang.String getTestFilter() { * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -986,7 +988,7 @@ public com.google.protobuf.ByteString getTestFilterBytes() { * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -1012,7 +1014,7 @@ public Builder setTestFilter(java.lang.String value) { * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -1034,7 +1036,7 @@ public Builder clearTestFilter() { * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java index 1ea8fdbbd..b859b4ca0 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java @@ -31,7 +31,7 @@ public interface FilterSplitOrBuilder * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -48,7 +48,7 @@ public interface FilterSplitOrBuilder * this filter are used to train the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -66,7 +66,7 @@ public interface FilterSplitOrBuilder * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -83,7 +83,7 @@ public interface FilterSplitOrBuilder * this filter are used to validate the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -101,7 +101,7 @@ public interface FilterSplitOrBuilder * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* @@ -118,7 +118,7 @@ public interface FilterSplitOrBuilder * this filter are used to test the Model. A filter with same syntax * as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a * single DataItem is matched by more than one of the FilterSplit filters, - * then it will be assigned to the first set that applies to it in the + * then it is assigned to the first set that applies to it in the * training, validation, test order. *
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java index 5f81ef874..090e468b4 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java @@ -27,7 +27,7 @@ * `test_fraction` may optionally be provided, they must sum to up to 1. If the * provided ones sum to less than 1, the remainder is assigned to sets as * decided by AI Platform. If none of the fractions are set, by default roughly - * 80% of data will be used for training, 10% for validation, and 10% for test. + * 80% of data is used for training, 10% for validation, and 10% for test. *
* * Protobuf type {@code google.cloud.aiplatform.v1beta1.FractionSplit} @@ -374,7 +374,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * `test_fraction` may optionally be provided, they must sum to up to 1. If the * provided ones sum to less than 1, the remainder is assigned to sets as * decided by AI Platform. If none of the fractions are set, by default roughly - * 80% of data will be used for training, 10% for validation, and 10% for test. + * 80% of data is used for training, 10% for validation, and 10% for test. *
* * Protobuf type {@code google.cloud.aiplatform.v1beta1.FractionSplit} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java index 69a860a4e..cd90bc6c8 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java @@ -241,6 +241,23 @@ private HyperparameterTuningJob( input.readMessage( LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); labels_.getMutableMap().put(labels__.getKey(), labels__.getValue()); + break; + } + case 138: + { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null; + if (encryptionSpec_ != null) { + subBuilder = encryptionSpec_.toBuilder(); + } + encryptionSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionSpec_); + encryptionSpec_ = subBuilder.buildPartial(); + } + break; } default: @@ -1045,6 +1062,60 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) { return map.get(key); } + public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 17; + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + /** + * + * + *
+   * Customer-managed encryption key options for a HyperparameterTuningJob.
+   * If this is set, then all resources created by the HyperparameterTuningJob
+   * will be encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key options for a HyperparameterTuningJob.
+   * If this is set, then all resources created by the HyperparameterTuningJob
+   * will be encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key options for a HyperparameterTuningJob.
+   * If this is set, then all resources created by the HyperparameterTuningJob
+   * will be encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -1103,6 +1174,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io } com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 16); + if (encryptionSpec_ != null) { + output.writeMessage(17, getEncryptionSpec()); + } unknownFields.writeTo(output); } @@ -1164,6 +1238,9 @@ public int getSerializedSize() { .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(16, labels__); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(17, getEncryptionSpec()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1216,6 +1293,10 @@ public boolean equals(final java.lang.Object obj) { if (!getError().equals(other.getError())) return false; } if (!internalGetLabels().equals(other.internalGetLabels())) return false; + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1275,6 +1356,10 @@ public int hashCode() { hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + internalGetLabels().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1506,6 +1591,12 @@ public Builder clear() { errorBuilder_ = null; } internalGetMutableLabels().clear(); + if (encryptionSpecBuilder_ == null) { + 
encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } return this; } @@ -1586,6 +1677,11 @@ public com.google.cloud.aiplatform.v1beta1.HyperparameterTuningJob buildPartial( } result.labels_ = internalGetLabels(); result.labels_.makeImmutable(); + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } onBuilt(); return result; } @@ -1705,6 +1801,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.HyperparameterTunin mergeError(other.getError()); } internalGetMutableLabels().mergeFrom(other.internalGetLabels()); + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -4199,6 +4298,211 @@ public Builder putAllLabels(java.util.Map va return this; } + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + encryptionSpecBuilder_; + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key options for a HyperparameterTuningJob.
+     * If this is set, then all resources created by the HyperparameterTuningJob
+     * will be encrypted with the provided encryption key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java index 7634369d1..8de927964 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java @@ -564,4 +564,45 @@ public interface HyperparameterTuningJobOrBuilder * map<string, string> labels = 16; */ java.lang.String getLabelsOrThrow(java.lang.String key); + + /** + * + * + *
+   * Customer-managed encryption key options for a HyperparameterTuningJob.
+   * If this is set, then all resources created by the HyperparameterTuningJob
+   * will be encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key options for a HyperparameterTuningJob.
+   * If this is set, then all resources created by the HyperparameterTuningJob
+   * will be encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key options for a HyperparameterTuningJob.
+   * If this is set, then all resources created by the HyperparameterTuningJob
+   * will be encrypted with the provided encryption key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java index d96f4404b..fac475138 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java @@ -49,40 +49,43 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "aiplatform.v1beta1\032\037google/api/field_beh" + "avior.proto\032\031google/api/resource.proto\0320" + "google/cloud/aiplatform/v1beta1/custom_j" - + "ob.proto\032/google/cloud/aiplatform/v1beta" - + "1/job_state.proto\032+google/cloud/aiplatfo" - + "rm/v1beta1/study.proto\032\037google/protobuf/" - + "timestamp.proto\032\027google/rpc/status.proto" - + "\032\034google/api/annotations.proto\"\317\007\n\027Hyper" - + "parameterTuningJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n" - + "\014display_name\030\002 \001(\tB\003\340A\002\022C\n\nstudy_spec\030\004" - + " \001(\0132*.google.cloud.aiplatform.v1beta1.S" - + "tudySpecB\003\340A\002\022\034\n\017max_trial_count\030\005 \001(\005B\003" - + "\340A\002\022!\n\024parallel_trial_count\030\006 \001(\005B\003\340A\002\022\036" - + "\n\026max_failed_trial_count\030\007 \001(\005\022K\n\016trial_" - + "job_spec\030\010 \001(\0132..google.cloud.aiplatform" - + ".v1beta1.CustomJobSpecB\003\340A\002\022;\n\006trials\030\t " - + "\003(\0132&.google.cloud.aiplatform.v1beta1.Tr" - + "ialB\003\340A\003\022=\n\005state\030\n \001(\0162).google.cloud.a" - + 
"iplatform.v1beta1.JobStateB\003\340A\003\0224\n\013creat" - + "e_time\030\013 \001(\0132\032.google.protobuf.Timestamp" - + "B\003\340A\003\0223\n\nstart_time\030\014 \001(\0132\032.google.proto" - + "buf.TimestampB\003\340A\003\0221\n\010end_time\030\r \001(\0132\032.g" - + "oogle.protobuf.TimestampB\003\340A\003\0224\n\013update_" - + "time\030\016 \001(\0132\032.google.protobuf.TimestampB\003" - + "\340A\003\022&\n\005error\030\017 \001(\0132\022.google.rpc.StatusB\003" - + "\340A\003\022T\n\006labels\030\020 \003(\0132D.google.cloud.aipla" - + "tform.v1beta1.HyperparameterTuningJob.La" - + "belsEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n" - + "\005value\030\002 \001(\t:\0028\001:\225\001\352A\221\001\n1aiplatform.goog" - + "leapis.com/HyperparameterTuningJob\022\\proj" - + "ects/{project}/locations/{location}/hype" - + "rparameterTuningJobs/{hyperparameter_tun" - + "ing_job}B\220\001\n#com.google.cloud.aiplatform" - + ".v1beta1B\034HyperparameterTuningJobProtoP\001" - + "ZIgoogle.golang.org/genproto/googleapis/" - + "cloud/aiplatform/v1beta1;aiplatformb\006pro" - + "to3" + + "ob.proto\0325google/cloud/aiplatform/v1beta" + + "1/encryption_spec.proto\032/google/cloud/ai" + + "platform/v1beta1/job_state.proto\032+google" + + "/cloud/aiplatform/v1beta1/study.proto\032\037g" + + "oogle/protobuf/timestamp.proto\032\027google/r" + + "pc/status.proto\032\034google/api/annotations." 
+ + "proto\"\231\010\n\027HyperparameterTuningJob\022\021\n\004nam" + + "e\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022" + + "C\n\nstudy_spec\030\004 \001(\0132*.google.cloud.aipla" + + "tform.v1beta1.StudySpecB\003\340A\002\022\034\n\017max_tria" + + "l_count\030\005 \001(\005B\003\340A\002\022!\n\024parallel_trial_cou" + + "nt\030\006 \001(\005B\003\340A\002\022\036\n\026max_failed_trial_count\030" + + "\007 \001(\005\022K\n\016trial_job_spec\030\010 \001(\0132..google.c" + + "loud.aiplatform.v1beta1.CustomJobSpecB\003\340" + + "A\002\022;\n\006trials\030\t \003(\0132&.google.cloud.aiplat" + + "form.v1beta1.TrialB\003\340A\003\022=\n\005state\030\n \001(\0162)" + + ".google.cloud.aiplatform.v1beta1.JobStat" + + "eB\003\340A\003\0224\n\013create_time\030\013 \001(\0132\032.google.pro" + + "tobuf.TimestampB\003\340A\003\0223\n\nstart_time\030\014 \001(\013" + + "2\032.google.protobuf.TimestampB\003\340A\003\0221\n\010end" + + "_time\030\r \001(\0132\032.google.protobuf.TimestampB" + + "\003\340A\003\0224\n\013update_time\030\016 \001(\0132\032.google.proto" + + "buf.TimestampB\003\340A\003\022&\n\005error\030\017 \001(\0132\022.goog" + + "le.rpc.StatusB\003\340A\003\022T\n\006labels\030\020 \003(\0132D.goo" + + "gle.cloud.aiplatform.v1beta1.Hyperparame" + + "terTuningJob.LabelsEntry\022H\n\017encryption_s" + + "pec\030\021 \001(\0132/.google.cloud.aiplatform.v1be" + + "ta1.EncryptionSpec\032-\n\013LabelsEntry\022\013\n\003key" + + "\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\225\001\352A\221\001\n1aiplat" + + "form.googleapis.com/HyperparameterTuning" + + "Job\022\\projects/{project}/locations/{locat" + + "ion}/hyperparameterTuningJobs/{hyperpara" + + "meter_tuning_job}B\220\001\n#com.google.cloud.a" + + "iplatform.v1beta1B\034HyperparameterTuningJ" + + "obProtoP\001ZIgoogle.golang.org/genproto/go" + + "ogleapis/cloud/aiplatform/v1beta1;aiplat" + + "formb\006proto3" }; descriptor = 
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -91,6 +94,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.CustomJobProto.getDescriptor(), + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.StudyProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), @@ -118,6 +122,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "UpdateTime", "Error", "Labels", + "EncryptionSpec", }); internal_static_google_cloud_aiplatform_v1beta1_HyperparameterTuningJob_LabelsEntry_descriptor = internal_static_google_cloud_aiplatform_v1beta1_HyperparameterTuningJob_descriptor @@ -138,6 +143,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.CustomJobProto.getDescriptor(); + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.StudyProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java index 3b6605cc0..eb71199dc 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java @@ -553,17 +553,17 @@ public 
com.google.cloud.aiplatform.v1beta1.TimestampSplitOrBuilder getTimestampS * * *
-   * The Google Cloud Storage location where the training data is to be
-   * written to. In the given directory a new directory will be created with
+   * The Cloud Storage location where the training data is to be
+   * written to. In the given directory a new directory is created with
    * name:
    * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
    * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-   * All training input data will be written into that directory.
-   * The AI Platform environment variables representing Google Cloud Storage
-   * data URIs will always be represented in the Google Cloud Storage wildcard
+   * All training input data is written into that directory.
+   * The AI Platform environment variables representing Cloud Storage
+   * data URIs are represented in the Cloud Storage wildcard
    * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
    * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-   * * AIP_TRAINING_DATA_URI  =
+   * * AIP_TRAINING_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
    * * AIP_VALIDATION_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -583,17 +583,17 @@ public boolean hasGcsDestination() {
    *
    *
    * 
-   * The Google Cloud Storage location where the training data is to be
-   * written to. In the given directory a new directory will be created with
+   * The Cloud Storage location where the training data is to be
+   * written to. In the given directory a new directory is created with
    * name:
    * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
    * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-   * All training input data will be written into that directory.
-   * The AI Platform environment variables representing Google Cloud Storage
-   * data URIs will always be represented in the Google Cloud Storage wildcard
+   * All training input data is written into that directory.
+   * The AI Platform environment variables representing Cloud Storage
+   * data URIs are represented in the Cloud Storage wildcard
    * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
    * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-   * * AIP_TRAINING_DATA_URI  =
+   * * AIP_TRAINING_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
    * * AIP_VALIDATION_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -616,17 +616,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
    *
    *
    * 
-   * The Google Cloud Storage location where the training data is to be
-   * written to. In the given directory a new directory will be created with
+   * The Cloud Storage location where the training data is to be
+   * written to. In the given directory a new directory is created with
    * name:
    * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
    * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-   * All training input data will be written into that directory.
-   * The AI Platform environment variables representing Google Cloud Storage
-   * data URIs will always be represented in the Google Cloud Storage wildcard
+   * All training input data is written into that directory.
+   * The AI Platform environment variables representing Cloud Storage
+   * data URIs are represented in the Cloud Storage wildcard
    * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
    * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-   * * AIP_TRAINING_DATA_URI  =
+   * * AIP_TRAINING_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
    * * AIP_VALIDATION_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -649,12 +649,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder getGcsDestina
    *
    *
    * 
+   * Only applicable to custom training with tabular Dataset with BigQuery
+   * source.
    * The BigQuery project location where the training data is to be written
    * to. In the given project a new dataset is created with name
    * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
    * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-   * input data will be written into that dataset. In the dataset three
-   * tables will be created, `training`, `validation` and `test`.
+   * input data is written into that dataset. In the dataset three
+   * tables are created, `training`, `validation` and `test`.
    * * AIP_DATA_FORMAT = "bigquery".
    * * AIP_TRAINING_DATA_URI  =
    * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -676,12 +678,14 @@ public boolean hasBigqueryDestination() {
    *
    *
    * 
+   * Only applicable to custom training with tabular Dataset with BigQuery
+   * source.
    * The BigQuery project location where the training data is to be written
    * to. In the given project a new dataset is created with name
    * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
    * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-   * input data will be written into that dataset. In the dataset three
-   * tables will be created, `training`, `validation` and `test`.
+   * input data is written into that dataset. In the dataset three
+   * tables are created, `training`, `validation` and `test`.
    * * AIP_DATA_FORMAT = "bigquery".
    * * AIP_TRAINING_DATA_URI  =
    * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -706,12 +710,14 @@ public com.google.cloud.aiplatform.v1beta1.BigQueryDestination getBigqueryDestin
    *
    *
    * 
+   * Only applicable to custom training with tabular Dataset with BigQuery
+   * source.
    * The BigQuery project location where the training data is to be written
    * to. In the given project a new dataset is created with name
    * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
    * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-   * input data will be written into that dataset. In the dataset three
-   * tables will be created, `training`, `validation` and `test`.
+   * input data is written into that dataset. In the dataset three
+   * tables are created, `training`, `validation` and `test`.
    * * AIP_DATA_FORMAT = "bigquery".
    * * AIP_TRAINING_DATA_URI  =
    * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -799,7 +805,7 @@ public com.google.protobuf.ByteString getDatasetIdBytes() {
    *
    *
    * 
-   * Only applicable to Datasets that have DataItems and Annotations.
+   * Applicable only to Datasets that have DataItems and Annotations.
    * A filter on Annotations of the Dataset. Only Annotations that both
    * match this filter and belong to DataItems not ignored by the split method
    * are used in respectively training, validation or test role, depending on
@@ -830,7 +836,7 @@ public java.lang.String getAnnotationsFilter() {
    *
    *
    * 
-   * Only applicable to Datasets that have DataItems and Annotations.
+   * Applicable only to Datasets that have DataItems and Annotations.
    * A filter on Annotations of the Dataset. Only Annotations that both
    * match this filter and belong to DataItems not ignored by the split method
    * are used in respectively training, validation or test role, depending on
@@ -864,14 +870,13 @@ public com.google.protobuf.ByteString getAnnotationsFilterBytes() {
    *
    *
    * 
-   * Only applicable to custom training.
-   * Google Cloud Storage URI points to a YAML file describing annotation
-   * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-   * https:
-   * //github.com/OAI/OpenAPI-Specification/b
-   * // lob/master/versions/3.0.2.md#schema-object)
+   * Applicable only to custom training with Datasets that have DataItems and
+   * Annotations.
+   * Cloud Storage URI that points to a YAML file describing the annotation
+   * schema. The schema is defined as an OpenAPI 3.0.2
+   * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
    * The schema files that can be used here are found in
-   * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+   * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
    * chosen schema must be consistent with
    * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
    * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -903,14 +908,13 @@ public java.lang.String getAnnotationSchemaUri() {
    *
    *
    * 
-   * Only applicable to custom training.
-   * Google Cloud Storage URI points to a YAML file describing annotation
-   * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-   * https:
-   * //github.com/OAI/OpenAPI-Specification/b
-   * // lob/master/versions/3.0.2.md#schema-object)
+   * Applicable only to custom training with Datasets that have DataItems and
+   * Annotations.
+   * Cloud Storage URI that points to a YAML file describing the annotation
+   * schema. The schema is defined as an OpenAPI 3.0.2
+   * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
    * The schema files that can be used here are found in
-   * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+   * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
    * chosen schema must be consistent with
    * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
    * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -2382,17 +2386,17 @@ public com.google.cloud.aiplatform.v1beta1.TimestampSplit.Builder getTimestampSp
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2412,17 +2416,17 @@ public boolean hasGcsDestination() {
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2452,17 +2456,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2489,17 +2493,17 @@ public Builder setGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestinat
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2524,17 +2528,17 @@ public Builder setGcsDestination(
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2571,17 +2575,17 @@ public Builder mergeGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestin
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2611,17 +2615,17 @@ public Builder clearGcsDestination() {
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2638,17 +2642,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2674,17 +2678,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
      *
      *
      * 
-     * The Google Cloud Storage location where the training data is to be
-     * written to. In the given directory a new directory will be created with
+     * The Cloud Storage location where the training data is to be
+     * written to. In the given directory a new directory is created with
      * name:
      * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-     * All training input data will be written into that directory.
-     * The AI Platform environment variables representing Google Cloud Storage
-     * data URIs will always be represented in the Google Cloud Storage wildcard
+     * All training input data is written into that directory.
+     * The AI Platform environment variables representing Cloud Storage
+     * data URIs are represented in the Cloud Storage wildcard
      * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
      * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-     * * AIP_TRAINING_DATA_URI  =
+     * * AIP_TRAINING_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
      * * AIP_VALIDATION_DATA_URI =
      * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2728,12 +2732,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2755,12 +2761,14 @@ public boolean hasBigqueryDestination() {
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2792,12 +2800,14 @@ public com.google.cloud.aiplatform.v1beta1.BigQueryDestination getBigqueryDestin
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2827,12 +2837,14 @@ public Builder setBigqueryDestination(
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2859,12 +2871,14 @@ public Builder setBigqueryDestination(
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2904,12 +2918,14 @@ public Builder mergeBigqueryDestination(
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2941,12 +2957,14 @@ public Builder clearBigqueryDestination() {
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2966,12 +2984,14 @@ public Builder clearBigqueryDestination() {
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2999,12 +3019,14 @@ public Builder clearBigqueryDestination() {
      *
      *
      * 
+     * Only applicable to custom training with tabular Dataset with BigQuery
+     * source.
      * The BigQuery project location where the training data is to be written
      * to. In the given project a new dataset is created with name
      * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
      * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-     * input data will be written into that dataset. In the dataset three
-     * tables will be created, `training`, `validation` and `test`.
+     * input data is written into that dataset. In the dataset three
+     * tables are created, `training`, `validation` and `test`.
      * * AIP_DATA_FORMAT = "bigquery".
      * * AIP_TRAINING_DATA_URI  =
      * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -3183,7 +3205,7 @@ public Builder setDatasetIdBytes(com.google.protobuf.ByteString value) {
      *
      *
      * 
-     * Only applicable to Datasets that have DataItems and Annotations.
+     * Applicable only to Datasets that have DataItems and Annotations.
      * A filter on Annotations of the Dataset. Only Annotations that both
      * match this filter and belong to DataItems not ignored by the split method
      * are used in respectively training, validation or test role, depending on
@@ -3213,7 +3235,7 @@ public java.lang.String getAnnotationsFilter() {
      *
      *
      * 
-     * Only applicable to Datasets that have DataItems and Annotations.
+     * Applicable only to Datasets that have DataItems and Annotations.
      * A filter on Annotations of the Dataset. Only Annotations that both
      * match this filter and belong to DataItems not ignored by the split method
      * are used in respectively training, validation or test role, depending on
@@ -3243,7 +3265,7 @@ public com.google.protobuf.ByteString getAnnotationsFilterBytes() {
      *
      *
      * 
-     * Only applicable to Datasets that have DataItems and Annotations.
+     * Applicable only to Datasets that have DataItems and Annotations.
      * A filter on Annotations of the Dataset. Only Annotations that both
      * match this filter and belong to DataItems not ignored by the split method
      * are used in respectively training, validation or test role, depending on
@@ -3272,7 +3294,7 @@ public Builder setAnnotationsFilter(java.lang.String value) {
      *
      *
      * 
-     * Only applicable to Datasets that have DataItems and Annotations.
+     * Applicable only to Datasets that have DataItems and Annotations.
      * A filter on Annotations of the Dataset. Only Annotations that both
      * match this filter and belong to DataItems not ignored by the split method
      * are used in respectively training, validation or test role, depending on
@@ -3297,7 +3319,7 @@ public Builder clearAnnotationsFilter() {
      *
      *
      * 
-     * Only applicable to Datasets that have DataItems and Annotations.
+     * Applicable only to Datasets that have DataItems and Annotations.
      * A filter on Annotations of the Dataset. Only Annotations that both
      * match this filter and belong to DataItems not ignored by the split method
      * are used in respectively training, validation or test role, depending on
@@ -3329,14 +3351,13 @@ public Builder setAnnotationsFilterBytes(com.google.protobuf.ByteString value) {
      *
      *
      * 
-     * Only applicable to custom training.
-     * Google Cloud Storage URI points to a YAML file describing annotation
-     * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-     * https:
-     * //github.com/OAI/OpenAPI-Specification/b
-     * // lob/master/versions/3.0.2.md#schema-object)
+     * Applicable only to custom training with Datasets that have DataItems and
+     * Annotations.
+     * Cloud Storage URI that points to a YAML file describing the annotation
+     * schema. The schema is defined as an OpenAPI 3.0.2
+     * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
      * The schema files that can be used here are found in
-     * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+     * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
      * chosen schema must be consistent with
      * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
      * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3367,14 +3388,13 @@ public java.lang.String getAnnotationSchemaUri() {
      *
      *
      * 
-     * Only applicable to custom training.
-     * Google Cloud Storage URI points to a YAML file describing annotation
-     * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-     * https:
-     * //github.com/OAI/OpenAPI-Specification/b
-     * // lob/master/versions/3.0.2.md#schema-object)
+     * Applicable only to custom training with Datasets that have DataItems and
+     * Annotations.
+     * Cloud Storage URI that points to a YAML file describing the annotation
+     * schema. The schema is defined as an OpenAPI 3.0.2
+     * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
      * The schema files that can be used here are found in
-     * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+     * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
      * chosen schema must be consistent with
      * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
      * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3405,14 +3425,13 @@ public com.google.protobuf.ByteString getAnnotationSchemaUriBytes() {
      *
      *
      * 
-     * Only applicable to custom training.
-     * Google Cloud Storage URI points to a YAML file describing annotation
-     * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-     * https:
-     * //github.com/OAI/OpenAPI-Specification/b
-     * // lob/master/versions/3.0.2.md#schema-object)
+     * Applicable only to custom training with Datasets that have DataItems and
+     * Annotations.
+     * Cloud Storage URI that points to a YAML file describing the annotation
+     * schema. The schema is defined as an OpenAPI 3.0.2
+     * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
      * The schema files that can be used here are found in
-     * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+     * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
      * chosen schema must be consistent with
      * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
      * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3442,14 +3461,13 @@ public Builder setAnnotationSchemaUri(java.lang.String value) {
      *
      *
      * 
-     * Only applicable to custom training.
-     * Google Cloud Storage URI points to a YAML file describing annotation
-     * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-     * https:
-     * //github.com/OAI/OpenAPI-Specification/b
-     * // lob/master/versions/3.0.2.md#schema-object)
+     * Applicable only to custom training with Datasets that have DataItems and
+     * Annotations.
+     * Cloud Storage URI that points to a YAML file describing the annotation
+     * schema. The schema is defined as an OpenAPI 3.0.2
+     * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
      * The schema files that can be used here are found in
-     * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+     * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
      * chosen schema must be consistent with
      * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
      * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3475,14 +3493,13 @@ public Builder clearAnnotationSchemaUri() {
      *
      *
      * 
-     * Only applicable to custom training.
-     * Google Cloud Storage URI points to a YAML file describing annotation
-     * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-     * https:
-     * //github.com/OAI/OpenAPI-Specification/b
-     * // lob/master/versions/3.0.2.md#schema-object)
+     * Applicable only to custom training with Datasets that have DataItems and
+     * Annotations.
+     * Cloud Storage URI that points to a YAML file describing the annotation
+     * schema. The schema is defined as an OpenAPI 3.0.2
+     * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
      * The schema files that can be used here are found in
-     * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+     * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
      * chosen schema must be consistent with
      * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
      * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java
index aff3c1765..552e4ae07 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java
@@ -173,17 +173,17 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
-   * The Google Cloud Storage location where the training data is to be
-   * written to. In the given directory a new directory will be created with
+   * The Cloud Storage location where the training data is to be
+   * written to. In the given directory a new directory is created with
    * name:
    * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
    * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-   * All training input data will be written into that directory.
-   * The AI Platform environment variables representing Google Cloud Storage
-   * data URIs will always be represented in the Google Cloud Storage wildcard
+   * All training input data is written into that directory.
+   * The AI Platform environment variables representing Cloud Storage
+   * data URIs are represented in the Cloud Storage wildcard
    * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
    * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-   * * AIP_TRAINING_DATA_URI  =
+   * * AIP_TRAINING_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
    * * AIP_VALIDATION_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -200,17 +200,17 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
-   * The Google Cloud Storage location where the training data is to be
-   * written to. In the given directory a new directory will be created with
+   * The Cloud Storage location where the training data is to be
+   * written to. In the given directory a new directory is created with
    * name:
    * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
    * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-   * All training input data will be written into that directory.
-   * The AI Platform environment variables representing Google Cloud Storage
-   * data URIs will always be represented in the Google Cloud Storage wildcard
+   * All training input data is written into that directory.
+   * The AI Platform environment variables representing Cloud Storage
+   * data URIs are represented in the Cloud Storage wildcard
    * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
    * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-   * * AIP_TRAINING_DATA_URI  =
+   * * AIP_TRAINING_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
    * * AIP_VALIDATION_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -227,17 +227,17 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
-   * The Google Cloud Storage location where the training data is to be
-   * written to. In the given directory a new directory will be created with
+   * The Cloud Storage location where the training data is to be
+   * written to. In the given directory a new directory is created with
    * name:
    * `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
    * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
-   * All training input data will be written into that directory.
-   * The AI Platform environment variables representing Google Cloud Storage
-   * data URIs will always be represented in the Google Cloud Storage wildcard
+   * All training input data is written into that directory.
+   * The AI Platform environment variables representing Cloud Storage
+   * data URIs are represented in the Cloud Storage wildcard
    * format to support sharded data. e.g.: "gs://.../training-*.jsonl"
    * * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
-   * * AIP_TRAINING_DATA_URI  =
+   * * AIP_TRAINING_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
    * * AIP_VALIDATION_DATA_URI =
    * "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -253,12 +253,14 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
+   * Only applicable to custom training with tabular Dataset with BigQuery
+   * source.
    * The BigQuery project location where the training data is to be written
    * to. In the given project a new dataset is created with name
    * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
    * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-   * input data will be written into that dataset. In the dataset three
-   * tables will be created, `training`, `validation` and `test`.
+   * input data is written into that dataset. In the dataset three
+   * tables are created, `training`, `validation` and `test`.
    * * AIP_DATA_FORMAT = "bigquery".
    * * AIP_TRAINING_DATA_URI  =
    * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -277,12 +279,14 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
+   * Only applicable to custom training with tabular Dataset with BigQuery
+   * source.
    * The BigQuery project location where the training data is to be written
    * to. In the given project a new dataset is created with name
    * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
    * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-   * input data will be written into that dataset. In the dataset three
-   * tables will be created, `training`, `validation` and `test`.
+   * input data is written into that dataset. In the dataset three
+   * tables are created, `training`, `validation` and `test`.
    * * AIP_DATA_FORMAT = "bigquery".
    * * AIP_TRAINING_DATA_URI  =
    * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -301,12 +305,14 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
+   * Only applicable to custom training with tabular Dataset with BigQuery
+   * source.
    * The BigQuery project location where the training data is to be written
    * to. In the given project a new dataset is created with name
    * `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
    * where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
-   * input data will be written into that dataset. In the dataset three
-   * tables will be created, `training`, `validation` and `test`.
+   * input data is written into that dataset. In the dataset three
+   * tables are created, `training`, `validation` and `test`.
    * * AIP_DATA_FORMAT = "bigquery".
    * * AIP_TRAINING_DATA_URI  =
    * "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -362,7 +368,7 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
-   * Only applicable to Datasets that have DataItems and Annotations.
+   * Applicable only to Datasets that have DataItems and Annotations.
    * A filter on Annotations of the Dataset. Only Annotations that both
    * match this filter and belong to DataItems not ignored by the split method
    * are used in respectively training, validation or test role, depending on
@@ -382,7 +388,7 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
-   * Only applicable to Datasets that have DataItems and Annotations.
+   * Applicable only to Datasets that have DataItems and Annotations.
    * A filter on Annotations of the Dataset. Only Annotations that both
    * match this filter and belong to DataItems not ignored by the split method
    * are used in respectively training, validation or test role, depending on
@@ -403,14 +409,13 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
-   * Only applicable to custom training.
-   * Google Cloud Storage URI points to a YAML file describing annotation
-   * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-   * https:
-   * //github.com/OAI/OpenAPI-Specification/b
-   * // lob/master/versions/3.0.2.md#schema-object)
+   * Applicable only to custom training with Datasets that have DataItems and
+   * Annotations.
+   * Cloud Storage URI that points to a YAML file describing the annotation
+   * schema. The schema is defined as an OpenAPI 3.0.2
+   * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
    * The schema files that can be used here are found in
-   * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+   * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
    * chosen schema must be consistent with
    * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
    * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -431,14 +436,13 @@ public interface InputDataConfigOrBuilder
    *
    *
    * 
-   * Only applicable to custom training.
-   * Google Cloud Storage URI points to a YAML file describing annotation
-   * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
-   * https:
-   * //github.com/OAI/OpenAPI-Specification/b
-   * // lob/master/versions/3.0.2.md#schema-object)
+   * Applicable only to custom training with Datasets that have DataItems and
+   * Annotations.
+   * Cloud Storage URI that points to a YAML file describing the annotation
+   * schema. The schema is defined as an OpenAPI 3.0.2
+   * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
    * The schema files that can be used here are found in
-   * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+   * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
    * chosen schema must be consistent with
    * [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
    * [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java
index 5f182f9f1..f90dbd52a 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java
@@ -142,212 +142,214 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
           + "form/v1beta1/custom_job.proto\0327google/cl"
           + "oud/aiplatform/v1beta1/data_labeling_job"
           + ".proto\032?google/cloud/aiplatform/v1beta1/"
-          + "hyperparameter_tuning_job.proto\032#google/"
-          + "longrunning/operations.proto\032\033google/pro"
-          + "tobuf/empty.proto\032 google/protobuf/field"
-          + "_mask.proto\"\230\001\n\026CreateCustomJobRequest\0229"
-          + "\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.google"
-          + "apis.com/Location\022C\n\ncustom_job\030\002 \001(\0132*."
-          + "google.cloud.aiplatform.v1beta1.CustomJo"
-          + "bB\003\340A\002\"P\n\023GetCustomJobRequest\0229\n\004name\030\001 "
-          + "\001(\tB+\340A\002\372A%\n#aiplatform.googleapis.com/C"
-          + "ustomJob\"\270\001\n\025ListCustomJobsRequest\0229\n\006pa"
-          + "rent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleapis"
-          + ".com/Location\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_si"
-          + "ze\030\003 \001(\005\022\022\n\npage_token\030\004 \001(\t\022-\n\tread_mas"
-          + "k\030\005 \001(\0132\032.google.protobuf.FieldMask\"r\n\026L"
-          + "istCustomJobsResponse\022?\n\013custom_jobs\030\001 \003"
-          + "(\0132*.google.cloud.aiplatform.v1beta1.Cus"
-          + "tomJob\022\027\n\017next_page_token\030\002 \001(\t\"S\n\026Delet"
-          + "eCustomJobRequest\0229\n\004name\030\001 \001(\tB+\340A\002\372A%\n"
-          + "#aiplatform.googleapis.com/CustomJob\"S\n\026"
-          + "CancelCustomJobRequest\0229\n\004name\030\001 \001(\tB+\340A"
-          + "\002\372A%\n#aiplatform.googleapis.com/CustomJo"
-          + "b\"\253\001\n\034CreateDataLabelingJobRequest\0229\n\006pa"
-          + "rent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleapis"
-          + ".com/Location\022P\n\021data_labeling_job\030\002 \001(\013"
-          + "20.google.cloud.aiplatform.v1beta1.DataL"
-          + "abelingJobB\003\340A\002\"\\\n\031GetDataLabelingJobReq"
-          + "uest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplatform.g"
-          + "oogleapis.com/DataLabelingJob\"\320\001\n\033ListDa"
-          + "taLabelingJobsRequest\0229\n\006parent\030\001 \001(\tB)\340"
-          + "A\002\372A#\n!locations.googleapis.com/Location"
-          + "\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_size\030\003 \001(\005\022\022\n\np"
-          + "age_token\030\004 \001(\t\022-\n\tread_mask\030\005 \001(\0132\032.goo"
-          + "gle.protobuf.FieldMask\022\020\n\010order_by\030\006 \001(\t"
-          + "\"\205\001\n\034ListDataLabelingJobsResponse\022L\n\022dat"
-          + "a_labeling_jobs\030\001 \003(\01320.google.cloud.aip"
-          + "latform.v1beta1.DataLabelingJob\022\027\n\017next_"
-          + "page_token\030\002 \001(\t\"_\n\034DeleteDataLabelingJo"
-          + "bRequest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplatfo"
-          + "rm.googleapis.com/DataLabelingJob\"_\n\034Can"
-          + "celDataLabelingJobRequest\022?\n\004name\030\001 \001(\tB"
-          + "1\340A\002\372A+\n)aiplatform.googleapis.com/DataL"
-          + "abelingJob\"\303\001\n$CreateHyperparameterTunin"
-          + "gJobRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!loc"
-          + "ations.googleapis.com/Location\022`\n\031hyperp"
-          + "arameter_tuning_job\030\002 \001(\01328.google.cloud"
-          + ".aiplatform.v1beta1.HyperparameterTuning"
-          + "JobB\003\340A\002\"l\n!GetHyperparameterTuningJobRe"
-          + "quest\022G\n\004name\030\001 \001(\tB9\340A\002\372A3\n1aiplatform."
-          + "googleapis.com/HyperparameterTuningJob\"\306"
-          + "\001\n#ListHyperparameterTuningJobsRequest\0229"
-          + "\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.google"
-          + "apis.com/Location\022\016\n\006filter\030\002 \001(\t\022\021\n\tpag"
-          + "e_size\030\003 \001(\005\022\022\n\npage_token\030\004 \001(\t\022-\n\tread"
-          + "_mask\030\005 \001(\0132\032.google.protobuf.FieldMask\""
-          + "\235\001\n$ListHyperparameterTuningJobsResponse"
-          + "\022\\\n\032hyperparameter_tuning_jobs\030\001 \003(\01328.g"
-          + "oogle.cloud.aiplatform.v1beta1.Hyperpara"
-          + "meterTuningJob\022\027\n\017next_page_token\030\002 \001(\t\""
-          + "o\n$DeleteHyperparameterTuningJobRequest\022"
-          + "G\n\004name\030\001 \001(\tB9\340A\002\372A3\n1aiplatform.google"
-          + "apis.com/HyperparameterTuningJob\"o\n$Canc"
-          + "elHyperparameterTuningJobRequest\022G\n\004name"
-          + "\030\001 \001(\tB9\340A\002\372A3\n1aiplatform.googleapis.co"
-          + "m/HyperparameterTuningJob\"\264\001\n\037CreateBatc"
-          + "hPredictionJobRequest\0229\n\006parent\030\001 \001(\tB)\340"
-          + "A\002\372A#\n!locations.googleapis.com/Location"
-          + "\022V\n\024batch_prediction_job\030\002 \001(\01323.google."
-          + "cloud.aiplatform.v1beta1.BatchPrediction"
-          + "JobB\003\340A\002\"b\n\034GetBatchPredictionJobRequest"
-          + "\022B\n\004name\030\001 \001(\tB4\340A\002\372A.\n,aiplatform.googl"
-          + "eapis.com/BatchPredictionJob\"\301\001\n\036ListBat"
-          + "chPredictionJobsRequest\0229\n\006parent\030\001 \001(\tB"
+          + "hyperparameter_tuning_job.proto\032/google/"
+          + "cloud/aiplatform/v1beta1/operation.proto"
+          + "\032#google/longrunning/operations.proto\032\033g"
+          + "oogle/protobuf/empty.proto\032 google/proto"
+          + "buf/field_mask.proto\032\037google/protobuf/ti"
+          + "mestamp.proto\"\230\001\n\026CreateCustomJobRequest"
+          + "\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.goog"
+          + "leapis.com/Location\022C\n\ncustom_job\030\002 \001(\0132"
+          + "*.google.cloud.aiplatform.v1beta1.Custom"
+          + "JobB\003\340A\002\"P\n\023GetCustomJobRequest\0229\n\004name\030"
+          + "\001 \001(\tB+\340A\002\372A%\n#aiplatform.googleapis.com"
+          + "/CustomJob\"\270\001\n\025ListCustomJobsRequest\0229\n\006"
+          + "parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleap"
+          + "is.com/Location\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_"
+          + "size\030\003 \001(\005\022\022\n\npage_token\030\004 \001(\t\022-\n\tread_m"
+          + "ask\030\005 \001(\0132\032.google.protobuf.FieldMask\"r\n"
+          + "\026ListCustomJobsResponse\022?\n\013custom_jobs\030\001"
+          + " \003(\0132*.google.cloud.aiplatform.v1beta1.C"
+          + "ustomJob\022\027\n\017next_page_token\030\002 \001(\t\"S\n\026Del"
+          + "eteCustomJobRequest\0229\n\004name\030\001 \001(\tB+\340A\002\372A"
+          + "%\n#aiplatform.googleapis.com/CustomJob\"S"
+          + "\n\026CancelCustomJobRequest\0229\n\004name\030\001 \001(\tB+"
+          + "\340A\002\372A%\n#aiplatform.googleapis.com/Custom"
+          + "Job\"\253\001\n\034CreateDataLabelingJobRequest\0229\n\006"
+          + "parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleap"
+          + "is.com/Location\022P\n\021data_labeling_job\030\002 \001"
+          + "(\01320.google.cloud.aiplatform.v1beta1.Dat"
+          + "aLabelingJobB\003\340A\002\"\\\n\031GetDataLabelingJobR"
+          + "equest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplatform"
+          + ".googleapis.com/DataLabelingJob\"\320\001\n\033List"
+          + "DataLabelingJobsRequest\0229\n\006parent\030\001 \001(\tB"
           + ")\340A\002\372A#\n!locations.googleapis.com/Locati"
           + "on\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_size\030\003 \001(\005\022\022\n"
           + "\npage_token\030\004 \001(\t\022-\n\tread_mask\030\005 \001(\0132\032.g"
-          + "oogle.protobuf.FieldMask\"\216\001\n\037ListBatchPr"
-          + "edictionJobsResponse\022R\n\025batch_prediction"
-          + "_jobs\030\001 \003(\01323.google.cloud.aiplatform.v1"
-          + "beta1.BatchPredictionJob\022\027\n\017next_page_to"
-          + "ken\030\002 \001(\t\"e\n\037DeleteBatchPredictionJobReq"
-          + "uest\022B\n\004name\030\001 \001(\tB4\340A\002\372A.\n,aiplatform.g"
-          + "oogleapis.com/BatchPredictionJob\"e\n\037Canc"
-          + "elBatchPredictionJobRequest\022B\n\004name\030\001 \001("
-          + "\tB4\340A\002\372A.\n,aiplatform.googleapis.com/Bat"
-          + "chPredictionJob2\333$\n\nJobService\022\323\001\n\017Creat"
-          + "eCustomJob\0227.google.cloud.aiplatform.v1b"
-          + "eta1.CreateCustomJobRequest\032*.google.clo"
-          + "ud.aiplatform.v1beta1.CustomJob\"[\202\323\344\223\002A\""
-          + "3/v1beta1/{parent=projects/*/locations/*"
-          + "}/customJobs:\ncustom_job\332A\021parent,custom"
-          + "_job\022\264\001\n\014GetCustomJob\0224.google.cloud.aip"
-          + "latform.v1beta1.GetCustomJobRequest\032*.go"
-          + "ogle.cloud.aiplatform.v1beta1.CustomJob\""
-          + "B\202\323\344\223\0025\0223/v1beta1/{name=projects/*/locat"
-          + "ions/*/customJobs/*}\332A\004name\022\307\001\n\016ListCust"
-          + "omJobs\0226.google.cloud.aiplatform.v1beta1"
-          + ".ListCustomJobsRequest\0327.google.cloud.ai"
-          + "platform.v1beta1.ListCustomJobsResponse\""
-          + "D\202\323\344\223\0025\0223/v1beta1/{parent=projects/*/loc"
-          + "ations/*}/customJobs\332A\006parent\022\340\001\n\017Delete"
-          + "CustomJob\0227.google.cloud.aiplatform.v1be"
-          + "ta1.DeleteCustomJobRequest\032\035.google.long"
-          + "running.Operation\"u\202\323\344\223\0025*3/v1beta1/{nam"
-          + "e=projects/*/locations/*/customJobs/*}\332A"
-          + "\004name\312A0\n\025google.protobuf.Empty\022\027DeleteO"
-          + "perationMetadata\022\260\001\n\017CancelCustomJob\0227.g"
-          + "oogle.cloud.aiplatform.v1beta1.CancelCus"
-          + "tomJobRequest\032\026.google.protobuf.Empty\"L\202"
-          + "\323\344\223\002?\":/v1beta1/{name=projects/*/locatio"
-          + "ns/*/customJobs/*}:cancel:\001*\332A\004name\022\371\001\n\025"
-          + "CreateDataLabelingJob\022=.google.cloud.aip"
-          + "latform.v1beta1.CreateDataLabelingJobReq"
-          + "uest\0320.google.cloud.aiplatform.v1beta1.D"
-          + "ataLabelingJob\"o\202\323\344\223\002N\"9/v1beta1/{parent"
-          + "=projects/*/locations/*}/dataLabelingJob"
-          + "s:\021data_labeling_job\332A\030parent,data_label"
-          + "ing_job\022\314\001\n\022GetDataLabelingJob\022:.google."
-          + "cloud.aiplatform.v1beta1.GetDataLabeling"
-          + "JobRequest\0320.google.cloud.aiplatform.v1b"
-          + "eta1.DataLabelingJob\"H\202\323\344\223\002;\0229/v1beta1/{"
-          + "name=projects/*/locations/*/dataLabeling"
-          + "Jobs/*}\332A\004name\022\337\001\n\024ListDataLabelingJobs\022"
-          + "<.google.cloud.aiplatform.v1beta1.ListDa"
-          + "taLabelingJobsRequest\032=.google.cloud.aip"
-          + "latform.v1beta1.ListDataLabelingJobsResp"
-          + "onse\"J\202\323\344\223\002;\0229/v1beta1/{parent=projects/"
-          + "*/locations/*}/dataLabelingJobs\332A\006parent"
-          + "\022\362\001\n\025DeleteDataLabelingJob\022=.google.clou"
-          + "d.aiplatform.v1beta1.DeleteDataLabelingJ"
-          + "obRequest\032\035.google.longrunning.Operation"
-          + "\"{\202\323\344\223\002;*9/v1beta1/{name=projects/*/loca"
-          + "tions/*/dataLabelingJobs/*}\332A\004name\312A0\n\025g"
-          + "oogle.protobuf.Empty\022\027DeleteOperationMet"
-          + "adata\022\302\001\n\025CancelDataLabelingJob\022=.google"
-          + ".cloud.aiplatform.v1beta1.CancelDataLabe"
-          + "lingJobRequest\032\026.google.protobuf.Empty\"R"
-          + "\202\323\344\223\002E\"@/v1beta1/{name=projects/*/locati"
-          + "ons/*/dataLabelingJobs/*}:cancel:\001*\332A\004na"
-          + "me\022\252\002\n\035CreateHyperparameterTuningJob\022E.g"
-          + "oogle.cloud.aiplatform.v1beta1.CreateHyp"
-          + "erparameterTuningJobRequest\0328.google.clo"
+          + "oogle.protobuf.FieldMask\022\020\n\010order_by\030\006 \001"
+          + "(\t\"\205\001\n\034ListDataLabelingJobsResponse\022L\n\022d"
+          + "ata_labeling_jobs\030\001 \003(\01320.google.cloud.a"
+          + "iplatform.v1beta1.DataLabelingJob\022\027\n\017nex"
+          + "t_page_token\030\002 \001(\t\"_\n\034DeleteDataLabeling"
+          + "JobRequest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplat"
+          + "form.googleapis.com/DataLabelingJob\"_\n\034C"
+          + "ancelDataLabelingJobRequest\022?\n\004name\030\001 \001("
+          + "\tB1\340A\002\372A+\n)aiplatform.googleapis.com/Dat"
+          + "aLabelingJob\"\303\001\n$CreateHyperparameterTun"
+          + "ingJobRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!l"
+          + "ocations.googleapis.com/Location\022`\n\031hype"
+          + "rparameter_tuning_job\030\002 \001(\01328.google.clo"
           + "ud.aiplatform.v1beta1.HyperparameterTuni"
-          + "ngJob\"\207\001\202\323\344\223\002^\"A/v1beta1/{parent=project"
-          + "s/*/locations/*}/hyperparameterTuningJob"
-          + "s:\031hyperparameter_tuning_job\332A parent,hy"
-          + "perparameter_tuning_job\022\354\001\n\032GetHyperpara"
-          + "meterTuningJob\022B.google.cloud.aiplatform"
-          + ".v1beta1.GetHyperparameterTuningJobReque"
-          + "st\0328.google.cloud.aiplatform.v1beta1.Hyp"
-          + "erparameterTuningJob\"P\202\323\344\223\002C\022A/v1beta1/{"
-          + "name=projects/*/locations/*/hyperparamet"
-          + "erTuningJobs/*}\332A\004name\022\377\001\n\034ListHyperpara"
-          + "meterTuningJobs\022D.google.cloud.aiplatfor"
-          + "m.v1beta1.ListHyperparameterTuningJobsRe"
-          + "quest\032E.google.cloud.aiplatform.v1beta1."
-          + "ListHyperparameterTuningJobsResponse\"R\202\323"
-          + "\344\223\002C\022A/v1beta1/{parent=projects/*/locati"
-          + "ons/*}/hyperparameterTuningJobs\332A\006parent"
-          + "\022\213\002\n\035DeleteHyperparameterTuningJob\022E.goo"
-          + "gle.cloud.aiplatform.v1beta1.DeleteHyper"
-          + "parameterTuningJobRequest\032\035.google.longr"
-          + "unning.Operation\"\203\001\202\323\344\223\002C*A/v1beta1/{nam"
-          + "e=projects/*/locations/*/hyperparameterT"
-          + "uningJobs/*}\332A\004name\312A0\n\025google.protobuf."
-          + "Empty\022\027DeleteOperationMetadata\022\332\001\n\035Cance"
-          + "lHyperparameterTuningJob\022E.google.cloud."
-          + "aiplatform.v1beta1.CancelHyperparameterT"
-          + "uningJobRequest\032\026.google.protobuf.Empty\""
-          + "Z\202\323\344\223\002M\"H/v1beta1/{name=projects/*/locat"
-          + "ions/*/hyperparameterTuningJobs/*}:cance"
-          + "l:\001*\332A\004name\022\213\002\n\030CreateBatchPredictionJob"
-          + "\022@.google.cloud.aiplatform.v1beta1.Creat"
-          + "eBatchPredictionJobRequest\0323.google.clou"
-          + "d.aiplatform.v1beta1.BatchPredictionJob\""
-          + "x\202\323\344\223\002T\"\022\022*\022\022*
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `display_name`: supports = and !=
+   *   * `metadata_schema_uri`: supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; @@ -239,7 +249,17 @@ public java.lang.String getFilter() { * * *
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `display_name`: supports = and !=
+   *   * `metadata_schema_uri`: supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; @@ -382,7 +402,7 @@ public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* @@ -410,7 +430,7 @@ public java.lang.String getOrderBy() { * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* @@ -967,7 +987,17 @@ public Builder setParentBytes(com.google.protobuf.ByteString value) { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `display_name`: supports = and !=
+     *   * `metadata_schema_uri`: supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -989,7 +1019,17 @@ public java.lang.String getFilter() { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `display_name`: supports = and !=
+     *   * `metadata_schema_uri`: supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -1011,7 +1051,17 @@ public com.google.protobuf.ByteString getFilterBytes() { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `display_name`: supports = and !=
+     *   * `metadata_schema_uri`: supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -1032,7 +1082,17 @@ public Builder setFilter(java.lang.String value) { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `display_name`: supports = and !=
+     *   * `metadata_schema_uri`: supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -1049,7 +1109,17 @@ public Builder clearFilter() { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `display_name`: supports = and !=
+     *   * `metadata_schema_uri`: supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -1414,7 +1484,7 @@ public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* @@ -1441,7 +1511,7 @@ public java.lang.String getOrderBy() { * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* @@ -1468,7 +1538,7 @@ public com.google.protobuf.ByteString getOrderByBytes() { * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* @@ -1494,7 +1564,7 @@ public Builder setOrderBy(java.lang.String value) { * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* @@ -1516,7 +1586,7 @@ public Builder clearOrderBy() { * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java index 53573bf34..67015fddf 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java @@ -58,7 +58,17 @@ public interface ListDatasetsRequestOrBuilder * * *
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `display_name`: supports = and !=
+   *   * `metadata_schema_uri`: supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; @@ -70,7 +80,17 @@ public interface ListDatasetsRequestOrBuilder * * *
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `display_name`: supports = and !=
+   *   * `metadata_schema_uri`: supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; @@ -160,7 +180,7 @@ public interface ListDatasetsRequestOrBuilder * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* @@ -177,7 +197,7 @@ public interface ListDatasetsRequestOrBuilder * Use "desc" after a field name for descending. * Supported fields: * * `display_name` - * * `data_item_count` * `create_time` + * * `create_time` * * `update_time` *
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java index ff2e69216..e39ebcc74 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java @@ -211,17 +211,15 @@ public com.google.protobuf.ByteString getParentBytes() { * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* @@ -248,17 +246,15 @@ public java.lang.String getFilter() { * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* @@ -924,17 +920,15 @@ public Builder setParentBytes(com.google.protobuf.ByteString value) { * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* @@ -960,17 +954,15 @@ public java.lang.String getFilter() { * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* @@ -996,17 +988,15 @@ public com.google.protobuf.ByteString getFilterBytes() { * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* @@ -1031,17 +1021,15 @@ public Builder setFilter(java.lang.String value) { * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* @@ -1062,17 +1050,15 @@ public Builder clearFilter() { * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java index 11a5d8957..526e58a30 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java @@ -61,17 +61,15 @@ public interface ListEndpointsRequestOrBuilder * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* @@ -87,17 +85,15 @@ public interface ListEndpointsRequestOrBuilder * Optional. An expression for filtering the results of the request. For field names * both snake_case and camelCase are supported. * * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - * * `display_name` supports =, != and regex() - * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + * * `display_name` supports = and, != * * `labels` supports general map functions that is: - * `labels.key=value` - key:value equality - * `labels.key:* or labels:key - key existence - * A key including a space must be quoted. `labels."a key"`. + * * `labels.key=value` - key:value equality + * * `labels.key:* or labels:key - key existence + * * A key including a space must be quoted. `labels."a key"`. * Some examples: * * `endpoint=1` * * `displayName="myDisplayName"` - * * `regex(display_name, "^A") -> The display name starts with an A. * * `labels.myKey="myValue"` *
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java index 4378609ba..f91dd6a21 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java @@ -208,7 +208,19 @@ public com.google.protobuf.ByteString getParentBytes() { * * *
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `model` supports = and !=. `model` represents the Model ID,
+   *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+   *   * `display_name` supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `model=1234`
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; @@ -231,7 +243,19 @@ public java.lang.String getFilter() { * * *
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `model` supports = and !=. `model` represents the Model ID,
+   *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+   *   * `display_name` supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `model=1234`
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; @@ -890,7 +914,19 @@ public Builder setParentBytes(com.google.protobuf.ByteString value) { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `model` supports = and !=. `model` represents the Model ID,
+     *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+     *   * `display_name` supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `model=1234`
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -912,7 +948,19 @@ public java.lang.String getFilter() { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `model` supports = and !=. `model` represents the Model ID,
+     *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+     *   * `display_name` supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `model=1234`
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -934,7 +982,19 @@ public com.google.protobuf.ByteString getFilterBytes() { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `model` supports = and !=. `model` represents the Model ID,
+     *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+     *   * `display_name` supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `model=1234`
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -955,7 +1015,19 @@ public Builder setFilter(java.lang.String value) { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `model` supports = and !=. `model` represents the Model ID,
+     *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+     *   * `display_name` supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `model=1234`
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; @@ -972,7 +1044,19 @@ public Builder clearFilter() { * * *
-     * The standard list filter.
+     * An expression for filtering the results of the request. For field names
+     * both snake_case and camelCase are supported.
+     *   * `model` supports = and !=. `model` represents the Model ID,
+     *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+     *   * `display_name` supports = and !=
+     *   * `labels` supports general map functions that is:
+     *     * `labels.key=value` - key:value equality
+     *     * `labels.key:* or labels:key - key existence
+     *     * A key including a space must be quoted. `labels."a key"`.
+     * Some examples:
+     *   * `model=1234`
+     *   * `displayName="myDisplayName"`
+     *   * `labels.myKey="myValue"`
      * 
* * string filter = 2; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java index b72b77ea2..8b752cb44 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java @@ -58,7 +58,19 @@ public interface ListModelsRequestOrBuilder * * *
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `model` supports = and !=. `model` represents the Model ID,
+   *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+   *   * `display_name` supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `model=1234`
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; @@ -70,7 +82,19 @@ public interface ListModelsRequestOrBuilder * * *
-   * The standard list filter.
+   * An expression for filtering the results of the request. For field names
+   * both snake_case and camelCase are supported.
+   *   * `model` supports = and !=. `model` represents the Model ID,
+   *     i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+   *   * `display_name` supports = and !=
+   *   * `labels` supports general map functions that is:
+   *     * `labels.key=value` - key:value equality
+   *     * `labels.key:* or labels:key - key existence
+   *     * A key including a space must be quoted. `labels."a key"`.
+   * Some examples:
+   *   * `model=1234`
+   *   * `displayName="myDisplayName"`
+   *   * `labels.myKey="myValue"`
    * 
* * string filter = 2; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java index 44b16c16c..1f71333d7 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java @@ -130,25 +130,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { * * *
-   * Immutable. The type of the machine.
-   * Following machine types are supported:
-   * * `n1-standard-2`
-   * * `n1-standard-4`
-   * * `n1-standard-8`
-   * * `n1-standard-16`
-   * * `n1-standard-32`
-   * * `n1-highmem-2`
-   * * `n1-highmem-4`
-   * * `n1-highmem-8`
-   * * `n1-highmem-16`
-   * * `n1-highmem-32`
-   * * `n1-highcpu-2`
-   * * `n1-highcpu-4`
-   * * `n1-highcpu-8`
-   * * `n1-highcpu-16`
-   * * `n1-highcpu-32`
-   * When used for [DeployedMode][] this field is optional and the default value
-   * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+   * Immutable. The type of the machine. For the machine types supported for prediction,
+   * see https://tinyurl.com/aip-docs/predictions/machine-types.
+   * For machine types supported for creating a custom training job, see
+   * https://tinyurl.com/aip-docs/training/configure-compute.
+   * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+   * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
    * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
    * 
* @@ -172,25 +159,12 @@ public java.lang.String getMachineType() { * * *
-   * Immutable. The type of the machine.
-   * Following machine types are supported:
-   * * `n1-standard-2`
-   * * `n1-standard-4`
-   * * `n1-standard-8`
-   * * `n1-standard-16`
-   * * `n1-standard-32`
-   * * `n1-highmem-2`
-   * * `n1-highmem-4`
-   * * `n1-highmem-8`
-   * * `n1-highmem-16`
-   * * `n1-highmem-32`
-   * * `n1-highcpu-2`
-   * * `n1-highcpu-4`
-   * * `n1-highcpu-8`
-   * * `n1-highcpu-16`
-   * * `n1-highcpu-32`
-   * When used for [DeployedMode][] this field is optional and the default value
-   * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+   * Immutable. The type of the machine. For the machine types supported for prediction,
+   * see https://tinyurl.com/aip-docs/predictions/machine-types.
+   * For machine types supported for creating a custom training job, see
+   * https://tinyurl.com/aip-docs/training/configure-compute.
+   * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+   * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
    * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
    * 
* @@ -629,25 +603,12 @@ public Builder mergeFrom( * * *
-     * Immutable. The type of the machine.
-     * Following machine types are supported:
-     * * `n1-standard-2`
-     * * `n1-standard-4`
-     * * `n1-standard-8`
-     * * `n1-standard-16`
-     * * `n1-standard-32`
-     * * `n1-highmem-2`
-     * * `n1-highmem-4`
-     * * `n1-highmem-8`
-     * * `n1-highmem-16`
-     * * `n1-highmem-32`
-     * * `n1-highcpu-2`
-     * * `n1-highcpu-4`
-     * * `n1-highcpu-8`
-     * * `n1-highcpu-16`
-     * * `n1-highcpu-32`
-     * When used for [DeployedMode][] this field is optional and the default value
-     * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+     * Immutable. The type of the machine. For the machine types supported for prediction,
+     * see https://tinyurl.com/aip-docs/predictions/machine-types.
+     * For machine types supported for creating a custom training job, see
+     * https://tinyurl.com/aip-docs/training/configure-compute.
+     * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+     * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
      * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
      * 
* @@ -670,25 +631,12 @@ public java.lang.String getMachineType() { * * *
-     * Immutable. The type of the machine.
-     * Following machine types are supported:
-     * * `n1-standard-2`
-     * * `n1-standard-4`
-     * * `n1-standard-8`
-     * * `n1-standard-16`
-     * * `n1-standard-32`
-     * * `n1-highmem-2`
-     * * `n1-highmem-4`
-     * * `n1-highmem-8`
-     * * `n1-highmem-16`
-     * * `n1-highmem-32`
-     * * `n1-highcpu-2`
-     * * `n1-highcpu-4`
-     * * `n1-highcpu-8`
-     * * `n1-highcpu-16`
-     * * `n1-highcpu-32`
-     * When used for [DeployedMode][] this field is optional and the default value
-     * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+     * Immutable. The type of the machine. For the machine types supported for prediction,
+     * see https://tinyurl.com/aip-docs/predictions/machine-types.
+     * For machine types supported for creating a custom training job, see
+     * https://tinyurl.com/aip-docs/training/configure-compute.
+     * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+     * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
      * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
      * 
* @@ -711,25 +659,12 @@ public com.google.protobuf.ByteString getMachineTypeBytes() { * * *
-     * Immutable. The type of the machine.
-     * Following machine types are supported:
-     * * `n1-standard-2`
-     * * `n1-standard-4`
-     * * `n1-standard-8`
-     * * `n1-standard-16`
-     * * `n1-standard-32`
-     * * `n1-highmem-2`
-     * * `n1-highmem-4`
-     * * `n1-highmem-8`
-     * * `n1-highmem-16`
-     * * `n1-highmem-32`
-     * * `n1-highcpu-2`
-     * * `n1-highcpu-4`
-     * * `n1-highcpu-8`
-     * * `n1-highcpu-16`
-     * * `n1-highcpu-32`
-     * When used for [DeployedMode][] this field is optional and the default value
-     * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+     * Immutable. The type of the machine. For the machine types supported for prediction,
+     * see https://tinyurl.com/aip-docs/predictions/machine-types.
+     * For machine types supported for creating a custom training job, see
+     * https://tinyurl.com/aip-docs/training/configure-compute.
+     * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+     * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
      * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
      * 
* @@ -751,25 +686,12 @@ public Builder setMachineType(java.lang.String value) { * * *
-     * Immutable. The type of the machine.
-     * Following machine types are supported:
-     * * `n1-standard-2`
-     * * `n1-standard-4`
-     * * `n1-standard-8`
-     * * `n1-standard-16`
-     * * `n1-standard-32`
-     * * `n1-highmem-2`
-     * * `n1-highmem-4`
-     * * `n1-highmem-8`
-     * * `n1-highmem-16`
-     * * `n1-highmem-32`
-     * * `n1-highcpu-2`
-     * * `n1-highcpu-4`
-     * * `n1-highcpu-8`
-     * * `n1-highcpu-16`
-     * * `n1-highcpu-32`
-     * When used for [DeployedMode][] this field is optional and the default value
-     * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+     * Immutable. The type of the machine. For the machine types supported for prediction,
+     * see https://tinyurl.com/aip-docs/predictions/machine-types.
+     * For machine types supported for creating a custom training job, see
+     * https://tinyurl.com/aip-docs/training/configure-compute.
+     * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+     * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
      * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
      * 
* @@ -787,25 +709,12 @@ public Builder clearMachineType() { * * *
-     * Immutable. The type of the machine.
-     * Following machine types are supported:
-     * * `n1-standard-2`
-     * * `n1-standard-4`
-     * * `n1-standard-8`
-     * * `n1-standard-16`
-     * * `n1-standard-32`
-     * * `n1-highmem-2`
-     * * `n1-highmem-4`
-     * * `n1-highmem-8`
-     * * `n1-highmem-16`
-     * * `n1-highmem-32`
-     * * `n1-highcpu-2`
-     * * `n1-highcpu-4`
-     * * `n1-highcpu-8`
-     * * `n1-highcpu-16`
-     * * `n1-highcpu-32`
-     * When used for [DeployedMode][] this field is optional and the default value
-     * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+     * Immutable. The type of the machine. For the machine types supported for prediction,
+     * see https://tinyurl.com/aip-docs/predictions/machine-types.
+     * For machine types supported for creating a custom training job, see
+     * https://tinyurl.com/aip-docs/training/configure-compute.
+     * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+     * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
      * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
      * 
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java index 854e04550..43a7bc1d7 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java @@ -27,25 +27,12 @@ public interface MachineSpecOrBuilder * * *
-   * Immutable. The type of the machine.
-   * Following machine types are supported:
-   * * `n1-standard-2`
-   * * `n1-standard-4`
-   * * `n1-standard-8`
-   * * `n1-standard-16`
-   * * `n1-standard-32`
-   * * `n1-highmem-2`
-   * * `n1-highmem-4`
-   * * `n1-highmem-8`
-   * * `n1-highmem-16`
-   * * `n1-highmem-32`
-   * * `n1-highcpu-2`
-   * * `n1-highcpu-4`
-   * * `n1-highcpu-8`
-   * * `n1-highcpu-16`
-   * * `n1-highcpu-32`
-   * When used for [DeployedMode][] this field is optional and the default value
-   * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+   * Immutable. The type of the machine. For the machine types supported for prediction,
+   * see https://tinyurl.com/aip-docs/predictions/machine-types.
+   * For machine types supported for creating a custom training job, see
+   * https://tinyurl.com/aip-docs/training/configure-compute.
+   * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+   * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
    * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
    * 
* @@ -58,25 +45,12 @@ public interface MachineSpecOrBuilder * * *
-   * Immutable. The type of the machine.
-   * Following machine types are supported:
-   * * `n1-standard-2`
-   * * `n1-standard-4`
-   * * `n1-standard-8`
-   * * `n1-standard-16`
-   * * `n1-standard-32`
-   * * `n1-highmem-2`
-   * * `n1-highmem-4`
-   * * `n1-highmem-8`
-   * * `n1-highmem-16`
-   * * `n1-highmem-32`
-   * * `n1-highcpu-2`
-   * * `n1-highcpu-4`
-   * * `n1-highcpu-8`
-   * * `n1-highcpu-16`
-   * * `n1-highcpu-32`
-   * When used for [DeployedMode][] this field is optional and the default value
-   * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+   * Immutable. The type of the machine. For the machine types supported for prediction,
+   * see https://tinyurl.com/aip-docs/predictions/machine-types.
+   * For machine types supported for creating a custom training job, see
+   * https://tinyurl.com/aip-docs/training/configure-compute.
+   * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+   * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
    * [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
    * 
* diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java index 787ef260e..81177ae42 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java @@ -75,6 +75,10 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -95,86 +99,97 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "rm/v1beta1/migratable_resource.proto\032/go" + "ogle/cloud/aiplatform/v1beta1/operation." + "proto\032#google/longrunning/operations.pro" - + "to\"\204\001\n SearchMigratableResourcesRequest\022" - + "9\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googl" - + "eapis.com/Location\022\021\n\tpage_size\030\002 \001(\005\022\022\n" - + "\npage_token\030\003 \001(\t\"\217\001\n!SearchMigratableRe" - + "sourcesResponse\022Q\n\024migratable_resources\030" - + "\001 \003(\01323.google.cloud.aiplatform.v1beta1." 
- + "MigratableResource\022\027\n\017next_page_token\030\002 " - + "\001(\t\"\272\001\n\034BatchMigrateResourcesRequest\0229\n\006" - + "parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleap" - + "is.com/Location\022_\n\031migrate_resource_requ" - + "ests\030\002 \003(\01327.google.cloud.aiplatform.v1b" - + "eta1.MigrateResourceRequestB\003\340A\002\"\374\n\n\026Mig" - + "rateResourceRequest\022\213\001\n&migrate_ml_engin" - + "e_model_version_config\030\001 \001(\0132Y.google.cl" - + "oud.aiplatform.v1beta1.MigrateResourceRe" - + "quest.MigrateMlEngineModelVersionConfigH" - + "\000\022w\n\033migrate_automl_model_config\030\002 \001(\0132P" + + "to\032\027google/rpc/status.proto\"\224\001\n SearchMi" + + "gratableResourcesRequest\0229\n\006parent\030\001 \001(\t" + + "B)\340A\002\372A#\n!locations.googleapis.com/Locat" + + "ion\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001" + + "(\t\022\016\n\006filter\030\004 \001(\t\"\217\001\n!SearchMigratableR" + + "esourcesResponse\022Q\n\024migratable_resources" + + "\030\001 \003(\01323.google.cloud.aiplatform.v1beta1" + + ".MigratableResource\022\027\n\017next_page_token\030\002" + + " \001(\t\"\272\001\n\034BatchMigrateResourcesRequest\0229\n" + + "\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googlea" + + "pis.com/Location\022_\n\031migrate_resource_req" + + "uests\030\002 \003(\01327.google.cloud.aiplatform.v1" + + "beta1.MigrateResourceRequestB\003\340A\002\"\374\n\n\026Mi" + + "grateResourceRequest\022\213\001\n&migrate_ml_engi" + + "ne_model_version_config\030\001 \001(\0132Y.google.c" + + "loud.aiplatform.v1beta1.MigrateResourceR" + + "equest.MigrateMlEngineModelVersionConfig" + + "H\000\022w\n\033migrate_automl_model_config\030\002 \001(\0132" + + "P.google.cloud.aiplatform.v1beta1.Migrat" + + "eResourceRequest.MigrateAutomlModelConfi" + + "gH\000\022{\n\035migrate_automl_dataset_config\030\003 \001" + + "(\0132R.google.cloud.aiplatform.v1beta1.Mig" + + 
"rateResourceRequest.MigrateAutomlDataset" + + "ConfigH\000\022\210\001\n$migrate_data_labeling_datas" + + "et_config\030\004 \001(\0132X.google.cloud.aiplatfor" + + "m.v1beta1.MigrateResourceRequest.Migrate" + + "DataLabelingDatasetConfigH\000\032\225\001\n!MigrateM" + + "lEngineModelVersionConfig\022\025\n\010endpoint\030\001 " + + "\001(\tB\003\340A\002\0228\n\rmodel_version\030\002 \001(\tB!\340A\002\372A\033\n" + + "\031ml.googleapis.com/Version\022\037\n\022model_disp" + + "lay_name\030\003 \001(\tB\003\340A\002\032o\n\030MigrateAutomlMode" + + "lConfig\0222\n\005model\030\001 \001(\tB#\340A\002\372A\035\n\033automl.g" + + "oogleapis.com/Model\022\037\n\022model_display_nam" + + "e\030\002 \001(\tB\003\340A\001\032w\n\032MigrateAutomlDatasetConf" + + "ig\0226\n\007dataset\030\001 \001(\tB%\340A\002\372A\037\n\035automl.goog" + + "leapis.com/Dataset\022!\n\024dataset_display_na" + + "me\030\002 \001(\tB\003\340A\002\032\305\003\n MigrateDataLabelingDat" + + "asetConfig\022<\n\007dataset\030\001 \001(\tB+\340A\002\372A%\n#dat" + + "alabeling.googleapis.com/Dataset\022!\n\024data" + + "set_display_name\030\002 \001(\tB\003\340A\001\022\301\001\n/migrate_" + + "data_labeling_annotated_dataset_configs\030" + + "\003 \003(\0132\202\001.google.cloud.aiplatform.v1beta1" + + ".MigrateResourceRequest.MigrateDataLabel" + + "ingDatasetConfig.MigrateDataLabelingAnno" + + "tatedDatasetConfigB\003\340A\001\032|\n)MigrateDataLa" + + "belingAnnotatedDatasetConfig\022O\n\021annotate" + + "d_dataset\030\001 \001(\tB4\340A\002\372A.\n,datalabeling.go" + + "ogleapis.com/AnnotatedDatasetB\t\n\007request" + + "\"}\n\035BatchMigrateResourcesResponse\022\\\n\032mig" + + "rate_resource_responses\030\001 \003(\01328.google.c" + + "loud.aiplatform.v1beta1.MigrateResourceR" + + "esponse\"\362\001\n\027MigrateResourceResponse\0229\n\007d" + + "ataset\030\001 \001(\tB&\372A#\n!aiplatform.googleapis" + + ".com/DatasetH\000\0225\n\005model\030\002 \001(\tB$\372A!\n\037aipl" + + 
"atform.googleapis.com/ModelH\000\022P\n\023migrata" + + "ble_resource\030\003 \001(\01323.google.cloud.aiplat" + + "form.v1beta1.MigratableResourceB\023\n\021migra" + + "ted_resource\"\352\003\n&BatchMigrateResourcesOp" + + "erationMetadata\022S\n\020generic_metadata\030\001 \001(" + + "\01329.google.cloud.aiplatform.v1beta1.Gene" + + "ricOperationMetadata\022n\n\017partial_results\030" + + "\002 \003(\0132U.google.cloud.aiplatform.v1beta1." + + "BatchMigrateResourcesOperationMetadata.P" + + "artialResult\032\372\001\n\rPartialResult\022#\n\005error\030" + + "\002 \001(\0132\022.google.rpc.StatusH\000\0225\n\005model\030\003 \001" + + "(\tB$\372A!\n\037aiplatform.googleapis.com/Model" + + "H\000\0229\n\007dataset\030\004 \001(\tB&\372A#\n!aiplatform.goo" + + "gleapis.com/DatasetH\000\022H\n\007request\030\001 \001(\01327" + ".google.cloud.aiplatform.v1beta1.Migrate" - + "ResourceRequest.MigrateAutomlModelConfig" - + "H\000\022{\n\035migrate_automl_dataset_config\030\003 \001(" - + "\0132R.google.cloud.aiplatform.v1beta1.Migr" - + "ateResourceRequest.MigrateAutomlDatasetC" - + "onfigH\000\022\210\001\n$migrate_data_labeling_datase" - + "t_config\030\004 \001(\0132X.google.cloud.aiplatform" - + ".v1beta1.MigrateResourceRequest.MigrateD" - + "ataLabelingDatasetConfigH\000\032\225\001\n!MigrateMl" - + "EngineModelVersionConfig\022\025\n\010endpoint\030\001 \001" - + "(\tB\003\340A\002\0228\n\rmodel_version\030\002 \001(\tB!\340A\002\372A\033\n\031" - + "ml.googleapis.com/Version\022\037\n\022model_displ" - + "ay_name\030\003 \001(\tB\003\340A\002\032o\n\030MigrateAutomlModel" - + "Config\0222\n\005model\030\001 \001(\tB#\340A\002\372A\035\n\033automl.go" - + "ogleapis.com/Model\022\037\n\022model_display_name" - + "\030\002 \001(\tB\003\340A\001\032w\n\032MigrateAutomlDatasetConfi" - + "g\0226\n\007dataset\030\001 \001(\tB%\340A\002\372A\037\n\035automl.googl" - + "eapis.com/Dataset\022!\n\024dataset_display_nam" - + "e\030\002 \001(\tB\003\340A\002\032\305\003\n 
MigrateDataLabelingData" - + "setConfig\022<\n\007dataset\030\001 \001(\tB+\340A\002\372A%\n#data" - + "labeling.googleapis.com/Dataset\022!\n\024datas" - + "et_display_name\030\002 \001(\tB\003\340A\001\022\301\001\n/migrate_d" - + "ata_labeling_annotated_dataset_configs\030\003" - + " \003(\0132\202\001.google.cloud.aiplatform.v1beta1." - + "MigrateResourceRequest.MigrateDataLabeli" - + "ngDatasetConfig.MigrateDataLabelingAnnot" - + "atedDatasetConfigB\003\340A\001\032|\n)MigrateDataLab" - + "elingAnnotatedDatasetConfig\022O\n\021annotated" - + "_dataset\030\001 \001(\tB4\340A\002\372A.\n,datalabeling.goo" - + "gleapis.com/AnnotatedDatasetB\t\n\007request\"" - + "}\n\035BatchMigrateResourcesResponse\022\\\n\032migr" - + "ate_resource_responses\030\001 \003(\01328.google.cl" - + "oud.aiplatform.v1beta1.MigrateResourceRe" - + "sponse\"\362\001\n\027MigrateResourceResponse\0229\n\007da" - + "taset\030\001 \001(\tB&\372A#\n!aiplatform.googleapis." - + "com/DatasetH\000\0225\n\005model\030\002 \001(\tB$\372A!\n\037aipla" - + "tform.googleapis.com/ModelH\000\022P\n\023migratab" - + "le_resource\030\003 \001(\01323.google.cloud.aiplatf" - + "orm.v1beta1.MigratableResourceB\023\n\021migrat" - + "ed_resource\"}\n&BatchMigrateResourcesOper" - + "ationMetadata\022S\n\020generic_metadata\030\001 \001(\0132" - + "9.google.cloud.aiplatform.v1beta1.Generi" - + "cOperationMetadata2\233\005\n\020MigrationService\022" - + "\373\001\n\031SearchMigratableResources\022A.google.c" - + "loud.aiplatform.v1beta1.SearchMigratable" - + "ResourcesRequest\032B.google.cloud.aiplatfo" - + "rm.v1beta1.SearchMigratableResourcesResp" - + "onse\"W\202\323\344\223\002H\"C/v1beta1/{parent=projects/" - + "*/locations/*}/migratableResources:searc" - + "h:\001*\332A\006parent\022\271\002\n\025BatchMigrateResources\022" - + "=.google.cloud.aiplatform.v1beta1.BatchM" - + "igrateResourcesRequest\032\035.google.longrunn" - + "ing.Operation\"\301\001\202\323\344\223\002N\"I/v1beta1/{parent" - + 
"=projects/*/locations/*}/migratableResou" - + "rces:batchMigrate:\001*\332A parent,migrate_re" - + "source_requests\312AG\n\035BatchMigrateResource" - + "sResponse\022&BatchMigrateResourcesOperatio" - + "nMetadata\032M\312A\031aiplatform.googleapis.com\322" - + "A.https://www.googleapis.com/auth/cloud-" - + "platformB\211\001\n#com.google.cloud.aiplatform" - + ".v1beta1B\025MigrationServiceProtoP\001ZIgoogl" - + "e.golang.org/genproto/googleapis/cloud/a" - + "iplatform/v1beta1;aiplatformb\006proto3" + + "ResourceRequestB\010\n\006result2\233\005\n\020MigrationS" + + "ervice\022\373\001\n\031SearchMigratableResources\022A.g" + + "oogle.cloud.aiplatform.v1beta1.SearchMig" + + "ratableResourcesRequest\032B.google.cloud.a" + + "iplatform.v1beta1.SearchMigratableResour" + + "cesResponse\"W\202\323\344\223\002H\"C/v1beta1/{parent=pr" + + "ojects/*/locations/*}/migratableResource" + + "s:search:\001*\332A\006parent\022\271\002\n\025BatchMigrateRes" + + "ources\022=.google.cloud.aiplatform.v1beta1" + + ".BatchMigrateResourcesRequest\032\035.google.l" + + "ongrunning.Operation\"\301\001\202\323\344\223\002N\"I/v1beta1/" + + "{parent=projects/*/locations/*}/migratab" + + "leResources:batchMigrate:\001*\332A parent,mig" + + "rate_resource_requests\312AG\n\035BatchMigrateR" + + "esourcesResponse\022&BatchMigrateResourcesO" + + "perationMetadata\032M\312A\031aiplatform.googleap" + + "is.com\322A.https://www.googleapis.com/auth" + + "/cloud-platformB\211\001\n#com.google.cloud.aip" + + "latform.v1beta1B\025MigrationServiceProtoP\001" + + "ZIgoogle.golang.org/genproto/googleapis/" + + "cloud/aiplatform/v1beta1;aiplatformb\006pro" + + "to3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -189,6 +204,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { 
com.google.cloud.aiplatform.v1beta1.MigratableResourceProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor(), com.google.longrunning.OperationsProto.getDescriptor(), + com.google.rpc.StatusProto.getDescriptor(), }); internal_static_google_cloud_aiplatform_v1beta1_SearchMigratableResourcesRequest_descriptor = getDescriptor().getMessageTypes().get(0); @@ -196,7 +212,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_SearchMigratableResourcesRequest_descriptor, new java.lang.String[] { - "Parent", "PageSize", "PageToken", + "Parent", "PageSize", "PageToken", "Filter", }); internal_static_google_cloud_aiplatform_v1beta1_SearchMigratableResourcesResponse_descriptor = getDescriptor().getMessageTypes().get(1); @@ -298,7 +314,17 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_descriptor, new java.lang.String[] { - "GenericMetadata", + "GenericMetadata", "PartialResults", + }); + internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor = + internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_descriptor + .getNestedTypes() + .get(0); + internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor, + new java.lang.String[] { + "Error", "Model", "Dataset", "Request", "Result", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); @@ -320,6 +346,7 
@@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.cloud.aiplatform.v1beta1.MigratableResourceProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor(); com.google.longrunning.OperationsProto.getDescriptor(); + com.google.rpc.StatusProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java index 14d76fee0..e7fcfc7af 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java @@ -302,6 +302,23 @@ private Model( explanationSpec_ = subBuilder.buildPartial(); } + break; + } + case 194: + { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null; + if (encryptionSpec_ != null) { + subBuilder = encryptionSpec_.toBuilder(); + } + encryptionSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionSpec_); + encryptionSpec_ = subBuilder.buildPartial(); + } + break; } case 210: @@ -2790,6 +2807,10 @@ public int getSupportedDeploymentResourcesTypesValue(int index) { * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. 
* If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -2831,6 +2852,10 @@ public com.google.protobuf.ProtocolStringList getSupportedInputStorageFormatsLis * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -2872,6 +2897,10 @@ public int getSupportedInputStorageFormatsCount() { * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. 
However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -2914,6 +2943,10 @@ public java.lang.String getSupportedInputStorageFormats(int index) { * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -3287,19 +3320,20 @@ public com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployed * * *
-   * Output only. The default explanation specification for this Model.
-   * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-   * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The default explanation specification for this Model.
+   * The Model can be used for [requesting
+   * explanation][PredictionService.Explain] after being
+   * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The Model can be used for [batch
+   * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
    * All fields of the explanation_spec can be overridden by
    * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-   * This field is populated only for tabular AutoML Models.
-   * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+   * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
    * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; * * @return Whether the explanationSpec field is set. */ @@ -3311,19 +3345,20 @@ public boolean hasExplanationSpec() { * * *
-   * Output only. The default explanation specification for this Model.
-   * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-   * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The default explanation specification for this Model.
+   * The Model can be used for [requesting
+   * explanation][PredictionService.Explain] after being
+   * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The Model can be used for [batch
+   * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
    * All fields of the explanation_spec can be overridden by
    * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-   * This field is populated only for tabular AutoML Models.
-   * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+   * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
    * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; * * @return The explanationSpec. */ @@ -3337,19 +3372,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() * * *
-   * Output only. The default explanation specification for this Model.
-   * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-   * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The default explanation specification for this Model.
+   * The Model can be used for [requesting
+   * explanation][PredictionService.Explain] after being
+   * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The Model can be used for [batch
+   * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
    * All fields of the explanation_spec can be overridden by
    * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-   * This field is populated only for tabular AutoML Models.
-   * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+   * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
    * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder @@ -3522,6 +3558,57 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) { return map.get(key); } + public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 24; + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + /** + * + * + *
+   * Customer-managed encryption key spec for a Model. If set, this
+   * Model and all sub-resources of this Model will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a Model. If set, this
+   * Model and all sub-resources of this Model will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a Model. If set, this
+   * Model and all sub-resources of this Model will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -3596,6 +3683,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (explanationSpec_ != null) { output.writeMessage(23, getExplanationSpec()); } + if (encryptionSpec_ != null) { + output.writeMessage(24, getEncryptionSpec()); + } if (!getArtifactUriBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 26, artifactUri_); } @@ -3692,6 +3782,9 @@ public int getSerializedSize() { if (explanationSpec_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(23, getExplanationSpec()); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(24, getEncryptionSpec()); + } if (!getArtifactUriBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(26, artifactUri_); } @@ -3752,6 +3845,10 @@ public boolean equals(final java.lang.Object obj) { } if (!getEtag().equals(other.getEtag())) return false; if (!internalGetLabels().equals(other.internalGetLabels())) return false; + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -3825,6 +3922,10 @@ public int hashCode() { hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + internalGetLabels().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -4062,6 +4163,12 @@ public Builder 
clear() { etag_ = ""; internalGetMutableLabels().clear(); + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } return this; } @@ -4163,6 +4270,11 @@ public com.google.cloud.aiplatform.v1beta1.Model buildPartial() { result.etag_ = etag_; result.labels_ = internalGetLabels(); result.labels_.makeImmutable(); + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } onBuilt(); return result; } @@ -4343,6 +4455,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Model other) { onChanged(); } internalGetMutableLabels().mergeFrom(other.internalGetLabels()); + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -6564,6 +6679,10 @@ private void ensureSupportedInputStorageFormatsIsMutable() { * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -6605,6 +6724,10 @@ public com.google.protobuf.ProtocolStringList getSupportedInputStorageFormatsLis * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. 
+ * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -6646,6 +6769,10 @@ public int getSupportedInputStorageFormatsCount() { * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -6688,6 +6815,10 @@ public java.lang.String getSupportedInputStorageFormats(int index) { * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. 
However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -6730,6 +6861,10 @@ public com.google.protobuf.ByteString getSupportedInputStorageFormatsBytes(int i * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -6779,6 +6914,10 @@ public Builder setSupportedInputStorageFormats(int index, java.lang.String value * * `bigquery` * Each instance is a single row in BigQuery. Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -6827,6 +6966,10 @@ public Builder addSupportedInputStorageFormats(java.lang.String value) { * * `bigquery` * Each instance is a single row in BigQuery. 
Uses * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. + * * `file-list` + * Each line of the file is the location of an instance to process, uses + * `gcs_source` field of the + * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. * If this Model doesn't support any of these formats it means it cannot be * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online @@ -6872,6 +7015,10 @@ public Builder addAllSupportedInputStorageFormats(java.lang.Iterable - * Output only. The default explanation specification for this Model. - * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain] - * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated. + * The default explanation specification for this Model. + * The Model can be used for [requesting + * explanation][PredictionService.Explain] after being + * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated. + * The Model can be used for [batch + * explanation][BatchPredictionJob.generate_explanation] iff it is populated. * All fields of the explanation_spec can be overridden by * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of - * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model]. - * This field is populated only for tabular AutoML Models. - * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported. 
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or + * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of + * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. *
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; * * @return Whether the explanationSpec field is set. */ @@ -8197,19 +8349,20 @@ public boolean hasExplanationSpec() { * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; * * @return The explanationSpec. */ @@ -8226,19 +8379,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) { if (explanationSpecBuilder_ == null) { @@ -8257,19 +8411,20 @@ public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanatio * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ public Builder setExplanationSpec( com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder builderForValue) { @@ -8286,19 +8441,20 @@ public Builder setExplanationSpec( * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) { if (explanationSpecBuilder_ == null) { @@ -8321,19 +8477,20 @@ public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanat * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ public Builder clearExplanationSpec() { if (explanationSpecBuilder_ == null) { @@ -8350,19 +8507,20 @@ public Builder clearExplanationSpec() { * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanationSpecBuilder() { @@ -8373,19 +8531,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder() { @@ -8401,19 +8560,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio * * *
-     * Output only. The default explanation specification for this Model.
-     * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-     * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The default explanation specification for this Model.
+     * The Model can be used for [requesting
+     * explanation][PredictionService.Explain] after being
+     * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+     * The Model can be used for [batch
+     * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
      * All fields of the explanation_spec can be overridden by
      * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-     * This field is populated only for tabular AutoML Models.
-     * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+     * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+     * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
      * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ExplanationSpec, @@ -8728,6 +8888,202 @@ public Builder putAllLabels(java.util.Map va return this; } + private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + encryptionSpecBuilder_; + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a Model. If set, this
+     * Model and all sub-resources of this Model will be secured by this key.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java index 3db576bb5..0f8cd2ff7 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java @@ -981,9 +981,8 @@ public com.google.cloud.aiplatform.v1beta1.PortOrBuilder getPortsOrBuilder(int i * container's response in the API response. 
* For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -1028,9 +1027,8 @@ public java.lang.String getPredictRoute() { * container's response in the API response. * For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -1077,9 +1075,8 @@ public com.google.protobuf.ByteString getPredictRouteBytes() { * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). 
* For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -1123,9 +1120,8 @@ public java.lang.String getHealthRoute() { * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). * For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4183,9 +4179,8 @@ public java.util.List getPorts * container's response in the API response. 
* For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4229,9 +4224,8 @@ public java.lang.String getPredictRoute() { * container's response in the API response. * For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4275,9 +4269,8 @@ public com.google.protobuf.ByteString getPredictRouteBytes() { * container's response in the API response. 
* For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4320,9 +4313,8 @@ public Builder setPredictRoute(java.lang.String value) { * container's response in the API response. * For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4361,9 +4353,8 @@ public Builder clearPredictRoute() { * container's response in the API response. 
* For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4408,9 +4399,8 @@ public Builder setPredictRouteBytes(com.google.protobuf.ByteString value) { * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). * For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4453,9 +4443,8 @@ public java.lang.String getHealthRoute() { * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). 
* For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4498,9 +4487,8 @@ public com.google.protobuf.ByteString getHealthRouteBytes() { * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). * For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4542,9 +4530,8 @@ public Builder setHealthRoute(java.lang.String value) { * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). 
* For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -4582,9 +4569,8 @@ public Builder clearHealthRoute() { * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). * For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. 
* If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpecOrBuilder.java index d6d497de8..4a8124685 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpecOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpecOrBuilder.java @@ -736,9 +736,8 @@ public interface ModelContainerSpecOrBuilder * container's response in the API response. * For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -772,9 +771,8 @@ public interface ModelContainerSpecOrBuilder * container's response in the API response. 
* For example, if you set this field to `/foo`, then when AI Platform * receives a prediction request, it forwards the request body in a POST - * request to the following URL on the container: - * <code>localhost:<var>PORT</var>/foo</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * request to the `/foo` path on the port of your container specified by the + * first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -808,9 +806,8 @@ public interface ModelContainerSpecOrBuilder * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). * For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: @@ -843,9 +840,8 @@ public interface ModelContainerSpecOrBuilder * [health * checks](https://tinyurl.com/cust-cont-reqs#checks). 
* For example, if you set this field to `/bar`, then AI Platform - * intermittently sends a GET request to the following URL on the container: - * <code>localhost:<var>PORT</var>/bar</code> - * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s + * intermittently sends a GET request to the `/bar` path on the port of your + * container specified by the first value of this `ModelContainerSpec`'s * [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. * If you don't specify this field, it defaults to the following value when * you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluation.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluation.java index 66f27a69e..a18f74929 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluation.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluation.java @@ -42,6 +42,7 @@ private ModelEvaluation() { name_ = ""; metricsSchemaUri_ = ""; sliceDimensions_ = com.google.protobuf.LazyStringArrayList.EMPTY; + explanationSpecs_ = java.util.Collections.emptyList(); } @java.lang.Override @@ -144,6 +145,22 @@ private ModelEvaluation( break; } + case 74: + { + if (!((mutable_bitField0_ & 0x00000002) != 0)) { + explanationSpecs_ = + new java.util.ArrayList< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpec>(); + mutable_bitField0_ |= 0x00000002; + } + explanationSpecs_.add( + input.readMessage( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpec.parser(), + extensionRegistry)); + break; + } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { @@ -161,6 +178,9 @@ private 
ModelEvaluation( if (((mutable_bitField0_ & 0x00000001) != 0)) { sliceDimensions_ = sliceDimensions_.getUnmodifiableView(); } + if (((mutable_bitField0_ & 0x00000002) != 0)) { + explanationSpecs_ = java.util.Collections.unmodifiableList(explanationSpecs_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } @@ -181,6 +201,1031 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { com.google.cloud.aiplatform.v1beta1.ModelEvaluation.Builder.class); } + public interface ModelEvaluationExplanationSpecOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+     * Explanation type.
+     * For AutoML Image Classification models, possible values are:
+     *   * `image-integrated-gradients`
+     *   * `image-xrai`
+     * 
+ * + * string explanation_type = 1; + * + * @return The explanationType. + */ + java.lang.String getExplanationType(); + /** + * + * + *
+     * Explanation type.
+     * For AutoML Image Classification models, possible values are:
+     *   * `image-integrated-gradients`
+     *   * `image-xrai`
+     * 
+ * + * string explanation_type = 1; + * + * @return The bytes for explanationType. + */ + com.google.protobuf.ByteString getExplanationTypeBytes(); + + /** + * + * + *
+     * Explanation spec details.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + * + * @return Whether the explanationSpec field is set. + */ + boolean hasExplanationSpec(); + /** + * + * + *
+     * Explanation spec details.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + * + * @return The explanationSpec. + */ + com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec(); + /** + * + * + *
+     * Explanation spec details.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder(); + } + /** + * Protobuf type {@code + * google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec} + */ + public static final class ModelEvaluationExplanationSpec + extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) + ModelEvaluationExplanationSpecOrBuilder { + private static final long serialVersionUID = 0L; + // Use ModelEvaluationExplanationSpec.newBuilder() to construct. + private ModelEvaluationExplanationSpec( + com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ModelEvaluationExplanationSpec() { + explanationType_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ModelEvaluationExplanationSpec(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ModelEvaluationExplanationSpec( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + explanationType_ = s; + break; + } + case 18: + { + com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder subBuilder = null; + if 
(explanationSpec_ != null) { + subBuilder = explanationSpec_.toBuilder(); + } + explanationSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.ExplanationSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(explanationSpec_); + explanationSpec_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ModelEvaluationProto + .internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ModelEvaluationProto + .internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .class, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder.class); + } + + public static final int EXPLANATION_TYPE_FIELD_NUMBER = 1; + private volatile java.lang.Object explanationType_; + /** + * + * + *
+     * Explanation type.
+     * For AutoML Image Classification models, possible values are:
+     *   * `image-integrated-gradients`
+     *   * `image-xrai`
+     * 
+ * + * string explanation_type = 1; + * + * @return The explanationType. + */ + @java.lang.Override + public java.lang.String getExplanationType() { + java.lang.Object ref = explanationType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + explanationType_ = s; + return s; + } + } + /** + * + * + *
+     * Explanation type.
+     * For AutoML Image Classification models, possible values are:
+     *   * `image-integrated-gradients`
+     *   * `image-xrai`
+     * 
+ * + * string explanation_type = 1; + * + * @return The bytes for explanationType. + */ + @java.lang.Override + public com.google.protobuf.ByteString getExplanationTypeBytes() { + java.lang.Object ref = explanationType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + explanationType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int EXPLANATION_SPEC_FIELD_NUMBER = 2; + private com.google.cloud.aiplatform.v1beta1.ExplanationSpec explanationSpec_; + /** + * + * + *
+     * Explanation spec details.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + * + * @return Whether the explanationSpec field is set. + */ + @java.lang.Override + public boolean hasExplanationSpec() { + return explanationSpec_ != null; + } + /** + * + * + *
+     * Explanation spec details.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + * + * @return The explanationSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() { + return explanationSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance() + : explanationSpec_; + } + /** + * + * + *
+     * Explanation spec details.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder + getExplanationSpecOrBuilder() { + return getExplanationSpec(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getExplanationTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, explanationType_); + } + if (explanationSpec_ != null) { + output.writeMessage(2, getExplanationSpec()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getExplanationTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, explanationType_); + } + if (explanationSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExplanationSpec()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec)) { + return super.equals(obj); + } + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec other = + (com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) obj; + + if (!getExplanationType().equals(other.getExplanationType())) return false; + if (hasExplanationSpec() != other.hasExplanationSpec()) return false; + 
if (hasExplanationSpec()) { + if (!getExplanationSpec().equals(other.getExplanationSpec())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + EXPLANATION_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getExplanationType().hashCode(); + if (hasExplanationSpec()) { + hash = (37 * hash) + EXPLANATION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getExplanationSpec().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static 
com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code + * google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec} + */ + public static final class Builder + extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.ModelEvaluationProto + .internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.aiplatform.v1beta1.ModelEvaluationProto + 
.internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .class, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder.class); + } + + // Construct using + // com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + explanationType_ = ""; + + if (explanationSpecBuilder_ == null) { + explanationSpec_ = null; + } else { + explanationSpec_ = null; + explanationSpecBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.aiplatform.v1beta1.ModelEvaluationProto + .internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + getDefaultInstanceForType() { + return com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + build() { + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public 
com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + buildPartial() { + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec result = + new com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec( + this); + result.explanationType_ = explanationType_; + if (explanationSpecBuilder_ == null) { + result.explanationSpec_ = explanationSpec_; + } else { + result.explanationSpec_ = explanationSpecBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, + java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) { + return mergeFrom( + (com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) + other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + other) { 
+ if (other + == com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .getDefaultInstance()) return this; + if (!other.getExplanationType().isEmpty()) { + explanationType_ = other.explanationType_; + onChanged(); + } + if (other.hasExplanationSpec()) { + mergeExplanationSpec(other.getExplanationSpec()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object explanationType_ = ""; + /** + * + * + *
+       * Explanation type.
+       * For AutoML Image Classification models, possible values are:
+       *   * `image-integrated-gradients`
+       *   * `image-xrai`
+       * 
+ * + * string explanation_type = 1; + * + * @return The explanationType. + */ + public java.lang.String getExplanationType() { + java.lang.Object ref = explanationType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + explanationType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * Explanation type.
+       * For AutoML Image Classification models, possible values are:
+       *   * `image-integrated-gradients`
+       *   * `image-xrai`
+       * 
+ * + * string explanation_type = 1; + * + * @return The bytes for explanationType. + */ + public com.google.protobuf.ByteString getExplanationTypeBytes() { + java.lang.Object ref = explanationType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + explanationType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * Explanation type.
+       * For AutoML Image Classification models, possible values are:
+       *   * `image-integrated-gradients`
+       *   * `image-xrai`
+       * 
+ * + * string explanation_type = 1; + * + * @param value The explanationType to set. + * @return This builder for chaining. + */ + public Builder setExplanationType(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + explanationType_ = value; + onChanged(); + return this; + } + /** + * + * + *
+       * Explanation type.
+       * For AutoML Image Classification models, possible values are:
+       *   * `image-integrated-gradients`
+       *   * `image-xrai`
+       * 
+ * + * string explanation_type = 1; + * + * @return This builder for chaining. + */ + public Builder clearExplanationType() { + + explanationType_ = getDefaultInstance().getExplanationType(); + onChanged(); + return this; + } + /** + * + * + *
+       * Explanation type.
+       * For AutoML Image Classification models, possible values are:
+       *   * `image-integrated-gradients`
+       *   * `image-xrai`
+       * 
+ * + * string explanation_type = 1; + * + * @param value The bytes for explanationType to set. + * @return This builder for chaining. + */ + public Builder setExplanationTypeBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + explanationType_ = value; + onChanged(); + return this; + } + + private com.google.cloud.aiplatform.v1beta1.ExplanationSpec explanationSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationSpec, + com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder> + explanationSpecBuilder_; + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + * + * @return Whether the explanationSpec field is set. + */ + public boolean hasExplanationSpec() { + return explanationSpecBuilder_ != null || explanationSpec_ != null; + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + * + * @return The explanationSpec. + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() { + if (explanationSpecBuilder_ == null) { + return explanationSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance() + : explanationSpec_; + } else { + return explanationSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) { + if (explanationSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + explanationSpec_ = value; + onChanged(); + } else { + explanationSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + public Builder setExplanationSpec( + com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder builderForValue) { + if (explanationSpecBuilder_ == null) { + explanationSpec_ = builderForValue.build(); + onChanged(); + } else { + explanationSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + public Builder mergeExplanationSpec( + com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) { + if (explanationSpecBuilder_ == null) { + if (explanationSpec_ != null) { + explanationSpec_ = + com.google.cloud.aiplatform.v1beta1.ExplanationSpec.newBuilder(explanationSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + explanationSpec_ = value; + } + onChanged(); + } else { + explanationSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + public Builder clearExplanationSpec() { + if (explanationSpecBuilder_ == null) { + explanationSpec_ = null; + onChanged(); + } else { + explanationSpec_ = null; + explanationSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder + getExplanationSpecBuilder() { + + onChanged(); + return getExplanationSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder + getExplanationSpecOrBuilder() { + if (explanationSpecBuilder_ != null) { + return explanationSpecBuilder_.getMessageOrBuilder(); + } else { + return explanationSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance() + : explanationSpec_; + } + } + /** + * + * + *
+       * Explanation spec details.
+       * 
+ * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationSpec, + com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder> + getExplanationSpecFieldBuilder() { + if (explanationSpecBuilder_ == null) { + explanationSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ExplanationSpec, + com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder, + com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder>( + getExplanationSpec(), getParentForChildren(), isClean()); + explanationSpec_ = null; + } + return explanationSpecBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) + } + + // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) + private static final com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpec + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec(); + } + + public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ModelEvaluationExplanationSpec parsePartialFrom( 
+ com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModelEvaluationExplanationSpec(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + } + public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** @@ -517,6 +1562,99 @@ public com.google.cloud.aiplatform.v1beta1.ModelExplanation getModelExplanation( return getModelExplanation(); } + public static final int EXPLANATION_SPECS_FIELD_NUMBER = 9; + private java.util.List< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec> + explanationSpecs_; + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public java.util.List< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec> + getExplanationSpecsList() { + return explanationSpecs_; + } + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public java.util.List< + ? extends + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder> + getExplanationSpecsOrBuilderList() { + return explanationSpecs_; + } + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public int getExplanationSpecsCount() { + return explanationSpecs_.size(); + } + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + getExplanationSpecs(int index) { + return explanationSpecs_.get(index); + } + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpecOrBuilder + getExplanationSpecsOrBuilder(int index) { + return explanationSpecs_.get(index); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -549,6 +1687,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (modelExplanation_ != null) { output.writeMessage(8, getModelExplanation()); } + for (int i = 0; i < explanationSpecs_.size(); i++) { + output.writeMessage(9, explanationSpecs_.get(i)); + } unknownFields.writeTo(output); } @@ -581,6 +1722,9 @@ public int getSerializedSize() { if (modelExplanation_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getModelExplanation()); } + for (int i = 0; i < explanationSpecs_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, explanationSpecs_.get(i)); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -612,6 +1756,7 @@ public boolean equals(final java.lang.Object obj) { if (hasModelExplanation()) { if (!getModelExplanation().equals(other.getModelExplanation())) return false; } + if (!getExplanationSpecsList().equals(other.getExplanationSpecsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -643,6 +1788,10 @@ public int hashCode() { hash = (37 * hash) + MODEL_EXPLANATION_FIELD_NUMBER; hash = (53 * hash) + getModelExplanation().hashCode(); } + if (getExplanationSpecsCount() > 0) { + hash = (37 * hash) + EXPLANATION_SPECS_FIELD_NUMBER; + hash = (53 * hash) + getExplanationSpecsList().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -783,7 +1932,9 @@ private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getExplanationSpecsFieldBuilder(); + } } @java.lang.Override @@ -813,6 +1964,12 @@ public Builder clear() { modelExplanation_ = null; modelExplanationBuilder_ = null; } + if (explanationSpecsBuilder_ == null) { + explanationSpecs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + explanationSpecsBuilder_.clear(); + } return this; } @@ -863,6 +2020,15 @@ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation buildPartial() { } else { result.modelExplanation_ = modelExplanationBuilder_.build(); } + if (explanationSpecsBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0)) { + explanationSpecs_ = java.util.Collections.unmodifiableList(explanationSpecs_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.explanationSpecs_ = explanationSpecs_; + } else { + result.explanationSpecs_ = explanationSpecsBuilder_.build(); + } onBuilt(); return result; } @@ -940,6 +2106,33 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ModelEvaluation oth if (other.hasModelExplanation()) { mergeModelExplanation(other.getModelExplanation()); } + if (explanationSpecsBuilder_ == null) { + if (!other.explanationSpecs_.isEmpty()) { + if (explanationSpecs_.isEmpty()) { + explanationSpecs_ = other.explanationSpecs_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureExplanationSpecsIsMutable(); + explanationSpecs_.addAll(other.explanationSpecs_); + } + onChanged(); + } + } else { + if (!other.explanationSpecs_.isEmpty()) { + if (explanationSpecsBuilder_.isEmpty()) { + explanationSpecsBuilder_.dispose(); + explanationSpecsBuilder_ = null; + explanationSpecs_ = other.explanationSpecs_; + bitField0_ = (bitField0_ & ~0x00000002); + explanationSpecsBuilder_ = 
+ com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? getExplanationSpecsFieldBuilder() + : null; + } else { + explanationSpecsBuilder_.addAllMessages(other.explanationSpecs_); + } + } + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2016,6 +3209,458 @@ public Builder clearModelExplanation() { return modelExplanationBuilder_; } + private java.util.List< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec> + explanationSpecs_ = java.util.Collections.emptyList(); + + private void ensureExplanationSpecsIsMutable() { + if (!((bitField0_ & 0x00000002) != 0)) { + explanationSpecs_ = + new java.util.ArrayList< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec>( + explanationSpecs_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder> + explanationSpecsBuilder_; + + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public java.util.List< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec> + getExplanationSpecsList() { + if (explanationSpecsBuilder_ == null) { + return java.util.Collections.unmodifiableList(explanationSpecs_); + } else { + return explanationSpecsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public int getExplanationSpecsCount() { + if (explanationSpecsBuilder_ == null) { + return explanationSpecs_.size(); + } else { + return explanationSpecsBuilder_.getCount(); + } + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + getExplanationSpecs(int index) { + if (explanationSpecsBuilder_ == null) { + return explanationSpecs_.get(index); + } else { + return explanationSpecsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExplanationSpecs( + int index, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec value) { + if (explanationSpecsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureExplanationSpecsIsMutable(); + explanationSpecs_.set(index, value); + onChanged(); + } else { + explanationSpecsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExplanationSpecs( + int index, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec.Builder + builderForValue) { + if (explanationSpecsBuilder_ == null) { + ensureExplanationSpecsIsMutable(); + explanationSpecs_.set(index, builderForValue.build()); + onChanged(); + } else { + explanationSpecsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addExplanationSpecs( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec value) { + if (explanationSpecsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureExplanationSpecsIsMutable(); + explanationSpecs_.add(value); + onChanged(); + } else { + explanationSpecsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addExplanationSpecs( + int index, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec value) { + if (explanationSpecsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureExplanationSpecsIsMutable(); + explanationSpecs_.add(index, value); + onChanged(); + } else { + explanationSpecsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addExplanationSpecs( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec.Builder + builderForValue) { + if (explanationSpecsBuilder_ == null) { + ensureExplanationSpecsIsMutable(); + explanationSpecs_.add(builderForValue.build()); + onChanged(); + } else { + explanationSpecsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addExplanationSpecs( + int index, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec.Builder + builderForValue) { + if (explanationSpecsBuilder_ == null) { + ensureExplanationSpecsIsMutable(); + explanationSpecs_.add(index, builderForValue.build()); + onChanged(); + } else { + explanationSpecsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addAllExplanationSpecs( + java.lang.Iterable< + ? extends + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpec> + values) { + if (explanationSpecsBuilder_ == null) { + ensureExplanationSpecsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, explanationSpecs_); + onChanged(); + } else { + explanationSpecsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearExplanationSpecs() { + if (explanationSpecsBuilder_ == null) { + explanationSpecs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + explanationSpecsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder removeExplanationSpecs(int index) { + if (explanationSpecsBuilder_ == null) { + ensureExplanationSpecsIsMutable(); + explanationSpecs_.remove(index); + onChanged(); + } else { + explanationSpecsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder + getExplanationSpecsBuilder(int index) { + return getExplanationSpecsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder + getExplanationSpecsOrBuilder(int index) { + if (explanationSpecsBuilder_ == null) { + return explanationSpecs_.get(index); + } else { + return explanationSpecsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public java.util.List< + ? extends + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder> + getExplanationSpecsOrBuilderList() { + if (explanationSpecsBuilder_ != null) { + return explanationSpecsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(explanationSpecs_); + } + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder + addExplanationSpecsBuilder() { + return getExplanationSpecsFieldBuilder() + .addBuilder( + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .getDefaultInstance()); + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder + addExplanationSpecsBuilder(int index) { + return getExplanationSpecsFieldBuilder() + .addBuilder( + index, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .getDefaultInstance()); + } + /** + * + * + *
+     * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+     * the predicted values on the evaluated data.
+     * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public java.util.List< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder> + getExplanationSpecsBuilderList() { + return getExplanationSpecsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder> + getExplanationSpecsFieldBuilder() { + if (explanationSpecsBuilder_ == null) { + explanationSpecsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + .Builder, + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder>( + explanationSpecs_, + ((bitField0_ & 0x00000002) != 0), + getParentForChildren(), + isClean()); + explanationSpecs_ = null; + } + return explanationSpecsBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java index 57cf0caf2..40c7fdd28 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java +++ 
b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java @@ -264,4 +264,77 @@ public interface ModelEvaluationOrBuilder * */ com.google.cloud.aiplatform.v1beta1.ModelExplanationOrBuilder getModelExplanationOrBuilder(); + + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + java.util.List + getExplanationSpecsList(); + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec + getExplanationSpecs(int index); + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + int getExplanationSpecsCount(); + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + java.util.List< + ? extends + com.google.cloud.aiplatform.v1beta1.ModelEvaluation + .ModelEvaluationExplanationSpecOrBuilder> + getExplanationSpecsOrBuilderList(); + /** + * + * + *
+   * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+   * the predicted values on the evaluated data.
+   * 
+ * + * + * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpecOrBuilder + getExplanationSpecsOrBuilder(int index); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java index bca98ace2..b219e56e7 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java @@ -31,6 +31,10 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -47,21 +51,27 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "oud/aiplatform/v1beta1/explanation.proto" + "\032\034google/protobuf/struct.proto\032\037google/p" + "rotobuf/timestamp.proto\032\034google/api/anno" - + "tations.proto\"\234\003\n\017ModelEvaluation\022\021\n\004nam" + + 
"tations.proto\"\226\005\n\017ModelEvaluation\022\021\n\004nam" + "e\030\001 \001(\tB\003\340A\003\022\037\n\022metrics_schema_uri\030\002 \001(\t" + "B\003\340A\003\022,\n\007metrics\030\003 \001(\0132\026.google.protobuf" + ".ValueB\003\340A\003\0224\n\013create_time\030\004 \001(\0132\032.googl" + "e.protobuf.TimestampB\003\340A\003\022\035\n\020slice_dimen" + "sions\030\005 \003(\tB\003\340A\003\022Q\n\021model_explanation\030\010 " + "\001(\01321.google.cloud.aiplatform.v1beta1.Mo" - + "delExplanationB\003\340A\003:\177\352A|\n)aiplatform.goo" - + "gleapis.com/ModelEvaluation\022Oprojects/{p" - + "roject}/locations/{location}/models/{mod" - + "el}/evaluations/{evaluation}B\210\001\n#com.goo" - + "gle.cloud.aiplatform.v1beta1B\024ModelEvalu" - + "ationProtoP\001ZIgoogle.golang.org/genproto" - + "/googleapis/cloud/aiplatform/v1beta1;aip" - + "latformb\006proto3" + + "delExplanationB\003\340A\003\022o\n\021explanation_specs" + + "\030\t \003(\0132O.google.cloud.aiplatform.v1beta1" + + ".ModelEvaluation.ModelEvaluationExplanat" + + "ionSpecB\003\340A\003\032\206\001\n\036ModelEvaluationExplanat" + + "ionSpec\022\030\n\020explanation_type\030\001 \001(\t\022J\n\020exp" + + "lanation_spec\030\002 \001(\01320.google.cloud.aipla" + + "tform.v1beta1.ExplanationSpec:\177\352A|\n)aipl" + + "atform.googleapis.com/ModelEvaluation\022Op" + + "rojects/{project}/locations/{location}/m" + + "odels/{model}/evaluations/{evaluation}B\210" + + "\001\n#com.google.cloud.aiplatform.v1beta1B\024" + + "ModelEvaluationProtoP\001ZIgoogle.golang.or" + + "g/genproto/googleapis/cloud/aiplatform/v" + + "1beta1;aiplatformb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -86,6 +96,17 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "CreateTime", "SliceDimensions", "ModelExplanation", + "ExplanationSpecs", + }); + 
internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor = + internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_descriptor + .getNestedTypes() + .get(0); + internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor, + new java.lang.String[] { + "ExplanationType", "ExplanationSpec", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java index 7afe891e0..42b2d89da 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java @@ -126,7 +126,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { * * *
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -155,7 +155,7 @@ public java.util.List getMeanAt
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -185,7 +185,7 @@ public java.util.List getMeanAt
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -214,7 +214,7 @@ public int getMeanAttributionsCount() {
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -243,7 +243,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution getMeanAttributions(int i
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -652,7 +652,7 @@ private void ensureMeanAttributionsIsMutable() {
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -685,7 +685,7 @@ private void ensureMeanAttributionsIsMutable() {
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -717,7 +717,7 @@ public int getMeanAttributionsCount() {
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -749,7 +749,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution getMeanAttributions(int i
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -788,7 +788,7 @@ public Builder setMeanAttributions(
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -824,7 +824,7 @@ public Builder setMeanAttributions(
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -862,7 +862,7 @@ public Builder addMeanAttributions(com.google.cloud.aiplatform.v1beta1.Attributi
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -901,7 +901,7 @@ public Builder addMeanAttributions(
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -937,7 +937,7 @@ public Builder addMeanAttributions(
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -973,7 +973,7 @@ public Builder addMeanAttributions(
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1009,7 +1009,7 @@ public Builder addAllMeanAttributions(
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1044,7 +1044,7 @@ public Builder clearMeanAttributions() {
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1079,7 +1079,7 @@ public Builder removeMeanAttributions(int index) {
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1108,7 +1108,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution.Builder getMeanAttributio
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1141,7 +1141,7 @@ public com.google.cloud.aiplatform.v1beta1.AttributionOrBuilder getMeanAttributi
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1174,7 +1174,7 @@ public com.google.cloud.aiplatform.v1beta1.AttributionOrBuilder getMeanAttributi
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1203,7 +1203,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution.Builder addMeanAttributio
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
@@ -1233,7 +1233,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution.Builder addMeanAttributio
      *
      *
      * 
-     * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+     * Output only. Aggregated attributions explaining the Model's prediction outputs over the
      * set of instances. The attributions are grouped by outputs.
      * For Models that predict only one output, such as regression Models that
      * predict only one score, there is only one attibution that explains the
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java
index 1ab25e538..8871d76a7 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java
@@ -27,7 +27,7 @@ public interface ModelExplanationOrBuilder
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -53,7 +53,7 @@ public interface ModelExplanationOrBuilder
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -79,7 +79,7 @@ public interface ModelExplanationOrBuilder
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -105,7 +105,7 @@ public interface ModelExplanationOrBuilder
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
@@ -132,7 +132,7 @@ public interface ModelExplanationOrBuilder
    *
    *
    * 
-   * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+   * Output only. Aggregated attributions explaining the Model's prediction outputs over the
    * set of instances. The attributions are grouped by outputs.
    * For Models that predict only one output, such as regression Models that
    * predict only one score, there is only one attibution that explains the
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java
index b15effdb0..226bd2b21 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java
@@ -559,6 +559,10 @@ public interface ModelOrBuilder
    * * `bigquery`
    * Each instance is a single row in BigQuery. Uses
    * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+   * * `file-list`
+   * Each line of the file is the location of an instance to process, uses
+   * `gcs_source` field of the
+   * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
    * If this Model doesn't support any of these formats it means it cannot be
    * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
    * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -598,6 +602,10 @@ public interface ModelOrBuilder
    * * `bigquery`
    * Each instance is a single row in BigQuery. Uses
    * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+   * * `file-list`
+   * Each line of the file is the location of an instance to process, uses
+   * `gcs_source` field of the
+   * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
    * If this Model doesn't support any of these formats it means it cannot be
    * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
    * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -637,6 +645,10 @@ public interface ModelOrBuilder
    * * `bigquery`
    * Each instance is a single row in BigQuery. Uses
    * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+   * * `file-list`
+   * Each line of the file is the location of an instance to process, uses
+   * `gcs_source` field of the
+   * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
    * If this Model doesn't support any of these formats it means it cannot be
    * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
    * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -677,6 +689,10 @@ public interface ModelOrBuilder
    * * `bigquery`
    * Each instance is a single row in BigQuery. Uses
    * [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+   * * `file-list`
+   * Each line of the file is the location of an instance to process, uses
+   * `gcs_source` field of the
+   * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
    * If this Model doesn't support any of these formats it means it cannot be
    * used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
    * [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -996,19 +1012,20 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO
    *
    *
    * 
-   * Output only. The default explanation specification for this Model.
-   * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-   * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The default explanation specification for this Model.
+   * The Model can be used for [requesting
+   * explanation][PredictionService.Explain] after being
+   * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The Model can be used for [batch
+   * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
    * All fields of the explanation_spec can be overridden by
    * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-   * This field is populated only for tabular AutoML Models.
-   * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+   * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
    * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; * * @return Whether the explanationSpec field is set. */ @@ -1017,19 +1034,20 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO * * *
-   * Output only. The default explanation specification for this Model.
-   * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-   * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The default explanation specification for this Model.
+   * The Model can be used for [requesting
+   * explanation][PredictionService.Explain] after being
+   * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The Model can be used for [batch
+   * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
    * All fields of the explanation_spec can be overridden by
    * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-   * This field is populated only for tabular AutoML Models.
-   * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+   * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
    * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; * * @return The explanationSpec. */ @@ -1038,19 +1056,20 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO * * *
-   * Output only. The default explanation specification for this Model.
-   * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
-   * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The default explanation specification for this Model.
+   * The Model can be used for [requesting
+   * explanation][PredictionService.Explain] after being
+   * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+   * The Model can be used for [batch
+   * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
    * All fields of the explanation_spec can be overridden by
    * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
-   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
-   * This field is populated only for tabular AutoML Models.
-   * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+   * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+   * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+   * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
    * 
* - * - * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; - * + * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23; */ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder(); @@ -1154,4 +1173,42 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO * map<string, string> labels = 17; */ java.lang.String getLabelsOrThrow(java.lang.String key); + + /** + * + * + *
+   * Customer-managed encryption key spec for a Model. If set, this
+   * Model and all sub-resources of this Model will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a Model. If set, this
+   * Model and all sub-resources of this Model will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a Model. If set, this
+   * Model and all sub-resources of this Model will be secured by this key.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java index 76874664e..c8b0bde7f 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java @@ -66,64 +66,67 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "api/resource.proto\032-google/cloud/aiplatf" + "orm/v1beta1/dataset.proto\0328google/cloud/" + "aiplatform/v1beta1/deployed_model_ref.pr" - + "oto\032-google/cloud/aiplatform/v1beta1/env" - + "_var.proto\0321google/cloud/aiplatform/v1be" - + "ta1/explanation.proto\032\034google/protobuf/s" - + "truct.proto\032\037google/protobuf/timestamp.p" - + "roto\032\034google/api/annotations.proto\"\214\014\n\005M" - + "odel\022\014\n\004name\030\001 \001(\t\022\031\n\014display_name\030\002 \001(\t" - + "B\003\340A\002\022\023\n\013description\030\003 \001(\t\022J\n\020predict_sc" - + "hemata\030\004 \001(\01320.google.cloud.aiplatform.v" - + "1beta1.PredictSchemata\022 \n\023metadata_schem" - + "a_uri\030\005 \001(\tB\003\340A\005\022-\n\010metadata\030\006 \001(\0132\026.goo" - + "gle.protobuf.ValueB\003\340A\005\022Z\n\030supported_exp" - + "ort_formats\030\024 \003(\01323.google.cloud.aiplatf" - + "orm.v1beta1.Model.ExportFormatB\003\340A\003\022M\n\021t" - + "raining_pipeline\030\007 \001(\tB2\340A\003\372A,\n*aiplatfo" - + "rm.googleapis.com/TrainingPipeline\022P\n\016co" - + "ntainer_spec\030\t \001(\01323.google.cloud.aiplat" - + "form.v1beta1.ModelContainerSpecB\003\340A\004\022\031\n\014" - + "artifact_uri\030\032 
\001(\tB\003\340A\005\022q\n$supported_dep" - + "loyment_resources_types\030\n \003(\0162>.google.c" - + "loud.aiplatform.v1beta1.Model.Deployment" - + "ResourcesTypeB\003\340A\003\022,\n\037supported_input_st" - + "orage_formats\030\013 \003(\tB\003\340A\003\022-\n supported_ou" - + "tput_storage_formats\030\014 \003(\tB\003\340A\003\0224\n\013creat" - + "e_time\030\r \001(\0132\032.google.protobuf.Timestamp" - + "B\003\340A\003\0224\n\013update_time\030\016 \001(\0132\032.google.prot" - + "obuf.TimestampB\003\340A\003\022O\n\017deployed_models\030\017" - + " \003(\01321.google.cloud.aiplatform.v1beta1.D" - + "eployedModelRefB\003\340A\003\022O\n\020explanation_spec" - + "\030\027 \001(\01320.google.cloud.aiplatform.v1beta1" - + ".ExplanationSpecB\003\340A\003\022\014\n\004etag\030\020 \001(\t\022B\n\006l" - + "abels\030\021 \003(\01322.google.cloud.aiplatform.v1" - + "beta1.Model.LabelsEntry\032\332\001\n\014ExportFormat" - + "\022\017\n\002id\030\001 \001(\tB\003\340A\003\022g\n\023exportable_contents" - + "\030\002 \003(\0162E.google.cloud.aiplatform.v1beta1" - + ".Model.ExportFormat.ExportableContentB\003\340" - + "A\003\"P\n\021ExportableContent\022\"\n\036EXPORTABLE_CO" - + "NTENT_UNSPECIFIED\020\000\022\014\n\010ARTIFACT\020\001\022\t\n\005IMA" - + "GE\020\002\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005valu" - + "e\030\002 \001(\t:\0028\001\"v\n\027DeploymentResourcesType\022)" - + "\n%DEPLOYMENT_RESOURCES_TYPE_UNSPECIFIED\020" - + "\000\022\027\n\023DEDICATED_RESOURCES\020\001\022\027\n\023AUTOMATIC_" - + "RESOURCES\020\002:\\\352AY\n\037aiplatform.googleapis." 
- + "com/Model\0226projects/{project}/locations/" - + "{location}/models/{model}\"{\n\017PredictSche" - + "mata\022 \n\023instance_schema_uri\030\001 \001(\tB\003\340A\005\022\"" - + "\n\025parameters_schema_uri\030\002 \001(\tB\003\340A\005\022\"\n\025pr" - + "ediction_schema_uri\030\003 \001(\tB\003\340A\005\"\205\002\n\022Model" - + "ContainerSpec\022\031\n\timage_uri\030\001 \001(\tB\006\340A\002\340A\005" - + "\022\024\n\007command\030\002 \003(\tB\003\340A\005\022\021\n\004args\030\003 \003(\tB\003\340A" - + "\005\0229\n\003env\030\004 \003(\0132\'.google.cloud.aiplatform" - + ".v1beta1.EnvVarB\003\340A\005\0229\n\005ports\030\005 \003(\0132%.go" - + "ogle.cloud.aiplatform.v1beta1.PortB\003\340A\005\022" - + "\032\n\rpredict_route\030\006 \001(\tB\003\340A\005\022\031\n\014health_ro" - + "ute\030\007 \001(\tB\003\340A\005\"\036\n\004Port\022\026\n\016container_port" - + "\030\003 \001(\005B~\n#com.google.cloud.aiplatform.v1" - + "beta1B\nModelProtoP\001ZIgoogle.golang.org/g" - + "enproto/googleapis/cloud/aiplatform/v1be" - + "ta1;aiplatformb\006proto3" + + "oto\0325google/cloud/aiplatform/v1beta1/enc" + + "ryption_spec.proto\032-google/cloud/aiplatf" + + "orm/v1beta1/env_var.proto\0321google/cloud/" + + "aiplatform/v1beta1/explanation.proto\032\034go" + + "ogle/protobuf/struct.proto\032\037google/proto" + + "buf/timestamp.proto\032\034google/api/annotati" + + "ons.proto\"\321\014\n\005Model\022\014\n\004name\030\001 \001(\t\022\031\n\014dis" + + "play_name\030\002 \001(\tB\003\340A\002\022\023\n\013description\030\003 \001(" + + "\t\022J\n\020predict_schemata\030\004 \001(\01320.google.clo" + + "ud.aiplatform.v1beta1.PredictSchemata\022 \n" + + "\023metadata_schema_uri\030\005 \001(\tB\003\340A\005\022-\n\010metad" + + "ata\030\006 \001(\0132\026.google.protobuf.ValueB\003\340A\005\022Z" + + "\n\030supported_export_formats\030\024 \003(\01323.googl" + + "e.cloud.aiplatform.v1beta1.Model.ExportF" + + "ormatB\003\340A\003\022M\n\021training_pipeline\030\007 
\001(\tB2\340" + + "A\003\372A,\n*aiplatform.googleapis.com/Trainin" + + "gPipeline\022P\n\016container_spec\030\t \001(\01323.goog" + + "le.cloud.aiplatform.v1beta1.ModelContain" + + "erSpecB\003\340A\004\022\031\n\014artifact_uri\030\032 \001(\tB\003\340A\005\022q" + + "\n$supported_deployment_resources_types\030\n" + + " \003(\0162>.google.cloud.aiplatform.v1beta1.M" + + "odel.DeploymentResourcesTypeB\003\340A\003\022,\n\037sup" + + "ported_input_storage_formats\030\013 \003(\tB\003\340A\003\022" + + "-\n supported_output_storage_formats\030\014 \003(" + + "\tB\003\340A\003\0224\n\013create_time\030\r \001(\0132\032.google.pro" + + "tobuf.TimestampB\003\340A\003\0224\n\013update_time\030\016 \001(" + + "\0132\032.google.protobuf.TimestampB\003\340A\003\022O\n\017de" + + "ployed_models\030\017 \003(\01321.google.cloud.aipla" + + "tform.v1beta1.DeployedModelRefB\003\340A\003\022J\n\020e" + + "xplanation_spec\030\027 \001(\01320.google.cloud.aip" + + "latform.v1beta1.ExplanationSpec\022\014\n\004etag\030" + + "\020 \001(\t\022B\n\006labels\030\021 \003(\01322.google.cloud.aip" + + "latform.v1beta1.Model.LabelsEntry\022H\n\017enc" + + "ryption_spec\030\030 \001(\0132/.google.cloud.aiplat" + + "form.v1beta1.EncryptionSpec\032\332\001\n\014ExportFo" + + "rmat\022\017\n\002id\030\001 \001(\tB\003\340A\003\022g\n\023exportable_cont" + + "ents\030\002 \003(\0162E.google.cloud.aiplatform.v1b" + + "eta1.Model.ExportFormat.ExportableConten" + + "tB\003\340A\003\"P\n\021ExportableContent\022\"\n\036EXPORTABL" + + "E_CONTENT_UNSPECIFIED\020\000\022\014\n\010ARTIFACT\020\001\022\t\n" + + "\005IMAGE\020\002\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005" + + "value\030\002 \001(\t:\0028\001\"v\n\027DeploymentResourcesTy" + + "pe\022)\n%DEPLOYMENT_RESOURCES_TYPE_UNSPECIF" + + "IED\020\000\022\027\n\023DEDICATED_RESOURCES\020\001\022\027\n\023AUTOMA" + + "TIC_RESOURCES\020\002:\\\352AY\n\037aiplatform.googlea" + + "pis.com/Model\0226projects/{project}/locati" 
+ + "ons/{location}/models/{model}\"{\n\017Predict" + + "Schemata\022 \n\023instance_schema_uri\030\001 \001(\tB\003\340" + + "A\005\022\"\n\025parameters_schema_uri\030\002 \001(\tB\003\340A\005\022\"" + + "\n\025prediction_schema_uri\030\003 \001(\tB\003\340A\005\"\205\002\n\022M" + + "odelContainerSpec\022\031\n\timage_uri\030\001 \001(\tB\006\340A" + + "\002\340A\005\022\024\n\007command\030\002 \003(\tB\003\340A\005\022\021\n\004args\030\003 \003(\t" + + "B\003\340A\005\0229\n\003env\030\004 \003(\0132\'.google.cloud.aiplat" + + "form.v1beta1.EnvVarB\003\340A\005\0229\n\005ports\030\005 \003(\0132" + + "%.google.cloud.aiplatform.v1beta1.PortB\003" + + "\340A\005\022\032\n\rpredict_route\030\006 \001(\tB\003\340A\005\022\031\n\014healt" + + "h_route\030\007 \001(\tB\003\340A\005\"\036\n\004Port\022\026\n\016container_" + + "port\030\003 \001(\005B~\n#com.google.cloud.aiplatfor" + + "m.v1beta1B\nModelProtoP\001ZIgoogle.golang.o" + + "rg/genproto/googleapis/cloud/aiplatform/" + + "v1beta1;aiplatformb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -133,6 +136,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.ResourceProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.DatasetProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.DeployedModelNameProto.getDescriptor(), + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor(), com.google.protobuf.StructProto.getDescriptor(), @@ -164,6 +168,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "ExplanationSpec", "Etag", "Labels", + "EncryptionSpec", }); internal_static_google_cloud_aiplatform_v1beta1_Model_ExportFormat_descriptor = internal_static_google_cloud_aiplatform_v1beta1_Model_descriptor.getNestedTypes().get(0); 
@@ -216,6 +221,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.api.ResourceProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.DatasetProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.DeployedModelNameProto.getDescriptor(); + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor(); com.google.protobuf.StructProto.getDescriptor(); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java index b8d6202ed..317cc7b1d 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java @@ -65,34 +65,36 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "tobuf.ValueB\003\340A\002\022*\n\nparameters\030\003 \001(\0132\026.g" + "oogle.protobuf.Value\"Y\n\017PredictResponse\022" + "+\n\013predictions\030\001 \003(\0132\026.google.protobuf.V" - + "alue\022\031\n\021deployed_model_id\030\002 \001(\t\"\305\001\n\016Expl" + + "alue\022\031\n\021deployed_model_id\030\002 \001(\t\"\242\002\n\016Expl" + "ainRequest\022<\n\010endpoint\030\001 \001(\tB*\340A\002\372A$\n\"ai" + "platform.googleapis.com/Endpoint\022.\n\tinst" + "ances\030\002 \003(\0132\026.google.protobuf.ValueB\003\340A\002" + "\022*\n\nparameters\030\004 \001(\0132\026.google.protobuf.V" - + "alue\022\031\n\021deployed_model_id\030\003 \001(\t\"\235\001\n\017Expl" - + "ainResponse\022B\n\014explanations\030\001 \003(\0132,.goog" - + "le.cloud.aiplatform.v1beta1.Explanation\022" - + 
"\031\n\021deployed_model_id\030\002 \001(\t\022+\n\013prediction" - + "s\030\003 \003(\0132\026.google.protobuf.Value2\250\004\n\021Pred" - + "ictionService\022\327\001\n\007Predict\022/.google.cloud" - + ".aiplatform.v1beta1.PredictRequest\0320.goo" - + "gle.cloud.aiplatform.v1beta1.PredictResp" - + "onse\"i\202\323\344\223\002C\">/v1beta1/{endpoint=project" - + "s/*/locations/*/endpoints/*}:predict:\001*\332" - + "A\035endpoint,instances,parameters\022\351\001\n\007Expl" - + "ain\022/.google.cloud.aiplatform.v1beta1.Ex" - + "plainRequest\0320.google.cloud.aiplatform.v" - + "1beta1.ExplainResponse\"{\202\323\344\223\002C\">/v1beta1" - + "/{endpoint=projects/*/locations/*/endpoi" - + "nts/*}:explain:\001*\332A/endpoint,instances,p" - + "arameters,deployed_model_id\032M\312A\031aiplatfo" - + "rm.googleapis.com\322A.https://www.googleap" - + "is.com/auth/cloud-platformB\212\001\n#com.googl" - + "e.cloud.aiplatform.v1beta1B\026PredictionSe" - + "rviceProtoP\001ZIgoogle.golang.org/genproto" - + "/googleapis/cloud/aiplatform/v1beta1;aip" - + "latformb\006proto3" + + "alue\022[\n\031explanation_spec_override\030\005 \001(\0132" + + "8.google.cloud.aiplatform.v1beta1.Explan" + + "ationSpecOverride\022\031\n\021deployed_model_id\030\003" + + " \001(\t\"\235\001\n\017ExplainResponse\022B\n\014explanations" + + "\030\001 \003(\0132,.google.cloud.aiplatform.v1beta1" + + ".Explanation\022\031\n\021deployed_model_id\030\002 \001(\t\022" + + "+\n\013predictions\030\003 \003(\0132\026.google.protobuf.V" + + "alue2\250\004\n\021PredictionService\022\327\001\n\007Predict\022/" + + ".google.cloud.aiplatform.v1beta1.Predict" + + "Request\0320.google.cloud.aiplatform.v1beta" + + "1.PredictResponse\"i\202\323\344\223\002C\">/v1beta1/{end" + + "point=projects/*/locations/*/endpoints/*" + + "}:predict:\001*\332A\035endpoint,instances,parame" + + "ters\022\351\001\n\007Explain\022/.google.cloud.aiplatfo" + + 
"rm.v1beta1.ExplainRequest\0320.google.cloud" + + ".aiplatform.v1beta1.ExplainResponse\"{\202\323\344" + + "\223\002C\">/v1beta1/{endpoint=projects/*/locat" + + "ions/*/endpoints/*}:explain:\001*\332A/endpoin" + + "t,instances,parameters,deployed_model_id" + + "\032M\312A\031aiplatform.googleapis.com\322A.https:/" + + "/www.googleapis.com/auth/cloud-platformB" + + "\212\001\n#com.google.cloud.aiplatform.v1beta1B" + + "\026PredictionServiceProtoP\001ZIgoogle.golang" + + ".org/genproto/googleapis/cloud/aiplatfor" + + "m/v1beta1;aiplatformb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -127,7 +129,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_ExplainRequest_descriptor, new java.lang.String[] { - "Endpoint", "Instances", "Parameters", "DeployedModelId", + "Endpoint", "Instances", "Parameters", "ExplanationSpecOverride", "DeployedModelId", }); internal_static_google_cloud_aiplatform_v1beta1_ExplainResponse_descriptor = getDescriptor().getMessageTypes().get(3); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java index 53ac48b90..281e54cbc 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java @@ -41,6 +41,7 @@ private SearchMigratableResourcesRequest( private SearchMigratableResourcesRequest() { parent_ = ""; pageToken_ = ""; + filter_ = ""; } @java.lang.Override @@ -91,6 +92,13 @@ private SearchMigratableResourcesRequest( pageToken_ = s; 
break; } + case 34: + { + java.lang.String s = input.readStringRequireUtf8(); + + filter_ = s; + break; + } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { @@ -254,6 +262,71 @@ public com.google.protobuf.ByteString getPageTokenBytes() { } } + public static final int FILTER_FIELD_NUMBER = 4; + private volatile java.lang.Object filter_; + /** + * + * + *
+   * Supported filters are:
+   * * Resource type: For a specific type of MigratableResource.
+   *   * `ml_engine_model_version:*`
+   *   * `automl_model:*`,
+   *   * `automl_dataset:*`
+   *   * `data_labeling_dataset:*`.
+   * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+   *   * `last_migrate_time:*` will filter migrated resources.
+   *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+   * 
+ * + * string filter = 4; + * + * @return The filter. + */ + @java.lang.Override + public java.lang.String getFilter() { + java.lang.Object ref = filter_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + filter_ = s; + return s; + } + } + /** + * + * + *
+   * Supported filters are:
+   * * Resource type: For a specific type of MigratableResource.
+   *   * `ml_engine_model_version:*`
+   *   * `automl_model:*`,
+   *   * `automl_dataset:*`
+   *   * `data_labeling_dataset:*`.
+   * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+   *   * `last_migrate_time:*` will filter migrated resources.
+   *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+   * 
+ * + * string filter = 4; + * + * @return The bytes for filter. + */ + @java.lang.Override + public com.google.protobuf.ByteString getFilterBytes() { + java.lang.Object ref = filter_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + filter_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -277,6 +350,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (!getPageTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } + if (!getFilterBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); + } unknownFields.writeTo(output); } @@ -295,6 +371,9 @@ public int getSerializedSize() { if (!getPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } + if (!getFilterBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -314,6 +393,7 @@ public boolean equals(final java.lang.Object obj) { if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; + if (!getFilter().equals(other.getFilter())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -331,6 +411,8 @@ public int hashCode() { hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -485,6 +567,8 @@ public Builder 
clear() { pageToken_ = ""; + filter_ = ""; + return this; } @@ -517,6 +601,7 @@ public com.google.cloud.aiplatform.v1beta1.SearchMigratableResourcesRequest buil result.parent_ = parent_; result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; + result.filter_ = filter_; onBuilt(); return result; } @@ -581,6 +666,10 @@ public Builder mergeFrom( pageToken_ = other.pageToken_; onChanged(); } + if (!other.getFilter().isEmpty()) { + filter_ = other.filter_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -909,6 +998,152 @@ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { return this; } + private java.lang.Object filter_ = ""; + /** + * + * + *
+     * Supported filters are:
+     * * Resource type: For a specific type of MigratableResource.
+     *   * `ml_engine_model_version:*`
+     *   * `automl_model:*`,
+     *   * `automl_dataset:*`
+     *   * `data_labeling_dataset:*`.
+     * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+     *   * `last_migrate_time:*` will filter migrated resources.
+     *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+     * 
+ * + * string filter = 4; + * + * @return The filter. + */ + public java.lang.String getFilter() { + java.lang.Object ref = filter_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + filter_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Supported filters are:
+     * * Resource type: For a specific type of MigratableResource.
+     *   * `ml_engine_model_version:*`
+     *   * `automl_model:*`,
+     *   * `automl_dataset:*`
+     *   * `data_labeling_dataset:*`.
+     * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+     *   * `last_migrate_time:*` will filter migrated resources.
+     *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+     * 
+ * + * string filter = 4; + * + * @return The bytes for filter. + */ + public com.google.protobuf.ByteString getFilterBytes() { + java.lang.Object ref = filter_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + filter_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Supported filters are:
+     * * Resource type: For a specific type of MigratableResource.
+     *   * `ml_engine_model_version:*`
+     *   * `automl_model:*`,
+     *   * `automl_dataset:*`
+     *   * `data_labeling_dataset:*`.
+     * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+     *   * `last_migrate_time:*` will filter migrated resources.
+     *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+     * 
+ * + * string filter = 4; + * + * @param value The filter to set. + * @return This builder for chaining. + */ + public Builder setFilter(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + filter_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Supported filters are:
+     * * Resource type: For a specific type of MigratableResource.
+     *   * `ml_engine_model_version:*`
+     *   * `automl_model:*`,
+     *   * `automl_dataset:*`
+     *   * `data_labeling_dataset:*`.
+     * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+     *   * `last_migrate_time:*` will filter migrated resources.
+     *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+     * 
+ * + * string filter = 4; + * + * @return This builder for chaining. + */ + public Builder clearFilter() { + + filter_ = getDefaultInstance().getFilter(); + onChanged(); + return this; + } + /** + * + * + *
+     * Supported filters are:
+     * * Resource type: For a specific type of MigratableResource.
+     *   * `ml_engine_model_version:*`
+     *   * `automl_model:*`,
+     *   * `automl_dataset:*`
+     *   * `data_labeling_dataset:*`.
+     * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+     *   * `last_migrate_time:*` will filter migrated resources.
+     *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+     * 
+ * + * string filter = 4; + * + * @param value The bytes for filter to set. + * @return This builder for chaining. + */ + public Builder setFilterBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + filter_ = value; + onChanged(); + return this; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java index 745f02b0f..a7fe855c4 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java @@ -98,4 +98,45 @@ public interface SearchMigratableResourcesRequestOrBuilder * @return The bytes for pageToken. */ com.google.protobuf.ByteString getPageTokenBytes(); + + /** + * + * + *
+   * Supported filters are:
+   * * Resource type: For a specific type of MigratableResource.
+   *   * `ml_engine_model_version:*`
+   *   * `automl_model:*`,
+   *   * `automl_dataset:*`
+   *   * `data_labeling_dataset:*`.
+   * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+   *   * `last_migrate_time:*` will filter migrated resources.
+   *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+   * 
+ * + * string filter = 4; + * + * @return The filter. + */ + java.lang.String getFilter(); + /** + * + * + *
+   * Supported filters are:
+   * * Resource type: For a specific type of MigratableResource.
+   *   * `ml_engine_model_version:*`
+   *   * `automl_model:*`,
+   *   * `automl_dataset:*`
+   *   * `data_labeling_dataset:*`.
+   * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+   *   * `last_migrate_time:*` will filter migrated resources.
+   *   * `NOT last_migrate_time:*` will filter not yet migrated resource.
+   * 
+ * + * string filter = 4; + * + * @return The bytes for filter. + */ + com.google.protobuf.ByteString getFilterBytes(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java index f2d574cd0..3948ee145 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java @@ -192,11 +192,9 @@ public GradientNoiseSigmaCase getGradientNoiseSigmaCase() { * This is a single float value and will be used to add noise to all the * features. Use this field when all features are normalized to have the * same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where - * features are normalized to have 0-mean and 1-variance. Refer to - * this doc for more details about normalization: - * https: - * //developers.google.com/machine-learning - * // /data-prep/transform/normalization. + * features are normalized to have 0-mean and 1-variance. For more details + * about normalization: + * https://tinyurl.com/dgc-normalization. * For best results the recommended value is about 10% - 20% of the standard * deviation of the input feature. Refer to section 3.2 of the SmoothGrad * paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. @@ -717,11 +715,9 @@ public Builder clearGradientNoiseSigma() { * This is a single float value and will be used to add noise to all the * features. Use this field when all features are normalized to have the * same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where - * features are normalized to have 0-mean and 1-variance. 
Refer to - * this doc for more details about normalization: - * https: - * //developers.google.com/machine-learning - * // /data-prep/transform/normalization. + * features are normalized to have 0-mean and 1-variance. For more details + * about normalization: + * https://tinyurl.com/dgc-normalization. * For best results the recommended value is about 10% - 20% of the standard * deviation of the input feature. Refer to section 3.2 of the SmoothGrad * paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. @@ -747,11 +743,9 @@ public float getNoiseSigma() { * This is a single float value and will be used to add noise to all the * features. Use this field when all features are normalized to have the * same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where - * features are normalized to have 0-mean and 1-variance. Refer to - * this doc for more details about normalization: - * https: - * //developers.google.com/machine-learning - * // /data-prep/transform/normalization. + * features are normalized to have 0-mean and 1-variance. For more details + * about normalization: + * https://tinyurl.com/dgc-normalization. * For best results the recommended value is about 10% - 20% of the standard * deviation of the input feature. Refer to section 3.2 of the SmoothGrad * paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. @@ -778,11 +772,9 @@ public Builder setNoiseSigma(float value) { * This is a single float value and will be used to add noise to all the * features. Use this field when all features are normalized to have the * same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where - * features are normalized to have 0-mean and 1-variance. 
Refer to - * this doc for more details about normalization: - * https: - * //developers.google.com/machine-learning - * // /data-prep/transform/normalization. + * features are normalized to have 0-mean and 1-variance. For more details + * about normalization: + * https://tinyurl.com/dgc-normalization. * For best results the recommended value is about 10% - 20% of the standard * deviation of the input feature. Refer to section 3.2 of the SmoothGrad * paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java index 47d57d5ab..fd7bd2dfe 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java @@ -30,11 +30,9 @@ public interface SmoothGradConfigOrBuilder * This is a single float value and will be used to add noise to all the * features. Use this field when all features are normalized to have the * same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where - * features are normalized to have 0-mean and 1-variance. Refer to - * this doc for more details about normalization: - * https: - * //developers.google.com/machine-learning - * // /data-prep/transform/normalization. + * features are normalized to have 0-mean and 1-variance. For more details + * about normalization: + * https://tinyurl.com/dgc-normalization. * For best results the recommended value is about 10% - 20% of the standard * deviation of the input feature. 
Refer to section 3.2 of the SmoothGrad * paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java index 4873428eb..d769cfb2d 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java @@ -101,87 +101,100 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "google/api/field_behavior.proto\032\031google/" + "api/resource.proto\032\036google/protobuf/dura" + "tion.proto\032\034google/protobuf/struct.proto" - + "\032\037google/protobuf/timestamp.proto\032\034googl" - + "e/api/annotations.proto\"\330\004\n\005Trial\022\017\n\002id\030" - + "\002 \001(\tB\003\340A\003\022@\n\005state\030\003 \001(\0162,.google.cloud" - + ".aiplatform.v1beta1.Trial.StateB\003\340A\003\022I\n\n" - + "parameters\030\004 \003(\01320.google.cloud.aiplatfo" - + "rm.v1beta1.Trial.ParameterB\003\340A\003\022L\n\021final" - + "_measurement\030\005 \001(\0132,.google.cloud.aiplat" - + "form.v1beta1.MeasurementB\003\340A\003\0223\n\nstart_t" - + "ime\030\007 \001(\0132\032.google.protobuf.TimestampB\003\340" - + "A\003\0221\n\010end_time\030\010 \001(\0132\032.google.protobuf.T" - + "imestampB\003\340A\003\022?\n\ncustom_job\030\013 \001(\tB+\340A\003\372A" - + "%\n#aiplatform.googleapis.com/CustomJob\032R" - + "\n\tParameter\022\031\n\014parameter_id\030\001 \001(\tB\003\340A\003\022*" - + "\n\005value\030\002 \001(\0132\026.google.protobuf.ValueB\003\340" - + "A\003\"f\n\005State\022\025\n\021STATE_UNSPECIFIED\020\000\022\r\n\tRE" - + "QUESTED\020\001\022\n\n\006ACTIVE\020\002\022\014\n\010STOPPING\020\003\022\r\n\tS" - + 
"UCCEEDED\020\004\022\016\n\nINFEASIBLE\020\005\"\310\021\n\tStudySpec" - + "\022K\n\007metrics\030\001 \003(\01325.google.cloud.aiplatf" - + "orm.v1beta1.StudySpec.MetricSpecB\003\340A\002\022Q\n" - + "\nparameters\030\002 \003(\01328.google.cloud.aiplatf" - + "orm.v1beta1.StudySpec.ParameterSpecB\003\340A\002" - + "\022G\n\talgorithm\030\003 \001(\01624.google.cloud.aipla" - + "tform.v1beta1.StudySpec.Algorithm\032\272\001\n\nMe" - + "tricSpec\022\026\n\tmetric_id\030\001 \001(\tB\003\340A\002\022Q\n\004goal" - + "\030\002 \001(\0162>.google.cloud.aiplatform.v1beta1" - + ".StudySpec.MetricSpec.GoalTypeB\003\340A\002\"A\n\010G" - + "oalType\022\031\n\025GOAL_TYPE_UNSPECIFIED\020\000\022\014\n\010MA" - + "XIMIZE\020\001\022\014\n\010MINIMIZE\020\002\032\310\r\n\rParameterSpec" - + "\022e\n\021double_value_spec\030\002 \001(\0132H.google.clo" - + "ud.aiplatform.v1beta1.StudySpec.Paramete" - + "rSpec.DoubleValueSpecH\000\022g\n\022integer_value" - + "_spec\030\003 \001(\0132I.google.cloud.aiplatform.v1" - + "beta1.StudySpec.ParameterSpec.IntegerVal" - + "ueSpecH\000\022o\n\026categorical_value_spec\030\004 \001(\013" - + "2M.google.cloud.aiplatform.v1beta1.Study" - + "Spec.ParameterSpec.CategoricalValueSpecH" - + "\000\022i\n\023discrete_value_spec\030\005 \001(\0132J.google." 
- + "cloud.aiplatform.v1beta1.StudySpec.Param" - + "eterSpec.DiscreteValueSpecH\000\022\031\n\014paramete" - + "r_id\030\001 \001(\tB\003\340A\002\022V\n\nscale_type\030\006 \001(\0162B.go" + + "\032\037google/protobuf/timestamp.proto\032\036googl" + + "e/protobuf/wrappers.proto\032\034google/api/an" + + "notations.proto\"\306\005\n\005Trial\022\017\n\002id\030\002 \001(\tB\003\340" + + "A\003\022@\n\005state\030\003 \001(\0162,.google.cloud.aiplatf" + + "orm.v1beta1.Trial.StateB\003\340A\003\022I\n\nparamete" + + "rs\030\004 \003(\01320.google.cloud.aiplatform.v1bet" + + "a1.Trial.ParameterB\003\340A\003\022L\n\021final_measure" + + "ment\030\005 \001(\0132,.google.cloud.aiplatform.v1b" + + "eta1.MeasurementB\003\340A\003\0223\n\nstart_time\030\007 \001(" + + "\0132\032.google.protobuf.TimestampB\003\340A\003\0221\n\010en" + + "d_time\030\010 \001(\0132\032.google.protobuf.Timestamp" + + "B\003\340A\003\022?\n\ncustom_job\030\013 \001(\tB+\340A\003\372A%\n#aipla" + + "tform.googleapis.com/CustomJob\032R\n\tParame" + + "ter\022\031\n\014parameter_id\030\001 \001(\tB\003\340A\003\022*\n\005value\030" + + "\002 \001(\0132\026.google.protobuf.ValueB\003\340A\003\"f\n\005St" + + "ate\022\025\n\021STATE_UNSPECIFIED\020\000\022\r\n\tREQUESTED\020" + + "\001\022\n\n\006ACTIVE\020\002\022\014\n\010STOPPING\020\003\022\r\n\tSUCCEEDED" + + "\020\004\022\016\n\nINFEASIBLE\020\005:l\352Ai\n\037aiplatform.goog" + + "leapis.com/Trial\022Fprojects/{project}/loc" + + "ations/{location}/studies/{study}/trials" + + "/{trial}\"\307\024\n\tStudySpec\022K\n\007metrics\030\001 \003(\0132" + + "5.google.cloud.aiplatform.v1beta1.StudyS" + + "pec.MetricSpecB\003\340A\002\022Q\n\nparameters\030\002 \003(\0132" + + "8.google.cloud.aiplatform.v1beta1.StudyS" + + "pec.ParameterSpecB\003\340A\002\022G\n\talgorithm\030\003 \001(" + + "\01624.google.cloud.aiplatform.v1beta1.Stud" + + "ySpec.Algorithm\022V\n\021observation_noise\030\006 \001" + + "(\0162;.google.cloud.aiplatform.v1beta1.Stu" + + 
"dySpec.ObservationNoise\022g\n\032measurement_s" + + "election_type\030\007 \001(\0162C.google.cloud.aipla" + + "tform.v1beta1.StudySpec.MeasurementSelec" + + "tionType\032\272\001\n\nMetricSpec\022\026\n\tmetric_id\030\001 \001" + + "(\tB\003\340A\002\022Q\n\004goal\030\002 \001(\0162>.google.cloud.aip" + + "latform.v1beta1.StudySpec.MetricSpec.Goa" + + "lTypeB\003\340A\002\"A\n\010GoalType\022\031\n\025GOAL_TYPE_UNSP" + + "ECIFIED\020\000\022\014\n\010MAXIMIZE\020\001\022\014\n\010MINIMIZE\020\002\032\310\r" + + "\n\rParameterSpec\022e\n\021double_value_spec\030\002 \001" + + "(\0132H.google.cloud.aiplatform.v1beta1.Stu" + + "dySpec.ParameterSpec.DoubleValueSpecH\000\022g" + + "\n\022integer_value_spec\030\003 \001(\0132I.google.clou" + + "d.aiplatform.v1beta1.StudySpec.Parameter" + + "Spec.IntegerValueSpecH\000\022o\n\026categorical_v" + + "alue_spec\030\004 \001(\0132M.google.cloud.aiplatfor" + + "m.v1beta1.StudySpec.ParameterSpec.Catego" + + "ricalValueSpecH\000\022i\n\023discrete_value_spec\030" + + "\005 \001(\0132J.google.cloud.aiplatform.v1beta1." + + "StudySpec.ParameterSpec.DiscreteValueSpe" + + "cH\000\022\031\n\014parameter_id\030\001 \001(\tB\003\340A\002\022V\n\nscale_" + + "type\030\006 \001(\0162B.google.cloud.aiplatform.v1b" + + "eta1.StudySpec.ParameterSpec.ScaleType\022v" + + "\n\033conditional_parameter_specs\030\n \003(\0132Q.go" + "ogle.cloud.aiplatform.v1beta1.StudySpec." 
- + "ParameterSpec.ScaleType\022v\n\033conditional_p" - + "arameter_specs\030\n \003(\0132Q.google.cloud.aipl" - + "atform.v1beta1.StudySpec.ParameterSpec.C" - + "onditionalParameterSpec\032A\n\017DoubleValueSp" - + "ec\022\026\n\tmin_value\030\001 \001(\001B\003\340A\002\022\026\n\tmax_value\030" - + "\002 \001(\001B\003\340A\002\032B\n\020IntegerValueSpec\022\026\n\tmin_va" - + "lue\030\001 \001(\003B\003\340A\002\022\026\n\tmax_value\030\002 \001(\003B\003\340A\002\032+" - + "\n\024CategoricalValueSpec\022\023\n\006values\030\001 \003(\tB\003" - + "\340A\002\032(\n\021DiscreteValueSpec\022\023\n\006values\030\001 \003(\001" - + "B\003\340A\002\032\271\005\n\030ConditionalParameterSpec\022\212\001\n\026p" - + "arent_discrete_values\030\002 \001(\0132h.google.clo" - + "ud.aiplatform.v1beta1.StudySpec.Paramete" - + "rSpec.ConditionalParameterSpec.DiscreteV" - + "alueConditionH\000\022\200\001\n\021parent_int_values\030\003 " - + "\001(\0132c.google.cloud.aiplatform.v1beta1.St" - + "udySpec.ParameterSpec.ConditionalParamet" - + "erSpec.IntValueConditionH\000\022\220\001\n\031parent_ca" - + "tegorical_values\030\004 \001(\0132k.google.cloud.ai" - + "platform.v1beta1.StudySpec.ParameterSpec" - + ".ConditionalParameterSpec.CategoricalVal" - + "ueConditionH\000\022U\n\016parameter_spec\030\001 \001(\01328." 
+ + "ParameterSpec.ConditionalParameterSpec\032A" + + "\n\017DoubleValueSpec\022\026\n\tmin_value\030\001 \001(\001B\003\340A" + + "\002\022\026\n\tmax_value\030\002 \001(\001B\003\340A\002\032B\n\020IntegerValu" + + "eSpec\022\026\n\tmin_value\030\001 \001(\003B\003\340A\002\022\026\n\tmax_val" + + "ue\030\002 \001(\003B\003\340A\002\032+\n\024CategoricalValueSpec\022\023\n" + + "\006values\030\001 \003(\tB\003\340A\002\032(\n\021DiscreteValueSpec\022" + + "\023\n\006values\030\001 \003(\001B\003\340A\002\032\271\005\n\030ConditionalPara" + + "meterSpec\022\212\001\n\026parent_discrete_values\030\002 \001" + + "(\0132h.google.cloud.aiplatform.v1beta1.Stu" + + "dySpec.ParameterSpec.ConditionalParamete" + + "rSpec.DiscreteValueConditionH\000\022\200\001\n\021paren" + + "t_int_values\030\003 \001(\0132c.google.cloud.aiplat" + + "form.v1beta1.StudySpec.ParameterSpec.Con" + + "ditionalParameterSpec.IntValueConditionH" + + "\000\022\220\001\n\031parent_categorical_values\030\004 \001(\0132k." 
+ "google.cloud.aiplatform.v1beta1.StudySpe" - + "c.ParameterSpecB\003\340A\002\032-\n\026DiscreteValueCon" - + "dition\022\023\n\006values\030\001 \003(\001B\003\340A\002\032(\n\021IntValueC" - + "ondition\022\023\n\006values\030\001 \003(\003B\003\340A\002\0320\n\031Categor" - + "icalValueCondition\022\023\n\006values\030\001 \003(\tB\003\340A\002B" - + "\030\n\026parent_value_condition\"n\n\tScaleType\022\032" - + "\n\026SCALE_TYPE_UNSPECIFIED\020\000\022\025\n\021UNIT_LINEA" - + "R_SCALE\020\001\022\022\n\016UNIT_LOG_SCALE\020\002\022\032\n\026UNIT_RE" - + "VERSE_LOG_SCALE\020\003B\026\n\024parameter_value_spe" - + "c\"J\n\tAlgorithm\022\031\n\025ALGORITHM_UNSPECIFIED\020" - + "\000\022\017\n\013GRID_SEARCH\020\002\022\021\n\rRANDOM_SEARCH\020\003\"\247\001" - + "\n\013Measurement\022\027\n\nstep_count\030\002 \001(\003B\003\340A\003\022I" - + "\n\007metrics\030\003 \003(\01323.google.cloud.aiplatfor" - + "m.v1beta1.Measurement.MetricB\003\340A\003\0324\n\006Met" - + "ric\022\026\n\tmetric_id\030\001 \001(\tB\003\340A\003\022\022\n\005value\030\002 \001" - + "(\001B\003\340A\003B~\n#com.google.cloud.aiplatform.v" - + "1beta1B\nStudyProtoP\001ZIgoogle.golang.org/" - + "genproto/googleapis/cloud/aiplatform/v1b" - + "eta1;aiplatformb\006proto3" + + "c.ParameterSpec.ConditionalParameterSpec" + + ".CategoricalValueConditionH\000\022U\n\016paramete" + + "r_spec\030\001 \001(\01328.google.cloud.aiplatform.v" + + "1beta1.StudySpec.ParameterSpecB\003\340A\002\032-\n\026D" + + "iscreteValueCondition\022\023\n\006values\030\001 \003(\001B\003\340" + + "A\002\032(\n\021IntValueCondition\022\023\n\006values\030\001 \003(\003B" + + "\003\340A\002\0320\n\031CategoricalValueCondition\022\023\n\006val" + + "ues\030\001 \003(\tB\003\340A\002B\030\n\026parent_value_condition" + + "\"n\n\tScaleType\022\032\n\026SCALE_TYPE_UNSPECIFIED\020" + + "\000\022\025\n\021UNIT_LINEAR_SCALE\020\001\022\022\n\016UNIT_LOG_SCA" + + 
"LE\020\002\022\032\n\026UNIT_REVERSE_LOG_SCALE\020\003B\026\n\024para" + + "meter_value_spec\"J\n\tAlgorithm\022\031\n\025ALGORIT" + + "HM_UNSPECIFIED\020\000\022\017\n\013GRID_SEARCH\020\002\022\021\n\rRAN" + + "DOM_SEARCH\020\003\"H\n\020ObservationNoise\022!\n\035OBSE" + + "RVATION_NOISE_UNSPECIFIED\020\000\022\007\n\003LOW\020\001\022\010\n\004" + + "HIGH\020\002\"r\n\030MeasurementSelectionType\022*\n&ME" + + "ASUREMENT_SELECTION_TYPE_UNSPECIFIED\020\000\022\024" + + "\n\020LAST_MEASUREMENT\020\001\022\024\n\020BEST_MEASUREMENT" + + "\020\002\"\247\001\n\013Measurement\022\027\n\nstep_count\030\002 \001(\003B\003" + + "\340A\003\022I\n\007metrics\030\003 \003(\01323.google.cloud.aipl" + + "atform.v1beta1.Measurement.MetricB\003\340A\003\0324" + + "\n\006Metric\022\026\n\tmetric_id\030\001 \001(\tB\003\340A\003\022\022\n\005valu" + + "e\030\002 \001(\001B\003\340A\003B~\n#com.google.cloud.aiplatf" + + "orm.v1beta1B\nStudyProtoP\001ZIgoogle.golang" + + ".org/genproto/googleapis/cloud/aiplatfor" + + "m/v1beta1;aiplatformb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -192,6 +205,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.StructProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), + com.google.protobuf.WrappersProto.getDescriptor(), com.google.api.AnnotationsProto.getDescriptor(), }); internal_static_google_cloud_aiplatform_v1beta1_Trial_descriptor = @@ -216,7 +230,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_StudySpec_descriptor, new java.lang.String[] { - "Metrics", "Parameters", "Algorithm", + "Metrics", "Parameters", "Algorithm", "ObservationNoise", "MeasurementSelectionType", }); 
internal_static_google_cloud_aiplatform_v1beta1_StudySpec_MetricSpec_descriptor = internal_static_google_cloud_aiplatform_v1beta1_StudySpec_descriptor @@ -350,6 +364,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); + registry.add(com.google.api.ResourceProto.resource); registry.add(com.google.api.ResourceProto.resourceReference); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); @@ -358,6 +373,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.StructProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); + com.google.protobuf.WrappersProto.getDescriptor(); com.google.api.AnnotationsProto.getDescriptor(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java index 251335b9d..4b8c8ce8b 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java @@ -41,6 +41,8 @@ private StudySpec() { metrics_ = java.util.Collections.emptyList(); parameters_ = java.util.Collections.emptyList(); algorithm_ = 0; + observationNoise_ = 0; + measurementSelectionType_ = 0; } @java.lang.Override @@ -108,6 +110,20 @@ private StudySpec( algorithm_ = rawValue; break; } + case 48: + { + int rawValue = input.readEnum(); + + observationNoise_ = rawValue; + break; + } + case 56: + { + int rawValue = input.readEnum(); + + measurementSelectionType_ = rawValue; + break; + } default: { if 
(!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { @@ -307,6 +323,345 @@ private Algorithm(int value) { // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.StudySpec.Algorithm) } + /** + * + * + *
+   * Describes the noise level of the repeated observations.
+   * "Noisy" means that the repeated observations with the same Trial parameters
+   * may lead to different metric evaluations.
+   * 
+ * + * Protobuf enum {@code google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise} + */ + public enum ObservationNoise implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * The default noise level chosen by the AI Platform service.
+     * 
+ * + * OBSERVATION_NOISE_UNSPECIFIED = 0; + */ + OBSERVATION_NOISE_UNSPECIFIED(0), + /** + * + * + *
+     * AI Platform Vizier assumes that the objective function is (nearly)
+     * perfectly reproducible, and will never repeat the same Trial
+     * parameters.
+     * 
+ * + * LOW = 1; + */ + LOW(1), + /** + * + * + *
+     * AI Platform Vizier will estimate the amount of noise in metric
+     * evaluations, it may repeat the same Trial parameters more than once.
+     * 
+ * + * HIGH = 2; + */ + HIGH(2), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * The default noise level chosen by the AI Platform service.
+     * 
+ * + * OBSERVATION_NOISE_UNSPECIFIED = 0; + */ + public static final int OBSERVATION_NOISE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * AI Platform Vizier assumes that the objective function is (nearly)
+     * perfectly reproducible, and will never repeat the same Trial
+     * parameters.
+     * 
+ * + * LOW = 1; + */ + public static final int LOW_VALUE = 1; + /** + * + * + *
+     * AI Platform Vizier will estimate the amount of noise in metric
+     * evaluations, it may repeat the same Trial parameters more than once.
+     * 
+ * + * HIGH = 2; + */ + public static final int HIGH_VALUE = 2; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static ObservationNoise valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static ObservationNoise forNumber(int value) { + switch (value) { + case 0: + return OBSERVATION_NOISE_UNSPECIFIED; + case 1: + return LOW; + case 2: + return HIGH; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ObservationNoise findValueByNumber(int number) { + return ObservationNoise.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.StudySpec.getDescriptor().getEnumTypes().get(1); + } + + private static final ObservationNoise[] VALUES = values(); + + public static ObservationNoise valueOf( + 
com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private ObservationNoise(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise) + } + + /** + * + * + *
+   * This indicates which measurement to use if/when the service automatically
+   * selects the final measurement from previously reported intermediate
+   * measurements. Choose this based on two considerations:
+   *  A) Do you expect your measurements to monotonically improve?
+   *     If so, choose LAST_MEASUREMENT. On the other hand, if you're in a
+   *     situation where your system can "over-train" and you expect the
+   *     performance to get better for a while but then start declining,
+   *     choose BEST_MEASUREMENT.
+   *  B) Are your measurements significantly noisy and/or irreproducible?
+   *     If so, BEST_MEASUREMENT will tend to be over-optimistic, and it
+   *     may be better to choose LAST_MEASUREMENT.
+   *  If both or neither of (A) and (B) apply, it doesn't matter which
+   *  selection type is chosen.
+   * 
+ * + * Protobuf enum {@code google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType} + */ + public enum MeasurementSelectionType implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * Will be treated as LAST_MEASUREMENT.
+     * 
+ * + * MEASUREMENT_SELECTION_TYPE_UNSPECIFIED = 0; + */ + MEASUREMENT_SELECTION_TYPE_UNSPECIFIED(0), + /** + * + * + *
+     * Use the last measurement reported.
+     * 
+ * + * LAST_MEASUREMENT = 1; + */ + LAST_MEASUREMENT(1), + /** + * + * + *
+     * Use the best measurement reported.
+     * 
+ * + * BEST_MEASUREMENT = 2; + */ + BEST_MEASUREMENT(2), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * Will be treated as LAST_MEASUREMENT.
+     * 
+ * + * MEASUREMENT_SELECTION_TYPE_UNSPECIFIED = 0; + */ + public static final int MEASUREMENT_SELECTION_TYPE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * Use the last measurement reported.
+     * 
+ * + * LAST_MEASUREMENT = 1; + */ + public static final int LAST_MEASUREMENT_VALUE = 1; + /** + * + * + *
+     * Use the best measurement reported.
+     * 
+ * + * BEST_MEASUREMENT = 2; + */ + public static final int BEST_MEASUREMENT_VALUE = 2; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static MeasurementSelectionType valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static MeasurementSelectionType forNumber(int value) { + switch (value) { + case 0: + return MEASUREMENT_SELECTION_TYPE_UNSPECIFIED; + case 1: + return LAST_MEASUREMENT; + case 2: + return BEST_MEASUREMENT; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public MeasurementSelectionType findValueByNumber(int number) { + return MeasurementSelectionType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.aiplatform.v1beta1.StudySpec.getDescriptor().getEnumTypes().get(2); + } + + private 
static final MeasurementSelectionType[] VALUES = values(); + + public static MeasurementSelectionType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private MeasurementSelectionType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType) + } + public interface MetricSpecOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.StudySpec.MetricSpec) @@ -12979,6 +13334,92 @@ public com.google.cloud.aiplatform.v1beta1.StudySpec.Algorithm getAlgorithm() { : result; } + public static final int OBSERVATION_NOISE_FIELD_NUMBER = 6; + private int observationNoise_; + /** + * + * + *
+   * The observation noise level of the study.
+   * Currently only supported by the Vizier service. Not supported by
+   * HyperparamterTuningJob or TrainingPipeline.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * @return The enum numeric value on the wire for observationNoise. + */ + @java.lang.Override + public int getObservationNoiseValue() { + return observationNoise_; + } + /** + * + * + *
+   * The observation noise level of the study.
+   * Currently only supported by the Vizier service. Not supported by
+   * HyperparamterTuningJob or TrainingPipeline.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * @return The observationNoise. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise getObservationNoise() { + @SuppressWarnings("deprecation") + com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise result = + com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.valueOf(observationNoise_); + return result == null + ? com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.UNRECOGNIZED + : result; + } + + public static final int MEASUREMENT_SELECTION_TYPE_FIELD_NUMBER = 7; + private int measurementSelectionType_; + /** + * + * + *
+   * Describe which measurement selection type will be used
+   * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @return The enum numeric value on the wire for measurementSelectionType. + */ + @java.lang.Override + public int getMeasurementSelectionTypeValue() { + return measurementSelectionType_; + } + /** + * + * + *
+   * Describe which measurement selection type will be used
+   * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @return The measurementSelectionType. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType + getMeasurementSelectionType() { + @SuppressWarnings("deprecation") + com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType result = + com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.valueOf( + measurementSelectionType_); + return result == null + ? com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.UNRECOGNIZED + : result; + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -13004,6 +13445,18 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io .getNumber()) { output.writeEnum(3, algorithm_); } + if (observationNoise_ + != com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise + .OBSERVATION_NOISE_UNSPECIFIED + .getNumber()) { + output.writeEnum(6, observationNoise_); + } + if (measurementSelectionType_ + != com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType + .MEASUREMENT_SELECTION_TYPE_UNSPECIFIED + .getNumber()) { + output.writeEnum(7, measurementSelectionType_); + } unknownFields.writeTo(output); } @@ -13024,6 +13477,18 @@ public int getSerializedSize() { .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, algorithm_); } + if (observationNoise_ + != com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise + .OBSERVATION_NOISE_UNSPECIFIED + .getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, observationNoise_); + } + if (measurementSelectionType_ + != com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType + .MEASUREMENT_SELECTION_TYPE_UNSPECIFIED + .getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(7, measurementSelectionType_); + } size += 
unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -13043,6 +13508,8 @@ public boolean equals(final java.lang.Object obj) { if (!getMetricsList().equals(other.getMetricsList())) return false; if (!getParametersList().equals(other.getParametersList())) return false; if (algorithm_ != other.algorithm_) return false; + if (observationNoise_ != other.observationNoise_) return false; + if (measurementSelectionType_ != other.measurementSelectionType_) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -13064,6 +13531,10 @@ public int hashCode() { } hash = (37 * hash) + ALGORITHM_FIELD_NUMBER; hash = (53 * hash) + algorithm_; + hash = (37 * hash) + OBSERVATION_NOISE_FIELD_NUMBER; + hash = (53 * hash) + observationNoise_; + hash = (37 * hash) + MEASUREMENT_SELECTION_TYPE_FIELD_NUMBER; + hash = (53 * hash) + measurementSelectionType_; hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -13226,6 +13697,10 @@ public Builder clear() { } algorithm_ = 0; + observationNoise_ = 0; + + measurementSelectionType_ = 0; + return this; } @@ -13273,6 +13748,8 @@ public com.google.cloud.aiplatform.v1beta1.StudySpec buildPartial() { result.parameters_ = parametersBuilder_.build(); } result.algorithm_ = algorithm_; + result.observationNoise_ = observationNoise_; + result.measurementSelectionType_ = measurementSelectionType_; onBuilt(); return result; } @@ -13379,6 +13856,12 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.StudySpec other) { if (other.algorithm_ != 0) { setAlgorithmValue(other.getAlgorithmValue()); } + if (other.observationNoise_ != 0) { + setObservationNoiseValue(other.getObservationNoiseValue()); + } + if (other.measurementSelectionType_ != 0) { + setMeasurementSelectionTypeValue(other.getMeasurementSelectionTypeValue()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -14306,6 +14789,221 @@ public Builder clearAlgorithm() { 
return this; } + private int observationNoise_ = 0; + /** + * + * + *
+     * The observation noise level of the study.
+     * Currently only supported by the Vizier service. Not supported by
+     * HyperparamterTuningJob or TrainingPipeline.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * + * @return The enum numeric value on the wire for observationNoise. + */ + @java.lang.Override + public int getObservationNoiseValue() { + return observationNoise_; + } + /** + * + * + *
+     * The observation noise level of the study.
+     * Currently only supported by the Vizier service. Not supported by
+     * HyperparamterTuningJob or TrainingPipeline.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * + * @param value The enum numeric value on the wire for observationNoise to set. + * @return This builder for chaining. + */ + public Builder setObservationNoiseValue(int value) { + + observationNoise_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The observation noise level of the study.
+     * Currently only supported by the Vizier service. Not supported by
+     * HyperparamterTuningJob or TrainingPipeline.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * + * @return The observationNoise. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise getObservationNoise() { + @SuppressWarnings("deprecation") + com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise result = + com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.valueOf(observationNoise_); + return result == null + ? com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * The observation noise level of the study.
+     * Currently only supported by the Vizier service. Not supported by
+     * HyperparamterTuningJob or TrainingPipeline.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * + * @param value The observationNoise to set. + * @return This builder for chaining. + */ + public Builder setObservationNoise( + com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise value) { + if (value == null) { + throw new NullPointerException(); + } + + observationNoise_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * The observation noise level of the study.
+     * Currently only supported by the Vizier service. Not supported by
+     * HyperparamterTuningJob or TrainingPipeline.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * + * @return This builder for chaining. + */ + public Builder clearObservationNoise() { + + observationNoise_ = 0; + onChanged(); + return this; + } + + private int measurementSelectionType_ = 0; + /** + * + * + *
+     * Describe which measurement selection type will be used
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @return The enum numeric value on the wire for measurementSelectionType. + */ + @java.lang.Override + public int getMeasurementSelectionTypeValue() { + return measurementSelectionType_; + } + /** + * + * + *
+     * Describe which measurement selection type will be used
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @param value The enum numeric value on the wire for measurementSelectionType to set. + * @return This builder for chaining. + */ + public Builder setMeasurementSelectionTypeValue(int value) { + + measurementSelectionType_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Describe which measurement selection type will be used
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @return The measurementSelectionType. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType + getMeasurementSelectionType() { + @SuppressWarnings("deprecation") + com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType result = + com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.valueOf( + measurementSelectionType_); + return result == null + ? com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * Describe which measurement selection type will be used
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @param value The measurementSelectionType to set. + * @return This builder for chaining. + */ + public Builder setMeasurementSelectionType( + com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType value) { + if (value == null) { + throw new NullPointerException(); + } + + measurementSelectionType_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * Describe which measurement selection type will be used
+     * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @return This builder for chaining. + */ + public Builder clearMeasurementSelectionType() { + + measurementSelectionType_ = 0; + onChanged(); + return this; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java index 558724456..fcbd0fde9 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java @@ -172,4 +172,63 @@ com.google.cloud.aiplatform.v1beta1.StudySpec.ParameterSpecOrBuilder getParamete * @return The algorithm. */ com.google.cloud.aiplatform.v1beta1.StudySpec.Algorithm getAlgorithm(); + + /** + * + * + *
+   * The observation noise level of the study.
+   * Currently only supported by the Vizier service. Not supported by
+   * HyperparamterTuningJob or TrainingPipeline.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * @return The enum numeric value on the wire for observationNoise. + */ + int getObservationNoiseValue(); + /** + * + * + *
+   * The observation noise level of the study.
+   * Currently only supported by the Vizier service. Not supported by
+   * HyperparamterTuningJob or TrainingPipeline.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6; + * + * @return The observationNoise. + */ + com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise getObservationNoise(); + + /** + * + * + *
+   * Describe which measurement selection type will be used
+   * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @return The enum numeric value on the wire for measurementSelectionType. + */ + int getMeasurementSelectionTypeValue(); + /** + * + * + *
+   * Describe which measurement selection type will be used
+   * 
+ * + * + * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7; + * + * + * @return The measurementSelectionType. + */ + com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType + getMeasurementSelectionType(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java index 353aa7d57..03f4b1206 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java @@ -254,6 +254,23 @@ private TrainingPipeline( input.readMessage( LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); labels_.getMutableMap().put(labels__.getKey(), labels__.getValue()); + break; + } + case 146: + { + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null; + if (encryptionSpec_ != null) { + subBuilder = encryptionSpec_.toBuilder(); + } + encryptionSpec_ = + input.readMessage( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionSpec_); + encryptionSpec_ = subBuilder.buildPartial(); + } + break; } default: @@ -655,7 +672,7 @@ public com.google.protobuf.ValueOrBuilder getTrainingTaskMetadataOrBuilder() { * * *
-   * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+   * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
    * by this TrainingPipeline. The TrainingPipeline's
    * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
    * description should be populated, and if there are any special requirements
@@ -683,7 +700,7 @@ public boolean hasModelToUpload() {
    *
    *
    * 
-   * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+   * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
    * by this TrainingPipeline. The TrainingPipeline's
    * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
    * description should be populated, and if there are any special requirements
@@ -713,7 +730,7 @@ public com.google.cloud.aiplatform.v1beta1.Model getModelToUpload() {
    *
    *
    * 
-   * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+   * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
    * by this TrainingPipeline. The TrainingPipeline's
    * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
    * description should be populated, and if there are any special requirements
@@ -1144,6 +1161,63 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) {
     return map.get(key);
   }
 
+  public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 18;
+  private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+  /**
+   *
+   *
+   * 
+   * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+   * TrainingPipeline will be secured by this key.
+   * Note: Model trained by this TrainingPipeline is also secured by this key if
+   * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + * + * @return Whether the encryptionSpec field is set. + */ + @java.lang.Override + public boolean hasEncryptionSpec() { + return encryptionSpec_ != null; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+   * TrainingPipeline will be secured by this key.
+   * Note: Model trained by this TrainingPipeline is also secured by this key if
+   * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + * + * @return The encryptionSpec. + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + /** + * + * + *
+   * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+   * TrainingPipeline will be secured by this key.
+   * Note: Model trained by this TrainingPipeline is also secured by this key if
+   * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + @java.lang.Override + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { + return getEncryptionSpec(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -1201,6 +1275,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io } com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 15); + if (encryptionSpec_ != null) { + output.writeMessage(18, getEncryptionSpec()); + } unknownFields.writeTo(output); } @@ -1262,6 +1339,9 @@ public int getSerializedSize() { .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(15, labels__); } + if (encryptionSpec_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(18, getEncryptionSpec()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1319,6 +1399,10 @@ public boolean equals(final java.lang.Object obj) { if (!getUpdateTime().equals(other.getUpdateTime())) return false; } if (!internalGetLabels().equals(other.internalGetLabels())) return false; + if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; + if (hasEncryptionSpec()) { + if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1378,6 +1462,10 @@ public int hashCode() { hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + internalGetLabels().hashCode(); } + if (hasEncryptionSpec()) { + hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionSpec().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1610,6 +1698,12 @@ public Builder clear() { updateTimeBuilder_ = null; } internalGetMutableLabels().clear(); + if (encryptionSpecBuilder_ 
== null) { + encryptionSpec_ = null; + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } return this; } @@ -1689,6 +1783,11 @@ public com.google.cloud.aiplatform.v1beta1.TrainingPipeline buildPartial() { } result.labels_ = internalGetLabels(); result.labels_.makeImmutable(); + if (encryptionSpecBuilder_ == null) { + result.encryptionSpec_ = encryptionSpec_; + } else { + result.encryptionSpec_ = encryptionSpecBuilder_.build(); + } onBuilt(); return result; } @@ -1782,6 +1881,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.TrainingPipeline ot mergeUpdateTime(other.getUpdateTime()); } internalGetMutableLabels().mergeFrom(other.internalGetLabels()); + if (other.hasEncryptionSpec()) { + mergeEncryptionSpec(other.getEncryptionSpec()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2860,7 +2962,7 @@ public com.google.protobuf.ValueOrBuilder getTrainingTaskMetadataOrBuilder() { * * *
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -2887,7 +2989,7 @@ public boolean hasModelToUpload() {
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -2920,7 +3022,7 @@ public com.google.cloud.aiplatform.v1beta1.Model getModelToUpload() {
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -2955,7 +3057,7 @@ public Builder setModelToUpload(com.google.cloud.aiplatform.v1beta1.Model value)
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -2988,7 +3090,7 @@ public Builder setModelToUpload(
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -3027,7 +3129,7 @@ public Builder mergeModelToUpload(com.google.cloud.aiplatform.v1beta1.Model valu
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -3060,7 +3162,7 @@ public Builder clearModelToUpload() {
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -3087,7 +3189,7 @@ public com.google.cloud.aiplatform.v1beta1.Model.Builder getModelToUploadBuilder
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -3118,7 +3220,7 @@ public com.google.cloud.aiplatform.v1beta1.ModelOrBuilder getModelToUploadOrBuil
      *
      *
      * 
-     * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+     * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
      * by this TrainingPipeline. The TrainingPipeline's
      * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
      * description should be populated, and if there are any special requirements
@@ -4437,6 +4539,220 @@ public Builder putAllLabels(java.util.Map va
       return this;
     }
 
+    private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+    private com.google.protobuf.SingleFieldBuilderV3<
+            com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+            com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+            com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+        encryptionSpecBuilder_;
+    /**
+     *
+     *
+     * 
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + * + * @return Whether the encryptionSpec field is set. + */ + public boolean hasEncryptionSpec() { + return encryptionSpecBuilder_ != null || encryptionSpec_ != null; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + * + * @return The encryptionSpec. + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } else { + return encryptionSpecBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionSpec_ = value; + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + public Builder setEncryptionSpec( + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = builderForValue.build(); + onChanged(); + } else { + encryptionSpecBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { + if (encryptionSpecBuilder_ == null) { + if (encryptionSpec_ != null) { + encryptionSpec_ = + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_) + .mergeFrom(value) + .buildPartial(); + } else { + encryptionSpec_ = value; + } + onChanged(); + } else { + encryptionSpecBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + public Builder clearEncryptionSpec() { + if (encryptionSpecBuilder_ == null) { + encryptionSpec_ = null; + onChanged(); + } else { + encryptionSpec_ = null; + encryptionSpecBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { + + onChanged(); + return getEncryptionSpecFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder + getEncryptionSpecOrBuilder() { + if (encryptionSpecBuilder_ != null) { + return encryptionSpecBuilder_.getMessageOrBuilder(); + } else { + return encryptionSpec_ == null + ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() + : encryptionSpec_; + } + } + /** + * + * + *
+     * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+     * TrainingPipeline will be secured by this key.
+     * Note: Model trained by this TrainingPipeline is also secured by this key if
+     * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+     * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> + getEncryptionSpecFieldBuilder() { + if (encryptionSpecBuilder_ == null) { + encryptionSpecBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.aiplatform.v1beta1.EncryptionSpec, + com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( + getEncryptionSpec(), getParentForChildren(), isClean()); + encryptionSpec_ = null; + } + return encryptionSpecBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java index 671252855..925dbdf68 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java @@ -263,7 +263,7 @@ public interface TrainingPipelineOrBuilder * * *
-   * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+   * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
    * by this TrainingPipeline. The TrainingPipeline's
    * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
    * description should be populated, and if there are any special requirements
@@ -288,7 +288,7 @@ public interface TrainingPipelineOrBuilder
    *
    *
    * 
-   * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+   * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
    * by this TrainingPipeline. The TrainingPipeline's
    * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
    * description should be populated, and if there are any special requirements
@@ -313,7 +313,7 @@ public interface TrainingPipelineOrBuilder
    *
    *
    * 
-   * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+   * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
    * by this TrainingPipeline. The TrainingPipeline's
    * [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
    * description should be populated, and if there are any special requirements
@@ -634,4 +634,48 @@ public interface TrainingPipelineOrBuilder
    * map<string, string> labels = 15;
    */
   java.lang.String getLabelsOrThrow(java.lang.String key);
+
+  /**
+   *
+   *
+   * 
+   * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+   * TrainingPipeline will be secured by this key.
+   * Note: Model trained by this TrainingPipeline is also secured by this key if
+   * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + * + * @return Whether the encryptionSpec field is set. + */ + boolean hasEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+   * TrainingPipeline will be secured by this key.
+   * Note: Model trained by this TrainingPipeline is also secured by this key if
+   * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + * + * @return The encryptionSpec. + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec(); + /** + * + * + *
+   * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+   * TrainingPipeline will be secured by this key.
+   * Note: Model trained by this TrainingPipeline is also secured by this key if
+   * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+   * 
+ * + * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18; + */ + com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder(); } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java index d5a1dc5a8..bd427e389 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java @@ -67,68 +67,71 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "\n7google/cloud/aiplatform/v1beta1/traini" + "ng_pipeline.proto\022\037google.cloud.aiplatfo" + "rm.v1beta1\032\037google/api/field_behavior.pr" - + "oto\032\031google/api/resource.proto\032(google/c" - + "loud/aiplatform/v1beta1/io.proto\0327google" - + "/cloud/aiplatform/v1beta1/machine_resour" - + "ces.proto\032Dgoogle/cloud/aiplatform/v1bet" - + "a1/manual_batch_tuning_parameters.proto\032" - + "+google/cloud/aiplatform/v1beta1/model.p" - + "roto\0324google/cloud/aiplatform/v1beta1/pi" - + "peline_state.proto\032\034google/protobuf/stru" - + "ct.proto\032\037google/protobuf/timestamp.prot" - + "o\032\027google/rpc/status.proto\032\034google/api/a" - + "nnotations.proto\"\253\007\n\020TrainingPipeline\022\021\n" - + "\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003" - + "\340A\002\022K\n\021input_data_config\030\003 \001(\01320.google." 
- + "cloud.aiplatform.v1beta1.InputDataConfig" - + "\022%\n\030training_task_definition\030\004 \001(\tB\003\340A\002\022" - + "9\n\024training_task_inputs\030\005 \001(\0132\026.google.p" - + "rotobuf.ValueB\003\340A\002\022;\n\026training_task_meta" - + "data\030\006 \001(\0132\026.google.protobuf.ValueB\003\340A\003\022" - + "?\n\017model_to_upload\030\007 \001(\0132&.google.cloud." - + "aiplatform.v1beta1.Model\022B\n\005state\030\t \001(\0162" - + "..google.cloud.aiplatform.v1beta1.Pipeli" - + "neStateB\003\340A\003\022&\n\005error\030\n \001(\0132\022.google.rpc" - + ".StatusB\003\340A\003\0224\n\013create_time\030\013 \001(\0132\032.goog" - + "le.protobuf.TimestampB\003\340A\003\0223\n\nstart_time" - + "\030\014 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022" - + "1\n\010end_time\030\r \001(\0132\032.google.protobuf.Time" - + "stampB\003\340A\003\0224\n\013update_time\030\016 \001(\0132\032.google" - + ".protobuf.TimestampB\003\340A\003\022M\n\006labels\030\017 \003(\013" - + "2=.google.cloud.aiplatform.v1beta1.Train" - + "ingPipeline.LabelsEntry\032-\n\013LabelsEntry\022\013" - + "\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:~\352A{\n*aip" - + "latform.googleapis.com/TrainingPipeline\022" - + "Mprojects/{project}/locations/{location}" - + "/trainingPipelines/{training_pipeline}\"\311" - + "\004\n\017InputDataConfig\022H\n\016fraction_split\030\002 \001" - + "(\0132..google.cloud.aiplatform.v1beta1.Fra" - + "ctionSplitH\000\022D\n\014filter_split\030\003 \001(\0132,.goo" - + "gle.cloud.aiplatform.v1beta1.FilterSplit" - + "H\000\022L\n\020predefined_split\030\004 \001(\01320.google.cl" - + "oud.aiplatform.v1beta1.PredefinedSplitH\000" - + "\022J\n\017timestamp_split\030\005 \001(\0132/.google.cloud" - + ".aiplatform.v1beta1.TimestampSplitH\000\022J\n\017" - + "gcs_destination\030\010 \001(\0132/.google.cloud.aip" - + "latform.v1beta1.GcsDestinationH\001\022T\n\024bigq" - + "uery_destination\030\n 
\001(\01324.google.cloud.ai" - + "platform.v1beta1.BigQueryDestinationH\001\022\027" - + "\n\ndataset_id\030\001 \001(\tB\003\340A\002\022\032\n\022annotations_f" - + "ilter\030\006 \001(\t\022\035\n\025annotation_schema_uri\030\t \001" - + "(\tB\007\n\005splitB\r\n\013destination\"^\n\rFractionSp" - + "lit\022\031\n\021training_fraction\030\001 \001(\001\022\033\n\023valida" - + "tion_fraction\030\002 \001(\001\022\025\n\rtest_fraction\030\003 \001" - + "(\001\"e\n\013FilterSplit\022\034\n\017training_filter\030\001 \001" - + "(\tB\003\340A\002\022\036\n\021validation_filter\030\002 \001(\tB\003\340A\002\022" - + "\030\n\013test_filter\030\003 \001(\tB\003\340A\002\"#\n\017PredefinedS" - + "plit\022\020\n\003key\030\001 \001(\tB\003\340A\002\"q\n\016TimestampSplit" - + "\022\031\n\021training_fraction\030\001 \001(\001\022\033\n\023validatio" - + "n_fraction\030\002 \001(\001\022\025\n\rtest_fraction\030\003 \001(\001\022" - + "\020\n\003key\030\004 \001(\tB\003\340A\002B\211\001\n#com.google.cloud.a" - + "iplatform.v1beta1B\025TrainingPipelineProto" - + "P\001ZIgoogle.golang.org/genproto/googleapi" - + "s/cloud/aiplatform/v1beta1;aiplatformb\006p" - + "roto3" + + "oto\032\031google/api/resource.proto\0325google/c" + + "loud/aiplatform/v1beta1/encryption_spec." 
+ + "proto\032(google/cloud/aiplatform/v1beta1/i" + + "o.proto\0327google/cloud/aiplatform/v1beta1" + + "/machine_resources.proto\032Dgoogle/cloud/a" + + "iplatform/v1beta1/manual_batch_tuning_pa" + + "rameters.proto\032+google/cloud/aiplatform/" + + "v1beta1/model.proto\0324google/cloud/aiplat" + + "form/v1beta1/pipeline_state.proto\032\034googl" + + "e/protobuf/struct.proto\032\037google/protobuf" + + "/timestamp.proto\032\027google/rpc/status.prot" + + "o\032\034google/api/annotations.proto\"\365\007\n\020Trai" + + "ningPipeline\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014displ" + + "ay_name\030\002 \001(\tB\003\340A\002\022K\n\021input_data_config\030" + + "\003 \001(\01320.google.cloud.aiplatform.v1beta1." + + "InputDataConfig\022%\n\030training_task_definit" + + "ion\030\004 \001(\tB\003\340A\002\0229\n\024training_task_inputs\030\005" + + " \001(\0132\026.google.protobuf.ValueB\003\340A\002\022;\n\026tra" + + "ining_task_metadata\030\006 \001(\0132\026.google.proto" + + "buf.ValueB\003\340A\003\022?\n\017model_to_upload\030\007 \001(\0132" + + "&.google.cloud.aiplatform.v1beta1.Model\022" + + "B\n\005state\030\t \001(\0162..google.cloud.aiplatform" + + ".v1beta1.PipelineStateB\003\340A\003\022&\n\005error\030\n \001" + + "(\0132\022.google.rpc.StatusB\003\340A\003\0224\n\013create_ti" + + "me\030\013 \001(\0132\032.google.protobuf.TimestampB\003\340A" + + "\003\0223\n\nstart_time\030\014 \001(\0132\032.google.protobuf." 
+ + "TimestampB\003\340A\003\0221\n\010end_time\030\r \001(\0132\032.googl" + + "e.protobuf.TimestampB\003\340A\003\0224\n\013update_time" + + "\030\016 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022" + + "M\n\006labels\030\017 \003(\0132=.google.cloud.aiplatfor" + + "m.v1beta1.TrainingPipeline.LabelsEntry\022H" + + "\n\017encryption_spec\030\022 \001(\0132/.google.cloud.a" + + "iplatform.v1beta1.EncryptionSpec\032-\n\013Labe" + + "lsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:" + + "~\352A{\n*aiplatform.googleapis.com/Training" + + "Pipeline\022Mprojects/{project}/locations/{" + + "location}/trainingPipelines/{training_pi" + + "peline}\"\311\004\n\017InputDataConfig\022H\n\016fraction_" + + "split\030\002 \001(\0132..google.cloud.aiplatform.v1" + + "beta1.FractionSplitH\000\022D\n\014filter_split\030\003 " + + "\001(\0132,.google.cloud.aiplatform.v1beta1.Fi" + + "lterSplitH\000\022L\n\020predefined_split\030\004 \001(\01320." + + "google.cloud.aiplatform.v1beta1.Predefin" + + "edSplitH\000\022J\n\017timestamp_split\030\005 \001(\0132/.goo" + + "gle.cloud.aiplatform.v1beta1.TimestampSp" + + "litH\000\022J\n\017gcs_destination\030\010 \001(\0132/.google." 
+ + "cloud.aiplatform.v1beta1.GcsDestinationH" + + "\001\022T\n\024bigquery_destination\030\n \001(\01324.google" + + ".cloud.aiplatform.v1beta1.BigQueryDestin" + + "ationH\001\022\027\n\ndataset_id\030\001 \001(\tB\003\340A\002\022\032\n\022anno" + + "tations_filter\030\006 \001(\t\022\035\n\025annotation_schem" + + "a_uri\030\t \001(\tB\007\n\005splitB\r\n\013destination\"^\n\rF" + + "ractionSplit\022\031\n\021training_fraction\030\001 \001(\001\022" + + "\033\n\023validation_fraction\030\002 \001(\001\022\025\n\rtest_fra" + + "ction\030\003 \001(\001\"e\n\013FilterSplit\022\034\n\017training_f" + + "ilter\030\001 \001(\tB\003\340A\002\022\036\n\021validation_filter\030\002 " + + "\001(\tB\003\340A\002\022\030\n\013test_filter\030\003 \001(\tB\003\340A\002\"#\n\017Pr" + + "edefinedSplit\022\020\n\003key\030\001 \001(\tB\003\340A\002\"q\n\016Times" + + "tampSplit\022\031\n\021training_fraction\030\001 \001(\001\022\033\n\023" + + "validation_fraction\030\002 \001(\001\022\025\n\rtest_fracti" + + "on\030\003 \001(\001\022\020\n\003key\030\004 \001(\tB\003\340A\002B\211\001\n#com.googl" + + "e.cloud.aiplatform.v1beta1B\025TrainingPipe" + + "lineProtoP\001ZIgoogle.golang.org/genproto/" + + "googleapis/cloud/aiplatform/v1beta1;aipl" + + "atformb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -136,6 +139,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.MachineResourcesProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersProto.getDescriptor(), @@ -166,6 +170,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor 
getDescriptor() { "EndTime", "UpdateTime", "Labels", + "EncryptionSpec", }); internal_static_google_cloud_aiplatform_v1beta1_TrainingPipeline_LabelsEntry_descriptor = internal_static_google_cloud_aiplatform_v1beta1_TrainingPipeline_descriptor @@ -235,6 +240,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { descriptor, registry); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); + com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.MachineResourcesProto.getDescriptor(); com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersProto.getDescriptor(); diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrialName.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrialName.java new file mode 100644 index 000000000..9697fffbb --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrialName.java @@ -0,0 +1,257 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.aiplatform.v1beta1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class TrialName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_STUDY_TRIAL = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/studies/{study}/trials/{trial}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String study; + private final String trial; + + @Deprecated + protected TrialName() { + project = null; + location = null; + study = null; + trial = null; + } + + private TrialName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + study = Preconditions.checkNotNull(builder.getStudy()); + trial = Preconditions.checkNotNull(builder.getTrial()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getStudy() { + return study; + } + + public String getTrial() { + return trial; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static TrialName of(String project, String location, String study, String trial) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setStudy(study) + .setTrial(trial) + .build(); + } + + public static String format(String project, String location, String study, String trial) { + return newBuilder() + .setProject(project) + 
.setLocation(location) + .setStudy(study) + .setTrial(trial) + .build() + .toString(); + } + + public static TrialName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_STUDY_TRIAL.validatedMatch( + formattedString, "TrialName.parse: formattedString not in valid format"); + return of( + matchMap.get("project"), + matchMap.get("location"), + matchMap.get("study"), + matchMap.get("trial")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (TrialName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_STUDY_TRIAL.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (study != null) { + fieldMapBuilder.put("study", study); + } + if (trial != null) { + fieldMapBuilder.put("trial", trial); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_STUDY_TRIAL.instantiate( + "project", project, "location", location, "study", study, "trial", trial); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + 
if (o != null || getClass() == o.getClass()) { + TrialName that = ((TrialName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.study, that.study) + && Objects.equals(this.trial, that.trial); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(study); + h *= 1000003; + h ^= Objects.hashCode(trial); + return h; + } + + /** Builder for projects/{project}/locations/{location}/studies/{study}/trials/{trial}. */ + public static class Builder { + private String project; + private String location; + private String study; + private String trial; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getStudy() { + return study; + } + + public String getTrial() { + return trial; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setStudy(String study) { + this.study = study; + return this; + } + + public Builder setTrial(String trial) { + this.trial = trial; + return this; + } + + private Builder(TrialName trialName) { + project = trialName.project; + location = trialName.location; + study = trialName.study; + trial = trialName.trial; + } + + public TrialName build() { + return new TrialName(this); + } + } +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequest.java index 7327d82b5..823c09eb1 100644 --- 
a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequest.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequest.java @@ -195,8 +195,7 @@ public com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder getDatasetOrBuilder( *
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * Updatable fields:
    *   * `display_name`
    *   * `description`
@@ -218,8 +217,7 @@ public boolean hasUpdateMask() {
    * 
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * Updatable fields:
    *   * `display_name`
    *   * `description`
@@ -241,8 +239,7 @@ public com.google.protobuf.FieldMask getUpdateMask() {
    * 
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * Updatable fields:
    *   * `display_name`
    *   * `description`
@@ -831,8 +828,7 @@ public com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder getDatasetOrBuilder(
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -853,8 +849,7 @@ public boolean hasUpdateMask() {
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -881,8 +876,7 @@ public com.google.protobuf.FieldMask getUpdateMask() {
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -911,8 +905,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -938,8 +931,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForVal
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -970,8 +962,7 @@ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -998,8 +989,7 @@ public Builder clearUpdateMask() {
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -1020,8 +1010,7 @@ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
@@ -1046,8 +1035,7 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      * 
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * Updatable fields:
      *   * `display_name`
      *   * `description`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java
index 6e34c4c84..3a5dc293a 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java
@@ -70,8 +70,7 @@ public interface UpdateDatasetRequestOrBuilder
    * 
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * Updatable fields:
    *   * `display_name`
    *   * `description`
@@ -90,8 +89,7 @@ public interface UpdateDatasetRequestOrBuilder
    * 
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * Updatable fields:
    *   * `display_name`
    *   * `description`
@@ -110,8 +108,7 @@ public interface UpdateDatasetRequestOrBuilder
    * 
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * Updatable fields:
    *   * `display_name`
    *   * `description`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java
index d9f0a4fbb..3e01d02c6 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java
@@ -194,6 +194,8 @@ public com.google.cloud.aiplatform.v1beta1.EndpointOrBuilder getEndpointOrBuilde
    *
    * 
    * Required. The update mask applies to the resource.
+   * See
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -210,6 +212,8 @@ public boolean hasUpdateMask() { * *
    * Required. The update mask applies to the resource.
+   * See
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -226,6 +230,8 @@ public com.google.protobuf.FieldMask getUpdateMask() { * *
    * Required. The update mask applies to the resource.
+   * See
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -810,6 +816,8 @@ public com.google.cloud.aiplatform.v1beta1.EndpointOrBuilder getEndpointOrBuilde * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -825,6 +833,8 @@ public boolean hasUpdateMask() { * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -846,6 +856,8 @@ public com.google.protobuf.FieldMask getUpdateMask() { * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -869,6 +881,8 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -889,6 +903,8 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForVal * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -914,6 +930,8 @@ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -935,6 +953,8 @@ public Builder clearUpdateMask() { * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -950,6 +970,8 @@ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -969,6 +991,8 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { * *
      * Required. The update mask applies to the resource.
+     * See
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java index 615096d7a..d381f83e8 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java @@ -69,6 +69,8 @@ public interface UpdateEndpointRequestOrBuilder * *
    * Required. The update mask applies to the resource.
+   * See
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -82,6 +84,8 @@ public interface UpdateEndpointRequestOrBuilder * *
    * Required. The update mask applies to the resource.
+   * See
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -95,6 +99,8 @@ public interface UpdateEndpointRequestOrBuilder * *
    * Required. The update mask applies to the resource.
+   * See
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java index bc165bd33..62902fada 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java @@ -193,9 +193,7 @@ public com.google.cloud.aiplatform.v1beta1.ModelOrBuilder getModelOrBuilder() { *
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //developers.google.com/protocol-buffers
-   * // /docs/reference/google.protobuf#fieldmask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -213,9 +211,7 @@ public boolean hasUpdateMask() { *
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //developers.google.com/protocol-buffers
-   * // /docs/reference/google.protobuf#fieldmask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -233,9 +229,7 @@ public com.google.protobuf.FieldMask getUpdateMask() { *
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //developers.google.com/protocol-buffers
-   * // /docs/reference/google.protobuf#fieldmask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -820,9 +814,7 @@ public com.google.cloud.aiplatform.v1beta1.ModelOrBuilder getModelOrBuilder() { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -839,9 +831,7 @@ public boolean hasUpdateMask() { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -864,9 +854,7 @@ public com.google.protobuf.FieldMask getUpdateMask() { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -891,9 +879,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -915,9 +901,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForVal *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -944,9 +928,7 @@ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -969,9 +951,7 @@ public Builder clearUpdateMask() { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -988,9 +968,7 @@ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -1011,9 +989,7 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { *
      * Required. The update mask applies to the resource.
      * For the `FieldMask` definition, see
-     * [FieldMask](https:
-     * //developers.google.com/protocol-buffers
-     * // /docs/reference/google.protobuf#fieldmask).
+     * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
      * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequestOrBuilder.java index 89739f46b..c05760803 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequestOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequestOrBuilder.java @@ -70,9 +70,7 @@ public interface UpdateModelRequestOrBuilder *
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //developers.google.com/protocol-buffers
-   * // /docs/reference/google.protobuf#fieldmask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -87,9 +85,7 @@ public interface UpdateModelRequestOrBuilder *
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //developers.google.com/protocol-buffers
-   * // /docs/reference/google.protobuf#fieldmask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; @@ -104,9 +100,7 @@ public interface UpdateModelRequestOrBuilder *
    * Required. The update mask applies to the resource.
    * For the `FieldMask` definition, see
-   * [FieldMask](https:
-   * //developers.google.com/protocol-buffers
-   * // /docs/reference/google.protobuf#fieldmask).
+   * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
    * 
* * .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpec.java index ff1a3d185..8833bd2b2 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpec.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpec.java @@ -331,11 +331,11 @@ public com.google.cloud.aiplatform.v1beta1.PythonPackageSpec getPythonPackageSpe * * *
-   * Required. Immutable. The specification of a single machine.
+   * Optional. Immutable. The specification of a single machine.
    * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * * * @return Whether the machineSpec field is set. @@ -348,11 +348,11 @@ public boolean hasMachineSpec() { * * *
-   * Required. Immutable. The specification of a single machine.
+   * Optional. Immutable. The specification of a single machine.
    * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * * * @return The machineSpec. @@ -367,11 +367,11 @@ public com.google.cloud.aiplatform.v1beta1.MachineSpec getMachineSpec() { * * *
-   * Required. Immutable. The specification of a single machine.
+   * Optional. Immutable. The specification of a single machine.
    * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ @java.lang.Override @@ -385,10 +385,10 @@ public com.google.cloud.aiplatform.v1beta1.MachineSpecOrBuilder getMachineSpecOr * * *
-   * Required. The number of worker replicas to use for this worker pool.
+   * Optional. The number of worker replicas to use for this worker pool.
    * 
* - * int64 replica_count = 2 [(.google.api.field_behavior) = REQUIRED]; + * int64 replica_count = 2 [(.google.api.field_behavior) = OPTIONAL]; * * @return The replicaCount. */ @@ -1337,11 +1337,11 @@ public Builder clearPythonPackageSpec() { * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * * * @return Whether the machineSpec field is set. @@ -1353,11 +1353,11 @@ public boolean hasMachineSpec() { * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * * * @return The machineSpec. @@ -1375,11 +1375,11 @@ public com.google.cloud.aiplatform.v1beta1.MachineSpec getMachineSpec() { * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ public Builder setMachineSpec(com.google.cloud.aiplatform.v1beta1.MachineSpec value) { @@ -1399,11 +1399,11 @@ public Builder setMachineSpec(com.google.cloud.aiplatform.v1beta1.MachineSpec va * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ public Builder setMachineSpec( @@ -1421,11 +1421,11 @@ public Builder setMachineSpec( * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ public Builder mergeMachineSpec(com.google.cloud.aiplatform.v1beta1.MachineSpec value) { @@ -1449,11 +1449,11 @@ public Builder mergeMachineSpec(com.google.cloud.aiplatform.v1beta1.MachineSpec * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ public Builder clearMachineSpec() { @@ -1471,11 +1471,11 @@ public Builder clearMachineSpec() { * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ public com.google.cloud.aiplatform.v1beta1.MachineSpec.Builder getMachineSpecBuilder() { @@ -1487,11 +1487,11 @@ public com.google.cloud.aiplatform.v1beta1.MachineSpec.Builder getMachineSpecBui * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ public com.google.cloud.aiplatform.v1beta1.MachineSpecOrBuilder getMachineSpecOrBuilder() { @@ -1507,11 +1507,11 @@ public com.google.cloud.aiplatform.v1beta1.MachineSpecOrBuilder getMachineSpecOr * * *
-     * Required. Immutable. The specification of a single machine.
+     * Optional. Immutable. The specification of a single machine.
      * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ private com.google.protobuf.SingleFieldBuilderV3< @@ -1536,10 +1536,10 @@ public com.google.cloud.aiplatform.v1beta1.MachineSpecOrBuilder getMachineSpecOr * * *
-     * Required. The number of worker replicas to use for this worker pool.
+     * Optional. The number of worker replicas to use for this worker pool.
      * 
* - * int64 replica_count = 2 [(.google.api.field_behavior) = REQUIRED]; + * int64 replica_count = 2 [(.google.api.field_behavior) = OPTIONAL]; * * @return The replicaCount. */ @@ -1551,10 +1551,10 @@ public long getReplicaCount() { * * *
-     * Required. The number of worker replicas to use for this worker pool.
+     * Optional. The number of worker replicas to use for this worker pool.
      * 
* - * int64 replica_count = 2 [(.google.api.field_behavior) = REQUIRED]; + * int64 replica_count = 2 [(.google.api.field_behavior) = OPTIONAL]; * * @param value The replicaCount to set. * @return This builder for chaining. @@ -1569,10 +1569,10 @@ public Builder setReplicaCount(long value) { * * *
-     * Required. The number of worker replicas to use for this worker pool.
+     * Optional. The number of worker replicas to use for this worker pool.
      * 
* - * int64 replica_count = 2 [(.google.api.field_behavior) = REQUIRED]; + * int64 replica_count = 2 [(.google.api.field_behavior) = OPTIONAL]; * * @return This builder for chaining. */ diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpecOrBuilder.java index 0d65828a9..5ec0c9b05 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpecOrBuilder.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/WorkerPoolSpecOrBuilder.java @@ -97,11 +97,11 @@ public interface WorkerPoolSpecOrBuilder * * *
-   * Required. Immutable. The specification of a single machine.
+   * Optional. Immutable. The specification of a single machine.
    * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * * * @return Whether the machineSpec field is set. @@ -111,11 +111,11 @@ public interface WorkerPoolSpecOrBuilder * * *
-   * Required. Immutable. The specification of a single machine.
+   * Optional. Immutable. The specification of a single machine.
    * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * * * @return The machineSpec. @@ -125,11 +125,11 @@ public interface WorkerPoolSpecOrBuilder * * *
-   * Required. Immutable. The specification of a single machine.
+   * Optional. Immutable. The specification of a single machine.
    * 
* * - * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; + * .google.cloud.aiplatform.v1beta1.MachineSpec machine_spec = 1 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * */ com.google.cloud.aiplatform.v1beta1.MachineSpecOrBuilder getMachineSpecOrBuilder(); @@ -138,10 +138,10 @@ public interface WorkerPoolSpecOrBuilder * * *
-   * Required. The number of worker replicas to use for this worker pool.
+   * Optional. The number of worker replicas to use for this worker pool.
    * 
* - * int64 replica_count = 2 [(.google.api.field_behavior) = REQUIRED]; + * int64 replica_count = 2 [(.google.api.field_behavior) = OPTIONAL]; * * @return The replicaCount. */ diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/XraiAttribution.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/XraiAttribution.java index c4d0f45dd..c5904162b 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/XraiAttribution.java +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/XraiAttribution.java @@ -26,7 +26,7 @@ * attributions to segmented regions, taking advantage of the model's fully * differentiable structure. Refer to this paper for more details: * https://arxiv.org/abs/1906.02825 - * Only supports image Models ([modality][InputMetadata.modality] is IMAGE). + * Supported only by image Models. *
* * Protobuf type {@code google.cloud.aiplatform.v1beta1.XraiAttribution} @@ -391,7 +391,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * attributions to segmented regions, taking advantage of the model's fully * differentiable structure. Refer to this paper for more details: * https://arxiv.org/abs/1906.02825 - * Only supports image Models ([modality][InputMetadata.modality] is IMAGE). + * Supported only by image Models. *
* * Protobuf type {@code google.cloud.aiplatform.v1beta1.XraiAttribution} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto index 0adbda7f2..f763df636 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/batch_prediction_job.proto @@ -19,6 +19,7 @@ package google.cloud.aiplatform.v1beta1; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/aiplatform/v1beta1/completion_stats.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/explanation.proto"; import "google/cloud/aiplatform/v1beta1/io.proto"; import "google/cloud/aiplatform/v1beta1/job_state.proto"; @@ -50,7 +51,7 @@ message BatchPredictionJob { message InputConfig { // Required. The source of the input. oneof source { - // The Google Cloud Storage location for the input instances. + // The Cloud Storage location for the input instances. GcsSource gcs_source = 2; // The BigQuery location of the input table. @@ -73,7 +74,7 @@ message BatchPredictionJob { message OutputConfig { // Required. The destination of the output. oneof destination { - // The Google Cloud Storage location of the directory where the output is + // The Cloud Storage location of the directory where the output is // to be written to. In the given directory a new directory is created. // Its name is `prediction--`, // where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. @@ -129,7 +130,7 @@ message BatchPredictionJob { message OutputInfo { // The output location into which prediction output is written. oneof output_location { - // Output only. 
The full path of the Google Cloud Storage directory created, into which + // Output only. The full path of the Cloud Storage directory created, into which // the prediction output is written. string gcs_output_directory = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -192,29 +193,30 @@ message BatchPredictionJob { // the tuning itself). ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(google.api.field_behavior) = IMMUTABLE]; - // Generate explanation along with the batch prediction results. + // Generate explanation with the batch prediction results. // - // When it's true, the batch prediction output will change based on the - // [output format][BatchPredictionJob.output_config.predictions_format]: + // When set to `true`, the batch prediction output changes based on the + // `predictions_format` field of the + // [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object: // - // * `bigquery`: output will include a column named `explanation`. The value + // * `bigquery`: output includes a column named `explanation`. The value // is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. - // * `jsonl`: The JSON objects on each line will include an additional entry + // * `jsonl`: The JSON objects on each line include an additional entry // keyed `explanation`. The value of the entry is a JSON object that // conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. // * `csv`: Generating explanations for CSV format is not supported. + // + // If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be + // populated. bool generate_explanation = 23; - // Explanation configuration for this BatchPredictionJob. Can only be - // specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. 
It's invalid to - // specified it with generate_explanation set to false or unset. + // Explanation configuration for this BatchPredictionJob. Can be + // specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. // // This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of - // [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of - // [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of - // [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding - // [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for - // this Model is not allowed. + // [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the + // [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of + // the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited. ExplanationSpec explanation_spec = 25; // Output only. Information further describing the output of this job. @@ -266,4 +268,9 @@ message BatchPredictionJob { // // See https://goo.gl/xmQnxf for more information and examples of labels. map labels = 19; + + // Customer-managed encryption key options for a BatchPredictionJob. If this + // is set, then all resources created by the BatchPredictionJob will be + // encrypted with the provided encryption key. 
+ EncryptionSpec encryption_spec = 24; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/custom_job.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/custom_job.proto index 8fd3649a6..f50e56ed7 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/custom_job.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/custom_job.proto @@ -18,6 +18,7 @@ package google.cloud.aiplatform.v1beta1; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/env_var.proto"; import "google/cloud/aiplatform/v1beta1/io.proto"; import "google/cloud/aiplatform/v1beta1/job_state.proto"; @@ -82,6 +83,11 @@ message CustomJob { // // See https://goo.gl/xmQnxf for more information and examples of labels. map labels = 11; + + // Customer-managed encryption key options for a CustomJob. If this is set, + // then all resources created by the CustomJob will be encrypted with the + // provided encryption key. + EncryptionSpec encryption_spec = 12; } // Represents the spec of a CustomJob. @@ -94,39 +100,40 @@ message CustomJobSpec { // Specifies the service account for workload run-as account. // Users submitting jobs must have act-as permission on this run-as account. + // If unspecified, the AI Platform Custom Code Service Agent for the + // CustomJob's project is used. string service_account = 4; // The full name of the Compute Engine // [network](/compute/docs/networks-and-firewalls#networks) to which the Job - // should be peered. For example, projects/12345/global/networks/myVPC. - // - // [Format](https: - // //cloud.google.com/compute/docs/reference/rest/v1/networks/insert) - // is of the form projects/{project}/global/networks/{network}. 
- // Where {project} is a project number, as in '12345', and {network} is + // should be peered. For example, `projects/12345/global/networks/myVPC`. + // [Format](/compute/docs/reference/rest/v1/networks/insert) + // is of the form `projects/{project}/global/networks/{network}`. + // Where {project} is a project number, as in `12345`, and {network} is a // network name. // // Private services access must already be configured for the network. If left // unspecified, the job is not peered with any network. string network = 5; - // The Google Cloud Storage location to store the output of this CustomJob or + // The Cloud Storage location to store the output of this CustomJob or // HyperparameterTuningJob. For HyperparameterTuningJob, - // [base_output_directory][CustomJob.job_spec.base_output_directory] of + // the baseOutputDirectory of // each child CustomJob backing a Trial is set to a subdirectory of name - // [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's - // - // [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory]. + // [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's + // baseOutputDirectory. // - // Following AI Platform environment variables will be passed to containers or - // python modules when this field is set: + // The following AI Platform environment variables will be passed to + // containers or python modules when this field is set: // // For CustomJob: + // // * AIP_MODEL_DIR = `/model/` // * AIP_CHECKPOINT_DIR = `/checkpoints/` // * AIP_TENSORBOARD_LOG_DIR = `/logs/` // // For CustomJob backing a Trial of HyperparameterTuningJob: + // // * AIP_MODEL_DIR = `//model/` // * AIP_CHECKPOINT_DIR = `//checkpoints/` // * AIP_TENSORBOARD_LOG_DIR = `//logs/` @@ -144,14 +151,14 @@ message WorkerPoolSpec { PythonPackageSpec python_package_spec = 7; } - // Required. Immutable. The specification of a single machine. + // Optional. Immutable. 
The specification of a single machine. MachineSpec machine_spec = 1 [ - (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = OPTIONAL, (google.api.field_behavior) = IMMUTABLE ]; - // Required. The number of worker replicas to use for this worker pool. - int64 replica_count = 2 [(google.api.field_behavior) = REQUIRED]; + // Optional. The number of worker replicas to use for this worker pool. + int64 replica_count = 2 [(google.api.field_behavior) = OPTIONAL]; // Disk spec. DiskSpec disk_spec = 5; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/data_labeling_job.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/data_labeling_job.proto index 6d2410d9c..41a4cb5d7 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/data_labeling_job.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/data_labeling_job.proto @@ -19,6 +19,7 @@ package google.cloud.aiplatform.v1beta1; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/aiplatform/v1beta1/accelerator_type.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/job_state.proto"; import "google/cloud/aiplatform/v1beta1/specialist_pool.proto"; import "google/protobuf/struct.proto"; @@ -126,13 +127,20 @@ message DataLabelingJob { // The SpecialistPools' resource names associated with this job. repeated string specialist_pools = 16; - // Paramaters that configure active learning pipeline. Active learning will + // Customer-managed encryption key spec for a DataLabelingJob. If set, this + // DataLabelingJob will be secured by this key. + // + // Note: Annotations created in the DataLabelingJob are associated with + // the EncryptionSpec of the Dataset they are exported to. 
+ EncryptionSpec encryption_spec = 20; + + // Parameters that configure active learning pipeline. Active learning will // label the data incrementally via several iterations. For every iteration, // it will select a batch of data based on the sampling strategy. ActiveLearningConfig active_learning_config = 21; } -// Paramaters that configure active learning pipeline. Active learning will +// Parameters that configure active learning pipeline. Active learning will // label the data incrementally by several iterations. For every iteration, it // will select a batch of data based on the sampling strategy. message ActiveLearningConfig { diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset.proto index 26ba6c78b..6a7e46890 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset.proto @@ -18,6 +18,7 @@ package google.cloud.aiplatform.v1beta1; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/io.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; @@ -78,6 +79,10 @@ message Dataset { // * "aiplatform.googleapis.com/dataset_metadata_schema": output only, its // value is the [metadata_schema's][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] title. map labels = 7; + + // Customer-managed encryption key spec for a Dataset. If set, this Dataset + // and all sub-resources of this Dataset will be secured by this key. 
+ EncryptionSpec encryption_spec = 11; } // Describes the location from where we import data into a Dataset, together diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset_service.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset_service.proto index 069f251b7..17e9eb89d 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset_service.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/dataset_service.proto @@ -181,9 +181,7 @@ message UpdateDatasetRequest { // Required. The update mask applies to the resource. // For the `FieldMask` definition, see - // - // [FieldMask](https: - // //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask). + // [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). // Updatable fields: // // * `display_name` @@ -203,7 +201,19 @@ message ListDatasetsRequest { } ]; - // The standard list filter. + // An expression for filtering the results of the request. For field names + // both snake_case and camelCase are supported. + // + // * `display_name`: supports = and != + // * `metadata_schema_uri`: supports = and != + // * `labels` supports general map functions that is: + // * `labels.key=value` - key:value equality + // * `labels.key:* or labels:key - key existence + // * A key including a space must be quoted. `labels."a key"`. + // + // Some examples: + // * `displayName="myDisplayName"` + // * `labels.myKey="myValue"` string filter = 2; // The standard list page size. @@ -219,7 +229,7 @@ message ListDatasetsRequest { // Use "desc" after a field name for descending. 
// Supported fields: // * `display_name` - // * `data_item_count` * `create_time` + // * `create_time` // * `update_time` string order_by = 6; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/encryption_spec.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/encryption_spec.proto new file mode 100644 index 000000000..b2d9f3f22 --- /dev/null +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/encryption_spec.proto @@ -0,0 +1,36 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.aiplatform.v1beta1; + +import "google/api/field_behavior.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/aiplatform/v1beta1;aiplatform"; +option java_multiple_files = true; +option java_outer_classname = "EncryptionSpecProto"; +option java_package = "com.google.cloud.aiplatform.v1beta1"; + +// Represents a customer-managed encryption key spec that can be applied to +// a top-level resource. +message EncryptionSpec { + // Required. The Cloud KMS resource identifier of the customer managed encryption key + // used to protect a resource. Has the form: + // `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`. 
+ // The key needs to be in the same region as where the compute resource is + // created. + string kms_key_name = 1 [(google.api.field_behavior) = REQUIRED]; +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint.proto index 6138ff501..19f82c5f5 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint.proto @@ -18,6 +18,7 @@ package google.cloud.aiplatform.v1beta1; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/explanation.proto"; import "google/cloud/aiplatform/v1beta1/machine_resources.proto"; import "google/protobuf/timestamp.proto"; @@ -80,6 +81,11 @@ message Endpoint { // Output only. Timestamp when this Endpoint was last updated. google.protobuf.Timestamp update_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Customer-managed encryption key spec for an Endpoint. If set, this + // Endpoint and all sub-resources of this Endpoint will be secured by + // this key. + EncryptionSpec encryption_spec = 10; } // A deployment of a Model. Endpoints contain one or more DeployedModels. @@ -141,7 +147,7 @@ message DeployedModel { // If true, the container of the DeployedModel instances will send `stderr` // and `stdout` streams to Stackdriver Logging. // - // Only supported for custom-trained Models and AutoML Tables Models. + // Only supported for custom-trained Models and AutoML Tabular Models. 
bool enable_container_logging = 12; // These logs are like standard server access logs, containing diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint_service.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint_service.proto index 87c673cc8..8d8284b6a 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint_service.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/endpoint_service.proto @@ -161,18 +161,16 @@ message ListEndpointsRequest { // both snake_case and camelCase are supported. // // * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID, - // ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. - // * `display_name` supports =, != and regex() - // (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax) + // i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name]. + // * `display_name` supports = and, != // * `labels` supports general map functions that is: - // `labels.key=value` - key:value equality - // `labels.key:* or labels:key - key existence - // A key including a space must be quoted. `labels."a key"`. + // * `labels.key=value` - key:value equality + // * `labels.key:* or labels:key - key existence + // * A key including a space must be quoted. `labels."a key"`. // // Some examples: // * `endpoint=1` // * `displayName="myDisplayName"` - // * `regex(display_name, "^A") -> The display name starts with an A. // * `labels.myKey="myValue"` string filter = 2 [(google.api.field_behavior) = OPTIONAL]; @@ -205,6 +203,8 @@ message UpdateEndpointRequest { Endpoint endpoint = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The update mask applies to the resource. 
+ // See + // [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/env_var.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/env_var.proto index c4e1874e5..2f96fc1bc 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/env_var.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/env_var.proto @@ -29,12 +29,12 @@ message EnvVar { // Required. Name of the environment variable. Must be a valid C identifier. string name = 1 [(google.api.field_behavior) = REQUIRED]; - // Variables that reference a $(VAR_NAME) are expanded + // Required. Variables that reference a $(VAR_NAME) are expanded // using the previous defined environment variables in the container and // any service environment variables. If a variable cannot be resolved, // the reference in the input string will be unchanged. The $(VAR_NAME) // syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped // references will never be expanded, regardless of whether the variable // exists or not. 
- string value = 2; + string value = 2 [(google.api.field_behavior) = REQUIRED]; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation.proto index 9bc020786..a75ee628f 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation.proto @@ -48,7 +48,7 @@ message Explanation { // Aggregated explanation metrics for a Model over a set of instances. message ModelExplanation { - // Output only. Aggregated attributions explaning the Model's prediction outputs over the + // Output only. Aggregated attributions explaining the Model's prediction outputs over the // set of instances. The attributions are grouped by outputs. // // For Models that predict only one output, such as regression Models that @@ -127,8 +127,8 @@ message Attribution { // of the output vector. Indices start from 0. repeated int32 output_index = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the - // predicted class name by a multi-classification Model. + // Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example, + // the predicted class name by a multi-classification Model. // // This field is only populated iff the Model predicts display names as a // separate field along with the explained output. The predicted display name @@ -139,22 +139,20 @@ message Attribution { // Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the // explanation method. 
Lower value means more precise attributions. // - // * For [Sampled Shapley - // attribution][ExplanationParameters.sampled_shapley_attribution], increasing - // [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error. - // * For [Integrated Gradients - // attribution][ExplanationParameters.integrated_gradients_attribution], - // increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may + // * For Sampled Shapley + // [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution], + // increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce + // the error. + // * For Integrated Gradients + // [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution], + // increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might // reduce the error. - // * For [XRAI - // attribution][ExplanationParameters.xrai_attribution], increasing - // [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error. + // * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution], + // increasing + // [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error. // - // Refer to AI Explanations Whitepaper for more details: - // - // https: - // //storage.googleapis.com/cloud-ai-whitep - // // apers/AI%20Explainability%20Whitepaper.pdf + // See [this introduction](/ai-platform-unified/docs/explainable-ai/overview) + // for more information. double approximation_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Name of the explain output. 
Specified as the key in @@ -204,7 +202,7 @@ message ExplanationParameters { int32 top_k = 4; // If populated, only returns attributions that have - // [output_index][Attributions.output_index] contained in output_indices. It + // [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It // must be an ndarray of integers, with the same shape of the output it's // explaining. // @@ -253,7 +251,7 @@ message IntegratedGradientsAttribution { // differentiable structure. Refer to this paper for more details: // https://arxiv.org/abs/1906.02825 // -// Only supports image Models ([modality][InputMetadata.modality] is IMAGE). +// Supported only by image Models. message XraiAttribution { // Required. The number of steps for approximating the path integral. // A good value to start is 50 and gradually increase until the @@ -285,12 +283,9 @@ message SmoothGradConfig { // This is a single float value and will be used to add noise to all the // features. Use this field when all features are normalized to have the // same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where - // features are normalized to have 0-mean and 1-variance. Refer to - // this doc for more details about normalization: - // - // https: - // //developers.google.com/machine-learning - // // /data-prep/transform/normalization. + // features are normalized to have 0-mean and 1-variance. For more details + // about normalization: + // https://tinyurl.com/dgc-normalization. // // For best results the recommended value is about 10% - 20% of the standard // deviation of the input feature. Refer to section 3.2 of the SmoothGrad @@ -338,3 +333,38 @@ message FeatureNoiseSigma { // Noise sigma per feature. No noise is added to features that are not set. 
repeated NoiseSigmaForFeature noise_sigma = 1; } + +// The [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] entries that can be overridden at [online +// explanation][PredictionService.Explain][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time. +message ExplanationSpecOverride { + // The parameters to be overridden. Note that the + // [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified, + // no parameter is overridden. + ExplanationParameters parameters = 1; + + // The metadata to be overridden. If not specified, no metadata is overridden. + ExplanationMetadataOverride metadata = 2; +} + +// The [ExplanationMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata] entries that can be overridden at +// [online explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time. +message ExplanationMetadataOverride { + // The [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata] entries to be + // overridden. + message InputMetadataOverride { + // Baseline inputs for this feature. + // + // This overrides the `input_baseline` field of the + // [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata] + // object of the corresponding feature's input metadata. If it's not + // specified, the original baselines are not overridden. + repeated google.protobuf.Value input_baselines = 1; + } + + // Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features. + // The key is the name of the feature to be overridden. The keys specified + // here must exist in the input metadata to be overridden. If a feature is + // not specified here, the corresponding feature's input metadata is not + // overridden. 
+ map inputs = 1 [(google.api.field_behavior) = REQUIRED]; +} diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation_metadata.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation_metadata.proto index eae647df6..985e4478c 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation_metadata.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/explanation_metadata.proto @@ -367,8 +367,8 @@ message ExplanationMetadata { // of the empty feature is chosen by AI Platform. // // For AI Platform provided Tensorflow images, the key can be any friendly - // name of the feature . Once specified, [ - // featureAttributions][Attribution.feature_attributions] will be keyed by + // name of the feature. Once specified, + // [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by // this key (if not grouped with another feature). 
// // For custom images, the key must match with the key in diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/hyperparameter_tuning_job.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/hyperparameter_tuning_job.proto index c8d8e5d3c..fc6181e95 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/hyperparameter_tuning_job.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/hyperparameter_tuning_job.proto @@ -19,6 +19,7 @@ package google.cloud.aiplatform.v1beta1; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/aiplatform/v1beta1/custom_job.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/job_state.proto"; import "google/cloud/aiplatform/v1beta1/study.proto"; import "google/protobuf/timestamp.proto"; @@ -99,4 +100,9 @@ message HyperparameterTuningJob { // // See https://goo.gl/xmQnxf for more information and examples of labels. map labels = 16; + + // Customer-managed encryption key options for a HyperparameterTuningJob. + // If this is set, then all resources created by the HyperparameterTuningJob + // will be encrypted with the provided encryption key. + EncryptionSpec encryption_spec = 17; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/io.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/io.proto index 16f883db0..83e0f5dfe 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/io.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/io.proto @@ -51,10 +51,16 @@ message BigQuerySource { // The BigQuery location for the output content. 
message BigQueryDestination { - // Required. BigQuery URI to a project, up to 2000 characters long. + // Required. BigQuery URI to a project or table, up to 2000 characters long. + // + // When only project is specified, Dataset and Table is created. + // When full table reference is specified, Dataset must exist and table must + // not exist. + // // Accepted forms: // - // * BigQuery path. For example: `bq://projectId`. + // * BigQuery path. For example: + // `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`. string output_uri = 1 [(google.api.field_behavior) = REQUIRED]; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/job_service.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/job_service.proto index b75456f57..01bf83e07 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/job_service.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/job_service.proto @@ -24,9 +24,11 @@ import "google/cloud/aiplatform/v1beta1/batch_prediction_job.proto"; import "google/cloud/aiplatform/v1beta1/custom_job.proto"; import "google/cloud/aiplatform/v1beta1/data_labeling_job.proto"; import "google/cloud/aiplatform/v1beta1/hyperparameter_tuning_job.proto"; +import "google/cloud/aiplatform/v1beta1/operation.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/aiplatform/v1beta1;aiplatform"; option java_multiple_files = true; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/machine_resources.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/machine_resources.proto index e0acfd6e8..3cb131c24 100644 --- 
a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/machine_resources.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/machine_resources.proto @@ -27,41 +27,13 @@ option java_package = "com.google.cloud.aiplatform.v1beta1"; // Specification of a single machine. message MachineSpec { - // Immutable. The type of the machine. - // Following machine types are supported: + // Immutable. The type of the machine. For the machine types supported for prediction, + // see https://tinyurl.com/aip-docs/predictions/machine-types. + // For machine types supported for creating a custom training job, see + // https://tinyurl.com/aip-docs/training/configure-compute. // - // * `n1-standard-2` - // - // * `n1-standard-4` - // - // * `n1-standard-8` - // - // * `n1-standard-16` - // - // * `n1-standard-32` - // - // * `n1-highmem-2` - // - // * `n1-highmem-4` - // - // * `n1-highmem-8` - // - // * `n1-highmem-16` - // - // * `n1-highmem-32` - // - // * `n1-highcpu-2` - // - // * `n1-highcpu-4` - // - // * `n1-highcpu-8` - // - // * `n1-highcpu-16` - // - // * `n1-highcpu-32` - // - // When used for [DeployedMode][] this field is optional and the default value - // is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of + // For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default + // value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of // [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required. string machine_type = 1 [(google.api.field_behavior) = IMMUTABLE]; @@ -157,7 +129,7 @@ message ResourcesConsumed { // Represents the spec of disk options. 
message DiskSpec { - // Type of the boot disk (default is "pd-standard"). + // Type of the boot disk (default is "pd-ssd"). // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or // "pd-standard" (Persistent Disk Hard Disk Drive). string boot_disk_type = 1; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migratable_resource.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migratable_resource.proto index d0b0e9cf9..411105027 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migratable_resource.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migratable_resource.proto @@ -54,6 +54,7 @@ message MigratableResource { // The ml.googleapis.com endpoint that this model Version currently lives // in. // Example values: + // // * ml.googleapis.com // * us-centrall-ml.googleapis.com // * europe-west4-ml.googleapis.com diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migration_service.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migration_service.proto index 273eb9a26..e96f96e2f 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migration_service.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/migration_service.proto @@ -25,6 +25,7 @@ import "google/api/resource.proto"; import "google/cloud/aiplatform/v1beta1/migratable_resource.proto"; import "google/cloud/aiplatform/v1beta1/operation.proto"; import "google/longrunning/operations.proto"; +import "google/rpc/status.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/aiplatform/v1beta1;aiplatform"; option java_multiple_files = true; @@ -83,6 +84,17 @@ message SearchMigratableResourcesRequest { // The standard page token. 
string page_token = 3; + + // Supported filters are: + // * Resource type: For a specific type of MigratableResource. + // * `ml_engine_model_version:*` + // * `automl_model:*`, + // * `automl_dataset:*` + // * `data_labeling_dataset:*`. + // * Migrated or not: Filter migrated resource or not by last_migrate_time. + // * `last_migrate_time:*` will filter migrated resources. + // * `NOT last_migrate_time:*` will filter not yet migrated resource. + string filter = 4; } // Response message for [MigrationService.SearchMigratableResources][google.cloud.aiplatform.v1beta1.MigrationService.SearchMigratableResources]. @@ -265,6 +277,35 @@ message MigrateResourceResponse { // Runtime operation information for [MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1beta1.MigrationService.BatchMigrateResources]. message BatchMigrateResourcesOperationMetadata { + // Represents a partial result in batch migration operation for one + // [MigrateResourceRequest][google.cloud.aiplatform.v1beta1.MigrateResourceRequest]. + message PartialResult { + // If the resource's migration is ongoing, none of the result will be set. + // If the resource's migration is finished, either error or one of the + // migrated resource name will be filled. + oneof result { + // The error result of the migration request in case of failure. + google.rpc.Status error = 2; + + // Migrated model resource name. + string model = 3 [(google.api.resource_reference) = { + type: "aiplatform.googleapis.com/Model" + }]; + + // Migrated dataset resource name. + string dataset = 4 [(google.api.resource_reference) = { + type: "aiplatform.googleapis.com/Dataset" + }]; + } + + // It's the same as the value in + // [MigrateResourceRequest.migrate_resource_requests][]. + MigrateResourceRequest request = 1; + } + // The common part of the operation metadata. GenericOperationMetadata generic_metadata = 1; + + // Partial results that reflect the latest migration operation progress. 
+ repeated PartialResult partial_results = 2; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model.proto index 6e557a620..e69027e90 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model.proto @@ -20,6 +20,7 @@ import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/aiplatform/v1beta1/dataset.proto"; import "google/cloud/aiplatform/v1beta1/deployed_model_ref.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/env_var.proto"; import "google/cloud/aiplatform/v1beta1/explanation.proto"; import "google/protobuf/struct.proto"; @@ -196,6 +197,10 @@ message Model { // Each instance is a single row in BigQuery. Uses // [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source]. // + // * `file-list` + // Each line of the file is the location of an instance to process, uses + // `gcs_source` field of the + // [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object. // // // If this Model doesn't support any of these formats it means it cannot be @@ -248,18 +253,20 @@ message Model { // Model could have been deployed to Endpoints in different Locations. repeated DeployedModelRef deployed_models = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. The default explanation specification for this Model. + // The default explanation specification for this Model. // - // Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain] - // after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated. 
+ // The Model can be used for [requesting + // explanation][PredictionService.Explain] after being + // [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated. + // The Model can be used for [batch + // explanation][BatchPredictionJob.generate_explanation] iff it is populated. // // All fields of the explanation_spec can be overridden by // [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of - // [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model]. - // - // This field is populated only for tabular AutoML Models. - // Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported. - ExplanationSpec explanation_spec = 23 [(google.api.field_behavior) = OUTPUT_ONLY]; + // [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or + // [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of + // [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. + ExplanationSpec explanation_spec = 23; // Used to perform consistent read-modify-write updates. If not set, a blind // "overwrite" update happens. @@ -273,6 +280,10 @@ message Model { // // See https://goo.gl/xmQnxf for more information and examples of labels. map labels = 17; + + // Customer-managed encryption key spec for a Model. If set, this + // Model and all sub-resources of this Model will be secured by this key. 
+ EncryptionSpec encryption_spec = 24; } // Contains the schemata used in Model's predictions and explanations via @@ -469,9 +480,8 @@ message ModelContainerSpec { // // For example, if you set this field to `/foo`, then when AI Platform // receives a prediction request, it forwards the request body in a POST - // request to the following URL on the container: - // localhost:PORT/foo - // PORT refers to the first value of this `ModelContainerSpec`'s + // request to the `/foo` path on the port of your container specified by the + // first value of this `ModelContainerSpec`'s // [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. // // If you don't specify this field, it defaults to the following value when @@ -499,9 +509,8 @@ message ModelContainerSpec { // checks](https://tinyurl.com/cust-cont-reqs#checks). // // For example, if you set this field to `/bar`, then AI Platform - // intermittently sends a GET request to the following URL on the container: - // localhost:PORT/bar - // PORT refers to the first value of this `ModelContainerSpec`'s + // intermittently sends a GET request to the `/bar` path on the port of your + // container specified by the first value of this `ModelContainerSpec`'s // [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field. 
// // If you don't specify this field, it defaults to the following value when diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_evaluation.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_evaluation.proto index 282e8da2b..d91d4b181 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_evaluation.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_evaluation.proto @@ -36,6 +36,19 @@ message ModelEvaluation { pattern: "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}" }; + message ModelEvaluationExplanationSpec { + // Explanation type. + // + // For AutoML Image Classification models, possible values are: + // + // * `image-integrated-gradients` + // * `image-xrai` + string explanation_type = 1; + + // Explanation spec details. + ExplanationSpec explanation_spec = 2; + } + // Output only. The resource name of the ModelEvaluation. string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -63,4 +76,8 @@ message ModelEvaluation { // is evaluated with explanations, and only for AutoML tabular Models. // ModelExplanation model_explanation = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining + // the predicted values on the evaluated data. 
+ repeated ModelEvaluationExplanationSpec explanation_specs = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_service.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_service.proto index 5d7357b60..219ed2f90 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_service.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/model_service.proto @@ -191,7 +191,21 @@ message ListModelsRequest { } ]; - // The standard list filter. + // An expression for filtering the results of the request. For field names + // both snake_case and camelCase are supported. + // + // * `model` supports = and !=. `model` represents the Model ID, + // i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name]. + // * `display_name` supports = and != + // * `labels` supports general map functions that is: + // * `labels.key=value` - key:value equality + // * `labels.key:* or labels:key - key existence + // * A key including a space must be quoted. `labels."a key"`. + // + // Some examples: + // * `model=1234` + // * `displayName="myDisplayName"` + // * `labels.myKey="myValue"` string filter = 2; // The standard list page size. @@ -224,10 +238,7 @@ message UpdateModelRequest { // Required. The update mask applies to the resource. // For the `FieldMask` definition, see - // - // [FieldMask](https: - // //developers.google.com/protocol-buffers - // // /docs/reference/google.protobuf#fieldmask). + // [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask). google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; } @@ -253,20 +264,20 @@ message ExportModelRequest { // supported formats is used by default. 
string export_format_id = 1; - // The Google Cloud Storage location where the Model artifact is to be + // The Cloud Storage location where the Model artifact is to be // written to. Under the directory given as the destination a new one with // name "`model-export--`", // where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format, // will be created. Inside, the Model and any of its supporting files // will be written. - // This field should only be set when - // [Models.supported_export_formats.exportable_contents] contains ARTIFACT. + // This field should only be set when the `exportableContent` field of the + // [Model.supported_export_formats] object contains `ARTIFACT`. GcsDestination artifact_destination = 3; // The Google Container Registry or Artifact Registry uri where the // Model container image will be copied to. - // This field should only be set when - // [Models.supported_export_formats.exportable_contents] contains IMAGE. + // This field should only be set when the `exportableContent` field of the + // [Model.supported_export_formats] object contains `IMAGE`. ContainerRegistryDestination image_destination = 4; } diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/prediction_service.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/prediction_service.proto index d28f1e6f4..d20ed3899 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/prediction_service.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/prediction_service.proto @@ -133,6 +133,16 @@ message ExplainRequest { // [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. google.protobuf.Value parameters = 4; + // If specified, overrides the + // [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel. 
+ // Can be used for explaining prediction results with different + // configurations, such as: + // - Explaining top-5 predictions results as opposed to top-1; + // - Increasing path count or step count of the attribution methods to reduce + // approximate errors; + // - Using different baselines for explaining the prediction results. + ExplanationSpecOverride explanation_spec_override = 5; + // If specified, this ExplainRequest will be served by the chosen // DeployedModel, overriding [Endpoint.traffic_split][google.cloud.aiplatform.v1beta1.Endpoint.traffic_split]. string deployed_model_id = 3; diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/study.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/study.proto index 210c9eb7f..2d0a8f878 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/study.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/study.proto @@ -21,6 +21,7 @@ import "google/api/resource.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/aiplatform/v1beta1;aiplatform"; @@ -32,6 +33,11 @@ option java_package = "com.google.cloud.aiplatform.v1beta1"; // that has been or will be evaluated, along with the objective metrics got by // running the Trial. message Trial { + option (google.api.resource) = { + type: "aiplatform.googleapis.com/Trial" + pattern: "projects/{project}/locations/{location}/studies/{study}/trials/{trial}" + }; + // A message representing a parameter to be tuned. message Parameter { // Output only. The ID of the parameter. 
The parameter should be defined in @@ -265,6 +271,48 @@ message StudySpec { RANDOM_SEARCH = 3; } + // Describes the noise level of the repeated observations. + // + // "Noisy" means that the repeated observations with the same Trial parameters + // may lead to different metric evaluations. + enum ObservationNoise { + // The default noise level chosen by the AI Platform service. + OBSERVATION_NOISE_UNSPECIFIED = 0; + + // AI Platform Vizier assumes that the objective function is (nearly) + // perfectly reproducible, and will never repeat the same Trial + // parameters. + LOW = 1; + + // AI Platform Vizier will estimate the amount of noise in metric + // evaluations, it may repeat the same Trial parameters more than once. + HIGH = 2; + } + + // This indicates which measurement to use if/when the service automatically + // selects the final measurement from previously reported intermediate + // measurements. Choose this based on two considerations: + // A) Do you expect your measurements to monotonically improve? + // If so, choose LAST_MEASUREMENT. On the other hand, if you're in a + // situation where your system can "over-train" and you expect the + // performance to get better for a while but then start declining, + // choose BEST_MEASUREMENT. + // B) Are your measurements significantly noisy and/or irreproducible? + // If so, BEST_MEASUREMENT will tend to be over-optimistic, and it + // may be better to choose LAST_MEASUREMENT. + // If both or neither of (A) and (B) apply, it doesn't matter which + // selection type is chosen. + enum MeasurementSelectionType { + // Will be treated as LAST_MEASUREMENT. + MEASUREMENT_SELECTION_TYPE_UNSPECIFIED = 0; + + // Use the last measurement reported. + LAST_MEASUREMENT = 1; + + // Use the best measurement reported. + BEST_MEASUREMENT = 2; + } + // Required. Metric specs for the Study. 
repeated MetricSpec metrics = 1 [(google.api.field_behavior) = REQUIRED]; @@ -273,6 +321,14 @@ message StudySpec { // The search algorithm specified for the Study. Algorithm algorithm = 3; + + // The observation noise level of the study. + // Currently only supported by the Vizier service. Not supported by + // HyperparameterTuningJob or TrainingPipeline. + ObservationNoise observation_noise = 6; + + // Describes which measurement selection type will be used. + MeasurementSelectionType measurement_selection_type = 7; } // A message representing a Measurement of a Trial. A Measurement contains diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/training_pipeline.proto b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/training_pipeline.proto index 26fbd287f..26568518e 100644 --- a/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/training_pipeline.proto +++ b/proto-google-cloud-aiplatform-v1beta1/src/main/proto/google/cloud/aiplatform/v1beta1/training_pipeline.proto @@ -18,6 +18,7 @@ package google.cloud.aiplatform.v1beta1; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; +import "google/cloud/aiplatform/v1beta1/encryption_spec.proto"; import "google/cloud/aiplatform/v1beta1/io.proto"; import "google/cloud/aiplatform/v1beta1/machine_resources.proto"; import "google/cloud/aiplatform/v1beta1/manual_batch_tuning_parameters.proto"; @@ -79,7 +80,7 @@ message TrainingPipeline { // pipeline's [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] contains `metadata` object. 
google.protobuf.Value training_task_metadata = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Describes the Model that may be uploaded (via [ModelService.UploadMode][]) + // Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel]) // by this TrainingPipeline. The TrainingPipeline's // [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model // description should be populated, and if there are any special requirements @@ -125,6 +126,13 @@ message TrainingPipeline { // // See https://goo.gl/xmQnxf for more information and examples of labels. map labels = 15; + + // Customer-managed encryption key spec for a TrainingPipeline. If set, this + // TrainingPipeline will be secured by this key. + // + // Note: Model trained by this TrainingPipeline is also secured by this key if + // [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately. + EncryptionSpec encryption_spec = 18; } // Specifies AI Platform owned input data to be used for training, and @@ -159,7 +167,7 @@ message InputDataConfig { // * For non-tabular data: "jsonl". // * For tabular data: "csv" and "bigquery". // - // Following AI Platform environment variables will be passed to containers + // The following AI Platform environment variables are passed to containers // or python modules of the training task when this field is set: // // * AIP_DATA_FORMAT : Exported data format. @@ -167,43 +175,50 @@ message InputDataConfig { // * AIP_VALIDATION_DATA_URI : Sharded exported validation data uris. // * AIP_TEST_DATA_URI : Sharded exported test data uris. oneof destination { - // The Google Cloud Storage location where the training data is to be - // written to. 
In the given directory a new directory will be created with + // The Cloud Storage location where the training data is to be + // written to. In the given directory a new directory is created with // name: // `dataset---` // where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. - // All training input data will be written into that directory. + // All training input data is written into that directory. // - // The AI Platform environment variables representing Google Cloud Storage - // data URIs will always be represented in the Google Cloud Storage wildcard + // The AI Platform environment variables representing Cloud Storage + // data URIs are represented in the Cloud Storage wildcard // format to support sharded data. e.g.: "gs://.../training-*.jsonl" // // * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data - // * AIP_TRAINING_DATA_URI = + // * AIP_TRAINING_DATA_URI = // // "gcs_destination/dataset---