chore(bazel): move rules_gapic up in WORKSPACE loading order #1454

Merged 25 commits into main from owl-bot-copy on Jul 15, 2022

Commits (25)
6367266
feat: add BatchImportModelEvaluationSlices API in aiplatform v1 model…
gcf-owl-bot[bot] Jun 22, 2022
87e2546
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jun 22, 2022
c415d16
feat: add audience parameter
gcf-owl-bot[bot] Jun 23, 2022
61d58f7
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jun 23, 2022
d20605e
feat: add ListSavedQueries rpc to aiplatform v1beta1 dataset_service.…
gcf-owl-bot[bot] Jun 24, 2022
e911f6d
feat: add ListSavedQueries rpc to aiplatform v1 dataset_service.proto
gcf-owl-bot[bot] Jun 24, 2022
83de793
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jun 24, 2022
75c9fb4
Merge branch 'main' into owl-bot-copy
dizcology Jun 24, 2022
c497cb2
docs: clarify descriptions of the AdvancedSettings and WebhookRequest…
gcf-owl-bot[bot] Jun 29, 2022
ef45b05
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jun 29, 2022
37b714b
fix: Upgrade `rules_python` to 0.9.0
gcf-owl-bot[bot] Jul 5, 2022
b761eb4
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 5, 2022
78c6d38
chore: use gapic-generator-python 1.1.1
gcf-owl-bot[bot] Jul 6, 2022
5d7e4cb
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 6, 2022
b00af1d
fix(deps): require google-api-core 2.8.0
parthea Jul 7, 2022
ba49fae
chore: remove unused imports
gcf-owl-bot[bot] Jul 7, 2022
dd5a852
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 7, 2022
ae75990
Merge branch 'main' into owl-bot-copy
nayaknishant Jul 11, 2022
6b8285a
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 11, 2022
37be4f9
fix: update rules_go, grpc, gazelle and python toolchain to latest ve…
gcf-owl-bot[bot] Jul 11, 2022
7ceed4f
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 11, 2022
1bbb7f3
chore(bazel): move rules_gapic up in WORKSPACE loading order
gcf-owl-bot[bot] Jul 12, 2022
5b099ec
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 12, 2022
ffb7110
Merge branch 'main' into owl-bot-copy
dizcology Jul 15, 2022
a331e84
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 15, 2022
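
For context on the PR title: the repository's Bazel WORKSPACE file is not part of the diff shown below, which contains only the regenerated client code that rode along with the change. As a rough, hypothetical sketch of what "moving rules_gapic up" means, the rules_gapic repository is declared and its dependencies loaded before the rule sets that depend on it; every version and load path here is an illustrative placeholder, not taken from this PR:

# Hypothetical WORKSPACE fragment (Starlark). The version, URL, and load
# path are illustrative placeholders, not taken from this PR.
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

# Declare rules_gapic first, so rule sets loaded further down in the
# WORKSPACE resolve against the repositories it provides.
http_archive(
    name = "rules_gapic",
    strip_prefix = "rules_gapic-0.10.0",  # placeholder version
    urls = ["https://github.com/googleapis/rules_gapic/archive/v0.10.0.tar.gz"],
)

load("@rules_gapic//:repositories.bzl", "rules_gapic_repositories")  # illustrative load path

rules_gapic_repositories()

# ... rules_python, gazelle, grpc, and the GAPIC generator follow here ...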
google/cloud/aiplatform_v1/__init__.py (10 additions, 0 deletions)
@@ -87,6 +87,8 @@
from .types.dataset_service import ListDataItemsResponse
from .types.dataset_service import ListDatasetsRequest
from .types.dataset_service import ListDatasetsResponse
from .types.dataset_service import ListSavedQueriesRequest
from .types.dataset_service import ListSavedQueriesResponse
from .types.dataset_service import UpdateDatasetRequest
from .types.deployed_index_ref import DeployedIndexRef
from .types.deployed_model_ref import DeployedModelRef
@@ -345,6 +347,8 @@
from .types.model_monitoring import ModelMonitoringObjectiveConfig
from .types.model_monitoring import SamplingStrategy
from .types.model_monitoring import ThresholdConfig
from .types.model_service import BatchImportModelEvaluationSlicesRequest
from .types.model_service import BatchImportModelEvaluationSlicesResponse
from .types.model_service import DeleteModelRequest
from .types.model_service import DeleteModelVersionRequest
from .types.model_service import ExportModelOperationMetadata
@@ -393,6 +397,7 @@
from .types.prediction_service import PredictRequest
from .types.prediction_service import PredictResponse
from .types.prediction_service import RawPredictRequest
from .types.saved_query import SavedQuery
from .types.specialist_pool import SpecialistPool
from .types.specialist_pool_service import CreateSpecialistPoolOperationMetadata
from .types.specialist_pool_service import CreateSpecialistPoolRequest
@@ -535,6 +540,8 @@
"BatchCreateTensorboardTimeSeriesRequest",
"BatchCreateTensorboardTimeSeriesResponse",
"BatchDedicatedResources",
"BatchImportModelEvaluationSlicesRequest",
"BatchImportModelEvaluationSlicesResponse",
"BatchMigrateResourcesOperationMetadata",
"BatchMigrateResourcesRequest",
"BatchMigrateResourcesResponse",
@@ -796,6 +803,8 @@
"ListOptimalTrialsResponse",
"ListPipelineJobsRequest",
"ListPipelineJobsResponse",
"ListSavedQueriesRequest",
"ListSavedQueriesResponse",
"ListSpecialistPoolsRequest",
"ListSpecialistPoolsResponse",
"ListStudiesRequest",
@@ -886,6 +895,7 @@
"SampleConfig",
"SampledShapleyAttribution",
"SamplingStrategy",
"SavedQuery",
"Scalar",
"Scheduling",
"SearchFeaturesRequest",
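Because the new request, response, and resource types are re-exported from the package root (and listed in __all__), user code can refer to them without reaching into the types submodule. A minimal sketch, with a placeholder resource name:

# Minimal sketch; the parent resource name is a placeholder.
from google.cloud import aiplatform_v1

request = aiplatform_v1.ListSavedQueriesRequest(
    parent="projects/my-project/locations/us-central1/datasets/123",
)
slices_request = aiplatform_v1.BatchImportModelEvaluationSlicesRequest()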
google/cloud/aiplatform_v1/gapic_metadata.json (20 additions, 0 deletions)
@@ -55,6 +55,11 @@
"list_datasets"
]
},
"ListSavedQueries": {
"methods": [
"list_saved_queries"
]
},
"UpdateDataset": {
"methods": [
"update_dataset"
@@ -110,6 +115,11 @@
"list_datasets"
]
},
"ListSavedQueries": {
"methods": [
"list_saved_queries"
]
},
"UpdateDataset": {
"methods": [
"update_dataset"
@@ -1266,6 +1276,11 @@
"grpc": {
"libraryClient": "ModelServiceClient",
"rpcs": {
"BatchImportModelEvaluationSlices": {
"methods": [
"batch_import_model_evaluation_slices"
]
},
"DeleteModel": {
"methods": [
"delete_model"
@@ -1341,6 +1356,11 @@
"grpc-async": {
"libraryClient": "ModelServiceAsyncClient",
"rpcs": {
"BatchImportModelEvaluationSlices": {
"methods": [
"batch_import_model_evaluation_slices"
]
},
"DeleteModel": {
"methods": [
"delete_model"
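gapic_metadata.json exists so that tooling can map proto RPC names to the concrete client methods; the entries added above register the two new RPCs for both the sync and async clients. A short sketch of that lookup, assuming the standard GAPIC metadata layout shown in this diff:

# Sketch of resolving an RPC name to its Python method via
# gapic_metadata.json; the relative path and the service/transport keys
# follow the structure shown in this diff.
import json

with open("google/cloud/aiplatform_v1/gapic_metadata.json") as f:
    metadata = json.load(f)

rpcs = metadata["services"]["DatasetService"]["clients"]["grpc"]["rpcs"]
print(rpcs["ListSavedQueries"]["methods"])  # ['list_saved_queries']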
google/cloud/aiplatform_v1/services/dataset_service/async_client.py (112 additions, 0 deletions)
@@ -42,6 +42,7 @@
from google.cloud.aiplatform_v1.types import dataset_service
from google.cloud.aiplatform_v1.types import encryption_spec
from google.cloud.aiplatform_v1.types import operation as gca_operation
from google.cloud.aiplatform_v1.types import saved_query
from google.cloud.location import locations_pb2 # type: ignore
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
@@ -75,6 +76,8 @@ class DatasetServiceAsyncClient:
parse_data_item_path = staticmethod(DatasetServiceClient.parse_data_item_path)
dataset_path = staticmethod(DatasetServiceClient.dataset_path)
parse_dataset_path = staticmethod(DatasetServiceClient.parse_dataset_path)
saved_query_path = staticmethod(DatasetServiceClient.saved_query_path)
parse_saved_query_path = staticmethod(DatasetServiceClient.parse_saved_query_path)
common_billing_account_path = staticmethod(
DatasetServiceClient.common_billing_account_path
)
@@ -1157,6 +1160,115 @@ async def sample_list_data_items():
# Done; return the response.
return response

async def list_saved_queries(
self,
request: Union[dataset_service.ListSavedQueriesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListSavedQueriesAsyncPager:
r"""Lists SavedQueries in a Dataset.

.. code-block:: python

from google.cloud import aiplatform_v1

async def sample_list_saved_queries():
# Create a client
client = aiplatform_v1.DatasetServiceAsyncClient()

# Initialize request argument(s)
request = aiplatform_v1.ListSavedQueriesRequest(
parent="parent_value",
)

# Make the request
page_result = client.list_saved_queries(request=request)

# Handle the response
async for response in page_result:
print(response)

Args:
request (Union[google.cloud.aiplatform_v1.types.ListSavedQueriesRequest, dict]):
The request object. Request message for
[DatasetService.ListSavedQueries][google.cloud.aiplatform.v1.DatasetService.ListSavedQueries].
parent (:class:`str`):
Required. The resource name of the Dataset to list
SavedQueries from. Format:
``projects/{project}/locations/{location}/datasets/{dataset}``

This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.

Returns:
google.cloud.aiplatform_v1.services.dataset_service.pagers.ListSavedQueriesAsyncPager:
Response message for
[DatasetService.ListSavedQueries][google.cloud.aiplatform.v1.DatasetService.ListSavedQueries].

Iterating over this object will yield results and
resolve additional pages automatically.

"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)

request = dataset_service.ListSavedQueriesRequest(request)

# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_saved_queries,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)

# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)

# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListSavedQueriesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)

# Done; return the response.
return response

async def get_annotation_spec(
self,
request: Union[dataset_service.GetAnnotationSpecRequest, dict] = None,
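The docstring sample above shows the shape of the call but not the event-loop driver. A runnable sketch might look like the following; note that list_saved_queries on the async client is a coroutine, so the call itself is awaited before the pager is iterated (the parent value is a placeholder, and real credentials are required):

# Sketch of driving the async pager end to end; parent is a placeholder
# and the request needs valid Google Cloud credentials to succeed.
import asyncio

from google.cloud import aiplatform_v1


async def main() -> None:
    client = aiplatform_v1.DatasetServiceAsyncClient()
    # Awaiting the coroutine yields a ListSavedQueriesAsyncPager.
    pager = await client.list_saved_queries(
        parent="projects/my-project/locations/us-central1/datasets/123",
    )
    async for saved_query in pager:  # transparently fetches further pages
        print(saved_query.name)


asyncio.run(main())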
google/cloud/aiplatform_v1/services/dataset_service/client.py (135 additions, 0 deletions)
@@ -45,6 +45,7 @@
from google.cloud.aiplatform_v1.types import dataset_service
from google.cloud.aiplatform_v1.types import encryption_spec
from google.cloud.aiplatform_v1.types import operation as gca_operation
from google.cloud.aiplatform_v1.types import saved_query
from google.cloud.location import locations_pb2 # type: ignore
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
@@ -277,6 +278,30 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}

@staticmethod
def saved_query_path(
project: str,
location: str,
dataset: str,
saved_query: str,
) -> str:
"""Returns a fully-qualified saved_query string."""
return "projects/{project}/locations/{location}/datasets/{dataset}/savedQueries/{saved_query}".format(
project=project,
location=location,
dataset=dataset,
saved_query=saved_query,
)

@staticmethod
def parse_saved_query_path(path: str) -> Dict[str, str]:
"""Parses a saved_query path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)/savedQueries/(?P<saved_query>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
def common_billing_account_path(
billing_account: str,
@@ -517,6 +542,7 @@ def __init__(
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
api_audience=client_options.api_audience,
)

def create_dataset(
@@ -1447,6 +1473,115 @@ def sample_list_data_items():
# Done; return the response.
return response

def list_saved_queries(
self,
request: Union[dataset_service.ListSavedQueriesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListSavedQueriesPager:
r"""Lists SavedQueries in a Dataset.

.. code-block:: python

from google.cloud import aiplatform_v1

def sample_list_saved_queries():
# Create a client
client = aiplatform_v1.DatasetServiceClient()

# Initialize request argument(s)
request = aiplatform_v1.ListSavedQueriesRequest(
parent="parent_value",
)

# Make the request
page_result = client.list_saved_queries(request=request)

# Handle the response
for response in page_result:
print(response)

Args:
request (Union[google.cloud.aiplatform_v1.types.ListSavedQueriesRequest, dict]):
The request object. Request message for
[DatasetService.ListSavedQueries][google.cloud.aiplatform.v1.DatasetService.ListSavedQueries].
parent (str):
Required. The resource name of the Dataset to list
SavedQueries from. Format:
``projects/{project}/locations/{location}/datasets/{dataset}``

This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.

Returns:
google.cloud.aiplatform_v1.services.dataset_service.pagers.ListSavedQueriesPager:
Response message for
[DatasetService.ListSavedQueries][google.cloud.aiplatform.v1.DatasetService.ListSavedQueries].

Iterating over this object will yield results and
resolve additional pages automatically.

"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)

# Minor optimization to avoid making a copy if the user passes
# in a dataset_service.ListSavedQueriesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, dataset_service.ListSavedQueriesRequest):
request = dataset_service.ListSavedQueriesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_saved_queries]

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)

# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)

# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListSavedQueriesPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)

# Done; return the response.
return response

def get_annotation_spec(
self,
request: Union[dataset_service.GetAnnotationSpecRequest, dict] = None,
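Unlike the RPC itself, the new path helpers are pure string utilities (static methods on the client class), so they can be exercised without a network call or credentials; the argument values below are placeholders:

# The path helpers are static methods, so no client instance is needed;
# all argument values are placeholders.
from google.cloud import aiplatform_v1

path = aiplatform_v1.DatasetServiceClient.saved_query_path(
    project="my-project",
    location="us-central1",
    dataset="123",
    saved_query="456",
)
print(path)
# projects/my-project/locations/us-central1/datasets/123/savedQueries/456

parts = aiplatform_v1.DatasetServiceClient.parse_saved_query_path(path)
print(parts["saved_query"])  # 456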