diff --git a/packages/google-analytics-data/.github/.OwlBot.lock.yaml b/packages/google-analytics-data/.github/.OwlBot.lock.yaml index 44c78f7cc12d..87dd00611576 100644 --- a/packages/google-analytics-data/.github/.OwlBot.lock.yaml +++ b/packages/google-analytics-data/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/packages/google-analytics-data/docs/conf.py b/packages/google-analytics-data/docs/conf.py index 4a689ff87b95..6cf166a74d04 100644 --- a/packages/google-analytics-data/docs/conf.py +++ b/packages/google-analytics-data/docs/conf.py @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index 25dab26b677b..5a21726f40fa 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -207,7 +207,12 @@ async def run_report( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -255,7 +260,12 @@ async def run_pivot_report( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -299,7 +309,12 @@ async def batch_run_reports( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -343,7 +358,12 @@ async def batch_run_pivot_reports( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -434,7 +454,12 @@ async def get_metadata( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -486,7 +511,12 @@ async def run_realtime_report( ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -494,7 +524,9 @@ async def run_realtime_report( try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-analytics-data",).version, + gapic_version=pkg_resources.get_distribution( + "google-analytics-data", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index 7f1cafb06dc9..797be3e1e7c8 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -52,7 +52,8 @@ class AlphaAnalyticsDataClientMeta(type): _transport_registry["grpc_asyncio"] = AlphaAnalyticsDataGrpcAsyncIOTransport def get_transport_class( - cls, label: str = None, + cls, + label: str = None, ) -> Type[AlphaAnalyticsDataTransport]: """Returns an appropriate transport class. @@ -158,9 +159,13 @@ def transport(self) -> AlphaAnalyticsDataTransport: return self._transport @staticmethod - def metadata_path(property: str,) -> str: + def metadata_path( + property: str, + ) -> str: """Returns a fully-qualified metadata string.""" - return "properties/{property}/metadata".format(property=property,) + return "properties/{property}/metadata".format( + property=property, + ) @staticmethod def parse_metadata_path(path: str) -> Dict[str, str]: @@ -169,7 +174,9 @@ def parse_metadata_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -182,9 +189,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -193,9 +204,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -204,9 +219,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return 
"projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -215,10 +234,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -393,7 +416,12 @@ def run_report( rpc = self._transport._wrapped_methods[self._transport.run_report] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -442,7 +470,12 @@ def run_pivot_report( rpc = self._transport._wrapped_methods[self._transport.run_pivot_report] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -487,7 +520,12 @@ def batch_run_reports( rpc = self._transport._wrapped_methods[self._transport.batch_run_reports] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -532,7 +570,12 @@ def batch_run_pivot_reports( rpc = self._transport._wrapped_methods[self._transport.batch_run_pivot_reports] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -623,7 +666,12 @@ def get_metadata( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -676,7 +724,12 @@ def run_realtime_report( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -684,7 +737,9 @@ def run_realtime_report( try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-analytics-data",).version, + gapic_version=pkg_resources.get_distribution( + "google-analytics-data", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py index 515fc61df5c4..8998561dd91a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py @@ -30,7 +30,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-analytics-data",).version, + gapic_version=pkg_resources.get_distribution( + "google-analytics-data", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -156,13 +158,19 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.run_report: gapic_v1.method.wrap_method( - self.run_report, default_timeout=60.0, client_info=client_info, + self.run_report, + default_timeout=60.0, + client_info=client_info, ), self.run_pivot_report: gapic_v1.method.wrap_method( - self.run_pivot_report, default_timeout=60.0, client_info=client_info, + self.run_pivot_report, + default_timeout=60.0, + client_info=client_info, ), self.batch_run_reports: gapic_v1.method.wrap_method( - self.batch_run_reports, default_timeout=60.0, client_info=client_info, + self.batch_run_reports, + default_timeout=60.0, + client_info=client_info, ), self.batch_run_pivot_reports: gapic_v1.method.wrap_method( self.batch_run_pivot_reports, @@ -170,10 +178,14 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.get_metadata: gapic_v1.method.wrap_method( - self.get_metadata, default_timeout=None, client_info=client_info, + self.get_metadata, + default_timeout=None, + client_info=client_info, ), self.run_realtime_report: gapic_v1.method.wrap_method( - self.run_realtime_report, default_timeout=None, client_info=client_info, + self.run_realtime_report, + default_timeout=None, + client_info=client_info, ), } diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py index 38d4e9eb4687..b44db39007d6 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -221,8 +221,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py index a57f8e5dcbf6..88117225dc5f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -50,11 +50,20 @@ class Metadata(proto.Message): The metric descriptions. """ - name = proto.Field(proto.STRING, number=3,) + name = proto.Field( + proto.STRING, + number=3, + ) dimensions = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.DimensionMetadata, + proto.MESSAGE, + number=1, + message=data.DimensionMetadata, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricMetadata, ) - metrics = proto.RepeatedField(proto.MESSAGE, number=2, message=data.MetricMetadata,) class RunReportRequest(proto.Message): @@ -124,24 +133,71 @@ class RunReportRequest(proto.Message): `PropertyQuota <#PropertyQuota>`__. """ - entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,) - dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,) - metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,) - date_ranges = proto.RepeatedField(proto.MESSAGE, number=4, message=data.DateRange,) - offset = proto.Field(proto.INT64, number=5,) - limit = proto.Field(proto.INT64, number=6,) + entity = proto.Field( + proto.MESSAGE, + number=1, + message=data.Entity, + ) + dimensions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + date_ranges = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.DateRange, + ) + offset = proto.Field( + proto.INT64, + number=5, + ) + limit = proto.Field( + proto.INT64, + number=6, + ) metric_aggregations = proto.RepeatedField( - proto.ENUM, number=7, enum=data.MetricAggregation, + proto.ENUM, + number=7, + enum=data.MetricAggregation, ) dimension_filter = proto.Field( - proto.MESSAGE, number=8, message=data.FilterExpression, + proto.MESSAGE, + number=8, + message=data.FilterExpression, + ) + metric_filter = proto.Field( + proto.MESSAGE, + number=9, + message=data.FilterExpression, + ) + order_bys = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=data.OrderBy, + ) + currency_code = proto.Field( + proto.STRING, + number=11, + ) + cohort_spec = proto.Field( + proto.MESSAGE, + number=12, + message=data.CohortSpec, + ) + keep_empty_rows = proto.Field( + proto.BOOL, + number=13, + ) + return_property_quota = proto.Field( + proto.BOOL, + number=14, ) - metric_filter = proto.Field(proto.MESSAGE, number=9, message=data.FilterExpression,) - order_bys = proto.RepeatedField(proto.MESSAGE, number=10, message=data.OrderBy,) - currency_code = proto.Field(proto.STRING, number=11,) - cohort_spec = proto.Field(proto.MESSAGE, number=12, message=data.CohortSpec,) - keep_empty_rows = proto.Field(proto.BOOL, number=13,) - return_property_quota = proto.Field(proto.BOOL, number=14,) class RunReportResponse(proto.Message): @@ -182,18 +238,49 @@ class RunReportResponse(proto.Message): """ dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=11, message=data.DimensionHeader, + proto.MESSAGE, + number=11, + message=data.DimensionHeader, ) 
metric_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.MetricHeader, + proto.MESSAGE, + number=1, + message=data.MetricHeader, + ) + rows = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Row, + ) + totals = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=data.Row, + ) + maximums = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=data.Row, + ) + minimums = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=data.Row, + ) + row_count = proto.Field( + proto.INT32, + number=12, + ) + metadata = proto.Field( + proto.MESSAGE, + number=6, + message=data.ResponseMetaData, + ) + property_quota = proto.Field( + proto.MESSAGE, + number=7, + message=data.PropertyQuota, ) - rows = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Row,) - totals = proto.RepeatedField(proto.MESSAGE, number=8, message=data.Row,) - maximums = proto.RepeatedField(proto.MESSAGE, number=9, message=data.Row,) - minimums = proto.RepeatedField(proto.MESSAGE, number=10, message=data.Row,) - row_count = proto.Field(proto.INT32, number=12,) - metadata = proto.Field(proto.MESSAGE, number=6, message=data.ResponseMetaData,) - property_quota = proto.Field(proto.MESSAGE, number=7, message=data.PropertyQuota,) class RunPivotReportRequest(proto.Message): @@ -257,19 +344,58 @@ class RunPivotReportRequest(proto.Message): `PropertyQuota <#PropertyQuota>`__. """ - entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,) - dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,) - metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,) + entity = proto.Field( + proto.MESSAGE, + number=1, + message=data.Entity, + ) + dimensions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) dimension_filter = proto.Field( - proto.MESSAGE, number=4, message=data.FilterExpression, + proto.MESSAGE, + number=4, + message=data.FilterExpression, + ) + metric_filter = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + pivots = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Pivot, + ) + date_ranges = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=data.DateRange, + ) + currency_code = proto.Field( + proto.STRING, + number=8, + ) + cohort_spec = proto.Field( + proto.MESSAGE, + number=9, + message=data.CohortSpec, + ) + keep_empty_rows = proto.Field( + proto.BOOL, + number=10, + ) + return_property_quota = proto.Field( + proto.BOOL, + number=11, ) - metric_filter = proto.Field(proto.MESSAGE, number=5, message=data.FilterExpression,) - pivots = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Pivot,) - date_ranges = proto.RepeatedField(proto.MESSAGE, number=7, message=data.DateRange,) - currency_code = proto.Field(proto.STRING, number=8,) - cohort_spec = proto.Field(proto.MESSAGE, number=9, message=data.CohortSpec,) - keep_empty_rows = proto.Field(proto.BOOL, number=10,) - return_property_quota = proto.Field(proto.BOOL, number=11,) class RunPivotReportResponse(proto.Message): @@ -344,18 +470,40 @@ class RunPivotReportResponse(proto.Message): """ pivot_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.PivotHeader, + proto.MESSAGE, + number=1, + message=data.PivotHeader, ) dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=7, message=data.DimensionHeader, + proto.MESSAGE, + number=7, + 
message=data.DimensionHeader, ) metric_headers = proto.RepeatedField( - proto.MESSAGE, number=2, message=data.MetricHeader, + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + aggregates = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + metadata = proto.Field( + proto.MESSAGE, + number=5, + message=data.ResponseMetaData, + ) + property_quota = proto.Field( + proto.MESSAGE, + number=6, + message=data.PropertyQuota, ) - rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,) - aggregates = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,) - metadata = proto.Field(proto.MESSAGE, number=5, message=data.ResponseMetaData,) - property_quota = proto.Field(proto.MESSAGE, number=6, message=data.PropertyQuota,) class BatchRunReportsRequest(proto.Message): @@ -372,8 +520,16 @@ class BatchRunReportsRequest(proto.Message): allowed up to 5 requests. """ - entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,) - requests = proto.RepeatedField(proto.MESSAGE, number=2, message="RunReportRequest",) + entity = proto.Field( + proto.MESSAGE, + number=1, + message=data.Entity, + ) + requests = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RunReportRequest", + ) class BatchRunReportsResponse(proto.Message): @@ -384,7 +540,11 @@ class BatchRunReportsResponse(proto.Message): separate report request. """ - reports = proto.RepeatedField(proto.MESSAGE, number=1, message="RunReportResponse",) + reports = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RunReportResponse", + ) class BatchRunPivotReportsRequest(proto.Message): @@ -401,9 +561,15 @@ class BatchRunPivotReportsRequest(proto.Message): request is allowed up to 5 requests. """ - entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,) + entity = proto.Field( + proto.MESSAGE, + number=1, + message=data.Entity, + ) requests = proto.RepeatedField( - proto.MESSAGE, number=2, message="RunPivotReportRequest", + proto.MESSAGE, + number=2, + message="RunPivotReportRequest", ) @@ -416,7 +582,9 @@ class BatchRunPivotReportsResponse(proto.Message): """ pivot_reports = proto.RepeatedField( - proto.MESSAGE, number=1, message="RunPivotReportResponse", + proto.MESSAGE, + number=1, + message="RunPivotReportResponse", ) @@ -438,7 +606,10 @@ class GetMetadataRequest(proto.Message): not return custom dimensions and metrics. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class RunRealtimeReportRequest(proto.Message): @@ -482,19 +653,48 @@ class RunRealtimeReportRequest(proto.Message): `PropertyQuota <#PropertyQuota>`__. 
""" - property = proto.Field(proto.STRING, number=1,) - dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,) - metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,) - limit = proto.Field(proto.INT64, number=4,) + property = proto.Field( + proto.STRING, + number=1, + ) + dimensions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + limit = proto.Field( + proto.INT64, + number=4, + ) dimension_filter = proto.Field( - proto.MESSAGE, number=5, message=data.FilterExpression, + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + metric_filter = proto.Field( + proto.MESSAGE, + number=6, + message=data.FilterExpression, ) - metric_filter = proto.Field(proto.MESSAGE, number=6, message=data.FilterExpression,) metric_aggregations = proto.RepeatedField( - proto.ENUM, number=7, enum=data.MetricAggregation, + proto.ENUM, + number=7, + enum=data.MetricAggregation, + ) + order_bys = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=data.OrderBy, + ) + return_property_quota = proto.Field( + proto.BOOL, + number=9, ) - order_bys = proto.RepeatedField(proto.MESSAGE, number=8, message=data.OrderBy,) - return_property_quota = proto.Field(proto.BOOL, number=9,) class RunRealtimeReportResponse(proto.Message): @@ -532,17 +732,44 @@ class RunRealtimeReportResponse(proto.Message): """ dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.DimensionHeader, + proto.MESSAGE, + number=1, + message=data.DimensionHeader, ) metric_headers = proto.RepeatedField( - proto.MESSAGE, number=2, message=data.MetricHeader, - ) - rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,) - totals = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,) - maximums = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,) - minimums = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Row,) - row_count = proto.Field(proto.INT32, number=7,) - property_quota = proto.Field(proto.MESSAGE, number=8, message=data.PropertyQuota,) + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + totals = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + maximums = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + minimums = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Row, + ) + row_count = proto.Field( + proto.INT32, + number=7, + ) + property_quota = proto.Field( + proto.MESSAGE, + number=8, + message=data.PropertyQuota, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py index 536152686b30..7ad35e3b55ff 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py @@ -103,9 +103,18 @@ class DateRange(proto.Message): in the request: ``date_range_0``, ``date_range_1``, etc. 
""" - start_date = proto.Field(proto.STRING, number=1,) - end_date = proto.Field(proto.STRING, number=2,) - name = proto.Field(proto.STRING, number=3,) + start_date = proto.Field( + proto.STRING, + number=1, + ) + end_date = proto.Field( + proto.STRING, + number=2, + ) + name = proto.Field( + proto.STRING, + number=3, + ) class Entity(proto.Message): @@ -119,7 +128,10 @@ class Entity(proto.Message): ID `__. """ - property_id = proto.Field(proto.STRING, number=1,) + property_id = proto.Field( + proto.STRING, + number=1, + ) class Dimension(proto.Message): @@ -150,9 +162,14 @@ class Dimension(proto.Message): ", ", city). """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) dimension_expression = proto.Field( - proto.MESSAGE, number=2, message="DimensionExpression", + proto.MESSAGE, + number=2, + message="DimensionExpression", ) @@ -184,7 +201,10 @@ class CaseExpression(proto.Message): to a name in dimensions field of the request. """ - dimension_name = proto.Field(proto.STRING, number=1,) + dimension_name = proto.Field( + proto.STRING, + number=1, + ) class ConcatenateExpression(proto.Message): r"""Used to combine dimension values to a single dimension. @@ -204,17 +224,32 @@ class ConcatenateExpression(proto.Message): response will contain "US,FR,JP". """ - dimension_names = proto.RepeatedField(proto.STRING, number=1,) - delimiter = proto.Field(proto.STRING, number=2,) + dimension_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + delimiter = proto.Field( + proto.STRING, + number=2, + ) lower_case = proto.Field( - proto.MESSAGE, number=4, oneof="one_expression", message=CaseExpression, + proto.MESSAGE, + number=4, + oneof="one_expression", + message=CaseExpression, ) upper_case = proto.Field( - proto.MESSAGE, number=5, oneof="one_expression", message=CaseExpression, + proto.MESSAGE, + number=5, + oneof="one_expression", + message=CaseExpression, ) concatenate = proto.Field( - proto.MESSAGE, number=6, oneof="one_expression", message=ConcatenateExpression, + proto.MESSAGE, + number=6, + oneof="one_expression", + message=ConcatenateExpression, ) @@ -247,9 +282,18 @@ class Metric(proto.Message): ``orderBys``, or a metric ``expression``. 
""" - name = proto.Field(proto.STRING, number=1,) - expression = proto.Field(proto.STRING, number=2,) - invisible = proto.Field(proto.BOOL, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + expression = proto.Field( + proto.STRING, + number=2, + ) + invisible = proto.Field( + proto.BOOL, + number=3, + ) class FilterExpression(proto.Message): @@ -271,15 +315,29 @@ class FilterExpression(proto.Message): """ and_group = proto.Field( - proto.MESSAGE, number=1, oneof="expr", message="FilterExpressionList", + proto.MESSAGE, + number=1, + oneof="expr", + message="FilterExpressionList", ) or_group = proto.Field( - proto.MESSAGE, number=2, oneof="expr", message="FilterExpressionList", + proto.MESSAGE, + number=2, + oneof="expr", + message="FilterExpressionList", ) not_expression = proto.Field( - proto.MESSAGE, number=3, oneof="expr", message="FilterExpression", + proto.MESSAGE, + number=3, + oneof="expr", + message="FilterExpression", + ) + filter = proto.Field( + proto.MESSAGE, + number=4, + oneof="expr", + message="Filter", ) - filter = proto.Field(proto.MESSAGE, number=4, oneof="expr", message="Filter",) class FilterExpressionList(proto.Message): @@ -290,7 +348,9 @@ class FilterExpressionList(proto.Message): """ expressions = proto.RepeatedField( - proto.MESSAGE, number=1, message="FilterExpression", + proto.MESSAGE, + number=1, + message="FilterExpression", ) @@ -339,10 +399,18 @@ class MatchType(proto.Enum): PARTIAL_REGEXP = 6 match_type = proto.Field( - proto.ENUM, number=1, enum="Filter.StringFilter.MatchType", + proto.ENUM, + number=1, + enum="Filter.StringFilter.MatchType", + ) + value = proto.Field( + proto.STRING, + number=2, + ) + case_sensitive = proto.Field( + proto.BOOL, + number=3, ) - value = proto.Field(proto.STRING, number=2,) - case_sensitive = proto.Field(proto.BOOL, number=3,) class InListFilter(proto.Message): r"""The result needs to be in a list of string values. @@ -354,8 +422,14 @@ class InListFilter(proto.Message): If true, the string value is case sensitive. """ - values = proto.RepeatedField(proto.STRING, number=1,) - case_sensitive = proto.Field(proto.BOOL, number=2,) + values = proto.RepeatedField( + proto.STRING, + number=1, + ) + case_sensitive = proto.Field( + proto.BOOL, + number=2, + ) class NumericFilter(proto.Message): r"""Filters for numeric or date values. @@ -376,9 +450,15 @@ class Operation(proto.Enum): GREATER_THAN_OR_EQUAL = 5 operation = proto.Field( - proto.ENUM, number=1, enum="Filter.NumericFilter.Operation", + proto.ENUM, + number=1, + enum="Filter.NumericFilter.Operation", + ) + value = proto.Field( + proto.MESSAGE, + number=2, + message="NumericValue", ) - value = proto.Field(proto.MESSAGE, number=2, message="NumericValue",) class BetweenFilter(proto.Message): r"""To express that the result needs to be between two numbers @@ -391,22 +471,49 @@ class BetweenFilter(proto.Message): Ends with this number. 
""" - from_value = proto.Field(proto.MESSAGE, number=1, message="NumericValue",) - to_value = proto.Field(proto.MESSAGE, number=2, message="NumericValue",) + from_value = proto.Field( + proto.MESSAGE, + number=1, + message="NumericValue", + ) + to_value = proto.Field( + proto.MESSAGE, + number=2, + message="NumericValue", + ) - field_name = proto.Field(proto.STRING, number=1,) - null_filter = proto.Field(proto.BOOL, number=2, oneof="one_filter",) + field_name = proto.Field( + proto.STRING, + number=1, + ) + null_filter = proto.Field( + proto.BOOL, + number=2, + oneof="one_filter", + ) string_filter = proto.Field( - proto.MESSAGE, number=3, oneof="one_filter", message=StringFilter, + proto.MESSAGE, + number=3, + oneof="one_filter", + message=StringFilter, ) in_list_filter = proto.Field( - proto.MESSAGE, number=4, oneof="one_filter", message=InListFilter, + proto.MESSAGE, + number=4, + oneof="one_filter", + message=InListFilter, ) numeric_filter = proto.Field( - proto.MESSAGE, number=5, oneof="one_filter", message=NumericFilter, + proto.MESSAGE, + number=5, + oneof="one_filter", + message=NumericFilter, ) between_filter = proto.Field( - proto.MESSAGE, number=6, oneof="one_filter", message=BetweenFilter, + proto.MESSAGE, + number=6, + oneof="one_filter", + message=BetweenFilter, ) @@ -431,7 +538,10 @@ class MetricOrderBy(proto.Message): A metric name in the request to order by. """ - metric_name = proto.Field(proto.STRING, number=1,) + metric_name = proto.Field( + proto.STRING, + number=1, + ) class DimensionOrderBy(proto.Message): r"""Sorts by dimension values. @@ -450,9 +560,14 @@ class OrderType(proto.Enum): CASE_INSENSITIVE_ALPHANUMERIC = 2 NUMERIC = 3 - dimension_name = proto.Field(proto.STRING, number=1,) + dimension_name = proto.Field( + proto.STRING, + number=1, + ) order_type = proto.Field( - proto.ENUM, number=2, enum="OrderBy.DimensionOrderBy.OrderType", + proto.ENUM, + number=2, + enum="OrderBy.DimensionOrderBy.OrderType", ) class PivotOrderBy(proto.Message): @@ -498,24 +613,47 @@ class PivotSelection(proto.Message): this value. """ - dimension_name = proto.Field(proto.STRING, number=1,) - dimension_value = proto.Field(proto.STRING, number=2,) - - metric_name = proto.Field(proto.STRING, number=1,) + dimension_name = proto.Field( + proto.STRING, + number=1, + ) + dimension_value = proto.Field( + proto.STRING, + number=2, + ) + + metric_name = proto.Field( + proto.STRING, + number=1, + ) pivot_selections = proto.RepeatedField( - proto.MESSAGE, number=2, message="OrderBy.PivotOrderBy.PivotSelection", + proto.MESSAGE, + number=2, + message="OrderBy.PivotOrderBy.PivotSelection", ) metric = proto.Field( - proto.MESSAGE, number=1, oneof="one_order_by", message=MetricOrderBy, + proto.MESSAGE, + number=1, + oneof="one_order_by", + message=MetricOrderBy, ) dimension = proto.Field( - proto.MESSAGE, number=2, oneof="one_order_by", message=DimensionOrderBy, + proto.MESSAGE, + number=2, + oneof="one_order_by", + message=DimensionOrderBy, ) pivot = proto.Field( - proto.MESSAGE, number=3, oneof="one_order_by", message=PivotOrderBy, + proto.MESSAGE, + number=3, + oneof="one_order_by", + message=PivotOrderBy, + ) + desc = proto.Field( + proto.BOOL, + number=4, ) - desc = proto.Field(proto.BOOL, number=4,) class Pivot(proto.Message): @@ -549,12 +687,27 @@ class Pivot(proto.Message): specified metric_aggregations. 
""" - field_names = proto.RepeatedField(proto.STRING, number=1,) - order_bys = proto.RepeatedField(proto.MESSAGE, number=2, message="OrderBy",) - offset = proto.Field(proto.INT64, number=3,) - limit = proto.Field(proto.INT64, number=4,) + field_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + order_bys = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OrderBy", + ) + offset = proto.Field( + proto.INT64, + number=3, + ) + limit = proto.Field( + proto.INT64, + number=4, + ) metric_aggregations = proto.RepeatedField( - proto.ENUM, number=5, enum="MetricAggregation", + proto.ENUM, + number=5, + enum="MetricAggregation", ) @@ -590,10 +743,20 @@ class CohortSpec(proto.Message): Optional settings for a cohort report. """ - cohorts = proto.RepeatedField(proto.MESSAGE, number=1, message="Cohort",) - cohorts_range = proto.Field(proto.MESSAGE, number=2, message="CohortsRange",) + cohorts = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Cohort", + ) + cohorts_range = proto.Field( + proto.MESSAGE, + number=2, + message="CohortsRange", + ) cohort_report_settings = proto.Field( - proto.MESSAGE, number=3, message="CohortReportSettings", + proto.MESSAGE, + number=3, + message="CohortReportSettings", ) @@ -636,9 +799,19 @@ class Cohort(proto.Message): month. """ - name = proto.Field(proto.STRING, number=1,) - dimension = proto.Field(proto.STRING, number=2,) - date_range = proto.Field(proto.MESSAGE, number=3, message="DateRange",) + name = proto.Field( + proto.STRING, + number=1, + ) + dimension = proto.Field( + proto.STRING, + number=2, + ) + date_range = proto.Field( + proto.MESSAGE, + number=3, + message="DateRange", + ) class CohortsRange(proto.Message): @@ -697,9 +870,19 @@ class Granularity(proto.Enum): WEEKLY = 2 MONTHLY = 3 - granularity = proto.Field(proto.ENUM, number=1, enum=Granularity,) - start_offset = proto.Field(proto.INT32, number=2,) - end_offset = proto.Field(proto.INT32, number=3,) + granularity = proto.Field( + proto.ENUM, + number=1, + enum=Granularity, + ) + start_offset = proto.Field( + proto.INT32, + number=2, + ) + end_offset = proto.Field( + proto.INT32, + number=3, + ) class CohortReportSettings(proto.Message): @@ -710,7 +893,10 @@ class CohortReportSettings(proto.Message): end day. Not supported in ``RunReportRequest``. """ - accumulate = proto.Field(proto.BOOL, number=1,) + accumulate = proto.Field( + proto.BOOL, + number=1, + ) class ResponseMetaData(proto.Message): @@ -724,7 +910,10 @@ class ResponseMetaData(proto.Message): can happen for high cardinality reports. """ - data_loss_from_other_row = proto.Field(proto.BOOL, number=3,) + data_loss_from_other_row = proto.Field( + proto.BOOL, + number=3, + ) class DimensionHeader(proto.Message): @@ -739,7 +928,10 @@ class DimensionHeader(proto.Message): The dimension's name. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class MetricHeader(proto.Message): @@ -756,8 +948,15 @@ class MetricHeader(proto.Message): The metric's data type. 
""" - name = proto.Field(proto.STRING, number=1,) - type_ = proto.Field(proto.ENUM, number=2, enum="MetricType",) + name = proto.Field( + proto.STRING, + number=1, + ) + type_ = proto.Field( + proto.ENUM, + number=2, + enum="MetricType", + ) class PivotHeader(proto.Message): @@ -775,9 +974,14 @@ class PivotHeader(proto.Message): """ pivot_dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message="PivotDimensionHeader", + proto.MESSAGE, + number=1, + message="PivotDimensionHeader", + ) + row_count = proto.Field( + proto.INT32, + number=2, ) - row_count = proto.Field(proto.INT32, number=2,) class PivotDimensionHeader(proto.Message): @@ -788,7 +992,9 @@ class PivotDimensionHeader(proto.Message): """ dimension_values = proto.RepeatedField( - proto.MESSAGE, number=1, message="DimensionValue", + proto.MESSAGE, + number=1, + message="DimensionValue", ) @@ -840,9 +1046,15 @@ class Row(proto.Message): """ dimension_values = proto.RepeatedField( - proto.MESSAGE, number=1, message="DimensionValue", + proto.MESSAGE, + number=1, + message="DimensionValue", + ) + metric_values = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="MetricValue", ) - metric_values = proto.RepeatedField(proto.MESSAGE, number=2, message="MetricValue",) class DimensionValue(proto.Message): @@ -853,7 +1065,11 @@ class DimensionValue(proto.Message): string. """ - value = proto.Field(proto.STRING, number=1, oneof="one_value",) + value = proto.Field( + proto.STRING, + number=1, + oneof="one_value", + ) class MetricValue(proto.Message): @@ -863,7 +1079,11 @@ class MetricValue(proto.Message): Measurement value. See MetricHeader for type. """ - value = proto.Field(proto.STRING, number=4, oneof="one_value",) + value = proto.Field( + proto.STRING, + number=4, + oneof="one_value", + ) class NumericValue(proto.Message): @@ -875,8 +1095,16 @@ class NumericValue(proto.Message): Double value """ - int64_value = proto.Field(proto.INT64, number=1, oneof="one_value",) - double_value = proto.Field(proto.DOUBLE, number=2, oneof="one_value",) + int64_value = proto.Field( + proto.INT64, + number=1, + oneof="one_value", + ) + double_value = proto.Field( + proto.DOUBLE, + number=2, + oneof="one_value", + ) class PropertyQuota(proto.Message): @@ -909,11 +1137,25 @@ class PropertyQuota(proto.Message): per hour. """ - tokens_per_day = proto.Field(proto.MESSAGE, number=1, message="QuotaStatus",) - tokens_per_hour = proto.Field(proto.MESSAGE, number=2, message="QuotaStatus",) - concurrent_requests = proto.Field(proto.MESSAGE, number=3, message="QuotaStatus",) + tokens_per_day = proto.Field( + proto.MESSAGE, + number=1, + message="QuotaStatus", + ) + tokens_per_hour = proto.Field( + proto.MESSAGE, + number=2, + message="QuotaStatus", + ) + concurrent_requests = proto.Field( + proto.MESSAGE, + number=3, + message="QuotaStatus", + ) server_errors_per_project_per_hour = proto.Field( - proto.MESSAGE, number=4, message="QuotaStatus", + proto.MESSAGE, + number=4, + message="QuotaStatus", ) @@ -926,8 +1168,14 @@ class QuotaStatus(proto.Message): Quota remaining after this request. """ - consumed = proto.Field(proto.INT32, number=1,) - remaining = proto.Field(proto.INT32, number=2,) + consumed = proto.Field( + proto.INT32, + number=1, + ) + remaining = proto.Field( + proto.INT32, + number=2, + ) class DimensionMetadata(proto.Message): @@ -954,11 +1202,26 @@ class DimensionMetadata(proto.Message): for this property. 
""" - api_name = proto.Field(proto.STRING, number=1,) - ui_name = proto.Field(proto.STRING, number=2,) - description = proto.Field(proto.STRING, number=3,) - deprecated_api_names = proto.RepeatedField(proto.STRING, number=4,) - custom_definition = proto.Field(proto.BOOL, number=5,) + api_name = proto.Field( + proto.STRING, + number=1, + ) + ui_name = proto.Field( + proto.STRING, + number=2, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names = proto.RepeatedField( + proto.STRING, + number=4, + ) + custom_definition = proto.Field( + proto.BOOL, + number=5, + ) class MetricMetadata(proto.Message): @@ -991,13 +1254,35 @@ class MetricMetadata(proto.Message): this property. """ - api_name = proto.Field(proto.STRING, number=1,) - ui_name = proto.Field(proto.STRING, number=2,) - description = proto.Field(proto.STRING, number=3,) - deprecated_api_names = proto.RepeatedField(proto.STRING, number=4,) - type_ = proto.Field(proto.ENUM, number=5, enum="MetricType",) - expression = proto.Field(proto.STRING, number=6,) - custom_definition = proto.Field(proto.BOOL, number=7,) + api_name = proto.Field( + proto.STRING, + number=1, + ) + ui_name = proto.Field( + proto.STRING, + number=2, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names = proto.RepeatedField( + proto.STRING, + number=4, + ) + type_ = proto.Field( + proto.ENUM, + number=5, + enum="MetricType", + ) + expression = proto.Field( + proto.STRING, + number=6, + ) + custom_definition = proto.Field( + proto.BOOL, + number=7, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py index 851c455253d1..9090976ce15f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py @@ -272,7 +272,12 @@ def sample_run_report(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -345,7 +350,12 @@ def sample_run_pivot_report(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -414,7 +424,12 @@ def sample_batch_run_reports(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -483,7 +498,12 @@ def sample_batch_run_pivot_reports(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -594,7 +614,12 @@ def sample_get_metadata(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -665,7 +690,12 @@ def sample_run_realtime_report(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -748,7 +778,12 @@ def sample_check_compatibility(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -762,7 +797,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-analytics-data",).version, + gapic_version=pkg_resources.get_distribution( + "google-analytics-data", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py index 9c28c87b8d05..f990474e0791 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py @@ -56,7 +56,8 @@ class BetaAnalyticsDataClientMeta(type): _transport_registry["grpc_asyncio"] = BetaAnalyticsDataGrpcAsyncIOTransport def get_transport_class( - cls, label: str = None, + cls, + label: str = None, ) -> Type[BetaAnalyticsDataTransport]: """Returns an appropriate transport class. 
@@ -162,9 +163,13 @@ def transport(self) -> BetaAnalyticsDataTransport: return self._transport @staticmethod - def metadata_path(property: str,) -> str: + def metadata_path( + property: str, + ) -> str: """Returns a fully-qualified metadata string.""" - return "properties/{property}/metadata".format(property=property,) + return "properties/{property}/metadata".format( + property=property, + ) @staticmethod def parse_metadata_path(path: str) -> Dict[str, str]: @@ -173,7 +178,9 @@ def parse_metadata_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -186,9 +193,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -197,9 +208,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -208,9 +223,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -219,10 +238,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -467,7 +490,12 @@ def sample_run_report(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -541,7 +569,12 @@ def sample_run_pivot_report(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -611,7 +644,12 @@ def sample_batch_run_reports(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -681,7 +719,12 @@ def sample_batch_run_pivot_reports(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -792,7 +835,12 @@ def sample_get_metadata(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -864,7 +912,12 @@ def sample_run_realtime_report(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -948,7 +1001,12 @@ def sample_check_compatibility(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -969,7 +1027,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-analytics-data",).version, + gapic_version=pkg_resources.get_distribution( + "google-analytics-data", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py index 88fc7d8ba131..e4a1ce5b5aae 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py @@ -29,7 +29,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-analytics-data",).version, + gapic_version=pkg_resources.get_distribution( + "google-analytics-data", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -122,13 +124,19 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.run_report: gapic_v1.method.wrap_method( - self.run_report, default_timeout=60.0, client_info=client_info, + self.run_report, + default_timeout=60.0, + client_info=client_info, ), self.run_pivot_report: gapic_v1.method.wrap_method( - self.run_pivot_report, default_timeout=60.0, client_info=client_info, + self.run_pivot_report, + default_timeout=60.0, + client_info=client_info, ), self.batch_run_reports: gapic_v1.method.wrap_method( - self.batch_run_reports, default_timeout=60.0, client_info=client_info, + self.batch_run_reports, + default_timeout=60.0, + client_info=client_info, ), self.batch_run_pivot_reports: gapic_v1.method.wrap_method( self.batch_run_pivot_reports, @@ -136,22 +144,28 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.get_metadata: gapic_v1.method.wrap_method( - self.get_metadata, default_timeout=None, client_info=client_info, + self.get_metadata, + default_timeout=None, + client_info=client_info, ), self.run_realtime_report: gapic_v1.method.wrap_method( - self.run_realtime_report, default_timeout=60.0, client_info=client_info, + self.run_realtime_report, + default_timeout=60.0, + client_info=client_info, ), self.check_compatibility: gapic_v1.method.wrap_method( - self.check_compatibility, default_timeout=None, client_info=client_info, + self.check_compatibility, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py index d2d42985112e..946c492bdba0 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py @@ -224,8 +224,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py index 80dfa30bdd49..62bc65c570f8 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py @@ -78,14 +78,35 @@ class CheckCompatibilityRequest(proto.Message): compatible dimensions & metrics. 
""" - property = proto.Field(proto.STRING, number=1,) - dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,) - metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,) + property = proto.Field( + proto.STRING, + number=1, + ) + dimensions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) dimension_filter = proto.Field( - proto.MESSAGE, number=4, message=data.FilterExpression, + proto.MESSAGE, + number=4, + message=data.FilterExpression, + ) + metric_filter = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + compatibility_filter = proto.Field( + proto.ENUM, + number=6, + enum=data.Compatibility, ) - metric_filter = proto.Field(proto.MESSAGE, number=5, message=data.FilterExpression,) - compatibility_filter = proto.Field(proto.ENUM, number=6, enum=data.Compatibility,) class CheckCompatibilityResponse(proto.Message): @@ -100,10 +121,14 @@ class CheckCompatibilityResponse(proto.Message): """ dimension_compatibilities = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.DimensionCompatibility, + proto.MESSAGE, + number=1, + message=data.DimensionCompatibility, ) metric_compatibilities = proto.RepeatedField( - proto.MESSAGE, number=2, message=data.MetricCompatibility, + proto.MESSAGE, + number=2, + message=data.MetricCompatibility, ) @@ -120,11 +145,20 @@ class Metadata(proto.Message): The metric descriptions. """ - name = proto.Field(proto.STRING, number=3,) + name = proto.Field( + proto.STRING, + number=3, + ) dimensions = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.DimensionMetadata, + proto.MESSAGE, + number=1, + message=data.DimensionMetadata, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricMetadata, ) - metrics = proto.RepeatedField(proto.MESSAGE, number=2, message=data.MetricMetadata,) class RunReportRequest(proto.Message): @@ -214,24 +248,70 @@ class RunReportRequest(proto.Message): `PropertyQuota <#PropertyQuota>`__. 
""" - property = proto.Field(proto.STRING, number=1,) - dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,) - metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,) - date_ranges = proto.RepeatedField(proto.MESSAGE, number=4, message=data.DateRange,) + property = proto.Field( + proto.STRING, + number=1, + ) + dimensions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + date_ranges = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.DateRange, + ) dimension_filter = proto.Field( - proto.MESSAGE, number=5, message=data.FilterExpression, + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + metric_filter = proto.Field( + proto.MESSAGE, + number=6, + message=data.FilterExpression, + ) + offset = proto.Field( + proto.INT64, + number=7, + ) + limit = proto.Field( + proto.INT64, + number=8, ) - metric_filter = proto.Field(proto.MESSAGE, number=6, message=data.FilterExpression,) - offset = proto.Field(proto.INT64, number=7,) - limit = proto.Field(proto.INT64, number=8,) metric_aggregations = proto.RepeatedField( - proto.ENUM, number=9, enum=data.MetricAggregation, + proto.ENUM, + number=9, + enum=data.MetricAggregation, + ) + order_bys = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=data.OrderBy, + ) + currency_code = proto.Field( + proto.STRING, + number=11, + ) + cohort_spec = proto.Field( + proto.MESSAGE, + number=12, + message=data.CohortSpec, + ) + keep_empty_rows = proto.Field( + proto.BOOL, + number=13, + ) + return_property_quota = proto.Field( + proto.BOOL, + number=14, ) - order_bys = proto.RepeatedField(proto.MESSAGE, number=10, message=data.OrderBy,) - currency_code = proto.Field(proto.STRING, number=11,) - cohort_spec = proto.Field(proto.MESSAGE, number=12, message=data.CohortSpec,) - keep_empty_rows = proto.Field(proto.BOOL, number=13,) - return_property_quota = proto.Field(proto.BOOL, number=14,) class RunReportResponse(proto.Message): @@ -280,19 +360,53 @@ class RunReportResponse(proto.Message): """ dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.DimensionHeader, + proto.MESSAGE, + number=1, + message=data.DimensionHeader, ) metric_headers = proto.RepeatedField( - proto.MESSAGE, number=2, message=data.MetricHeader, + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + totals = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + maximums = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + minimums = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Row, + ) + row_count = proto.Field( + proto.INT32, + number=7, + ) + metadata = proto.Field( + proto.MESSAGE, + number=8, + message=data.ResponseMetaData, + ) + property_quota = proto.Field( + proto.MESSAGE, + number=9, + message=data.PropertyQuota, + ) + kind = proto.Field( + proto.STRING, + number=10, ) - rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,) - totals = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,) - maximums = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,) - minimums = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Row,) - row_count = proto.Field(proto.INT32, number=7,) - metadata = proto.Field(proto.MESSAGE, number=8, message=data.ResponseMetaData,) - 
property_quota = proto.Field(proto.MESSAGE, number=9, message=data.PropertyQuota,) - kind = proto.Field(proto.STRING, number=10,) class RunPivotReportRequest(proto.Message): @@ -361,19 +475,57 @@ class RunPivotReportRequest(proto.Message): `PropertyQuota <#PropertyQuota>`__. """ - property = proto.Field(proto.STRING, number=1,) - dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,) - metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,) - date_ranges = proto.RepeatedField(proto.MESSAGE, number=4, message=data.DateRange,) - pivots = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Pivot,) + property = proto.Field( + proto.STRING, + number=1, + ) + dimensions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + date_ranges = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.DateRange, + ) + pivots = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Pivot, + ) dimension_filter = proto.Field( - proto.MESSAGE, number=6, message=data.FilterExpression, + proto.MESSAGE, + number=6, + message=data.FilterExpression, + ) + metric_filter = proto.Field( + proto.MESSAGE, + number=7, + message=data.FilterExpression, + ) + currency_code = proto.Field( + proto.STRING, + number=8, + ) + cohort_spec = proto.Field( + proto.MESSAGE, + number=9, + message=data.CohortSpec, + ) + keep_empty_rows = proto.Field( + proto.BOOL, + number=10, + ) + return_property_quota = proto.Field( + proto.BOOL, + number=11, ) - metric_filter = proto.Field(proto.MESSAGE, number=7, message=data.FilterExpression,) - currency_code = proto.Field(proto.STRING, number=8,) - cohort_spec = proto.Field(proto.MESSAGE, number=9, message=data.CohortSpec,) - keep_empty_rows = proto.Field(proto.BOOL, number=10,) - return_property_quota = proto.Field(proto.BOOL, number=11,) class RunPivotReportResponse(proto.Message): @@ -453,19 +605,44 @@ class RunPivotReportResponse(proto.Message): """ pivot_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.PivotHeader, + proto.MESSAGE, + number=1, + message=data.PivotHeader, ) dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=2, message=data.DimensionHeader, + proto.MESSAGE, + number=2, + message=data.DimensionHeader, ) metric_headers = proto.RepeatedField( - proto.MESSAGE, number=3, message=data.MetricHeader, + proto.MESSAGE, + number=3, + message=data.MetricHeader, + ) + rows = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + aggregates = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + metadata = proto.Field( + proto.MESSAGE, + number=6, + message=data.ResponseMetaData, + ) + property_quota = proto.Field( + proto.MESSAGE, + number=7, + message=data.PropertyQuota, + ) + kind = proto.Field( + proto.STRING, + number=8, ) - rows = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,) - aggregates = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,) - metadata = proto.Field(proto.MESSAGE, number=6, message=data.ResponseMetaData,) - property_quota = proto.Field(proto.MESSAGE, number=7, message=data.PropertyQuota,) - kind = proto.Field(proto.STRING, number=8,) class BatchRunReportsRequest(proto.Message): @@ -488,8 +665,15 @@ class BatchRunReportsRequest(proto.Message): allowed up to 5 requests. 
""" - property = proto.Field(proto.STRING, number=1,) - requests = proto.RepeatedField(proto.MESSAGE, number=2, message="RunReportRequest",) + property = proto.Field( + proto.STRING, + number=1, + ) + requests = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RunReportRequest", + ) class BatchRunReportsResponse(proto.Message): @@ -506,8 +690,15 @@ class BatchRunReportsResponse(proto.Message): between response types in JSON. """ - reports = proto.RepeatedField(proto.MESSAGE, number=1, message="RunReportResponse",) - kind = proto.Field(proto.STRING, number=2,) + reports = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RunReportResponse", + ) + kind = proto.Field( + proto.STRING, + number=2, + ) class BatchRunPivotReportsRequest(proto.Message): @@ -530,9 +721,14 @@ class BatchRunPivotReportsRequest(proto.Message): request is allowed up to 5 requests. """ - property = proto.Field(proto.STRING, number=1,) + property = proto.Field( + proto.STRING, + number=1, + ) requests = proto.RepeatedField( - proto.MESSAGE, number=2, message="RunPivotReportRequest", + proto.MESSAGE, + number=2, + message="RunPivotReportRequest", ) @@ -551,9 +747,14 @@ class BatchRunPivotReportsResponse(proto.Message): """ pivot_reports = proto.RepeatedField( - proto.MESSAGE, number=1, message="RunPivotReportResponse", + proto.MESSAGE, + number=1, + message="RunPivotReportResponse", + ) + kind = proto.Field( + proto.STRING, + number=2, ) - kind = proto.Field(proto.STRING, number=2,) class GetMetadataRequest(proto.Message): @@ -575,7 +776,10 @@ class GetMetadataRequest(proto.Message): not return custom dimensions and metrics. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class RunRealtimeReportRequest(proto.Message): @@ -637,21 +841,52 @@ class RunRealtimeReportRequest(proto.Message): rows for both minute ranges. 
""" - property = proto.Field(proto.STRING, number=1,) - dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,) - metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,) + property = proto.Field( + proto.STRING, + number=1, + ) + dimensions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) dimension_filter = proto.Field( - proto.MESSAGE, number=4, message=data.FilterExpression, + proto.MESSAGE, + number=4, + message=data.FilterExpression, + ) + metric_filter = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + limit = proto.Field( + proto.INT64, + number=6, ) - metric_filter = proto.Field(proto.MESSAGE, number=5, message=data.FilterExpression,) - limit = proto.Field(proto.INT64, number=6,) metric_aggregations = proto.RepeatedField( - proto.ENUM, number=7, enum=data.MetricAggregation, + proto.ENUM, + number=7, + enum=data.MetricAggregation, + ) + order_bys = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=data.OrderBy, + ) + return_property_quota = proto.Field( + proto.BOOL, + number=9, ) - order_bys = proto.RepeatedField(proto.MESSAGE, number=8, message=data.OrderBy,) - return_property_quota = proto.Field(proto.BOOL, number=9,) minute_ranges = proto.RepeatedField( - proto.MESSAGE, number=10, message=data.MinuteRange, + proto.MESSAGE, + number=10, + message=data.MinuteRange, ) @@ -696,18 +931,48 @@ class RunRealtimeReportResponse(proto.Message): """ dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message=data.DimensionHeader, + proto.MESSAGE, + number=1, + message=data.DimensionHeader, ) metric_headers = proto.RepeatedField( - proto.MESSAGE, number=2, message=data.MetricHeader, - ) - rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,) - totals = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,) - maximums = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,) - minimums = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Row,) - row_count = proto.Field(proto.INT32, number=7,) - property_quota = proto.Field(proto.MESSAGE, number=8, message=data.PropertyQuota,) - kind = proto.Field(proto.STRING, number=9,) + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + totals = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + maximums = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + minimums = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Row, + ) + row_count = proto.Field( + proto.INT32, + number=7, + ) + property_quota = proto.Field( + proto.MESSAGE, + number=8, + message=data.PropertyQuota, + ) + kind = proto.Field( + proto.STRING, + number=9, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py b/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py index 9afb17b9f5c9..e442d0e0e0d4 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py @@ -123,9 +123,18 @@ class DateRange(proto.Message): in the request: ``date_range_0``, ``date_range_1``, etc. 
""" - start_date = proto.Field(proto.STRING, number=1,) - end_date = proto.Field(proto.STRING, number=2,) - name = proto.Field(proto.STRING, number=3,) + start_date = proto.Field( + proto.STRING, + number=1, + ) + end_date = proto.Field( + proto.STRING, + number=2, + ) + name = proto.Field( + proto.STRING, + number=3, + ) class MinuteRange(proto.Message): @@ -169,9 +178,20 @@ class MinuteRange(proto.Message): etc. """ - start_minutes_ago = proto.Field(proto.INT32, number=1, optional=True,) - end_minutes_ago = proto.Field(proto.INT32, number=2, optional=True,) - name = proto.Field(proto.STRING, number=3,) + start_minutes_ago = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + end_minutes_ago = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + name = proto.Field( + proto.STRING, + number=3, + ) class Dimension(proto.Message): @@ -204,9 +224,14 @@ class Dimension(proto.Message): ", ", city). """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) dimension_expression = proto.Field( - proto.MESSAGE, number=2, message="DimensionExpression", + proto.MESSAGE, + number=2, + message="DimensionExpression", ) @@ -252,7 +277,10 @@ class CaseExpression(proto.Message): to a name in dimensions field of the request. """ - dimension_name = proto.Field(proto.STRING, number=1,) + dimension_name = proto.Field( + proto.STRING, + number=1, + ) class ConcatenateExpression(proto.Message): r"""Used to combine dimension values to a single dimension. @@ -273,17 +301,32 @@ class ConcatenateExpression(proto.Message): response will contain "US,FR,JP". """ - dimension_names = proto.RepeatedField(proto.STRING, number=1,) - delimiter = proto.Field(proto.STRING, number=2,) + dimension_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + delimiter = proto.Field( + proto.STRING, + number=2, + ) lower_case = proto.Field( - proto.MESSAGE, number=4, oneof="one_expression", message=CaseExpression, + proto.MESSAGE, + number=4, + oneof="one_expression", + message=CaseExpression, ) upper_case = proto.Field( - proto.MESSAGE, number=5, oneof="one_expression", message=CaseExpression, + proto.MESSAGE, + number=5, + oneof="one_expression", + message=CaseExpression, ) concatenate = proto.Field( - proto.MESSAGE, number=6, oneof="one_expression", message=ConcatenateExpression, + proto.MESSAGE, + number=6, + oneof="one_expression", + message=ConcatenateExpression, ) @@ -318,9 +361,18 @@ class Metric(proto.Message): ``orderBys``, or a metric ``expression``. 
""" - name = proto.Field(proto.STRING, number=1,) - expression = proto.Field(proto.STRING, number=2,) - invisible = proto.Field(proto.BOOL, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + expression = proto.Field( + proto.STRING, + number=2, + ) + invisible = proto.Field( + proto.BOOL, + number=3, + ) class FilterExpression(proto.Message): @@ -357,15 +409,29 @@ class FilterExpression(proto.Message): """ and_group = proto.Field( - proto.MESSAGE, number=1, oneof="expr", message="FilterExpressionList", + proto.MESSAGE, + number=1, + oneof="expr", + message="FilterExpressionList", ) or_group = proto.Field( - proto.MESSAGE, number=2, oneof="expr", message="FilterExpressionList", + proto.MESSAGE, + number=2, + oneof="expr", + message="FilterExpressionList", ) not_expression = proto.Field( - proto.MESSAGE, number=3, oneof="expr", message="FilterExpression", + proto.MESSAGE, + number=3, + oneof="expr", + message="FilterExpression", + ) + filter = proto.Field( + proto.MESSAGE, + number=4, + oneof="expr", + message="Filter", ) - filter = proto.Field(proto.MESSAGE, number=4, oneof="expr", message="Filter",) class FilterExpressionList(proto.Message): @@ -377,7 +443,9 @@ class FilterExpressionList(proto.Message): """ expressions = proto.RepeatedField( - proto.MESSAGE, number=1, message="FilterExpression", + proto.MESSAGE, + number=1, + message="FilterExpression", ) @@ -436,10 +504,18 @@ class MatchType(proto.Enum): PARTIAL_REGEXP = 6 match_type = proto.Field( - proto.ENUM, number=1, enum="Filter.StringFilter.MatchType", + proto.ENUM, + number=1, + enum="Filter.StringFilter.MatchType", + ) + value = proto.Field( + proto.STRING, + number=2, + ) + case_sensitive = proto.Field( + proto.BOOL, + number=3, ) - value = proto.Field(proto.STRING, number=2,) - case_sensitive = proto.Field(proto.BOOL, number=3,) class InListFilter(proto.Message): r"""The result needs to be in a list of string values. @@ -452,8 +528,14 @@ class InListFilter(proto.Message): If true, the string value is case sensitive. """ - values = proto.RepeatedField(proto.STRING, number=1,) - case_sensitive = proto.Field(proto.BOOL, number=2,) + values = proto.RepeatedField( + proto.STRING, + number=1, + ) + case_sensitive = proto.Field( + proto.BOOL, + number=2, + ) class NumericFilter(proto.Message): r"""Filters for numeric or date values. @@ -475,9 +557,15 @@ class Operation(proto.Enum): GREATER_THAN_OR_EQUAL = 5 operation = proto.Field( - proto.ENUM, number=1, enum="Filter.NumericFilter.Operation", + proto.ENUM, + number=1, + enum="Filter.NumericFilter.Operation", + ) + value = proto.Field( + proto.MESSAGE, + number=2, + message="NumericValue", ) - value = proto.Field(proto.MESSAGE, number=2, message="NumericValue",) class BetweenFilter(proto.Message): r"""To express that the result needs to be between two numbers @@ -490,21 +578,44 @@ class BetweenFilter(proto.Message): Ends with this number. 
""" - from_value = proto.Field(proto.MESSAGE, number=1, message="NumericValue",) - to_value = proto.Field(proto.MESSAGE, number=2, message="NumericValue",) + from_value = proto.Field( + proto.MESSAGE, + number=1, + message="NumericValue", + ) + to_value = proto.Field( + proto.MESSAGE, + number=2, + message="NumericValue", + ) - field_name = proto.Field(proto.STRING, number=1,) + field_name = proto.Field( + proto.STRING, + number=1, + ) string_filter = proto.Field( - proto.MESSAGE, number=3, oneof="one_filter", message=StringFilter, + proto.MESSAGE, + number=3, + oneof="one_filter", + message=StringFilter, ) in_list_filter = proto.Field( - proto.MESSAGE, number=4, oneof="one_filter", message=InListFilter, + proto.MESSAGE, + number=4, + oneof="one_filter", + message=InListFilter, ) numeric_filter = proto.Field( - proto.MESSAGE, number=5, oneof="one_filter", message=NumericFilter, + proto.MESSAGE, + number=5, + oneof="one_filter", + message=NumericFilter, ) between_filter = proto.Field( - proto.MESSAGE, number=6, oneof="one_filter", message=BetweenFilter, + proto.MESSAGE, + number=6, + oneof="one_filter", + message=BetweenFilter, ) @@ -544,7 +655,10 @@ class MetricOrderBy(proto.Message): A metric name in the request to order by. """ - metric_name = proto.Field(proto.STRING, number=1,) + metric_name = proto.Field( + proto.STRING, + number=1, + ) class DimensionOrderBy(proto.Message): r"""Sorts by dimension values. @@ -564,9 +678,14 @@ class OrderType(proto.Enum): CASE_INSENSITIVE_ALPHANUMERIC = 2 NUMERIC = 3 - dimension_name = proto.Field(proto.STRING, number=1,) + dimension_name = proto.Field( + proto.STRING, + number=1, + ) order_type = proto.Field( - proto.ENUM, number=2, enum="OrderBy.DimensionOrderBy.OrderType", + proto.ENUM, + number=2, + enum="OrderBy.DimensionOrderBy.OrderType", ) class PivotOrderBy(proto.Message): @@ -613,24 +732,47 @@ class PivotSelection(proto.Message): this value. """ - dimension_name = proto.Field(proto.STRING, number=1,) - dimension_value = proto.Field(proto.STRING, number=2,) + dimension_name = proto.Field( + proto.STRING, + number=1, + ) + dimension_value = proto.Field( + proto.STRING, + number=2, + ) - metric_name = proto.Field(proto.STRING, number=1,) + metric_name = proto.Field( + proto.STRING, + number=1, + ) pivot_selections = proto.RepeatedField( - proto.MESSAGE, number=2, message="OrderBy.PivotOrderBy.PivotSelection", + proto.MESSAGE, + number=2, + message="OrderBy.PivotOrderBy.PivotSelection", ) metric = proto.Field( - proto.MESSAGE, number=1, oneof="one_order_by", message=MetricOrderBy, + proto.MESSAGE, + number=1, + oneof="one_order_by", + message=MetricOrderBy, ) dimension = proto.Field( - proto.MESSAGE, number=2, oneof="one_order_by", message=DimensionOrderBy, + proto.MESSAGE, + number=2, + oneof="one_order_by", + message=DimensionOrderBy, ) pivot = proto.Field( - proto.MESSAGE, number=3, oneof="one_order_by", message=PivotOrderBy, + proto.MESSAGE, + number=3, + oneof="one_order_by", + message=PivotOrderBy, + ) + desc = proto.Field( + proto.BOOL, + number=4, ) - desc = proto.Field(proto.BOOL, number=4,) class Pivot(proto.Message): @@ -669,12 +811,27 @@ class Pivot(proto.Message): specified metric_aggregations. 
""" - field_names = proto.RepeatedField(proto.STRING, number=1,) - order_bys = proto.RepeatedField(proto.MESSAGE, number=2, message="OrderBy",) - offset = proto.Field(proto.INT64, number=3,) - limit = proto.Field(proto.INT64, number=4,) + field_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + order_bys = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OrderBy", + ) + offset = proto.Field( + proto.INT64, + number=3, + ) + limit = proto.Field( + proto.INT64, + number=4, + ) metric_aggregations = proto.RepeatedField( - proto.ENUM, number=5, enum="MetricAggregation", + proto.ENUM, + number=5, + enum="MetricAggregation", ) @@ -714,10 +871,20 @@ class CohortSpec(proto.Message): Optional settings for a cohort report. """ - cohorts = proto.RepeatedField(proto.MESSAGE, number=1, message="Cohort",) - cohorts_range = proto.Field(proto.MESSAGE, number=2, message="CohortsRange",) + cohorts = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Cohort", + ) + cohorts_range = proto.Field( + proto.MESSAGE, + number=2, + message="CohortsRange", + ) cohort_report_settings = proto.Field( - proto.MESSAGE, number=3, message="CohortReportSettings", + proto.MESSAGE, + number=3, + message="CohortReportSettings", ) @@ -760,9 +927,19 @@ class Cohort(proto.Message): month. """ - name = proto.Field(proto.STRING, number=1,) - dimension = proto.Field(proto.STRING, number=2,) - date_range = proto.Field(proto.MESSAGE, number=3, message="DateRange",) + name = proto.Field( + proto.STRING, + number=1, + ) + dimension = proto.Field( + proto.STRING, + number=2, + ) + date_range = proto.Field( + proto.MESSAGE, + number=3, + message="DateRange", + ) class CohortsRange(proto.Message): @@ -821,9 +998,19 @@ class Granularity(proto.Enum): WEEKLY = 2 MONTHLY = 3 - granularity = proto.Field(proto.ENUM, number=1, enum=Granularity,) - start_offset = proto.Field(proto.INT32, number=2,) - end_offset = proto.Field(proto.INT32, number=3,) + granularity = proto.Field( + proto.ENUM, + number=1, + enum=Granularity, + ) + start_offset = proto.Field( + proto.INT32, + number=2, + ) + end_offset = proto.Field( + proto.INT32, + number=3, + ) class CohortReportSettings(proto.Message): @@ -835,7 +1022,10 @@ class CohortReportSettings(proto.Message): end day. Not supported in ``RunReportRequest``. """ - accumulate = proto.Field(proto.BOOL, number=1,) + accumulate = proto.Field( + proto.BOOL, + number=1, + ) class ResponseMetaData(proto.Message): @@ -910,9 +1100,15 @@ class ActiveMetricRestriction(proto.Message): The reason for this metric's restriction. 
""" - metric_name = proto.Field(proto.STRING, number=1, optional=True,) + metric_name = proto.Field( + proto.STRING, + number=1, + optional=True, + ) restricted_metric_types = proto.RepeatedField( - proto.ENUM, number=2, enum="RestrictedMetricType", + proto.ENUM, + number=2, + enum="RestrictedMetricType", ) active_metric_restrictions = proto.RepeatedField( @@ -921,13 +1117,31 @@ class ActiveMetricRestriction(proto.Message): message="ResponseMetaData.SchemaRestrictionResponse.ActiveMetricRestriction", ) - data_loss_from_other_row = proto.Field(proto.BOOL, number=3,) + data_loss_from_other_row = proto.Field( + proto.BOOL, + number=3, + ) schema_restriction_response = proto.Field( - proto.MESSAGE, number=4, optional=True, message=SchemaRestrictionResponse, + proto.MESSAGE, + number=4, + optional=True, + message=SchemaRestrictionResponse, + ) + currency_code = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + time_zone = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + empty_reason = proto.Field( + proto.STRING, + number=7, + optional=True, ) - currency_code = proto.Field(proto.STRING, number=5, optional=True,) - time_zone = proto.Field(proto.STRING, number=6, optional=True,) - empty_reason = proto.Field(proto.STRING, number=7, optional=True,) class DimensionHeader(proto.Message): @@ -942,7 +1156,10 @@ class DimensionHeader(proto.Message): The dimension's name. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class MetricHeader(proto.Message): @@ -959,8 +1176,15 @@ class MetricHeader(proto.Message): The metric's data type. """ - name = proto.Field(proto.STRING, number=1,) - type_ = proto.Field(proto.ENUM, number=2, enum="MetricType",) + name = proto.Field( + proto.STRING, + number=1, + ) + type_ = proto.Field( + proto.ENUM, + number=2, + enum="MetricType", + ) class PivotHeader(proto.Message): @@ -977,9 +1201,14 @@ class PivotHeader(proto.Message): """ pivot_dimension_headers = proto.RepeatedField( - proto.MESSAGE, number=1, message="PivotDimensionHeader", + proto.MESSAGE, + number=1, + message="PivotDimensionHeader", + ) + row_count = proto.Field( + proto.INT32, + number=2, ) - row_count = proto.Field(proto.INT32, number=2,) class PivotDimensionHeader(proto.Message): @@ -991,7 +1220,9 @@ class PivotDimensionHeader(proto.Message): """ dimension_values = proto.RepeatedField( - proto.MESSAGE, number=1, message="DimensionValue", + proto.MESSAGE, + number=1, + message="DimensionValue", ) @@ -1043,9 +1274,15 @@ class Row(proto.Message): """ dimension_values = proto.RepeatedField( - proto.MESSAGE, number=1, message="DimensionValue", + proto.MESSAGE, + number=1, + message="DimensionValue", + ) + metric_values = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="MetricValue", ) - metric_values = proto.RepeatedField(proto.MESSAGE, number=2, message="MetricValue",) class DimensionValue(proto.Message): @@ -1061,7 +1298,11 @@ class DimensionValue(proto.Message): This field is a member of `oneof`_ ``one_value``. """ - value = proto.Field(proto.STRING, number=1, oneof="one_value",) + value = proto.Field( + proto.STRING, + number=1, + oneof="one_value", + ) class MetricValue(proto.Message): @@ -1076,7 +1317,11 @@ class MetricValue(proto.Message): This field is a member of `oneof`_ ``one_value``. 
""" - value = proto.Field(proto.STRING, number=4, oneof="one_value",) + value = proto.Field( + proto.STRING, + number=4, + oneof="one_value", + ) class NumericValue(proto.Message): @@ -1100,8 +1345,16 @@ class NumericValue(proto.Message): This field is a member of `oneof`_ ``one_value``. """ - int64_value = proto.Field(proto.INT64, number=1, oneof="one_value",) - double_value = proto.Field(proto.DOUBLE, number=2, oneof="one_value",) + int64_value = proto.Field( + proto.INT64, + number=1, + oneof="one_value", + ) + double_value = proto.Field( + proto.DOUBLE, + number=2, + oneof="one_value", + ) class PropertyQuota(proto.Message): @@ -1141,14 +1394,30 @@ class PropertyQuota(proto.Message): dimensions. """ - tokens_per_day = proto.Field(proto.MESSAGE, number=1, message="QuotaStatus",) - tokens_per_hour = proto.Field(proto.MESSAGE, number=2, message="QuotaStatus",) - concurrent_requests = proto.Field(proto.MESSAGE, number=3, message="QuotaStatus",) + tokens_per_day = proto.Field( + proto.MESSAGE, + number=1, + message="QuotaStatus", + ) + tokens_per_hour = proto.Field( + proto.MESSAGE, + number=2, + message="QuotaStatus", + ) + concurrent_requests = proto.Field( + proto.MESSAGE, + number=3, + message="QuotaStatus", + ) server_errors_per_project_per_hour = proto.Field( - proto.MESSAGE, number=4, message="QuotaStatus", + proto.MESSAGE, + number=4, + message="QuotaStatus", ) potentially_thresholded_requests_per_hour = proto.Field( - proto.MESSAGE, number=5, message="QuotaStatus", + proto.MESSAGE, + number=5, + message="QuotaStatus", ) @@ -1162,8 +1431,14 @@ class QuotaStatus(proto.Message): Quota remaining after this request. """ - consumed = proto.Field(proto.INT32, number=1,) - remaining = proto.Field(proto.INT32, number=2,) + consumed = proto.Field( + proto.INT32, + number=1, + ) + remaining = proto.Field( + proto.INT32, + number=2, + ) class DimensionMetadata(proto.Message): @@ -1195,12 +1470,30 @@ class DimensionMetadata(proto.Message): metrics are categorized together. 
""" - api_name = proto.Field(proto.STRING, number=1,) - ui_name = proto.Field(proto.STRING, number=2,) - description = proto.Field(proto.STRING, number=3,) - deprecated_api_names = proto.RepeatedField(proto.STRING, number=4,) - custom_definition = proto.Field(proto.BOOL, number=5,) - category = proto.Field(proto.STRING, number=7,) + api_name = proto.Field( + proto.STRING, + number=1, + ) + ui_name = proto.Field( + proto.STRING, + number=2, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names = proto.RepeatedField( + proto.STRING, + number=4, + ) + custom_definition = proto.Field( + proto.BOOL, + number=5, + ) + category = proto.Field( + proto.STRING, + number=7, + ) class MetricMetadata(proto.Message): @@ -1254,15 +1547,44 @@ class BlockedReason(proto.Enum): NO_REVENUE_METRICS = 1 NO_COST_METRICS = 2 - api_name = proto.Field(proto.STRING, number=1,) - ui_name = proto.Field(proto.STRING, number=2,) - description = proto.Field(proto.STRING, number=3,) - deprecated_api_names = proto.RepeatedField(proto.STRING, number=4,) - type_ = proto.Field(proto.ENUM, number=5, enum="MetricType",) - expression = proto.Field(proto.STRING, number=6,) - custom_definition = proto.Field(proto.BOOL, number=7,) - blocked_reasons = proto.RepeatedField(proto.ENUM, number=8, enum=BlockedReason,) - category = proto.Field(proto.STRING, number=10,) + api_name = proto.Field( + proto.STRING, + number=1, + ) + ui_name = proto.Field( + proto.STRING, + number=2, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names = proto.RepeatedField( + proto.STRING, + number=4, + ) + type_ = proto.Field( + proto.ENUM, + number=5, + enum="MetricType", + ) + expression = proto.Field( + proto.STRING, + number=6, + ) + custom_definition = proto.Field( + proto.BOOL, + number=7, + ) + blocked_reasons = proto.RepeatedField( + proto.ENUM, + number=8, + enum=BlockedReason, + ) + category = proto.Field( + proto.STRING, + number=10, + ) class DimensionCompatibility(proto.Message): @@ -1285,10 +1607,16 @@ class DimensionCompatibility(proto.Message): """ dimension_metadata = proto.Field( - proto.MESSAGE, number=1, optional=True, message="DimensionMetadata", + proto.MESSAGE, + number=1, + optional=True, + message="DimensionMetadata", ) compatibility = proto.Field( - proto.ENUM, number=2, optional=True, enum="Compatibility", + proto.ENUM, + number=2, + optional=True, + enum="Compatibility", ) @@ -1312,10 +1640,16 @@ class MetricCompatibility(proto.Message): """ metric_metadata = proto.Field( - proto.MESSAGE, number=1, optional=True, message="MetricMetadata", + proto.MESSAGE, + number=1, + optional=True, + message="MetricMetadata", ) compatibility = proto.Field( - proto.ENUM, number=2, optional=True, enum="Compatibility", + proto.ENUM, + number=2, + optional=True, + enum="Compatibility", ) diff --git a/packages/google-analytics-data/noxfile.py b/packages/google-analytics-data/noxfile.py index de41c6bf9af5..4a0cf8289979 100644 --- a/packages/google-analytics-data/noxfile.py +++ b/packages/google-analytics-data/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -57,7 +57,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +69,8 @@ def blacken(session): """Run black. 
Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/packages/google-analytics-data/samples/snippets/noxfile.py b/packages/google-analytics-data/samples/snippets/noxfile.py index 4c808af73ea2..949e0fde9ae1 100644 --- a/packages/google-analytics-data/samples/snippets/noxfile.py +++ b/packages/google-analytics-data/samples/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 9248676a8f14..ecd868911131 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -107,7 +107,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [AlphaAnalyticsDataClient, AlphaAnalyticsDataAsyncClient,] + "client_class", + [ + AlphaAnalyticsDataClient, + AlphaAnalyticsDataAsyncClient, + ], ) def test_alpha_analytics_data_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -149,7 +153,11 @@ def test_alpha_analytics_data_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [AlphaAnalyticsDataClient, AlphaAnalyticsDataAsyncClient,] + "client_class", + [ + AlphaAnalyticsDataClient, + AlphaAnalyticsDataAsyncClient, + ], ) def test_alpha_analytics_data_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -442,7 +450,9 @@ def test_alpha_analytics_data_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -513,7 +523,8 @@ def test_run_report( transport: str = "grpc", request_type=analytics_data_api.RunReportRequest ): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -523,7 +534,9 @@ def test_run_report( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_report), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.RunReportResponse(row_count=992,) + call.return_value = analytics_data_api.RunReportResponse( + row_count=992, + ) response = client.run_report(request) # Establish that the underlying gRPC stub method was called. @@ -544,7 +557,8 @@ def test_run_report_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -560,7 +574,8 @@ async def test_run_report_async( transport: str = "grpc_asyncio", request_type=analytics_data_api.RunReportRequest ): client = AlphaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -571,7 +586,9 @@ async def test_run_report_async( with mock.patch.object(type(client.transport.run_report), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RunReportResponse(row_count=992,) + analytics_data_api.RunReportResponse( + row_count=992, + ) ) response = await client.run_report(request) @@ -594,7 +611,8 @@ def test_run_pivot_report( transport: str = "grpc", request_type=analytics_data_api.RunPivotReportRequest ): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -624,7 +642,8 @@ def test_run_pivot_report_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -641,7 +660,8 @@ async def test_run_pivot_report_async( request_type=analytics_data_api.RunPivotReportRequest, ): client = AlphaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -674,7 +694,8 @@ def test_batch_run_reports( transport: str = "grpc", request_type=analytics_data_api.BatchRunReportsRequest ): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -706,7 +727,8 @@ def test_batch_run_reports_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -725,7 +747,8 @@ async def test_batch_run_reports_async( request_type=analytics_data_api.BatchRunReportsRequest, ): client = AlphaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -760,7 +783,8 @@ def test_batch_run_pivot_reports( transport: str = "grpc", request_type=analytics_data_api.BatchRunPivotReportsRequest ): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -792,7 +816,8 @@ def test_batch_run_pivot_reports_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -811,7 +836,8 @@ async def test_batch_run_pivot_reports_async( request_type=analytics_data_api.BatchRunPivotReportsRequest, ): client = AlphaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -846,7 +872,8 @@ def test_get_metadata( transport: str = "grpc", request_type=analytics_data_api.GetMetadataRequest ): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -856,7 +883,9 @@ def test_get_metadata( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.Metadata(name="name_value",) + call.return_value = analytics_data_api.Metadata( + name="name_value", + ) response = client.get_metadata(request) # Establish that the underlying gRPC stub method was called. @@ -877,7 +906,8 @@ def test_get_metadata_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -893,7 +923,8 @@ async def test_get_metadata_async( transport: str = "grpc_asyncio", request_type=analytics_data_api.GetMetadataRequest ): client = AlphaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -904,7 +935,9 @@ async def test_get_metadata_async( with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.Metadata(name="name_value",) + analytics_data_api.Metadata( + name="name_value", + ) ) response = await client.get_metadata(request) @@ -946,7 +979,10 @@ def test_get_metadata_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -975,7 +1011,10 @@ async def test_get_metadata_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_metadata_flattened(): @@ -989,7 +1028,9 @@ def test_get_metadata_flattened(): call.return_value = analytics_data_api.Metadata() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_metadata(name="name_value",) + client.get_metadata( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1007,7 +1048,8 @@ def test_get_metadata_flattened_error(): # fields is an error. with pytest.raises(ValueError): client.get_metadata( - analytics_data_api.GetMetadataRequest(), name="name_value", + analytics_data_api.GetMetadataRequest(), + name="name_value", ) @@ -1027,7 +1069,9 @@ async def test_get_metadata_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_metadata(name="name_value",) + response = await client.get_metadata( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1046,7 +1090,8 @@ async def test_get_metadata_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.get_metadata( - analytics_data_api.GetMetadataRequest(), name="name_value", + analytics_data_api.GetMetadataRequest(), + name="name_value", ) @@ -1054,7 +1099,8 @@ def test_run_realtime_report( transport: str = "grpc", request_type=analytics_data_api.RunRealtimeReportRequest ): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1066,7 +1112,9 @@ def test_run_realtime_report( type(client.transport.run_realtime_report), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.RunRealtimeReportResponse(row_count=992,) + call.return_value = analytics_data_api.RunRealtimeReportResponse( + row_count=992, + ) response = client.run_realtime_report(request) # Establish that the underlying gRPC stub method was called. @@ -1087,7 +1135,8 @@ def test_run_realtime_report_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1106,7 +1155,8 @@ async def test_run_realtime_report_async( request_type=analytics_data_api.RunRealtimeReportRequest, ): client = AlphaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1119,7 +1169,9 @@ async def test_run_realtime_report_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RunRealtimeReportResponse(row_count=992,) + analytics_data_api.RunRealtimeReportResponse( + row_count=992, + ) ) response = await client.run_realtime_report(request) @@ -1163,7 +1215,10 @@ def test_run_realtime_report_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1194,7 +1249,10 @@ async def test_run_realtime_report_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] def test_credentials_transport_error(): @@ -1204,7 +1262,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1223,7 +1282,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = AlphaAnalyticsDataClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1271,7 +1331,10 @@ def test_transport_grpc_default(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) - assert isinstance(client.transport, transports.AlphaAnalyticsDataGrpcTransport,) + assert isinstance( + client.transport, + transports.AlphaAnalyticsDataGrpcTransport, + ) def test_alpha_analytics_data_base_transport_error(): @@ -1319,7 +1382,8 @@ def test_alpha_analytics_data_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AlphaAnalyticsDataTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1343,7 +1407,8 @@ def test_alpha_analytics_data_base_transport_with_credentials_file_old_google_au Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AlphaAnalyticsDataTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1554,7 +1619,8 @@ def test_alpha_analytics_data_grpc_transport_channel(): # Check that channel is used if provided. 
transport = transports.AlphaAnalyticsDataGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1566,7 +1632,8 @@ def test_alpha_analytics_data_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.AlphaAnalyticsDataGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1675,7 +1742,9 @@ def test_alpha_analytics_data_transport_channel_mtls_with_adc(transport_class): def test_metadata_path(): property = "squid" - expected = "properties/{property}/metadata".format(property=property,) + expected = "properties/{property}/metadata".format( + property=property, + ) actual = AlphaAnalyticsDataClient.metadata_path(property) assert expected == actual @@ -1713,7 +1782,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = AlphaAnalyticsDataClient.common_folder_path(folder) assert expected == actual @@ -1731,7 +1802,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = AlphaAnalyticsDataClient.common_organization_path(organization) assert expected == actual @@ -1749,7 +1822,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = AlphaAnalyticsDataClient.common_project_path(project) assert expected == actual @@ -1769,7 +1844,8 @@ def test_common_location_path(): project = "scallop" location = "abalone" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = AlphaAnalyticsDataClient.common_location_path(project, location) assert expected == actual @@ -1794,7 +1870,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.AlphaAnalyticsDataTransport, "_prep_wrapped_messages" ) as prep: client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1803,6 +1880,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = AlphaAnalyticsDataClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py index 0bfbb4ac55bc..ab395319703b 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -90,7 +90,11 @@ 
def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [BetaAnalyticsDataClient, BetaAnalyticsDataAsyncClient,] + "client_class", + [ + BetaAnalyticsDataClient, + BetaAnalyticsDataAsyncClient, + ], ) def test_beta_analytics_data_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -132,7 +136,11 @@ def test_beta_analytics_data_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [BetaAnalyticsDataClient, BetaAnalyticsDataAsyncClient,] + "client_class", + [ + BetaAnalyticsDataClient, + BetaAnalyticsDataAsyncClient, + ], ) def test_beta_analytics_data_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -506,7 +514,9 @@ def test_beta_analytics_data_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -649,10 +659,17 @@ def test_beta_analytics_data_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [analytics_data_api.RunReportRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunReportRequest, + dict, + ], +) def test_run_report(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -663,7 +680,8 @@ def test_run_report(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.run_report), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = analytics_data_api.RunReportResponse( - row_count=992, kind="kind_value", + row_count=992, + kind="kind_value", ) response = client.run_report(request) @@ -682,7 +700,8 @@ def test_run_report_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -698,7 +717,8 @@ async def test_run_report_async( transport: str = "grpc_asyncio", request_type=analytics_data_api.RunReportRequest ): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -709,7 +729,10 @@ async def test_run_report_async( with mock.patch.object(type(client.transport.run_report), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RunReportResponse(row_count=992, kind="kind_value",) + analytics_data_api.RunReportResponse( + row_count=992, + kind="kind_value", + ) ) response = await client.run_report(request) @@ -730,7 +753,9 @@ async def test_run_report_async_from_dict(): def test_run_report_field_headers(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -750,7 +775,10 @@ def test_run_report_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -779,15 +807,23 @@ async def test_run_report_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.parametrize( - "request_type", [analytics_data_api.RunPivotReportRequest, dict,] + "request_type", + [ + analytics_data_api.RunPivotReportRequest, + dict, + ], ) def test_run_pivot_report(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -816,7 +852,8 @@ def test_run_pivot_report_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -833,7 +870,8 @@ async def test_run_pivot_report_async( request_type=analytics_data_api.RunPivotReportRequest, ): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -844,7 +882,9 @@ async def test_run_pivot_report_async( with mock.patch.object(type(client.transport.run_pivot_report), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RunPivotReportResponse(kind="kind_value",) + analytics_data_api.RunPivotReportResponse( + kind="kind_value", + ) ) response = await client.run_pivot_report(request) @@ -864,7 +904,9 @@ async def test_run_pivot_report_async_from_dict(): def test_run_pivot_report_field_headers(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -884,7 +926,10 @@ def test_run_pivot_report_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -913,15 +958,23 @@ async def test_run_pivot_report_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.parametrize( - "request_type", [analytics_data_api.BatchRunReportsRequest, dict,] + "request_type", + [ + analytics_data_api.BatchRunReportsRequest, + dict, + ], ) def test_batch_run_reports(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -952,7 +1005,8 @@ def test_batch_run_reports_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -971,7 +1025,8 @@ async def test_batch_run_reports_async( request_type=analytics_data_api.BatchRunReportsRequest, ): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -984,7 +1039,9 @@ async def test_batch_run_reports_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.BatchRunReportsResponse(kind="kind_value",) + analytics_data_api.BatchRunReportsResponse( + kind="kind_value", + ) ) response = await client.batch_run_reports(request) @@ -1004,7 +1061,9 @@ async def test_batch_run_reports_async_from_dict(): def test_batch_run_reports_field_headers(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1026,7 +1085,10 @@ def test_batch_run_reports_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1057,15 +1119,23 @@ async def test_batch_run_reports_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.parametrize( - "request_type", [analytics_data_api.BatchRunPivotReportsRequest, dict,] + "request_type", + [ + analytics_data_api.BatchRunPivotReportsRequest, + dict, + ], ) def test_batch_run_pivot_reports(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1096,7 +1166,8 @@ def test_batch_run_pivot_reports_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1115,7 +1186,8 @@ async def test_batch_run_pivot_reports_async( request_type=analytics_data_api.BatchRunPivotReportsRequest, ): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1128,7 +1200,9 @@ async def test_batch_run_pivot_reports_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.BatchRunPivotReportsResponse(kind="kind_value",) + analytics_data_api.BatchRunPivotReportsResponse( + kind="kind_value", + ) ) response = await client.batch_run_pivot_reports(request) @@ -1148,7 +1222,9 @@ async def test_batch_run_pivot_reports_async_from_dict(): def test_batch_run_pivot_reports_field_headers(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1170,7 +1246,10 @@ def test_batch_run_pivot_reports_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1201,13 +1280,23 @@ async def test_batch_run_pivot_reports_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [analytics_data_api.GetMetadataRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetMetadataRequest, + dict, + ], +) def test_get_metadata(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1217,7 +1306,9 @@ def test_get_metadata(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.Metadata(name="name_value",) + call.return_value = analytics_data_api.Metadata( + name="name_value", + ) response = client.get_metadata(request) # Establish that the underlying gRPC stub method was called. @@ -1234,7 +1325,8 @@ def test_get_metadata_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1250,7 +1342,8 @@ async def test_get_metadata_async( transport: str = "grpc_asyncio", request_type=analytics_data_api.GetMetadataRequest ): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1261,7 +1354,9 @@ async def test_get_metadata_async( with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.Metadata(name="name_value",) + analytics_data_api.Metadata( + name="name_value", + ) ) response = await client.get_metadata(request) @@ -1281,7 +1376,9 @@ async def test_get_metadata_async_from_dict(): def test_get_metadata_field_headers(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1301,7 +1398,10 @@ def test_get_metadata_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1330,11 +1430,16 @@ async def test_get_metadata_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_metadata_flattened(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: @@ -1342,7 +1447,9 @@ def test_get_metadata_flattened(): call.return_value = analytics_data_api.Metadata() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_metadata(name="name_value",) + client.get_metadata( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1354,13 +1461,16 @@ def test_get_metadata_flattened(): def test_get_metadata_flattened_error(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_metadata( - analytics_data_api.GetMetadataRequest(), name="name_value", + analytics_data_api.GetMetadataRequest(), + name="name_value", ) @@ -1380,7 +1490,9 @@ async def test_get_metadata_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_metadata(name="name_value",) + response = await client.get_metadata( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1401,16 +1513,22 @@ async def test_get_metadata_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.get_metadata( - analytics_data_api.GetMetadataRequest(), name="name_value", + analytics_data_api.GetMetadataRequest(), + name="name_value", ) @pytest.mark.parametrize( - "request_type", [analytics_data_api.RunRealtimeReportRequest, dict,] + "request_type", + [ + analytics_data_api.RunRealtimeReportRequest, + dict, + ], ) def test_run_realtime_report(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1423,7 +1541,8 @@ def test_run_realtime_report(request_type, transport: str = "grpc"): ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_data_api.RunRealtimeReportResponse( - row_count=992, kind="kind_value", + row_count=992, + kind="kind_value", ) response = client.run_realtime_report(request) @@ -1442,7 +1561,8 @@ def test_run_realtime_report_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
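The get_metadata tests above exercise the generated calling convention: each RPC accepts either a prebuilt request object or its flattened fields as keyword arguments, and supplying both is rejected with a ValueError before any request is sent. A minimal sketch of that convention, assuming anonymous credentials as in this module and a placeholder property ID of 1234:

import pytest

from google.auth import credentials as ga_credentials
from google.analytics.data_v1beta import BetaAnalyticsDataClient, GetMetadataRequest

client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials())

# Flattened form: the client builds the GetMetadataRequest itself.
# (Left commented out here; with anonymous credentials the live call would fail.)
# client.get_metadata(name="properties/1234/metadata")

# Mixing a request object with a flattened field fails fast, before any RPC is made.
with pytest.raises(ValueError):
    client.get_metadata(
        GetMetadataRequest(name="properties/1234/metadata"),
        name="properties/1234/metadata",
    )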
@@ -1461,7 +1581,8 @@ async def test_run_realtime_report_async( request_type=analytics_data_api.RunRealtimeReportRequest, ): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1475,7 +1596,8 @@ async def test_run_realtime_report_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_data_api.RunRealtimeReportResponse( - row_count=992, kind="kind_value", + row_count=992, + kind="kind_value", ) ) response = await client.run_realtime_report(request) @@ -1497,7 +1619,9 @@ async def test_run_realtime_report_async_from_dict(): def test_run_realtime_report_field_headers(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1519,7 +1643,10 @@ def test_run_realtime_report_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1550,15 +1677,23 @@ async def test_run_realtime_report_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.parametrize( - "request_type", [analytics_data_api.CheckCompatibilityRequest, dict,] + "request_type", + [ + analytics_data_api.CheckCompatibilityRequest, + dict, + ], ) def test_check_compatibility(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1586,7 +1721,8 @@ def test_check_compatibility_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1605,7 +1741,8 @@ async def test_check_compatibility_async( request_type=analytics_data_api.CheckCompatibilityRequest, ): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1637,7 +1774,9 @@ async def test_check_compatibility_async_from_dict(): def test_check_compatibility_field_headers(): - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -1659,7 +1798,10 @@ def test_check_compatibility_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1690,7 +1832,10 @@ async def test_check_compatibility_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "property=property/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "property=property/value", + ) in kw["metadata"] def test_credentials_transport_error(): @@ -1700,7 +1845,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1720,7 +1866,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = BetaAnalyticsDataClient(client_options=options, transport=transport,) + client = BetaAnalyticsDataClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1736,7 +1885,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = BetaAnalyticsDataClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1781,8 +1931,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = BetaAnalyticsDataClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.BetaAnalyticsDataGrpcTransport,) + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BetaAnalyticsDataGrpcTransport, + ) def test_beta_analytics_data_base_transport_error(): @@ -1833,7 +1988,8 @@ def test_beta_analytics_data_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.BetaAnalyticsDataTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2005,7 +2161,8 @@ def test_beta_analytics_data_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.BetaAnalyticsDataGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2017,7 +2174,8 @@ def test_beta_analytics_data_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.BetaAnalyticsDataGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2126,7 +2284,9 @@ def test_beta_analytics_data_transport_channel_mtls_with_adc(transport_class): def test_metadata_path(): property = "squid" - expected = "properties/{property}/metadata".format(property=property,) + expected = "properties/{property}/metadata".format( + property=property, + ) actual = BetaAnalyticsDataClient.metadata_path(property) assert expected == actual @@ -2164,7 +2324,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = BetaAnalyticsDataClient.common_folder_path(folder) assert expected == actual @@ -2182,7 +2344,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = BetaAnalyticsDataClient.common_organization_path(organization) assert expected == actual @@ -2200,7 +2364,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = BetaAnalyticsDataClient.common_project_path(project) assert expected == actual @@ -2220,7 +2386,8 @@ def test_common_location_path(): project = "scallop" location = "abalone" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = BetaAnalyticsDataClient.common_location_path(project, location) assert expected == actual @@ -2245,7 +2412,8 @@ def test_client_with_default_client_info(): transports.BetaAnalyticsDataTransport, "_prep_wrapped_messages" ) as prep: client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2254,7 +2422,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = BetaAnalyticsDataClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2262,7 +2431,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = BetaAnalyticsDataAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close"