diff --git a/docs/bigquery-usage.rst b/docs/bigquery-usage.rst index f84bf638f3cb..edab2e1510d4 100644 --- a/docs/bigquery-usage.rst +++ b/docs/bigquery-usage.rst @@ -291,26 +291,88 @@ Run a query which can be expected to complete within bounded time: >>> from gcloud import bigquery >>> client = bigquery.Client() - >>> query = """\ - SELECT count(*) AS age_count FROM dataset_name.person_ages - """ - >>> query = client.run_sync_query(query) + >>> QUERY = """\ + ... SELECT count(*) AS age_count FROM dataset_name.person_ages + ... """ + >>> query = client.run_sync_query(QUERY) + >>> query.timeout_ms = 1000 + >>> query.run() # API request + >>> query.complete + True + >>> len(query.schema) + 1 + >>> field = query.schema[0] + >>> field.name + u'age_count' + >>> field.field_type + u'INTEGER' + >>> field.mode + u'NULLABLE' + >>> query.rows + [(15,)] + >>> query.total_rows + 1 + +If the rows returned by the query do not fit into the initial response, +then we need to fetch the remaining rows via ``fetch_data``: + +.. doctest:: + + >>> from gcloud import bigquery + >>> client = bigquery.Client() + >>> QUERY = """\ + ... SELECT * FROM dataset_name.person_ages + ... """ + >>> query = client.run_sync_query(QUERY) + >>> query.timeout_ms = 1000 + >>> query.run() # API request + >>> query.complete + True + >>> query.total_rows + 1234 + >>> query.page_token + '8d6e452459238eb0fe87d8eb191dd526ee70a35e' + >>> do_something_with(query.schema, query.rows) + >>> token = query.page_token # for initial request + >>> while True: + ... rows, _, token = query.fetch_data(page_token=token) + ... do_something_with(query.schema, rows) + ... if token is None: + ... break + + +If the query takes longer than the timeout allowed, ``query.complete`` +will be ``False``. In that case, we need to poll the associated job until +it is done, and then fetch the results: + +.. doctest:: + + >>> from gcloud import bigquery + >>> client = bigquery.Client() + >>> QUERY = """\ + ... 
SELECT * FROM dataset_name.person_ages + ... """ + >>> query = client.run_sync_query(QUERY) >>> query.timeout_ms = 1000 >>> query.run() # API request + >>> query.complete + False + >>> job = query.job >>> retry_count = 100 - >>> while retry_count > 0 and not job.complete: + >>> while retry_count > 0 and job.state == 'running': ... retry_count -= 1 ... time.sleep(10) - ... query.reload() # API request - >>> query.schema - [{'name': 'age_count', 'type': 'integer', 'mode': 'nullable'}] - >>> query.rows - [(15,)] + ... job.reload() # API call + >>> job.state + 'done' + >>> token = None # for initial request + >>> while True: + ... rows, _, token = query.fetch_data(page_token=token) + ... do_something_with(query.schema, rows) + ... if token is None: + ... break -.. note:: - If the query takes longer than the timeout allowed, ``job.complete`` - will be ``False``: we therefore poll until it is completed. Querying data (asynchronous) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/index.rst b/docs/index.rst index 7f30287711cc..a9c1094a6789 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -107,6 +107,18 @@ search-index search-document +.. toctree:: + :maxdepth: 0 + :hidden: + :caption: Cloud Logging + + logging-usage + Client <logging-client> + logging-logger + logging-entries + logging-metric + logging-sink + .. toctree:: + :maxdepth: 0 + :hidden: diff --git a/docs/logging-client.rst b/docs/logging-client.rst new file mode 100644 index 000000000000..528414e1a2e3 --- /dev/null +++ b/docs/logging-client.rst @@ -0,0 +1,16 @@ +Logging Client +============== + +.. automodule:: gcloud.logging.client + :members: + :undoc-members: + :show-inheritance: + +Connection +~~~~~~~~~~ + +.. automodule:: gcloud.logging.connection + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs/logging-entries.rst b/docs/logging-entries.rst new file mode 100644 index 000000000000..a7b96721d30b --- /dev/null +++ b/docs/logging-entries.rst @@ -0,0 +1,8 @@ +Entries +======= + +.. 
automodule:: gcloud.logging.entries + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs/logging-logger.rst b/docs/logging-logger.rst new file mode 100644 index 000000000000..8deb9b434534 --- /dev/null +++ b/docs/logging-logger.rst @@ -0,0 +1,8 @@ +Logger +====== + +.. automodule:: gcloud.logging.logger + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs/logging-metric.rst b/docs/logging-metric.rst new file mode 100644 index 000000000000..343634e8307d --- /dev/null +++ b/docs/logging-metric.rst @@ -0,0 +1,7 @@ +Metrics +======= + +.. automodule:: gcloud.logging.metric + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/logging-sink.rst b/docs/logging-sink.rst new file mode 100644 index 000000000000..bbfb62130f27 --- /dev/null +++ b/docs/logging-sink.rst @@ -0,0 +1,7 @@ +Sinks +===== + +.. automodule:: gcloud.logging.sink + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst new file mode 100644 index 000000000000..6711bf207a0a --- /dev/null +++ b/docs/logging-usage.rst @@ -0,0 +1,315 @@ +Using the API +============= + + +Authentication and Configuration +-------------------------------- + +- For an overview of authentication in ``gcloud-python``, + see :doc:`gcloud-auth`. + +- In addition to any authentication configuration, you should also set the + :envvar:`GCLOUD_PROJECT` environment variable for the project you'd like + to interact with. If you are running on Google App Engine or Google Compute Engine + this will be detected automatically. + +- After configuring your environment, create a + :class:`Client <gcloud.logging.client.Client>` + + .. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + + or pass in ``credentials`` and ``project`` explicitly + + .. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client(project='my-project', credentials=creds) + + +Writing log entries +------------------- + +Write a simple text entry to a logger. + +.. 
doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> logger.log_text("A simple entry") # API call + +Write a dictionary entry to a logger. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> logger.log_struct( + ... message="My second entry", + ... weather="partly cloudy") # API call + + +Retrieving log entries +---------------------- + +Fetch entries for the default project. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> entries, token = client.list_entries() # API call + >>> for entry in entries: + ... timestamp = entry.timestamp.isoformat() + ... print('%sZ: %s | %s' % + ... (timestamp, entry.text_payload, entry.struct_payload)) + 2016-02-17T20:35:49.031864072Z: A simple entry | None + 2016-02-17T20:38:15.944418531Z: None | {'message': 'My second entry', 'weather': 'partly cloudy'} + +Fetch entries across multiple projects. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> entries, token = client.list_entries( + ... project_ids=['one-project', 'another-project']) # API call + +Filter entries retrieved using the `Advanced Logs Filters`_ syntax + +.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> FILTER = "log:log_name AND textPayload:simple" + >>> entries, token = client.list_entries(filter=FILTER) # API call + +Sort entries in descending timestamp order. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> entries, token = client.list_entries(order_by=logging.DESCENDING) # API call + +Retrieve entries in batches of 10, iterating until done. + +.. 
doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> retrieved = [] + >>> token = None + >>> while True: + ... entries, token = client.list_entries(page_size=10, page_token=token) # API call + ... retrieved.extend(entries) + ... if token is None: + ... break + +Retrieve entries for a single logger, sorting in descending timestamp order: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> entries, token = logger.list_entries(order_by=logging.DESCENDING) # API call + +Delete all entries for a logger +------------------------------- + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> logger.delete_entries() # API call + + +Manage log metrics +------------------ + +Metrics are counters of entries which match a given filter. They can be +used within Cloud Monitoring to create charts and alerts. + +Create a metric: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric( + ... "robots", "Robots all up in your server", + ... filter='log:apache-access AND textPayload:robot') + >>> metric.exists() # API call + False + >>> metric.create() # API call + >>> metric.exists() # API call + True + +List all metrics for a project: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metrics, token = client.list_metrics() + >>> len(metrics) + 1 + >>> metric = metrics[0] + >>> metric.name + "robots" + +Refresh local information about a metric: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric("robots") + >>> metric.reload() # API call + >>> metric.description + "Robots all up in your server" + >>> metric.filter + "log:apache-access AND textPayload:robot" + +Update a metric: + +.. 
doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric("robots") + >>> metric.exists() # API call + True + >>> metric.reload() # API call + >>> metric.description = "Danger, Will Robinson!" + >>> metric.update() # API call + +Delete a metric: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric("robots") + >>> metric.exists() # API call + True + >>> metric.delete() # API call + >>> metric.exists() # API call + False + + +Export log entries using sinks +------------------------------ + +Sinks allow exporting entries which match a given filter to Cloud Storage +buckets, BigQuery datasets, or Cloud Pub/Sub topics. + +Create a Cloud Storage sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots-storage", + ... filter='log:apache-access AND textPayload:robot') + >>> sink.storage_bucket = "my-bucket-name" + >>> sink.exists() # API call + False + >>> sink.create() # API call + >>> sink.exists() # API call + True + +Create a BigQuery sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots-bq", + ... filter='log:apache-access AND textPayload:robot') + >>> sink.bigquery_dataset = "projects/my-project/datasets/my-dataset" + >>> sink.exists() # API call + False + >>> sink.create() # API call + >>> sink.exists() # API call + True + +Create a Cloud Pub/Sub sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots-pubsub", + ... filter='log:apache-access AND textPayload:robot') + >>> sink.pubsub_topic = 'projects/my-project/topics/my-topic' + >>> sink.exists() # API call + False + >>> sink.create() # API call + >>> sink.exists() # API call + True + +List all sinks for a project: + +.. 
doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sinks, token = client.list_sinks() + >>> for sink in sinks: + ... print('%s: %s' % (sink.name, sink.destination)) + robots-storage: storage.googleapis.com/my-bucket-name + robots-bq: bigquery.googleapis.com/projects/my-project/datasets/my-dataset + robots-pubsub: pubsub.googleapis.com/projects/my-project/topics/my-topic + +Refresh local information about a sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink('robots-storage') + >>> sink.filter is None + True + >>> sink.reload() # API call + >>> sink.filter + 'log:apache-access AND textPayload:robot' + >>> sink.destination + 'storage.googleapis.com/my-bucket-name' + +Update a sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink("robots") + >>> sink.reload() # API call + >>> sink.filter = "log:apache-access" + >>> sink.update() # API call + +Delete a sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots", + ... filter='log:apache-access AND textPayload:robot') + >>> sink.exists() # API call + True + >>> sink.delete() # API call + >>> sink.exists() # API call + False diff --git a/docs/pubsub-usage.rst b/docs/pubsub-usage.rst index df58f7273ae6..2f74fd584120 100644 --- a/docs/pubsub-usage.rst +++ b/docs/pubsub-usage.rst @@ -99,12 +99,12 @@ Test permissions allowed by the current IAM policy on a topic: .. doctest:: >>> from gcloud import pubsub - >>> from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + >>> from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE >>> client = pubsub.Client() >>> topic = client.topic('topic_name') >>> allowed = topic.check_iam_permissions( - ... [READER_ROLE, WRITER_ROLE, OWNER_ROLE]) # API request - >>> allowed == [READER_ROLE, WRITER_ROLE] + ... 
[VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE]) # API request + >>> allowed == [VIEWER_ROLE, EDITOR_ROLE] True @@ -349,11 +349,11 @@ Test permissions allowed by the current IAM policy on a subscription: .. doctest:: >>> from gcloud import pubsub - >>> from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + >>> from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE >>> client = pubsub.Client() >>> topic = client.topic('topic_name') >>> subscription = topic.subscription('subscription_name') >>> allowed = subscription.check_iam_permissions( - ... [READER_ROLE, WRITER_ROLE, OWNER_ROLE]) # API request - >>> allowed == [READER_ROLE, WRITER_ROLE] + ... [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE]) # API request + >>> allowed == [VIEWER_ROLE, EDITOR_ROLE] True diff --git a/gcloud/_helpers.py b/gcloud/_helpers.py index dd85a54e405f..7b91c00271da 100644 --- a/gcloud/_helpers.py +++ b/gcloud/_helpers.py @@ -38,6 +38,16 @@ _NOW = datetime.datetime.utcnow # To be replaced by tests. _RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' +_RFC3339_NO_FRACTION = '%Y-%m-%dT%H:%M:%S' +# datetime.strptime cannot handle nanosecond precision: parse w/ regex +_RFC3339_NANOS = re.compile(r""" + (?P<no_fraction> + \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS + ) + \. # decimal point + (?P<nanos>\d{9}) # nanoseconds + Z # Zulu +""", re.VERBOSE) class _LocalStack(Local): @@ -301,7 +311,7 @@ def _total_seconds(offset): def _rfc3339_to_datetime(dt_str): - """Convert a string to a native timestamp. + """Convert a microsecond-precision timestamp to a native datetime. :type dt_str: str :param dt_str: The string to convert. @@ -313,6 +323,32 @@ def _rfc3339_to_datetime(dt_str): dt_str, _RFC3339_MICROS).replace(tzinfo=UTC) +def _rfc3339_nanos_to_datetime(dt_str): + """Convert a nanosecond-precision timestamp to a native datetime. + + .. note:: + + Python datetimes do not support nanosecond precision; this function + therefore truncates such values to microseconds. 
+ + :type dt_str: str + :param dt_str: The string to convert. + + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the string. + """ + with_nanos = _RFC3339_NANOS.match(dt_str) + if with_nanos is None: + raise ValueError( + 'Timestamp: %r, does not match pattern: %r' % ( + dt_str, _RFC3339_NANOS.pattern)) + bare_seconds = datetime.datetime.strptime( + with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION) + nanos = int(with_nanos.group('nanos')) + micros = nanos // 1000 + return bare_seconds.replace(microsecond=micros, tzinfo=UTC) + + def _datetime_to_rfc3339(value): """Convert a native timestamp to a string. diff --git a/gcloud/bigquery/query.py b/gcloud/bigquery/query.py index 0e580cb4826f..f9158703ce68 100644 --- a/gcloud/bigquery/query.py +++ b/gcloud/bigquery/query.py @@ -50,6 +50,7 @@ def __init__(self, query, client): self._properties = {} self.query = query self._configuration = _SyncQueryConfiguration() + self._job = None @property def project(self): @@ -134,9 +135,12 @@ def job(self): :returns: Job instance used to run the query (None until ``jobReference`` property is set by the server). 
""" - job_ref = self._properties.get('jobReference') - if job_ref is not None: - return QueryJob(job_ref['jobId'], self.query, self._client) + if self._job is None: + job_ref = self._properties.get('jobReference') + if job_ref is not None: + self._job = QueryJob(job_ref['jobId'], self.query, + self._client) + return self._job @property def page_token(self): diff --git a/gcloud/bigquery/test_query.py b/gcloud/bigquery/test_query.py index b1d323e80bed..bed46d9e85e3 100644 --- a/gcloud/bigquery/test_query.py +++ b/gcloud/bigquery/test_query.py @@ -156,6 +156,8 @@ def test_job_w_jobid(self): self.assertEqual(job.query, self.QUERY) self.assertTrue(job._client is client) self.assertEqual(job.name, SERVER_GENERATED) + fetched_later = query.job + self.assertTrue(fetched_later is job) def test_schema(self): client = _Client(self.PROJECT) diff --git a/gcloud/dns/client.py b/gcloud/dns/client.py index b6a692aa01c0..ddc4ded769be 100644 --- a/gcloud/dns/client.py +++ b/gcloud/dns/client.py @@ -55,8 +55,9 @@ def quotas(self): """ path = '/projects/%s' % (self.project,) resp = self.connection.api_request(method='GET', path=path) + return dict([(key, int(value)) - for key, value in resp['quota'].items()]) + for key, value in resp['quota'].items() if key != 'kind']) def list_zones(self, max_results=None, page_token=None): """List zones for the project associated with this client. 
diff --git a/gcloud/dns/test_client.py b/gcloud/dns/test_client.py index 8f59e989c55f..44b168f82efe 100644 --- a/gcloud/dns/test_client.py +++ b/gcloud/dns/test_client.py @@ -68,6 +68,42 @@ def test_quotas_defaults(self): self.assertEqual(req['method'], 'GET') self.assertEqual(req['path'], '/%s' % PATH) + def test_quotas_w_kind_key(self): + PROJECT = 'PROJECT' + PATH = 'projects/%s' % PROJECT + MANAGED_ZONES = 1234 + RRS_PER_RRSET = 23 + RRSETS_PER_ZONE = 345 + RRSET_ADDITIONS = 456 + RRSET_DELETIONS = 567 + TOTAL_SIZE = 67890 + DATA = { + 'quota': { + 'managedZones': str(MANAGED_ZONES), + 'resourceRecordsPerRrset': str(RRS_PER_RRSET), + 'rrsetsPerManagedZone': str(RRSETS_PER_ZONE), + 'rrsetAdditionsPerChange': str(RRSET_ADDITIONS), + 'rrsetDeletionsPerChange': str(RRSET_DELETIONS), + 'totalRrdataSizePerChange': str(TOTAL_SIZE), + } + } + CONVERTED = dict([(key, int(value)) + for key, value in DATA['quota'].items()]) + WITH_KIND = {'quota': DATA['quota'].copy()} + WITH_KIND['quota']['kind'] = 'dns#quota' + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + conn = client.connection = _Connection(WITH_KIND) + + quotas = client.quotas() + + self.assertEqual(quotas, CONVERTED) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + def test_list_zones_defaults(self): from gcloud.dns.zone import ManagedZone PROJECT = 'PROJECT' diff --git a/gcloud/logging/__init__.py b/gcloud/logging/__init__.py new file mode 100644 index 000000000000..67b0386329e9 --- /dev/null +++ b/gcloud/logging/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Logging API wrapper.""" + +from gcloud.logging.client import Client +from gcloud.logging.connection import Connection + + +SCOPE = Connection.SCOPE +ASCENDING = 'timestamp asc' +DESCENDING = 'timestamp desc' diff --git a/gcloud/logging/_helpers.py b/gcloud/logging/_helpers.py new file mode 100644 index 000000000000..8061abc2cd30 --- /dev/null +++ b/gcloud/logging/_helpers.py @@ -0,0 +1,46 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper functions for shared behavior.""" + +import re + +from gcloud._helpers import _name_from_project_path + + +_LOGGER_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P<project>[^/]+) # initial letter, wordchars + hyphen + /logs/ # static midfix + (?P<name>[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +def logger_name_from_path(path, project): + """Validate a logger URI path and get the logger name. + + :type path: string + :param path: URI path for a logger API request. 
+ + :type project: string + :param project: The project associated with the request. It is + included for validation purposes. + + :rtype: string + :returns: Logger name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, project, _LOGGER_TEMPLATE) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py new file mode 100644 index 000000000000..fd79d3100aa3 --- /dev/null +++ b/gcloud/logging/client.py @@ -0,0 +1,251 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the Google Cloud Logging API.""" + + +from gcloud.client import JSONClient +from gcloud.logging.connection import Connection +from gcloud.logging.entries import ProtobufEntry +from gcloud.logging.entries import StructEntry +from gcloud.logging.entries import TextEntry +from gcloud.logging.logger import Logger +from gcloud.logging.metric import Metric +from gcloud.logging.sink import Sink + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: string + :param project: the project which the client acts on behalf of. + If not passed, falls back to the default inferred + from the environment. 
+ + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + + def logger(self, name): + """Creates a logger bound to the current client. + + :type name: string + :param name: the name of the logger to be constructed. + + :rtype: :class:`gcloud.logging.logger.Logger` + :returns: Logger created with the current client. + """ + return Logger(name, client=self) + + def _entry_from_resource(self, resource, loggers): + """Detect correct entry type from resource and instantiate. + + :type resource: dict + :param resource: one entry resource from API response + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype: One of: + :class:`gcloud.logging.entries.TextEntry`, + :class:`gcloud.logging.entries.StructEntry`, + :class:`gcloud.logging.entries.ProtobufEntry` + :returns: the entry instance, constructed via the resource + """ + if 'textPayload' in resource: + return TextEntry.from_api_repr(resource, self, loggers) + elif 'jsonPayload' in resource: + return StructEntry.from_api_repr(resource, self, loggers) + elif 'protoPayload' in resource: + return ProtobufEntry.from_api_repr(resource, self, loggers) + raise ValueError('Cannot parse log entry resource') + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. 
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: string + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: string + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more entries can be retrieved with another call (pass that + value as ``page_token``). + """ + if projects is None: + projects = [self.project] + + params = {'projectIds': projects} + + if filter_ is not None: + params['filter'] = filter_ + + if order_by is not None: + params['orderBy'] = order_by + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + resp = self.connection.api_request(method='POST', path='/entries:list', + data=params) + loggers = {} + entries = [self._entry_from_resource(resource, loggers) + for resource in resp.get('entries', ())] + return entries, resp.get('nextPageToken') + + def sink(self, name, filter_, destination): + """Creates a sink bound to the current client. + + :type name: string + :param name: the name of the sink to be constructed. + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. 
+ + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + + :rtype: :class:`gcloud.logging.sink.Sink` + :returns: Sink created with the current client. + """ + return Sink(name, filter_, destination, client=self) + + def list_sinks(self, page_size=None, page_token=None): + """List sinks for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.sink.Sink`, plus a + "next page token" string: if not None, indicates that + more sinks can be retrieved with another call (pass that + value as ``page_token``). + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/sinks' % (self.project,) + resp = self.connection.api_request(method='GET', path=path, + query_params=params) + sinks = [Sink.from_api_repr(resource, self) + for resource in resp.get('sinks', ())] + return sinks, resp.get('nextPageToken') + + def metric(self, name, filter_, description=''): + """Creates a metric bound to the current client. + + :type name: string + :param name: the name of the metric to be constructed. + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries tracked by the metric. + + :type description: string + :param description: the description of the metric to be constructed. + + :rtype: :class:`gcloud.logging.metric.Metric` + :returns: Metric created with the current client. 
+ """ + return Metric(name, filter_, client=self, description=description) + + def list_metrics(self, page_size=None, page_token=None): + """List metrics for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.metric.Metric`, plus a + "next page token" string: if not None, indicates that + more metrics can be retrieved with another call (pass that + value as ``page_token``). + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/metrics' % (self.project,) + resp = self.connection.api_request(method='GET', path=path, + query_params=params) + metrics = [Metric.from_api_repr(resource, self) + for resource in resp.get('metrics', ())] + return metrics, resp.get('nextPageToken') diff --git a/gcloud/logging/connection.py b/gcloud/logging/connection.py new file mode 100644 index 000000000000..1c330a28529e --- /dev/null +++ b/gcloud/logging/connection.py @@ -0,0 +1,48 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with gcloud logging connections.""" + +from gcloud import connection as base_connection + + +class Connection(base_connection.JSONConnection): + """A connection to Google Cloud Logging via the JSON REST API. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: (Optional) HTTP object to make requests. + + :type api_base_url: string + :param api_base_url: The base of the API call URL. Defaults to the value + :attr:`Connection.API_BASE_URL`. + """ + + API_BASE_URL = 'https://logging.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v2beta1' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Cloud Logging consumer.""" diff --git a/gcloud/logging/entries.py b/gcloud/logging/entries.py new file mode 100644 index 000000000000..e26ac7ef0cd3 --- /dev/null +++ b/gcloud/logging/entries.py @@ -0,0 +1,134 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Log entries within the Google Cloud Logging API.""" + +import json + +from google.protobuf.json_format import Parse + +from gcloud._helpers import _rfc3339_nanos_to_datetime +from gcloud.logging._helpers import logger_name_from_path + + +class _BaseEntry(object): + """Base class for TextEntry, StructEntry. + + :type payload: text or dict + :param payload: The payload passed as ``textPayload``, ``jsonPayload``, + or ``protoPayload``. + + :type logger: :class:`gcloud.logging.logger.Logger` + :param logger: the logger used to write the entry. + + :type insert_id: text, or :class:`NoneType` + :param insert_id: (optional) the ID used to identify an entry uniquely. + + :type timestamp: :class:`datetime.datetime`, or :class:`NoneType` + :param timestamp: (optional) timestamp for the entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + def __init__(self, payload, logger, insert_id=None, timestamp=None, + labels=None, severity=None, http_request=None): + self.payload = payload + self.logger = logger + self.insert_id = insert_id + self.timestamp = timestamp + self.labels = labels + self.severity = severity + self.http_request = http_request + + @classmethod + def from_api_repr(cls, resource, client, loggers=None): + """Factory: construct an entry given its API representation + + :type resource: dict + :param resource: text entry resource representation returned from + the API + + :type client: :class:`gcloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration. + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype: :class:`gcloud.logging.entries.TextEntry` + :returns: Text entry parsed from ``resource``. + """ + if loggers is None: + loggers = {} + logger_fullname = resource['logName'] + logger = loggers.get(logger_fullname) + if logger is None: + logger_name = logger_name_from_path( + logger_fullname, client.project) + logger = loggers[logger_fullname] = client.logger(logger_name) + payload = resource[cls._PAYLOAD_KEY] + insert_id = resource.get('insertId') + timestamp = resource.get('timestamp') + if timestamp is not None: + timestamp = _rfc3339_nanos_to_datetime(timestamp) + labels = resource.get('labels') + severity = resource.get('severity') + http_request = resource.get('httpRequest') + return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, + labels=labels, severity=severity, http_request=http_request) + + +class TextEntry(_BaseEntry): + """Entry created with ``textPayload``. 
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'textPayload' + + +class StructEntry(_BaseEntry): + """Entry created with ``jsonPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'jsonPayload' + + +class ProtobufEntry(_BaseEntry): + """Entry created with ``protoPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'protoPayload' + + def parse_message(self, message): + """Parse payload into a protobuf message. + + Mutates the passed-in ``message`` in place. + + :type message: Protobuf message + :param message: the message to be logged + """ + Parse(json.dumps(self.payload), message) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py new file mode 100644 index 000000000000..f7bb50ee4a80 --- /dev/null +++ b/gcloud/logging/logger.py @@ -0,0 +1,453 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Loggers.""" + +import json + +from google.protobuf.json_format import MessageToJson + + +class Logger(object): + """Loggers represent named targets for log entries. 
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + + :type name: string + :param name: the name of the logger + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the logger (which requires a project). + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of default labels for entries written + via this logger. + """ + def __init__(self, name, client, labels=None): + self.name = name + self._client = client + self.labels = labels + + @property + def client(self): + """Clent bound to the logger.""" + return self._client + + @property + def project(self): + """Project bound to the logger.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in logging APIs""" + return 'projects/%s/logs/%s' % (self.project, self.name) + + @property + def path(self): + """URI path for use in logging APIs""" + return '/%s' % (self.full_name,) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def batch(self, client=None): + """Return a batch to use as a context manager. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: :class:`Batch` + :returns: A batch to use as a context manager. 
+ """ + client = self._require_client(client) + return Batch(self, client) + + def _make_entry_resource(self, text=None, info=None, message=None, + labels=None, insert_id=None, severity=None, + http_request=None): + """Return a log entry resource of the appropriate type. + + Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. + + Only one of ``text``, ``info``, or ``message`` should be passed. + + :type text: string or :class:`NoneType` + :param text: text payload + + :type info: dict or :class:`NoneType` + :param info: struct payload + + :type message: Protobuf message or :class:`NoneType` + :param message: protobuf payload + + :type labels: dict or :class:`NoneType` + :param labels: labels passed in to calling method. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + resource = { + 'logName': self.full_name, + 'resource': {'type': 'global'}, + } + + if text is not None: + resource['textPayload'] = text + + if info is not None: + resource['jsonPayload'] = info + + if message is not None: + as_json_str = MessageToJson(message) + as_json = json.loads(as_json_str) + resource['protoPayload'] = as_json + + if labels is None: + labels = self.labels + + if labels is not None: + resource['labels'] = labels + + if insert_id is not None: + resource['insertId'] = insert_id + + if severity is not None: + resource['severity'] = severity + + if http_request is not None: + resource['httpRequest'] = http_request + + return resource + + def log_text(self, text, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a text message via a POST request + + See: + 
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type text: text + :param text: the log message. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + text=text, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) + data = {'entries': [entry_resource]} + + client.connection.api_request( + method='POST', path='/entries:write', data=data) + + def log_struct(self, info, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a structured message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type info: dict + :param info: the log entry information + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + info=info, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) + data = {'entries': [entry_resource]} + + client.connection.api_request( + method='POST', path='/entries:write', data=data) + + def log_proto(self, message, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a protobuf message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type message: Protobuf message + :param message: the message to be logged + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + message=message, labels=labels, insert_id=insert_id, + severity=severity, http_request=http_request) + data = {'entries': [entry_resource]} + + client.connection.api_request( + method='POST', path='/entries:write', data=data) + + def delete(self, client=None): + """API call: delete all entries in a logger via a DELETE request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + """ + client = self._require_client(client) + client.connection.api_request(method='DELETE', path=self.path) + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: string + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: string + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. 
+ + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more entries can be retrieved with another call (pass that + value as ``page_token``). + """ + log_filter = 'logName:%s' % (self.name,) + if filter_ is not None: + filter_ = '%s AND %s' % (filter_, log_filter) + else: + filter_ = log_filter + return self.client.list_entries( + projects=projects, filter_=filter_, order_by=order_by, + page_size=page_size, page_token=page_token) + + +class Batch(object): + """Context manager: collect entries to log via a single API call. + + Helper returned by :meth:`Logger.batch` + + :type logger: :class:`gcloud.logging.logger.Logger` + :param logger: the logger to which entries will be logged. + + :type client: :class:`gcloud.logging.client.Client` + :param client: The client to use. + """ + def __init__(self, logger, client): + self.logger = logger + self.entries = [] + self.client = client + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.commit() + + def log_text(self, text, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a text entry to be logged during :meth:`commit`. + + :type text: string + :param text: the text entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. 
+ """ + self.entries.append( + ('text', text, labels, insert_id, severity, http_request)) + + def log_struct(self, info, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a struct entry to be logged during :meth:`commit`. + + :type info: dict + :param info: the struct entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + self.entries.append( + ('struct', info, labels, insert_id, severity, http_request)) + + def log_proto(self, message, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a protobuf entry to be logged during :meth:`commit`. + + :type message: protobuf message + :param message: the protobuf entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + self.entries.append( + ('proto', message, labels, insert_id, severity, http_request)) + + def commit(self, client=None): + """Send saved log entries as a single API call. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. 
+ """ + if client is None: + client = self.client + + data = { + 'logName': self.logger.path, + 'resource': {'type': 'global'}, + } + if self.logger.labels is not None: + data['labels'] = self.logger.labels + + entries = data['entries'] = [] + for entry_type, entry, labels, iid, severity, http_req in self.entries: + if entry_type == 'text': + info = {'textPayload': entry} + elif entry_type == 'struct': + info = {'jsonPayload': entry} + elif entry_type == 'proto': + as_json_str = MessageToJson(entry) + as_json = json.loads(as_json_str) + info = {'protoPayload': as_json} + else: + raise ValueError('Unknown entry type: %s' % (entry_type,)) + if labels is not None: + info['labels'] = labels + if iid is not None: + info['insertId'] = iid + if severity is not None: + info['severity'] = severity + if http_req is not None: + info['httpRequest'] = http_req + entries.append(info) + + client.connection.api_request( + method='POST', path='/entries:write', data=data) + del self.entries[:] diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py new file mode 100644 index 000000000000..34fa343ff53f --- /dev/null +++ b/gcloud/logging/metric.py @@ -0,0 +1,211 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Define Logging API Metrics.""" + +import re + +from gcloud._helpers import _name_from_project_path +from gcloud.exceptions import NotFound + + +_METRIC_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P[^/]+) # initial letter, wordchars + hyphen + /metrics/ # static midfix + (?P[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +def _metric_name_from_path(path, project): + """Validate a metric URI path and get the metric name. + + :type path: string + :param path: URI path for a metric API request. + + :type project: string + :param project: The project associated with the request. It is + included for validation purposes. + + :rtype: string + :returns: Metric name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, project, _METRIC_TEMPLATE) + + +class Metric(object): + """Metrics represent named filters for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + + :type name: string + :param name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the entries + tracked by the metric. + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the metric (which requires a project). 
+ + :type description: string + :param description: an optional description of the metric + """ + def __init__(self, name, filter_, client, description=''): + self.name = name + self._client = client + self.filter_ = filter_ + self.description = description + + @property + def client(self): + """Clent bound to the logger.""" + return self._client + + @property + def project(self): + """Project bound to the logger.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in metric APIs""" + return 'projects/%s/metrics/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the metric's APIs""" + return '/%s' % (self.full_name,) + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a metric given its API representation + + :type resource: dict + :param resource: metric resource representation returned from the API + + :type client: :class:`gcloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration for the metric. + + :rtype: :class:`gcloud.logging.metric.Metric` + :returns: Metric parsed from ``resource``. + """ + metric_name = resource['name'] + filter_ = resource['filter'] + description = resource.get('description', '') + return cls(metric_name, filter_, client=client, + description=description) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. 
+ """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the metric via a PUT request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + target = '/projects/%s/metrics' % (self.project,) + data = { + 'name': self.name, + 'filter': self.filter_, + } + if self.description: + data['description'] = self.description + client.connection.api_request(method='POST', path=target, data=data) + + def exists(self, client=None): + """API call: test for the existence of the metric via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + + try: + client.connection.api_request(method='GET', path=self.path) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: sync local metric configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. 
+ """ + client = self._require_client(client) + data = client.connection.api_request(method='GET', path=self.path) + self.description = data.get('description', '') + self.filter_ = data['filter'] + + def update(self, client=None): + """API call: update metric configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + data = {'name': self.name, 'filter': self.filter_} + if self.description: + data['description'] = self.description + client.connection.api_request(method='PUT', path=self.path, data=data) + + def delete(self, client=None): + """API call: delete a metric via a DELETE request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + client.connection.api_request(method='DELETE', path=self.path) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py new file mode 100644 index 000000000000..49f651bfe905 --- /dev/null +++ b/gcloud/logging/sink.py @@ -0,0 +1,211 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define Logging API Sinks.""" + +import re + +from gcloud._helpers import _name_from_project_path +from gcloud.exceptions import NotFound + + +_SINK_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P[^/]+) # initial letter, wordchars + hyphen + /sinks/ # static midfix + (?P[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +def _sink_name_from_path(path, project): + """Validate a sink URI path and get the sink name. + :type path: string + :param path: URI path for a sink API request. + :type project: string + :param project: The project associated with the request. It is + included for validation purposes. + :rtype: string + :returns: Metric name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, project, _SINK_TEMPLATE) + + +class Sink(object): + """Sinks represent filtered exports for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + + :type name: string + :param name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the entries + exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by the sink. + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the sink (which requires a project). 
+ """ + def __init__(self, name, filter_, destination, client): + self.name = name + self.filter_ = filter_ + self.destination = destination + self._client = client + + @property + def client(self): + """Clent bound to the sink.""" + return self._client + + @property + def project(self): + """Project bound to the sink.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in sink APIs""" + return 'projects/%s/sinks/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the sink's APIs""" + return '/%s' % (self.full_name) + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a sink given its API representation + + :type resource: dict + :param resource: sink resource representation returned from the API + + :type client: :class:`gcloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration for the sink. + + :rtype: :class:`gcloud.logging.sink.Sink` + :returns: Sink parsed from ``resource``. + :raises: :class:`ValueError` if ``client`` is not ``None`` and the + project from the resource does not agree with the project + from the client. + """ + sink_name = _sink_name_from_path(resource['name'], client.project) + filter_ = resource['filter'] + destination = resource['destination'] + return cls(sink_name, filter_, destination, client=client) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. 
+ """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the sink via a PUT request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + target = '/projects/%s/sinks' % (self.project,) + data = { + 'name': self.name, + 'filter': self.filter_, + 'destination': self.destination, + } + client.connection.api_request(method='POST', path=target, data=data) + + def exists(self, client=None): + """API call: test for the existence of the sink via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + + try: + client.connection.api_request(method='GET', path=self.path) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: sync local sink configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. 
+ """ + client = self._require_client(client) + data = client.connection.api_request(method='GET', path=self.path) + self.filter_ = data['filter'] + self.destination = data['destination'] + + def update(self, client=None): + """API call: update sink configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + data = { + 'name': self.name, + 'filter': self.filter_, + 'destination': self.destination, + } + client.connection.api_request(method='PUT', path=self.path, data=data) + + def delete(self, client=None): + """API call: delete a sink via a DELETE request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + client.connection.api_request(method='DELETE', path=self.path) diff --git a/gcloud/logging/test__helpers.py b/gcloud/logging/test__helpers.py new file mode 100644 index 000000000000..a70d40218186 --- /dev/null +++ b/gcloud/logging/test__helpers.py @@ -0,0 +1,36 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class Test_logger_name_from_path(unittest2.TestCase): + + def _callFUT(self, path, project): + from gcloud.logging._helpers import logger_name_from_path + return logger_name_from_path(path, project) + + def test_w_simple_name(self): + LOGGER_NAME = 'LOGGER_NAME' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH, PROJECT) + self.assertEqual(logger_name, LOGGER_NAME) + + def test_w_name_w_all_extras(self): + LOGGER_NAME = 'LOGGER_NAME-part.one~part.two%part-three' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH, PROJECT) + self.assertEqual(logger_name, LOGGER_NAME) diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py new file mode 100644 index 000000000000..2ac27234ad6e --- /dev/null +++ b/gcloud/logging/test_client.py @@ -0,0 +1,431 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestClient(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + SINK_NAME = 'SINK_NAME' + FILTER = 'logName:syslog AND severity>=ERROR' + DESTINATION_URI = 'faux.googleapis.com/destination' + METRIC_NAME = 'metric_name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' + + def _getTargetClass(self): + from gcloud.logging.client import Client + return Client + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + self.assertEqual(client.project, self.PROJECT) + + def test_logger(self): + from gcloud.logging.logger import Logger + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + logger = client.logger(self.LOGGER_NAME) + self.assertTrue(isinstance(logger, Logger)) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + def test__entry_from_resource_unknown_type(self): + PROJECT = 'PROJECT' + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + loggers = {} + with self.assertRaises(ValueError): + client._entry_from_resource({'unknownPayload': {}}, loggers) + + def test_list_entries_defaults(self): + from datetime import datetime + from gcloud._helpers import UTC + from gcloud.logging.entries import TextEntry + from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + IID1 = 'IID1' + TEXT = 'TEXT' + SENT = { + 'projectIds': [self.PROJECT], + } + TOKEN = 'TOKEN' + RETURNED = { + 'entries': [{ + 'textPayload': TEXT, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + 'nextPageToken': 
TOKEN, + } + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + conn = client.connection = _Connection(RETURNED) + entries, token = client.list_entries() + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertTrue(isinstance(entry, TextEntry)) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, TEXT) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(token, TOKEN) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:list') + self.assertEqual(req['data'], SENT) + + def test_list_entries_explicit(self): + # pylint: disable=too-many-statements + from datetime import datetime + from gcloud._helpers import UTC + from gcloud.logging import DESCENDING + from gcloud.logging.entries import ProtobufEntry + from gcloud.logging.entries import StructEntry + from gcloud.logging.logger import Logger + from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + FILTER = 'logName:LOGNAME' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + IID1 = 'IID1' + IID2 = 'IID2' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SENT = { + 'projectIds': [PROJECT1, PROJECT2], + 'filter': FILTER, + 'orderBy': DESCENDING, + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + } + RETURNED = { + 'entries': [{ + 'jsonPayload': PAYLOAD, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }, { + 
'protoPayload': PROTO_PAYLOAD, + 'insertId': IID2, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + } + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + conn = client.connection = _Connection(RETURNED) + entries, token = client.list_entries( + projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, + page_size=PAGE_SIZE, page_token=TOKEN) + self.assertEqual(len(entries), 2) + + entry = entries[0] + self.assertTrue(isinstance(entry, StructEntry)) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertTrue(isinstance(logger, Logger)) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertTrue(isinstance(entry, ProtobufEntry)) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertTrue(entries[0].logger is entries[1].logger) + + self.assertEqual(token, None) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:list') + self.assertEqual(req['data'], SENT) + + def test_sink(self): + from gcloud.logging.sink import Sink + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + 
self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink.client is client) + self.assertEqual(sink.project, self.PROJECT) + + def test_list_sinks_no_paging(self): + from gcloud.logging.sink import Sink + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + + RETURNED = { + 'sinks': [{ + 'name': SINK_PATH, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }], + } + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + sinks, next_page_token = CLIENT_OBJ.list_sinks() + # Test values are correct. + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/sinks' % (PROJECT,)) + self.assertEqual(req['query_params'], {}) + + def test_list_sinks_with_paging(self): + from gcloud.logging.sink import Sink + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + RETURNED = { + 'sinks': [{ + 'name': SINK_PATH, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }], + 'nextPageToken': TOKEN2, + } + # Replace the connection on the client with one of our own. 
+ CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + sinks, next_page_token = CLIENT_OBJ.list_sinks(SIZE, TOKEN1) + # Test values are correct. + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(next_page_token, TOKEN2) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/sinks' % (PROJECT,)) + self.assertEqual(req['query_params'], + {'pageSize': SIZE, 'pageToken': TOKEN1}) + + def test_list_sinks_missing_key(self): + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + RETURNED = {} + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + sinks, next_page_token = CLIENT_OBJ.list_sinks() + # Test values are correct. 
+ self.assertEqual(len(sinks), 0) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/sinks' % PROJECT) + self.assertEqual(req['query_params'], {}) + + def test_metric(self): + from gcloud.logging.metric import Metric + creds = _Credentials() + + client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + metric = client_obj.metric(self.METRIC_NAME, self.FILTER, + description=self.DESCRIPTION) + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertTrue(metric.client is client_obj) + self.assertEqual(metric.project, self.PROJECT) + + def test_list_metrics_no_paging(self): + from gcloud.logging.metric import Metric + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + RETURNED = { + 'metrics': [{ + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }], + } + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + metrics, next_page_token = CLIENT_OBJ.list_metrics() + # Test values are correct. 
+ self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) + self.assertEqual(req['query_params'], {}) + + def test_list_metrics_with_paging(self): + from gcloud.logging.metric import Metric + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + RETURNED = { + 'metrics': [{ + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }], + 'nextPageToken': TOKEN2, + } + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + metrics, next_page_token = CLIENT_OBJ.list_metrics(SIZE, TOKEN1) + # Test values are correct. + self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(next_page_token, TOKEN2) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) + self.assertEqual(req['query_params'], + {'pageSize': SIZE, 'pageToken': TOKEN1}) + + def test_list_metrics_missing_key(self): + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + RETURNED = {} + # Replace the connection on the client with one of our own. 
+ CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + metrics, next_page_token = CLIENT_OBJ.list_metrics() + # Test values are correct. + self.assertEqual(len(metrics), 0) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) + self.assertEqual(req['query_params'], {}) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/gcloud/logging/test_connection.py b/gcloud/logging/test_connection.py new file mode 100644 index 000000000000..2939b683305e --- /dev/null +++ b/gcloud/logging/test_connection.py @@ -0,0 +1,44 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestConnection(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.logging.connection import Connection + return Connection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_default_url(self): + creds = _Credentials() + conn = self._makeOne(creds) + klass = self._getTargetClass() + self.assertEqual(conn.credentials._scopes, klass.SCOPE) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py new file mode 100644 index 000000000000..312e456d63e0 --- /dev/null +++ b/gcloud/logging/test_entries.py @@ -0,0 +1,214 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class Test_BaseEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import _BaseEntry + + class _Dummy(_BaseEntry): + _PAYLOAD_KEY = 'dummyPayload' + + return _Dummy + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + PAYLOAD = 'PAYLOAD' + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + self.assertTrue(entry.labels is None) + self.assertTrue(entry.severity is None) + self.assertTrue(entry.http_request is None) + + def test_ctor_explicit(self): + import datetime + PAYLOAD = 'PAYLOAD' + IID = 'IID' + TIMESTAMP = datetime.datetime.now() + LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger, + insert_id=IID, + timestamp=TIMESTAMP, + labels=LABELS, + severity=SEVERITY, + http_request=REQUEST) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, TIMESTAMP) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.severity, SEVERITY) + self.assertEqual(entry.http_request['requestMethod'], METHOD) + self.assertEqual(entry.http_request['requestUrl'], URI) + self.assertEqual(entry.http_request['status'], STATUS) + + def test_from_api_repr_missing_data_no_loggers(self): + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 
'dummyPayload': PAYLOAD, + 'logName': LOG_NAME, + } + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + self.assertTrue(entry.severity is None) + self.assertTrue(entry.http_request is None) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + + def test_from_api_repr_w_loggers_no_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + klass = self._getTargetClass() + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + SEVERITY = 'CRITICAL' + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + LABELS = {'foo': 'bar', 'baz': 'qux'} + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + API_REPR = { + 'dummyPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + 'labels': LABELS, + 'severity': SEVERITY, + 'httpRequest': { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + }, + } + loggers = {} + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.severity, SEVERITY) + self.assertEqual(entry.http_request['requestMethod'], METHOD) + self.assertEqual(entry.http_request['requestUrl'], URI) + self.assertEqual(entry.http_request['status'], STATUS) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertEqual(loggers, {LOG_NAME: logger}) + + def 
test_from_api_repr_w_loggers_w_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + LABELS = {'foo': 'bar', 'baz': 'qux'} + API_REPR = { + 'dummyPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + 'labels': LABELS, + } + LOGGER = object() + loggers = {LOG_NAME: LOGGER} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.labels, LABELS) + self.assertTrue(entry.logger is LOGGER) + + +class TestProtobufEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import ProtobufEntry + return ProtobufEntry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_parse_message(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + LOGGER = object() + message = Struct(fields={'foo': Value(bool_value=False)}) + with_true = Struct(fields={'foo': Value(bool_value=True)}) + PAYLOAD = json.loads(MessageToJson(with_true)) + entry = self._makeOne(PAYLOAD, LOGGER) + entry.parse_message(message) + self.assertTrue(message.fields['foo']) + + +def _datetime_to_rfc3339_w_nanos(value): + from gcloud._helpers import _RFC3339_NO_FRACTION + no_fraction = value.strftime(_RFC3339_NO_FRACTION) + return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) + + +class _Logger(object): + + def __init__(self, name, client): + self.name = name + self.client = client + + +class _Client(object): + + def 
__init__(self, project): + self.project = project + + def logger(self, name): + return _Logger(name, self) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py new file mode 100644 index 000000000000..069ad2f47d2e --- /dev/null +++ b/gcloud/logging/test_logger.py @@ -0,0 +1,762 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestLogger(unittest2.TestCase): + + PROJECT = 'test-project' + LOGGER_NAME = 'logger-name' + + def _getTargetClass(self): + from gcloud.logging.logger import Logger + return Logger + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.full_name, 'projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.path, '/projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.labels, None) + + def test_ctor_explicit(self): + LABELS = {'foo': 'bar', 'baz': 'qux'} + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, labels=LABELS) + self.assertEqual(logger.name, self.LOGGER_NAME) + 
self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.full_name, 'projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.path, '/projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.labels, LABELS) + + def test_batch_w_bound_client(self): + from gcloud.logging.logger import Batch + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + batch = logger.batch() + self.assertTrue(isinstance(batch, Batch)) + self.assertTrue(batch.logger is logger) + self.assertTrue(batch.client is client) + + def test_batch_w_alternate_client(self): + from gcloud.logging.logger import Batch + conn1 = _Connection() + conn2 = _Connection() + client1 = _Client(self.PROJECT, conn1) + client2 = _Client(self.PROJECT, conn2) + logger = self._makeOne(self.LOGGER_NAME, client=client1) + batch = logger.batch(client2) + self.assertTrue(isinstance(batch, Batch)) + self.assertTrue(batch.logger is logger) + self.assertTrue(batch.client is client2) + + def test_log_text_w_str_implicit_client(self): + TEXT = 'TEXT' + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_text(TEXT) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_text_w_default_labels(self): + TEXT = 'TEXT' + DEFAULT_LABELS = {'foo': 'spam'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + logger.log_text(TEXT) + 
self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): + TEXT = u'TEXT' + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + conn = _Connection({}) + client1 = _Client(self.PROJECT, object()) + client2 = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + logger.log_text(TEXT, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, http_request=REQUEST) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_struct_w_implicit_client(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_struct(STRUCT) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 
'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_struct_w_default_labels(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + DEFAULT_LABELS = {'foo': 'spam'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + logger.log_struct(STRUCT) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_struct_w_explicit_client_labels_severity_httpreq(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + conn = _Connection({}) + client1 = _Client(self.PROJECT, object()) + client2 = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + logger.log_struct(STRUCT, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, + http_request=REQUEST) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }], + } + self.assertEqual(req['method'], 'POST') + 
self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_proto_w_implicit_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_proto(message) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_proto_w_default_labels(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + logger.log_proto(message) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_proto_w_explicit_client_labels_severity_httpreq(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': 
Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + conn = _Connection({}) + client1 = _Client(self.PROJECT, object()) + client2 = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + logger.log_proto(message, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, + http_request=REQUEST) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_delete_w_bound_client(self): + PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + logger = self._makeOne(self.LOGGER_NAME, client=CLIENT) + logger.delete() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_delete_w_alternate_client(self): + PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + conn1 = _Connection({}) + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + logger = self._makeOne(self.LOGGER_NAME, client=CLIENT1) + logger.delete(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 
1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_entries_defaults(self): + LISTED = { + 'projects': None, + 'filter_': 'logName:%s' % (self.LOGGER_NAME), + 'order_by': None, + 'page_size': None, + 'page_token': None, + } + TOKEN = 'TOKEN' + conn = _Connection() + client = _Client(self.PROJECT, conn) + client._token = TOKEN + logger = self._makeOne(self.LOGGER_NAME, client=client) + entries, token = logger.list_entries() + self.assertEqual(len(entries), 0) + self.assertEqual(token, TOKEN) + self.assertEqual(client._listed, LISTED) + + def test_list_entries_explicit(self): + from gcloud.logging import DESCENDING + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + FILTER = 'resource.type:global' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + LISTED = { + 'projects': ['PROJECT1', 'PROJECT2'], + 'filter_': '%s AND logName:%s' % (FILTER, self.LOGGER_NAME), + 'order_by': DESCENDING, + 'page_size': PAGE_SIZE, + 'page_token': TOKEN, + } + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + entries, token = logger.list_entries( + projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, + page_size=PAGE_SIZE, page_token=TOKEN) + self.assertEqual(len(entries), 0) + self.assertEqual(token, None) + self.assertEqual(client._listed, LISTED) + + +class TestBatch(unittest2.TestCase): + + PROJECT = 'test-project' + + def _getTargetClass(self): + from gcloud.logging.logger import Batch + return Batch + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_ctor_defaults(self): + logger = _Logger() + CLIENT = _Client(project=self.PROJECT) + batch = self._makeOne(logger, CLIENT) + self.assertTrue(batch.logger is logger) + self.assertTrue(batch.client is CLIENT) + self.assertEqual(len(batch.entries), 0) + + def test_log_text_defaults(self): + TEXT = 'This is the entry text' + connection = 
_Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_text(TEXT) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, + [('text', TEXT, None, None, None, None)]) + + def test_log_text_explicit(self): + TEXT = 'This is the entry text' + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, + http_request=REQUEST) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, + [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)]) + + def test_log_struct_defaults(self): + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_struct(STRUCT) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, + [('struct', STRUCT, None, None, None, None)]) + + def test_log_struct_explicit(self): + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, + 
severity=SEVERITY, http_request=REQUEST) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)]) + + def test_log_proto_defaults(self): + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_proto(message) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, + [('proto', message, None, None, None, None)]) + + def test_log_proto_explicit(self): + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_proto(message, labels=LABELS, insert_id=IID, + severity=SEVERITY, http_request=REQUEST) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, + [('proto', message, LABELS, IID, SEVERITY, REQUEST)]) + + def test_commit_w_invalid_entry_type(self): + logger = _Logger() + conn = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=conn) + batch = self._makeOne(logger, CLIENT) + batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) + with self.assertRaises(ValueError): + batch.commit() + + def test_commit_w_bound_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 
'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + IID1 = 'IID1' + IID2 = 'IID2' + IID3 = 'IID3' + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + logger = _Logger() + SENT = { + 'logName': logger.path, + 'resource': { + 'type': 'global', + }, + 'entries': [ + {'textPayload': TEXT, 'insertId': IID1}, + {'jsonPayload': STRUCT, 'insertId': IID2}, + {'protoPayload': json.loads(MessageToJson(message)), + 'insertId': IID3}, + ], + } + batch = self._makeOne(logger, client=CLIENT) + batch.log_text(TEXT, insert_id=IID1) + batch.log_struct(STRUCT, insert_id=IID2) + batch.log_proto(message, insert_id=IID3) + batch.commit() + self.assertEqual(list(batch.entries), []) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_commit_w_alternate_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + from gcloud.logging.logger import Logger + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + conn1 = _Connection() + conn2 = _Connection({}) + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + logger = Logger('logger_name', CLIENT1, labels=DEFAULT_LABELS) + SENT = { + 'logName': logger.path, + 'resource': {'type': 'global'}, + 'labels': DEFAULT_LABELS, + 'entries': [ + {'textPayload': TEXT, 'labels': LABELS}, + {'jsonPayload': STRUCT, 'severity': 
SEVERITY}, + {'protoPayload': json.loads(MessageToJson(message)), + 'httpRequest': REQUEST}, + ], + } + batch = self._makeOne(logger, client=CLIENT1) + batch.log_text(TEXT, labels=LABELS) + batch.log_struct(STRUCT, severity=SEVERITY) + batch.log_proto(message, http_request=REQUEST) + batch.commit(client=CLIENT2) + self.assertEqual(list(batch.entries), []) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_context_mgr_success(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + from gcloud.logging.logger import Logger + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + logger = Logger('logger_name', CLIENT, labels=DEFAULT_LABELS) + SENT = { + 'logName': logger.path, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + 'entries': [ + {'textPayload': TEXT, 'httpRequest': REQUEST}, + {'jsonPayload': STRUCT, 'labels': LABELS}, + {'protoPayload': json.loads(MessageToJson(message)), + 'severity': SEVERITY}, + ], + } + batch = self._makeOne(logger, client=CLIENT) + + with batch as other: + other.log_text(TEXT, http_request=REQUEST) + other.log_struct(STRUCT, labels=LABELS) + other.log_proto(message, severity=SEVERITY) + + self.assertEqual(list(batch.entries), []) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] 
+ self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_context_mgr_failure(self): + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + message = Struct(fields={'foo': Value(bool_value=True)}) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + logger = _Logger() + UNSENT = [ + ('text', TEXT, None, IID, None, None), + ('struct', STRUCT, None, None, SEVERITY, None), + ('proto', message, LABELS, None, None, REQUEST), + ] + batch = self._makeOne(logger, client=CLIENT) + + try: + with batch as other: + other.log_text(TEXT, insert_id=IID) + other.log_struct(STRUCT, severity=SEVERITY) + other.log_proto(message, labels=LABELS, http_request=REQUEST) + raise _Bugout() + except _Bugout: + pass + + self.assertEqual(list(batch.entries), UNSENT) + self.assertEqual(len(conn._requested), 0) + + +class _Logger(object): + + labels = None + + def __init__(self, name="NAME", project="PROJECT"): + self.path = '/projects/%s/logs/%s' % (project, name) + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response + + +class _Client(object): + + _listed = _token = None + _entries = () + + def __init__(self, project, connection=None): + self.project = project + self.connection = connection + + def list_entries(self, **kw): + self._listed = kw + return self._entries, self._token + + +class _Bugout(Exception): + pass diff --git 
a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py new file mode 100644 index 000000000000..cbba9d1c4252 --- /dev/null +++ b/gcloud/logging/test_metric.py @@ -0,0 +1,316 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class Test__metric_name_from_path(unittest2.TestCase): + + def _callFUT(self, path, project): + from gcloud.logging.metric import _metric_name_from_path + return _metric_name_from_path(path, project) + + def test_invalid_path_length(self): + PATH = 'projects/foo' + PROJECT = None + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_path_format(self): + METRIC_NAME = 'METRIC_NAME' + PROJECT = 'PROJECT' + PATH = 'foo/%s/bar/%s' % (PROJECT, METRIC_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_project(self): + METRIC_NAME = 'METRIC_NAME' + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + PATH = 'projects/%s/metrics/%s' % (PROJECT1, METRIC_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT2) + + def test_valid_data(self): + METRIC_NAME = 'METRIC_NAME' + PROJECT = 'PROJECT' + PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + metric_name = self._callFUT(PATH, PROJECT) + self.assertEqual(metric_name, METRIC_NAME) + + +class TestMetric(unittest2.TestCase): + + PROJECT = 'test-project' + METRIC_NAME = 'metric-name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 
'DESCRIPTION' + + def _getTargetClass(self): + from gcloud.logging.metric import Metric + return Metric + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection() + client = _Client(self.PROJECT, conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertTrue(metric.client is client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_ctor_explicit(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection() + client = _Client(self.PROJECT, conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, + client=client, description=self.DESCRIPTION) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertTrue(metric.client is client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_from_api_repr_minimal(self): + CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertTrue(metric._client is CLIENT) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_from_api_repr_w_description(self): + 
CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': DESCRIPTION, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertTrue(metric._client is CLIENT) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_create_w_bound_client(self): + TARGET = 'projects/%s/metrics' % (self.PROJECT,) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric.create() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) + self.assertEqual(req['data'], RESOURCE) + + def test_create_w_alternate_client(self): + TARGET = 'projects/%s/metrics' % (self.PROJECT,) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + description=self.DESCRIPTION) + metric.create(client=client2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) + self.assertEqual(req['data'], RESOURCE) + + def test_exists_miss_w_bound_client(self): + FULL = 
'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + self.assertFalse(metric.exists()) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_exists_hit_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({'name': FULL}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) + self.assertTrue(metric.exists(client=CLIENT2)) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_reload_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': NEW_FILTER, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT, + description=DESCRIPTION) + metric.reload() + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, '') + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_reload_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 
'description': DESCRIPTION, + 'filter': NEW_FILTER, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) + metric.reload(client=CLIENT2) + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_update_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + metric.update() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + def test_update_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + RESOURCE = { + 'name': self.METRIC_NAME, + 'description': DESCRIPTION, + 'filter': self.FILTER, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1, + description=DESCRIPTION) + metric.update(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], 
RESOURCE) + + def test_delete_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + metric.delete() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_delete_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) + metric.delete(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % FULL) + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: # pragma: NO COVER + raise NotFound('miss') + else: + return response + + +class _Client(object): + + def __init__(self, project, connection=None): + self.project = project + self.connection = connection diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py new file mode 100644 index 000000000000..103aa0ab6b8c --- /dev/null +++ b/gcloud/logging/test_sink.py @@ -0,0 +1,326 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class Test__sink_name_from_path(unittest2.TestCase): + + def _callFUT(self, path, project): + from gcloud.logging.sink import _sink_name_from_path + return _sink_name_from_path(path, project) + + def test_invalid_path_length(self): + PATH = 'projects/foo' + PROJECT = None + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_path_format(self): + SINK_NAME = 'SINK_NAME' + PROJECT = 'PROJECT' + PATH = 'foo/%s/bar/%s' % (PROJECT, SINK_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_project(self): + SINK_NAME = 'SINK_NAME' + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + PATH = 'projects/%s/sinks/%s' % (PROJECT1, SINK_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT2) + + def test_valid_data(self): + SINK_NAME = 'SINK_NAME' + PROJECT = 'PROJECT' + PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + sink_name = self._callFUT(PATH, PROJECT) + self.assertEqual(sink_name, SINK_NAME) + + +class TestSink(unittest2.TestCase): + + PROJECT = 'test-project' + SINK_NAME = 'sink-name' + FILTER = 'logName:syslog AND severity>=INFO' + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from gcloud.logging.sink import Sink + return Sink + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn = _Connection() + client = _Client(self.PROJECT, conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, 
self.DESTINATION_URI, + client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink.client is client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.path, '/%s' % (FULL,)) + + def test_from_api_repr_minimal(self): + CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink._client is CLIENT) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_from_api_repr_w_description(self): + CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink._client is CLIENT) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_from_api_repr_with_mismatched_project(self): + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + CLIENT = _Client(project=PROJECT1) + FULL = 'projects/%s/sinks/%s' % (PROJECT2, self.SINK_NAME) + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + self.assertRaises(ValueError, klass.from_api_repr, + 
RESOURCE, client=CLIENT) + + def test_create_w_bound_client(self): + TARGET = 'projects/%s/sinks' % (self.PROJECT,) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + sink.create() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) + self.assertEqual(req['data'], RESOURCE) + + def test_create_w_alternate_client(self): + TARGET = 'projects/%s/sinks' % (self.PROJECT,) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + sink.create(client=client2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) + self.assertEqual(req['data'], RESOURCE) + + def test_exists_miss_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT) + self.assertFalse(sink.exists()) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_exists_hit_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn1 = _Connection() + 
CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({'name': FULL}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT1) + self.assertTrue(sink.exists(client=CLIENT2)) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_reload_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT) + sink.reload() + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_reload_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT1) + sink.reload(client=CLIENT2) + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + 
self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_update_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT) + sink.update() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + def test_update_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT1) + sink.update(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + def test_delete_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT) + sink.delete() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + 
self.assertEqual(req['path'], '/%s' % FULL) + + def test_delete_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT1) + sink.delete(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % FULL) + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: # pragma: NO COVER + raise NotFound('miss') + else: + return response + + +class _Client(object): + + def __init__(self, project, connection=None): + self.project = project + self.connection = connection diff --git a/gcloud/pubsub/iam.py b/gcloud/pubsub/iam.py index 1ead9d05ce92..5c9b2eeac603 100644 --- a/gcloud/pubsub/iam.py +++ b/gcloud/pubsub/iam.py @@ -16,10 +16,10 @@ OWNER_ROLE = 'roles/owner' """IAM permission implying all rights to an object.""" -WRITER_ROLE = 'roles/writer' +EDITOR_ROLE = 'roles/editor' """IAM permission implying rights to modify an object.""" -READER_ROLE = 'roles/reader' +VIEWER_ROLE = 'roles/viewer' """IAM permission implying rights to access an object without modifying it.""" @@ -40,8 +40,8 @@ def __init__(self, etag=None, version=None): self.etag = etag self.version = version self.owners = set() - self.writers = set() - self.readers = set() + self.editors = set() + self.viewers = set() @staticmethod def user(email): @@ -127,10 +127,10 @@ def from_api_repr(cls, resource): members = 
set(binding['members']) if role == OWNER_ROLE: policy.owners = members - elif role == WRITER_ROLE: - policy.writers = members - elif role == READER_ROLE: - policy.readers = members + elif role == EDITOR_ROLE: + policy.editors = members + elif role == VIEWER_ROLE: + policy.viewers = members else: raise ValueError('Unknown role: %s' % (role,)) return policy @@ -155,13 +155,13 @@ def to_api_repr(self): bindings.append( {'role': OWNER_ROLE, 'members': sorted(self.owners)}) - if self.writers: + if self.editors: bindings.append( - {'role': WRITER_ROLE, 'members': sorted(self.writers)}) + {'role': EDITOR_ROLE, 'members': sorted(self.editors)}) - if self.readers: + if self.viewers: bindings.append( - {'role': READER_ROLE, 'members': sorted(self.readers)}) + {'role': VIEWER_ROLE, 'members': sorted(self.viewers)}) if bindings: resource['bindings'] = bindings diff --git a/gcloud/pubsub/subscription.py b/gcloud/pubsub/subscription.py index b9e5ef30834b..fc058c948347 100644 --- a/gcloud/pubsub/subscription.py +++ b/gcloud/pubsub/subscription.py @@ -305,8 +305,9 @@ def set_iam_policy(self, policy, client=None): client = self._require_client(client) path = '%s:setIamPolicy' % (self.path,) resource = policy.to_api_repr() + wrapped = {'policy': resource} resp = client.connection.api_request( - method='POST', path=path, data=resource) + method='POST', path=path, data=wrapped) return Policy.from_api_repr(resp) def check_iam_permissions(self, permissions, client=None): diff --git a/gcloud/pubsub/test_iam.py b/gcloud/pubsub/test_iam.py index d6a6e165e715..4aec6ad14130 100644 --- a/gcloud/pubsub/test_iam.py +++ b/gcloud/pubsub/test_iam.py @@ -29,8 +29,8 @@ def test_ctor_defaults(self): self.assertEqual(policy.etag, None) self.assertEqual(policy.version, None) self.assertEqual(list(policy.owners), []) - self.assertEqual(list(policy.writers), []) - self.assertEqual(list(policy.readers), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) def 
test_ctor_explicit(self): VERSION = 17 @@ -39,8 +39,8 @@ def test_ctor_explicit(self): self.assertEqual(policy.etag, ETAG) self.assertEqual(policy.version, VERSION) self.assertEqual(list(policy.owners), []) - self.assertEqual(list(policy.writers), []) - self.assertEqual(list(policy.readers), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) def test_user(self): EMAIL = 'phred@example.com' @@ -83,24 +83,24 @@ def test_from_api_repr_only_etag(self): self.assertEqual(policy.etag, 'ACAB') self.assertEqual(policy.version, None) self.assertEqual(list(policy.owners), []) - self.assertEqual(list(policy.writers), []) - self.assertEqual(list(policy.readers), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) def test_from_api_repr_complete(self): - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE OWNER1 = 'user:phred@example.com' OWNER2 = 'group:cloud-logs@google.com' - WRITER1 = 'domain:google.com' - WRITER2 = 'user:phred@example.com' - READER1 = 'serviceAccount:1234-abcdef@service.example.com' - READER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' RESOURCE = { 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': WRITER_ROLE, 'members': [WRITER1, WRITER2]}, - {'role': READER_ROLE, 'members': [READER1, READER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } klass = self._getTargetClass() @@ -108,8 +108,8 @@ def test_from_api_repr_complete(self): self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.writers), 
[WRITER1, WRITER2]) - self.assertEqual(sorted(policy.readers), [READER1, READER2]) + self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) def test_from_api_repr_bad_role(self): BOGUS1 = 'user:phred@example.com' @@ -134,27 +134,27 @@ def test_to_api_repr_only_etag(self): self.assertEqual(policy.to_api_repr(), {'etag': 'DEADBEEF'}) def test_to_api_repr_full(self): - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE OWNER1 = 'group:cloud-logs@google.com' OWNER2 = 'user:phred@example.com' - WRITER1 = 'domain:google.com' - WRITER2 = 'user:phred@example.com' - READER1 = 'serviceAccount:1234-abcdef@service.example.com' - READER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' EXPECTED = { 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': WRITER_ROLE, 'members': [WRITER1, WRITER2]}, - {'role': READER_ROLE, 'members': [READER1, READER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } policy = self._makeOne('DEADBEEF', 17) policy.owners.add(OWNER1) policy.owners.add(OWNER2) - policy.writers.add(WRITER1) - policy.writers.add(WRITER2) - policy.readers.add(READER1) - policy.readers.add(READER2) + policy.editors.add(EDITOR1) + policy.editors.add(EDITOR2) + policy.viewers.add(VIEWER1) + policy.viewers.add(VIEWER2) self.assertEqual(policy.to_api_repr(), EXPECTED) diff --git a/gcloud/pubsub/test_subscription.py b/gcloud/pubsub/test_subscription.py index d4d3b2746e89..1b8ea750643a 100644 --- a/gcloud/pubsub/test_subscription.py +++ b/gcloud/pubsub/test_subscription.py @@ -485,20 +485,20 @@ def test_delete_w_alternate_client(self): 
self.assertEqual(req['path'], '/%s' % SUB_PATH) def test_get_iam_policy_w_bound_client(self): - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE OWNER1 = 'user:phred@example.com' OWNER2 = 'group:cloud-logs@google.com' - WRITER1 = 'domain:google.com' - WRITER2 = 'user:phred@example.com' - READER1 = 'serviceAccount:1234-abcdef@service.example.com' - READER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' POLICY = { 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': WRITER_ROLE, 'members': [WRITER1, WRITER2]}, - {'role': READER_ROLE, 'members': [READER1, READER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } PROJECT = 'PROJECT' @@ -517,8 +517,8 @@ def test_get_iam_policy_w_bound_client(self): self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.writers), [WRITER1, WRITER2]) - self.assertEqual(sorted(policy.readers), [READER1, READER2]) + self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] @@ -547,8 +547,8 @@ def test_get_iam_policy_w_alternate_client(self): self.assertEqual(policy.etag, 'ACAB') self.assertEqual(policy.version, None) self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.writers), []) - self.assertEqual(sorted(policy.readers), []) + self.assertEqual(sorted(policy.editors), []) + self.assertEqual(sorted(policy.viewers), []) self.assertEqual(len(conn1._requested), 0) self.assertEqual(len(conn2._requested), 1) @@ -557,21 
+557,21 @@ def test_get_iam_policy_w_alternate_client(self): self.assertEqual(req['path'], '/%s' % PATH) def test_set_iam_policy_w_bound_client(self): - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE from gcloud.pubsub.iam import Policy OWNER1 = 'group:cloud-logs@google.com' OWNER2 = 'user:phred@example.com' - WRITER1 = 'domain:google.com' - WRITER2 = 'user:phred@example.com' - READER1 = 'serviceAccount:1234-abcdef@service.example.com' - READER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' POLICY = { 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': WRITER_ROLE, 'members': [WRITER1, WRITER2]}, - {'role': READER_ROLE, 'members': [READER1, READER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } RESPONSE = POLICY.copy() @@ -590,24 +590,24 @@ def test_set_iam_policy_w_bound_client(self): policy = Policy('DEADBEEF', 17) policy.owners.add(OWNER1) policy.owners.add(OWNER2) - policy.writers.add(WRITER1) - policy.writers.add(WRITER2) - policy.readers.add(READER1) - policy.readers.add(READER2) + policy.editors.add(EDITOR1) + policy.editors.add(EDITOR2) + policy.viewers.add(VIEWER1) + policy.viewers.add(VIEWER2) new_policy = subscription.set_iam_policy(policy) self.assertEqual(new_policy.etag, 'ABACABAF') self.assertEqual(new_policy.version, 18) self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.writers), [WRITER1, WRITER2]) - self.assertEqual(sorted(new_policy.readers), [READER1, READER2]) + self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) self.assertEqual(len(conn._requested), 1) 
req = conn._requested[0] self.assertEqual(req['method'], 'POST') self.assertEqual(req['path'], '/%s' % PATH) - self.assertEqual(req['data'], POLICY) + self.assertEqual(req['data'], {'policy': POLICY}) def test_set_iam_policy_w_alternate_client(self): from gcloud.pubsub.iam import Policy @@ -631,23 +631,24 @@ def test_set_iam_policy_w_alternate_client(self): self.assertEqual(new_policy.etag, 'ACAB') self.assertEqual(new_policy.version, None) self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.writers), []) - self.assertEqual(sorted(new_policy.readers), []) + self.assertEqual(sorted(new_policy.editors), []) + self.assertEqual(sorted(new_policy.viewers), []) self.assertEqual(len(conn1._requested), 0) self.assertEqual(len(conn2._requested), 1) req = conn2._requested[0] self.assertEqual(req['method'], 'POST') self.assertEqual(req['path'], '/%s' % PATH) - self.assertEqual(req['data'], {}) + self.assertEqual(req['data'], {'policy': {}}) def test_check_iam_permissions_w_bound_client(self): + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE PROJECT = 'PROJECT' TOPIC_NAME = 'topic_name' SUB_NAME = 'sub_name' PATH = 'projects/%s/subscriptions/%s:testIamPermissions' % ( PROJECT, SUB_NAME) - ROLES = ['roles/reader', 'roles/writer', 'roles/owner'] + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] REQUESTED = { 'permissions': ROLES, } @@ -669,12 +670,13 @@ def test_check_iam_permissions_w_bound_client(self): self.assertEqual(req['data'], REQUESTED) def test_check_iam_permissions_w_alternate_client(self): + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE PROJECT = 'PROJECT' TOPIC_NAME = 'topic_name' SUB_NAME = 'sub_name' PATH = 'projects/%s/subscriptions/%s:testIamPermissions' % ( PROJECT, SUB_NAME) - ROLES = ['roles/reader', 'roles/writer', 'roles/owner'] + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] REQUESTED = { 'permissions': ROLES, } diff --git a/gcloud/pubsub/test_topic.py b/gcloud/pubsub/test_topic.py 
index de52cddc1c7f..c6967bfe4b72 100644 --- a/gcloud/pubsub/test_topic.py +++ b/gcloud/pubsub/test_topic.py @@ -453,20 +453,20 @@ def test_list_subscriptions_missing_key(self): self.assertEqual(req['query_params'], {}) def test_get_iam_policy_w_bound_client(self): - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE OWNER1 = 'user:phred@example.com' OWNER2 = 'group:cloud-logs@google.com' - WRITER1 = 'domain:google.com' - WRITER2 = 'user:phred@example.com' - READER1 = 'serviceAccount:1234-abcdef@service.example.com' - READER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' POLICY = { 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': WRITER_ROLE, 'members': [WRITER1, WRITER2]}, - {'role': READER_ROLE, 'members': [READER1, READER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } TOPIC_NAME = 'topic_name' @@ -483,8 +483,8 @@ def test_get_iam_policy_w_bound_client(self): self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.writers), [WRITER1, WRITER2]) - self.assertEqual(sorted(policy.readers), [READER1, READER2]) + self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] @@ -511,8 +511,8 @@ def test_get_iam_policy_w_alternate_client(self): self.assertEqual(policy.etag, 'ACAB') self.assertEqual(policy.version, None) self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.writers), []) - self.assertEqual(sorted(policy.readers), []) + 
self.assertEqual(sorted(policy.editors), []) + self.assertEqual(sorted(policy.viewers), []) self.assertEqual(len(conn1._requested), 0) self.assertEqual(len(conn2._requested), 1) @@ -522,20 +522,20 @@ def test_get_iam_policy_w_alternate_client(self): def test_set_iam_policy_w_bound_client(self): from gcloud.pubsub.iam import Policy - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE OWNER1 = 'group:cloud-logs@google.com' OWNER2 = 'user:phred@example.com' - WRITER1 = 'domain:google.com' - WRITER2 = 'user:phred@example.com' - READER1 = 'serviceAccount:1234-abcdef@service.example.com' - READER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' POLICY = { 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': WRITER_ROLE, 'members': [WRITER1, WRITER2]}, - {'role': READER_ROLE, 'members': [READER1, READER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } RESPONSE = POLICY.copy() @@ -552,24 +552,24 @@ def test_set_iam_policy_w_bound_client(self): policy = Policy('DEADBEEF', 17) policy.owners.add(OWNER1) policy.owners.add(OWNER2) - policy.writers.add(WRITER1) - policy.writers.add(WRITER2) - policy.readers.add(READER1) - policy.readers.add(READER2) + policy.editors.add(EDITOR1) + policy.editors.add(EDITOR2) + policy.viewers.add(VIEWER1) + policy.viewers.add(VIEWER2) new_policy = topic.set_iam_policy(policy) self.assertEqual(new_policy.etag, 'ABACABAF') self.assertEqual(new_policy.version, 18) self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.writers), [WRITER1, WRITER2]) - self.assertEqual(sorted(new_policy.readers), [READER1, READER2]) + 
self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] self.assertEqual(req['method'], 'POST') self.assertEqual(req['path'], '/%s' % PATH) - self.assertEqual(req['data'], POLICY) + self.assertEqual(req['data'], {'policy': POLICY}) def test_set_iam_policy_w_alternate_client(self): from gcloud.pubsub.iam import Policy @@ -591,23 +591,23 @@ def test_set_iam_policy_w_alternate_client(self): self.assertEqual(new_policy.etag, 'ACAB') self.assertEqual(new_policy.version, None) self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.writers), []) - self.assertEqual(sorted(new_policy.readers), []) + self.assertEqual(sorted(new_policy.editors), []) + self.assertEqual(sorted(new_policy.viewers), []) self.assertEqual(len(conn1._requested), 0) self.assertEqual(len(conn2._requested), 1) req = conn2._requested[0] self.assertEqual(req['method'], 'POST') self.assertEqual(req['path'], '/%s' % PATH) - self.assertEqual(req['data'], {}) + self.assertEqual(req['data'], {'policy': {}}) def test_check_iam_permissions_w_bound_client(self): - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE TOPIC_NAME = 'topic_name' PROJECT = 'PROJECT' PATH = 'projects/%s/topics/%s:testIamPermissions' % ( PROJECT, TOPIC_NAME) - ROLES = [READER_ROLE, WRITER_ROLE, OWNER_ROLE] + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] REQUESTED = { 'permissions': ROLES, } @@ -628,12 +628,12 @@ def test_check_iam_permissions_w_bound_client(self): self.assertEqual(req['data'], REQUESTED) def test_check_iam_permissions_w_alternate_client(self): - from gcloud.pubsub.iam import OWNER_ROLE, WRITER_ROLE, READER_ROLE + from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE TOPIC_NAME = 'topic_name' PROJECT = 'PROJECT' PATH = 
'projects/%s/topics/%s:testIamPermissions' % ( PROJECT, TOPIC_NAME) - ROLES = [READER_ROLE, WRITER_ROLE, OWNER_ROLE] + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] REQUESTED = { 'permissions': ROLES, } diff --git a/gcloud/pubsub/topic.py b/gcloud/pubsub/topic.py index 859a84c93d29..5e442afeeb59 100644 --- a/gcloud/pubsub/topic.py +++ b/gcloud/pubsub/topic.py @@ -299,8 +299,9 @@ def set_iam_policy(self, policy, client=None): client = self._require_client(client) path = '%s:setIamPolicy' % (self.path,) resource = policy.to_api_repr() + wrapped = {'policy': resource} resp = client.connection.api_request( - method='POST', path=path, data=resource) + method='POST', path=path, data=wrapped) return Policy.from_api_repr(resp) def check_iam_permissions(self, permissions, client=None): diff --git a/gcloud/test__helpers.py b/gcloud/test__helpers.py index 4f2cb849c89d..00aa5075c731 100644 --- a/gcloud/test__helpers.py +++ b/gcloud/test__helpers.py @@ -411,7 +411,21 @@ def _callFUT(self, dt_str): from gcloud._helpers import _rfc3339_to_datetime return _rfc3339_to_datetime(dt_str) - def test_it(self): + def test_w_bogus_zone(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_microseconds(self): import datetime from gcloud._helpers import UTC @@ -430,6 +444,76 @@ def test_it(self): year, month, day, hour, minute, seconds, micros, UTC) self.assertEqual(result, expected_result) + def test_w_naonseconds(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + nanos = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%09dZ' % ( + year, month, day, hour, minute, seconds, nanos) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + +class Test__rfc3339_nanos_to_datetime(unittest2.TestCase): + + def 
_callFUT(self, dt_str): + from gcloud._helpers import _rfc3339_nanos_to_datetime + return _rfc3339_nanos_to_datetime(dt_str) + + def test_w_bogus_zone(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_microseconds(self): + + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dZ' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_naonseconds(self): + import datetime + from gcloud._helpers import UTC + + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + nanos = 123456789 + micros = nanos // 1000 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dZ' % ( + year, month, day, hour, minute, seconds, nanos) + result = self._callFUT(dt_str) + expected_result = datetime.datetime( + year, month, day, hour, minute, seconds, micros, UTC) + self.assertEqual(result, expected_result) + class Test__datetime_to_rfc3339(unittest2.TestCase): diff --git a/scripts/verify_included_modules.py b/scripts/verify_included_modules.py index eb1a6f3571fe..41bb658ebe23 100644 --- a/scripts/verify_included_modules.py +++ b/scripts/verify_included_modules.py @@ -36,6 +36,7 @@ 'gcloud.datastore.__init__', 'gcloud.dns.__init__', 'gcloud.iterator', + 'gcloud.logging.__init__', 'gcloud.pubsub.__init__', 'gcloud.resource_manager.__init__', 'gcloud.search.__init__', diff --git a/system_tests/logging_.py b/system_tests/logging_.py new file mode 100644 index 000000000000..9e3c02de30e6 --- /dev/null +++ b/system_tests/logging_.py @@ -0,0 +1,304 @@ +# Copyright 2016 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +import unittest2 + +from gcloud import _helpers +from gcloud.environment_vars import TESTS_PROJECT +from gcloud import logging + + +_MILLIS = 1000 * time.time() +DEFAULT_METRIC_NAME = 'system-tests-metric-%d' % (_MILLIS,) +DEFAULT_SINK_NAME = 'system-tests-sink-%d' % (_MILLIS,) +DEFAULT_FILTER = 'logName:syslog AND severity>=INFO' +DEFAULT_DESCRIPTION = 'System testing' +BUCKET_NAME = 'gcloud-python-system-testing-%d' % (_MILLIS,) +DATASET_NAME = 'system_testing_dataset_%d' % (_MILLIS,) +TOPIC_NAME = 'gcloud-python-system-testing-%d' % (_MILLIS,) + + +class Config(object): + """Run-time configuration to be modified at set-up. + + This is a mutable stand-in to allow test set-up to modify + global state. 
+ """ + CLIENT = None + + +def setUpModule(): + _helpers.PROJECT = TESTS_PROJECT + Config.CLIENT = logging.Client() + + +class TestLogging(unittest2.TestCase): + + def setUp(self): + self.to_delete = [] + + def tearDown(self): + from gcloud.exceptions import NotFound + for doomed in self.to_delete: + backoff_intervals = [1, 2, 4, 8] + while True: + try: + doomed.delete() + break + except NotFound: + if backoff_intervals: + time.sleep(backoff_intervals.pop(0)) + else: + raise + + @staticmethod + def _logger_name(): + _millis = 1000 * time.time() + return 'system-tests-logger-%d' % (_millis,) + + def test_log_text(self): + TEXT_PAYLOAD = 'System test: test_log_text' + logger = Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + logger.log_text(TEXT_PAYLOAD) + time.sleep(2) + entries, _ = logger.list_entries() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + + def test_log_text_w_metadata(self): + TEXT_PAYLOAD = 'System test: test_log_text' + INSERT_ID = 'INSERTID' + SEVERITY = 'INFO' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + logger = Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + logger.log_text(TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, + http_request=REQUEST) + time.sleep(2) + entries, _ = logger.list_entries() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request['requestMethod'], METHOD) + self.assertEqual(request['requestUrl'], URI) + self.assertEqual(request['status'], int(STATUS)) + + def test_log_struct(self): + JSON_PAYLOAD = { + 'message': 'System test: test_log_struct', + 'weather': 'partly cloudy', + } + logger = 
Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + logger.log_struct(JSON_PAYLOAD) + time.sleep(2) + entries, _ = logger.list_entries() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, JSON_PAYLOAD) + + def test_log_struct_w_metadata(self): + JSON_PAYLOAD = { + 'message': 'System test: test_log_struct', + 'weather': 'partly cloudy', + } + INSERT_ID = 'INSERTID' + SEVERITY = 'INFO' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + logger = Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + logger.log_struct(JSON_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, + http_request=REQUEST) + time.sleep(2) + entries, _ = logger.list_entries() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, JSON_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request['requestMethod'], METHOD) + self.assertEqual(request['requestUrl'], URI) + self.assertEqual(request['status'], int(STATUS)) + + def test_create_metric(self): + metric = Config.CLIENT.metric( + DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + metric.create() + self.to_delete.append(metric) + self.assertTrue(metric.exists()) + + def test_list_metrics(self): + metric = Config.CLIENT.metric( + DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + before_metrics, _ = Config.CLIENT.list_metrics() + before_names = set(metric.name for metric in before_metrics) + metric.create() + self.to_delete.append(metric) + self.assertTrue(metric.exists()) + after_metrics, _ = Config.CLIENT.list_metrics() + after_names = set(metric.name for metric in after_metrics) + self.assertEqual(after_names - before_names, + 
set([DEFAULT_METRIC_NAME])) + + def test_reload_metric(self): + metric = Config.CLIENT.metric( + DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + metric.create() + self.to_delete.append(metric) + metric.filter_ = 'logName:other' + metric.description = 'local changes' + metric.reload() + self.assertEqual(metric.filter_, DEFAULT_FILTER) + self.assertEqual(metric.description, DEFAULT_DESCRIPTION) + + def test_update_metric(self): + NEW_FILTER = 'logName:other' + NEW_DESCRIPTION = 'updated' + metric = Config.CLIENT.metric( + DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + metric.create() + self.to_delete.append(metric) + metric.filter_ = NEW_FILTER + metric.description = NEW_DESCRIPTION + metric.update() + after_metrics, _ = Config.CLIENT.list_metrics() + after_info = dict((metric.name, metric) for metric in after_metrics) + after = after_info[DEFAULT_METRIC_NAME] + self.assertEqual(after.filter_, NEW_FILTER) + self.assertEqual(after.description, NEW_DESCRIPTION) + + def _init_storage_bucket(self): + from gcloud import storage + BUCKET_URI = 'storage.googleapis.com/%s' % (BUCKET_NAME,) + + # Create the destination bucket, and set up the ACL to allow + # Cloud Logging to write into it. 
+ storage_client = storage.Client() + bucket = storage_client.create_bucket(BUCKET_NAME) + self.to_delete.append(bucket) + bucket.acl.reload() + logs_group = bucket.acl.group('cloud-logs@google.com') + logs_group.grant_owner() + bucket.acl.add_entity(logs_group) + bucket.acl.save() + + return BUCKET_URI + + def test_create_sink_storage_bucket(self): + uri = self._init_storage_bucket() + + sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, uri) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) + + def test_create_sink_pubsub_topic(self): + from gcloud import pubsub + + # Create the destination topic, and set up the IAM policy to allow + # Cloud Logging to write into it. + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(TOPIC_NAME) + topic.create() + self.to_delete.append(topic) + policy = topic.get_iam_policy() + policy.owners.add(policy.group('cloud-logs@google.com')) + topic.set_iam_policy(policy) + + TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic.full_name,) + + sink = Config.CLIENT.sink( + DEFAULT_SINK_NAME, DEFAULT_FILTER, TOPIC_URI) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) + + def _init_bigquery_dataset(self): + from gcloud import bigquery + from gcloud.bigquery.dataset import AccessGrant + DATASET_URI = 'bigquery.googleapis.com/projects/%s/datasets/%s' % ( + Config.CLIENT.project, DATASET_NAME,) + + # Create the destination dataset, and set up the ACL to allow + # Cloud Logging to write into it. 
+ bigquery_client = bigquery.Client() + dataset = bigquery_client.dataset(DATASET_NAME) + dataset.create() + self.to_delete.append(dataset) + dataset.reload() + grants = dataset.access_grants + grants.append(AccessGrant( + 'WRITER', 'groupByEmail', 'cloud-logs@google.com')) + dataset.access_grants = grants + dataset.update() + return DATASET_URI + + def test_create_sink_bigquery_dataset(self): + uri = self._init_bigquery_dataset() + sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, uri) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) + + def test_reload_sink(self): + uri = self._init_bigquery_dataset() + sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, uri) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + sink.filter_ = 'BOGUS FILTER' + sink.destination = 'BOGUS DESTINATION' + sink.reload() + self.assertEqual(sink.filter_, DEFAULT_FILTER) + self.assertEqual(sink.destination, uri) + + def test_update_sink(self): + bucket_uri = self._init_storage_bucket() + dataset_uri = self._init_bigquery_dataset() + UPDATED_FILTER = 'logName:syslog' + sink = Config.CLIENT.sink( + DEFAULT_SINK_NAME, DEFAULT_FILTER, bucket_uri) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + sink.filter_ = UPDATED_FILTER + sink.destination = dataset_uri + sink.update() + self.assertEqual(sink.filter_, UPDATED_FILTER) + self.assertEqual(sink.destination, dataset_uri) diff --git a/system_tests/pubsub.py b/system_tests/pubsub.py index 956a788c6d36..0adae0a1b0a3 100644 --- a/system_tests/pubsub.py +++ b/system_tests/pubsub.py @@ -89,7 +89,7 @@ def test_create_subscription_defaults(self): self.assertFalse(topic.exists()) topic.create() self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + SUBSCRIPTION_NAME = 'subscribing-now-%d' % (1000 * time.time(),) subscription = topic.subscription(SUBSCRIPTION_NAME) 
self.assertFalse(subscription.exists()) subscription.create() @@ -103,7 +103,7 @@ def test_create_subscription_w_ack_deadline(self): self.assertFalse(topic.exists()) topic.create() self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + SUBSCRIPTION_NAME = 'subscribing-now-%d' % (1000 * time.time(),) subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=120) self.assertFalse(subscription.exists()) subscription.create() @@ -142,7 +142,7 @@ def test_message_pull_mode_e2e(self): self.assertFalse(topic.exists()) topic.create() self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + SUBSCRIPTION_NAME = 'subscribing-now-%d' % (1000 * time.time(),) subscription = topic.subscription(SUBSCRIPTION_NAME) self.assertFalse(subscription.exists()) subscription.create() @@ -168,3 +168,42 @@ def _by_timestamp(message): self.assertEqual(message1.attributes['extra'], EXTRA_1) self.assertEqual(message2.data, MESSAGE_2) self.assertEqual(message2.attributes['extra'], EXTRA_2) + + def test_topic_iam_policy(self): + topic_name = 'test-topic-iam-policy-topic-%d' % (1000 * time.time(),) + topic = Config.CLIENT.topic(topic_name) + topic.create() + count = 5 + while count > 0 and not topic.exists(): + time.sleep(1) + count -= 1 + self.assertTrue(topic.exists()) + self.to_delete.append(topic) + policy = topic.get_iam_policy() + policy.viewers.add(policy.user('jjg@google.com')) + new_policy = topic.set_iam_policy(policy) + self.assertEqual(new_policy.viewers, policy.viewers) + + def test_subscription_iam_policy(self): + topic_name = 'test-sub-iam-policy-topic-%d' % (1000 * time.time(),) + topic = Config.CLIENT.topic(topic_name) + topic.create() + count = 5 + while count > 0 and not topic.exists(): + time.sleep(1) + count -= 1 + self.assertTrue(topic.exists()) + self.to_delete.append(topic) + SUB_NAME = 'test-sub-iam-policy-sub-%d' % (1000 * time.time(),) + subscription = topic.subscription(SUB_NAME) + subscription.create() + count = 5 + while count > 0 
and not subscription.exists(): + time.sleep(1) + count -= 1 + self.assertTrue(subscription.exists()) + self.to_delete.insert(0, subscription) + policy = subscription.get_iam_policy() + policy.viewers.add(policy.user('jjg@google.com')) + new_policy = subscription.set_iam_policy(policy) + self.assertEqual(new_policy.viewers, policy.viewers) diff --git a/system_tests/run_system_test.py b/system_tests/run_system_test.py index ec0ce4043abf..c880a81bfe2a 100644 --- a/system_tests/run_system_test.py +++ b/system_tests/run_system_test.py @@ -22,6 +22,7 @@ import bigtable import bigtable_happybase import datastore +import logging_ import pubsub import storage import system_test_utils @@ -34,6 +35,7 @@ 'bigquery': bigquery, 'bigtable': bigtable, 'bigtable-happybase': bigtable_happybase, + 'logging': logging_, }