diff --git a/gcloud/_helpers.py b/gcloud/_helpers.py index c6267888be36..a2c4289c1e59 100644 --- a/gcloud/_helpers.py +++ b/gcloud/_helpers.py @@ -26,6 +26,10 @@ from threading import local as Local from google.protobuf import timestamp_pb2 +try: + from google.appengine.api import app_identity +except ImportError: + app_identity = None import six from six.moves.http_client import HTTPConnection from six.moves import configparser @@ -33,11 +37,6 @@ from gcloud.environment_vars import PROJECT from gcloud.environment_vars import CREDENTIALS -try: - from google.appengine.api import app_identity -except ImportError: - app_identity = None - _NOW = datetime.datetime.utcnow # To be replaced by tests. _RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' @@ -77,8 +76,9 @@ def push(self, resource): def pop(self): """Pop a resource from our stack. - :raises: IndexError if the stack is empty. + :rtype: object :returns: the top-most resource, after removing it. + :raises IndexError: if the stack is empty. """ return self._stack.pop() @@ -86,6 +86,7 @@ def pop(self): def top(self): """Get the top-most resource + :rtype: object :returns: the top-most item, or None if the stack is empty. """ if len(self._stack) > 0: @@ -141,8 +142,7 @@ def _ensure_tuple_or_list(arg_name, tuple_or_list): :rtype: list of str :returns: The ``tuple_or_list`` passed in cast to a ``list``. - :raises: class:`TypeError` if the ``tuple_or_list`` is not a tuple or - list. + :raises TypeError: if the ``tuple_or_list`` is not a tuple or list. """ if not isinstance(tuple_or_list, (tuple, list)): raise TypeError('Expected %s to be a tuple or list. ' @@ -392,6 +392,8 @@ def _rfc3339_nanos_to_datetime(dt_str): :rtype: :class:`datetime.datetime` :returns: The datetime object created from the string. + :raises ValueError: If the timestamp does not match the RFC 3339 + regular expression. 
""" with_nanos = _RFC3339_NANOS.match(dt_str) if with_nanos is None: @@ -439,8 +441,7 @@ def _to_bytes(value, encoding='ascii'): :rtype: str / bytes :returns: The original value converted to bytes (if unicode) or as passed in if it started out as bytes. - :raises: :class:`TypeError ` if the value - could not be converted to bytes. + :raises TypeError: if the value could not be converted to bytes. """ result = (value.encode(encoding) if isinstance(value, six.text_type) else value) @@ -460,8 +461,7 @@ def _bytes_to_unicode(value): :returns: The original value converted to unicode (if bytes) or as passed in if it started out as unicode. - :raises: :class:`ValueError` if the value could not be converted to - unicode. + :raises ValueError: if the value could not be converted to unicode. """ result = (value.decode('utf-8') if isinstance(value, six.binary_type) else value) @@ -522,9 +522,9 @@ def _name_from_project_path(path, project, template): :rtype: str :returns: Name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. + :raises ValueError: if the ``path`` is ill-formed or if the project from + the ``path`` does not agree with the ``project`` + passed in. """ if isinstance(template, str): template = re.compile(template) diff --git a/gcloud/bigquery/dataset.py b/gcloud/bigquery/dataset.py index 397dbe244353..a8f7e18f4631 100644 --- a/gcloud/bigquery/dataset.py +++ b/gcloud/bigquery/dataset.py @@ -432,6 +432,9 @@ def exists(self, client=None): :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. + + :rtype: bool + :returns: Boolean indicating existence of the dataset. 
""" client = self._require_client(client) diff --git a/gcloud/bigquery/job.py b/gcloud/bigquery/job.py index d9643594b66a..7febfd06c60b 100644 --- a/gcloud/bigquery/job.py +++ b/gcloud/bigquery/job.py @@ -319,6 +319,9 @@ def exists(self, client=None): :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. + + :rtype: bool + :returns: Boolean indicating existence of the job. """ client = self._require_client(client) diff --git a/gcloud/bigquery/table.py b/gcloud/bigquery/table.py index 7bd7f818ee8d..ec6f7a45bf0c 100644 --- a/gcloud/bigquery/table.py +++ b/gcloud/bigquery/table.py @@ -461,6 +461,9 @@ def exists(self, client=None): :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. + + :rtype: bool + :returns: Boolean indicating existence of the table. """ client = self._require_client(client) diff --git a/gcloud/bigtable/happybase/connection.py b/gcloud/bigtable/happybase/connection.py index 3780763cd033..86df4f3e78ee 100644 --- a/gcloud/bigtable/happybase/connection.py +++ b/gcloud/bigtable/happybase/connection.py @@ -78,8 +78,8 @@ def _get_instance(timeout=None): :rtype: :class:`gcloud.bigtable.instance.Instance` :returns: The unique instance owned by the project inferred from the environment. - :raises: :class:`ValueError ` if there is a failed - location or any number of instances other than one. + :raises ValueError: if there is a failed location or any number of + instances other than one. """ client_kwargs = {'admin': True} if timeout is not None: @@ -189,9 +189,8 @@ def _handle_legacy_args(arguments_dict): :type arguments_dict: dict :param arguments_dict: Unused keyword arguments. - :raises: :class:`TypeError ` if a keyword other - than ``host``, ``port``, ``compat``, ``transport`` or - ``protocol`` is used. 
+ :raises TypeError: if a keyword other than ``host``, ``port``, + ``compat``, ``transport`` or ``protocol`` is used. """ common_args = _LEGACY_ARGS.intersection(six.iterkeys(arguments_dict)) if common_args: @@ -329,10 +328,12 @@ def create_table(self, name, families): * :class:`dict` * :class:`.GarbageCollectionRule` - :raises: :class:`TypeError ` if ``families`` is - not a dictionary, - :class:`ValueError ` if ``families`` - has no entries + :raises TypeError: If ``families`` is not a dictionary. + :raises ValueError: If ``families`` has no entries. + :raises AlreadyExists: If creation fails due to an already + existing table. + :raises NetworkError: If creation fails for a reason other than + table exists. """ if not isinstance(families, dict): raise TypeError('families arg must be a dictionary') diff --git a/gcloud/bigtable/happybase/pool.py b/gcloud/bigtable/happybase/pool.py index 1ed22cdd6c84..f670065fb049 100644 --- a/gcloud/bigtable/happybase/pool.py +++ b/gcloud/bigtable/happybase/pool.py @@ -113,12 +113,13 @@ def connection(self, timeout=None): If ``timeout`` is omitted, this method waits forever for a connection to become available from the local queue. + Yields an active :class:`Connection <.happybase.connection.Connection>` + from the pool. + :type timeout: int :param timeout: (Optional) Time (in seconds) to wait for a connection to open. - :rtype: :class:`Connection <.happybase.connection.Connection>` - :returns: An active connection from the pool. :raises: :class:`NoConnectionsAvailable` if no connection can be retrieved from the pool before the ``timeout`` (only if a timeout is specified). diff --git a/gcloud/connection.py b/gcloud/connection.py index b7518d020afc..1a96086a35a6 100644 --- a/gcloud/connection.py +++ b/gcloud/connection.py @@ -324,6 +324,9 @@ def api_request(self, method, path, query_params=None, initialization of the object at a later time. :raises: Exception if the response code is not 200 OK. 
+ :rtype: dict or str + :returns: The API response payload, either as a raw string or + a dictionary if the response is valid JSON. """ url = self.build_api_url(path=path, query_params=query_params, api_base_url=api_base_url, diff --git a/gcloud/datastore/client.py b/gcloud/datastore/client.py index fc8b36b46910..a83af078f4d4 100644 --- a/gcloud/datastore/client.py +++ b/gcloud/datastore/client.py @@ -366,7 +366,7 @@ def delete(self, key): :type key: :class:`gcloud.datastore.key.Key` :param key: The key to be deleted from the datastore. """ - return self.delete_multi(keys=[key]) + self.delete_multi(keys=[key]) def delete_multi(self, keys): """Delete keys from the Cloud Datastore. diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 63a82adf19ff..de030bc04470 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -112,10 +112,13 @@ def _rpc(self, project, method, request_pb, response_pb_cls): :type request_pb: :class:`google.protobuf.message.Message` instance :param request_pb: the protobuf instance representing the request. - :type response_pb_cls: A :class:`google.protobuf.message.Message' + :type response_pb_cls: A :class:`google.protobuf.message.Message` subclass. :param response_pb_cls: The class used to unmarshall the response protobuf. + + :rtype: :class:`google.protobuf.message.Message` + :returns: The RPC message parsed from the response. """ response = self._request(project=project, method=method, data=request_pb.SerializeToString()) @@ -142,6 +145,9 @@ def build_api_url(self, project, method, base_url=None, :type api_version: string :param api_version: The version of the API to connect to. You shouldn't have to provide this. + + :rtype: str + :returns: The API URL created. """ return self.API_URL_TEMPLATE.format( api_base=(base_url or self.api_base_url), @@ -322,9 +328,9 @@ def commit(self, project, request, transaction_id): This method will mutate ``request`` before using it. 
:rtype: tuple - :returns': The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key - that was completed in the commit. + :returns: The pair of the number of index updates and a list of + :class:`._generated.entity_pb2.Key` for each incomplete key + that was completed in the commit. """ if transaction_id: request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL @@ -415,9 +421,9 @@ def _parse_commit_response(commit_response_pb): :param commit_response_pb: The protobuf response from a commit request. :rtype: tuple - :returns': The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key - that was completed in the commit. + :returns: The pair of the number of index updates and a list of + :class:`._generated.entity_pb2.Key` for each incomplete key + that was completed in the commit. """ mut_results = commit_response_pb.mutation_results index_updates = commit_response_pb.index_updates diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 0d5ce4b18bcb..7021d3d7e7f5 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -131,6 +131,7 @@ def exclude_from_indexes(self): """Names of fields which are *not* to be indexed for this entity. :rtype: sequence of field names + :returns: The set of fields excluded from indexes. """ return frozenset(self._exclude_from_indexes) diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index 6b0ff82772ab..e13f7a51b039 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -296,6 +296,7 @@ def _pb_attr_value(val): bool, float, integer, string :param val: The value to be scrutinized. + :rtype: tuple :returns: A tuple of the attribute name and proper value type. """ @@ -341,6 +342,7 @@ def _get_value_from_value_pb(value_pb): :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value` :param value_pb: The Value Protobuf. 
+ :rtype: object :returns: The value provided by the Protobuf. :raises: :class:`ValueError ` if no value type has been set. diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index a6ce955cee38..2b2a7928221a 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -101,6 +101,7 @@ def project(self): """Get the project for this Query. :rtype: str + :returns: The project for the query. """ return self._project or self._client.project @@ -128,6 +129,7 @@ def kind(self): """Get the Kind of the Query. :rtype: string + :returns: The kind for the query. """ return self._kind @@ -155,6 +157,7 @@ def ancestor(self): """The ancestor key for the query. :rtype: Key or None + :returns: The ancestor for the query. """ return self._ancestor @@ -179,6 +182,7 @@ def filters(self): """Filters set on the query. :rtype: sequence of (property_name, operator, value) tuples. + :returns: The filters set on the query. """ return self._filters[:] @@ -267,6 +271,7 @@ def order(self): """Names of fields used to sort query results. :rtype: sequence of string + :returns: The order(s) set on the query. """ return self._order[:] @@ -291,6 +296,7 @@ def distinct_on(self): """Names of fields used to group query results. :rtype: sequence of string + :returns: The "distinct on" fields set on the query. """ return self._distinct_on[:] @@ -338,6 +344,7 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, If not supplied, uses the query's value. :rtype: :class:`Iterator` + :returns: The iterator for the query. :raises: ValueError if ``connection`` is not passed and no implicit default has been set. """ @@ -400,6 +407,7 @@ def next_page(self): to iterate on the current Iterator. :rtype: tuple, (entities, more_results, cursor) + :returns: The next page of results. 
""" pb = _pb_from_query(self._query) diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index dc78c7ba99f9..c3ee420a1d36 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -112,6 +112,7 @@ def current(self): returns None. :rtype: :class:`gcloud.datastore.transaction.Transaction` or None + :returns: The current transaction (if any are active). """ top = super(Transaction, self).current() if isinstance(top, Transaction): diff --git a/gcloud/dns/changes.py b/gcloud/dns/changes.py index e3e05e723397..58ab3bfe8bf2 100644 --- a/gcloud/dns/changes.py +++ b/gcloud/dns/changes.py @@ -229,6 +229,9 @@ def exists(self, client=None): :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current zone. + + :rtype: bool + :returns: Boolean indicating existence of the changes. """ client = self._require_client(client) try: diff --git a/gcloud/dns/zone.py b/gcloud/dns/zone.py index 2a7df03da6ea..d6e9c569b3db 100644 --- a/gcloud/dns/zone.py +++ b/gcloud/dns/zone.py @@ -267,6 +267,9 @@ def exists(self, client=None): :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current zone. + + :rtype: bool + :returns: Boolean indicating existence of the managed zone. """ client = self._require_client(client) diff --git a/gcloud/iterator.py b/gcloud/iterator.py index f62d28578ea5..14fdd905f300 100644 --- a/gcloud/iterator.py +++ b/gcloud/iterator.py @@ -136,9 +136,6 @@ def get_items_from_response(self, response): :type response: dict :param response: The response of asking for the next page of items. - - :rtype: iterable - :returns: Items that the iterator should yield. 
""" raise NotImplementedError diff --git a/gcloud/logging/_gax.py b/gcloud/logging/_gax.py index 3f34645f3588..5b70e2abf61a 100644 --- a/gcloud/logging/_gax.py +++ b/gcloud/logging/_gax.py @@ -201,6 +201,10 @@ def sink_get(self, project, sink_name): :type sink_name: string :param sink_name: the name of the sink + + :rtype: dict + :returns: The sink object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/sinks/%s' % (project, sink_name) @@ -228,6 +232,10 @@ def sink_update(self, project, sink_name, filter_, destination): :type destination: string :param destination: destination URI for the entries exported by the sink. + + :rtype: dict + :returns: The sink object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/sinks/%s' % (project, sink_name) @@ -336,6 +344,10 @@ def metric_get(self, project, metric_name): :type metric_name: string :param metric_name: the name of the metric + + :rtype: dict + :returns: The metric object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/metrics/%s' % (project, metric_name) @@ -362,6 +374,10 @@ def metric_update(self, project, metric_name, filter_, description): :type description: string :param description: description of the metric. + + :rtype: dict + :returns: The metric object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/metrics/%s' % (project, metric_name) diff --git a/gcloud/logging/connection.py b/gcloud/logging/connection.py index fa60e181b3fd..6cad3f5ed03f 100644 --- a/gcloud/logging/connection.py +++ b/gcloud/logging/connection.py @@ -253,6 +253,9 @@ def sink_get(self, project, sink_name): :type sink_name: string :param sink_name: the name of the sink + + :rtype: dict + :returns: The JSON sink object returned from the API. 
""" target = '/projects/%s/sinks/%s' % (project, sink_name) return self._connection.api_request(method='GET', path=target) @@ -388,6 +391,9 @@ def metric_get(self, project, metric_name): :type metric_name: string :param metric_name: the name of the metric + + :rtype: dict + :returns: The JSON metric object returned from the API. """ target = '/projects/%s/metrics/%s' % (project, metric_name) return self._connection.api_request(method='GET', path=target) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index ad2d4b70ca77..0c781c73c9d4 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -118,6 +118,9 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type http_request: dict or :class:`NoneType` :param http_request: (optional) info about HTTP request associated with the entry + + :rtype: dict + :returns: The JSON resource created. """ resource = { 'logName': self.full_name, diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index b05269e39ac5..b3a1f282a5fb 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -121,6 +121,9 @@ def exists(self, client=None): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. + + :rtype: bool + :returns: Boolean indicating existence of the metric. """ client = self._require_client(client) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index 07a6dba2a0d0..9cde122d5851 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -125,6 +125,9 @@ def exists(self, client=None): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. + + :rtype: bool + :returns: Boolean indicating existence of the sink. 
""" client = self._require_client(client) diff --git a/gcloud/monitoring/client.py b/gcloud/monitoring/client.py index 25e729c5fc9c..efe47c23e538 100644 --- a/gcloud/monitoring/client.py +++ b/gcloud/monitoring/client.py @@ -180,6 +180,9 @@ def metric_descriptor(self, type_, :type display_name: string :param display_name: An optional concise name for the metric. + + :rtype: :class:`MetricDescriptor` + :returns: The metric descriptor created with the passed-in arguments. """ return MetricDescriptor( self, type_, diff --git a/gcloud/monitoring/query.py b/gcloud/monitoring/query.py index 44e7f917de87..372959f53e9f 100644 --- a/gcloud/monitoring/query.py +++ b/gcloud/monitoring/query.py @@ -409,6 +409,10 @@ def reduce(self, cross_series_reducer, *group_by_fields): def iter(self, headers_only=False, page_size=None): """Yield all time series objects selected by the query. + The generator returned iterates over + :class:`~gcloud.monitoring.timeseries.TimeSeries` objects + containing points ordered from oldest to newest. + Note that the :class:`Query` object itself is an iterable, such that the following are equivalent:: @@ -428,9 +432,6 @@ def iter(self, headers_only=False, page_size=None): points to return per page. This can be used to control how far the iterator reads ahead. - :rtype: iterator over :class:`~gcloud.monitoring.timeseries.TimeSeries` - :returns: Time series objects, containing points ordered from oldest - to newest. :raises: :exc:`ValueError` if the query time interval has not been specified. """ @@ -481,8 +482,8 @@ def _build_query_params(self, headers_only=False, page_size=None, page_token=None): """Yield key-value pairs for the URL query string. - We use a series of key-value pairs instead of a ``dict`` to allow for - repeated fields. + We use a series of key-value pairs (suitable for passing to + ``urlencode``) instead of a ``dict`` to allow for repeated fields. 
:type headers_only: boolean :param headers_only: @@ -494,10 +495,6 @@ def _build_query_params(self, headers_only=False, :type page_token: string or None :param page_token: A token to continue the retrieval. - - :rtype: iterator over tuples - :returns: - Key-value pairs suitable for passing to ``urlencode``. """ yield 'filter', self.filter diff --git a/gcloud/pubsub/_gax.py b/gcloud/pubsub/_gax.py index 28ac6c23e294..9f9e8ef7eee6 100644 --- a/gcloud/pubsub/_gax.py +++ b/gcloud/pubsub/_gax.py @@ -133,9 +133,6 @@ def topic_delete(self, topic_path): :type topic_path: string :param topic_path: fully-qualified path of the new topic, in format ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. """ try: self._gax_api.delete_topic(topic_path) diff --git a/gcloud/pubsub/client.py b/gcloud/pubsub/client.py index 24c51f4697c2..bb610e3fd68e 100644 --- a/gcloud/pubsub/client.py +++ b/gcloud/pubsub/client.py @@ -24,6 +24,7 @@ from gcloud.pubsub.subscription import Subscription from gcloud.pubsub.topic import Topic +# pylint: disable=ungrouped-imports try: from google.pubsub.v1.publisher_api import ( PublisherApi as GeneratedPublisherAPI) @@ -37,6 +38,7 @@ GeneratedSubscriberAPI = GAXSubscriberAPI = None else: _HAVE_GAX = True +# pylint: enable=ungrouped-imports _USE_GAX = _HAVE_GAX and (os.environ.get('GCLOUD_ENABLE_GAX') is not None) diff --git a/gcloud/pubsub/message.py b/gcloud/pubsub/message.py index b309950a88d5..3ed97a33e095 100644 --- a/gcloud/pubsub/message.py +++ b/gcloud/pubsub/message.py @@ -81,6 +81,9 @@ def from_api_repr(cls, api_repr): :type api_repr: dict or None :param api_repr: The API representation of the message + + :rtype: :class:`Message` + :returns: The message created from the response. 
""" data = base64.b64decode(api_repr.get('data', b'')) instance = cls( diff --git a/gcloud/pubsub/subscription.py b/gcloud/pubsub/subscription.py index e2050fb06211..83493d529295 100644 --- a/gcloud/pubsub/subscription.py +++ b/gcloud/pubsub/subscription.py @@ -193,6 +193,9 @@ def exists(self, client=None): :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current subscription's topic. + + :rtype: bool + :returns: Boolean indicating existence of the subscription. """ client = self._require_client(client) api = client.subscriber_api diff --git a/gcloud/pubsub/topic.py b/gcloud/pubsub/topic.py index 568434789ac4..c0706649c735 100644 --- a/gcloud/pubsub/topic.py +++ b/gcloud/pubsub/topic.py @@ -82,6 +82,9 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None): :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. + + :rtype: :class:`Subscription` + :returns: The subscription created with the passed in arguments. """ return Subscription(name, self, ack_deadline=ack_deadline, push_endpoint=push_endpoint) @@ -165,6 +168,9 @@ def exists(self, client=None): :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current topic. + + :rtype: bool + :returns: Boolean indicating existence of the topic. """ client = self._require_client(client) api = client.publisher_api diff --git a/gcloud/resource_manager/project.py b/gcloud/resource_manager/project.py index bf3c0e01d18b..4624d8a27241 100644 --- a/gcloud/resource_manager/project.py +++ b/gcloud/resource_manager/project.py @@ -73,6 +73,7 @@ def from_api_repr(cls, resource, client): :param client: The Client used with this project. :rtype: :class:`gcloud.resource_manager.project.Project` + :returns: The project created. 
""" project = cls(project_id=resource['projectId'], client=client) project.set_properties_from_api_repr(resource) @@ -173,6 +174,9 @@ def exists(self, client=None): :data:`NoneType ` :param client: the client to use. If not passed, falls back to the client stored on the current project. + + :rtype: bool + :returns: Boolean indicating existence of the project. """ client = self._require_client(client) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index e55fcf179a5f..538b5969f5b2 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -166,6 +166,9 @@ def _base64_md5hash(buffer_object): :type buffer_object: bytes buffer :param buffer_object: Buffer containing bytes used to compute an MD5 hash (as base64). + + :rtype: str + :returns: A base64 encoded digest of the MD5 hash. """ hash_obj = md5() _write_buffer_to_hash(buffer_object, hash_obj) diff --git a/gcloud/storage/batch.py b/gcloud/storage/batch.py index 92aed69d3d0a..569ea3e5f2de 100644 --- a/gcloud/storage/batch.py +++ b/gcloud/storage/batch.py @@ -303,9 +303,6 @@ def _unpack_batch_response(response, content): :type content: str :param content: Response payload with a batch response. - - :rtype: generator - :returns: A generator of header, payload pairs. """ parser = Parser() message = _generate_faux_mime_message(parser, response, content) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index d4e82b61bdb5..1d79ae794ce8 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -161,9 +161,6 @@ def create(self, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. - - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The newly created bucket. 
""" client = self._require_client(client) query_params = {'project': client.project} @@ -558,14 +555,6 @@ def lifecycle_rules(self): @lifecycle_rules.setter def lifecycle_rules(self, rules): - """Update the lifecycle rules configured for this bucket. - - See: https://cloud.google.com/storage/docs/lifecycle and - https://cloud.google.com/storage/docs/json_api/v1/buckets - - :rtype: list(dict) - :returns: A sequence of mappings describing each lifecycle rule. - """ self._patch_property('lifecycle', {'rule': rules}) location = _scalar_property('location') diff --git a/gcloud/streaming/buffered_stream.py b/gcloud/streaming/buffered_stream.py index bf5dc66550d4..02f1d5888d40 100644 --- a/gcloud/streaming/buffered_stream.py +++ b/gcloud/streaming/buffered_stream.py @@ -42,6 +42,7 @@ def stream_exhausted(self): """Does the stream have bytes remaining beyond the buffer :rtype: boolean + :returns: Boolean indicating if the stream is exhausted. """ return self._stream_at_end @@ -50,6 +51,7 @@ def stream_end_position(self): """Point to which stream was read into the buffer :rtype: integer + :returns: The end-position of the stream. """ return self._end_pos @@ -58,6 +60,7 @@ def _bytes_remaining(self): """Bytes remaining to be read from the buffer :rtype: integer + :returns: The number of bytes remaining. """ return len(self._buffered_data) - self._buffer_pos @@ -66,6 +69,9 @@ def read(self, size=None): :type size: integer or None :param size: How many bytes to read (defaults to all remaining bytes). + + :rtype: str + :returns: The data read from the stream. 
""" if size is None or size < 0: raise ValueError( diff --git a/gcloud/streaming/exceptions.py b/gcloud/streaming/exceptions.py index 1b3a4f43286a..4ff4b9d44ded 100644 --- a/gcloud/streaming/exceptions.py +++ b/gcloud/streaming/exceptions.py @@ -49,6 +49,7 @@ def from_response(cls, http_response): :param http_response: the response which returned the error :rtype: :class:`HttpError` + :returns: The error created from the response. """ return cls(http_response.info, http_response.content, http_response.request_url) @@ -97,6 +98,7 @@ def from_response(cls, http_response): :param http_response: the response which returned the error :rtype: :class:`RetryAfterError` + :returns: The error created from the response. """ return cls(http_response.info, http_response.content, http_response.request_url, http_response.retry_after) diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index d6200694ab8a..6f4db884e343 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -140,6 +140,7 @@ def loggable_body(self): """Request body for logging purposes :rtype: str + :returns: The body to be logged. """ return self.__loggable_body @@ -162,6 +163,7 @@ def body(self): """Request body :rtype: str + :returns: The body of the request. """ return self.__body @@ -226,6 +228,7 @@ def length(self): for responses larger than ``sys.maxint``. :rtype: integer or long + :returns: The length of the response. """ if 'content-encoding' in self.info and 'content-range' in self.info: # httplib2 rewrites content-length in the case of a compressed @@ -243,6 +246,7 @@ def status_code(self): """HTTP status code :rtype: integer + :returns: The response status code. """ return int(self.info['status']) @@ -438,6 +442,7 @@ def get_http(**kwds): :param kwds: keyword arguments to pass to factories. :rtype: :class:`httplib2.Http` (or a workalike) + :returns: The HTTP object created. 
""" for factory in _HTTP_FACTORIES: http = factory(**kwds) diff --git a/gcloud/streaming/stream_slice.py b/gcloud/streaming/stream_slice.py index ada3c66e2169..52bcd1295a30 100644 --- a/gcloud/streaming/stream_slice.py +++ b/gcloud/streaming/stream_slice.py @@ -38,6 +38,7 @@ def length(self): For 32-bit python2.x, len() cannot exceed a 32-bit number. :rtype: integer + :returns: The max "length" of the stream. """ return self._max_bytes diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 7ef439d67f36..ac02951dd18e 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -76,9 +76,10 @@ def __repr__(self): @property def close_stream(self): - """Should this instance close the stream when deleted + """Should this instance close the stream when deleted. :rtype: boolean + :returns: Boolean indicated if the stream should be closed. """ return self._close_stream @@ -87,6 +88,7 @@ def http(self): """Http instance used to perform requests. :rtype: :class:`httplib2.Http` (or workalike) + :returns: The HTTP object used for requests. """ return self._http @@ -97,6 +99,7 @@ def bytes_http(self): Defaults to :attr:`http`. :rtype: :class:`httplib2.Http` (or workalike) + :returns: The HTTP object used for binary requests. """ return self._bytes_http or self.http @@ -114,6 +117,7 @@ def num_retries(self): """How many retries should the transfer attempt :rtype: integer + :returns: The number of retries allowed. """ return self._num_retries @@ -136,6 +140,7 @@ def stream(self): """Stream to/from which data is downloaded/uploaded. :rtype: file-like object + :returns: The stream that sends/receives data. """ return self._stream @@ -144,6 +149,7 @@ def url(self): """URL to / from which data is downloaded/uploaded. :rtype: string + :returns: The URL where data is sent/received. 
""" return self._url @@ -170,6 +176,8 @@ def initialized(self): """Has the instance been initialized :rtype: boolean + :returns: Boolean indicating if the current transfer + has been initialized. """ return self.url is not None and self.http is not None @@ -239,6 +247,9 @@ def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Download` + :returns: The download initiated from the file passed. """ path = os.path.expanduser(filename) if os.path.exists(path) and not overwrite: @@ -263,6 +274,9 @@ def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Download` + :returns: The download initiated from the stream passed. """ return cls(stream, auto_transfer=auto_transfer, total_size=total_size, **kwds) @@ -272,6 +286,7 @@ def progress(self): """Number of bytes have been downloaded. :rtype: integer >= 0 + :returns: The number of downloaded bytes. """ return self._progress @@ -280,6 +295,7 @@ def total_size(self): """Total number of bytes to be downloaded. :rtype: integer or None + :returns: The total number of bytes to download. """ return self._total_size @@ -288,6 +304,7 @@ def encoding(self): """'Content-Encoding' used to transfer the file :rtype: string or None + :returns: The encoding of the downloaded content. """ return self._encoding @@ -431,6 +448,7 @@ def _compute_end_byte(self, start, end=None, use_chunks=True): :type use_chunks: boolean :param use_chunks: If False, ignore :attr:`chunksize`. + :rtype: str :returns: Last byte to use in a 'Range' header, or None. """ end_byte = end @@ -642,6 +660,9 @@ def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. 
+ + :rtype: :class:`Upload` + :returns: The upload initiated from the file passed. """ path = os.path.expanduser(filename) if not mime_type: @@ -673,6 +694,9 @@ def from_stream(cls, stream, mime_type, :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Upload` + :returns: The upload initiated from the stream passed. """ if mime_type is None: raise ValueError( @@ -685,6 +709,7 @@ def complete(self): """Has the entire stream been uploaded. :rtype: boolean + :returns: Boolean indicating if the upload is complete. """ return self._complete @@ -693,6 +718,7 @@ def mime_type(self): """MIMEtype of the file being uploaded. :rtype: string + :returns: The mime-type of the upload. """ return self._mime_type @@ -701,6 +727,7 @@ def progress(self): """Bytes uploaded so far :rtype: integer + :returns: The amount uploaded so far. """ return self._progress @@ -709,6 +736,7 @@ def strategy(self): """Upload strategy to use :rtype: string or None + :returns: The strategy used to upload the data. """ return self._strategy @@ -733,6 +761,7 @@ def total_size(self): """Total size of the stream to be uploaded. :rtype: integer or None + :returns: The total size to be uploaded. """ return self._total_size @@ -929,6 +958,7 @@ def _get_range_header(response): :param response: response to be queried :rtype: string + :returns: The header used to determine the bytes range. """ # NOTE: Per RFC 2616[1]/7233[2][3], 'Range' is a request header, # not a response header. If the back-end is actually setting @@ -956,6 +986,9 @@ def initialize_upload(self, http_request, http): :raises: :exc:`ValueError` if the instance has not been configured with a strategy. + :rtype: :class:`~gcloud.streaming.http_wrapper.Response` + :returns: The response if the upload is resumable and auto transfer + is not used. 
""" if self.strategy is None: raise ValueError( @@ -988,6 +1021,9 @@ def _last_byte(range_header): :type range_header: string :param range_header: 'Range' header value per RFC 2616/7233 + + :rtype: int + :returns: The last byte from a range header. """ _, _, end = range_header.partition('-') return int(end) @@ -1016,7 +1052,10 @@ def stream_file(self, use_chunks=True): :type use_chunks: boolean :param use_chunks: If False, send the stream in a single request. - Otherwise, send it in chunks. + Otherwise, send it in chunks. + + :rtype: :class:`gcloud.streaming.http_wrapper.Response` + :returns: The response for the final request made. """ if self.strategy != RESUMABLE_UPLOAD: raise ValueError( @@ -1082,12 +1121,15 @@ def _send_media_request(self, request, end): return response def _send_media_body(self, start): - """ Send the entire stream in a single request. + """Send the entire stream in a single request. Helper for :meth:`stream_file`: :type start: integer :param start: start byte of the range. + + :rtype: :class:`gcloud.streaming.http_wrapper.Response` + :returns: The response from the media upload request. """ self._ensure_initialized() if self.total_size is None: @@ -1115,6 +1157,9 @@ def _send_chunk(self, start): :type start: integer :param start: start byte of the range. + + :rtype: :class:`gcloud.streaming.http_wrapper.Response` + :returns: The response from the chunked upload request. """ self._ensure_initialized() no_log_body = self.total_size is None diff --git a/scripts/pylintrc_default b/scripts/pylintrc_default index 413ccd72ba41..df64b2784a50 100644 --- a/scripts/pylintrc_default +++ b/scripts/pylintrc_default @@ -92,11 +92,7 @@ load-plugins=pylint.extensions.check_docs # will be detected by our 100% code coverage. # # New opinions in pylint 1.6, enforcing PEP 257. 
#1968 for eventual fixes -# - catching-non-exception # - missing-raises-doc -# - missing-returns-doc -# - redundant-returns-doc -# - ungrouped-imports disable = maybe-no-member, no-member, @@ -106,11 +102,7 @@ disable = redefined-variable-type, wrong-import-position, no-name-in-module, - catching-non-exception, missing-raises-doc, - missing-returns-doc, - redundant-returns-doc, - ungrouped-imports [REPORTS] diff --git a/system_tests/bigtable.py b/system_tests/bigtable.py index 6933bc60847c..8418b002ed64 100644 --- a/system_tests/bigtable.py +++ b/system_tests/bigtable.py @@ -71,6 +71,9 @@ def _operation_wait(operation, max_attempts=5): :type max_attempts: int :param max_attempts: (Optional) The maximum number of times to check if the operation has finished. Defaults to 5. + + :rtype: bool + :returns: Boolean indicating if the operation finished. """ total_sleep = 0 while not operation.finished(): diff --git a/tox.ini b/tox.ini index 9ce84c03873a..a7c525cb9d78 100644 --- a/tox.ini +++ b/tox.ini @@ -133,7 +133,7 @@ commands = python {toxinidir}/scripts/run_pylint.py deps = pep8 - pylint >= 1.6.3 + pylint >= 1.6.4 unittest2 psutil Sphinx