diff --git a/README.rst b/README.rst index f1f7f996fea7..cbef6a8be88b 100644 --- a/README.rst +++ b/README.rst @@ -65,10 +65,14 @@ with the Cloud Datastore using this Client Library. .. code:: python from gcloud import datastore - dataset = datastore.get_dataset('dataset-id-here') + datastore.set_default_connection() + datastore.set_default_dataset_id() # Then do other things... - query = dataset.query().kind('EntityKind') - entity = dataset.entity('EntityKind') + from gcloud.datastore.entity import Entity + from gcloud.datastore.key import Key + from gcloud.datastore.query import Query + query = Query(kind='EntityKind') + entity = Entity(key=Key('EntityKind')) Google Cloud Storage -------------------- diff --git a/docs/_components/datastore-getting-started.rst b/docs/_components/datastore-getting-started.rst index c53e1b6b3541..68e9da1abcca 100644 --- a/docs/_components/datastore-getting-started.rst +++ b/docs/_components/datastore-getting-started.rst @@ -38,14 +38,18 @@ Add some data to your dataset Open a Python console and... >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('') - >>> dataset.query().fetch() + >>> datastore.set_default_connection() + >>> datastore.set_default_dataset_id('') + >>> from gcloud.datastore.query import Query + >>> list(Query(kind='Person').fetch()) [] - >>> entity = dataset.entity('Person') + >>> from gcloud.datastore.entity import Entity + >>> from gcloud.datastore.key import Key + >>> entity = Entity(key=Key('Person')) >>> entity['name'] = 'Your name' >>> entity['age'] = 25 >>> entity.save() - >>> dataset.query('Person').fetch() + >>> list(Query(kind='Person').fetch()) [] And that's it! diff --git a/docs/_components/datastore-quickstart.rst b/docs/_components/datastore-quickstart.rst index bb53731f515b..7a14a61d320d 100644 --- a/docs/_components/datastore-quickstart.rst +++ b/docs/_components/datastore-quickstart.rst @@ -46,28 +46,22 @@ You can interact with a demo dataset in a Python interactive shell. 
Start by importing the demo module -and instantiating the demo dataset:: +and initializing the demo settings:: >>> from gcloud.datastore import demo - >>> dataset = demo.get_dataset() + >>> demo.initialize() -Once you have the dataset, +Once you have initialized, you can create entities and save them:: - >>> dataset.query('MyExampleKind').fetch() - [>> entity = dataset.entity('Person') + >>> from gcloud.datastore.entity import Entity + >>> from gcloud.datastore.key import Key + >>> entity = Entity(key=Key('Person')) >>> entity['name'] = 'Your name' >>> entity['age'] = 25 >>> entity.save() - >>> dataset.query('Person').fetch() + >>> from gcloud.datastore.query import Query + >>> list(Query(kind='Person').fetch()) [] -.. note:: - The ``get_dataset`` method is just a shortcut for:: - - >>> from gcloud import datastore - >>> from gcloud.datastore import demo - >>> dataset = datastore.get_dataset(demo.DATASET_ID) - ---- diff --git a/docs/index.rst b/docs/index.rst index e1d0d7c8628a..179ab099389a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -28,8 +28,12 @@ Cloud Datastore .. code-block:: python from gcloud import datastore - dataset = datastore.get_dataset('') - entity = dataset.entity('Person') + datastore.set_default_connection() + datastore.set_default_dataset_id('') + + from gcloud.datastore.entity import Entity + from gcloud.datastore.key import Key + entity = Entity(key=Key('Person')) entity['name'] = 'Your name' entity['age'] = 25 entity.save() diff --git a/gcloud/connection.py b/gcloud/connection.py index a1ac7698a295..b4ae2f6ca76f 100644 --- a/gcloud/connection.py +++ b/gcloud/connection.py @@ -22,11 +22,8 @@ class Connection(object): """A generic connection to Google Cloud Platform. - Subclasses should understand - only the basic types - in method arguments, - however they should be capable - of returning advanced types. + Subclasses should understand only the basic types in method arguments, + however they should be capable of returning advanced types. 
""" API_BASE_URL = 'https://www.googleapis.com' @@ -39,8 +36,10 @@ class Connection(object): """The user agent for gcloud-python requests.""" def __init__(self, credentials=None): - """ - :type credentials: :class:`oauth2client.client.OAuth2Credentials` + """Constructor for Connection. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` :param credentials: The OAuth2 Credentials to use for this connection. """ self._http = None @@ -48,8 +47,10 @@ def __init__(self, credentials=None): @property def credentials(self): - """ - :rtype: :class:`oauth2client.client.OAuth2Credentials`, or None + """Getter for current credentials. + + :rtype: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` :returns: The credentials object associated with this connection. """ return self._credentials diff --git a/gcloud/credentials.py b/gcloud/credentials.py index 0744b67f1409..47ddb0961510 100644 --- a/gcloud/credentials.py +++ b/gcloud/credentials.py @@ -22,32 +22,32 @@ def get_credentials(): .. note:: You should not need to use this function directly. Instead, use the - helper methods provided in - :func:`gcloud.datastore.__init__.get_connection` and - :func:`gcloud.datastore.__init__.get_dataset` which use this method - under the hood. + helper method :func:`gcloud.datastore.__init__.get_connection` + which uses this method under the hood. Checks environment in order of precedence: - Google App Engine (production and testing) - Environment variable GOOGLE_APPLICATION_CREDENTIALS pointing to a file with stored credentials information. - - Stored "well known" file associated with `gcloud` command line tool. + - Stored "well known" file associated with ``gcloud`` command line tool. - Google Compute Engine production environment. The file referred to in GOOGLE_APPLICATION_CREDENTIALS is expected to contain information about credentials that are ready to use. 
This means either service account information or user account information with - a ready-to-use refresh token: - { { - 'type': 'authorized_user', 'type': 'service_account', - 'client_id': '...', 'client_id': '...', - 'client_secret': '...', OR 'client_email': '...', - 'refresh_token': '..., 'private_key_id': '...', - } 'private_key': '...', - } + a ready-to-use refresh token:: + + { { + 'type': 'authorized_user', 'type': 'service_account', + 'client_id': '...', 'client_id': '...', + 'client_secret': '...', OR 'client_email': '...', + 'refresh_token': '..., 'private_key_id': '...', + } 'private_key': '...', + } + The second of these is simply a JSON key downloaded from the Google APIs console. The first is a close cousin of the "client secrets" JSON file - used by `oauth2client.clientsecrets` but differs in formatting. + used by ``oauth2client.clientsecrets`` but differs in formatting. :rtype: :class:`oauth2client.client.GoogleCredentials`, :class:`oauth2client.appengine.AppAssertionCredentials`, @@ -76,14 +76,14 @@ def get_for_service_account_p12(client_email, private_key_path, scope=None): given to you when you created the service account). This file must be in P12 format. - :type scope: string or tuple of strings + :type scope: string or tuple of string :param scope: The scope against which to authenticate. (Different services require different scopes, check the documentation for which scope is required for the different levels of access to any particular API.) :rtype: :class:`oauth2client.client.SignedJwtAssertionCredentials` - :returns: A new SignedJwtAssertionCredentials instance with the + :returns: A new ``SignedJwtAssertionCredentials`` instance with the needed service account settings. 
""" return client.SignedJwtAssertionCredentials( diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 9edf1b5230ea..2244723e40eb 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -16,12 +16,17 @@ You'll typically use these to get started with the API: +>>> from gcloud import datastore >>> from gcloud.datastore.entity import Entity >>> from gcloud.datastore.key import Key >>> from gcloud.datastore.query import Query + +>>> datastore.set_default_connection() +>>> datastore.set_default_dataset_id() + >>> key = Key('EntityKind', 1234) >>> entity = Entity(key) ->>> query = Query('your-dataset-id', kind='EntityKind') +>>> query = Query(kind='EntityKind') The main concepts with this API are: @@ -39,6 +44,10 @@ - :class:`gcloud.datastore.query.Query` which represents a lookup or search over the rows in the datastore. + +- :class:`gcloud.datastore.transaction.Transaction` + which represents an all-or-none transaction and enables consistency + when race conditions may occur. """ import os @@ -49,9 +58,9 @@ from gcloud.datastore import helpers -SCOPE = ('https://www.googleapis.com/auth/datastore ', +SCOPE = ('https://www.googleapis.com/auth/datastore', 'https://www.googleapis.com/auth/userinfo.email') -"""The scope required for authenticating as a Cloud Datastore consumer.""" +"""The scopes required for authenticating as a Cloud Datastore consumer.""" _DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID' @@ -65,7 +74,7 @@ def set_default_dataset_id(dataset_id=None): Local environment variable used is: - GCLOUD_DATASET_ID - :type dataset_id: :class:`str`. + :type dataset_id: string :param dataset_id: Optional. The dataset ID to use as default. 
""" if dataset_id is None: @@ -92,9 +101,13 @@ def get_connection(): with the same set of credentials (unlikely): >>> from gcloud import datastore + >>> from gcloud.datastore import Key + >>> connection = datastore.get_connection() - >>> dataset1 = connection.dataset('dataset1') - >>> dataset2 = connection.dataset('dataset2') + >>> key1 = Key('Kind', 1234, dataset_id='dataset1') + >>> key2 = Key('Kind', 1234, dataset_id='dataset2') + >>> entity1 = key1.get(connection=connection) + >>> entity2 = key2.get(connection=connection) :rtype: :class:`gcloud.datastore.connection.Connection` :returns: A connection defined with the proper credentials. @@ -107,12 +120,12 @@ def get_connection(): def _require_dataset_id(dataset_id=None): """Infer a dataset ID from the environment, if not passed explicitly. - :type dataset_id: :class:`str`. + :type dataset_id: string :param dataset_id: Optional. - :rtype: :class:`gcloud.datastore.dataset.Dataset` - :returns: A dataset based on the current environment. - :raises: :class:`EnvironmentError` if ``dataset_id`` is None, + :rtype: string + :returns: A dataset ID based on the current environment. + :raises: :class:`EnvironmentError` if ``dataset_id`` is ``None``, and cannot be inferred from the environment. """ if dataset_id is None: @@ -130,7 +143,7 @@ def _require_connection(connection=None): :rtype: :class:`gcloud.datastore.connection.Connection` :returns: A connection based on the current environment. - :raises: :class:`EnvironmentError` if ``connection`` is None, and + :raises: :class:`EnvironmentError` if ``connection`` is ``None``, and cannot be inferred from the environment. """ if connection is None: @@ -160,7 +173,7 @@ def get_entities(keys, missing=None, deferred=None, :type connection: :class:`gcloud.datastore.connection.Connection` :param connection: Optional. The connection used to connect to datastore. - :type dataset_id: :class:`str`. + :type dataset_id: string :param dataset_id: Optional. The ID of the dataset. 
:rtype: list of :class:`gcloud.datastore.entity.Entity` @@ -198,18 +211,18 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None): :type incomplete_key: A :class:`gcloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. - :type num_ids: A :class:`int`. + :type num_ids: integer :param num_ids: The number of IDs to allocate. :type connection: :class:`gcloud.datastore.connection.Connection` :param connection: Optional. The connection used to connect to datastore. - :type dataset_id: :class:`str`. + :type dataset_id: string :param dataset_id: Optional. The ID of the dataset. :rtype: list of :class:`gcloud.datastore.key.Key` - :returns: The (complete) keys allocated with `incomplete_key` as root. - :raises: `ValueError` if `incomplete_key` is not a partial key. + :returns: The (complete) keys allocated with ``incomplete_key`` as root. + :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key. """ connection = _require_connection(connection) dataset_id = _require_dataset_id(dataset_id) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 08fb94c4b78c..d8d95de64b79 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -14,6 +14,8 @@ """Connections to gcloud datastore API servers.""" +import six + from gcloud import connection from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import helpers @@ -56,8 +58,8 @@ def _request(self, dataset_id, method, data): :rtype: string :returns: The string response content from the API call. - - :raises: Exception if the response code is not 200 OK. + :raises: :class:`six.moves.http_client.HTTPException` if the response + code is not 200 OK. 
""" headers = { 'Content-Type': 'application/x-protobuf', @@ -68,13 +70,16 @@ def _request(self, dataset_id, method, data): uri=self.build_api_url(dataset_id=dataset_id, method=method), method='POST', headers=headers, body=data) - if headers['status'] != '200': - raise Exception('Request failed. Error was: %s' % content) + status = headers['status'] + if status != '200': + message = ('Request failed with status code %s. ' + 'Error was: %s' % (status, content)) + raise six.moves.http_client.HTTPException(message) return content def _rpc(self, dataset_id, method, request_pb, response_pb_cls): - """ Make a protobuf RPC request. + """Make a protobuf RPC request. :type dataset_id: string :param dataset_id: The ID of the dataset to connect to. This is @@ -84,11 +89,12 @@ def _rpc(self, dataset_id, method, request_pb, response_pb_cls): :param method: The name of the method to invoke. :type request_pb: :class:`google.protobuf.message.Message` instance - :param method: the protobuf instance representing the request. + :param request_pb: the protobuf instance representing the request. - :type response_pb_cls: a :class:`google.protobuf.message.Message' + :type response_pb_cls: A :class:`google.protobuf.message.Message' subclass. - :param method: The class used to unmarshall the response protobuf. + :param response_pb_cls: The class used to unmarshall the response + protobuf. """ response = self._request(dataset_id=dataset_id, method=method, data=request_pb.SerializeToString()) @@ -99,10 +105,8 @@ def build_api_url(cls, dataset_id, method, base_url=None, api_version=None): """Construct the URL for a particular API call. - This method is used internally - to come up with the URL - to use when making RPCs - to the Cloud Datastore API. + This method is used internally to come up with the URL to use when + making RPCs to the Cloud Datastore API. :type dataset_id: string :param dataset_id: The ID of the dataset to connect to. 
This is @@ -133,7 +137,7 @@ def transaction(self, transaction=connection.Connection._EMPTY): :rtype: :class:`gcloud.datastore.transaction.Transaction`, (getting) or :class:`gcloud.datastore.connection.Connection` (setting) - :returns: the current transaction (getting) or self (setting). + :returns: The current transaction (getting) or self (setting). """ if transaction is self._EMPTY: return self._current_transaction @@ -145,7 +149,7 @@ def mutation(self): """Getter for mutation usable with current connection. :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`. - :returns: the mutation instance associated with the current transaction + :returns: The mutation instance associated with the current transaction (if one exists) or or a new mutation instance. """ if self.transaction(): @@ -163,11 +167,11 @@ def lookup(self, dataset_id, key_pbs, (:class:`gcloud.datastore.datastore_v1_pb2.Key` and :class:`gcloud.datastore.datastore_v1_pb2.Entity`) and is used under the hood for methods like - :func:`gcloud.datastore.key.Key.get`: + :meth:`gcloud.datastore.key.Key.get`: >>> from gcloud import datastore >>> from gcloud.datastore.key import Key - >>> connection = datastore.get_connection() + >>> datastore.set_default_connection() >>> key = Key('MyKind', 1234, dataset_id='dataset-id') >>> key.get() @@ -194,7 +198,7 @@ def lookup(self, dataset_id, key_pbs, by the backend as "deferred" will be copied into it. Use only as a keyword param. - :type eventual: bool + :type eventual: boolean :param eventual: If False (the default), request ``STRONG`` read consistency. If True, request ``EVENTUAL`` read consistency. If the connection has a current @@ -252,15 +256,18 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False): matching the query. You typically wouldn't use this method directly, in favor of the - :func:`gcloud.datastore.query.Query.fetch` method. + :meth:`gcloud.datastore.query.Query.fetch` method. 
Under the hood, the :class:`gcloud.datastore.query.Query` class uses this method to fetch data: >>> from gcloud import datastore >>> from gcloud.datastore.query import Query - >>> connection = datastore.get_connection() - >>> query = Query(dataset_id='dataset-id', 'MyKind') + + >>> datastore.set_default_connection() + >>> datastore.set_default_dataset_id() + + >>> query = Query(kind='MyKind') >>> query.add_filter('property', '=', 'val') Using the query's ``fetch_page`` method... @@ -287,11 +294,11 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False): :type namespace: string :param namespace: The namespace over which to run the query. - :type eventual: bool + :type eventual: boolean :param eventual: If False (the default), request ``STRONG`` read - consistency. If True, request ``EVENTUAL`` read - consistency. If the connection has a current - transaction, this value *must* be false. + consistency. If True, request ``EVENTUAL`` read + consistency. If the connection has a current + transaction, this value *must* be false. """ request = datastore_pb.RunQueryRequest() self._set_read_options(request, eventual) @@ -316,8 +323,15 @@ def begin_transaction(self, dataset_id, serializable=False): :type dataset_id: string :param dataset_id: The ID dataset to which the transaction applies. - """ + :type serializable: boolean + :param serializable: Boolean indicating if the isolation level of the + transaction should be SERIALIZABLE (True) or + SNAPSHOT (False). + + :rtype: :class:`.datastore_v1_pb2.BeginTransactionResponse` + :returns': the result protobuf for the begin transaction request. + """ if self.transaction(): raise ValueError('Cannot start a transaction with another already ' 'in progress.') @@ -368,12 +382,12 @@ def rollback(self, dataset_id): Maps the ``DatastoreService.Rollback`` protobuf RPC. - Raises a ``ValueError`` - if the connection isn't currently in a transaction. 
- :type dataset_id: string :param dataset_id: The ID of the dataset to which the transaction belongs. + + :raises: :class:`ValueError` if the connection isn't currently in a + transaction. """ if not self.transaction() or not self.transaction().id: raise ValueError('No transaction to rollback.') @@ -411,9 +425,9 @@ def save_entity(self, dataset_id, key_pb, properties, """Save an entity to the Cloud Datastore with the provided properties. .. note:: - Any existing properties for the entity identified by 'key_pb' - will be replaced by those passed in 'properties'; properties - not passed in 'properties' no longer be set for the entity. + Any existing properties for the entity identified by ``key_pb`` + will be replaced by those passed in ``properties``; properties + not passed in ``properties`` no longer be set for the entity. :type dataset_id: string :param dataset_id: The ID of the dataset in which to save the entity. @@ -424,13 +438,14 @@ def save_entity(self, dataset_id, key_pb, properties, :type properties: dict :param properties: The properties to store on the entity. - :type exclude_from_indexes: sequence of str + :type exclude_from_indexes: sequence of string :param exclude_from_indexes: Names of properties *not* to be indexed. - :rtype: :class:`tuple` - :returns: The pair (`assigned`, `new_id`) where `assigned` is a boolean - indicating if a new ID has been assigned and `new_id` is - either `None` or an integer that has been assigned. + :rtype: tuple + :returns: The pair (``assigned``, ``new_id``) where ``assigned`` is a + boolean indicating if a new ID has been assigned and + ``new_id`` is either ``None`` or an integer that has been + assigned. """ mutation = self.mutation() key_pb = helpers._prepare_key_for_request(key_pb) @@ -485,7 +500,7 @@ def delete_entities(self, dataset_id, key_pbs): :class:`gcloud.datastore.datastore_v1_pb2.Key` protobufs and not with any of the other abstractions. 
For example, it's used under the hood in the - :func:`gcloud.datastore.entity.Entity.delete` method. + :meth:`gcloud.datastore.entity.Entity.delete` method. :type dataset_id: string :param dataset_id: The ID of the dataset from which to delete the keys. @@ -494,7 +509,7 @@ def delete_entities(self, dataset_id, key_pbs): :param key_pbs: The keys to delete from the datastore. :rtype: boolean - :returns: `True` + :returns: ``True`` """ mutation = self.mutation() helpers._add_keys_to_request(mutation.delete, key_pbs) @@ -553,7 +568,7 @@ def _set_read_options(self, request, eventual): def _copy_deferred_keys(lookup_request, lookup_response): """Clear requested keys and copy deferred keys back in. - Helper ``Connection.lookup()``. + Helper for ``Connection.lookup()``. """ for old_key in list(lookup_request.key): lookup_request.key.remove(old_key) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index e67af50326e8..b36951c9099b 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -40,7 +40,7 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. - Use :method:`gcloud.datastore.key.Key.get` to retrieve an existing entity. + Use :meth:`gcloud.datastore.key.Key.get` to retrieve an existing entity. >>> key.get() @@ -54,7 +54,7 @@ class Entity(dict): And you can convert an entity to a regular Python dictionary with the - `dict` builtin: + ``dict`` builtin: >>> dict(entity) {'age': 20, 'name': 'JJ'} @@ -62,19 +62,19 @@ class Entity(dict): .. note:: When saving an entity to the backend, values which are "text" - ('unicode' in Python2, 'str' in Python3) will be saved using + (``unicode`` in Python2, ``str`` in Python3) will be saved using the 'text_value' field, after being encoded to UTF-8. When retrieved from the back-end, such values will be decoded to "text" - again. Values which are "bytes" ('str' in Python2, 'bytes' in + again. 
Values which are "bytes" (``str`` in Python2, ``bytes`` in Python3), will be saved using the 'blob_value' field, without any decoding / encoding step. :type key: :class:`gcloud.datastore.key.Key` - :param key: Optional key to be set on entity. Required for save() or - reload(). + :param key: Optional key to be set on entity. Required for :meth:`save()` + or :meth:`reload()`. - :type exclude_from_indexes: `tuple` of :class:`str` - :param exclude_from_indexes: names of fields whose values are not to be + :type exclude_from_indexes: tuple of string + :param exclude_from_indexes: Names of fields whose values are not to be indexed for this entity. """ @@ -109,8 +109,8 @@ def _must_key(self): """Return our key, or raise NoKey if not set. :rtype: :class:`gcloud.datastore.key.Key`. - :returns: our key - :raises: NoKey if key is None + :returns: The entity's key. + :raises: :class:`NoKey` if no key is set. """ if self.key is None: raise NoKey() @@ -149,9 +149,10 @@ def save(self, connection=None): the datastore. .. note:: - Property values which are "text" ('unicode' in Python2, 'str' in + Property values which are "text" (``unicode`` in Python2, ``str`` in Python3) map to 'string_value' in the datastore; values which are - "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. + "bytes" (``str`` in Python2, ``bytes`` in Python3) map to + 'blob_value'. :type connection: :class:`gcloud.datastore.connection.Connection` :param connection: Optional connection used to connect to datastore. diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index 747eb586e9ae..110afa546aed 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -166,7 +166,6 @@ def _get_value_from_value_pb(value_pb): :returns: The value provided by the Protobuf. 
""" - result = None if value_pb.HasField('timestamp_microseconds_value'): microseconds = value_pb.timestamp_microseconds_value @@ -231,7 +230,7 @@ def _set_protobuf_value(value_pb, val): :type value_pb: :class:`gcloud.datastore.datastore_v1_pb2.Value` :param value_pb: The value protobuf to which the value is being assigned. - :type val: `datetime.datetime`, bool, float, integer, string + :type val: `datetime.datetime`, boolean, float, integer, string, :class:`gcloud.datastore.key.Key`, :class:`gcloud.datastore.entity.Entity`, :param val: The value to be assigned. diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 563ec315c579..6d471abc48c6 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -36,8 +36,10 @@ class Key(object): >>> Key('Parent', 'foo', 'Child', 1234) + >>> Key('Child', 1234, parent=parent_key) + - To create a paritial key: + To create a partial key: >>> Key('Parent', 'foo', 'Child') @@ -48,11 +50,11 @@ class Key(object): def __init__(self, *path_args, **kwargs): """Constructor / initializer for a key. - :type path_args: tuple of strings and ints + :type path_args: tuple of string and integer :param path_args: May represent a partial (odd length) or full (even length) key path. - :type namespace: :class:`str` + :type namespace: string :param namespace: A namespace identifier for the key. Can only be passed as a keyword argument. @@ -78,15 +80,15 @@ def __init__(self, *path_args, **kwargs): def _parse_path(path_args): """Parses positional arguments into key path with kinds and IDs. - :type path_args: :class:`tuple` + :type path_args: tuple :param path_args: A tuple from positional arguments. Should be alternating list of kinds (string) and ID/name parts (int or string). - :rtype: list of dict + :rtype: :class:`list` of :class:`dict` :returns: A list of key parts with kind and ID or name set. 
- :raises: `ValueError` if there are no `path_args`, if one of the - kinds is not a string or if one of the IDs/names is not + :raises: :class:`ValueError` if there are no ``path_args``, if one of + the kinds is not a string or if one of the IDs/names is not a string or an integer. """ if len(path_args) == 0: @@ -122,12 +124,12 @@ def _parse_path(path_args): def _combine_args(self): """Sets protected data by combining raw data set from the constructor. - If a _parent is set, updates the _flat_path and sets the - _namespace and _dataset_id if not already set. + If a ``_parent`` is set, updates the ``_flat_path`` and sets the + ``_namespace`` and ``_dataset_id`` if not already set. - :rtype: list of dict + :rtype: :class:`list` of :class:`dict` :returns: A list of key parts with kind and ID or name set. - :raises: `ValueError` if the parent key is not complete. + :raises: :class:`ValueError` if the parent key is not complete. """ child_path = self._parse_path(self._flat_path) @@ -153,11 +155,11 @@ def _clone(self): """Duplicates the Key. Most attributes are simple types, so don't require copying. Other - attributes like `parent` are long-lived and so we re-use them rather + attributes like ``parent`` are long-lived and so we re-use them rather than creating copies. :rtype: :class:`gcloud.datastore.key.Key` - :returns: A new `Key` instance with the same data as the current one. + :returns: A new ``Key`` instance with the same data as the current one. """ return self.__class__(*self.flat_path, parent=self.parent, dataset_id=self.dataset_id, @@ -166,11 +168,14 @@ def _clone(self): def completed_key(self, id_or_name): """Creates new key from existing partial key by adding final ID/name. + :type id_or_name: string or integer + :param id_or_name: ID or name to be added to the key. 
+ :rtype: :class:`gcloud.datastore.key.Key` - :returns: A new `Key` instance with the same data as the current one + :returns: A new ``Key`` instance with the same data as the current one and an extra ID or name added. - :raises: `ValueError` if the current key is not partial or if - `id_or_name` is not a string or integer. + :raises: :class:`ValueError` if the current key is not partial or if + ``id_or_name`` is not a string or integer. """ if not self.is_partial: raise ValueError('Only a partial key can be completed.') @@ -193,7 +198,7 @@ def to_protobuf(self): """Return a protobuf corresponding to the key. :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key` - :returns: The Protobuf representing the key. + :returns: The protobuf representing the key. """ key = datastore_pb.Key() key.partition_id.dataset_id = self.dataset_id @@ -213,12 +218,12 @@ def to_protobuf(self): return key def get(self, connection=None): - """Retrieve entity corresponding to the curretn key. + """Retrieve entity corresponding to the current key. :type connection: :class:`gcloud.datastore.connection.Connection` :param connection: Optional connection used to connect to datastore. - :rtype: :class:`gcloud.datastore.entity.Entity` or `NoneType` + :rtype: :class:`gcloud.datastore.entity.Entity` or :class:`NoneType` :returns: The requested entity, or ``None`` if there was no match found. """ @@ -253,9 +258,9 @@ def delete(self, connection=None): def is_partial(self): """Boolean indicating if the key has an ID (or name). - :rtype: :class:`bool` - :returns: True if the last element of the key's path does not have - an 'id' or a 'name'. + :rtype: boolean + :returns: ``True`` if the last element of the key's path does not have + an ``id`` or a ``name``. """ return self.id_or_name is None @@ -263,7 +268,7 @@ def is_partial(self): def namespace(self): """Namespace getter. - :rtype: :class:`str` + :rtype: string :returns: The namespace of the current key. 
""" return self._namespace @@ -274,7 +279,7 @@ def path(self): Returns a copy so that the key remains immutable. - :rtype: :class:`str` + :rtype: :class:`list` of :class:`dict` :returns: The (key) path of the current key. """ return copy.deepcopy(self._path) @@ -283,7 +288,7 @@ def path(self): def flat_path(self): """Getter for the key path as a tuple. - :rtype: :class:`tuple` of string and int + :rtype: tuple of string and integer :returns: The tuple of elements in the path. """ return self._flat_path @@ -292,7 +297,7 @@ def flat_path(self): def kind(self): """Kind getter. Based on the last element of path. - :rtype: :class:`str` + :rtype: string :returns: The kind of the current key. """ return self.path[-1]['kind'] @@ -301,7 +306,7 @@ def kind(self): def id(self): """ID getter. Based on the last element of path. - :rtype: :class:`int` + :rtype: integer :returns: The (integer) ID of the key. """ return self.path[-1].get('id') @@ -310,7 +315,7 @@ def id(self): def name(self): """Name getter. Based on the last element of path. - :rtype: :class:`str` + :rtype: string :returns: The (string) name of the key. """ return self.path[-1].get('name') @@ -319,9 +324,9 @@ def name(self): def id_or_name(self): """Getter. Based on the last element of path. - :rtype: :class:`int` (if 'id') or :class:`str` (if 'name') - :returns: The last element of the key's path if it is either an 'id' - or a 'name'. + :rtype: integer (if ``id``) or string (if ``name``) + :returns: The last element of the key's path if it is either an ``id`` + or a ``name``. """ return self.id or self.name @@ -329,7 +334,7 @@ def id_or_name(self): def dataset_id(self): """Dataset ID getter. - :rtype: :class:`str` + :rtype: string :returns: The key's dataset ID. """ return self._dataset_id @@ -340,10 +345,10 @@ def _make_parent(self): Extracts all but the last element in the key path and creates a new key, while still matching the namespace and the dataset ID. 
- :rtype: :class:`gcloud.datastore.key.Key` or `NoneType` - :returns: a new `Key` instance, whose path consists of all but the last - element of self's path. If self has only one path element, - returns None. + :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType` + :returns: A new ``Key`` instance, whose path consists of all but the + last element of current path. If the current key has only + one path element, returns ``None``. """ if self.is_partial: parent_args = self.flat_path[:-1] @@ -357,10 +362,10 @@ def _make_parent(self): def parent(self): """The parent of the current key. - :rtype: :class:`gcloud.datastore.key.Key` or `NoneType` - :returns: a new `Key` instance, whose path consists of all but the last - element of self's path. If self has only one path element, - returns None. + :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType` + :returns: A new ``Key`` instance, whose path consists of all but the + last element of current path. If the current key has only + one path element, returns ``None``. """ if self._parent is None: self._parent = self._make_parent() @@ -379,7 +384,8 @@ def _validate_dataset_id(dataset_id, parent): If ``dataset_id`` is unset, attempt to infer the ID from the environment. - :raises: `ValueError` if ``dataset_id`` is None and none can be inferred. + :raises: :class:`ValueError` if ``dataset_id`` is ``None`` and no dataset + can be inferred. """ if parent is None: diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index b19d44e8066f..58b2570a26cc 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -28,31 +28,31 @@ class Query(object): This class serves as an abstraction for creating a query over data stored in the Cloud Datastore. - :type kind: string. + :type kind: string :param kind: The kind to query. - :type dataset_id: str + :type dataset_id: string :param dataset_id: The ID of the dataset to query. If not passed, uses the implicit default. 
- :type namespace: string or None. + :type namespace: string or None :param namespace: The namespace to which to restrict results. - :type ancestor: :class:`gcloud.datastore.key.Key` or None. + :type ancestor: :class:`gcloud.datastore.key.Key` or None :param ancestor: key of the ancestor to which this query's results are restricted. - :type filters: sequence of (property_name, operator, value) tuples. + :type filters: sequence of (property_name, operator, value) tuples :param filters: property filters applied by this query. - :type projection: sequence of string. + :type projection: sequence of string :param projection: fields returned as part of query results. - :type order: sequence of string. + :type order: sequence of string :param order: field names used to order query results. Prepend '-' to a field name to sort it in descending order. - :type group_by: sequence_of_string. + :type group_by: sequence of string :param group_by: field names used to group query results. :raises: ValueError if ``dataset_id`` is not passed and no implicit @@ -124,7 +124,7 @@ def namespace(self, value): def kind(self): """Get the Kind of the Query. - :rtype: string or :class:`Query` + :rtype: string """ return self._kind @@ -168,8 +168,7 @@ def ancestor(self, value): @ancestor.deleter def ancestor(self): - """Remove the ancestor for the query. - """ + """Remove the ancestor for the query.""" self._ancestor = None @property @@ -204,9 +203,10 @@ def add_filter(self, property_name, operator, value): :type value: integer, string, boolean, float, None, datetime :param value: The value to filter on. - :raises: `ValueError` if `operation` is not one of the specified - values, or if a filter names '__key__' but passes invalid - operator (``==`` is required) or value (a key is required). + :raises: :class:`ValueError` if ``operation`` is not one of the + specified values, or if a filter names ``'__key__'`` but + passes invalid operator (``==`` is required) or value (a key + is required). 
""" if self.OPERATORS.get(operator) is None: error_message = 'Invalid expression: "%s"' % (operator,) @@ -226,7 +226,7 @@ def projection(self): """Fields names returned by the query. :rtype: sequence of string - :returns: names of fields in query results. + :returns: Names of fields in query results. """ return self._projection[:] @@ -284,7 +284,7 @@ def group_by(self, value): :type value: string or sequence of strings :param value: Each value is a string giving the name of a - property to use to group results together. + property to use to group results together. """ if isinstance(value, str): value = [value] @@ -320,7 +320,6 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, :param connection: An optional cursor passed through to the iterator. If not supplied, uses the implicit default. - :rtype: :class:`Iterator` :raises: ValueError if ``connection`` is not passed and no implicit default has been set. @@ -336,8 +335,8 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, class Iterator(object): - """Represent the state of a given execution of a Query. - """ + """Represent the state of a given execution of a Query.""" + _NOT_FINISHED = datastore_pb.QueryResultBatch.NOT_FINISHED _FINISHED = ( @@ -359,7 +358,7 @@ def next_page(self): """Fetch a single "page" of query results. Low-level API for fine control: the more convenient API is - to iterate on us. + to iterate on the current Iterator. :rtype: tuple, (entities, more_results, cursor) """ @@ -427,10 +426,10 @@ def _pb_from_query(query): """Convert a Query instance to the corresponding protobuf. :type query: :class:`Query` - :param query: the source query + :param query: The source query. :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Query` - :returns: a protobuf that can be sent to the protobuf API. N.b. that + :returns: A protobuf that can be sent to the protobuf API. N.b. 
that it does not contain "in-flight" fields for ongoing query
executions (cursors, offset, limit).
"""
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index 2224c121f8cd..32246a6776de 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -100,15 +100,18 @@ def test__request_w_200(self):
self.assertEqual(http._called_with['body'], DATA)
def test__request_not_200(self):
+ import six
+
DATASET_ID = 'DATASET'
METHOD = 'METHOD'
DATA = 'DATA'
conn = self._makeOne()
conn._http = Http({'status': '400'}, 'Bad Request')
- with self.assertRaises(Exception) as e:
+ with self.assertRaises(six.moves.http_client.HTTPException) as e:
conn._request(DATASET_ID, METHOD, DATA)
- self.assertEqual(str(e.exception),
- 'Request failed. Error was: Bad Request')
+ expected_message = ('Request failed with status code 400. '
+ 'Error was: Bad Request')
+ self.assertEqual(str(e.exception), expected_message)
def test__rpc(self):
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py
index 41e3cc575c59..5508313b1d98 100644
--- a/gcloud/datastore/transaction.py
+++ b/gcloud/datastore/transaction.py
@@ -29,19 +29,21 @@ class Transaction(object):
mutation, and execute those within a transaction::
>>> from gcloud import datastore
- >>> dataset = datastore.get_dataset('dataset-id')
- >>> with dataset.transaction()
- ... entity1.save()
- ... entity2.save()
+ >>> from gcloud.datastore.transaction import Transaction
+
+ >>> datastore.set_default_connection()
+ >>> datastore.set_default_dataset_id()
+
+ >>> with Transaction():
+ ... entity1.save()
+ ... entity2.save()
By default, the transaction is rolled back if the transaction
block exits with an error::
- >>> from gcloud import datastore
- >>> dataset = datastore.get_dataset('dataset-id')
- >>> with dataset.transaction() as t:
- ... do_some_work()
- ... raise Exception() # rolls back
+ >>> with Transaction() as txn:
+ ... do_some_work()
+ ... 
raise SomeException() # rolls back If the transaction block exists without an exception, it will commit by default. @@ -50,8 +52,9 @@ class Transaction(object): entities will not be available at save time! That means, if you try:: - >>> with dataset.transaction(): - ... entity = dataset.entity('Thing').save() + >>> with Transaction(): + ... entity = Entity(key=Key('Thing')) + ... entity.save() ``entity`` won't have a complete Key until the transaction is committed. @@ -59,16 +62,16 @@ class Transaction(object): Once you exit the transaction (or call ``commit()``), the automatically generated ID will be assigned to the entity:: - >>> with dataset.transaction(): - ... entity = dataset.entity('Thing') - ... entity.save() - ... assert entity.key().is_partial # There is no ID on this key. - >>> assert not entity.key().is_partial # There *is* an ID. + >>> with Transaction(): + ... entity = Entity(key=Key('Thing')) + ... entity.save() + ... assert entity.key.is_partial # There is no ID on this key. + >>> assert not entity.key.is_partial # There *is* an ID. .. warning:: If you're using the automatically generated ID functionality, it's important that you only use - :func:`gcloud.datastore.entity.Entity.save` rather than using - :func:`gcloud.datastore.connection.Connection.save_entity` + :meth:`gcloud.datastore.entity.Entity.save` rather than using + :meth:`gcloud.datastore.connection.Connection.save_entity` directly. If you mix the two, the results will have extra IDs generated and @@ -77,16 +80,16 @@ class Transaction(object): If you don't want to use the context manager you can initialize a transaction manually:: - >>> transaction = dataset.transaction() + >>> transaction = Transaction() >>> transaction.begin() - >>> entity = dataset.entity('Thing') + >>> entity = Entity(key=Key('Thing')) >>> entity.save() >>> if error: - ... transaction.rollback() + ... transaction.rollback() ... else: - ... transaction.commit() + ... 
transaction.commit() For now, this library will enforce a rule of one transaction per connection. That is, If you want to work with two transactions at @@ -95,33 +98,21 @@ class Transaction(object): For example, this is perfectly valid:: - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id') - >>> with dataset.transaction(): - ... dataset.entity('Thing').save() + >>> with Transaction(): + ... entity = Entity(key=Key('Thing')) + ... entity.save() However, this **wouldn't** be acceptable:: - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id') - >>> with dataset.transaction(): - ... dataset.entity('Thing').save() - ... with dataset.transaction(): - ... dataset.entity('Thing').save() + >>> with Transaction(): + ... Entity(key=Key('Thing')).save() + ... with Transaction(): + ... Entity(key=Key('Thing')).save() Technically, it looks like the Protobuf API supports this type of - pattern, however it makes the code particularly messy. If you - really need to nest transactions, try:: + pattern, however it makes the code particularly messy. - >>> from gcloud import datastore - >>> dataset1 = datastore.get_dataset('dataset-id1') - >>> dataset2 = datastore.get_dataset('dataset-id2') - >>> with dataset1.transaction(): - ... dataset1.entity('Thing').save() - ... with dataset2.transaction(): - ... dataset2.entity('Thing').save() - - :type dataset_id: :class:`str`. + :type dataset_id: string :param dataset_id: The ID of the dataset. :type connection: :class:`gcloud.datastore.connection.Connection` @@ -147,7 +138,7 @@ def __init__(self, dataset_id=None, connection=None): def dataset_id(self): """Getter for dataset ID in which the transaction will run. - :rtype: :class:`str` + :rtype: string :returns: The dataset ID in which the transaction will run. 
""" return self._dataset_id diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py index 9197ca9ab6d8..ebd24910e1c0 100644 --- a/gcloud/storage/acl.py +++ b/gcloud/storage/acl.py @@ -242,7 +242,7 @@ def has_entity(self, entity): :type entity: :class:`_ACLEntity` :param entity: The entity to check for existence in this ACL. - :rtype: bool + :rtype: boolean :returns: True of the entity exists in the ACL. """ self._ensure_loaded() diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index cae8205d667e..021ee8c47c33 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -256,7 +256,7 @@ def delete(self, force=False): parameter set to true. This will iterate through the bucket's keys and delete the related objects, before deleting the bucket. - :type force: bool + :type force: boolean :param full: If True, empties the bucket's objects then deletes it. :raises: :class:`gcloud.storage.exceptions.NotFound` if the @@ -636,7 +636,7 @@ def versioning_enabled(self, value): See: https://cloud.google.com/storage/docs/object-versioning for details. - :type value: convertible to bool + :type value: convertible to boolean :param value: should versioning be anabled for the bucket? """ self._patch_properties({'versioning': {'enabled': bool(value)}}) @@ -694,11 +694,11 @@ def disable_website(self): def make_public(self, recursive=False, future=False): """Make a bucket public. - :type recursive: bool + :type recursive: boolean :param recursive: If True, this will make all keys inside the bucket public as well. - :type future: bool + :type future: boolean :param future: If True, this will make all objects created in the future public as well. """ diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py index 5e0d52e8d240..74c2d046e89a 100644 --- a/gcloud/storage/connection.py +++ b/gcloud/storage/connection.py @@ -302,7 +302,7 @@ def api_request(self, method, path, query_params=None, latest API version supported by gcloud-python. 
- :type expect_json: bool + :type expect_json: boolean :param expect_json: If True, this method will try to parse the response as JSON and raise an exception if that cannot be done. Default is True. @@ -464,10 +464,10 @@ def delete_bucket(self, bucket, force=False): :type bucket: string or :class:`gcloud.storage.bucket.Bucket` :param bucket: The bucket name (or bucket object) to create. - :type force: bool + :type force: boolean :param full: If True, empties the bucket's objects then deletes it. - :rtype: bool + :rtype: boolean :returns: True if the bucket was deleted. :raises: :class:`gcloud.storage.exceptions.NotFound` if the bucket doesn't exist, or diff --git a/gcloud/storage/iterator.py b/gcloud/storage/iterator.py index ad6d2ad00217..f59e569c3869 100644 --- a/gcloud/storage/iterator.py +++ b/gcloud/storage/iterator.py @@ -78,7 +78,7 @@ def __iter__(self): def has_next_page(self): """Determines whether or not this iterator has more pages. - :rtype: bool + :rtype: boolean :returns: Whether the iterator has more pages or not. """ if self.page_number == 0: diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index baaf7cc8587d..1a6e0ae19f30 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -178,7 +178,7 @@ def generate_signed_url(self, expiration, method='GET'): def exists(self): """Determines whether or not this key exists. - :rtype: bool + :rtype: boolean :returns: True if the key exists in Cloud Storage. """ return self.bucket.get_key(self.name) is not None @@ -289,7 +289,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None, :type file_obj: file :param file_obj: A file handle open for reading. - :type rewind: bool + :type rewind: boolean :param rewind: If True, seek to the beginning of the file handle before writing the file to Cloud Storage. 
diff --git a/run_pylint.py b/run_pylint.py index 67dfd268f282..3b90b5de0571 100644 --- a/run_pylint.py +++ b/run_pylint.py @@ -100,7 +100,7 @@ def valid_filename(filename): def is_production_filename(filename): """Checks if the file contains production code. - :rtype: `bool` + :rtype: boolean :returns: Boolean indicating production status. """ return not ('demo' in filename or 'test' in filename