diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py
index ee718e87f4b6a..1dc202ee8c39a 100644
--- a/gcloud/datastore/__init__.py
+++ b/gcloud/datastore/__init__.py
@@ -44,7 +44,10 @@
 which represents a lookup or search over the rows in the datastore.
 """
 
+import os
+
 from gcloud import credentials
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore.connection import Connection
 
 
@@ -52,6 +55,23 @@
          'https://www.googleapis.com/auth/userinfo.email')
 """The scope required for authenticating as a Cloud Datastore consumer."""
 
+_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'
+
+
+def _set_dataset_from_environ():
+    """Determines auth settings from the local environment.
+
+    Currently only supports an environment variable, but will implicitly
+    support App Engine, Compute Engine and other environments in
+    the future.
+
+    Local environment variable used is:
+    - GCLOUD_DATASET_ID
+    """
+    local_dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)
+    if local_dataset_id is not None:
+        _implicit_environ.DATASET = get_dataset(local_dataset_id)
+
 
 def get_connection():
     """Shortcut method to establish a connection to the Cloud Datastore.
@@ -97,3 +117,58 @@ def get_dataset(dataset_id):
     """
     connection = get_connection()
     return connection.dataset(dataset_id)
+
+
+def _require_dataset():
+    """Convenience method to ensure DATASET is set.
+
+    :rtype: :class:`gcloud.datastore.dataset.Dataset`
+    :returns: A dataset based on the current environment.
+    :raises: :class:`EnvironmentError` if DATASET is not set.
+    """
+    if _implicit_environ.DATASET is None:
+        raise EnvironmentError('Dataset could not be implied.')
+    return _implicit_environ.DATASET
+
+
+def get_entity(key):
+    """Retrieves entity from implicit dataset, along with its attributes.
+
+    :type key: :class:`gcloud.datastore.key.Key`
+    :param key: The key of the entity to retrieve.
+
+    :rtype: :class:`gcloud.datastore.entity.Entity` or ``None``
+    :return: The requested entity, or ``None`` if there was no match found.
+    """
+    return _require_dataset().get_entity(key)
+
+
+def get_entities(keys):
+    """Retrieves entities from implied dataset, along with their attributes.
+
+    :type keys: list of :class:`gcloud.datastore.key.Key`
+    :param keys: The keys of the entities to retrieve.
+
+    :rtype: list of :class:`gcloud.datastore.entity.Entity`
+    :return: The requested entities.
+    """
+    return _require_dataset().get_entities(keys)
+
+
+def allocate_ids(incomplete_key, num_ids):
+    """Allocates a list of IDs from a partial key.
+
+    :type incomplete_key: :class:`gcloud.datastore.key.Key`
+    :param incomplete_key: The partial key to use as base for allocated IDs.
+
+    :type num_ids: :class:`int`
+    :param num_ids: The number of IDs to allocate.
+
+    :rtype: list of :class:`gcloud.datastore.key.Key`
+    :return: The (complete) keys allocated with `incomplete_key` as root.
+    """
+    return _require_dataset().allocate_ids(incomplete_key, num_ids)
+
+
+# Set DATASET if it can be implied from the environment.
+_set_dataset_from_environ()
diff --git a/gcloud/datastore/_implicit_environ.py b/gcloud/datastore/_implicit_environ.py
new file mode 100644
index 0000000000000..0880678318200
--- /dev/null
+++ b/gcloud/datastore/_implicit_environ.py
@@ -0,0 +1,24 @@
+"""Module to provide implicit behavior based on environment.
+
+Acts as a mutable namespace to allow the datastore package to
+imply the current dataset from the environment.
+
+Also provides a base class for classes in the `datastore` package
+which could utilize the implicit environment.
+""" + + +DATASET = None +"""Module global to allow persistent implied dataset from enviroment.""" + + +class _DatastoreBase(object): + """Base for all classes in the datastore package. + + Uses the implicit DATASET object as a default dataset attached + to the instances being created. Stores the dataset passed in + on the protected (i.e. non-public) attribute `_dataset`. + """ + + def __init__(self, dataset=None): + self._dataset = dataset or DATASET diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 6411d3ee64f61..3d208d4a2f3c5 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -14,6 +14,7 @@ """Class for representing a single entity in the Cloud Datastore.""" +from gcloud.datastore import _implicit_environ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore.key import Key @@ -95,7 +96,10 @@ class Entity(dict): def __init__(self, dataset=None, kind=None, exclude_from_indexes=()): super(Entity, self).__init__() - self._dataset = dataset + # DJH: Need to decide if this inherits from object/dict. Notice that + # `Entity` objects are False-y even if they have protected + # variables set. + self._dataset = dataset or _implicit_environ.DATASET if kind: self._key = Key().kind(kind) else: diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 6528087715efc..e992c854c8532 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -16,12 +16,13 @@ import base64 +from gcloud.datastore import _implicit_environ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import helpers from gcloud.datastore.key import Key -class Query(object): +class Query(_implicit_environ._DatastoreBase): """A Query against the Cloud Datastore. This class serves as an abstraction for creating a query over data @@ -66,7 +67,7 @@ class Query(object): """Mapping of operator strings and their protobuf equivalents.""" def __init__(self, kind=None, dataset=None, namespace=None): - self._dataset = dataset + super(Query, self).__init__(dataset=dataset) self._namespace = namespace self._pb = datastore_pb.Query() self._cursor = self._more_results = None diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index e258cda050034..54369e06e8459 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -35,6 +35,47 @@ def test_it(self): self.assertTrue(client._get_app_default_called) +class Test__set_dataset_from_environ(unittest2.TestCase): + + def _callFUT(self): + from gcloud.datastore import _set_dataset_from_environ + return _set_dataset_from_environ() + + def _test_with_environ(self, environ, expected_result): + import os + from gcloud._testing import _Monkey + from gcloud import datastore + from gcloud.datastore import _implicit_environ + + # Check the environment is unset. + self.assertEqual(_implicit_environ.DATASET, None) + + def custom_getenv(key): + return environ.get(key) + + def custom_get_dataset(dataset_id): + return dataset_id + + with _Monkey(os, getenv=custom_getenv): + with _Monkey(datastore, get_dataset=custom_get_dataset): + self._callFUT() + + self.assertEqual(_implicit_environ.DATASET, expected_result) + + def test_set_from_env_var(self): + from gcloud.datastore import _DATASET_ENV_VAR_NAME + + # Make a custom getenv function to Monkey. 
+        DATASET = 'dataset'
+        VALUES = {
+            _DATASET_ENV_VAR_NAME: DATASET,
+        }
+        self._test_with_environ(VALUES, DATASET)
+
+    def test_no_env_var_set(self):
+        self._test_with_environ({}, None)
+
+
 class Test_get_dataset(unittest2.TestCase):
 
     def _callFUT(self, dataset_id):
@@ -56,3 +97,104 @@ def test_it(self):
         self.assertTrue(isinstance(found.connection(), Connection))
         self.assertEqual(found.id(), DATASET_ID)
         self.assertTrue(client._get_app_default_called)
+
+
+class Test_implicit_behavior(unittest2.TestCase):
+
+    def test__require_dataset(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        original_dataset = _implicit_environ.DATASET
+
+        try:
+            _implicit_environ.DATASET = None
+            self.assertRaises(EnvironmentError,
+                              gcloud.datastore._require_dataset)
+            NEW_DATASET = object()
+            _implicit_environ.DATASET = NEW_DATASET
+            self.assertEqual(gcloud.datastore._require_dataset(), NEW_DATASET)
+        finally:
+            _implicit_environ.DATASET = original_dataset
+
+    def test_get_entity(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_entity import _Dataset
+        from gcloud._testing import _Monkey
+
+        CUSTOM_DATASET = _Dataset()
+        DUMMY_KEY = object()
+        DUMMY_VAL = object()
+        CUSTOM_DATASET[DUMMY_KEY] = DUMMY_VAL
+        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET):
+            result = gcloud.datastore.get_entity(DUMMY_KEY)
+        self.assertTrue(result is DUMMY_VAL)
+
+    def test_get_entities(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_entity import _Dataset
+        from gcloud._testing import _Monkey
+
+        CUSTOM_DATASET = _Dataset()
+        DUMMY_KEYS = [object(), object()]
+        DUMMY_VALS = [object(), object()]
+        for key, val in zip(DUMMY_KEYS, DUMMY_VALS):
+            CUSTOM_DATASET[key] = val
+
+        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET):
+            result = gcloud.datastore.get_entities(DUMMY_KEYS)
+        self.assertTrue(result == DUMMY_VALS)
+
+    def test_allocate_ids(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.key import Key
+        from gcloud.datastore.test_entity import _Dataset
+        from gcloud._testing import _Monkey
+
+        CUSTOM_DATASET = _Dataset()
+        INCOMPLETE_KEY = Key()
+        NUM_IDS = 2
+        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET):
+            result = gcloud.datastore.allocate_ids(INCOMPLETE_KEY, NUM_IDS)
+
+        # Check the IDs returned.
+        self.assertEqual([key.id() for key in result], range(1, NUM_IDS + 1))
+
+    def test_set_DATASET(self):
+        import os
+        from gcloud._testing import _Monkey
+        from gcloud.test_credentials import _Client
+        from gcloud import credentials
+        from gcloud.datastore import _implicit_environ
+
+        # Make a custom client for doing auth. Have to fake auth since we
+        # can't monkey patch `datastore.get_dataset` while reloading the
+        # `datastore.__init__` module.
+        client = _Client()
+
+        # Fake auth variables.
+        DATASET = 'dataset'
+
+        # Make a custom getenv function to Monkey.
+        VALUES = {
+            'GCLOUD_DATASET_ID': DATASET,
+        }
+
+        def custom_getenv(key):
+            return VALUES.get(key)
+
+        # Perform the import again with our test patches.
+        with _Monkey(credentials, client=client):
+            with _Monkey(os, getenv=custom_getenv):
+                import gcloud.datastore
+                reload(gcloud.datastore)
+
+        # Check that the DATASET was correctly implied from the environ.
+        implicit_dataset = _implicit_environ.DATASET
+        self.assertEqual(implicit_dataset.id(), DATASET)
+        # Check that the credentials on the implicit DATASET's connection
+        # came from the fake client.
+        cnxn_credentials = implicit_dataset.connection().credentials
+        self.assertTrue(cnxn_credentials is client._signed)
diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py
index 955eebc8eeca4..8f919630e9b2e 100644
--- a/gcloud/datastore/test_dataset.py
+++ b/gcloud/datastore/test_dataset.py
@@ -227,7 +227,7 @@ def test_allocate_ids(self):
         DATASET = self._makeOne(DATASET_ID, connection=CONNECTION)
         result = DATASET.allocate_ids(INCOMPLETE_KEY, NUM_IDS)
 
-        # Check the IDs returned match _PathElementProto.
+        # Check the IDs returned match.
         self.assertEqual([key._id for key in result], range(NUM_IDS))
 
         # Check connection is called correctly.
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py
index c036720d42417..c62c3156e02fa 100644
--- a/gcloud/datastore/test_entity.py
+++ b/gcloud/datastore/test_entity.py
@@ -23,8 +23,10 @@
 class TestEntity(unittest2.TestCase):
 
     def _getTargetClass(self):
+        from gcloud.datastore import _implicit_environ
         from gcloud.datastore.entity import Entity
 
+        _implicit_environ.DATASET = None
         return Entity
 
     def _makeOne(self, dataset=_MARKER, kind=_KIND, exclude_from_indexes=()):
@@ -265,6 +267,13 @@ def __init__(self, connection=None):
         super(_Dataset, self).__init__()
         self._connection = connection
 
+    def __bool__(self):
+        # Make sure the objects are Truth-y since an empty
+        # dict with _connection set will still be False-y.
+        return True
+
+    __nonzero__ = __bool__
+
     def id(self):
         return _DATASET_ID
 
@@ -274,6 +283,12 @@ def connection(self):
         return self._connection
 
     def get_entity(self, key):
         return self.get(key)
 
+    def get_entities(self, keys):
+        return [self.get(key) for key in keys]
+
+    def allocate_ids(self, incomplete_key, num_ids):
+        return [incomplete_key.id(i + 1) for i in range(num_ids)]
+
 
 class _Connection(object):
     _transaction = _saved = _deleted = None
diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py
index 70db552aaa7a0..d3727c3f0bc0b 100644
--- a/gcloud/datastore/test_helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -20,8 +20,11 @@ class Test_entity_from_protobuf(unittest2.TestCase):
     _MARKER = object()
 
     def _callFUT(self, val, dataset=_MARKER):
+        from gcloud.datastore import _implicit_environ
         from gcloud.datastore.helpers import entity_from_protobuf
 
+        _implicit_environ.DATASET = None
+
         if dataset is self._MARKER:
             return entity_from_protobuf(val)
 
diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py
index 4b02b69253665..db9e2ac252eb4 100644
--- a/gcloud/datastore/test_query.py
+++ b/gcloud/datastore/test_query.py
@@ -18,8 +18,10 @@
 class TestQuery(unittest2.TestCase):
 
     def _getTargetClass(self):
+        from gcloud.datastore import _implicit_environ
         from gcloud.datastore.query import Query
 
+        _implicit_environ.DATASET = None
         return Query
 
     def _makeOne(self, kind=None, dataset=None, namespace=None):
diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py
index 7d39ef1429eb1..b849a74b140bc 100644
--- a/gcloud/datastore/test_transaction.py
+++ b/gcloud/datastore/test_transaction.py
@@ -38,6 +38,15 @@ def test_ctor(self):
         self.assertEqual(len(xact._auto_id_entities), 0)
         self.assertTrue(xact.connection() is connection)
 
+    def test_ctor_with_env(self):
+        SENTINEL_VAL = object()
+
+        from gcloud.datastore import _implicit_environ
+        _implicit_environ.DATASET = SENTINEL_VAL
+
+        transaction = self._makeOne(dataset=None)
+        self.assertEqual(transaction.dataset(), SENTINEL_VAL)
+
     def test_add_auto_id_entity(self):
         entity = _Entity()
         _DATASET = 'DATASET'
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py
index 67de81b7af3c1..6ae0846a83523 100644
--- a/gcloud/datastore/transaction.py
+++ b/gcloud/datastore/transaction.py
@@ -14,11 +14,12 @@
 
 """Create / interact with gcloud datastore transactions."""
 
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 from gcloud.datastore import helpers
 
 
-class Transaction(object):
+class Transaction(_implicit_environ._DatastoreBase):
     """An abstraction representing datastore Transactions.
 
     Transactions can be used to build up a bulk mutuation as well as
@@ -125,8 +126,9 @@ class Transaction(object):
     :param dataset: The dataset to which this :class:`Transaction` belongs.
     """
 
-    def __init__(self, dataset):
-        self._dataset = dataset
+    def __init__(self, dataset=None):
+        super(Transaction, self).__init__(dataset=dataset)
+        # If self._dataset is None, using this transaction will fail.
         self._id = None
         self._mutation = datastore_pb.Mutation()
         self._auto_id_entities = []
diff --git a/regression/datastore.py b/regression/datastore.py
index c8670ae32892e..4af474554e751 100644
--- a/regression/datastore.py
+++ b/regression/datastore.py
@@ -17,23 +17,20 @@
 import unittest2
 
 from gcloud import datastore
+datastore._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID'
+datastore._set_dataset_from_environ()
 
 # This assumes the command is being run via tox hence the
 # repository root is the current directory.
 from regression import populate_datastore
-from regression import regression_utils
 
 
 class TestDatastore(unittest2.TestCase):
 
-    @classmethod
-    def setUpClass(cls):
-        cls.dataset = regression_utils.get_dataset()
-
     def setUp(self):
         self.case_entities_to_delete = []
 
     def tearDown(self):
-        with self.dataset.transaction():
+        with datastore.transaction.Transaction():
             for entity in self.case_entities_to_delete:
                 entity.delete()
@@ -42,15 +39,17 @@ class TestDatastoreAllocateIDs(TestDatastore):
 
     def test_allocate_ids(self):
         incomplete_key = datastore.key.Key(path=[{'kind': 'Kind'}])
-        allocated_keys = self.dataset.allocate_ids(incomplete_key, 10)
-        self.assertEqual(len(allocated_keys), 10)
+        num_ids = 10
+        allocated_keys = datastore.allocate_ids(incomplete_key, num_ids)
+        self.assertEqual(len(allocated_keys), num_ids)
 
         unique_ids = set()
         for key in allocated_keys:
             unique_ids.add(key.id())
-            self.assertFalse(key.is_partial())
+            self.assertEqual(key.name(), None)
+            self.assertNotEqual(key.id(), None)
 
-        self.assertEqual(len(unique_ids), 10)
+        self.assertEqual(len(unique_ids), num_ids)
 
 
 class TestDatastoreSave(TestDatastore):
@@ -65,8 +64,8 @@ def _get_post(self, name=None, key_id=None, post_content=None):
             'wordCount': 400,
             'rating': 5.0,
         }
-        # Create an entity with the given content in our dataset.
-        entity = self.dataset.entity(kind='Post')
+        # Create an entity with the given content.
+        entity = datastore.entity.Entity(kind='Post')
         entity.update(post_content)
 
         # Update the entity key.
@@ -91,7 +90,7 @@ def _generic_test_post(self, name=None, key_id=None):
             self.assertEqual(entity.key().name(), name)
         if key_id is not None:
             self.assertEqual(entity.key().id(), key_id)
-        retrieved_entity = self.dataset.get_entity(entity.key())
+        retrieved_entity = datastore.get_entity(entity.key())
         # Check the keys are the same.
         self.assertEqual(retrieved_entity.key().path(), entity.key().path())
         self.assertEqual(retrieved_entity.key().namespace(),
@@ -112,7 +111,7 @@ def test_post_with_generated_id(self):
         self._generic_test_post()
 
     def test_save_multiple(self):
-        with self.dataset.transaction():
+        with datastore.transaction.Transaction():
             entity1 = self._get_post()
             entity1.save()
             # Register entity to be deleted.
@@ -133,11 +132,11 @@ def test_save_multiple(self):
             self.case_entities_to_delete.append(entity2)
 
         keys = [entity1.key(), entity2.key()]
-        matches = self.dataset.get_entities(keys)
+        matches = datastore.get_entities(keys)
         self.assertEqual(len(matches), 2)
 
     def test_empty_kind(self):
-        posts = self.dataset.query('Post').limit(2).fetch()
+        posts = datastore.query.Query(kind='Post').limit(2).fetch()
         self.assertEqual(posts, [])
 
 
@@ -145,14 +144,14 @@ class TestDatastoreSaveKeys(TestDatastore):
 
     def test_save_key_self_reference(self):
         key = datastore.key.Key.from_path('Person', 'name')
-        entity = self.dataset.entity(kind=None).key(key)
+        entity = datastore.entity.Entity(kind=None).key(key)
         entity['fullName'] = u'Full name'
         entity['linkedTo'] = key  # Self reference.
 
         entity.save()
         self.case_entities_to_delete.append(entity)
 
-        query = self.dataset.query('Person').filter(
+        query = datastore.query.Query(kind='Person').filter(
             'linkedTo', '=', key).limit(2)
         stored_persons = query.fetch()
 
@@ -174,7 +173,8 @@ def setUpClass(cls):
             path=[populate_datastore.ANCESTOR])
 
     def _base_query(self):
-        return self.dataset.query('Character').ancestor(self.ANCESTOR_KEY)
+        return datastore.query.Query(kind='Character').ancestor(
+            self.ANCESTOR_KEY)
 
     def test_limit_queries(self):
         limit = 5
@@ -344,17 +344,17 @@ class TestDatastoreTransaction(TestDatastore):
 
     def test_transaction(self):
         key = datastore.key.Key.from_path('Company', 'Google')
-        entity = self.dataset.entity(kind=None).key(key)
+        entity = datastore.entity.Entity(kind=None).key(key)
         entity['url'] = u'www.google.com'
 
-        with self.dataset.transaction():
-            retrieved_entity = self.dataset.get_entity(key)
+        with datastore.transaction.Transaction():
+            retrieved_entity = datastore.get_entity(key)
             if retrieved_entity is None:
                 entity.save()
                 self.case_entities_to_delete.append(entity)
 
         # This will always return after the transaction.
-        retrieved_entity = self.dataset.get_entity(key)
+        retrieved_entity = datastore.get_entity(key)
         retrieved_dict = dict(retrieved_entity.items())
         entity_dict = dict(entity.items())
         self.assertEqual(retrieved_dict, entity_dict)
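
For reference, a minimal usage sketch of the implicit dataset this patch introduces. It assumes
GCLOUD_DATASET_ID names a real dataset and that default credentials are available; the dataset ID
value below is hypothetical. The calls mirror the regression tests above.

    import os

    # Must be set before the package is imported, because
    # _set_dataset_from_environ() runs at import time.
    os.environ['GCLOUD_DATASET_ID'] = 'my-dataset-id'  # hypothetical ID

    from gcloud import datastore

    # Module-level helpers now fall back to _implicit_environ.DATASET.
    incomplete_key = datastore.key.Key(path=[{'kind': 'Post'}])
    allocated_keys = datastore.allocate_ids(incomplete_key, 2)

    # Entities created without an explicit dataset pick up the implicit one.
    entity = datastore.entity.Entity(kind='Post')
    entity['title'] = u'Hello'
    entity.save()

    same_entity = datastore.get_entity(entity.key())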