diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py
index e69f5b17bc06..88ecf1324ff2 100644
--- a/gcloud/datastore/entity.py
+++ b/gcloud/datastore/entity.py
@@ -23,6 +23,10 @@ class NoKey(RuntimeError):
     """Exception raised by Entity methods which require a key."""
 
 
+class NoDataset(RuntimeError):
+    """Exception raised by Entity methods which require a dataset."""
+
+
 class Entity(dict):
     """:type dataset: :class:`gcloud.datastore.dataset.Dataset`
     :param dataset: The dataset in which this entity belongs.
@@ -68,8 +72,9 @@ class Entity(dict):
 
     def __init__(self, dataset=None, kind=None):
         super(Entity, self).__init__()
-        if dataset and kind:
-            self._key = Key(dataset=dataset).kind(kind)
+        self._dataset = dataset
+        if kind:
+            self._key = Key().kind(kind)
         else:
             self._key = None
 
@@ -86,8 +91,7 @@ def dataset(self):
         be `None`. It also means that if you change the key on the entity,
         this will refer to that key's dataset.
         """
-        if self._key:
-            return self._key.dataset()
+        return self._dataset
 
     def key(self, key=None):
         """Get or set the :class:`.datastore.key.Key` on the current entity.
@@ -127,7 +131,7 @@ def kind(self):
         return self._key.kind()
 
     @classmethod
-    def from_key(cls, key):
+    def from_key(cls, key, dataset=None):
         """Create entity based on :class:`.datastore.key.Key`.
 
         .. note::
@@ -140,7 +144,7 @@ def from_key(cls, key):
           :class:`gcloud.datastore.key.Key`.
 
         """
-        return cls().key(key)
+        return cls(dataset).key(key)
 
     @classmethod
     def from_protobuf(cls, pb, dataset=None):
@@ -159,8 +163,8 @@ def from_protobuf(cls, pb, dataset=None):
         # This is here to avoid circular imports.
         from gcloud.datastore import _helpers
 
-        key = Key.from_protobuf(pb.key, dataset=dataset)
-        entity = cls.from_key(key)
+        key = Key.from_protobuf(pb.key)
+        entity = cls.from_key(key, dataset)
 
         for property_pb in pb.property:
             value = _helpers._get_value_from_property_pb(property_pb)
@@ -177,9 +181,21 @@ def _must_key(self):
         :raises: NoKey if key is None
         """
         if self._key is None:
-            raise NoKey('no key')
+            raise NoKey()
         return self._key
 
+    @property
+    def _must_dataset(self):
+        """Return our dataset, or raise NoDataset if not set.
+
+        :rtype: :class:`gcloud.datastore.dataset.Dataset`
+        :returns: our dataset
+        :raises: NoDataset if the dataset is None
+        """
+        if self._dataset is None:
+            raise NoDataset()
+        return self._dataset
+
     def reload(self):
         """Reloads the contents of this entity from the datastore.
 
@@ -193,7 +209,8 @@ def reload(self):
         exist only locally.
         """
         key = self._must_key
-        entity = key.dataset().get_entity(key.to_protobuf())
+        dataset = self._must_dataset
+        entity = dataset.get_entity(key.to_protobuf())
 
         if entity:
             self.update(entity)
@@ -217,7 +234,7 @@ def save(self):
         :returns: The entity with a possibly updated Key.
         """
         key = self._must_key
-        dataset = key.dataset()
+        dataset = self._must_dataset
         connection = dataset.connection()
         key_pb = connection.save_entity(
             dataset_id=dataset.id(),
@@ -246,7 +263,7 @@ def delete(self):
         entity will be deleted.
         """
         key = self._must_key
-        dataset = key.dataset()
+        dataset = self._must_dataset
         dataset.connection().delete_entities(
             dataset_id=dataset.id(),
             key_pbs=[key.to_protobuf()],
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index ccc89c2360c9..be28c4db42e7 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -4,7 +4,6 @@
 from itertools import izip
 
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
-from gcloud.datastore.dataset import Dataset
 
 
 class Key(object):
@@ -13,22 +12,23 @@ class Key(object):
 
     .. automethod:: __init__
     """
-    def __init__(self, dataset=None, namespace=None, path=None):
+    def __init__(self, path=None, namespace=None, dataset_id=None):
         """Constructor / initializer for a key.
 
-        :type dataset: :class:`gcloud.datastore.dataset.Dataset`
-        :param dataset: A dataset instance for the key.
-
         :type namespace: :class:`str`
         :param namespace: A namespace identifier for the key.
 
         :type path: sequence of dicts
         :param path: Each dict must have keys 'kind' (a string) and optionally
                      'name' (a string) or 'id' (an integer).
+
+        :type dataset_id: string
+        :param dataset_id: The dataset ID assigned by the back-end for the
+                           key.  Leave as None for newly-created keys.
         """
-        self._dataset = dataset
-        self._namespace = namespace
         self._path = path or [{'kind': ''}]
+        self._namespace = namespace
+        self._dataset_id = dataset_id
 
     def _clone(self):
         """Duplicates the Key.
@@ -40,12 +40,10 @@ def _clone(self):
         :rtype: :class:`gcloud.datastore.key.Key`
         :returns: a new `Key` instance
         """
-        clone = copy.deepcopy(self)
-        clone._dataset = self._dataset  # Make a shallow copy of the Dataset.
-        return clone
+        return copy.deepcopy(self)
 
     @classmethod
-    def from_protobuf(cls, pb, dataset=None):
+    def from_protobuf(cls, pb):
         """Factory method for creating a key based on a protobuf.
 
         The protobuf should be one returned from the Cloud Datastore
@@ -54,10 +52,6 @@ def from_protobuf(cls, pb, dataset=None):
         :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
         :param pb: The Protobuf representing the key.
 
-        :type dataset: :class:`gcloud.datastore.dataset.Dataset`
-        :param dataset: A dataset instance. If not passed, defaults to an
-                        instance whose ID is derived from pb.
-
         :rtype: :class:`gcloud.datastore.key.Key`
         :returns: a new `Key` instance
         """
@@ -75,13 +69,10 @@ def from_protobuf(cls, pb, dataset=None):
 
             path.append(element_dict)
 
-        if not dataset:
-            dataset = Dataset(id=pb.partition_id.dataset_id)
-            namespace = pb.partition_id.namespace
-        else:
-            namespace = None
+        dataset_id = pb.partition_id.dataset_id or None
+        namespace = pb.partition_id.namespace
 
-        return cls(dataset, namespace, path)
+        return cls(path, namespace, dataset_id)
 
     def to_protobuf(self):
         """Return a protobuf corresponding to the key.
@@ -91,18 +82,8 @@ def to_protobuf(self):
         """
         key = datastore_pb.Key()
 
-        # Technically a dataset is required to do anything with the key,
-        # but we shouldn't throw a cryptic error if one isn't provided
-        # in the initializer.
-        if self.dataset():
-            # Apparently 's~' is a prefix for High-Replication and is necessary
-            # here. Another valid preflix is 'e~' indicating EU datacenters.
-            dataset_id = self.dataset().id()
-            if dataset_id:
-                if dataset_id[:2] not in ['s~', 'e~']:
-                    dataset_id = 's~' + dataset_id
-
-                key.partition_id.dataset_id = dataset_id
+        if self._dataset_id is not None:
+            key.partition_id.dataset_id = self._dataset_id
 
         if self._namespace:
             key.partition_id.namespace = self._namespace
@@ -161,24 +142,6 @@ def is_partial(self):
         """
         return self.id_or_name() is None
 
-    def dataset(self, dataset=None):
-        """Dataset setter / getter.
-
-        :type dataset: :class:`gcloud.datastore.dataset.Dataset`
-        :param dataset: A dataset instance for the key.
-
-        :rtype: :class:`Key` (for setter); or
-                :class:`gcloud.datastore.dataset.Dataset` (for getter)
-        :returns: a new key, cloned from self., with the given dataset
-                  (setter); or self's dataset (getter).
-        """
-        if dataset:
-            clone = self._clone()
-            clone._dataset = dataset
-            return clone
-        else:
-            return self._dataset
-
     def namespace(self, namespace=None):
         """Namespace setter / getter.
diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py
index a2bdfa661678..84ea41ea1ec0 100644
--- a/gcloud/datastore/test__helpers.py
+++ b/gcloud/datastore/test__helpers.py
@@ -32,14 +32,13 @@ def test_datetime_w_zone(self):
         self.assertEqual(value % 1000000, 4375)
 
     def test_key(self):
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         _DATASET = 'DATASET'
         _KIND = 'KIND'
         _ID = 1234
         _PATH = [{'kind': _KIND, 'id': _ID}]
-        key = Key(dataset=Dataset(_DATASET), path=_PATH)
+        key = Key(dataset_id=_DATASET, path=_PATH)
         name, value = self._callFUT(key)
         self.assertEqual(name, 'key_value')
         self.assertEqual(value, key.to_protobuf())
@@ -131,7 +130,6 @@ def test_datetime(self):
 
     def test_key(self):
         from gcloud.datastore.datastore_v1_pb2 import Value
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         _DATASET = 'DATASET'
@@ -139,7 +137,7 @@ def test_key(self):
         _ID = 1234
         _PATH = [{'kind': _KIND, 'id': _ID}]
         pb = Value()
-        expected = Key(dataset=Dataset(_DATASET), path=_PATH).to_protobuf()
+        expected = Key(dataset_id=_DATASET, path=_PATH).to_protobuf()
         pb.key_value.CopyFrom(expected)
         found = self._callFUT(pb)
         self.assertEqual(found.to_protobuf(), expected)
@@ -236,7 +234,6 @@ def test_datetime(self):
         self.assertEqual(value % 1000000, 4375)
 
     def test_key(self):
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         _DATASET = 'DATASET'
@@ -244,7 +241,7 @@ def test_key(self):
         _ID = 1234
         _PATH = [{'kind': _KIND, 'id': _ID}]
         pb = self._makePB()
-        key = Key(dataset=Dataset(_DATASET), path=_PATH)
+        key = Key(dataset_id=_DATASET, path=_PATH)
         self._callFUT(pb, key)
         value = pb.key_value
         self.assertEqual(value, key.to_protobuf())
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index 60c5c6e3021f..e63aae1e6449 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -380,12 +380,10 @@ def test_run_query_w_namespace_nonempty_result(self):
 
     def test_lookup_single_key_empty_response(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.LookupResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -413,12 +411,10 @@ def test_lookup_single_key_empty_response(self):
 
     def test_lookup_single_key_nonempty_response(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.LookupResponse()
         entity = datastore_pb.Entity()
         entity.key.CopyFrom(key_pb)
@@ -451,14 +447,11 @@ def test_lookup_single_key_nonempty_response(self):
 
     def test_lookup_multiple_keys_empty_response(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb1 = Key(dataset=Dataset(DATASET_ID),
-                      path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
-        key_pb2 = Key(dataset=Dataset(DATASET_ID),
-                      path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
+        key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
         rsp_pb = datastore_pb.LookupResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -487,12 +480,10 @@ def test_lookup_multiple_keys_empty_response(self):
 
     def test_commit_wo_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         mutation = datastore_pb.Mutation()
         insert = mutation.upsert.add()
@@ -528,15 +519,13 @@ def test_commit_wo_transaction(self):
 
     def test_commit_w_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         class Xact(object):
             def id(self):
                 return 'xact'
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         mutation = datastore_pb.Mutation()
         insert = mutation.upsert.add()
@@ -573,12 +562,10 @@ def id(self):
 
     def test_save_entity_wo_transaction_w_upsert(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -617,14 +604,11 @@ def test_save_entity_wo_transaction_w_upsert(self):
 
     def test_save_entity_wo_transaction_w_auto_id(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind'}]).to_protobuf()
-        updated_key_pb = Key(dataset=Dataset(DATASET_ID),
-                             path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind'}]).to_protobuf()
+        updated_key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         mr_pb = rsp_pb.mutation_result
         mr_pb.index_updates = 0
@@ -668,7 +652,6 @@ def test_save_entity_wo_transaction_w_auto_id(self):
 
     def test_save_entity_w_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         mutation = datastore_pb.Mutation()
@@ -677,8 +660,7 @@ class Xact(object):
             def mutation(self):
                 return mutation
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         conn.transaction(Xact())
@@ -691,7 +673,6 @@ def mutation(self):
 
     def test_save_entity_w_transaction_nested_entity(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.entity import Entity
         from gcloud.datastore.key import Key
 
@@ -703,8 +684,7 @@ def mutation(self):
         DATASET_ID = 'DATASET'
         nested = Entity()
         nested['bar'] = u'Bar'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         conn.transaction(Xact())
@@ -717,12 +697,10 @@ def mutation(self):
 
     def test_delete_entities_wo_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -757,7 +735,6 @@ def test_delete_entities_wo_transaction(self):
 
     def test_delete_entities_w_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         mutation = datastore_pb.Mutation()
@@ -766,8 +743,7 @@ class Xact(object):
            def mutation(self):
                 return mutation
         DATASET_ID = 'DATASET'
-        key_pb = Key(dataset=Dataset(DATASET_ID),
-                     path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         conn.transaction(Xact())
diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py
index a734512119d0..588499c2f41a 100644
--- a/gcloud/datastore/test_dataset.py
+++ b/gcloud/datastore/test_dataset.py
@@ -56,7 +56,7 @@ def test_get_entities_miss(self):
         DATASET_ID = 'DATASET'
         connection = _Connection()
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(dataset=dataset, path=[{'kind': 'Kind', 'id': 1234}])
+        key = Key(path=[{'kind': 'Kind', 'id': 1234}])
         self.assertEqual(dataset.get_entities([key]), [])
 
     def test_get_entities_hit(self):
@@ -67,6 +67,7 @@ def test_get_entities_hit(self):
         ID = 1234
         PATH = [{'kind': KIND, 'id': ID}]
         entity_pb = datastore_pb.Entity()
+        entity_pb.key.partition_id.dataset_id = DATASET_ID
         path_element = entity_pb.key.path_element.add()
         path_element.kind = KIND
         path_element.id = ID
@@ -75,10 +76,10 @@ def test_get_entities_hit(self):
         prop.value.string_value = 'Foo'
         connection = _Connection(entity_pb)
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(dataset=dataset, path=PATH)
+        key = Key(path=PATH)
         result, = dataset.get_entities([key])
         key = result.key()
-        self.assertTrue(key.dataset() is dataset)
+        self.assertEqual(key._dataset_id, DATASET_ID)
         self.assertEqual(key.path(), PATH)
         self.assertEqual(list(result), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
@@ -88,7 +89,7 @@ def test_get_entity_miss(self):
         DATASET_ID = 'DATASET'
         connection = _Connection()
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(dataset=dataset, path=[{'kind': 'Kind', 'id': 1234}])
+        key = Key(path=[{'kind': 'Kind', 'id': 1234}])
         self.assertEqual(dataset.get_entity(key), None)
 
     def test_get_entity_hit(self):
@@ -99,6 +100,7 @@ def test_get_entity_hit(self):
         ID = 1234
         PATH = [{'kind': KIND, 'id': ID}]
         entity_pb = datastore_pb.Entity()
+        entity_pb.key.partition_id.dataset_id = DATASET_ID
         path_element = entity_pb.key.path_element.add()
         path_element.kind = KIND
         path_element.id = ID
@@ -107,10 +109,10 @@ def test_get_entity_hit(self):
         prop.value.string_value = 'Foo'
         connection = _Connection(entity_pb)
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(dataset=dataset, path=PATH)
+        key = Key(path=PATH)
         result = dataset.get_entity(key)
         key = result.key()
-        self.assertTrue(key.dataset() is dataset)
+        self.assertEqual(key._dataset_id, DATASET_ID)
         self.assertEqual(key.path(), PATH)
         self.assertEqual(list(result), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py
index 1d67001b4ab9..4d3675aedee1 100644
--- a/gcloud/datastore/test_entity.py
+++ b/gcloud/datastore/test_entity.py
@@ -41,7 +41,7 @@ def test_key_getter(self):
         entity = self._makeOne()
         key = entity.key()
         self.assertIsInstance(key, Key)
-        self.assertEqual(key.dataset().id(), _DATASET_ID)
+        self.assertEqual(key._dataset_id, None)
         self.assertEqual(key.kind(), _KIND)
 
     def test_key_setter(self):
@@ -50,27 +50,60 @@ def test_key_setter(self):
         entity.key(key)
         self.assertTrue(entity.key() is key)
 
-    def test_from_key(self):
+    def test_from_key_wo_dataset(self):
+        from gcloud.datastore.key import Key
+
+        klass = self._getTargetClass()
+        key = Key().kind(_KIND).id(_ID)
+        entity = klass.from_key(key)
+        self.assertTrue(entity.dataset() is None)
+        self.assertEqual(entity.kind(), _KIND)
+        key = entity.key()
+        self.assertEqual(key.kind(), _KIND)
+        self.assertEqual(key.id(), _ID)
+
+    def test_from_key_w_dataset(self):
         from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
         klass = self._getTargetClass()
         dataset = Dataset(_DATASET_ID)
-        key = Key(dataset=dataset).kind(_KIND).id(_ID)
-        entity = klass.from_key(key)
+        key = Key().kind(_KIND).id(_ID)
+        entity = klass.from_key(key, dataset)
         self.assertTrue(entity.dataset() is dataset)
         self.assertEqual(entity.kind(), _KIND)
         key = entity.key()
         self.assertEqual(key.kind(), _KIND)
         self.assertEqual(key.id(), _ID)
 
-    def test_from_protobuf(self):
+    def test_from_protobuf_wo_dataset(self):
+        from gcloud.datastore import datastore_v1_pb2 as datastore_pb
+
+        entity_pb = datastore_pb.Entity()
+        entity_pb.key.partition_id.dataset_id = _DATASET_ID
+        entity_pb.key.path_element.add(kind=_KIND, id=_ID)
+        entity_pb.key.partition_id.dataset_id = _DATASET_ID
+        prop_pb = entity_pb.property.add()
+        prop_pb.name = 'foo'
+        prop_pb.value.string_value = 'Foo'
+        klass = self._getTargetClass()
+        entity = klass.from_protobuf(entity_pb)
+        self.assertTrue(entity.dataset() is None)
+        self.assertEqual(entity.kind(), _KIND)
+        self.assertEqual(entity['foo'], 'Foo')
+        key = entity.key()
+        self.assertEqual(key._dataset_id, _DATASET_ID)
+        self.assertEqual(key.kind(), _KIND)
+        self.assertEqual(key.id(), _ID)
+
+    def test_from_protobuf_w_dataset(self):
         from gcloud.datastore import datastore_v1_pb2 as datastore_pb
         from gcloud.datastore.dataset import Dataset
 
         entity_pb = datastore_pb.Entity()
         entity_pb.key.partition_id.dataset_id = _DATASET_ID
         entity_pb.key.path_element.add(kind=_KIND, id=_ID)
+        entity_pb.key.partition_id.dataset_id = _DATASET_ID
         prop_pb = entity_pb.property.add()
         prop_pb.name = 'foo'
         prop_pb.value.string_value = 'Foo'
@@ -81,10 +114,22 @@ def test_from_protobuf(self):
         self.assertEqual(entity.kind(), _KIND)
         self.assertEqual(entity['foo'], 'Foo')
         key = entity.key()
-        self.assertTrue(key.dataset() is dataset)
+        self.assertEqual(key._dataset_id, _DATASET_ID)
         self.assertEqual(key.kind(), _KIND)
         self.assertEqual(key.id(), _ID)
 
+    def test__must_key_no_key(self):
+        from gcloud.datastore.entity import NoKey
+
+        entity = self._makeOne(None, None)
+        self.assertRaises(NoKey, getattr, entity, '_must_key')
+
+    def test__must_dataset_no_dataset(self):
+        from gcloud.datastore.entity import NoDataset
+
+        entity = self._makeOne(None, None)
+        self.assertRaises(NoDataset, getattr, entity, '_must_dataset')
+
     def test_reload_no_key(self):
         from gcloud.datastore.entity import NoKey
 
@@ -94,8 +139,8 @@ def test_reload_no_key(self):
 
     def test_reload_miss(self):
         dataset = _Dataset()
-        key = _Key(dataset)
-        entity = self._makeOne()
+        key = _Key()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         # Does not raise, does not update on miss.
@@ -105,8 +150,8 @@ def test_reload_miss(self):
     def test_reload_hit(self):
         dataset = _Dataset()
         dataset['KEY'] = {'foo': 'Bar'}
-        key = _Key(dataset)
-        entity = self._makeOne()
+        key = _Key()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         self.assertTrue(entity.reload() is entity)
@@ -122,8 +167,8 @@ def test_save_no_key(self):
     def test_save_wo_transaction_wo_auto_id_wo_returned_key(self):
         connection = _Connection()
         dataset = _Dataset(connection)
-        key = _Key(dataset)
-        entity = self._makeOne()
+        key = _Key()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         self.assertTrue(entity.save() is entity)
@@ -136,8 +181,8 @@ def test_save_w_transaction_wo_partial_key(self):
         connection = _Connection()
         transaction = connection._transaction = _Transaction()
         dataset = _Dataset(connection)
-        key = _Key(dataset)
-        entity = self._makeOne()
+        key = _Key()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         self.assertTrue(entity.save() is entity)
@@ -151,9 +196,9 @@ def test_save_w_transaction_w_partial_key(self):
         connection = _Connection()
         transaction = connection._transaction = _Transaction()
         dataset = _Dataset(connection)
-        key = _Key(dataset)
+        key = _Key()
         key._partial = True
-        entity = self._makeOne()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         self.assertTrue(entity.save() is entity)
@@ -171,8 +216,8 @@ def test_save_w_returned_key(self):
         connection = _Connection()
         connection._save_result = key_pb
         dataset = _Dataset(connection)
-        key = _Key(dataset)
-        entity = self._makeOne()
+        key = _Key()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         self.assertTrue(entity.save() is entity)
@@ -191,8 +236,8 @@ def test_delete_no_key(self):
     def test_delete(self):
         connection = _Connection()
         dataset = _Dataset(connection)
-        key = _Key(dataset)
-        entity = self._makeOne()
+        key = _Key()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         self.assertTrue(entity.delete() is None)
@@ -205,9 +250,9 @@ def test___repr___no_key_empty(self):
     def test___repr___w_key_non_empty(self):
         connection = _Connection()
         dataset = _Dataset(connection)
-        key = _Key(dataset)
+        key = _Key()
         key.path('/bar/baz')
-        entity = self._makeOne()
+        entity = self._makeOne(dataset)
         entity.key(key)
         entity['foo'] = 'Foo'
         self.assertEqual(repr(entity), "<Entity/bar/baz {'foo': 'Foo'}>")
@@ -219,12 +264,6 @@ class _Key(object):
     _partial = False
     _path = None
 
-    def __init__(self, dataset):
-        self._dataset = dataset
-
-    def dataset(self):
-        return self._dataset
-
     def to_protobuf(self):
         return self._key
 
diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py
index 9e778758df41..a05e29de7646 100644
--- a/gcloud/datastore/test_key.py
+++ b/gcloud/datastore/test_key.py
@@ -7,8 +7,8 @@ def _getTargetClass(self):
         from gcloud.datastore.key import Key
         return Key
 
-    def _makeOne(self, dataset=None, namespace=None, path=None):
-        return self._getTargetClass()(dataset, namespace, path)
+    def _makeOne(self, path=None, namespace=None, dataset_id=None):
+        return self._getTargetClass()(path, namespace, dataset_id)
 
     def _makePB(self, dataset_id=None, namespace=None, path=()):
         from gcloud.datastore.datastore_v1_pb2 import Key
@@ -28,72 +28,48 @@ def _makePB(self, dataset_id=None, namespace=None, path=()):
 
     def test_ctor_defaults(self):
         key = self._makeOne()
-        self.assertEqual(key.dataset(), None)
+        self.assertEqual(key._dataset_id, None)
         self.assertEqual(key.namespace(), None)
         self.assertEqual(key.kind(), '')
         self.assertEqual(key.path(), [{'kind': ''}])
 
     def test_ctor_explicit(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _ID = 1234
         _PATH = [{'kind': _KIND, 'id': _ID}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE, _PATH)
-        self.assertTrue(key.dataset() is dataset)
+        key = self._makeOne(_PATH, _NAMESPACE, _DATASET)
+        self.assertEqual(key._dataset_id, _DATASET)
         self.assertEqual(key.namespace(), _NAMESPACE)
         self.assertEqual(key.kind(), _KIND)
         self.assertEqual(key.path(), _PATH)
 
     def test__clone(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _ID = 1234
         _PATH = [{'kind': _KIND, 'id': _ID}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE, _PATH)
+        key = self._makeOne(_PATH, _NAMESPACE, _DATASET)
         clone = key._clone()
-        self.assertTrue(clone.dataset() is dataset)
+        self.assertEqual(clone._dataset_id, _DATASET)
         self.assertEqual(clone.namespace(), _NAMESPACE)
         self.assertEqual(clone.kind(), _KIND)
         self.assertEqual(clone.path(), _PATH)
 
-    def test_from_protobuf_empty_path_explicit_dataset(self):
-        from gcloud.datastore.dataset import Dataset
-        _DATASET = 'DATASET'
-        dataset = Dataset(_DATASET)
-        pb = self._makePB()
-        key = self._getTargetClass().from_protobuf(pb, dataset)
-        self.assertTrue(key.dataset() is dataset)
-        self.assertEqual(key.namespace(), None)
-        self.assertEqual(key.kind(), '')
-        self.assertEqual(key.path(), [{'kind': ''}])
-
-    def test_from_protobuf_w_dataset_in_pb(self):
+    def test_from_protobuf_w_dataset_id_in_pb(self):
         _DATASET = 'DATASET'
         pb = self._makePB(_DATASET)
         key = self._getTargetClass().from_protobuf(pb)
-        self.assertEqual(key.dataset().id(), _DATASET)
+        self.assertEqual(key._dataset_id, _DATASET)
 
-    def test_from_protobuf_w_namespace_in_pb_wo_dataset_passed(self):
+    def test_from_protobuf_w_namespace_in_pb(self):
         _NAMESPACE = 'NAMESPACE'
         pb = self._makePB(namespace=_NAMESPACE)
         key = self._getTargetClass().from_protobuf(pb)
         self.assertEqual(key.namespace(), _NAMESPACE)
 
-    def test_from_protobuf_w_namespace_in_pb_w_dataset_passed(self):
-        from gcloud.datastore.dataset import Dataset
-        _DATASET = 'DATASET'
-        _NAMESPACE = 'NAMESPACE'
-        dataset = Dataset(_DATASET)
-        pb = self._makePB(namespace=_NAMESPACE)
-        key = self._getTargetClass().from_protobuf(pb, dataset)
-        self.assertEqual(key.namespace(), None)
-
     def test_from_protobuf_w_path_in_pb(self):
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
@@ -126,34 +102,9 @@ def test_to_protobuf_defaults(self):
         self.assertEqual(elem.name, '')
         self.assertEqual(elem.id, 0)
 
-    def test_to_protobuf_w_explicit_dataset_empty_id(self):
-        from gcloud.datastore.dataset import Dataset
-        dataset = Dataset('')
-        key = self._makeOne(dataset)
-        pb = key.to_protobuf()
-        self.assertEqual(pb.partition_id.dataset_id, '')
-
     def test_to_protobuf_w_explicit_dataset_no_prefix(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset)
-        pb = key.to_protobuf()
-        self.assertEqual(pb.partition_id.dataset_id, 's~%s' % _DATASET)
-
-    def test_to_protobuf_w_explicit_dataset_w_s_prefix(self):
-        from gcloud.datastore.dataset import Dataset
-        _DATASET = 's~DATASET'
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset)
-        pb = key.to_protobuf()
-        self.assertEqual(pb.partition_id.dataset_id, _DATASET)
-
-    def test_to_protobuf_w_explicit_dataset_w_e_prefix(self):
-        from gcloud.datastore.dataset import Dataset
-        _DATASET = 'e~DATASET'
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset)
+        key = self._makeOne(dataset_id=_DATASET)
         pb = key.to_protobuf()
         self.assertEqual(pb.partition_id.dataset_id, _DATASET)
 
@@ -187,7 +138,7 @@ def test_to_protobuf_w_explicit_path(self):
 
     def test_from_path_empty(self):
         key = self._getTargetClass().from_path()
-        self.assertEqual(key.dataset(), None)
+        self.assertEqual(key._dataset_id, None)
         self.assertEqual(key.namespace(), None)
         self.assertEqual(key.kind(), '')
         self.assertEqual(key.path(), [{'kind': ''}])
@@ -236,129 +187,93 @@ def test_is_partial_w_name(self):
         key = self._makeOne(path=_PATH)
         self.assertFalse(key.is_partial())
 
-    def test_dataset_setter(self):
-        from gcloud.datastore.dataset import Dataset
-        _DATASET = 'DATASET'
-        _NAMESPACE = 'NAMESPACE'
-        _KIND = 'KIND'
-        _NAME = 'NAME'
-        _PATH = [{'kind': _KIND, 'name': _NAME}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(namespace=_NAMESPACE, path=_PATH)
-        after = key.dataset(dataset)
-        self.assertFalse(after is key)
-        self.assertTrue(isinstance(after, self._getTargetClass()))
-        self.assertTrue(after.dataset() is dataset)
-        self.assertEqual(after.namespace(), _NAMESPACE)
-        self.assertEqual(after.path(), _PATH)
-
     def test_namespace_setter(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _NAME = 'NAME'
         _PATH = [{'kind': _KIND, 'name': _NAME}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, path=_PATH)
+        key = self._makeOne(path=_PATH, dataset_id=_DATASET)
         after = key.namespace(_NAMESPACE)
         self.assertFalse(after is key)
         self.assertTrue(isinstance(after, self._getTargetClass()))
-        self.assertTrue(after.dataset() is dataset)
+        self.assertEqual(after._dataset_id, _DATASET)
         self.assertEqual(after.namespace(), _NAMESPACE)
         self.assertEqual(after.path(), _PATH)
 
     def test_path_setter(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _NAME = 'NAME'
         _PATH = [{'kind': _KIND, 'name': _NAME}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE)
+        key = self._makeOne(namespace=_NAMESPACE, dataset_id=_DATASET)
        after = key.path(_PATH)
         self.assertFalse(after is key)
         self.assertTrue(isinstance(after, self._getTargetClass()))
-        self.assertTrue(after.dataset() is dataset)
+        self.assertEqual(after._dataset_id, _DATASET)
         self.assertEqual(after.namespace(), _NAMESPACE)
         self.assertEqual(after.path(), _PATH)
 
     def test_kind_getter_empty_path(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE)
+        key = self._makeOne(namespace=_NAMESPACE, dataset_id=_DATASET)
         key._path = ()  # edge case
         self.assertEqual(key.kind(), None)
 
     def test_kind_setter(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
         _KIND_BEFORE = 'KIND_BEFORE'
         _KIND_AFTER = 'KIND_AFTER'
         _NAME = 'NAME'
         _PATH = [{'kind': _KIND_BEFORE, 'name': _NAME}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE, _PATH)
+        key = self._makeOne(_PATH, _NAMESPACE, _DATASET)
         after = key.kind(_KIND_AFTER)
         self.assertFalse(after is key)
         self.assertTrue(isinstance(after, self._getTargetClass()))
-        self.assertTrue(after.dataset() is dataset)
+        self.assertEqual(after._dataset_id, _DATASET)
         self.assertEqual(after.namespace(), _NAMESPACE)
         self.assertEqual(after.path(), [{'kind': _KIND_AFTER, 'name': _NAME}])
 
     def test_id_getter_empty_path(self):
-        from gcloud.datastore.dataset import Dataset
-        _DATASET = 'DATASET'
-        _NAMESPACE = 'NAMESPACE'
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE)
+        key = self._makeOne()
         key._path = ()  # edge case
         self.assertEqual(key.id(), None)
 
     def test_id_setter(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _ID_BEFORE = 1234
         _ID_AFTER = 5678
         _PATH = [{'kind': _KIND, 'id': _ID_BEFORE}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE, _PATH)
+        key = self._makeOne(_PATH, _NAMESPACE, _DATASET)
         after = key.id(_ID_AFTER)
         self.assertFalse(after is key)
         self.assertTrue(isinstance(after, self._getTargetClass()))
-        self.assertTrue(after.dataset() is dataset)
+        self.assertEqual(after._dataset_id, _DATASET)
         self.assertEqual(after.namespace(), _NAMESPACE)
         self.assertEqual(after.path(), [{'kind': _KIND, 'id': _ID_AFTER}])
 
     def test_name_getter_empty_path(self):
-        from gcloud.datastore.dataset import Dataset
-        _DATASET = 'DATASET'
-        _NAMESPACE = 'NAMESPACE'
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE)
+        key = self._makeOne()
         key._path = ()  # edge case
         self.assertEqual(key.name(), None)
 
     def test_name_setter(self):
-        from gcloud.datastore.dataset import Dataset
         _DATASET = 'DATASET'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _NAME_BEFORE = 'NAME_BEFORE'
         _NAME_AFTER = 'NAME_AFTER'
         _PATH = [{'kind': _KIND, 'name': _NAME_BEFORE}]
-        dataset = Dataset(_DATASET)
-        key = self._makeOne(dataset, _NAMESPACE, _PATH)
+        key = self._makeOne(_PATH, _NAMESPACE, _DATASET)
         after = key.name(_NAME_AFTER)
         self.assertFalse(after is key)
         self.assertTrue(isinstance(after, self._getTargetClass()))
-        self.assertTrue(after.dataset() is dataset)
+        self.assertEqual(after._dataset_id, _DATASET)
         self.assertEqual(after.namespace(), _NAMESPACE)
         self.assertEqual(after.path(), [{'kind': _KIND, 'name': _NAME_AFTER}])
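Reviewer note: a minimal usage sketch of the reworked API after this change (not part of the diff; the 'my-dataset-id' value and the in-memory-only flow are illustrative assumptions):

    from gcloud.datastore.dataset import Dataset
    from gcloud.datastore.entity import Entity
    from gcloud.datastore.key import Key

    dataset = Dataset('my-dataset-id')      # hypothetical dataset ID
    key = Key(path=[{'kind': 'Person'}])    # keys no longer carry a Dataset
    entity = Entity.from_key(key, dataset)  # the dataset is passed alongside

    entity['name'] = 'Alice'
    assert entity.dataset() is dataset        # stored on the entity itself
    assert entity.key()._dataset_id is None   # per the new Key docstring, left
                                              # as None for newly-created keys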