Commit fe6e4fb

Merge pull request #276 from tseaver/121-dont_copy_key_datasetid_to_pb

Fix #121: don't copy project ID to the Key protobuf's dataset_id.

silvolu committed Oct 22, 2014 · 2 parents c2a38fb + c769cc7
Showing 7 changed files with 159 additions and 250 deletions.
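
In short, Key.to_protobuf() no longer derives the dataset ID from an attached Dataset (and no longer prepends the 's~' high-replication prefix); it only writes partition_id.dataset_id when the key itself carries an ID, which normally comes back from the back-end via Key.from_protobuf(). The sketch below is illustrative only, not part of the commit; it assumes the new signatures shown in the hunks that follow and proto2's empty-string default for unset fields.

    from gcloud.datastore.key import Key

    # A locally created key carries no dataset ID, so none is copied into
    # the protobuf (and nothing gets an 's~' prefix any more).
    new_key = Key(path=[{'kind': 'Person', 'name': 'Alice'}])
    assert new_key.to_protobuf().partition_id.dataset_id == ''

    # A key whose ID came from the back-end copies it back verbatim.
    stored_key = Key(path=[{'kind': 'Person', 'name': 'Alice'}],
                     dataset_id='s~my-project')
    assert stored_key.to_protobuf().partition_id.dataset_id == 's~my-project'
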
41 changes: 29 additions & 12 deletions gcloud/datastore/entity.py
@@ -23,6 +23,10 @@ class NoKey(RuntimeError):
"""Exception raised by Entity methods which require a key."""


class NoDataset(RuntimeError):
"""Exception raised by Entity methods which require a dataset."""


class Entity(dict):
""":type dataset: :class:`gcloud.datastore.dataset.Dataset`
:param dataset: The dataset in which this entity belongs.
@@ -68,8 +72,9 @@ class Entity(dict):

def __init__(self, dataset=None, kind=None):
super(Entity, self).__init__()
if dataset and kind:
self._key = Key(dataset=dataset).kind(kind)
self._dataset = dataset
if kind:
self._key = Key().kind(kind)
else:
self._key = None

@@ -86,8 +91,7 @@ def dataset(self):
be `None`. It also means that if you change the key on the entity,
this will refer to that key's dataset.
"""
if self._key:
return self._key.dataset()
return self._dataset

def key(self, key=None):
"""Get or set the :class:`.datastore.key.Key` on the current entity.
@@ -127,7 +131,7 @@ def kind(self):
return self._key.kind()

@classmethod
def from_key(cls, key):
def from_key(cls, key, dataset=None):
"""Create entity based on :class:`.datastore.key.Key`.
.. note::
@@ -140,7 +144,7 @@ def from_key(cls, key):
:class:`gcloud.datastore.key.Key`.
"""

return cls().key(key)
return cls(dataset).key(key)

@classmethod
def from_protobuf(cls, pb, dataset=None):
@@ -159,8 +163,8 @@ def from_protobuf(cls, pb, dataset=None):
# This is here to avoid circular imports.
from gcloud.datastore import _helpers

key = Key.from_protobuf(pb.key, dataset=dataset)
entity = cls.from_key(key)
key = Key.from_protobuf(pb.key)
entity = cls.from_key(key, dataset)

for property_pb in pb.property:
value = _helpers._get_value_from_property_pb(property_pb)
@@ -177,9 +181,21 @@ def _must_key(self):
:raises: NoKey if key is None
"""
if self._key is None:
raise NoKey('no key')
raise NoKey()
return self._key

@property
def _must_dataset(self):
"""Return our dataset, or raise NoDataset if not set.
:rtype: :class:`gcloud.datastore.dataset.Dataset`.
:returns: our dataset
:raises: NoDataset if dataset is None
"""
if self._dataset is None:
raise NoDataset()
return self._dataset

def reload(self):
"""Reloads the contents of this entity from the datastore.
@@ -193,7 +209,8 @@ def reload(self):
exist only locally.
"""
key = self._must_key
entity = key.dataset().get_entity(key.to_protobuf())
dataset = self._must_dataset
entity = dataset.get_entity(key.to_protobuf())

if entity:
self.update(entity)
@@ -217,7 +234,7 @@ def save(self):
:returns: The entity with a possibly updated Key.
"""
key = self._must_key
dataset = key.dataset()
dataset = self._must_dataset
connection = dataset.connection()
key_pb = connection.save_entity(
dataset_id=dataset.id(),
@@ -246,7 +263,7 @@ def delete(self):
entity will be deleted.
"""
key = self._must_key
dataset = key.dataset()
dataset = self._must_dataset
dataset.connection().delete_entities(
dataset_id=dataset.id(),
key_pbs=[key.to_protobuf()],
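
Taken together, the entity.py changes make an Entity track its dataset itself (self._dataset) instead of reaching through its key, with reload(), save() and delete() going through the new _must_dataset guard. An illustrative sketch of that behavior follows; it is not part of the commit and assumes Dataset can be built from a bare ID, as the removed test lines further down do.

    from gcloud.datastore.dataset import Dataset
    from gcloud.datastore.entity import Entity, NoDataset
    from gcloud.datastore.key import Key

    entity = Entity(kind='Person')   # has a key, but no dataset attached
    entity['name'] = 'Alice'
    try:
        entity.save()                # save()/reload()/delete() now need a dataset
    except NoDataset:
        pass                         # expected: nothing to save against

    dataset = Dataset('my-dataset-id')
    entity = Entity.from_key(Key().kind('Person'), dataset)
    assert entity.dataset() is dataset
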
65 changes: 14 additions & 51 deletions gcloud/datastore/key.py
@@ -4,7 +4,6 @@
from itertools import izip

from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore.dataset import Dataset


class Key(object):
@@ -13,22 +12,23 @@ class Key(object):
.. automethod:: __init__
"""

def __init__(self, dataset=None, namespace=None, path=None):
def __init__(self, path=None, namespace=None, dataset_id=None):
"""Constructor / initializer for a key.
:type dataset: :class:`gcloud.datastore.dataset.Dataset`
:param dataset: A dataset instance for the key.
:type namespace: :class:`str`
:param namespace: A namespace identifier for the key.
:type path: sequence of dicts
:param path: Each dict must have keys 'kind' (a string) and optionally
'name' (a string) or 'id' (an integer).
:type dataset_id: string
:param dataset_id: The dataset ID assigned by the back-end for the key.
Leave as None for newly-created keys.
"""
self._dataset = dataset
self._namespace = namespace
self._path = path or [{'kind': ''}]
self._namespace = namespace
self._dataset_id = dataset_id

def _clone(self):
"""Duplicates the Key.
@@ -40,12 +40,10 @@ def _clone(self):
:rtype: :class:`gcloud.datastore.key.Key`
:returns: a new `Key` instance
"""
clone = copy.deepcopy(self)
clone._dataset = self._dataset # Make a shallow copy of the Dataset.
return clone
return copy.deepcopy(self)

@classmethod
def from_protobuf(cls, pb, dataset=None):
def from_protobuf(cls, pb):
"""Factory method for creating a key based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
@@ -54,10 +52,6 @@ def from_protobuf(cls, pb, dataset=None):
:type pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param pb: The Protobuf representing the key.
:type dataset: :class:`gcloud.datastore.dataset.Dataset`
:param dataset: A dataset instance. If not passed, defaults to an
instance whose ID is derived from pb.
:rtype: :class:`gcloud.datastore.key.Key`
:returns: a new `Key` instance
"""
@@ -75,13 +69,10 @@

path.append(element_dict)

if not dataset:
dataset = Dataset(id=pb.partition_id.dataset_id)
namespace = pb.partition_id.namespace
else:
namespace = None
dataset_id = pb.partition_id.dataset_id or None
namespace = pb.partition_id.namespace

return cls(dataset, namespace, path)
return cls(path, namespace, dataset_id)

def to_protobuf(self):
"""Return a protobuf corresponding to the key.
@@ -91,18 +82,8 @@
"""
key = datastore_pb.Key()

# Technically a dataset is required to do anything with the key,
# but we shouldn't throw a cryptic error if one isn't provided
# in the initializer.
if self.dataset():
# Apparently 's~' is a prefix for High-Replication and is necessary
here. Another valid prefix is 'e~' indicating EU datacenters.
dataset_id = self.dataset().id()
if dataset_id:
if dataset_id[:2] not in ['s~', 'e~']:
dataset_id = 's~' + dataset_id

key.partition_id.dataset_id = dataset_id
if self._dataset_id is not None:
key.partition_id.dataset_id = self._dataset_id

if self._namespace:
key.partition_id.namespace = self._namespace
Expand Down Expand Up @@ -161,24 +142,6 @@ def is_partial(self):
"""
return self.id_or_name() is None

def dataset(self, dataset=None):
"""Dataset setter / getter.
:type dataset: :class:`gcloud.datastore.dataset.Dataset`
:param dataset: A dataset instance for the key.
:rtype: :class:`Key` (for setter); or
:class:`gcloud.datastore.dataset.Dataset` (for getter)
:returns: a new key, cloned from self., with the given dataset
(setter); or self's dataset (getter).
"""
if dataset:
clone = self._clone()
clone._dataset = dataset
return clone
else:
return self._dataset

def namespace(self, namespace=None):
"""Namespace setter / getter.
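
The net effect in key.py: a Key now holds only a path, a namespace, and an optional back-end-assigned dataset_id; it no longer references a Dataset, Key.from_protobuf() no longer fabricates one, and Key.to_protobuf() copies the stored ID verbatim instead of deriving and 's~'-prefixing the project ID. A round-trip sketch under these new signatures (illustrative, not part of the commit):

    from gcloud.datastore import datastore_v1_pb2 as datastore_pb
    from gcloud.datastore.key import Key

    pb = datastore_pb.Key()
    pb.partition_id.dataset_id = 's~my-project'   # as the back-end returns it
    element = pb.path_element.add()
    element.kind = 'Person'
    element.name = 'Alice'

    key = Key.from_protobuf(pb)                   # no Dataset is created here
    round_trip = key.to_protobuf()
    assert round_trip.partition_id.dataset_id == 's~my-project'
    assert round_trip.path_element[0].kind == 'Person'
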
9 changes: 3 additions & 6 deletions gcloud/datastore/test__helpers.py
@@ -32,14 +32,13 @@ def test_datetime_w_zone(self):
self.assertEqual(value % 1000000, 4375)

def test_key(self):
from gcloud.datastore.dataset import Dataset
from gcloud.datastore.key import Key

_DATASET = 'DATASET'
_KIND = 'KIND'
_ID = 1234
_PATH = [{'kind': _KIND, 'id': _ID}]
key = Key(dataset=Dataset(_DATASET), path=_PATH)
key = Key(dataset_id=_DATASET, path=_PATH)
name, value = self._callFUT(key)
self.assertEqual(name, 'key_value')
self.assertEqual(value, key.to_protobuf())
@@ -131,15 +130,14 @@ def test_datetime(self):

def test_key(self):
from gcloud.datastore.datastore_v1_pb2 import Value
from gcloud.datastore.dataset import Dataset
from gcloud.datastore.key import Key

_DATASET = 'DATASET'
_KIND = 'KIND'
_ID = 1234
_PATH = [{'kind': _KIND, 'id': _ID}]
pb = Value()
expected = Key(dataset=Dataset(_DATASET), path=_PATH).to_protobuf()
expected = Key(dataset_id=_DATASET, path=_PATH).to_protobuf()
pb.key_value.CopyFrom(expected)
found = self._callFUT(pb)
self.assertEqual(found.to_protobuf(), expected)
@@ -236,15 +234,14 @@ def test_datetime(self):
self.assertEqual(value % 1000000, 4375)

def test_key(self):
from gcloud.datastore.dataset import Dataset
from gcloud.datastore.key import Key

_DATASET = 'DATASET'
_KIND = 'KIND'
_ID = 1234
_PATH = [{'kind': _KIND, 'id': _ID}]
pb = self._makePB()
key = Key(dataset=Dataset(_DATASET), path=_PATH)
key = Key(dataset_id=_DATASET, path=_PATH)
self._callFUT(pb, key)
value = pb.key_value
self.assertEqual(value, key.to_protobuf())
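
In these helper tests the only change is how the expected key is built: the Dataset wrapper (and its import) goes away and the ID is passed straight to the Key. A standalone sketch of the new construction and what it implies for the protobuf (illustrative; the constant names mirror the tests):

    from gcloud.datastore.key import Key

    _DATASET = 'DATASET'
    _KIND = 'KIND'
    _ID = 1234

    key = Key(dataset_id=_DATASET, path=[{'kind': _KIND, 'id': _ID}])
    pb = key.to_protobuf()
    assert pb.partition_id.dataset_id == _DATASET   # copied as-is, no 's~' prefix
    assert pb.path_element[0].kind == _KIND
    assert pb.path_element[0].id == _ID
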
(The remaining 4 changed files are not shown here.)