Removing hacks that avoid using project ID in key protos.
These hacks were needed because the v1beta2 API returned dataset IDs with an
alias prefix added (turning foo into s~foo), which sometimes caused mismatches.
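
For context, a minimal, hypothetical sketch of the kind of mismatch these hacks worked around (the kind and id values are made up; the entity_pb2 import path is the one used in the diff below): a key built locally with the bare project ID compares unequal to the same key echoed back by v1beta2 with the 's~' alias.

# Hypothetical illustration of the aliasing mismatch; kind/id are made up.
from gcloud.datastore._generated import entity_pb2

local_key = entity_pb2.Key()
local_key.partition_id.project_id = 'foo'        # project ID as the client knows it
local_key.path.add(kind='Person', id=1234)

returned_key = entity_pb2.Key()
returned_key.partition_id.project_id = 's~foo'   # aliased form echoed by the backend
returned_key.path.add(kind='Person', id=1234)

# The aliased project ID makes otherwise identical keys compare unequal,
# which is why the removed helpers stripped the project_id field from
# key protobufs before sending requests.
assert local_key != returned_key
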
dhermes committed Feb 13, 2016
1 parent 710fe77 commit 8434764
Showing 7 changed files with 17 additions and 236 deletions.
5 changes: 2 additions & 3 deletions gcloud/datastore/batch.py
@@ -215,7 +215,7 @@ def delete(self, key):
if not _projects_equal(self.project, key.project):
raise ValueError("Key must be from same project as batch")

- key_pb = helpers._prepare_key_for_request(key.to_protobuf())
+ key_pb = key.to_protobuf()
self._add_delete_key_pb().CopyFrom(key_pb)

def begin(self):
@@ -297,6 +297,5 @@ def _assign_entity_to_pb(entity_pb, entity):
:param entity: The entity being updated within the batch / transaction.
"""
bare_entity_pb = helpers.entity_to_protobuf(entity)
- key_pb = helpers._prepare_key_for_request(bare_entity_pb.key)
- bare_entity_pb.key.CopyFrom(key_pb)
+ bare_entity_pb.key.CopyFrom(bare_entity_pb.key)
entity_pb.CopyFrom(bare_entity_pb)
24 changes: 0 additions & 24 deletions gcloud/datastore/connection.py
@@ -20,7 +20,6 @@
from gcloud.environment_vars import GCD_HOST
from gcloud.exceptions import make_exception
from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
- from gcloud.datastore._generated import entity_pb2 as _entity_pb2


class Connection(connection.Connection):
@@ -384,28 +383,6 @@ def _set_read_options(request, eventual, transaction_id):
opts.transaction = transaction_id


def _prepare_key_for_request(key_pb): # pragma: NO COVER copied from helpers
"""Add protobuf keys to a request object.
.. note::
This is copied from `helpers` to avoid a cycle:
_implicit_environ -> connection -> helpers -> key -> _implicit_environ
:type key_pb: :class:`gcloud.datastore._generated.entity_pb2.Key`
:param key_pb: A key to be added to a request.
:rtype: :class:`gcloud.datastore._generated.entity_pb2.Key`
:returns: A key which will be added to a request. It will be the
original if nothing needs to be changed.
"""
if key_pb.partition_id.project_id: # Simple field (string)
new_key_pb = _entity_pb2.Key()
new_key_pb.CopyFrom(key_pb)
new_key_pb.partition_id.ClearField('project_id')
key_pb = new_key_pb
return key_pb


def _add_keys_to_request(request_field_pb, key_pbs):
"""Add protobuf keys to a request object.
@@ -416,7 +393,6 @@ def _add_keys_to_request(request_field_pb, key_pbs):
:param key_pbs: The keys to add to a request.
"""
for key_pb in key_pbs:
- key_pb = _prepare_key_for_request(key_pb)
request_field_pb.add().CopyFrom(key_pb)


67 changes: 0 additions & 67 deletions gcloud/datastore/helpers.py
@@ -35,46 +35,6 @@
INT_VALUE_CHECKER = Int64ValueChecker()


def find_true_project(project, connection):
"""Find the true (unaliased) project.
If the given ID already has a 's~' or 'e~' prefix, does nothing.
Otherwise, looks up a bogus Key('__MissingLookupKind', 1) and reads the
true prefixed project from the response (either from found or from
missing).
For some context, see:
github.com/GoogleCloudPlatform/gcloud-python/pull/528
github.com/GoogleCloudPlatform/google-cloud-datastore/issues/59
:type project: string
:param project: The project to un-alias / prefix.
:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: A connection provided to connect to the project.
:rtype: string
:returns: The true / prefixed / un-aliased project.
"""
if project.startswith('s~') or project.startswith('e~'):
return project

# Create the bogus Key protobuf to be looked up and remove
# the project so the backend won't complain.
bogus_key_pb = Key('__MissingLookupKind', 1,
project=project).to_protobuf()
bogus_key_pb.partition_id.ClearField('project_id')

found_pbs, missing_pbs, _ = connection.lookup(project, [bogus_key_pb])
# By not passing in `deferred`, lookup will continue until
# all results are `found` or `missing`.
all_pbs = missing_pbs + found_pbs
# We only asked for one, so should only receive one.
returned_pb, = all_pbs

return returned_pb.key.partition_id.project_id


def _get_meaning(value_pb, is_list=False):
"""Get the meaning from a protobuf value.
@@ -435,33 +395,6 @@ def _set_protobuf_value(value_pb, val):
setattr(value_pb, attr, val)


def _prepare_key_for_request(key_pb):
"""Add protobuf keys to a request object.
:type key_pb: :class:`gcloud.datastore._generated.entity_pb2.Key`
:param key_pb: A key to be added to a request.
:rtype: :class:`gcloud.datastore._generated.entity_pb2.Key`
:returns: A key which will be added to a request. It will be the
original if nothing needs to be changed.
"""
if key_pb.partition_id.project_id: # Simple field (string)
# We remove the project_id from the protobuf. This is because
# the backend fails a request if the key contains un-prefixed
# project. The backend fails because requests to
# /v1beta3/projects/foo:...
# and
# /v1beta3/projects/s~foo:...
# both go to the datastore given by 's~foo'. So if the key
# protobuf in the request body has project_id='foo', the
# backend will reject since 'foo' != 's~foo'.
new_key_pb = _entity_pb2.Key()
new_key_pb.CopyFrom(key_pb)
new_key_pb.partition_id.ClearField('project_id')
key_pb = new_key_pb
return key_pb


class GeoPoint(object):
"""Simple container for a geo point value.
6 changes: 2 additions & 4 deletions gcloud/datastore/query.py
@@ -486,8 +486,7 @@ def _pb_from_query(query):
composite_filter.op = _query_pb2.CompositeFilter.AND

if query.ancestor:
- ancestor_pb = helpers._prepare_key_for_request(
-     query.ancestor.to_protobuf())
+ ancestor_pb = query.ancestor.to_protobuf()

# Filter on __key__ HAS_ANCESTOR == ancestor.
ancestor_filter = composite_filter.filters.add().property_filter
@@ -506,8 +505,7 @@
# Set the value to filter on based on the type.
if property_name == '__key__':
key_pb = value.to_protobuf()
- property_filter.value.key_value.CopyFrom(
-     helpers._prepare_key_for_request(key_pb))
+ property_filter.value.key_value.CopyFrom(key_pb)
else:
helpers._set_protobuf_value(property_filter.value, value)

33 changes: 11 additions & 22 deletions gcloud/datastore/test_connection.py
@@ -249,7 +249,7 @@ def test_lookup_single_key_empty_response(self):
request.ParseFromString(cw['body'])
keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])

def test_lookup_single_key_empty_response_w_eventual(self):
from gcloud.datastore._generated import datastore_pb2
@@ -277,7 +277,7 @@ def test_lookup_single_key_empty_response_w_eventual(self):
request.ParseFromString(cw['body'])
keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])
self.assertEqual(request.read_options.read_consistency,
datastore_pb2.ReadOptions.EVENTUAL)
self.assertEqual(request.read_options.transaction, b'')
@@ -317,7 +317,7 @@ def test_lookup_single_key_empty_response_w_transaction(self):
request.ParseFromString(cw['body'])
keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])
self.assertEqual(request.read_options.transaction, TRANSACTION)

def test_lookup_single_key_nonempty_response(self):
@@ -350,7 +350,7 @@ def test_lookup_single_key_nonempty_response(self):
request.ParseFromString(cw['body'])
keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])

def test_lookup_multiple_keys_empty_response(self):
from gcloud.datastore._generated import datastore_pb2
@@ -378,8 +378,8 @@ def test_lookup_multiple_keys_empty_response(self):
request.ParseFromString(cw['body'])
keys = list(request.keys)
self.assertEqual(len(keys), 2)
- _compare_key_pb_after_request(self, key_pb1, keys[0])
- _compare_key_pb_after_request(self, key_pb2, keys[1])
+ self.assertEqual(key_pb1, keys[0])
+ self.assertEqual(key_pb2, keys[1])

def test_lookup_multiple_keys_w_missing(self):
from gcloud.datastore._generated import datastore_pb2
@@ -412,8 +412,8 @@ def test_lookup_multiple_keys_w_missing(self):
request.ParseFromString(cw['body'])
keys = list(request.keys)
self.assertEqual(len(keys), 2)
- _compare_key_pb_after_request(self, key_pb1, keys[0])
- _compare_key_pb_after_request(self, key_pb2, keys[1])
+ self.assertEqual(key_pb1, keys[0])
+ self.assertEqual(key_pb2, keys[1])

def test_lookup_multiple_keys_w_deferred(self):
from gcloud.datastore._generated import datastore_pb2
@@ -448,8 +448,8 @@ def test_lookup_multiple_keys_w_deferred(self):
request.ParseFromString(cw['body'])
keys = list(request.keys)
self.assertEqual(len(keys), 2)
- _compare_key_pb_after_request(self, key_pb1, keys[0])
- _compare_key_pb_after_request(self, key_pb2, keys[1])
+ self.assertEqual(key_pb1, keys[0])
+ self.assertEqual(key_pb2, keys[1])

def test_run_query_w_eventual_no_transaction(self):
from gcloud.datastore._generated import datastore_pb2
@@ -800,7 +800,7 @@ def test_allocate_ids_non_empty(self):
request.ParseFromString(cw['body'])
self.assertEqual(len(request.keys), len(before_key_pbs))
for key_before, key_after in zip(before_key_pbs, request.keys):
- _compare_key_pb_after_request(self, key_before, key_after)
+ self.assertEqual(key_before, key_after)


class Test__parse_commit_response(unittest2.TestCase):
@@ -856,17 +856,6 @@ def request(self, **kw):
return self._response, self._content


def _compare_key_pb_after_request(test, key_before, key_after):
# Unset values are False-y.
test.assertEqual(key_after.partition_id.project_id, '')
test.assertEqual(key_before.partition_id.namespace_id,
key_after.partition_id.namespace_id)
test.assertEqual(len(key_before.path),
len(key_after.path))
for elt1, elt2 in zip(key_before.path, key_after.path):
test.assertEqual(elt1, elt2)


class _PathElementProto(object):

def __init__(self, _id):
112 changes: 0 additions & 112 deletions gcloud/datastore/test_helpers.py
@@ -774,86 +774,6 @@ def test_geo_point(self):
self.assertEqual(pb.geo_point_value, geo_pt_pb)


class Test__prepare_key_for_request(unittest2.TestCase):

def _callFUT(self, key_pb):
from gcloud.datastore.helpers import _prepare_key_for_request

return _prepare_key_for_request(key_pb)

def test_prepare_project_valid(self):
from gcloud.datastore._generated import entity_pb2
key = entity_pb2.Key()
key.partition_id.project_id = 'foo'
new_key = self._callFUT(key)
self.assertFalse(new_key is key)

key_without = entity_pb2.Key()
new_key.ClearField('partition_id')
self.assertEqual(new_key, key_without)

def test_prepare_project_unset(self):
from gcloud.datastore._generated import entity_pb2
key = entity_pb2.Key()
new_key = self._callFUT(key)
self.assertTrue(new_key is key)


class Test_find_true_project(unittest2.TestCase):

def _callFUT(self, project, connection):
from gcloud.datastore.helpers import find_true_project
return find_true_project(project, connection)

def test_prefixed(self):
PREFIXED = 's~PROJECT'
result = self._callFUT(PREFIXED, object())
self.assertEqual(PREFIXED, result)

def test_unprefixed_bogus_key_miss(self):
UNPREFIXED = 'PROJECT'
PREFIX = 's~'
CONNECTION = _Connection(PREFIX, from_missing=False)
result = self._callFUT(UNPREFIXED, CONNECTION)

self.assertEqual(CONNECTION._called_project, UNPREFIXED)

self.assertEqual(len(CONNECTION._lookup_result), 1)

# Make sure just one.
called_key_pb, = CONNECTION._called_key_pbs
path_element = called_key_pb.path
self.assertEqual(len(path_element), 1)
self.assertEqual(path_element[0].kind, '__MissingLookupKind')
self.assertEqual(path_element[0].id, 1)
# Unset values are False-y.
self.assertEqual(path_element[0].name, '')

PREFIXED = PREFIX + UNPREFIXED
self.assertEqual(result, PREFIXED)

def test_unprefixed_bogus_key_hit(self):
UNPREFIXED = 'PROJECT'
PREFIX = 'e~'
CONNECTION = _Connection(PREFIX, from_missing=True)
result = self._callFUT(UNPREFIXED, CONNECTION)

self.assertEqual(CONNECTION._called_project, UNPREFIXED)
self.assertEqual(CONNECTION._lookup_result, [])

# Make sure just one.
called_key_pb, = CONNECTION._called_key_pbs
path_element = called_key_pb.path
self.assertEqual(len(path_element), 1)
self.assertEqual(path_element[0].kind, '__MissingLookupKind')
self.assertEqual(path_element[0].id, 1)
# Unset values are False-y.
self.assertEqual(path_element[0].name, '')

PREFIXED = PREFIX + UNPREFIXED
self.assertEqual(result, PREFIXED)


class Test__get_meaning(unittest2.TestCase):

def _callFUT(self, *args, **kwargs):
@@ -986,35 +906,3 @@ def test___ne__(self):
geo_pt1 = self._makeOne(0.0, 1.0)
geo_pt2 = self._makeOne(2.0, 3.0)
self.assertNotEqual(geo_pt1, geo_pt2)


class _Connection(object):

_called_project = _called_key_pbs = _lookup_result = None

def __init__(self, prefix, from_missing=False):
self.prefix = prefix
self.from_missing = from_missing

def lookup(self, project, key_pbs):
from gcloud.datastore._generated import entity_pb2

# Store the arguments called with.
self._called_project = project
self._called_key_pbs = key_pbs

key_pb, = key_pbs

response = entity_pb2.Entity()
response.key.CopyFrom(key_pb)
response.key.partition_id.project_id = self.prefix + project

missing = []
deferred = []
if self.from_missing:
missing[:] = [response]
self._lookup_result = []
else:
self._lookup_result = [response]

return self._lookup_result, missing, deferred