From c10939080c8841382a4be2e3b0b0e202e946095d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 23 Dec 2015 01:06:37 -0800 Subject: [PATCH] Replacing uses of HasField with _has_field in datastore. --- gcloud/datastore/connection.py | 3 ++- gcloud/datastore/helpers.py | 31 +++++++++++++++-------------- gcloud/datastore/test_connection.py | 3 ++- gcloud/datastore/test_helpers.py | 12 ++++++++--- gcloud/datastore/test_key.py | 12 +++++++---- 5 files changed, 37 insertions(+), 24 deletions(-) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 6b48f74e1d303..5b2c5d8bd6b2e 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -16,6 +16,7 @@ import os +from gcloud._helpers import _has_field from gcloud import connection from gcloud.environment_vars import GCD_HOST from gcloud.exceptions import make_exception @@ -399,7 +400,7 @@ def _prepare_key_for_request(key_pb): # pragma: NO COVER copied from helpers :returns: A key which will be added to a request. It will be the original if nothing needs to be changed. """ - if key_pb.partition_id.HasField('dataset_id'): + if _has_field(key_pb.partition_id, 'dataset_id'): new_key_pb = _entity_pb2.Key() new_key_pb.CopyFrom(key_pb) new_key_pb.partition_id.ClearField('dataset_id') diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index b7488e1b3d3be..8462772929cfe 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -23,6 +23,7 @@ import six from gcloud._helpers import _datetime_from_microseconds +from gcloud._helpers import _has_field from gcloud._helpers import _microseconds_from_datetime from gcloud.datastore._generated import entity_pb2 as _entity_pb2 from gcloud.datastore.entity import Entity @@ -109,7 +110,7 @@ def _get_meaning(value_pb, is_list=False): if all_meanings: raise ValueError('Different meanings set on values ' 'within a list_value') - elif value_pb.HasField('meaning'): + elif _has_field(value_pb, 'meaning'): meaning = value_pb.meaning return meaning @@ -128,7 +129,7 @@ def entity_from_protobuf(pb): :returns: The entity derived from the protobuf. """ key = None - if pb.HasField('key'): + if _has_field(pb, 'key'): key = key_from_protobuf(pb.key) entity_props = {} @@ -232,18 +233,18 @@ def key_from_protobuf(pb): path_args = [] for element in pb.path_element: path_args.append(element.kind) - if element.HasField('id'): + if _has_field(element, 'id'): path_args.append(element.id) # This is safe: we expect proto objects returned will only have # one of `name` or `id` set. - if element.HasField('name'): + if _has_field(element, 'name'): path_args.append(element.name) dataset_id = None - if pb.partition_id.HasField('dataset_id'): + if _has_field(pb.partition_id, 'dataset_id'): dataset_id = pb.partition_id.dataset_id namespace = None - if pb.partition_id.HasField('namespace'): + if _has_field(pb.partition_id, 'namespace'): namespace = pb.partition_id.namespace return Key(*path_args, namespace=namespace, dataset_id=dataset_id) @@ -323,29 +324,29 @@ def _get_value_from_value_pb(value_pb): :returns: The value provided by the Protobuf. 
""" result = None - if value_pb.HasField('timestamp_microseconds_value'): + if _has_field(value_pb, 'timestamp_microseconds_value'): microseconds = value_pb.timestamp_microseconds_value result = _datetime_from_microseconds(microseconds) - elif value_pb.HasField('key_value'): + elif _has_field(value_pb, 'key_value'): result = key_from_protobuf(value_pb.key_value) - elif value_pb.HasField('boolean_value'): + elif _has_field(value_pb, 'boolean_value'): result = value_pb.boolean_value - elif value_pb.HasField('double_value'): + elif _has_field(value_pb, 'double_value'): result = value_pb.double_value - elif value_pb.HasField('integer_value'): + elif _has_field(value_pb, 'integer_value'): result = value_pb.integer_value - elif value_pb.HasField('string_value'): + elif _has_field(value_pb, 'string_value'): result = value_pb.string_value - elif value_pb.HasField('blob_value'): + elif _has_field(value_pb, 'blob_value'): result = value_pb.blob_value - elif value_pb.HasField('entity_value'): + elif _has_field(value_pb, 'entity_value'): result = entity_from_protobuf(value_pb.entity_value) elif value_pb.list_value: @@ -401,7 +402,7 @@ def _prepare_key_for_request(key_pb): :returns: A key which will be added to a request. It will be the original if nothing needs to be changed. """ - if key_pb.partition_id.HasField('dataset_id'): + if _has_field(key_pb.partition_id, 'dataset_id'): # We remove the dataset_id from the protobuf. This is because # the backend fails a request if the key contains un-prefixed # dataset ID. The backend fails because requests to diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index 2f49f4bde06de..aab3bf848f079 100644 --- a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -898,7 +898,8 @@ def request(self, **kw): def _compare_key_pb_after_request(test, key_before, key_after): - test.assertFalse(key_after.partition_id.HasField('dataset_id')) + from gcloud._helpers import _has_field + test.assertFalse(_has_field(key_after.partition_id, 'dataset_id')) test.assertEqual(key_before.partition_id.namespace, key_after.partition_id.namespace) test.assertEqual(len(key_before.path_element), diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py index de2f7ed41d789..d1e0916c7f02d 100644 --- a/gcloud/datastore/test_helpers.py +++ b/gcloud/datastore/test_helpers.py @@ -159,6 +159,8 @@ def _callFUT(self, entity): return entity_to_protobuf(entity) def _compareEntityProto(self, entity_pb1, entity_pb2): + from gcloud._helpers import _has_field + import operator self.assertEqual(entity_pb1.key, entity_pb2.key) name_getter = operator.attrgetter('name') @@ -166,7 +168,7 @@ def _compareEntityProto(self, entity_pb1, entity_pb2): prop_list2 = sorted(entity_pb2.property, key=name_getter) self.assertEqual(len(prop_list1), len(prop_list2)) for val1, val2 in zip(prop_list1, prop_list2): - if val1.value.HasField('entity_value'): + if _has_field(val1.value, 'entity_value'): self.assertEqual(val1.name, val2.name) self.assertEqual(val1.value.meaning, val2.value.meaning) self._compareEntityProto(val1.value.entity_value, @@ -727,6 +729,8 @@ def test_prefixed(self): self.assertEqual(PREFIXED, result) def test_unprefixed_bogus_key_miss(self): + from gcloud._helpers import _has_field + UNPREFIXED = 'DATASET' PREFIX = 's~' CONNECTION = _Connection(PREFIX, from_missing=False) @@ -742,12 +746,14 @@ def test_unprefixed_bogus_key_miss(self): self.assertEqual(len(path_element), 1) self.assertEqual(path_element[0].kind, 
'__MissingLookupKind') self.assertEqual(path_element[0].id, 1) - self.assertFalse(path_element[0].HasField('name')) + self.assertFalse(_has_field(path_element[0], 'name')) PREFIXED = PREFIX + UNPREFIXED self.assertEqual(result, PREFIXED) def test_unprefixed_bogus_key_hit(self): + from gcloud._helpers import _has_field + UNPREFIXED = 'DATASET' PREFIX = 'e~' CONNECTION = _Connection(PREFIX, from_missing=True) @@ -762,7 +768,7 @@ def test_unprefixed_bogus_key_hit(self): self.assertEqual(len(path_element), 1) self.assertEqual(path_element[0].kind, '__MissingLookupKind') self.assertEqual(path_element[0].id, 1) - self.assertFalse(path_element[0].HasField('name')) + self.assertFalse(_has_field(path_element[0], 'name')) PREFIXED = PREFIX + UNPREFIXED self.assertEqual(result, PREFIXED) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index 77e7b51565654..a5a889d353ece 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -333,7 +333,9 @@ def test_completed_key_on_complete(self): self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): + from gcloud._helpers import _has_field from gcloud.datastore._generated import entity_pb2 + _KIND = 'KIND' key = self._makeOne(_KIND, dataset_id=self._DEFAULT_DATASET) pb = key.to_protobuf() @@ -342,15 +344,15 @@ def test_to_protobuf_defaults(self): # Check partition ID. self.assertEqual(pb.partition_id.dataset_id, self._DEFAULT_DATASET) self.assertEqual(pb.partition_id.namespace, '') - self.assertFalse(pb.partition_id.HasField('namespace')) + self.assertFalse(_has_field(pb.partition_id, 'namespace')) # Check the element PB matches the partial key and kind. elem, = list(pb.path_element) self.assertEqual(elem.kind, _KIND) self.assertEqual(elem.name, '') - self.assertFalse(elem.HasField('name')) + self.assertFalse(_has_field(elem, 'name')) self.assertEqual(elem.id, 0) - self.assertFalse(elem.HasField('id')) + self.assertFalse(_has_field(elem, 'id')) def test_to_protobuf_w_explicit_dataset_id(self): _DATASET = 'DATASET-ALT' @@ -381,12 +383,14 @@ def test_to_protobuf_w_explicit_path(self): self.assertEqual(elems[1].id, _ID) def test_to_protobuf_w_no_kind(self): + from gcloud._helpers import _has_field + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) # Force the 'kind' to be unset. Maybe `to_protobuf` should fail # on this? The backend certainly will. key._path[-1].pop('kind') pb = key.to_protobuf() - self.assertFalse(pb.path_element[0].HasField('kind')) + self.assertFalse(_has_field(pb.path_element[0], 'kind')) def test_is_partial_no_name_or_id(self): key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET)
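
Note on _has_field: the patch imports _has_field from gcloud._helpers but never
shows its body. The helper is presumably needed because proto3-generated
messages raise ValueError when HasField() is called on a singular scalar field
(such as dataset_id, id, or name), while message-typed fields still support it.
A minimal sketch of such a compatibility shim, using only public protobuf APIs,
might look like the following; the ListFields() fallback is an assumption for
illustration, not necessarily the library's actual implementation:

    def _has_field(message_pb, property_name):
        """Check whether ``property_name`` is set on ``message_pb``.

        ``HasField`` covers message-typed (and proto2) fields; proto3 raises
        ``ValueError`` for singular scalar fields, so fall back to the set of
        populated fields reported by ``ListFields``.
        """
        try:
            return message_pb.HasField(property_name)
        except ValueError:
            # ListFields() yields (FieldDescriptor, value) pairs for every
            # field currently set to a non-default value.
            set_fields = set(field.name for field, _ in message_pb.ListFields())
            return property_name in set_fields

With a shim along these lines, call sites read
_has_field(pb.partition_id, 'dataset_id') instead of
pb.partition_id.HasField('dataset_id'), which is exactly the substitution made
throughout this patch, and the tests that assert a default-valued scalar (e.g.
elem.id == 0) is "unset" keep passing under proto3 semantics.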