Skip to content

Commit

Permalink
Merge pull request #1299 from dhermes/datastore-query-file-shim
Browse files Browse the repository at this point in the history
Replacing datastore pb uses with query shim.
  • Loading branch information
dhermes committed Dec 17, 2015
2 parents 30bec2d + 5629136 commit b2f5d6c
Show file tree
Hide file tree
Showing 6 changed files with 85 additions and 43 deletions.
33 changes: 33 additions & 0 deletions gcloud/datastore/_query_pb2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Datastore shim to emulate v1beta3 module structure.

This module is intended to pair with ``query.proto``: it re-exports the
query-related protobuf message classes from the existing
``_datastore_v1_pb2`` module under a dedicated ``_query_pb2`` name, so
callers can import them as if they were generated from ``query.proto``.
"""

from gcloud.datastore import _datastore_v1_pb2


# Aliases for the query-related message classes. Each name below simply
# points at the class of the same name in ``_datastore_v1_pb2``; no new
# behavior is added here.
EntityResult = _datastore_v1_pb2.EntityResult
Query = _datastore_v1_pb2.Query
KindExpression = _datastore_v1_pb2.KindExpression
PropertyReference = _datastore_v1_pb2.PropertyReference
PropertyOrder = _datastore_v1_pb2.PropertyOrder
Filter = _datastore_v1_pb2.Filter
CompositeFilter = _datastore_v1_pb2.CompositeFilter
PropertyFilter = _datastore_v1_pb2.PropertyFilter
GqlQuery = _datastore_v1_pb2.GqlQuery
GqlQueryArg = _datastore_v1_pb2.GqlQueryArg
QueryResultBatch = _datastore_v1_pb2.QueryResultBatch
4 changes: 2 additions & 2 deletions gcloud/datastore/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def _extended_lookup(connection, dataset_id, key_pbs,
:type dataset_id: string
:param dataset_id: The ID of the dataset of which to make the request.
:type key_pbs: list of :class:`gcloud.datastore._datastore_v1_pb2.Key`
:type key_pbs: list of :class:`gcloud.datastore._entity_pb2.Key`
:param key_pbs: The keys to retrieve from the datastore.
:type missing: an empty list or None.
Expand All @@ -113,7 +113,7 @@ def _extended_lookup(connection, dataset_id, key_pbs,
the given transaction. Incompatible with
``eventual==True``.
:rtype: list of :class:`gcloud.datastore._datastore_v1_pb2.Entity`
:rtype: list of :class:`gcloud.datastore._entity_pb2.Entity`
:returns: The requested entities.
:raises: :class:`ValueError` if missing / deferred are not null or
empty list.
Expand Down
2 changes: 1 addition & 1 deletion gcloud/datastore/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ def run_query(self, dataset_id, query_pb, namespace=None,
:type dataset_id: string
:param dataset_id: The ID of the dataset over which to run the query.
:type query_pb: :class:`gcloud.datastore._datastore_v1_pb2.Query`
:type query_pb: :class:`gcloud.datastore._query_pb2.Query`
:param query_pb: The Protobuf representing the query to run.
:type namespace: string
Expand Down
26 changes: 13 additions & 13 deletions gcloud/datastore/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import base64

from gcloud._helpers import _ensure_tuple_or_list
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2
from gcloud.datastore import helpers
from gcloud.datastore.key import Key

Expand Down Expand Up @@ -64,11 +64,11 @@ class Query(object):
"""

OPERATORS = {
'<=': datastore_pb.PropertyFilter.LESS_THAN_OR_EQUAL,
'>=': datastore_pb.PropertyFilter.GREATER_THAN_OR_EQUAL,
'<': datastore_pb.PropertyFilter.LESS_THAN,
'>': datastore_pb.PropertyFilter.GREATER_THAN,
'=': datastore_pb.PropertyFilter.EQUAL,
'<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
'>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
'<': _query_pb2.PropertyFilter.LESS_THAN,
'>': _query_pb2.PropertyFilter.GREATER_THAN,
'=': _query_pb2.PropertyFilter.EQUAL,
}
"""Mapping of operator strings and their protobuf equivalents."""

Expand Down Expand Up @@ -359,11 +359,11 @@ class Iterator(object):
query results.
"""

_NOT_FINISHED = datastore_pb.QueryResultBatch.NOT_FINISHED
_NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED

_FINISHED = (
datastore_pb.QueryResultBatch.NO_MORE_RESULTS,
datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
_query_pb2.QueryResultBatch.NO_MORE_RESULTS,
_query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
)

def __init__(self, query, client, limit=None, offset=0,
Expand Down Expand Up @@ -456,12 +456,12 @@ def _pb_from_query(query):
:type query: :class:`Query`
:param query: The source query.
:rtype: :class:`gcloud.datastore._datastore_v1_pb2.Query`
:rtype: :class:`gcloud.datastore._query_pb2.Query`
:returns: A protobuf that can be sent to the protobuf API. N.b. that
it does not contain "in-flight" fields for ongoing query
executions (cursors, offset, limit).
"""
pb = datastore_pb.Query()
pb = _query_pb2.Query()

for projection_name in query.projection:
pb.projection.add().property.name = projection_name
Expand All @@ -470,7 +470,7 @@ def _pb_from_query(query):
pb.kind.add().name = query.kind

composite_filter = pb.filter.composite_filter
composite_filter.operator = datastore_pb.CompositeFilter.AND
composite_filter.operator = _query_pb2.CompositeFilter.AND

if query.ancestor:
ancestor_pb = helpers._prepare_key_for_request(
Expand All @@ -479,7 +479,7 @@ def _pb_from_query(query):
# Filter on __key__ HAS_ANCESTOR == ancestor.
ancestor_filter = composite_filter.filter.add().property_filter
ancestor_filter.property.name = '__key__'
ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR
ancestor_filter.operator = _query_pb2.PropertyFilter.HAS_ANCESTOR
ancestor_filter.value.key_value.CopyFrom(ancestor_pb)

for property_name, operator, value in query.filters:
Expand Down
24 changes: 14 additions & 10 deletions gcloud/datastore/test_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ def _make_key_pb(self, dataset_id, id=1234):
return Key(*path_args, dataset_id=dataset_id).to_protobuf()

def _make_query_pb(self, kind):
from gcloud.datastore.connection import datastore_pb
pb = datastore_pb.Query()
from gcloud.datastore import _query_pb2
pb = _query_pb2.Query()
pb.kind.add().name = kind
return pb

Expand Down Expand Up @@ -476,16 +476,17 @@ def test_lookup_multiple_keys_w_deferred(self):

def test_run_query_w_eventual_no_transaction(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2

DATASET_ID = 'DATASET'
KIND = 'Nonesuch'
CURSOR = b'\x00'
q_pb = self._make_query_pb(KIND)
rsp_pb = datastore_pb.RunQueryResponse()
rsp_pb.batch.end_cursor = CURSOR
no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
rsp_pb.batch.more_results = no_more
rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
Expand Down Expand Up @@ -515,6 +516,7 @@ def test_run_query_w_eventual_no_transaction(self):

def test_run_query_wo_eventual_w_transaction(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2

DATASET_ID = 'DATASET'
KIND = 'Nonesuch'
Expand All @@ -523,9 +525,9 @@ def test_run_query_wo_eventual_w_transaction(self):
q_pb = self._make_query_pb(KIND)
rsp_pb = datastore_pb.RunQueryResponse()
rsp_pb.batch.end_cursor = CURSOR
no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
rsp_pb.batch.more_results = no_more
rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
Expand Down Expand Up @@ -555,6 +557,7 @@ def test_run_query_wo_eventual_w_transaction(self):

def test_run_query_w_eventual_and_transaction(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2

DATASET_ID = 'DATASET'
KIND = 'Nonesuch'
Expand All @@ -563,25 +566,26 @@ def test_run_query_w_eventual_and_transaction(self):
q_pb = self._make_query_pb(KIND)
rsp_pb = datastore_pb.RunQueryResponse()
rsp_pb.batch.end_cursor = CURSOR
no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
rsp_pb.batch.more_results = no_more
rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
conn = self._makeOne()
self.assertRaises(ValueError, conn.run_query, DATASET_ID, q_pb,
eventual=True, transaction_id=TRANSACTION)

def test_run_query_wo_namespace_empty_result(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2

DATASET_ID = 'DATASET'
KIND = 'Nonesuch'
CURSOR = b'\x00'
q_pb = self._make_query_pb(KIND)
rsp_pb = datastore_pb.RunQueryResponse()
rsp_pb.batch.end_cursor = CURSOR
no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
rsp_pb.batch.more_results = no_more
rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
Expand Down
39 changes: 22 additions & 17 deletions gcloud/datastore/test_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,11 +326,11 @@ def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)

def _addQueryResults(self, connection, cursor=_END, more=False):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _entity_pb2
from gcloud.datastore import _query_pb2

MORE = datastore_pb.QueryResultBatch.NOT_FINISHED
NO_MORE = datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
MORE = _query_pb2.QueryResultBatch.NOT_FINISHED
NO_MORE = _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
_ID = 123
entity_pb = _entity_pb2.Entity()
entity_pb.key.partition_id.dataset_id = self._DATASET
Expand Down Expand Up @@ -531,15 +531,16 @@ def _callFUT(self, query):
return _pb_from_query(query)

def test_empty(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2

pb = self._callFUT(_Query())
self.assertEqual(list(pb.projection), [])
self.assertEqual(list(pb.kind), [])
self.assertEqual(list(pb.order), [])
self.assertEqual(list(pb.group_by), [])
self.assertEqual(pb.filter.property_filter.property.name, '')
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
self.assertEqual(list(cfilter.filter), [])
self.assertEqual(pb.start_cursor, b'')
self.assertEqual(pb.end_cursor, b'')
Expand All @@ -556,60 +557,64 @@ def test_kind(self):
self.assertEqual([item.name for item in pb.kind], ['KIND'])

def test_ancestor(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore.key import Key
from gcloud.datastore.helpers import _prepare_key_for_request
from gcloud.datastore import _query_pb2

ancestor = Key('Ancestor', 123, dataset_id='DATASET')
pb = self._callFUT(_Query(ancestor=ancestor))
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
self.assertEqual(len(cfilter.filter), 1)
pfilter = cfilter.filter[0].property_filter
self.assertEqual(pfilter.property.name, '__key__')
ancestor_pb = _prepare_key_for_request(ancestor.to_protobuf())
self.assertEqual(pfilter.value.key_value, ancestor_pb)

def test_filter(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2

query = _Query(filters=[('name', '=', u'John')])
query.OPERATORS = {
'=': datastore_pb.PropertyFilter.EQUAL,
'=': _query_pb2.PropertyFilter.EQUAL,
}
pb = self._callFUT(query)
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
self.assertEqual(len(cfilter.filter), 1)
pfilter = cfilter.filter[0].property_filter
self.assertEqual(pfilter.property.name, 'name')
self.assertEqual(pfilter.value.string_value, u'John')

def test_filter_key(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore.key import Key
from gcloud.datastore.helpers import _prepare_key_for_request
from gcloud.datastore import _query_pb2

key = Key('Kind', 123, dataset_id='DATASET')
query = _Query(filters=[('__key__', '=', key)])
query.OPERATORS = {
'=': datastore_pb.PropertyFilter.EQUAL,
'=': _query_pb2.PropertyFilter.EQUAL,
}
pb = self._callFUT(query)
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
self.assertEqual(len(cfilter.filter), 1)
pfilter = cfilter.filter[0].property_filter
self.assertEqual(pfilter.property.name, '__key__')
key_pb = _prepare_key_for_request(key.to_protobuf())
self.assertEqual(pfilter.value.key_value, key_pb)

def test_order(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import _query_pb2

pb = self._callFUT(_Query(order=['a', '-b', 'c']))
self.assertEqual([item.property.name for item in pb.order],
['a', 'b', 'c'])
self.assertEqual([item.direction for item in pb.order],
[datastore_pb.PropertyOrder.ASCENDING,
datastore_pb.PropertyOrder.DESCENDING,
datastore_pb.PropertyOrder.ASCENDING])
[_query_pb2.PropertyOrder.ASCENDING,
_query_pb2.PropertyOrder.DESCENDING,
_query_pb2.PropertyOrder.ASCENDING])

def test_group_by(self):
pb = self._callFUT(_Query(group_by=['a', 'b', 'c']))
Expand Down

0 comments on commit b2f5d6c

Please sign in to comment.