Result.single() throws error if not exactly one record
Also include a minor performance improvement for repeated calls of
`Result.peek()` and `.graph()`.
robsdedude committed Jan 20, 2022
1 parent 5b0a5ad commit 7c03b6e
Showing 6 changed files with 47 additions and 94 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -52,6 +52,8 @@
- Creation of a driver with `bolt[+s[sc]]://` scheme now raises an error if the
URI contains a query part (a routing context). Previously, the routing context
was silently ignored.
- `Result.single` now raises `ResultNotSingleError` if not exactly one result is
available.

## Version 4.4

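As an aside for readers of this change: a minimal, hedged sketch of how calling code might adapt to the new `single()` behaviour. The URI, credentials, and query below are hypothetical, and the `except` branch merely restores the old "return `None`" semantics for callers that depended on it.

```python
from neo4j import GraphDatabase
from neo4j.exceptions import ResultNotSingleError

# Hypothetical URI, credentials, and query -- for illustration only.
driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "secret"))

with driver.session() as session:
    result = session.run("MATCH (p:Person {name: $name}) RETURN p", name="Alice")
    try:
        # With this change, single() raises if the result does not contain
        # exactly one record (previously: None for zero records, a warning
        # for more than one).
        record = result.single()
    except ResultNotSingleError:
        record = None

driver.close()
```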
43 changes: 22 additions & 21 deletions neo4j/_async/work/result.py
@@ -21,6 +21,7 @@

from ..._async_compat.util import AsyncUtil
from ...data import DataDehydrator
from ...exceptions import ResultNotSingleError
from ...work import ResultSummary
from ..io import ConnectionErrorHandler

@@ -37,6 +38,7 @@ def __init__(self, connection, hydrant, fetch_size, on_closed,
self._hydrant = hydrant
self._on_closed = on_closed
self._metadata = None
self._keys = None
self._record_buffer = deque()
self._summary = None
self._bookmark = None
@@ -179,7 +181,7 @@ def on_success(summary_metadata):
async def __aiter__(self):
"""Iterator returning Records.
:returns: Record, it is an immutable ordered collection of key-value pairs.
:rtype: :class:`neo4j.Record`
:rtype: :class:`neo4j.AsyncRecord`
"""
while self._record_buffer or self._attached:
if self._record_buffer:
@@ -211,6 +213,8 @@ async def _buffer(self, n=None):
Might end up with fewer records in the buffer if there are not enough
records available.
"""
if n is not None and len(self._record_buffer) >= n:
return
record_buffer = deque()
async for record in self:
record_buffer.append(record)
@@ -304,24 +308,21 @@ async def single(self):
A warning is generated if more than one record is available but
the first of these is still returned.
:returns: the next :class:`neo4j.Record` or :const:`None` if none remain
:warns: if more than one record is available
:returns: the next :class:`neo4j.AsyncRecord`.
:raises: ResultNotSingleError if not exactly one record is available.
"""
# TODO in 5.0 replace with this code that raises an error if there's not
# exactly one record in the left result stream.
# self._buffer(2).
# if len(self._record_buffer) != 1:
# raise SomeError("Expected exactly 1 record, found %i"
# % len(self._record_buffer))
# return self._record_buffer.popleft()
# TODO: exhausts the result with self.consume if there are more records.
records = await AsyncUtil.list(self)
size = len(records)
if size == 0:
return None
if size != 1:
warn("Expected a result with a single record, but this result contains %d" % size)
return records[0]
await self._buffer(2)
if not self._record_buffer:
raise ResultNotSingleError(
"No records found. "
"Make sure your query returns exactly one record."
)
elif len(self._record_buffer) > 1:
raise ResultNotSingleError(
"More than one record found. "
"Make sure your query returns exactly one record."
)
return self._record_buffer.popleft()

async def peek(self):
"""Obtain the next record from this result without consuming it.
@@ -347,7 +348,7 @@ async def graph(self):
async def value(self, key=0, default=None):
"""Helper function that return the remainder of the result as a list of values.
See :class:`neo4j.Record.value`
See :class:`neo4j.AsyncRecord.value`
:param key: field to return for each remaining record. Obtain a single value from the record by index or key.
:param default: default value, used if the index of key is unavailable
@@ -359,7 +360,7 @@ async def value(self, key=0, default=None):
async def values(self, *keys):
"""Helper function that return the remainder of the result as a list of values lists.
See :class:`neo4j.Record.values`
See :class:`neo4j.AsyncRecord.values`
:param keys: fields to return for each remaining record. Optionally filtering to include only certain values by index or key.
:returns: list of values lists
@@ -370,7 +371,7 @@ async def values(self, *keys):
async def data(self, *keys):
"""Helper function that return the remainder of the result as a list of dictionaries.
See :class:`neo4j.Record.data`
See :class:`neo4j.AsyncRecord.data`
:param keys: fields to return for each remaining record. Optionally filtering to include only certain values by index or key.
:returns: list of dictionaries
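The early return added to `_buffer()` is where the performance improvement mentioned in the commit message comes from: once enough records sit in the buffer, repeated `peek()` calls no longer iterate the underlying stream. A simplified, stand-alone sketch of that pattern follows; it is not the driver's actual class, and the plain iterator stands in for the Bolt result stream.

```python
from collections import deque

class BufferedResultSketch:
    """Stand-alone illustration of the buffering pattern, not the real Result."""

    def __init__(self, records):
        self._source = iter(records)        # stand-in for the Bolt stream
        self._record_buffer = deque()

    def _buffer(self, n=None):
        # The early return added by this commit: if n records are already
        # buffered, do not touch the source again.
        if n is not None and len(self._record_buffer) >= n:
            return
        for record in self._source:
            self._record_buffer.append(record)
            if n is not None and len(self._record_buffer) >= n:
                break

    def peek(self):
        # Each peek() buffers one record; repeated calls now hit the early
        # return instead of re-reading from the source.
        self._buffer(1)
        return self._record_buffer[0] if self._record_buffer else None
```

With `BufferedResultSketch([1, 2, 3])`, two consecutive `peek()` calls read from the source only once.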
35 changes: 18 additions & 17 deletions neo4j/_sync/work/result.py
@@ -21,6 +21,7 @@

from ..._async_compat.util import Util
from ...data import DataDehydrator
from ...exceptions import ResultNotSingleError
from ...work import ResultSummary
from ..io import ConnectionErrorHandler

@@ -37,6 +38,7 @@ def __init__(self, connection, hydrant, fetch_size, on_closed,
self._hydrant = hydrant
self._on_closed = on_closed
self._metadata = None
self._keys = None
self._record_buffer = deque()
self._summary = None
self._bookmark = None
@@ -211,6 +213,8 @@ def _buffer(self, n=None):
Might end up with fewer records in the buffer if there are not enough
records available.
"""
if n is not None and len(self._record_buffer) >= n:
return
record_buffer = deque()
for record in self:
record_buffer.append(record)
@@ -304,24 +308,21 @@ def single(self):
A warning is generated if more than one record is available but
the first of these is still returned.
:returns: the next :class:`neo4j.Record` or :const:`None` if none remain
:warns: if more than one record is available
:returns: the next :class:`neo4j.Record`.
:raises: ResultNotSingleError if not exactly one record is available.
"""
# TODO in 5.0 replace with this code that raises an error if there's not
# exactly one record in the left result stream.
# self._buffer(2).
# if len(self._record_buffer) != 1:
# raise SomeError("Expected exactly 1 record, found %i"
# % len(self._record_buffer))
# return self._record_buffer.popleft()
# TODO: exhausts the result with self.consume if there are more records.
records = Util.list(self)
size = len(records)
if size == 0:
return None
if size != 1:
warn("Expected a result with a single record, but this result contains %d" % size)
return records[0]
self._buffer(2)
if not self._record_buffer:
raise ResultNotSingleError(
"No records found. "
"Make sure your query returns exactly one record."
)
elif len(self._record_buffer) > 1:
raise ResultNotSingleError(
"More than one record found. "
"Make sure your query returns exactly one record."
)
return self._record_buffer.popleft()

def peek(self):
"""Obtain the next record from this result without consuming it.
4 changes: 4 additions & 0 deletions neo4j/exceptions.py
@@ -328,6 +328,10 @@ class ResultConsumedError(DriverError):
"""


class ResultNotSingleError(DriverError):
"""Raised when result.single() detects not exactly one record in result."""


class ConfigurationError(DriverError):
""" Raised when there is an error concerning a configuration.
"""
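Because `ResultNotSingleError` derives from `DriverError`, existing handlers that catch the base class also catch the new exception. A small illustrative sketch; the `classify` helper is hypothetical:

```python
from neo4j.exceptions import DriverError, ResultNotSingleError

def classify(exc: Exception) -> str:
    # ResultNotSingleError is also caught by broader DriverError handlers.
    if isinstance(exc, ResultNotSingleError):
        return "query did not yield exactly one record"
    if isinstance(exc, DriverError):
        return "other driver-side error"
    return "unrelated error"
```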
2 changes: 1 addition & 1 deletion testkitbackend/test_config.json
@@ -29,7 +29,7 @@
"Feature:API:Liveness.Check": false,
"Feature:API:Result.List": true,
"Feature:API:Result.Peek": true,
"Feature:API:Result.Single": "Does not raise error when not exactly one record is available. To be fixed in 5.0.",
"Feature:API:Result.Single": true,
"Feature:API:SSLConfig": true,
"Feature:API:SSLSchemes": true,
"Feature:Auth:Bearer": true,
55 changes: 0 additions & 55 deletions tests/integration/test_result.py

This file was deleted.
