diff --git a/.travis.yml b/.travis.yml
index 5e78b69..dd118b2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,16 @@
 sudo: false
+
 language: python
+python: 3.7
+
+dist: xenial
 
 services:
-  - mysql
+  - docker
+
+before_install:
+  - make docker-mysql-run
+  - make docker-postgres-run
 
 install:
   - pip install tox
@@ -28,7 +36,6 @@ matrix:
     - stage: test
       python: 3.7
      env: TOX_ENV=py37
-      dist: xenial
 
    - stage: deploy
      script: skip
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index ff98e50..e18deae 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -10,7 +10,7 @@ Version 0.8.0
 
 Released 2018-06-25
 
-* Adds support for `ilike` (case-insensitive) string comparison.
+* Adds support for ``ilike`` (case-insensitive) string comparison.
 
 
 Version 0.7.0
@@ -51,7 +51,8 @@ Released 2017-05-22
 * Adds support for boolean functions within filters
 * Adds the possibility of supplying a single dictionary as filters when
   only one filter is provided
-* Makes the `op` filter attribute optional: `==` is the default operator
+* Makes the ``op`` filter attribute optional: ``==`` is the default
+  operator
 
 Version 0.2.0
 -------------
diff --git a/Makefile b/Makefile
index 6277c98..10b9e90 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,8 @@
 .PHONY: test
 
+POSTGRES_VERSION?=9.6
+MYSQL_VERSION?=5.7
+
 rst-lint:
 	rst-lint README.rst
 
@@ -14,3 +17,19 @@ test: flake8
 coverage: flake8 rst-lint
 	coverage run --source sqlalchemy_filters -m pytest test $(ARGS)
 	coverage report -m --fail-under 100
+
+
+# Docker test containers
+
+docker-mysql-run:
+	docker run -d --rm --name mysql-sqlalchemy-filters -p 3306:3306 \
+		-e MYSQL_ALLOW_EMPTY_PASSWORD=yes \
+		mysql:$(MYSQL_VERSION)
+
+docker-postgres-run:
+	docker run -d --rm --name postgres-sqlalchemy-filters -p 5432:5432 \
+		-e POSTGRES_USER=postgres \
+		-e POSTGRES_PASSWORD= \
+		-e POSTGRES_DB=test_sqlalchemy_filters \
+		-e POSTGRES_INITDB_ARGS="--encoding=UTF8 --lc-collate=en_US.utf8 --lc-ctype=en_US.utf8" \
+		postgres:$(POSTGRES_VERSION)
diff --git a/README.rst b/README.rst
index e62dda0..3972e6a 100644
--- a/README.rst
+++ b/README.rst
@@ -20,7 +20,7 @@ SQLAlchemy-filters
 Filtering
 ---------
 
-Assuming that we have a SQLAlchemy `query` object:
+Assuming that we have a SQLAlchemy ``query`` object:
 
 .. code-block:: python
 
@@ -61,7 +61,8 @@ Then we can apply filters to that ``query`` object (multiple times):
 
     result = filtered_query.all()
 
-It is also possible to filter queries that contain multiple models, including joins:
+It is also possible to filter queries that contain multiple models,
+including joins:
 
 .. code-block:: python
 
@@ -84,7 +85,10 @@ It is also possible to filter queries that contain multiple models, including jo
 
     result = filtered_query.all()
 
-`apply_filters` will attempt to automatically join models to `query` if they're not already present and a model-specific filter is supplied. For example, the value of `filtered_query` in the following two code blocks is identical:
+``apply_filters`` will attempt to automatically join models to ``query``
+if they're not already present and a model-specific filter is supplied.
+For example, the value of ``filtered_query`` in the following two code
+blocks is identical:
 
 .. code-block:: python
 
@@ -106,13 +110,20 @@ It is also possible to filter queries that contain multiple models, including jo
     ]
     filtered_query = apply_filters(query, filter_spec)
 
-The automatic join is only possible if sqlalchemy can implictly determine the condition for the join, for example because of a foreign key relationship.
+The automatic join is only possible if SQLAlchemy can implicitly
+determine the condition for the join, for example because of a foreign
+key relationship.
 
-Automatic joins allow flexibility for clients to filter and sort by related objects without specifying all possible joins on the server beforehand.
+Automatic joins allow flexibility for clients to filter and sort by
+related objects without specifying all possible joins on the server
+beforehand.
 
-Note that first filter of the second block does not specify a model. It is implictly applied to the `Foo` model because that is the only model in the original query passed to `apply_filters`.
+Note that the first filter of the second block does not specify a model.
+It is implicitly applied to the ``Foo`` model because that is the only
+model in the original query passed to ``apply_filters``.
 
-It is also possible to apply filters to queries defined by fields or functions:
+It is also possible to apply filters to queries defined by fields or
+functions:
 
 .. code-block:: python
 
@@ -123,8 +134,8 @@ It is also possible to apply filters to queries defined by fields or functions:
 Restricted Loads
 ----------------
 
-You can restrict the fields that SQLAlchemy loads from the database by using
-the `apply_loads` function:
+You can restrict the fields that SQLAlchemy loads from the database by
+using the ``apply_loads`` function:
 
 .. code-block:: python
 
@@ -136,13 +147,18 @@ the `apply_loads` function:
 
     query = apply_loads(query, load_spec)  # will load only Foo.name and Bar.count
 
-The effect of the `apply_loads` function is to _defer_ the load of any other fields to when/if they're accessed, rather than loading them when the query is executed. It only applies to fields that would be loaded during normal query execution.
+The effect of the ``apply_loads`` function is to *defer* the load
+of any other fields to when/if they're accessed, rather than loading
+them when the query is executed. It only applies to fields that would be
+loaded during normal query execution.
 
 
 Effect on joined queries
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
-The default SQLAlchemy join is lazy, meaning that columns from the joined table are loaded only when required. Therefore `apply_loads` has limited effect in the following scenario:
+The default SQLAlchemy join is lazy, meaning that columns from the
+joined table are loaded only when required. Therefore ``apply_loads``
+has limited effect in the following scenario:
 
 .. code-block:: python
 
@@ -154,9 +170,14 @@ The default SQLAlchemy join is lazy, meaning that columns from the joined table
 
     query = apply_loads(query, load_spec)  # will load only Foo.name
 
-`apply_loads` cannot be applied to columns that are loaded as `joined eager loads `_. This is because a joined eager load does not add the joined model to the original query, as explained `here `_
+``apply_loads`` cannot be applied to columns that are loaded as
+`joined eager loads `_.
+This is because a joined eager load does not add the joined model to the
+original query, as explained
+`here `_
 
-The following would not prevent all columns from Bar being eagerly loaded:
+The following would not prevent all columns from Bar being eagerly
+loaded:
 
 .. code-block:: python
 
@@ -169,10 +190,14 @@ The following would not prevent all columns from Bar being eagerly loaded:
 
 .. sidebar:: Automatic Join
 
-    In fact, what happens here is that `Bar` is automatically joined to `query`, because it is determined that `Bar` is not part of the original query. The `load_spec` therefore has no effect because the automatic join
-    results in lazy evaluation.
+    In fact, what happens here is that ``Bar`` is automatically joined
+    to ``query``, because it is determined that ``Bar`` is not part of
+    the original query. The ``load_spec`` therefore has no effect
+    because the automatic join results in lazy evaluation.
 
-If you wish to perform a joined load with restricted columns, you must specify the columns as part of the joined load, rather than with `apply_loads`:
+If you wish to perform a joined load with restricted columns, you must
+specify the columns as part of the joined load, rather than with
+``apply_loads``:
 
 .. code-block:: python
 
@@ -201,9 +226,12 @@ Sort
 
     result = sorted_query.all()
 
-`apply_sort` will attempt to automatically join models to `query` if they're not already present and a model-specific sort is supplied. The behaviour is the same as in `apply_filters`.
+``apply_sort`` will attempt to automatically join models to ``query`` if
+they're not already present and a model-specific sort is supplied.
+The behaviour is the same as in ``apply_filters``.
 
-This allows flexibility for clients to sort by fields on related objects without specifying all possible joins on the server beforehand.
+This allows flexibility for clients to sort by fields on related objects
+without specifying all possible joins on the server beforehand.
 
 
 Pagination
@@ -240,7 +268,8 @@ following format:
         # ...
     ]
 
-The `model` key is optional if the original query being filtered only applies to one model.
+The ``model`` key is optional if the original query being filtered only
+applies to one model.
 
 If there is only one filter, the containing list may be omitted:
 
 .. code-block:: python
 
     filter_spec = {'field': 'field_name', 'op': '==', 'value': 'field_value'}
 
 Where ``field`` is the name of the field that will be filtered using the
-operator provided in ``op`` (optional, defaults to `==`) and the
+operator provided in ``op`` (optional, defaults to ``==``) and the
 provided ``value`` (optional, depending on the operator).
 
 This is the list of operators that can be used:
@@ -269,7 +298,8 @@ This is the list of operators that can be used:
 Boolean Functions
 ^^^^^^^^^^^^^^^^^
 
-``and``, ``or``, and ``not`` functions can be used and nested within the filter specification:
+``and``, ``or``, and ``not`` functions can be used and nested within the
+filter specification:
 
 .. code-block:: python
 
@@ -292,7 +322,8 @@ Boolean Functions
 
     ]
 
-Note: ``or`` and ``and`` must reference a list of at least one element. ``not`` must reference a list of exactly one element.
+Note: ``or`` and ``and`` must reference a list of at least one element.
+``not`` must reference a list of exactly one element.
 
 Sort format
 -----------
@@ -311,25 +342,44 @@ applied sequentially:
 Where ``field`` is the name of the field that will be sorted using the
 provided ``direction``.
 
-The `model` key is optional if the original query being sorted only applies to one model.
+The ``model`` key is optional if the original query being sorted only
+applies to one model.
 
 
 Running tests
 -------------
 
-There are some Makefile targets that can be used to run the tests. A
-test database will be created, used during the tests and destroyed
-afterwards.
-
-The default configuration uses both SQLite and MySQL (if the driver is
-installed) to run the tests, with the following URIs:
+The default configuration uses **SQLite**, **MySQL** (if the driver is
+installed, which is the case when ``tox`` is used) and **PostgreSQL**
+(again, only if the driver is installed, as it is under ``tox``) to
+run the tests, with the following URIs:
 
 .. code-block:: shell
 
     sqlite+pysqlite:///test_sqlalchemy_filters.db
     mysql+mysqlconnector://root:@localhost:3306/test_sqlalchemy_filters
+    postgresql+psycopg2://postgres:@localhost:5432/test_sqlalchemy_filters?client_encoding=utf8
+
+A test database will be created, used during the tests and destroyed
+afterwards for each RDBMS configured.
+
+There are Makefile targets to run Docker containers locally for both
+**MySQL** and **PostgreSQL**, using the default ports and configuration:
+
+.. code-block:: shell
+
+    $ make docker-mysql-run
+    $ make docker-postgres-run
 
-Example of usage:
+To run the tests locally:
+
+.. code-block:: shell
+
+    $ # Create/activate a virtual environment
+    $ pip install tox
+    $ tox
+
+There are some other Makefile targets that can be used to run the tests:
 
 .. code-block:: shell
 
@@ -345,10 +395,22 @@ Example of usage:
 
     $ ARGS='--sqlite-test-db-uri sqlite+pysqlite:///test_sqlalchemy_filters.db' make coverage
 
+
+Database management systems
+---------------------------
+
+The following RDBMSs are supported (tested):
+
+- SQLite
+- MySQL
+- PostgreSQL
+
+
 Python 2
 --------
 
-There is no active support for python 2, however it is compatiable as of February 2019, if you install funcsigs.
+There is no active support for Python 2; however, it is compatible as of
+February 2019 if you install ``funcsigs``.
 
 
 License
diff --git a/setup.py b/setup.py
index 43edb08..aaa7e65 100644
--- a/setup.py
+++ b/setup.py
@@ -35,6 +35,9 @@
         'mysql': [
             'mysql-connector-python-rf==2.2.2',
         ],
+        'postgresql': [
+            'psycopg2==2.7.7'
+        ],
         'python2': [
             "funcsigs>=1.0.2"
         ]
diff --git a/test/conftest.py b/test/conftest.py
index 02c368c..164cc3e 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -10,6 +10,7 @@
 
 SQLITE_TEST_DB_URI = 'SQLITE_TEST_DB_URI'
 MYSQL_TEST_DB_URI = 'MYSQL_TEST_DB_URI'
+POSTGRESQL_TEST_DB_URI = 'POSTGRESQL_TEST_DB_URI'
 
 
 def pytest_addoption(parser):
@@ -39,12 +40,30 @@ def pytest_addoption(parser):
         )
     )
 
+    parser.addoption(
+        '--postgresql-test-db-uri',
+        action='store',
+        dest=POSTGRESQL_TEST_DB_URI,
+        default=(
+            'postgresql+psycopg2://postgres:@localhost:5432'
+            '/test_sqlalchemy_filters?client_encoding=utf8'
+        ),
+        help=(
+            'DB uri for testing (e.g. '
+            '"postgresql+psycopg2://username:password@localhost:5432'
+            '/test_sqlalchemy_filters?client_encoding=utf8")'
+        )
+    )
+
 
 @pytest.fixture(scope='session')
 def config(request):
     return {
         SQLITE_TEST_DB_URI: request.config.getoption(SQLITE_TEST_DB_URI),
         MYSQL_TEST_DB_URI: request.config.getoption(MYSQL_TEST_DB_URI),
+        POSTGRESQL_TEST_DB_URI: request.config.getoption(
+            POSTGRESQL_TEST_DB_URI
+        ),
     }
 
 
@@ -60,6 +79,13 @@ def test_db_keys():
     else:
         test_db_uris.append(MYSQL_TEST_DB_URI)
 
+    try:
+        import psycopg2  # noqa: F401
+    except ImportError:
+        pass
+    else:
+        test_db_uris.append(POSTGRESQL_TEST_DB_URI)
+
     return test_db_uris
 
 
@@ -69,9 +95,26 @@ def db_uri(request, config):
 
 
 @pytest.fixture(scope='session')
-def connection(db_uri):
+def is_postgresql(db_uri):
+    if 'postgresql' in db_uri:
+        return True
+    return False
+
+
+@pytest.fixture(scope='session')
+def db_engine_options(db_uri, is_postgresql):
+    if is_postgresql:
+        return dict(
+            client_encoding='utf8',
+            connect_args={'client_encoding': 'utf8'}
+        )
+    return {}
+
+
+@pytest.fixture(scope='session')
+def connection(db_uri, db_engine_options):
     create_db(db_uri)
-    engine = create_engine(db_uri)
+    engine = create_engine(db_uri, **db_engine_options)
     Base.metadata.create_all(engine)
     connection = engine.connect()
     Base.metadata.bind = engine
diff --git a/test/interface/test_sorting.py b/test/interface/test_sorting.py
index c68e98d..3addf9d 100644
--- a/test/interface/test_sorting.py
+++ b/test/interface/test_sorting.py
@@ -12,7 +12,7 @@
 
 
 @pytest.fixture
-def multiple_foos_inserted(session, multiple_bars_inserted):
+def multiple_foos_inserted(session):
     foo_1 = Foo(id=1, bar_id=1, name='name_1', count=1)
     foo_2 = Foo(id=2, bar_id=2, name='name_2', count=1)
     foo_3 = Foo(id=3, bar_id=3, name='name_1', count=1)
@@ -21,25 +21,21 @@ def multiple_foos_inserted(session, multiple_bars_inserted):
     foo_6 = Foo(id=6, bar_id=6, name='name_4', count=2)
     foo_7 = Foo(id=7, bar_id=7, name='name_1', count=2)
     foo_8 = Foo(id=8, bar_id=8, name='name_5', count=2)
-    session.add_all(
-        [foo_1, foo_2, foo_3, foo_4, foo_5, foo_6, foo_7, foo_8]
-    )
+    session.add_all([foo_1, foo_2, foo_3, foo_4, foo_5, foo_6, foo_7, foo_8])
     session.commit()
 
 
 @pytest.fixture
-def multiple_bars_inserted(session):
+def multiple_bars_with_no_nulls_inserted(session):
     bar_1 = Bar(id=1, name='name_1', count=5)
     bar_2 = Bar(id=2, name='name_2', count=10)
-    bar_3 = Bar(id=3, name='name_1', count=None)
+    bar_3 = Bar(id=3, name='name_1', count=3)
     bar_4 = Bar(id=4, name='name_4', count=12)
     bar_5 = Bar(id=5, name='name_1', count=2)
     bar_6 = Bar(id=6, name='name_4', count=15)
     bar_7 = Bar(id=7, name='name_1', count=2)
     bar_8 = Bar(id=8, name='name_5', count=1)
-    session.add_all(
-        [bar_1, bar_2, bar_3, bar_4, bar_5, bar_6, bar_7, bar_8]
-    )
+    session.add_all([bar_1, bar_2, bar_3, bar_4, bar_5, bar_6, bar_7, bar_8])
     session.commit()
 
 
@@ -109,43 +105,51 @@ def test_invalid_direction(self, session):
 
 class TestSortApplied(object):
 
-    @pytest.mark.usefixtures('multiple_bars_inserted')
+    """Tests that results are sorted only according to the provided
+    sort spec.
+
+    Does NOT test how rows with the same values are sorted since this is
+    not consistent across RDBMSs.
+
+    Does NOT test whether `NULL` field values are placed first or last
+    when sorting since this may differ across RDBMSs.
+
+    SQL defines that `NULL` values should be placed together when
+    sorting, but it does not specify whether they should be placed first
+    or last.
+ """ + + @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') def test_single_sort_field_asc(self, session): query = session.query(Bar) order_by = [{'field': 'name', 'direction': 'asc'}] sorted_query = apply_sort(query, order_by) - result = sorted_query.all() - - assert len(result) == 8 - assert result[0].id == 1 - assert result[1].id == 3 - assert result[2].id == 5 - assert result[3].id == 7 - assert result[4].id == 2 - assert result[5].id == 4 - assert result[6].id == 6 - assert result[7].id == 8 - - @pytest.mark.usefixtures('multiple_bars_inserted') + results = sorted_query.all() + + assert [result.name for result in results] == [ + 'name_1', 'name_1', 'name_1', 'name_1', + 'name_2', + 'name_4', 'name_4', + 'name_5', + ] + + @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') def test_single_sort_field_desc(self, session): query = session.query(Bar) order_by = [{'field': 'name', 'direction': 'desc'}] sorted_query = apply_sort(query, order_by) - result = sorted_query.all() - - assert len(result) == 8 - assert result[0].id == 8 - assert result[1].id == 4 - assert result[2].id == 6 - assert result[3].id == 2 - assert result[4].id == 1 - assert result[5].id == 3 - assert result[6].id == 5 - assert result[7].id == 7 - - @pytest.mark.usefixtures('multiple_bars_inserted') + results = sorted_query.all() + + assert [result.name for result in results] == [ + 'name_5', + 'name_4', 'name_4', + 'name_2', + 'name_1', 'name_1', 'name_1', 'name_1', + ] + + @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') def test_multiple_sort_fields(self, session): query = session.query(Bar) order_by = [ @@ -155,27 +159,30 @@ def test_multiple_sort_fields(self, session): ] sorted_query = apply_sort(query, order_by) - result = sorted_query.all() - - assert len(result) == 8 - assert result[0].id == 1 - assert result[1].id == 7 - assert result[2].id == 5 - assert result[3].id == 3 - assert result[4].id == 2 - assert result[5].id == 6 - assert result[6].id == 4 - assert result[7].id == 8 + results = sorted_query.all() + + assert [ + (result.name, result.count, result.id) for result in results + ] == [ + ('name_1', 5, 1), + ('name_1', 3, 3), + ('name_1', 2, 7), + ('name_1', 2, 5), + ('name_2', 10, 2), + ('name_4', 15, 6), + ('name_4', 12, 4), + ('name_5', 1, 8), + ] def test_multiple_models(self, session): - bar_1 = Bar(id=1, name='name_1', count=5) + bar_1 = Bar(id=1, name='name_1', count=15) bar_2 = Bar(id=2, name='name_2', count=10) - bar_3 = Bar(id=3, name='name_1', count=None) - bar_4 = Bar(id=4, name='name_1', count=12) + bar_3 = Bar(id=3, name='name_1', count=20) + bar_4 = Bar(id=4, name='name_1', count=10) qux_1 = Qux( - id=1, name='name_1', count=5, + id=1, name='name_1', count=15, created_at=datetime.date(2016, 7, 12), execution_time=datetime.datetime(2016, 7, 12, 1, 5, 9) ) @@ -185,11 +192,11 @@ def test_multiple_models(self, session): execution_time=datetime.datetime(2016, 7, 13, 2, 5, 9) ) qux_3 = Qux( - id=3, name='name_1', count=None, + id=3, name='name_1', count=10, created_at=None, execution_time=None ) qux_4 = Qux( - id=4, name='name_1', count=15, + id=4, name='name_1', count=20, created_at=datetime.date(2016, 7, 14), execution_time=datetime.datetime(2016, 7, 14, 3, 5, 9) ) @@ -202,42 +209,41 @@ def test_multiple_models(self, session): query = session.query(Bar).join(Qux, Bar.id == Qux.id) order_by = [ {'model': 'Bar', 'field': 'name', 'direction': 'asc'}, - {'model': 'Qux', 'field': 'count', 'direction': 'asc'} + {'model': 'Qux', 'field': 'count', 'direction': 
'asc'}, ] sorted_query = apply_sort(query, order_by) - result = sorted_query.all() + results = sorted_query.all() - assert len(result) == 4 - assert result[0].id == 3 - assert result[1].id == 1 - assert result[2].id == 4 - assert result[3].id == 2 + assert len(results) == 4 + assert results[0].id == 3 + assert results[1].id == 1 + assert results[2].id == 4 + assert results[3].id == 2 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') def test_a_single_dict_can_be_supplied_as_sort_spec(self, session): query = session.query(Bar) sort_spec = {'field': 'name', 'direction': 'desc'} sorted_query = apply_sort(query, sort_spec) - result = sorted_query.all() + results = sorted_query.all() - assert len(result) == 8 - assert result[0].id == 8 - assert result[1].id == 4 - assert result[2].id == 6 - assert result[3].id == 2 - assert result[4].id == 1 - assert result[5].id == 3 - assert result[6].id == 5 - assert result[7].id == 7 + assert [result.name for result in results] == [ + 'name_5', + 'name_4', 'name_4', + 'name_2', + 'name_1', 'name_1', 'name_1', 'name_1', + ] class TestAutoJoin: - @pytest.mark.usefixtures('multiple_foos_inserted') + @pytest.mark.usefixtures( + 'multiple_bars_with_no_nulls_inserted', + 'multiple_foos_inserted' + ) def test_auto_join(self, session): - query = session.query(Foo) order_by = [ {'field': 'count', 'direction': 'desc'}, @@ -246,21 +252,26 @@ def test_auto_join(self, session): ] sorted_query = apply_sort(query, order_by) - result = sorted_query.all() - - assert len(result) == 8 - assert result[0].id == 5 - assert result[1].id == 7 - assert result[2].id == 6 - assert result[3].id == 8 - assert result[4].id == 1 - assert result[5].id == 3 - assert result[6].id == 2 - assert result[7].id == 4 - - @pytest.mark.usefixtures('multiple_foos_inserted') - def test_noop_if_query_contains_named_models(self, session): + results = sorted_query.all() + + assert [ + (result.count, result.bar.name, result.id) for result in results + ] == [ + (2, 'name_1', 5), + (2, 'name_1', 7), + (2, 'name_4', 6), + (2, 'name_5', 8), + (1, 'name_1', 1), + (1, 'name_1', 3), + (1, 'name_2', 2), + (1, 'name_4', 4), + ] + @pytest.mark.usefixtures( + 'multiple_bars_with_no_nulls_inserted', + 'multiple_foos_inserted' + ) + def test_noop_if_query_contains_named_models(self, session): query = session.query(Foo).join(Bar) order_by = [ {'model': 'Foo', 'field': 'count', 'direction': 'desc'}, @@ -269,21 +280,26 @@ def test_noop_if_query_contains_named_models(self, session): ] sorted_query = apply_sort(query, order_by) - result = sorted_query.all() - - assert len(result) == 8 - assert result[0].id == 5 - assert result[1].id == 7 - assert result[2].id == 6 - assert result[3].id == 8 - assert result[4].id == 1 - assert result[5].id == 3 - assert result[6].id == 2 - assert result[7].id == 4 - - @pytest.mark.usefixtures('multiple_foos_inserted') - def test_auto_join_to_invalid_model(self, session): + results = sorted_query.all() + + assert [ + (result.count, result.bar.name, result.id) for result in results + ] == [ + (2, 'name_1', 5), + (2, 'name_1', 7), + (2, 'name_4', 6), + (2, 'name_5', 8), + (1, 'name_1', 1), + (1, 'name_1', 3), + (1, 'name_2', 2), + (1, 'name_4', 4), + ] + @pytest.mark.usefixtures( + 'multiple_bars_with_no_nulls_inserted', + 'multiple_foos_inserted' + ) + def test_auto_join_to_invalid_model(self, session): query = session.query(Foo) order_by = [ {'model': 'Foo', 'field': 'count', 'direction': 'desc'}, @@ -296,9 +312,11 @@ def 
test_auto_join_to_invalid_model(self, session): assert 'The query does not contain model `Qux`.' == err.value.args[0] - @pytest.mark.usefixtures('multiple_foos_inserted') + @pytest.mark.usefixtures( + 'multiple_bars_with_no_nulls_inserted', + 'multiple_foos_inserted' + ) def test_ambiguous_query(self, session): - query = session.query(Foo).join(Bar) order_by = [ {'field': 'count', 'direction': 'asc'}, # ambiguous @@ -309,9 +327,11 @@ def test_ambiguous_query(self, session): assert 'Ambiguous spec. Please specify a model.' == err.value.args[0] - @pytest.mark.usefixtures('multiple_foos_inserted') + @pytest.mark.usefixtures( + 'multiple_bars_with_no_nulls_inserted', + 'multiple_foos_inserted' + ) def test_eager_load(self, session): - # behaves as if the joinedload wasn't present query = session.query(Foo).options(joinedload(Foo.bar)) order_by = [ @@ -321,14 +341,17 @@ def test_eager_load(self, session): ] sorted_query = apply_sort(query, order_by) - result = sorted_query.all() - - assert len(result) == 8 - assert result[0].id == 5 - assert result[1].id == 7 - assert result[2].id == 6 - assert result[3].id == 8 - assert result[4].id == 1 - assert result[5].id == 3 - assert result[6].id == 2 - assert result[7].id == 4 + results = sorted_query.all() + + assert [ + (result.count, result.bar.name, result.id) for result in results + ] == [ + (2, 'name_1', 5), + (2, 'name_1', 7), + (2, 'name_4', 6), + (2, 'name_5', 8), + (1, 'name_1', 1), + (1, 'name_1', 3), + (1, 'name_2', 2), + (1, 'name_4', 4), + ] diff --git a/tox.ini b/tox.ini index 2103bc7..77b829b 100644 --- a/tox.ini +++ b/tox.ini @@ -8,6 +8,7 @@ usedevelop = true extras = dev mysql + postgresql deps = py27: funcsigs commands =
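As a quick local sanity check of the new PostgreSQL path, one possible workflow is to combine the new ``docker-postgres-run`` Makefile target with the ``--postgresql-test-db-uri`` pytest option added in ``test/conftest.py``. This is only a sketch: it assumes the defaults introduced above (the ``dev`` and ``postgresql`` extras, the default container port and the ``postgres`` user with an empty password), and the exact URI simply mirrors the ``conftest.py`` default:

.. code-block:: shell

    $ make docker-postgres-run
    $ pip install -e ".[dev,postgresql]"
    $ ARGS='--postgresql-test-db-uri postgresql+psycopg2://postgres:@localhost:5432/test_sqlalchemy_filters?client_encoding=utf8' make coverage

Note that the option only overrides the URI; the run still includes SQLite (and MySQL, if its driver is importable), because ``test_db_keys`` collects every database whose driver can be imported.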