[Backport 8.15] Drop Python 3.7 support (#2618)
Signed-off-by: Tomasz Kłoczko <kloczek@github.com>
Co-authored-by: Quentin Pradet <quentin.pradet@elastic.co>
(cherry picked from commit dec4858)

Co-authored-by: Tomasz Kłoczko <31284574+kloczek@users.noreply.github.com>
github-actions[bot] and kloczek committed Jul 31, 2024
1 parent 3a40e00 commit 0ec8d2f
Showing 25 changed files with 35 additions and 60 deletions.
3 changes: 1 addition & 2 deletions .buildkite/pipeline.yml
@@ -11,7 +11,6 @@ steps:
matrix:
setup:
python:
- "3.7"
- "3.8"
- "3.9"
- "3.10"
@@ -24,7 +23,7 @@ steps:
- "test"
adjustments:
- with:
python: "3.7"
python: "3.8"
connection: "urllib3"
nox_session: "test_otel"
- with:
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -23,7 +23,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
nox-session: [""]
runs-on: ["ubuntu-latest"]

2 changes: 1 addition & 1 deletion docs/guide/getting-started.asciidoc
@@ -8,7 +8,7 @@ operations with it.
[discrete]
=== Requirements

* https://www.python.org/[Python] 3.7 or newer
* https://www.python.org/[Python] 3.8 or newer
* https://pip.pypa.io/en/stable/[`pip`], installed by default alongside Python

[discrete]
1 change: 0 additions & 1 deletion docs/sphinx/conf.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
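The coding cookie removed here (and from several test files further down) is redundant on Python 3, where UTF-8 is already the default source encoding; the same goes for the from __future__ import unicode_literals lines removed below, since string literals are always str. A minimal sketch of why both lines can simply be dropped, relying only on standard Python 3.8+ behaviour:

# No "# -*- coding: utf-8 -*-" cookie and no unicode_literals import needed:
# Python 3 reads source files as UTF-8 and makes every plain literal a str.
s = "Kłoczko"             # already unicode text
print(type(s))            # <class 'str'>
print(s.encode("utf-8"))  # b'K\xc5\x82oczko' -- encode explicitly when bytes are wanted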
2 changes: 1 addition & 1 deletion docs/sphinx/quickstart.rst
@@ -9,7 +9,7 @@ operations like indexing or searching documents.
Requirements
------------

- `Python <https://www.python.org/>`_ 3.7 or newer
- `Python <https://www.python.org/>`_ 3.8 or newer
- `pip <https://pip.pypa.io/en/stable/>`_


8 changes: 2 additions & 6 deletions elasticsearch/_otel.py
@@ -19,10 +19,7 @@

import contextlib
import os
from typing import TYPE_CHECKING, Generator, Mapping

if TYPE_CHECKING:
from typing import Literal
from typing import Generator, Literal, Mapping

try:
from opentelemetry import trace
@@ -48,8 +45,7 @@ def __init__(
self,
enabled: bool | None = None,
tracer: trace.Tracer | None = None,
# TODO import Literal at the top-level when dropping Python 3.7
body_strategy: 'Literal["omit", "raw"]' | None = None,
body_strategy: Literal["omit", "raw"] | None = None,
):
if enabled is None:
enabled = os.environ.get(ENABLED_ENV_VAR, "true") == "true"
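With Python 3.7 gone, Literal can be imported from typing at runtime instead of being hidden behind TYPE_CHECKING and used as a quoted annotation, which is exactly what the removed TODO comment was waiting for. A minimal sketch of the resulting pattern; the class and parameter below are stand-ins for illustration, not the client's real OpenTelemetry wrapper:

from __future__ import annotations  # lets 3.8/3.9 accept the X | None annotation syntax

from typing import Literal  # runtime import works on Python 3.8+


class OtelConfigSketch:
    # Hypothetical wrapper used only to show the annotation style.
    def __init__(self, body_strategy: Literal["omit", "raw"] | None = None):
        self.body_strategy = body_strategy or "omit"


print(OtelConfigSketch().body_strategy)  # omit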
2 changes: 1 addition & 1 deletion examples/bulk-ingest/bulk-ingest.py
@@ -66,7 +66,7 @@ def generate_actions():
yields a single document. This function is passed into the bulk()
helper to create many documents in sequence.
"""
with open(DATASET_PATH, mode="r") as f:
with open(DATASET_PATH) as f:
reader = csv.DictReader(f)

for row in reader:
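Dropping mode="r" changes nothing at runtime, because text reading is already the default mode for open(). A short sketch with a hypothetical CSV path (DATASET_PATH below is illustrative, not the example's real download location):

import csv

DATASET_PATH = "dataset.csv"  # hypothetical path for illustration


def generate_actions():
    # open(path) is equivalent to open(path, mode="r"): text mode, default encoding.
    with open(DATASET_PATH) as f:
        for row in csv.DictReader(f):
            yield row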
4 changes: 2 additions & 2 deletions noxfile.py
@@ -45,14 +45,14 @@ def pytest_argv():
]


@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"])
@nox.session(python=["3.8", "3.9", "3.10", "3.11", "3.12"])
def test(session):
session.install(".[dev]", env=INSTALL_ENV, silent=False)

session.run(*pytest_argv())


@nox.session(python=["3.7", "3.12"])
@nox.session(python=["3.8", "3.12"])
def test_otel(session):
session.install(
".[dev]",
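The noxfile change is only a matter of the interpreter lists handed to nox. A minimal, self-contained sketch of how such a session is declared (not the project's full noxfile, and the installed package is illustrative):

import nox


@nox.session(python=["3.8", "3.9", "3.10", "3.11", "3.12"])
def test(session):
    # nox creates one virtualenv per listed interpreter and runs this body in each;
    # removing "3.7" from the list is all it takes to stop testing against it.
    session.install("pytest")
    session.run("pytest", "-q")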
5 changes: 1 addition & 4 deletions pyproject.toml
@@ -7,7 +7,7 @@ name = "elasticsearch"
description = "Python client for Elasticsearch"
readme = "README.md"
license = "Apache-2.0"
requires-python = ">=3.7"
requires-python = ">=3.8"
authors = [
{ name = "Elastic Client Library Maintainers", email = "client-libs@elastic.co" },
]
@@ -21,7 +21,6 @@ classifiers = [
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
@@ -72,8 +71,6 @@ dev = [
"simsimd",
"pandas",
"mapbox-vector-tile",
# Python 3.7 gets an old version of mapbox-vector-tile, requiring an old version of protobuf
"protobuf<4; python_version<=\"3.7\"",
]
docs = [
"sphinx",
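The dropped protobuf pin relied on a PEP 508 environment marker, python_version<="3.7", which can never be true once requires-python is ">=3.8", so the conditional dependency was dead weight. A sketch of how such a marker evaluates, assuming the third-party packaging library is available (an assumption for illustration; it is not something this diff adds):

from packaging.markers import Marker

marker = Marker('python_version <= "3.7"')
# Evaluated against the running interpreter; on any interpreter the client
# now supports (3.8+), the marker is False and the pin would never apply.
print(marker.evaluate())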
2 changes: 0 additions & 2 deletions test_elasticsearch/test_async/test_server/test_clients.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
@@ -16,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.

from __future__ import unicode_literals

import pytest

18 changes: 9 additions & 9 deletions test_elasticsearch/test_async/test_server/test_helpers.py
@@ -33,13 +33,13 @@

class AsyncMock(MagicMock):
async def __call__(self, *args, **kwargs):
return super(AsyncMock, self).__call__(*args, **kwargs)
return super().__call__(*args, **kwargs)

def __await__(self):
return self().__await__()


class FailingBulkClient(object):
class FailingBulkClient:
def __init__(
self,
client,
@@ -68,7 +68,7 @@ def options(self, **_):
return self


class TestStreamingBulk(object):
class TestStreamingBulk:
async def test_actions_remain_unchanged(self, async_client):
actions = [{"_id": 1}, {"_id": 2}]
async for ok, item in helpers.async_streaming_bulk(
@@ -294,7 +294,7 @@ async def streaming_bulk():
assert 4 == failing_client._called


class TestBulk(object):
class TestBulk:
async def test_bulk_works_with_single_item(self, async_client):
docs = [{"answer": 42, "_id": 1}]
success, failed = await helpers.async_bulk(
@@ -458,7 +458,7 @@ async def scan_teardown(async_client):
await async_client.clear_scroll(scroll_id="_all")


class TestScan(object):
class TestScan:
async def test_order_can_be_preserved(self, async_client, scan_teardown):
bulk = []
for x in range(100):
@@ -493,8 +493,8 @@ async def test_all_documents_are_read(self, async_client, scan_teardown):
]

assert 100 == len(docs)
assert set(map(str, range(100))) == set(d["_id"] for d in docs)
assert set(range(100)) == set(d["_source"]["answer"] for d in docs)
assert set(map(str, range(100))) == {d["_id"] for d in docs}
assert set(range(100)) == {d["_source"]["answer"] for d in docs}

async def test_scroll_error(self, async_client, scan_teardown):
bulk = []
@@ -881,7 +881,7 @@ async def reindex_setup(async_client):
yield


class TestReindex(object):
class TestReindex:
async def test_reindex_passes_kwargs_to_scan_and_bulk(
self, async_client, reindex_setup
):
@@ -1031,7 +1031,7 @@ async def reindex_data_stream_setup(async_client):
yield


class TestAsyncDataStreamReindex(object):
class TestAsyncDataStreamReindex:
@pytest.mark.parametrize("op_type", [None, "create"])
async def test_reindex_index_datastream(
self, op_type, async_client, reindex_data_stream_setup
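Most of the edits in this test module are mechanical Python 3 modernizations: writing class FailingBulkClient: instead of class FailingBulkClient(object): (every Python 3 class is new-style), and calling super() with no arguments inside methods. A small sketch of the equivalences, using throwaway classes rather than the real test fixtures:

class Greeter:                 # identical to the old `class Greeter(object):`
    def greet(self):
        return "hello"


class LoudGreeter(Greeter):
    def greet(self):
        # Zero-argument super() is equivalent to super(LoudGreeter, self);
        # the compiler supplies the class and instance automatically.
        return super().greet().upper()


assert LoudGreeter().greet() == "HELLO"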
@@ -228,9 +228,9 @@ async def _feature_enabled(self, name):
if XPACK_FEATURES is None:
try:
xinfo = await self.client.xpack.info()
XPACK_FEATURES = set(
XPACK_FEATURES = {
f for f in xinfo["features"] if xinfo["features"][f]["enabled"]
)
}
IMPLEMENTED_FEATURES.add("xpack")
except RequestError:
XPACK_FEATURES = set()
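Swapping set(f for ...) for a set comprehension (and the matching dict change later in the diff) is behaviour-preserving; the comprehension simply avoids building an intermediate generator and passing it to the constructor. A small sketch with a made-up payload shaped like an xpack.info() response:

# Hypothetical response body, only to exercise the comprehension.
xinfo = {"features": {"security": {"enabled": True}, "ml": {"enabled": False}}}

via_constructor = set(f for f in xinfo["features"] if xinfo["features"][f]["enabled"])
via_comprehension = {f for f in xinfo["features"] if xinfo["features"][f]["enabled"]}
assert via_constructor == via_comprehension == {"security"}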
4 changes: 1 addition & 3 deletions test_elasticsearch/test_async/test_transport.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
@@ -16,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.

from __future__ import unicode_literals

import asyncio
import re
@@ -280,7 +278,7 @@ def test_kwargs_passed_on_to_node_pool(self):
)
assert dt is client.transport.node_pool.dead_node_backoff_factor

class MyConnection(object):
class MyConnection:
def __init__(self, *_, **__):
pass

1 change: 0 additions & 1 deletion test_elasticsearch/test_client/test_options.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
1 change: 0 additions & 1 deletion test_elasticsearch/test_client/test_overrides.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
2 changes: 0 additions & 2 deletions test_elasticsearch/test_client/test_utils.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
@@ -16,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.

from __future__ import unicode_literals

from elasticsearch._sync.client.utils import _quote

3 changes: 1 addition & 2 deletions test_elasticsearch/test_helpers.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
@@ -75,7 +74,7 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk):
chunk_size=2,
)
)
assert len(set([r[1] for r in results])) > 1
assert len({r[1] for r in results}) > 1


class TestChunkActions:
1 change: 0 additions & 1 deletion test_elasticsearch/test_serializer.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
1 change: 0 additions & 1 deletion test_elasticsearch/test_server/test_clients.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
6 changes: 3 additions & 3 deletions test_elasticsearch/test_server/test_helpers.py
@@ -27,7 +27,7 @@
from elasticsearch.helpers import ScanError


class FailingBulkClient(object):
class FailingBulkClient:
def __init__(
self,
client,
@@ -463,8 +463,8 @@ def test_all_documents_are_read(sync_client):
docs = list(helpers.scan(sync_client, index="test_index", size=2))

assert 100 == len(docs)
assert set(map(str, range(100))) == set(d["_id"] for d in docs)
assert set(range(100)) == set(d["_source"]["answer"] for d in docs)
assert set(map(str, range(100))) == {d["_id"] for d in docs}
assert set(range(100)) == {d["_source"]["answer"] for d in docs}


@pytest.mark.usefixtures("scan_teardown")
12 changes: 4 additions & 8 deletions test_elasticsearch/test_server/test_rest_api_spec.py
@@ -24,7 +24,6 @@
import json
import os
import re
import sys
import warnings
import zipfile
from typing import Tuple, Union
@@ -131,10 +130,7 @@

XPACK_FEATURES = None
ES_VERSION = None
RUN_ASYNC_REST_API_TESTS = (
sys.version_info >= (3, 8)
and os.environ.get("PYTHON_CONNECTION_CLASS") == "requests"
)
RUN_ASYNC_REST_API_TESTS = os.environ.get("PYTHON_CONNECTION_CLASS") == "requests"

FALSEY_VALUES = ("", None, False, 0, 0.0)

@@ -456,7 +452,7 @@ def _resolve(self, value):
if isinstance(value, string_types):
value = value.strip()
elif isinstance(value, dict):
value = dict((k, self._resolve(v)) for (k, v) in value.items())
value = {k: self._resolve(v) for (k, v) in value.items()}
elif isinstance(value, list):
value = list(map(self._resolve, value))
return value
@@ -495,9 +491,9 @@ def _feature_enabled(self, name):
if XPACK_FEATURES is None:
try:
xinfo = self.client.xpack.info()
XPACK_FEATURES = set(
XPACK_FEATURES = {
f for f in xinfo["features"] if xinfo["features"][f]["enabled"]
)
}
IMPLEMENTED_FEATURES.add("xpack")
except RequestError:
XPACK_FEATURES = set()
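Two simplifications land in this file: the sys.version_info >= (3, 8) guard on the async REST API tests is dead code once 3.8 is the minimum, leaving only the environment-variable check, and dict((k, v) for ...) becomes a dict comprehension. A sketch of both, with illustrative values only:

import os
import sys

# Old gate: interpreter version and connection class both had to match.
old_flag = (
    sys.version_info >= (3, 8)
    and os.environ.get("PYTHON_CONNECTION_CLASS") == "requests"
)
# New gate: with 3.8 as the floor, only the environment check is meaningful.
new_flag = os.environ.get("PYTHON_CONNECTION_CLASS") == "requests"
assert old_flag == new_flag  # holds on every interpreter the client supports

# dict(generator) and a dict comprehension build the same mapping.
value = {"a": " x ", "b": " y "}
assert {k: v.strip() for k, v in value.items()} == dict(
    (k, v.strip()) for k, v in value.items()
)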
3 changes: 1 addition & 2 deletions test_elasticsearch/test_transport.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
@@ -311,7 +310,7 @@ def test_kwargs_passed_on_to_node_pool(self):
assert dt is client.transport.node_pool.dead_node_backoff_factor

def test_custom_node_class(self):
class MyConnection(object):
class MyConnection:
def __init__(self, *_, **__):
pass

2 changes: 1 addition & 1 deletion utils/build-dists.py
@@ -50,7 +50,7 @@ def run(*argv, expect_exit_code=0):
else:
os.chdir(tmp_dir)

cmd = " ".join(shlex.quote(x) for x in argv)
cmd = shlex.join(argv)
print("$ " + cmd)
exit_code = os.system(cmd)
if exit_code != expect_exit_code:
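shlex.join() was added in Python 3.8, so it can now replace the hand-rolled join of shlex.quote() calls; both produce the same shell-safe command string. A short sketch with an illustrative argv:

import shlex

argv = ["python", "-m", "pip", "install", "elasticsearch[async]"]

manual = " ".join(shlex.quote(x) for x in argv)
builtin = shlex.join(argv)  # new in Python 3.8
assert manual == builtin
print("$ " + builtin)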
2 changes: 1 addition & 1 deletion utils/bump-version.py
@@ -29,7 +29,7 @@
def find_and_replace(path, pattern, replace):
# Does a find and replace within a file path and complains
# if the given pattern isn't found in the file.
with open(path, "r") as f:
with open(path) as f:
old_data = f.read()

if re.search(pattern, old_data, flags=re.MULTILINE) is None:
4 changes: 2 additions & 2 deletions utils/license-headers.py
@@ -66,7 +66,7 @@ def find_files_to_fix(sources: List[str]) -> Iterator[str]:
def does_file_need_fix(filepath: str) -> bool:
if not re.search(r"\.pyi?$", filepath):
return False
with open(filepath, mode="r") as f:
with open(filepath) as f:
first_license_line = None
for line in f:
if line == license_header_lines[0]:
@@ -83,7 +83,7 @@ def does_file_need_fix(filepath: str) -> bool:


def add_header_to_file(filepath: str) -> None:
with open(filepath, mode="r") as f:
with open(filepath) as f:
lines = list(f)
i = 0
for i, line in enumerate(lines):