From 78d6c9b096504851043fd0d10a15b4ead036e6fb Mon Sep 17 00:00:00 2001
From: William Woodruff
Date: Thu, 12 Jan 2023 13:44:41 -0500
Subject: [PATCH] sigstore, test: relax timeout on TUF retrieval (#432)

* sigstore, test: relax timeout on TUF retrieval

Fixes #431.

Signed-off-by: William Woodruff

* CHANGELOG: record changes

Signed-off-by: William Woodruff

Signed-off-by: William Woodruff
---
 CHANGELOG.md              |  6 ++++++
 sigstore/_internal/tuf.py | 17 ++++++++++++++---
 test/unit/conftest.py     |  2 +-
 3 files changed, 21 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 04cfedf63..6750595a8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,12 @@ All versions prior to 0.9.0 are untracked.
 * `sigstore.transparency.RekorEntryMissing` is now `LogEntryMissing`
   ([#414](https://github.com/sigstore/sigstore-python/pull/414))
 
+### Fixed
+
+* The TUF network timeout has been relaxed from 4 seconds to 30 seconds,
+  which should reduce the likelihood of spurious timeout errors in environments
+  like GitHub Actions ([#432](https://github.com/sigstore/sigstore-python/pull/432))
+
 ## [0.10.0]
 
 ### Added
diff --git a/sigstore/_internal/tuf.py b/sigstore/_internal/tuf.py
index 0fba01733..bb855dbb4 100644
--- a/sigstore/_internal/tuf.py
+++ b/sigstore/_internal/tuf.py
@@ -19,12 +19,14 @@
 from __future__ import annotations
 
 import logging
+from functools import lru_cache
 from pathlib import Path
 from urllib import parse
 
 import appdirs
 from cryptography.x509 import Certificate, load_pem_x509_certificate
 from tuf.ngclient import Updater
+from tuf.ngclient._internal.requests_fetcher import RequestsFetcher
 
 from sigstore._utils import read_embedded
 
@@ -33,8 +35,17 @@
 DEFAULT_TUF_URL = "https://sigstore-tuf-root.storage.googleapis.com/"
 STAGING_TUF_URL = "https://tuf-root-staging.storage.googleapis.com/"
 
-# for tests to override
-_fetcher = None
+
+@lru_cache()
+def _get_fetcher() -> RequestsFetcher:
+    # NOTE: We poke into the underlying fetcher here to set a more reasonable timeout.
+    # The default timeout is 4 seconds, which can cause spurious timeout errors on
+    # CI systems like GitHub Actions (where traffic may be delayed/deprioritized due
+    # to network load).
+    fetcher = RequestsFetcher()
+    fetcher.socket_timeout = 30
+
+    return fetcher
 
 
 def _get_dirs(url: str) -> tuple[Path, Path]:
@@ -119,7 +130,7 @@ def _setup(self) -> Updater:
             metadata_base_url=self._repo_url,
             target_base_url=parse.urljoin(f"{self._repo_url}/", "targets/"),
             target_dir=str(self._targets_dir),
-            fetcher=_fetcher,
+            fetcher=_get_fetcher(),
         )
 
         # NOTE: we would like to avoid refresh if the toplevel metadata is valid.
diff --git a/test/unit/conftest.py b/test/unit/conftest.py
index a84ddc1a9..1e8a6aa3a 100644
--- a/test/unit/conftest.py
+++ b/test/unit/conftest.py
@@ -160,7 +160,7 @@ def _fetch(self, url: str) -> Iterator[bytes]:
             failure[filename] += 1
             raise DownloadHTTPError("File not found", 404)
 
-    monkeypatch.setattr(tuf, "_fetcher", MockFetcher())
+    monkeypatch.setattr(tuf, "_get_fetcher", lambda: MockFetcher())
     return success, failure
 
 
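
For illustration, a minimal sketch (not part of the patch) of how the cached fetcher behaves and how a test can bypass it. It assumes the sigstore._internal.tuf module as modified above; the check_* helpers are hypothetical, and unittest.mock stands in for the pytest monkeypatch used in test/unit/conftest.py.

    from unittest import mock

    from sigstore._internal import tuf


    def check_relaxed_timeout() -> None:
        # @lru_cache() means every caller shares one RequestsFetcher instance, so the
        # relaxed 30-second socket timeout applies to all TUF metadata/target fetches.
        fetcher = tuf._get_fetcher()
        assert fetcher is tuf._get_fetcher()
        assert fetcher.socket_timeout == 30


    def check_fetcher_override() -> None:
        # Tests replace the factory itself (as conftest.py does via monkeypatch),
        # which sidesteps the lru_cache entirely: _setup() looks up _get_fetcher
        # by name at call time, so the replacement is picked up.
        fake = object()  # stand-in for a tuf.ngclient FetcherInterface implementation
        with mock.patch.object(tuf, "_get_fetcher", lambda: fake):
            assert tuf._get_fetcher() is fake

Replacing the factory rather than a module-level _fetcher singleton is what lets the cache and the test override coexist without any reset logic.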