diff --git a/betty/_package/pyinstaller/main.py b/betty/_package/pyinstaller/main.py index 56d078218..08182db48 100644 --- a/betty/_package/pyinstaller/main.py +++ b/betty/_package/pyinstaller/main.py @@ -1,5 +1,4 @@ import sys -from multiprocessing import freeze_support from betty.app import App from betty.asyncio import sync @@ -12,7 +11,6 @@ async def main() -> None: """ Launch Betty for PyInstaller builds. """ - freeze_support() async with App() as app: qapp = BettyApplication([sys.argv[0]], app=app) window = WelcomeWindow(app) diff --git a/betty/app/__init__.py b/betty/app/__init__.py index 9a59cea7e..def7cba16 100644 --- a/betty/app/__init__.py +++ b/betty/app/__init__.py @@ -3,9 +3,7 @@ from __future__ import annotations import operator -import os as stdos import weakref -from collections.abc import Callable from contextlib import suppress from functools import reduce from graphlib import CycleError, TopologicalSorter @@ -157,21 +155,6 @@ def _unreduce(cls, dumped_app_configuration: VoidableDump, project: Project) -> project, ) - def __reduce__(self) -> tuple[ - Callable[[VoidableDump, Project], Self], - tuple[ - VoidableDump, - Project, - ], - ]: - return ( - App._unreduce, - ( - self._configuration.dump(), - self._project, - ), - ) - async def __aenter__(self) -> Self: await self.start() return self @@ -341,17 +324,6 @@ def renderer(self) -> Renderer: def renderer(self) -> None: self._renderer = None - @property - def concurrency(self) -> int: - with suppress(KeyError): - return int(stdos.environ['BETTY_CONCURRENCY']) - # Assume that any machine that runs Betty has at least two CPU cores. 
- return stdos.cpu_count() or 2 - - @property - def async_concurrency(self) -> int: - return self.concurrency ** 2 - @property def http_client(self) -> aiohttp.ClientSession: if not self._http_client: diff --git a/betty/assets/betty.pot b/betty/assets/betty.pot index 1f69fb0ed..ce1ceb1ae 100644 --- a/betty/assets/betty.pot +++ b/betty/assets/betty.pot @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: Betty VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-02-17 18:26+0000\n" +"POT-Creation-Date: 2024-02-24 13:08+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -410,10 +410,7 @@ msgstr "" msgid "Generate site" msgstr "" -msgid "Generated pages for {count} {entity_type} in {locale}." -msgstr "" - -msgid "Generated the listing page for {entity_type} in {locale}." +msgid "Generated {completed_job_count} out of {total_job_count} items ({completed_job_percentage}%)." msgstr "" msgid "Generating JSON Schema..." diff --git a/betty/assets/locale/de-DE/betty.po b/betty/assets/locale/de-DE/betty.po index 7f80ec55b..cbd0e099d 100644 --- a/betty/assets/locale/de-DE/betty.po +++ b/betty/assets/locale/de-DE/betty.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: Betty VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-02-17 18:26+0000\n" +"POT-Creation-Date: 2024-02-24 13:08+0000\n" "PO-Revision-Date: 2024-02-08 13:24+0000\n" "Last-Translator: Bart Feenstra \n" "Language: de\n" @@ -574,11 +574,10 @@ msgstr "Seiten zur Auflistung von Entitäten generieren" msgid "Generate site" msgstr "Seite erzeugen" -msgid "Generated pages for {count} {entity_type} in {locale}." -msgstr "Generierte Seiten für {count} {entity_type} in {locale}." - -msgid "Generated the listing page for {entity_type} in {locale}." -msgstr "Erzeugt die Auflistungsseite für {entity_type} in {locale}." 
+msgid "" +"Generated {completed_job_count} out of {total_job_count} items " +"({completed_job_percentage}%)." +msgstr "" msgid "Generating JSON Schema..." msgstr "Generiere JSON Schema..." diff --git a/betty/assets/locale/fr-FR/betty.po b/betty/assets/locale/fr-FR/betty.po index 897c1e2dc..44afa4cb3 100644 --- a/betty/assets/locale/fr-FR/betty.po +++ b/betty/assets/locale/fr-FR/betty.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-02-17 18:26+0000\n" +"POT-Creation-Date: 2024-02-24 13:08+0000\n" "PO-Revision-Date: 2024-02-08 13:24+0000\n" "Last-Translator: Bart Feenstra \n" "Language: fr\n" @@ -489,10 +489,9 @@ msgstr "" msgid "Generate site" msgstr "" -msgid "Generated pages for {count} {entity_type} in {locale}." -msgstr "" - -msgid "Generated the listing page for {entity_type} in {locale}." +msgid "" +"Generated {completed_job_count} out of {total_job_count} items " +"({completed_job_percentage}%)." msgstr "" msgid "Generating JSON Schema..." diff --git a/betty/assets/locale/nl-NL/betty.po b/betty/assets/locale/nl-NL/betty.po index 49b3bad4b..2814771c2 100644 --- a/betty/assets/locale/nl-NL/betty.po +++ b/betty/assets/locale/nl-NL/betty.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-02-17 18:26+0000\n" +"POT-Creation-Date: 2024-02-24 13:08+0000\n" "PO-Revision-Date: 2024-02-11 15:31+0000\n" "Last-Translator: Bart Feenstra \n" "Language: nl\n" @@ -567,13 +567,12 @@ msgstr "Genereer pagina's met entiteitsoverzichten" msgid "Generate site" msgstr "Genereer site" -msgid "Generated pages for {count} {entity_type} in {locale}." -msgstr "Pagina's voor {count} {entity_type} in het {locale} gegenereerd." - -msgid "Generated the listing page for {entity_type} in {locale}." +msgid "" +"Generated {completed_job_count} out of {total_job_count} items " +"({completed_job_percentage}%)." 
msgstr "" -"Pagina met het overzicht van alle {entity_type} in het {locale} " -"gegenereerd." +"{completed_job_count} van de {total_job_count} items gegenereerd " +"({completed_job_percentage}%)." msgid "Generating JSON Schema..." msgstr "JSON Schema aan het genereren..." diff --git a/betty/assets/locale/uk/betty.po b/betty/assets/locale/uk/betty.po index 3dba03c23..e080dea49 100644 --- a/betty/assets/locale/uk/betty.po +++ b/betty/assets/locale/uk/betty.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: Betty VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-02-17 18:26+0000\n" +"POT-Creation-Date: 2024-02-24 13:08+0000\n" "PO-Revision-Date: 2024-02-08 13:08+0000\n" "Last-Translator: Rainer Thieringer \n" "Language: uk\n" @@ -490,10 +490,9 @@ msgstr "" msgid "Generate site" msgstr "" -msgid "Generated pages for {count} {entity_type} in {locale}." -msgstr "" - -msgid "Generated the listing page for {entity_type} in {locale}." +msgid "" +"Generated {completed_job_count} out of {total_job_count} items " +"({completed_job_percentage}%)." msgstr "" msgid "Generating JSON Schema..." msgstr "" diff --git a/betty/concurrent.py b/betty/concurrent.py index d595830ca..c25bdaad3 100644 --- a/betty/concurrent.py +++ b/betty/concurrent.py @@ -10,7 +10,7 @@ class RateLimiter: """ - Rate-limit tasks. + Rate-limit operations. This class implements the `Token Bucket algorithm `_.
""" diff --git a/betty/extension/cotton_candy/__init__.py b/betty/extension/cotton_candy/__init__.py index b642eadb1..46dbba44c 100644 --- a/betty/extension/cotton_candy/__init__.py +++ b/betty/extension/cotton_candy/__init__.py @@ -24,7 +24,7 @@ from betty.functools import walk from betty.generate import Generator, GenerationContext from betty.gui import GuiBuilder -from betty.jinja2 import Jinja2Provider, context_app, context_localizer, context_task_context +from betty.jinja2 import Jinja2Provider, context_app, context_localizer, context_job_context from betty.locale import Date, Str, Datey from betty.model import Entity, UserFacingEntity, GeneratedEntityId from betty.model.ancestry import Event, Person, Presence, is_public, Subject @@ -228,7 +228,7 @@ async def _copy_npm_build(self, source_directory_path: Path, destination_directo await asyncio.to_thread(copy2, source_directory_path / 'cotton_candy.css', destination_directory_path / 'cotton_candy.css') await asyncio.to_thread(copy2, source_directory_path / 'cotton_candy.js', destination_directory_path / 'cotton_candy.js') - async def generate(self, task_context: GenerationContext) -> None: + async def generate(self, job_context: GenerationContext) -> None: assets_directory_path = await self.app.extensions[_Npm].ensure_assets(self) await makedirs(self.app.project.configuration.www_directory_path, exist_ok=True) await self._copy_npm_build(assets_directory_path, self.app.project.configuration.www_directory_path) @@ -238,7 +238,7 @@ async def generate(self, task_context: GenerationContext) -> None: async def _global_search_index(context: Context) -> AsyncIterable[dict[str, str]]: return Index( context_app(context), - context_task_context(context), + context_job_context(context), context_localizer(context), ).build() diff --git a/betty/extension/cotton_candy/search.py b/betty/extension/cotton_candy/search.py index f50780527..cfad01358 100644 --- a/betty/extension/cotton_candy/search.py +++ 
b/betty/extension/cotton_candy/search.py @@ -11,18 +11,18 @@ from betty.model import get_entity_type_name, Entity from betty.model.ancestry import Person, Place, File from betty.string import camel_case_to_snake_case -from betty.task import Context +from betty.job import Context class Index: def __init__( self, app: App, - task_context: Context | None, + job_context: Context | None, localizer: Localizer, ): self._app = app - self._task_context = task_context + self._job_context = job_context self._localizer = localizer async def build(self) -> AsyncIterable[dict[str, str]]: @@ -57,7 +57,7 @@ async def _render_entity(self, entity: Entity) -> str: f'search/result-{camel_case_to_snake_case(entity_type_name)}.html.j2', 'search/result.html.j2', ]).render_async({ - 'task_context': self._task_context, + 'job_context': self._job_context, 'localizer': self._localizer, 'entity': entity, }) diff --git a/betty/extension/http_api_doc/__init__.py b/betty/extension/http_api_doc/__init__.py index 83d78466a..05e1801ab 100644 --- a/betty/extension/http_api_doc/__init__.py +++ b/betty/extension/http_api_doc/__init__.py @@ -29,7 +29,7 @@ async def npm_build(self, working_directory_path: Path, assets_directory_path: P def npm_cache_scope(cls) -> CacheScope: return CacheScope.BETTY - async def generate(self, task_context: GenerationContext) -> None: + async def generate(self, job_context: GenerationContext) -> None: assets_directory_path = await self.app.extensions[_Npm].ensure_assets(self) await makedirs(self.app.project.configuration.www_directory_path, exist_ok=True) await asyncio.to_thread(copy2, assets_directory_path / 'http-api-doc.js', self.app.project.configuration.www_directory_path / 'http-api-doc.js') diff --git a/betty/extension/maps/__init__.py b/betty/extension/maps/__init__.py index 0c68b19f7..81d4e6f49 100644 --- a/betty/extension/maps/__init__.py +++ b/betty/extension/maps/__init__.py @@ -39,7 +39,7 @@ async def _copy_npm_build(self, source_directory_path: Path, 
destination_directo def npm_cache_scope(cls) -> CacheScope: return CacheScope.BETTY - async def generate(self, task_context: GenerationContext) -> None: + async def generate(self, job_context: GenerationContext) -> None: assets_directory_path = await self.app.extensions[_Npm].ensure_assets(self) await makedirs(self.app.project.configuration.www_directory_path, exist_ok=True) await self._copy_npm_build(assets_directory_path, self.app.project.configuration.www_directory_path) diff --git a/betty/extension/nginx/__init__.py b/betty/extension/nginx/__init__.py index 93a8af903..0db98877b 100644 --- a/betty/extension/nginx/__init__.py +++ b/betty/extension/nginx/__init__.py @@ -103,7 +103,7 @@ def servers(self) -> Sequence[Server]: return [DockerizedNginxServer(self._app)] return [] - async def generate(self, task_context: GenerationContext) -> None: + async def generate(self, job_context: GenerationContext) -> None: await generate_configuration_file(self._app) await generate_dockerfile_file(self._app) diff --git a/betty/extension/nginx/serve.py b/betty/extension/nginx/serve.py index 5c0f1195b..5288e4612 100644 --- a/betty/extension/nginx/serve.py +++ b/betty/extension/nginx/serve.py @@ -5,7 +5,6 @@ from pathlib import Path from typing import Any -import dill import docker from aiofiles.tempfile import TemporaryDirectory, AiofilesContextManagerTempDir from docker.errors import DockerException @@ -13,21 +12,27 @@ from betty.app import App from betty.extension.nginx.artifact import generate_dockerfile_file, generate_configuration_file from betty.extension.nginx.docker import Container +from betty.project import Project from betty.serve import NoPublicUrlBecauseServerNotStartedError, AppServer class DockerizedNginxServer(AppServer): def __init__(self, app: App) -> None: - super().__init__( - # Create a new app so we can modify it later. 
- dill.loads(dill.dumps(app)) - ) + from betty.extension import Nginx + + project = Project(ancestry=app.project.ancestry) + project.configuration.autowrite = False + project.configuration.configuration_file_path = app.project.configuration.configuration_file_path + project.configuration.update(app.project.configuration) + project.configuration.debug = True + app = App(app.configuration, project) + # Work around https://github.com/bartfeenstra/betty/issues/1056. + app.extensions[Nginx].configuration.https = False + super().__init__(app) self._container: Container | None = None self._output_directory: AiofilesContextManagerTempDir[None, Any, Any] | None = None async def start(self) -> None: - from betty.extension import Nginx - await super().start() logging.getLogger(__name__).info('Starting a Dockerized nginx web server...') self._output_directory = TemporaryDirectory() @@ -36,10 +41,6 @@ async def start(self) -> None: docker_directory_path = Path(output_directory_name) dockerfile_file_path = docker_directory_path / 'Dockerfile' - self._app.project.configuration.debug = True - # Work around https://github.com/bartfeenstra/betty/issues/1056. 
- self._app.extensions[Nginx].configuration.https = False - await generate_configuration_file( self._app, destination_file_path=nginx_configuration_file_path, diff --git a/betty/extension/trees/__init__.py b/betty/extension/trees/__init__.py index bead7c731..d82d43c93 100644 --- a/betty/extension/trees/__init__.py +++ b/betty/extension/trees/__init__.py @@ -36,7 +36,7 @@ async def _copy_npm_build(self, source_directory_path: Path, destination_directo def npm_cache_scope(cls) -> CacheScope: return CacheScope.BETTY - async def generate(self, task_context: GenerationContext) -> None: + async def generate(self, job_context: GenerationContext) -> None: assets_directory_path = await self.app.extensions[_Npm].ensure_assets(self) await self._copy_npm_build(assets_directory_path, self.app.project.configuration.www_directory_path) diff --git a/betty/generate.py b/betty/generate.py index f59a68592..5cbae72f5 100644 --- a/betty/generate.py +++ b/betty/generate.py @@ -6,26 +6,20 @@ import asyncio import json import logging -import multiprocessing import os -import queue import shutil -import threading -from concurrent.futures import ProcessPoolExecutor, Executor, Future, as_completed +from asyncio import create_task, Task, as_completed, Semaphore, CancelledError, sleep +from collections.abc import AsyncIterator from contextlib import suppress -from ctypes import c_char_p -from multiprocessing.managers import ValueProxy +from math import floor from pathlib import Path -from types import TracebackType -from typing import cast, AsyncContextManager, Self, Any, ParamSpec, Callable, Concatenate, MutableSequence +from typing import cast, AsyncContextManager, ParamSpec, Callable, Awaitable, Sequence import aiofiles -import dill from aiofiles.os import makedirs from aiofiles.threadpool.text import AsyncTextIOWrapper from betty.app import App -from betty.asyncio import sync, gather from betty.json.linked_data import LinkedDataDumpable from betty.json.schema import Schema from 
betty.locale import get_display_name @@ -34,9 +28,7 @@ from betty.openapi import Specification from betty.serde.dump import DictDump, Dump from betty.string import camel_case_to_kebab_case, camel_case_to_snake_case, upper_camel_case_to_lower_camel_case -from betty.task import Context - -_GenerationProcessPoolTaskP = ParamSpec('_GenerationProcessPoolTaskP') +from betty.job import Context def getLogger() -> logging.Logger: @@ -47,123 +39,26 @@ def getLogger() -> logging.Logger: class Generator: - async def generate(self, task_context: GenerationContext) -> None: + async def generate(self, job_context: GenerationContext) -> None: raise NotImplementedError(repr(self)) class GenerationContext(Context): def __init__(self, app: App): super().__init__() - self._pickled_app = multiprocessing.Manager().Value(c_char_p, dill.dumps(app)) - self._unpickle_app_lock: threading.Lock = multiprocessing.Manager().Lock() - self._app: App | None = None - - def __getstate__(self) -> tuple[threading.Lock, MutableSequence[str], ValueProxy[bytes]]: - return self._claims_lock, self._claimed_task_ids, self._pickled_app - - def __setstate__(self, state: tuple[threading.Lock, MutableSequence[str], ValueProxy[bytes]]) -> None: - self._claims_lock, self._claimed_task_ids, self._pickled_app = state - self._unpickle_app_lock = multiprocessing.Manager().Lock() - self._app = None + self._app = app @property def app(self) -> App: - with self._unpickle_app_lock: - if self._app is None: - self._app = cast(App, dill.loads(self._pickled_app.value)) return self._app -class _GenerationProcessPool: - def __init__(self, app: App, task_context: GenerationContext): - self._app = app - self._task_context = task_context - self._queue = multiprocessing.Manager().Queue() - self._cancel = multiprocessing.Manager().Event() - self._finish = multiprocessing.Manager().Event() - self._executor: Executor | None = None - self._workers: list[Future[None]] = [] - - async def __aenter__(self) -> Self: - self._executor = 
ProcessPoolExecutor(max_workers=self._app.concurrency) - for _ in range(0, self._app.concurrency): - self._workers.append(self._executor.submit(_GenerationProcessPoolWorker( - self._queue, - self._cancel, - self._finish, - self._app.concurrency, - self._task_context, - ))) - return self - - async def __aexit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None) -> None: - assert self._executor is not None - if exc_val is None: - self._finish.set() - else: - self._cancel.set() - try: - for worker in as_completed(self._workers): - worker.result() - except BaseException: - self._cancel.set() - raise - finally: - self._executor.shutdown() - - def delegate( - self, - task_callable: Callable[Concatenate[GenerationContext, _GenerationProcessPoolTaskP], Any], - *task_args: _GenerationProcessPoolTaskP.args, - **task_kwargs: _GenerationProcessPoolTaskP.kwargs, - ) -> None: - self._queue.put((task_callable, task_args, task_kwargs)) - - -class _GenerationProcessPoolWorker: - def __init__( - self, - task_queue: queue.Queue[tuple[Callable[Concatenate[GenerationContext, _GenerationProcessPoolTaskP], Any], _GenerationProcessPoolTaskP.args, _GenerationProcessPoolTaskP.kwargs]], - cancel: threading.Event, - finish: threading.Event, - async_concurrency: int, - task_context: GenerationContext, - ): - self._task_queue = task_queue - self._cancel = cancel - self._finish = finish - self._async_concurrency = async_concurrency - self._context = task_context - - @sync - async def __call__(self) -> None: - async with self._context.app: - await gather(*( - self._perform_tasks() - for _ in range(0, self._async_concurrency) - )) - - async def _perform_tasks(self) -> None: - while not self._cancel.is_set(): - try: - task_callable, task_args, task_kwargs = self._task_queue.get_nowait() - except queue.Empty: - if self._finish.is_set(): - return - else: - await task_callable( - self._context, - *task_args, - **task_kwargs, - ) - - async def 
generate(app: App) -> None: """ Generate a new site. """ logger = getLogger() - task_context = GenerationContext(app) + job_context = GenerationContext(app) with suppress(FileNotFoundError): await asyncio.to_thread(shutil.rmtree, app.project.configuration.output_directory_path) @@ -172,45 +67,14 @@ async def generate(app: App) -> None: # The static public assets may be overridden depending on the number of locales rendered, so ensure they are # generated before anything else. - await _generate_static_public(app, task_context) + await _generate_static_public(app, job_context) - locales = app.project.configuration.locales - - async with _GenerationProcessPool(app, task_context) as process_pool: - process_pool.delegate(_generate_dispatch) - process_pool.delegate(_generate_sitemap) - process_pool.delegate(_generate_json_schema) - process_pool.delegate(_generate_openapi) - - for locale in locales: - process_pool.delegate(_generate_public, locale) - - for entity_type in app.entity_types: - if not issubclass(entity_type, UserFacingEntity): - continue - if app.project.configuration.entity_types[entity_type].generate_html_list: - for locale in locales: - process_pool.delegate(_generate_entity_type_list_html, locale, entity_type) - process_pool.delegate(_generate_entity_type_list_json, entity_type) - for entity in app.project.ancestry[entity_type]: - if isinstance(entity.id, GeneratedEntityId): - continue - - process_pool.delegate(_generate_entity_json, entity_type, entity.id) - if is_public(entity): - for locale in locales: - process_pool.delegate(_generate_entity_html, locale, entity_type, entity.id) - - # Log the generated pages. 
- for locale in app.project.configuration.locales: - locale_label = get_display_name(locale, app.localizer.locale) - for entity_type in app.entity_types: - if issubclass(entity_type, UserFacingEntity): - logger.info(app.localizer._('Generated pages for {count} {entity_type} in {locale}.').format( - count=len(app.project.ancestry[entity_type]), - entity_type=entity_type.entity_type_label_plural().localize(app.localizer), - locale=locale_label, - )) + jobs = [job async for job in _run_jobs(app, job_context)] + log_job = create_task(_log_jobs_forever(app, jobs)) + for completed_job in as_completed(jobs): + await completed_job + log_job.cancel() + await _log_jobs(app, jobs) os.chmod(app.project.configuration.output_directory_path, 0o755) for directory_path_str, subdirectory_names, file_names in os.walk(app.project.configuration.output_directory_path): @@ -221,6 +85,63 @@ async def generate(app: App) -> None: os.chmod(directory_path / file_name, 0o644) +async def _log_jobs(app: App, jobs: Sequence[Task[None]]) -> None: + total_job_count = len(jobs) + completed_job_count = len([job for job in jobs if job.done()]) + getLogger().info(app.localizer._( + 'Generated {completed_job_count} out of {total_job_count} items ({completed_job_percentage}%).').format( + completed_job_count=completed_job_count, + total_job_count=total_job_count, + completed_job_percentage=floor(completed_job_count / (total_job_count / 100)), + )) + + +async def _log_jobs_forever(app: App, jobs: Sequence[Task[None]]) -> None: + with suppress(CancelledError): + while True: + await sleep(5) + await _log_jobs(app, jobs) + + +_JobP = ParamSpec('_JobP') + + +def _run_job(semaphore: Semaphore, f: Callable[_JobP, Awaitable[None]], *args: _JobP.args, **kwargs: _JobP.kwargs) -> Task[None]: + async def _job(): + async with semaphore: + await f(*args, **kwargs) + return create_task(_job()) + + +async def _run_jobs(app: App, job_context: GenerationContext) -> AsyncIterator[Task[None]]: + semaphore = Semaphore(512) + 
yield _run_job(semaphore, _generate_dispatch, job_context) + yield _run_job(semaphore, _generate_sitemap, job_context) + yield _run_job(semaphore, _generate_json_schema, job_context) + yield _run_job(semaphore, _generate_openapi, job_context) + + locales = app.project.configuration.locales + + for locale in locales: + yield _run_job(semaphore, _generate_public, job_context, locale) + + for entity_type in app.entity_types: + if not issubclass(entity_type, UserFacingEntity): + continue + if app.project.configuration.entity_types[entity_type].generate_html_list: + for locale in locales: + yield _run_job(semaphore, _generate_entity_type_list_html, job_context, locale, entity_type) + yield _run_job(semaphore, _generate_entity_type_list_json, job_context, entity_type) + for entity in app.project.ancestry[entity_type]: + if isinstance(entity.id, GeneratedEntityId): + continue + + yield _run_job(semaphore, _generate_entity_json, job_context, entity_type, entity.id) + if is_public(entity): + for locale in locales: + yield _run_job(semaphore, _generate_entity_html, job_context, locale, entity_type, entity.id) + + async def create_file(path: Path) -> AsyncContextManager[AsyncTextIOWrapper]: """ Create the file for a resource. 
@@ -244,47 +165,47 @@ async def create_json_resource(path: Path) -> AsyncContextManager[AsyncTextIOWra async def _generate_dispatch( - task_context: GenerationContext, + job_context: GenerationContext, ) -> None: - app = task_context.app - await app.dispatcher.dispatch(Generator)(task_context), + app = job_context.app + await app.dispatcher.dispatch(Generator)(job_context), async def _generate_public( - task_context: GenerationContext, + job_context: GenerationContext, locale: str, ) -> None: - app = task_context.app + app = job_context.app locale_label = get_display_name(locale, app.localizer.locale) - getLogger().info(app.localizer._('Generating localized public files in {locale}...').format( + getLogger().debug(app.localizer._('Generating localized public files in {locale}...').format( locale=locale_label, )) async for file_path in app.assets.copytree(Path('public') / 'localized', app.project.configuration.localize_www_directory_path(locale)): await app.renderer.render_file( file_path, - task_context=task_context, + job_context=job_context, localizer=app.localizers[locale], ) async def _generate_static_public( app: App, - task_context: Context, + job_context: Context, ) -> None: getLogger().info(app.localizer._('Generating static public files...')) async for file_path in app.assets.copytree(Path('public') / 'static', app.project.configuration.www_directory_path): await app.renderer.render_file( file_path, - task_context=task_context, + job_context=job_context, ) async def _generate_entity_type_list_html( - task_context: GenerationContext, + job_context: GenerationContext, locale: str, entity_type: type[Entity], ) -> None: - app = task_context.app + app = job_context.app entity_type_name_fs = camel_case_to_kebab_case(get_entity_type_name(entity_type)) entity_type_path = app.project.configuration.localize_www_directory_path(locale) / entity_type_name_fs template = app.jinja2_environment.select_template([ @@ -292,7 +213,7 @@ async def 
_generate_entity_type_list_html( 'entity/page-list.html.j2', ]) rendered_html = await template.render_async( - task_context=task_context, + job_context=job_context, localizer=app.localizers[locale], page_resource=f'/{entity_type_name_fs}/index.html', entity_type=entity_type, @@ -300,18 +221,13 @@ async def _generate_entity_type_list_html( ) async with await create_html_resource(entity_type_path) as f: await f.write(rendered_html) - locale_label = get_display_name(locale, app.localizer.locale) - getLogger().info(app.localizer._('Generated the listing page for {entity_type} in {locale}.').format( - entity_type=entity_type.entity_type_label_plural().localize(app.localizer), - locale=locale_label, - )) async def _generate_entity_type_list_json( - task_context: GenerationContext, + job_context: GenerationContext, entity_type: type[Entity & LinkedDataDumpable], ) -> None: - app = task_context.app + app = job_context.app entity_type_name = get_entity_type_name(entity_type) entity_type_name_fs = camel_case_to_kebab_case(get_entity_type_name(entity_type)) entity_type_path = app.project.configuration.www_directory_path / entity_type_name_fs @@ -332,12 +248,12 @@ async def _generate_entity_type_list_json( async def _generate_entity_html( - task_context: GenerationContext, + job_context: GenerationContext, locale: str, entity_type: type[Entity], entity_id: str, ) -> None: - app = task_context.app + app = job_context.app entity = app.project.ancestry[entity_type][entity_id] entity_type_name_fs = camel_case_to_kebab_case(get_entity_type_name(entity)) entity_path = app.project.configuration.localize_www_directory_path(locale) / entity_type_name_fs / entity.id @@ -345,7 +261,7 @@ async def _generate_entity_html( f'entity/page--{entity_type_name_fs}.html.j2', 'entity/page.html.j2', ]).render_async( - task_context=task_context, + job_context=job_context, localizer=app.localizers[locale], page_resource=entity, entity_type=entity.type, @@ -356,11 +272,11 @@ async def 
_generate_entity_html( async def _generate_entity_json( - task_context: GenerationContext, + job_context: GenerationContext, entity_type: type[Entity & LinkedDataDumpable], entity_id: str, ) -> None: - app = task_context.app + app = job_context.app entity_type_name_fs = camel_case_to_kebab_case(get_entity_type_name(entity_type)) entity_path = app.project.configuration.www_directory_path / entity_type_name_fs / entity_id entity = cast('Entity & LinkedDataDumpable', app.project.ancestry[entity_type][entity_id]) @@ -370,9 +286,9 @@ async def _generate_entity_json( async def _generate_sitemap( - task_context: GenerationContext, + job_context: GenerationContext, ) -> None: - app = task_context.app + app = job_context.app sitemap_template = app.jinja2_environment.get_template('sitemap.xml.j2') sitemaps = [] sitemap: list[str] = [] @@ -418,10 +334,10 @@ async def _generate_sitemap( async def _generate_json_schema( - task_context: GenerationContext, + job_context: GenerationContext, ) -> None: - app = task_context.app - getLogger().info(app.localizer._('Generating JSON Schema...')) + app = job_context.app + getLogger().debug(app.localizer._('Generating JSON Schema...')) schema = Schema(app) rendered_json = json.dumps(await schema.build()) async with await create_file(app.project.configuration.www_directory_path / 'schema.json') as f: @@ -429,10 +345,10 @@ async def _generate_json_schema( async def _generate_openapi( - task_context: GenerationContext, + job_context: GenerationContext, ) -> None: - app = task_context.app - getLogger().info(app.localizer._('Generating OpenAPI specification...')) + app = job_context.app + getLogger().debug(app.localizer._('Generating OpenAPI specification...')) api_directory_path = app.project.configuration.www_directory_path / 'api' rendered_json = json.dumps(await Specification(app).build()) async with await create_json_resource(api_directory_path) as f: diff --git a/betty/jinja2/__init__.py b/betty/jinja2/__init__.py index 
3a97ed612..9fb189b9b 100644 --- a/betty/jinja2/__init__.py +++ b/betty/jinja2/__init__.py @@ -15,11 +15,11 @@ Template as Jinja2Template from jinja2.runtime import StrictUndefined, Context, DebugUndefined, new_context -from betty import task from betty.app import App from betty.html import CssProvider, JsProvider from betty.jinja2.filter import FILTERS from betty.jinja2.test import TESTS +from betty.job import Context as JobContext from betty.locale import Date, Localizer, \ DEFAULT_LOCALIZER from betty.model import Entity, get_entity_type, \ @@ -39,12 +39,12 @@ def context_app(context: Context) -> App: return cast(Environment, context.environment).app -def context_task_context(context: Context) -> task.Context | None: +def context_job_context(context: Context) -> JobContext | None: """ - Get the current task context from the Jinja2 context. + Get the current job context from the Jinja2 context. """ - task_context = context.resolve_or_missing('task_context') - return task_context if isinstance(task_context, task.Context) else None + job_context = context.resolve_or_missing('job_context') + return job_context if isinstance(job_context, JobContext) else None def context_localizer(context: Context) -> Localizer: @@ -281,13 +281,13 @@ async def render_file( self, file_path: Path, *, - task_context: task.Context | None = None, + job_context: JobContext | None = None, localizer: Localizer | None = None, ) -> Path: destination_file_path = file_path.parent / file_path.stem data: dict[str, Any] = {} - if task_context is not None: - data['task_context'] = task_context + if job_context is not None: + data['job_context'] = job_context if localizer is not None: data['localizer'] = localizer try: diff --git a/betty/jinja2/filter.py b/betty/jinja2/filter.py index fc82f3802..20091cfde 100644 --- a/betty/jinja2/filter.py +++ b/betty/jinja2/filter.py @@ -197,12 +197,12 @@ async def filter_file(context: Context, file: File) -> str: :return: The public path to the preprocessed file. 
This can be used on a web page. """ - from betty.jinja2 import context_app, context_task_context + from betty.jinja2 import context_app, context_job_context app = context_app(context) - task_context = context_task_context(context) - task_id = f'filter_file:{file.id}' - if task_context is None or task_context.claim(task_id): + job_context = context_job_context(context) + job_id = f'filter_file:{file.id}' + if job_context is None or job_context.claim(job_id): file_destination_path = app.project.configuration.www_directory_path / 'file' / file.id / 'file' / file.path.name await _do_filter_file(file.path, file_destination_path) @@ -226,10 +226,10 @@ async def filter_image( :return: The public path to the preprocessed file. This can be embedded in a web page. """ - from betty.jinja2 import context_app, context_task_context + from betty.jinja2 import context_app, context_job_context app = context_app(context) - task_context = context_task_context(context) + job_context = context_job_context(context) destination_name = '%s-' % file.id if height and width: @@ -257,8 +257,8 @@ async def filter_image( else: raise ValueError('Cannot convert a file without a media type to an image.') - task_id = f'filter_image:{file.id}:{width or ""}:{height or ""}' - if task_context is None or task_context.claim(task_id): + job_id = f'filter_image:{file.id}:{width or ""}:{height or ""}' + if job_context is None or job_context.claim(job_id): cache_directory_path = CACHE_DIRECTORY_PATH / 'image' await task_callable(file, cache_directory_path, file_directory_path, destination_name, width, height) diff --git a/betty/job.py b/betty/job.py new file mode 100644 index 000000000..ec0099c13 --- /dev/null +++ b/betty/job.py @@ -0,0 +1,20 @@ +""" +Provide utilities for running jobs concurrently. 
+""" + +from __future__ import annotations + +import threading + + +class Context: + def __init__(self): + self._claims_lock = threading.Lock() + self._claimed_job_ids: set[str] = set() + + def claim(self, job_id: str) -> bool: + with self._claims_lock: + if job_id in self._claimed_job_ids: + return False + self._claimed_job_ids.add(job_id) + return True diff --git a/betty/model/__init__.py b/betty/model/__init__.py index 459df4aa6..268153f85 100644 --- a/betty/model/__init__.py +++ b/betty/model/__init__.py @@ -1061,46 +1061,6 @@ def add_association( self._associations[owner_type][owner_attr_name][owner_id].append((associate_type, associate_id)) -class PickleableEntityGraph(_EntityGraphBuilder): - def __init__(self, *entities: Entity) -> None: - super().__init__() - self._pickled = False - for entity in entities: - self._entities[entity.type][entity.id] = entity - - def __getstate__(self) -> tuple[_EntityGraphBuilderEntities, _EntityGraphBuilderAssociations]: - self._flatten() - return self._entities, self._associations - - def __setstate__(self, state: tuple[_EntityGraphBuilderEntities, _EntityGraphBuilderAssociations]) -> None: - self._entities, self._associations = state - self._built = False - self._pickled = False - - def _flatten(self) -> None: - if self._pickled: - raise RuntimeError('This entity graph has been pickled already.') - self._pickled = True - - for owner in self._iter(): - unaliased_entity = unalias(owner) - entity_type = unaliased_entity.type - - for association in EntityTypeAssociationRegistry.get_all_associations(entity_type): - associates: Iterable[Entity] - if isinstance(association, ToOneEntityTypeAssociation): - associate = association.get(unaliased_entity) - if associate is None: - continue - associates = [associate] - else: - associates = association.get(unaliased_entity) - for associate in associates: - self._associations[entity_type][association.owner_attr_name][owner.id].append( - (associate.type, associate.id), - ) - - 
@contextmanager def record_added(entities: EntityCollection[EntityT]) -> Iterator[MultipleTypesEntityCollection[EntityT]]: """ diff --git a/betty/model/ancestry.py b/betty/model/ancestry.py index d563ede9c..d5eeb14aa 100644 --- a/betty/model/ancestry.py +++ b/betty/model/ancestry.py @@ -6,7 +6,6 @@ from collections.abc import MutableSequence from contextlib import suppress from enum import Enum -from functools import partial from pathlib import Path from reprlib import recursive_repr from typing import Iterable, Any, TYPE_CHECKING @@ -21,7 +20,7 @@ from betty.media_type import MediaType from betty.model import many_to_many, Entity, one_to_many, many_to_one, many_to_one_to_many, \ MultipleTypesEntityCollection, EntityCollection, UserFacingEntity, EntityTypeAssociationRegistry, \ - PickleableEntityGraph, GeneratedEntityId, get_entity_type_name + GeneratedEntityId, get_entity_type_name from betty.model.event_type import EventType, UnknownEventType from betty.serde.dump import DictDump, Dump, dump_default from betty.string import camel_case_to_kebab_case @@ -419,17 +418,6 @@ def __init__( if entity is not None: self.entity = entity - def __reduce__(self) -> Any: - return ( - partial( - Note, - self.text, - id=self.id, - privacy=self.privacy, - ), - (), - ) - @classmethod def entity_type_label(cls) -> Str: return Str._('Note') @@ -583,20 +571,6 @@ def __init__( ) self._path = path - def __reduce__(self) -> Any: - return ( - partial( - File, - self.path, - id=self.id, - media_type=self.media_type, - description=self.description, - privacy=self.privacy, - links=self.links, - ), - (), - ) - @property def entities(self) -> EntityCollection[Entity]: # type: ignore[empty-body] pass @@ -719,21 +693,6 @@ def __init__( if contains is not None: self.contains = contains # type: ignore[assignment] - def __reduce__(self) -> Any: - return ( - partial( - Source, - self.name, - id=self.id, - author=self.author, - publisher=self.publisher, - date=self.date, - links=self.links, - 
privacy=self.privacy, - ), - (), - ) - def _get_effective_privacy(self) -> Privacy: privacy = super()._get_effective_privacy() if self.contained_by: @@ -877,18 +836,6 @@ def __init__( self.location = location self.source = source - def __reduce__(self) -> Any: - return ( - partial( - Citation, - id=self.id, - location=self.location, - date=self.date, - privacy=self.privacy, - ), - (), - ) - def _get_effective_privacy(self) -> Privacy: privacy = super()._get_effective_privacy() if self.source: @@ -1030,12 +977,6 @@ def __init__( self.encloses = encloses self.enclosed_by = enclosed_by - def __reduce__(self) -> Any: - return ( - Enclosure, - (), - ) - @classmethod def entity_type_label(cls) -> Str: return Str._('Enclosure') @@ -1079,19 +1020,6 @@ def __init__( if encloses is not None: self.encloses = encloses # type: ignore[assignment] - def __reduce__(self) -> Any: - return ( - partial( - Place, - id=self.id, - names=self.names, - coordinates=self._coordinates, - links=self.links, - privacy=self.privacy, - ), - (), - ) - @property def enclosed_by(self) -> EntityCollection[Enclosure]: # type: ignore[empty-body] pass @@ -1360,16 +1288,6 @@ def __init__( self.role = role self.event = event - def __reduce__(self) -> Any: - return ( - Presence, - ( - None, - self.role, - None, - ), - ) - @classmethod def entity_type_label(cls) -> Str: return Str._('Presence') @@ -1453,19 +1371,6 @@ def label(self) -> Str: else: return Str._('{event_type} ({event_description})', **format_kwargs) - def __reduce__(self) -> Any: - return ( - partial( - Event, - id=self.id, - event_type=self.event_type, - date=self.date, - privacy=self.privacy, - description=self.description, - ), - (), - ) - @recursive_repr() def __repr__(self) -> str: return repr_instance(self, id=self._id, type=self._event_type) @@ -1604,18 +1509,6 @@ def __init__( # individual and affiliation names. 
self.person = person - def __reduce__(self) -> Any: - return ( - partial( - PersonName, - id=self.id, - individual=self.individual, - affiliation=self.affiliation, - privacy=self.privacy, - ), - (), - ) - def _get_effective_privacy(self) -> Privacy: privacy = super()._get_effective_privacy() if self.person: @@ -1710,17 +1603,6 @@ def __init__( if names is not None: self.names = names # type: ignore[assignment] - def __reduce__(self) -> Any: - return ( - partial( - Person, - id=self.id, - links=self.links, - privacy=self.privacy, - ), - (), - ) - @property def parents(self) -> EntityCollection[Person]: # type: ignore[empty-body] pass @@ -1916,13 +1798,6 @@ def __init__(self): super().__init__() self._check_graph = True - def __getstate__(self) -> PickleableEntityGraph: - return PickleableEntityGraph(*self) - - def __setstate__(self, state: PickleableEntityGraph) -> None: - self._collections = {} - self.add_unchecked_graph(*state.build()) - def add_unchecked_graph(self, *entities: Entity) -> None: self._check_graph = False try: diff --git a/betty/project.py b/betty/project.py index 03c3575ea..14efb2b8f 100644 --- a/betty/project.py +++ b/betty/project.py @@ -789,19 +789,12 @@ def __init__( self, *, project_id: str | None = None, + ancestry: Ancestry | None = None, ): super().__init__() self._id = project_id self._configuration = ProjectConfiguration() - self._ancestry = Ancestry() - - def __getstate__(self) -> tuple[str | None, VoidableDump, Path, Ancestry]: - return self._id, self._configuration.dump(), self._configuration.configuration_file_path, self._ancestry - - def __setstate__(self, state: tuple[str | None, Dump, Path, Ancestry]) -> None: - self._id, dump, configuration_file_path, self._ancestry = state - self._configuration = ProjectConfiguration.load(dump) - self._configuration.configuration_file_path = configuration_file_path + self._ancestry = ancestry or Ancestry() @property def id(self) -> str: diff --git a/betty/render.py b/betty/render.py index 
9814de579..8a6cfc8c4 100644 --- a/betty/render.py +++ b/betty/render.py @@ -4,7 +4,7 @@ from pathlib import Path from betty.locale import Localizer -from betty.task import Context +from betty.job import Context class Renderer: @@ -16,7 +16,7 @@ async def render_file( self, file_path: Path, *, - task_context: Context | None = None, + job_context: Context | None = None, localizer: Localizer | None = None, ) -> Path: raise NotImplementedError(repr(self)) @@ -41,14 +41,14 @@ async def render_file( self, file_path: Path, *, - task_context: Context | None = None, + job_context: Context | None = None, localizer: Localizer | None = None, ) -> Path: for renderer in self._renderers: if file_path.suffix in renderer.file_extensions: return await self.render_file(await renderer.render_file( file_path, - task_context=task_context, + job_context=job_context, localizer=localizer, )) return file_path diff --git a/betty/task.py b/betty/task.py deleted file mode 100644 index 56604bcb6..000000000 --- a/betty/task.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Provide utilities for running tasks concurrently. 
-""" - -from __future__ import annotations - -import multiprocessing -from collections.abc import MutableSequence - - -class Context: - def __init__(self): - self._claims_lock = multiprocessing.Manager().Lock() - self._claimed_task_ids: MutableSequence[str] = multiprocessing.Manager().list() - - def claim(self, task_id: str) -> bool: - with self._claims_lock: - if task_id in self._claimed_task_ids: - return False - self._claimed_task_ids.append(task_id) - return True diff --git a/betty/tests/extension/cotton_candy/test_search.py b/betty/tests/extension/cotton_candy/test_search.py index 8bad9e094..2cb4dbc7a 100644 --- a/betty/tests/extension/cotton_candy/test_search.py +++ b/betty/tests/extension/cotton_candy/test_search.py @@ -8,7 +8,7 @@ from betty.locale import DEFAULT_LOCALIZER from betty.model.ancestry import Person, Place, PlaceName, PersonName, File from betty.project import LocaleConfiguration -from betty.task import Context +from betty.job import Context class TestIndex: diff --git a/betty/tests/extension/wikipedia/test___init__.py b/betty/tests/extension/wikipedia/test___init__.py index c473da341..cf1693762 100644 --- a/betty/tests/extension/wikipedia/test___init__.py +++ b/betty/tests/extension/wikipedia/test___init__.py @@ -7,7 +7,7 @@ from betty.load import load from betty.model.ancestry import Link from betty.project import ExtensionConfiguration -from betty.task import Context +from betty.job import Context from betty.tests import patch_cache from betty.wikipedia import Summary @@ -37,7 +37,7 @@ async def test_filter(self, mocker: MockerFixture) -> None: app.project.configuration.extensions.append(ExtensionConfiguration(Wikipedia)) actual = await app.jinja2_environment.from_string( '{% for entry in (links | wikipedia) %}{{ entry.content }}{% endfor %}').render_async( - task_context=Context(), + job_context=Context(), links=links, ) diff --git a/betty/tests/model/test___init__.py b/betty/tests/model/test___init__.py index 733beca8e..de04427cb 100644 
--- a/betty/tests/model/test___init__.py +++ b/betty/tests/model/test___init__.py @@ -2,14 +2,13 @@ from typing import Any, Iterator -import dill import pytest from betty.model import get_entity_type_name, Entity, get_entity_type, ToAny, \ EntityTypeAssociationRegistry, SingleTypeEntityCollection, MultipleTypesEntityCollection, \ one_to_many, many_to_one_to_many, many_to_many, \ EntityCollection, to_many, many_to_one, to_one, one_to_one, EntityTypeImportError, ToOne, \ - PickleableEntityGraph, EntityGraphBuilder, AliasableEntity, AliasedEntity, unalias + EntityGraphBuilder, AliasableEntity, AliasedEntity, unalias from betty.model.ancestry import Person @@ -833,137 +832,6 @@ async def test_build_many_to_one_to_many( assert unaliased_many_to_one_to_many_middle in unaliased_many_to_one_to_many_right.to_many -class TestPickleableEntityGraph: - def assert_entity(self, left: Entity | None, right: Entity | None): - assert left is not None - assert right is not None - assert left.type is right.type - assert left.id == right.id - - async def test_pickle_to_one(self) -> None: - to_one_left = _EntityGraphBuilder_ToOne_Left() - to_one_right = _EntityGraphBuilder_ToOne_Right() - to_one_left.to_one = to_one_right - - sut = PickleableEntityGraph(to_one_left, to_one_right) - - unpickled_entities = MultipleTypesEntityCollection[Entity]() - unpickled_entities.add(*dill.loads(dill.dumps(sut)).build()) - - assert to_one_left is not unpickled_entities[_EntityGraphBuilder_ToOne_Left][to_one_left.id] - self.assert_entity(to_one_left, unpickled_entities[_EntityGraphBuilder_ToOne_Left][to_one_left.id]) - assert to_one_right is not unpickled_entities[_EntityGraphBuilder_ToOne_Right][to_one_right.id] - self.assert_entity(to_one_right, unpickled_entities[_EntityGraphBuilder_ToOne_Right][to_one_right.id]) - self.assert_entity(to_one_right, to_one_left.to_one) - - async def test_pickle_one_to_one(self) -> None: - one_to_one_left = _EntityGraphBuilder_OneToOne_Left() - one_to_one_right = 
_EntityGraphBuilder_OneToOne_Right() - one_to_one_left.to_one = one_to_one_right - - sut = PickleableEntityGraph(one_to_one_left, one_to_one_right) - - unpickled_entities = MultipleTypesEntityCollection[Entity]() - unpickled_entities.add(*dill.loads(dill.dumps(sut)).build()) - - assert one_to_one_left is not unpickled_entities[_EntityGraphBuilder_OneToOne_Left][one_to_one_left.id] - self.assert_entity(one_to_one_left, unpickled_entities[_EntityGraphBuilder_OneToOne_Left][one_to_one_left.id]) - assert one_to_one_right is not unpickled_entities[_EntityGraphBuilder_OneToOne_Right][one_to_one_right.id] - self.assert_entity(one_to_one_right, unpickled_entities[_EntityGraphBuilder_OneToOne_Right][one_to_one_right.id]) - self.assert_entity(one_to_one_right, one_to_one_left.to_one) - self.assert_entity(one_to_one_left, one_to_one_right.to_one) - - async def test_pickle_many_to_one(self) -> None: - many_to_one_left = _EntityGraphBuilder_ManyToOne_Left() - many_to_one_right = _EntityGraphBuilder_ManyToOne_Right() - many_to_one_left.to_one = many_to_one_right - - sut = PickleableEntityGraph(many_to_one_left, many_to_one_right) - - unpickled_entities = MultipleTypesEntityCollection[Entity]() - unpickled_entities.add(*dill.loads(dill.dumps(sut)).build()) - - assert many_to_one_left is not unpickled_entities[_EntityGraphBuilder_ManyToOne_Left][many_to_one_left.id] - self.assert_entity(many_to_one_left, unpickled_entities[_EntityGraphBuilder_ManyToOne_Left][many_to_one_left.id]) - assert many_to_one_right is not unpickled_entities[_EntityGraphBuilder_ManyToOne_Right][many_to_one_right.id] - self.assert_entity(many_to_one_right, unpickled_entities[_EntityGraphBuilder_ManyToOne_Right][many_to_one_right.id]) - self.assert_entity(many_to_one_right, many_to_one_left.to_one) - assert many_to_one_left in many_to_one_right.to_many - - async def test_pickle_to_many(self) -> None: - to_many_left = _EntityGraphBuilder_ToMany_Left() - to_many_right = _EntityGraphBuilder_ToMany_Right() - 
to_many_left.to_many = [to_many_right] # type: ignore[assignment] - - sut = PickleableEntityGraph(to_many_left, to_many_right) - - unpickled_entities = MultipleTypesEntityCollection[Entity]() - unpickled_entities.add(*dill.loads(dill.dumps(sut)).build()) - - assert to_many_left is not unpickled_entities[_EntityGraphBuilder_ToMany_Left][to_many_left.id] - self.assert_entity(to_many_left, unpickled_entities[_EntityGraphBuilder_ToMany_Left][to_many_left.id]) - assert to_many_right is not unpickled_entities[_EntityGraphBuilder_ToMany_Right][to_many_right.id] - self.assert_entity(to_many_right, unpickled_entities[_EntityGraphBuilder_ToMany_Right][to_many_right.id]) - assert to_many_right in to_many_left.to_many - - async def test_pickle_one_to_many(self) -> None: - one_to_many_left = _EntityGraphBuilder_OneToMany_Left() - one_to_many_right = _EntityGraphBuilder_OneToMany_Right() - one_to_many_left.to_many = [one_to_many_right] # type: ignore[assignment] - - sut = PickleableEntityGraph(one_to_many_left, one_to_many_right) - - unpickled_entities = MultipleTypesEntityCollection[Entity]() - unpickled_entities.add(*dill.loads(dill.dumps(sut)).build()) - - assert one_to_many_left is not unpickled_entities[_EntityGraphBuilder_OneToMany_Left][one_to_many_left.id] - self.assert_entity(one_to_many_left, unpickled_entities[_EntityGraphBuilder_OneToMany_Left][one_to_many_left.id]) - assert one_to_many_right is not unpickled_entities[_EntityGraphBuilder_OneToMany_Right][one_to_many_right.id] - self.assert_entity(one_to_many_right, unpickled_entities[_EntityGraphBuilder_OneToMany_Right][one_to_many_right.id]) - assert one_to_many_right in one_to_many_left.to_many - self.assert_entity(one_to_many_left, one_to_many_right.to_one) - - async def test_pickle_many_to_many(self) -> None: - many_to_many_left = _EntityGraphBuilder_ManyToMany_Left() - many_to_many_right = _EntityGraphBuilder_ManyToMany_Right() - many_to_many_left.to_many = [many_to_many_right] # type: ignore[assignment] - - sut 
= PickleableEntityGraph(many_to_many_left, many_to_many_right) - - unpickled_entities = MultipleTypesEntityCollection[Entity]() - unpickled_entities.add(*dill.loads(dill.dumps(sut)).build()) - - assert many_to_many_left is not unpickled_entities[_EntityGraphBuilder_ManyToMany_Left][many_to_many_left.id] - self.assert_entity(many_to_many_left, unpickled_entities[_EntityGraphBuilder_ManyToMany_Left][many_to_many_left.id]) - assert many_to_many_right is not unpickled_entities[_EntityGraphBuilder_ManyToMany_Right][many_to_many_right.id] - self.assert_entity(many_to_many_right, unpickled_entities[_EntityGraphBuilder_ManyToMany_Right][many_to_many_right.id]) - assert many_to_many_right in many_to_many_left.to_many - assert many_to_many_left in many_to_many_right.to_many - - async def test_pickle_many_to_one_to_many(self) -> None: - many_to_one_to_many_left = _EntityGraphBuilder_ManyToOneToMany_Left() - many_to_one_to_many_middle = _EntityGraphBuilder_ManyToOneToMany_Middle() - many_to_one_to_many_right = _EntityGraphBuilder_ManyToOneToMany_Right() - many_to_one_to_many_left.to_many = [many_to_one_to_many_middle] # type: ignore[assignment] - many_to_one_to_many_right.to_many = [many_to_one_to_many_middle] # type: ignore[assignment] - - sut = PickleableEntityGraph(many_to_one_to_many_left, many_to_one_to_many_middle, many_to_one_to_many_right) - - unpickled_entities = MultipleTypesEntityCollection[Entity]() - unpickled_entities.add(*dill.loads(dill.dumps(sut)).build()) - - assert many_to_one_to_many_left is not unpickled_entities[_EntityGraphBuilder_ManyToOneToMany_Left][many_to_one_to_many_left.id] - self.assert_entity(many_to_one_to_many_left, unpickled_entities[_EntityGraphBuilder_ManyToOneToMany_Left][many_to_one_to_many_left.id]) - assert many_to_one_to_many_middle is not unpickled_entities[_EntityGraphBuilder_ManyToOneToMany_Middle][many_to_one_to_many_middle.id] - self.assert_entity(many_to_one_to_many_middle, 
unpickled_entities[_EntityGraphBuilder_ManyToOneToMany_Middle][many_to_one_to_many_middle.id]) - assert many_to_one_to_many_right is not unpickled_entities[_EntityGraphBuilder_ManyToOneToMany_Right][many_to_one_to_many_right.id] - self.assert_entity(many_to_one_to_many_right, unpickled_entities[_EntityGraphBuilder_ManyToOneToMany_Right][many_to_one_to_many_right.id]) - assert many_to_one_to_many_middle in many_to_one_to_many_left.to_many - self.assert_entity(many_to_one_to_many_left, many_to_one_to_many_middle.to_one_left) - self.assert_entity(many_to_one_to_many_right, many_to_one_to_many_middle.to_one_right) - assert many_to_one_to_many_middle in many_to_one_to_many_right.to_many - - @to_one( 'one', 'betty.tests.model.test___init__._TestToOne_One', diff --git a/betty/tests/model/test_ancestry.py b/betty/tests/model/test_ancestry.py index 76eed5616..78c5c1a40 100644 --- a/betty/tests/model/test_ancestry.py +++ b/betty/tests/model/test_ancestry.py @@ -5,7 +5,6 @@ from tempfile import NamedTemporaryFile from typing import Any -import dill import pytest from geopy import Point @@ -2064,17 +2063,6 @@ class _TestAncestry_OneToOne_Right(Entity): class TestAncestry: - async def test_pickle(self) -> None: - sut = Ancestry() - left = _TestAncestry_OneToOne_Left() - right = _TestAncestry_OneToOne_Right() - left.one_right = right - sut.add(left) - unpickled_sut = dill.loads(dill.dumps(sut)) - assert 2 == len(unpickled_sut) - assert left.id == unpickled_sut[_TestAncestry_OneToOne_Left][0].id - assert right.id == unpickled_sut[_TestAncestry_OneToOne_Right][0].id - async def test_add_(self) -> None: sut = Ancestry() left = _TestAncestry_OneToOne_Left() diff --git a/betty/tests/test_job.py b/betty/tests/test_job.py new file mode 100644 index 000000000..376ce9d7d --- /dev/null +++ b/betty/tests/test_job.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from betty.job import Context + + +class TestContext: + async def test_claim(self) -> None: + sut = Context() + job_ids 
= ('job ID 1', 'job ID 2', 'job ID 3') + for job_id in job_ids: + assert sut.claim(job_id) + assert not sut.claim(job_id) diff --git a/betty/tests/test_task.py b/betty/tests/test_task.py deleted file mode 100644 index ce57ea101..000000000 --- a/betty/tests/test_task.py +++ /dev/null @@ -1,12 +0,0 @@ -from __future__ import annotations - -from betty.task import Context - - -class TestBatch: - async def test_claim(self) -> None: - sut = Context() - task_ids = ('task ID 1', 'task ID 2', 'task ID 3') - for task_id in task_ids: - assert sut.claim(task_id) - assert not sut.claim(task_id) diff --git a/mypy.ini b/mypy.ini index 4ebcaec4d..30e6365a2 100644 --- a/mypy.ini +++ b/mypy.ini @@ -20,9 +20,6 @@ ignore_missing_imports = True [mypy-babel.*] ignore_missing_imports = True -[mypy-dill.*] -ignore_missing_imports = True - [mypy-docker.*] ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml index b8a949b8e..44de5ec02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,6 @@ dependencies = [ 'aiohttp ~= 3.9', 'babel ~= 2.12, >= 2.12.0', 'click ~= 8.1, >= 8.1.2', - 'dill ~= 0.3, >= 0.3.6', 'docker ~= 7.0, >= 7.0.0', 'geopy ~= 2.3, >= 2.3.0', 'jinja2 ~= 3.1, >= 3.1.1',