
Commit 059806e
meh
bartfeenstra committed Feb 23, 2024
1 parent b9dc53b commit 059806e
Showing 2 changed files with 6 additions and 24 deletions.
12 changes: 0 additions & 12 deletions betty/app/__init__.py
@@ -3,7 +3,6 @@
 from __future__ import annotations
 
 import operator
-import os as stdos
 import weakref
 from collections.abc import Callable
 from contextlib import suppress
@@ -341,17 +340,6 @@ def renderer(self) -> Renderer:
     def renderer(self) -> None:
         self._renderer = None
 
-    @property
-    def concurrency(self) -> int:
-        with suppress(KeyError):
-            return int(stdos.environ['BETTY_CONCURRENCY'])
-        # Assume that any machine that runs Betty has at least two CPU cores.
-        return stdos.cpu_count() or 2
-
-    @property
-    def async_concurrency(self) -> int:
-        return self.concurrency ** 2
-
     @property
     def http_client(self) -> aiohttp.ClientSession:
         if not self._http_client:
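For reference, the removed App.concurrency property resolved the concurrency level from the BETTY_CONCURRENCY environment variable and fell back to the machine's CPU count, and App.async_concurrency was simply the square of that value. A minimal standalone sketch of that lookup, under the assumption that an equivalent helper is wanted elsewhere (the name concurrency_from_environment is illustrative, not part of Betty's API):

import os


def concurrency_from_environment(variable: str = 'BETTY_CONCURRENCY') -> int:
    # Prefer an explicit override from the environment.
    try:
        return int(os.environ[variable])
    except KeyError:
        # Assume that any machine running the build has at least two CPU cores.
        return os.cpu_count() or 2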
18 changes: 6 additions & 12 deletions betty/generate.py
@@ -22,7 +22,7 @@
 from aiofiles.threadpool.text import AsyncTextIOWrapper
 
 from betty.app import App
-from betty.asyncio import sync, gather
+from betty.asyncio import sync
 from betty.json.linked_data import LinkedDataDumpable
 from betty.json.schema import Schema
 from betty.locale import get_display_name
@@ -36,6 +36,9 @@
 _GenerationWorkerPoolTaskP = ParamSpec('_GenerationWorkerPoolTaskP')
 
 
+_THREAD_CONCURRENCY = 16
+
+
 def getLogger() -> logging.Logger:
     """
     Get the site generation logger.
@@ -84,13 +87,12 @@ def __init__(self, app: App, task_context: GenerationContext):
         self._workers: list[Future[None]] = []
 
     async def __aenter__(self) -> Self:
-        self._executor = ThreadPoolExecutor(max_workers=self._app.concurrency)
-        for _ in range(0, self._app.concurrency):
+        self._executor = ThreadPoolExecutor(max_workers=_THREAD_CONCURRENCY)
+        for _ in range(0, _THREAD_CONCURRENCY):
             self._workers.append(self._executor.submit(_GenerationPoolWorker(
                 self._queue,
                 self._cancel,
                 self._finish,
-                self._app.concurrency,
                 self._task_context,
             )))
         return self
@@ -125,23 +127,15 @@ def __init__(
         task_queue: queue.Queue[_GenerationQueueItem],
         cancel: threading.Event,
         finish: threading.Event,
-        async_concurrency: int,
         task_context: GenerationContext,
     ):
         self._task_queue = task_queue
         self._cancel = cancel
         self._finish = finish
-        self._async_concurrency = async_concurrency
         self._context = task_context
 
     @sync
     async def __call__(self) -> None:
-        await gather(*(
-            self._perform_tasks()
-            for _ in range(0, self._async_concurrency)
-        ))
-
-    async def _perform_tasks(self) -> None:
         while not self._cancel.is_set():
             try:
                 item = self._task_queue.get_nowait()
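The net effect in betty/generate.py is that the generation worker pool no longer sizes itself from App.concurrency, and each thread no longer fans out into async_concurrency coroutines; instead, a fixed number of threads (_THREAD_CONCURRENCY = 16) each drain the shared task queue directly. A minimal sketch of that fixed-size pool pattern, with illustrative names (WORKER_COUNT, drain, run_all) rather than Betty's actual classes:

import queue
import threading
from concurrent.futures import ThreadPoolExecutor

WORKER_COUNT = 16


def drain(task_queue: queue.Queue, cancel: threading.Event) -> None:
    # Each worker pulls tasks until the queue is empty or cancellation is requested.
    while not cancel.is_set():
        try:
            task = task_queue.get_nowait()
        except queue.Empty:
            return
        task()


def run_all(tasks) -> None:
    task_queue: queue.Queue = queue.Queue()
    for task in tasks:
        task_queue.put(task)
    cancel = threading.Event()
    with ThreadPoolExecutor(max_workers=WORKER_COUNT) as executor:
        for _ in range(WORKER_COUNT):
            executor.submit(drain, task_queue, cancel)
    # Leaving the context manager waits for every worker to finish.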
