Allow for disabling of parallel processing of qmk find and qmk mass-compile. #22160

Merged: 14 commits, Oct 16, 2023
2 changes: 1 addition & 1 deletion lib/python/qmk/cli/find.py
@@ -23,7 +23,7 @@ def find(cli):
     if len(cli.args.filter) == 0 and len(cli.args.print) > 0:
         cli.log.warning('No filters supplied -- keymaps not parsed, unable to print requested values.')
 
-    targets = search_keymap_targets(cli.args.keymap, cli.args.filter, cli.args.print)
+    targets = search_keymap_targets([('all', cli.config.find.keymap)], cli.args.filter, cli.args.print)
     for keyboard, keymap, print_vals in targets:
         print(f'{keyboard}:{keymap}')
 
4 changes: 2 additions & 2 deletions lib/python/qmk/cli/mass_compile.py
@@ -97,6 +97,6 @@ def mass_compile(cli):
     if len(cli.args.builds) > 0:
         targets = search_make_targets(cli.args.builds, cli.args.filter)
     else:
-        targets = search_keymap_targets(cli.args.keymap, cli.args.filter)
+        targets = search_keymap_targets([('all', cli.config.mass_compile.keymap)], cli.args.filter)
 
-    return mass_compile_targets(targets, cli.args.clean, cli.args.dry_run, cli.args.no_temp, cli.args.parallel, cli.args.env)
+    return mass_compile_targets(targets, cli.args.clean, cli.args.dry_run, cli.config.mass_compile.no_temp, cli.config.mass_compile.parallel, cli.args.env)
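A note on the pattern above (it also applies to the find.py change): the switch from `cli.args.<key>` to `cli.config.<subcommand>.<key>` matters because, in milc, `cli.config` merges the parsed command-line flag with any value the user has stored in their QMK configuration file, while `cli.args` only ever reflects the parser. The snippet below is an illustration of that assumed lookup behaviour, not part of the diff; the `qmk config` invocations in the comments are the usual way such values get persisted.

from milc import cli

# Illustration only -- assumed milc lookup order: explicitly-passed command-line flag
# first, then any value saved with `qmk config`, then the argparse default.
no_temp = cli.config.mass_compile.no_temp    # may have been set via `qmk config mass_compile.no_temp=True`
parallel = cli.config.mass_compile.parallel  # may have been set via `qmk config mass_compile.parallel=8`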
30 changes: 14 additions & 16 deletions lib/python/qmk/search.py
@@ -4,12 +4,12 @@
 import functools
 import fnmatch
 import logging
-import multiprocessing
 import re
 from typing import List, Tuple
 from dotty_dict import dotty
 from milc import cli
 
+from qmk.util import parallel_map
 from qmk.info import keymap_json
 import qmk.keyboard
 import qmk.keymap
@@ -78,17 +78,16 @@ def _expand_keymap_target(keyboard: str, keymap: str, all_keyboards: List[str] =
         all_keyboards = qmk.keyboard.list_keyboards()
 
     if keyboard == 'all':
-        with multiprocessing.Pool() as pool:
-            if keymap == 'all':
-                cli.log.info('Retrieving list of all keyboards and keymaps...')
-                targets = []
-                for kb in pool.imap_unordered(_all_keymaps, all_keyboards):
-                    targets.extend(kb)
-                return targets
-            else:
-                cli.log.info(f'Retrieving list of keyboards with keymap "{keymap}"...')
-                keyboard_filter = functools.partial(_keymap_exists, keymap=keymap)
-                return [(kb, keymap) for kb in filter(lambda e: e is not None, pool.imap_unordered(keyboard_filter, all_keyboards))]
+        if keymap == 'all':
+            cli.log.info('Retrieving list of all keyboards and keymaps...')
+            targets = []
+            for kb in parallel_map(_all_keymaps, all_keyboards):
+                targets.extend(kb)
+            return targets
+        else:
+            cli.log.info(f'Retrieving list of keyboards with keymap "{keymap}"...')
+            keyboard_filter = functools.partial(_keymap_exists, keymap=keymap)
+            return [(kb, keymap) for kb in filter(lambda e: e is not None, parallel_map(keyboard_filter, all_keyboards))]
     else:
         if keymap == 'all':
             keyboard = qmk.keyboard.resolve_keyboard(keyboard)
@@ -117,8 +116,7 @@ def _filter_keymap_targets(target_list: List[Tuple[str, str]], filters: List[str
         targets = [(kb, km, {}) for kb, km in target_list]
     else:
         cli.log.info('Parsing data for all matching keyboard/keymap combinations...')
-        with multiprocessing.Pool() as pool:
-            valid_keymaps = [(e[0], e[1], dotty(e[2])) for e in pool.imap_unordered(_load_keymap_info, target_list)]
+        valid_keymaps = [(e[0], e[1], dotty(e[2])) for e in parallel_map(_load_keymap_info, target_list)]
 
     function_re = re.compile(r'^(?P<function>[a-zA-Z]+)\((?P<key>[a-zA-Z0-9_\.]+)(,\s*(?P<value>[^#]+))?\)$')
     equals_re = re.compile(r'^(?P<key>[a-zA-Z0-9_\.]+)\s*=\s*(?P<value>[^#]+)$')
@@ -179,10 +177,10 @@ def f(e):
     return targets
 
 
-def search_keymap_targets(keymap='default', filters: List[str] = [], print_vals: List[str] = []) -> List[Tuple[str, str, List[Tuple[str, str]]]]:
+def search_keymap_targets(targets: List[Tuple[str, str]] = [('all', 'default')], filters: List[str] = [], print_vals: List[str] = []) -> List[Tuple[str, str, List[Tuple[str, str]]]]:
     """Search for build targets matching the supplied criteria.
     """
-    return list(sorted(_filter_keymap_targets(expand_keymap_targets([('all', keymap)]), filters, print_vals), key=lambda e: (e[0], e[1])))
+    return list(sorted(_filter_keymap_targets(expand_keymap_targets(targets), filters, print_vals), key=lambda e: (e[0], e[1])))
 
 
 def search_make_targets(targets: List[str], filters: List[str] = [], print_vals: List[str] = []) -> List[Tuple[str, str, List[Tuple[str, str]]]]:
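For context on the signature change above: `search_keymap_targets()` now takes a list of `(keyboard, keymap)` tuples rather than a bare keymap name, with `'all'` expanding exactly as before. A minimal sketch of the new call shape, assuming it runs inside a working qmk CLI environment; the specific keyboard name is illustrative only.

from qmk.search import search_keymap_targets

# Old shape (removed above):  search_keymap_targets('default')
# New shape: explicit (keyboard, keymap) tuples; 'all' keeps the previous behaviour.
targets = search_keymap_targets([('all', 'default')])
# targets = search_keymap_targets([('planck/rev6', 'default')])  # illustrative single keyboard

for keyboard, keymap, print_vals in targets:
    print(f'{keyboard}:{keymap}')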
56 changes: 56 additions & 0 deletions lib/python/qmk/util.py
@@ -0,0 +1,56 @@
"""Utility functions.
"""
import contextlib
import multiprocessing

from milc import cli


@contextlib.contextmanager
def parallelize():
"""Returns a function that can be used in place of a map() call.

Attempts to use `mpire`, falling back to `multiprocessing` if it's not
available. If parallelization is not requested, returns the original map()
function.
"""

# Work out if we've already got a config value for parallel searching
if cli.config.user.parallel_search is None:
parallel_search = True
else:
parallel_search = cli.config.user.parallel_search

# Non-parallel searches use `map()`
if not parallel_search:
yield map
return

# Prefer mpire's `WorkerPool` if it's available
with contextlib.suppress(ImportError):
from mpire import WorkerPool
from mpire.utils import make_single_arguments
with WorkerPool() as pool:

def _worker(func, *args):
# Ensure we don't unpack tuples -- mpire's `WorkerPool` tries to do so normally so we tell it not to.
for r in pool.imap_unordered(func, make_single_arguments(*args, generator=False), progress_bar=True):
yield r

yield _worker
return

# Otherwise fall back to multiprocessing's `Pool`
with multiprocessing.Pool() as pool:
yield pool.imap_unordered


def parallel_map(*args, **kwargs):
"""Effectively runs `map()` but executes it in parallel if necessary.
"""
with parallelize() as map_fn:
# This needs to be enclosed in a `list()` as some implementations return
# a generator function, which means the scope of the pool is closed off
# before the results are returned. Returning a list ensures results are
# materialised before any worker pool is shut down.
return list(map_fn(*args, **kwargs))
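To make the new helper concrete, a hedged usage sketch follows, assuming it runs inside a working QMK CLI environment. `parallel_map()` is intended as a drop-in replacement for `list(map(...))`: with `user.parallel_search` unset or true it fans work out to a pool (mpire's `WorkerPool` if installed, otherwise `multiprocessing.Pool`), and when the user disables it, the `yield map` branch means call sites run sequentially without changing. The `_describe_target` worker, the keyboard names, and the `qmk config user.parallel_search=False` invocation mentioned below are illustrative assumptions, not part of this diff.

from qmk.util import parallel_map


def _describe_target(target):
    # Hypothetical per-item worker; in this PR the real workers are
    # _all_keymaps / _keymap_exists / _load_keymap_info in qmk/search.py.
    keyboard, keymap = target
    return f'{keyboard}:{keymap}'


if __name__ == '__main__':
    # Parallel by default; sequential when disabled, e.g. via something like
    # `qmk config user.parallel_search=False`. Result order is not guaranteed
    # when a pool is used, since both pool backends use imap_unordered().
    print(parallel_map(_describe_target, [('crkbd/rev1', 'default'), ('planck/rev6', 'via')]))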