Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add limits to reduction #43

Merged
merged 1 commit into from
Nov 15, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ disable=
too-many-arguments,
too-many-boolean-expressions,
too-many-branches,
too-many-instance-attributes,
too-many-locals,
unused-argument,
useless-option-value, # disables warning in recent pylint that does not check for no-self-use anymore

Expand Down
2 changes: 2 additions & 0 deletions picire/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,9 @@
from .cache import CacheRegistry
from .cli import __version__, reduce
from .dd import DD
from .exception import ReductionError, ReductionException, ReductionStopped
from .iterator import CombinedIterator, IteratorRegistry
from .limit_reduction import LimitReduction
from .outcome import Outcome
from .parallel_dd import ParallelDD
from .shared_cache import shared_cache_decorator
Expand Down
63 changes: 48 additions & 15 deletions picire/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,10 @@
import argparse
import codecs
import os
import sys
import time

from datetime import timedelta
from math import inf
from multiprocessing import cpu_count
from os.path import basename, exists, join, realpath
Expand All @@ -28,7 +30,9 @@

from .cache import CacheRegistry
from .dd import DD
from .exception import ReductionException, ReductionStopped
from .iterator import CombinedIterator, IteratorRegistry
from .limit_reduction import LimitReduction
from .parallel_dd import ParallelDD
from .shared_cache import shared_cache_decorator
from .splitter import SplitterRegistry
Expand Down Expand Up @@ -91,6 +95,12 @@ def int_or_inf(value):
parser.add_argument('--no-cache-evict-after-fail', dest='evict_after_fail', action='store_false', default=True,
help='disable the eviction of larger test cases from the cache when a failing, i.e., interesting test case is found')

# Limits on the reduction.
parser.add_argument('--limit-time', metavar='SEC', type=int,
help='limit the execution time of reduction (in seconds; may result in non-minimal output)')
parser.add_argument('--limit-tests', metavar='N', type=int,
help='limit the number of test command executions (may result in non-minimal output)')

# Logging settings.
inators.arg.add_log_level_argument(parser)
parser.add_argument('--log-format', metavar='FORMAT', default='%(message)s',
Expand Down Expand Up @@ -148,12 +158,19 @@ def process_args(args):
args.cache_config = {'cache_fail': args.cache_fail,
'evict_after_fail': args.evict_after_fail}

if args.limit_time is not None or args.limit_tests is not None:
stop = LimitReduction(deadline=timedelta(seconds=args.limit_time) if args.limit_time is not None else None,
max_tests=args.limit_tests)
else:
stop = None

# Choose the reducer class that will be used and its configuration.
args.reduce_config = {'config_iterator': CombinedIterator(args.subset_first,
IteratorRegistry.registry[args.subset_iterator],
IteratorRegistry.registry[args.complement_iterator]),
'split': SplitterRegistry.registry[args.split](n=args.granularity),
'dd_star': args.dd_star}
'dd_star': args.dd_star,
'stop': stop}
if not args.parallel:
args.reduce_class = DD
else:
Expand Down Expand Up @@ -221,6 +238,10 @@ def reduce(src, *,
:param cache_config: Dictionary containing information to initialize the
cache_class.
:return: The contents of the minimal test case.
:raises ReductionException: If reduction could not run until completion. The
``result`` attribute of the exception contains the contents of the
smallest, potentially non-minimal, but failing test case found during
reduction.
"""

# Get the parameters in a dictionary so that they can be pretty-printed
Expand All @@ -246,11 +267,19 @@ def reduce(src, *,
cache=cache,
id_prefix=(f'a{atom_cnt}',),
**reduce_config)
min_set = dd(list(range(len(src))))
src = test_builder(min_set)
try:
min_set = dd(list(range(len(src))))
src = test_builder(min_set)

logger.trace('The cached results are: %s', cache)
logger.debug('A minimal config is: %r', min_set)
logger.trace('The cached results are: %s', cache)
logger.debug('A minimal config is: %r', min_set)
except ReductionException as e:
logger.trace('The cached results are: %s', cache)
logger.debug('The reduced config is: %r', e.result)
logger.warning('Reduction stopped prematurely, the output may not be minimal: %s', e, exc_info=None if isinstance(e, ReductionStopped) else e)

e.result = test_builder(e.result)
raise

return src

Expand Down Expand Up @@ -283,13 +312,17 @@ def execute():
except ValueError as e:
parser.error(e)

out_src = reduce(args.src,
reduce_class=args.reduce_class,
reduce_config=args.reduce_config,
tester_class=args.tester_class,
tester_config=args.tester_config,
atom=args.atom,
cache_class=args.cache_class,
cache_config=args.cache_config)

postprocess(args, out_src)
try:
out_src = reduce(args.src,
reduce_class=args.reduce_class,
reduce_config=args.reduce_config,
tester_class=args.tester_class,
tester_config=args.tester_config,
atom=args.atom,
cache_class=args.cache_class,
cache_config=args.cache_config)
postprocess(args, out_src)
except ReductionException as e:
postprocess(args, e.result)
if not isinstance(e, ReductionStopped):
sys.exit(1)
34 changes: 31 additions & 3 deletions picire/dd.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import logging

from .cache import ConfigCache
from .exception import ReductionError, ReductionStopped
from .iterator import CombinedIterator
from .outcome import Outcome
from .splitter import ZellerSplit
Expand All @@ -23,7 +24,7 @@ class DD:
"""

def __init__(self, test, *, split=None, cache=None, id_prefix=None,
config_iterator=None, dd_star=False):
config_iterator=None, dd_star=False, stop=None):
"""
Initialize a DD object.

Expand All @@ -34,6 +35,7 @@ def __init__(self, test, *, split=None, cache=None, id_prefix=None,
:param config_iterator: Reference to a generator function that provides
config indices in an arbitrary order.
:param dd_star: Boolean to enable the DD star algorithm.
:param stop: A callable invoked before the execution of every test.
"""
self._test = test
self._split = split or ZellerSplit()
Expand All @@ -42,13 +44,18 @@ def __init__(self, test, *, split=None, cache=None, id_prefix=None,
self._iteration_prefix = ()
self._config_iterator = config_iterator or CombinedIterator()
self._dd_star = dd_star
self._stop = stop

def __call__(self, config):
"""
Return a 1-minimal failing subset of the initial configuration.

:param config: The initial configuration that will be reduced.
:return: 1-minimal failing configuration.
:raises ReductionException: If reduction could not run until completion.
The ``result`` attribute of the exception contains the smallest,
potentially non-minimal, but failing configuration found during
reduction.
"""
for iter_cnt in itertools.count():
logger.info('Iteration #%d', iter_cnt)
Expand All @@ -75,7 +82,15 @@ def __call__(self, config):
logger.info('\tGranularity: %d', len(subsets))
logger.debug('\tConfig: %r', subsets)

next_subsets, complement_offset = self._reduce_config(run, subsets, complement_offset)
try:
next_subsets, complement_offset = self._reduce_config(run, subsets, complement_offset)
except ReductionStopped as e:
logger.info('\tStopped')
e.result = config
raise
except Exception as e:
logger.info('\tErrored')
raise ReductionError(str(e), result=config) from e

if next_subsets is not None:
changed = True
Expand Down Expand Up @@ -127,7 +142,10 @@ def _reduce_config(self, run, subsets, complement_offset):
i = -i - 1

# Get the outcome either from cache or by testing it.
outcome = self._lookup_cache(config_set, config_id) or self._test_config(config_set, config_id)
outcome = self._lookup_cache(config_set, config_id)
if outcome is None:
self._check_stop()
outcome = self._test_config(config_set, config_id)
if outcome is Outcome.FAIL:
fvalue = i
break
Expand All @@ -145,6 +163,16 @@ def _reduce_config(self, run, subsets, complement_offset):

return None, complement_offset

def _check_stop(self):
"""
Check whether reduction shall continue with executing the next test or
stop.

:raises ReductionStopped: If reduction shall not continue.
"""
if self._stop:
self._stop()

def _lookup_cache(self, config, config_id):
"""
Perform a cache lookup if caching is enabled.
Expand Down
34 changes: 34 additions & 0 deletions picire/exception.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Copyright (c) 2023 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.


class ReductionException(Exception):
    """
    Base class of reduction-related exceptions. In addition to signaling the
    premature termination of a reduction process, exception instances contain
    the intermediate result of the reduction.

    :ivar result: A representation of the smallest, potentially non-minimal, but
        failing test case found during reduction.
    """

    def __init__(self, *args, result=None):
        """
        :param args: Positional arguments forwarded to :class:`Exception`.
        :param result: The intermediate result of the reduction at the time the
            exception was raised (``None`` if not available).
        """
        super().__init__(*args)
        self.result = result


class ReductionStopped(ReductionException):
    """
    Signals that the reduction process was stopped prematurely, e.g., because a
    time limit or a limit on the number of executed tests has been reached.
    """


class ReductionError(ReductionException):
    """
    Exception to signal that an unexpected error occurred during reduction.
    """
55 changes: 55 additions & 0 deletions picire/limit_reduction.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# Copyright (c) 2023 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.

from datetime import datetime, timedelta
from time import time

from .exception import ReductionStopped


class LimitReduction:
    """
    Callable stop condition that limits the execution time and/or the number of
    performed tests during a reduction.
    """

    def __init__(self, *, deadline=None, max_tests=None):
        """
        :param deadline: A soft limit on the execution time of the reduction.
            The deadline may be given as a :class:`datetime` object, as a
            :class:`timedelta` object (relative to :meth:`~datetime.now`), or as
            a ``float`` POSIX timestamp (as returned by :meth:`time.time`).
        :param max_tests: A hard limit on the maximum number of tests that may
            be executed.
        """
        # Keep the originally supplied values for __str__.
        self._deadline = deadline
        self._max_tests = max_tests

        # Normalize the deadline to a POSIX timestamp so that __call__ can
        # compare it against time() directly.
        if isinstance(deadline, timedelta):
            deadline = datetime.now() + deadline
        if isinstance(deadline, datetime):
            deadline = deadline.timestamp()
        self._deadline_timestamp = deadline
        self._tests_left = max_tests

    def __call__(self):
        # Check the (soft) time limit first, then account for the test that is
        # about to run against the (hard) test count limit.
        if self._deadline is not None and time() >= self._deadline_timestamp:
            raise ReductionStopped('deadline expired')
        if self._max_tests is not None:
            if self._tests_left <= 0:
                raise ReductionStopped('maximum number of tests performed')
            self._tests_left -= 1

    def __str__(self):
        cls = self.__class__
        limits = [f'{name}={value}'
                  for name, value in (('deadline', self._deadline),
                                      ('max_tests', self._max_tests))
                  if value is not None]
        return f'{cls.__module__}.{cls.__name__}({", ".join(limits)})'
6 changes: 4 additions & 2 deletions picire/parallel_dd.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
class ParallelDD(DD):

def __init__(self, test, *, split=None, cache=None, id_prefix=None,
config_iterator=None, dd_star=False,
config_iterator=None, dd_star=False, stop=None,
proc_num=None, max_utilization=None):
"""
Initialize a ParallelDD object.
Expand All @@ -34,11 +34,12 @@ def __init__(self, test, *, split=None, cache=None, id_prefix=None,
:param config_iterator: Reference to a generator function that provides
config indices in an arbitrary order.
:param dd_star: Boolean to enable the DD star algorithm.
:param stop: A callable invoked before the execution of every test.
:param proc_num: The level of parallelization.
:param max_utilization: The maximum CPU utilization accepted.
"""
cache = cache or shared_cache_decorator(ConfigCache)()
super().__init__(test=test, split=split, cache=cache, id_prefix=id_prefix, config_iterator=config_iterator, dd_star=dd_star)
super().__init__(test=test, split=split, cache=cache, id_prefix=id_prefix, config_iterator=config_iterator, dd_star=dd_star, stop=stop)

self._proc_num = proc_num
self._max_utilization = max_utilization
Expand Down Expand Up @@ -93,6 +94,7 @@ def _reduce_config(self, run, subsets, complement_offset):
ploop.brk()
break

self._check_stop()
# Break if we found a FAIL either in the cache or by testing it now.
if not ploop.do(self._loop_body, (config_set, i, config_id)):
# if do() returned False, the test was not started
Expand Down
32 changes: 31 additions & 1 deletion tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def __call__(self, config, config_id):
2,
math.inf,
])
class TestApi:
class TestReduction:

def _run_picire(self, interesting, config, expect, granularity, dd, split, subset_first, subset_iterator, complement_iterator, cache):
logging.basicConfig(format='%(message)s')
Expand Down Expand Up @@ -101,3 +101,33 @@ def test_dd(self, interesting, config, expect, granularity, split, subset_first,
])
def test_parallel(self, interesting, config, expect, granularity, split, subset_first, subset_iterator, complement_iterator, cache):
self._run_picire(interesting, config, expect, granularity, picire.ParallelDD, split, subset_first, subset_iterator, complement_iterator, picire.shared_cache_decorator(cache))


@pytest.mark.parametrize('interesting, config', [
    (interesting_a, config_a),
    (interesting_b, config_b),
    (interesting_c, config_c),
])
@pytest.mark.parametrize('deadline, max_tests', [
    (0, None),
    (None, 0),
])
class TestLimit:
    """Check that reduction limits stop reduction and expose the intermediate result."""

    def _run_picire(self, interesting, config, dd, deadline, max_tests):
        logging.basicConfig(format='%(message)s')
        logging.getLogger('picire').setLevel(logging.DEBUG)

        reducer = dd(CaseTest(interesting, config),
                     stop=picire.LimitReduction(deadline=deadline, max_tests=max_tests))
        with pytest.raises(picire.ReductionStopped) as exc_info:
            reducer(list(range(len(config))))

        # With a zero limit, reduction stops before any test runs, so the
        # intermediate result must still be the full configuration.
        assert [config[i] for i in exc_info.value.result] == config

    def test_dd(self, interesting, config, deadline, max_tests):
        self._run_picire(interesting, config, picire.DD, deadline, max_tests)

    def test_parallel(self, interesting, config, deadline, max_tests):
        self._run_picire(interesting, config, picire.ParallelDD, deadline, max_tests)
Loading