Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Filter expected log messages during testing #1014

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions aiida/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,15 @@
LOG_LEVEL_REPORT = 23
logging.addLevelName(LOG_LEVEL_REPORT, 'REPORT')


# A logging filter that can be used to disable logging
class NotInTestingFilter(logging.Filter):
    """Logging filter that suppresses all records while the test suite runs.

    Attached to the console handler so that expected warnings emitted during
    tests do not clutter the test output.
    """

    def filter(self, record):
        # Decide whether `record` should be emitted: return True to log it,
        # False to drop it (standard logging.Filter contract).
        # Local import: at module import time the settings module may not be
        # ready yet, so resolve it lazily on each call.
        from aiida import settings
        # TESTING_MODE is presumably flipped to True by the test runner
        # before tests start -- records are dropped for the whole run.
        return not settings.TESTING_MODE


# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
Expand All @@ -45,11 +54,17 @@
'datefmt': '%m/%d/%Y %I:%M:%S %p',
},
},
'filters': {
'testing': {
'()': NotInTestingFilter
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'halfverbose',
'filters': ['testing']
},
'dblogger': {
# get_property takes the property from the config json file
Expand Down
21 changes: 1 addition & 20 deletions aiida/backends/djsite/db/subtests/djangomigrations.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,11 @@
from aiida.backends.testbase import AiidaTestCase



class CalcStateChanges(AiidaTestCase):
# Class to check if the migration code that deals with removing the
# NOTFOUND and UNDETERMINED calc states works properly
def test_unexpected_calc_states(self):
import logging

from django.utils import timezone
from aiida.orm.calculation import Calculation

Expand All @@ -32,8 +30,7 @@ def test_unexpected_calc_states(self):

calc_params = {
'computer': self.computer,
'resources': {'num_machines': 1,
'num_mpiprocs_per_machine': 1}
'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}
}

for state in ['NOTFOUND', 'UNDETERMINED']:
Expand All @@ -45,25 +42,9 @@ def test_unexpected_calc_states(self):

time_before_fix = timezone.now()

# First of all, I re-enable logging in case it was disabled by
# mistake by a previous test (e.g. one that disables and reenables
# again, but that failed)
logging.disable(logging.NOTSET)
# Temporarily disable logging to the stream handler (i.e. screen)
# because otherwise fix_calc_states will print warnings
handler = next((h for h in logging.getLogger('aiida').handlers if
isinstance(h, logging.StreamHandler)), None)

if handler:
original_level = handler.level
handler.setLevel(logging.ERROR)

# Call the code that deals with updating these states
state_change.fix_calc_states(None, None)

if handler:
handler.setLevel(original_level)

current_state = job.get_state()
self.assertNotEqual(current_state, state,
"Migration code failed to change removed state {}".
Expand Down
1 change: 0 additions & 1 deletion aiida/backends/djsite/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
# For further information please visit http://www.aiida.net #
###########################################################################

import logging
import os
import django
from aiida.utils.logger import get_dblogger_extra
Expand Down
8 changes: 0 additions & 8 deletions aiida/backends/sqlalchemy/tests/migrations.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
###########################################################################

import copy
import logging
import unittest

import os
Expand Down Expand Up @@ -49,8 +48,6 @@ class TestMigrationApplicationSQLA(AiidaTestCase):
# The path of the migration configuration (the actual configuration - not
# the testing)
alembic_dpath = None
# The initial alembic log level - to be restored after the testing
init_alemb_log_level = ''

@classmethod
def setUpClass(cls, *args, **kwargs):
Expand All @@ -59,15 +56,10 @@ def setUpClass(cls, *args, **kwargs):
os.path.realpath(utils.__file__))

def setUp(self):
self.init_alemb_log_level = get_property('logging.alembic_loglevel')
set_property('logging.alembic_loglevel',
logging.getLevelName(logging.ERROR))
self.migrate_db_with_non_testing_migrations("base")

def tearDown(self):
self.migrate_db_with_non_testing_migrations("head")
set_property('logging.alembic_loglevel',
logging.getLevelName(self.init_alemb_log_level))

def migrate_db_with_non_testing_migrations(self, destination):
if destination not in ["head", "base"]:
Expand Down
21 changes: 0 additions & 21 deletions aiida/backends/tests/backup_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,22 +199,15 @@ def test_loading_backup_time_params_from_file_4(self):
In the parsed JSON string, the endDateOfBackup & daysToBackuplimit
are set which should lead to an exception.
"""
import logging
from aiida.common.additions.backup_script.backup_base import BackupError

# Disable the logging messages
logging.disable(logging.ERROR)

backup_variables = json.loads(self._json_test_input_5)
self._backup_setup_inst._ignore_backup_dir_existence_check = True
# An exception should be raised because endDateOfBackup
# & daysToBackuplimit have been defined in the same time.
with self.assertRaises(BackupError):
self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

# Enable the logging messages
logging.disable(logging.NOTSET)

def check_full_deserialization_serialization(self, input_string, backup_inst):
input_variables = json.loads(input_string)
backup_inst._ignore_backup_dir_existence_check = True
Expand Down Expand Up @@ -274,10 +267,6 @@ def test_timezone_addition_and_dir_correction(self):
that don't have a timezone. Moreover, it checks if the given directory
paths are normalized as expected.
"""
import logging

# Disable the logging messages
logging.disable(logging.INFO)

backup_variables = json.loads(self._json_test_input_6)
self._backup_setup_inst._ignore_backup_dir_existence_check = True
Expand Down Expand Up @@ -305,9 +294,6 @@ def test_timezone_addition_and_dir_correction(self):
"_backup_setup_inst destination directory is "
"not normalized as expected.")

# Enable the logging messages
logging.disable(logging.NOTSET)


class TestBackupScriptIntegration(AiidaTestCase):

Expand All @@ -318,12 +304,8 @@ class TestBackupScriptIntegration(AiidaTestCase):
_bs_instance = backup_setup.BackupSetup()

def test_integration(self):
import logging
from aiida.utils.capturing import Capturing

# Disable the logging messages
logging.disable(logging.INFO)

# Fill in the repository with data
self.fill_repo()
try:
Expand Down Expand Up @@ -359,9 +341,6 @@ def test_integration(self):
finally:
shutil.rmtree(temp_folder, ignore_errors=True)

# Enable the logging messages
logging.disable(logging.NOTSET)

def fill_repo(self):
from aiida.orm import JobCalculation, CalculationFactory, Data, DataFactory

Expand Down
12 changes: 1 addition & 11 deletions aiida/backends/tests/daemon.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,16 +19,6 @@

class TestDaemonBasic(AiidaTestCase):

def setUp(self):
super(TestDaemonBasic, self).setUp()
import logging
logging.disable(logging.CRITICAL)

def tearDown(self):
super(TestDaemonBasic, self).tearDown()
import logging
logging.disable(logging.NOTSET)

def test_workflow_fast_kill(self):
from aiida.cmdline.commands.workflow import Workflow as WfCmd

Expand All @@ -51,7 +41,7 @@ def test_workflow_fast_kill(self):

# Killing the head workflow
wf_cmd = WfCmd()
wf_cmd.workflow_kill(*[str(head_wf.pk), "-f"])
wf_cmd.workflow_kill(*[str(head_wf.pk), '-f', '-q'])

# At this point no running workflow should be found
running_no = 0
Expand Down
19 changes: 1 addition & 18 deletions aiida/backends/tests/orm/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,19 +134,6 @@ def test_db_log_handler(self):
message = 'Testing logging of critical failure'
calc = Calculation()

# Make sure that global logging is not accidentally disabled
logging.disable(logging.NOTSET)

# # Temporarily disable logging to the stream handler (i.e. screen)
# # because otherwise fix_calc_states will print warnings
# handler = next((h for h in logging.getLogger('aiida').handlers if
# isinstance(h, logging.StreamHandler)), None)

# # try:
# if handler:
# original_level = handler.level
# handler.setLevel(logging.CRITICAL + 1)

# Firing a log for an unstored should not end up in the database
calc.logger.critical(message)

Expand All @@ -160,8 +147,4 @@ def test_db_log_handler(self):
logs = self._backend.log.find()

self.assertEquals(len(logs), 1)
self.assertEquals(logs[0].message, message)

# finally:
# if handler:
# handler.setLevel(original_level)
self.assertEquals(logs[0].message, message)
9 changes: 0 additions & 9 deletions aiida/backends/tests/work/workChain.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,15 +387,6 @@ def _run_with_checkpoints(self, wf_class, inputs=None):


class TestWorkchainWithOldWorkflows(AiidaTestCase):
def setUp(self):
super(TestWorkchainWithOldWorkflows, self).setUp()
import logging
logging.disable(logging.CRITICAL)

def tearDown(self):
super(TestWorkchainWithOldWorkflows, self).tearDown()
import logging
logging.disable(logging.NOTSET)

def test_call_old_wf(self):
wf = WorkflowDemo()
Expand Down
16 changes: 1 addition & 15 deletions aiida/backends/tests/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,24 +197,11 @@ def test_failing_calc_in_wf(self):
sub-workflows) that has an exception at one of its steps stops
properly and it is not left as RUNNING.
"""
import logging
from aiida.daemon.workflowmanager import execute_steps
from aiida.workflows.test import (FailingWFTestSimple,
FailingWFTestSimpleWithSubWF)

try:
# First of all, I re-enable logging in case it was disabled by
# mistake by a previous test (e.g. one that disables and reenables
# again, but that failed)
logging.disable(logging.NOTSET)
# Temporarily disable logging to the stream handler (i.e. screen)
# because otherwise fix_calc_states will print warnings
handler = next((h for h in logging.getLogger('aiida').handlers if
isinstance(h, logging.StreamHandler)), None)
if handler:
original_level = handler.level
handler.setLevel(logging.ERROR)

# Testing the error propagation of a simple workflow
wf = FailingWFTestSimple()
wf.store()
Expand All @@ -238,8 +225,7 @@ def test_failing_calc_in_wf(self):
self.assertLess(step_no, 5, "This workflow should have stopped "
"since it is failing")
finally:
if handler:
handler.setLevel(original_level)
pass

def test_result_parameter_name_colision(self):
"""
Expand Down
3 changes: 3 additions & 0 deletions aiida/cmdline/commands/devel.py
Original file line number Diff line number Diff line change
Expand Up @@ -493,6 +493,9 @@ def run_tests(self, *args):
from aiida.backends import settings
from aiida.backends.testbase import run_aiida_db_tests
from aiida.backends.testbase import check_if_tests_can_run
from aiida import settings

settings.TESTING_MODE = True

# For final summary
test_failures = []
Expand Down
11 changes: 8 additions & 3 deletions aiida/cmdline/commands/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@ def workflow_kill(self, *args):

Pass a list of workflow PKs to kill them.
If you also pass the -f option, no confirmation will be asked.
If you pass the -q option, additional information will be suppressed
"""
from aiida.backends.utils import load_dbenv, is_dbenv_loaded
if not is_dbenv_loaded():
Expand All @@ -145,6 +146,7 @@ def workflow_kill(self, *args):
from aiida.orm.workflow import WorkflowKillError, WorkflowUnkillable

force = False
verbose = True
wfs = []

args = list(args)
Expand All @@ -153,6 +155,8 @@ def workflow_kill(self, *args):
param = args.pop()
if param == '-f':
force = True
elif param == '-q':
verbose = False
else:
try:
wfs.append(int(param))
Expand All @@ -177,7 +181,7 @@ def workflow_kill(self, *args):
counter = 0
for wf_pk in wfs:
try:
kill_from_pk(wf_pk, verbose=True)
kill_from_pk(wf_pk, verbose=verbose)
counter += 1
except NotExistent:
print >> sys.stderr, ("WARNING: workflow {} "
Expand All @@ -192,8 +196,9 @@ def workflow_kill(self, *args):
sys.stdout.write("{}: {}\n".format(e.__class__.__name__,
e.message))

print >> sys.stderr, "{} workflow{} killed.".format(counter,
"" if counter <= 1 else "s")
if verbose:
print >> sys.stderr, "{} workflow{} killed.".format(counter,
"" if counter <= 1 else "s")

def print_logshow(self, *args):
from aiida.backends.utils import load_dbenv, is_dbenv_loaded
Expand Down
2 changes: 1 addition & 1 deletion aiida/common/additions/backup_script/backup_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def __init__(self, backup_info_filepath, additional_back_time_mins):
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')

# The logger of the backup script
self._logger = logging.getLogger("aiida_backup")
self._logger = logging.getLogger('aiida.aiida_backup')

def _read_backup_info_from_file(self, backup_info_file_name):
"""
Expand Down
6 changes: 1 addition & 5 deletions aiida/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,20 +7,16 @@
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################

import os


from aiida.backends import settings

from aiida.common.exceptions import ConfigurationError
from aiida.common.setup import (get_config, get_secret_key, get_property,
get_profile_config, get_default_profile,
parse_repository_uri)



USE_TZ = True
TESTING_MODE = False

try:
confs = get_config()
Expand Down