Corrected some of the refactored unit tests #633

Merged
merged 1 commit into from Mar 23, 2023
2 changes: 1 addition & 1 deletion hed/tools/analysis/hed_type_counts.py
@@ -147,4 +147,4 @@ def get_summary(self):
for type_value, count in self.type_dict.items():
details[type_value] = count.get_summary()
return {'name': str(self.name), 'type_tag': self.type_tag, 'files': list(self.files.keys()),
'total_events': self.total_events, 'details': details}
'total_events': self.total_events, 'details': details}
34 changes: 15 additions & 19 deletions hed/tools/bids/bids_file_group.py
@@ -2,6 +2,8 @@

import os
from hed.errors.error_reporter import ErrorContext, ErrorHandler
from hed.validator.sidecar_validator import SidecarValidator
from hed.validator.spreadsheet_validator import SpreadsheetValidator
from hed.tools.analysis.tabular_summary import TabularSummary
from hed.tools.bids.bids_tabular_file import BidsTabularFile
from hed.tools.bids.bids_sidecar_file import BidsSidecarFile
@@ -111,57 +113,51 @@ def summarize(self, value_cols=None, skip_cols=None):
info.update(list(self.datafile_dict.keys()))
return info

def validate_sidecars(self, hed_schema, check_for_warnings=True, error_handler=None):
def validate_sidecars(self, hed_schema, extra_def_dicts=None, check_for_warnings=True):
""" Validate merged sidecars.

Parameters:
hed_schema (HedSchema): HED schema for validation.
extra_def_dicts (DefinitionDict): Extra definitions
check_for_warnings (bool): If True, include warnings in the check.
error_handler (ErrorHandler): The common error handler for the dataset.

Returns:
list: A list of validation issues found. Each issue is a dictionary.

"""

if not error_handler:
error_handler = ErrorHandler()
error_handler = ErrorHandler(check_for_warnings)
issues = []
validator = SidecarValidator(hed_schema)

for sidecar in self.sidecar_dict.values():
error_handler.push_error_context(ErrorContext.FILE_NAME, sidecar.file_path)
if sidecar.has_hed:
issues += sidecar.contents.validate(hed_schema, name=sidecar.file_path)
error_handler.pop_error_context()
name = os.path.basename(sidecar.file_path)
issues += validator.validate(extra_def_dicts=extra_def_dicts, name=name, error_handler=error_handler)
return issues

def validate_datafiles(self, hed_schema, check_for_warnings=True, keep_contents=False, error_handler=None):
def validate_datafiles(self, hed_schema, extra_def_dicts=None, check_for_warnings=True, keep_contents=False):
""" Validate the datafiles and return an error list.

Parameters:
hed_schema (HedSchema): Schema to apply to the validation.
extra_def_dicts (DefinitionDict): Extra definitions that come from outside.
check_for_warnings (bool): If True, include warnings in the check.
keep_contents (bool): If True, the underlying data files are read and their contents retained.
error_handler (ErrorHandler): The common error handler to use for the dataset.

Returns:
list: A list of validation issues found. Each issue is a dictionary.

"""

if not error_handler:
error_handler = ErrorHandler()
error_handler = ErrorHandler(check_for_warnings)
issues = []
for data_obj in self.datafile_dict.values():
error_handler.push_error_context(ErrorContext.FILE_NAME, data_obj.file_path)
data_obj.set_contents(overwrite=False)
if not data_obj.has_hed:
continue
data = data_obj.contents

issues += data.validate(hed_schema)
name = os.path.basename(data_obj.file_path)
issues += data_obj.contents.validate(data_obj.contents, extra_def_dicts=None, name=name,
error_handler=error_handler)
if not keep_contents:
data_obj.clear_contents()
error_handler.pop_error_context()
return issues

def _make_datafile_dict(self):
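A rough usage sketch of the updated BidsFileGroup validation API (the dataset root below is illustrative; only the method signatures come from this diff): callers now pass extra_def_dicts and check_for_warnings instead of an ErrorHandler, and each method builds its own handler internally.

from hed.schema import load_schema
from hed.tools.bids.bids_file_group import BidsFileGroup

# Illustrative inputs -- the dataset root is hypothetical; the schema URL matches the one used in the tests later in this diff.
hed = 'https://raw.githubusercontent.com/hed-standard/hed-schemas/main/standard_schema/hedxml/HED8.0.0.xml'
hed_schema = load_schema(hed)
events = BidsFileGroup('/path/to/bids_dataset')

# New signatures: no error_handler argument; warning reporting is controlled by check_for_warnings.
sidecar_issues = events.validate_sidecars(hed_schema, extra_def_dicts=None, check_for_warnings=False)
datafile_issues = events.validate_datafiles(hed_schema, extra_def_dicts=None, check_for_warnings=False,
                                            keep_contents=False)
print(len(sidecar_issues), len(datafile_issues))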
2 changes: 1 addition & 1 deletion hed/tools/remodeling/operations/factor_hed_type_op.py
@@ -74,7 +74,7 @@ def do_op(self, dispatcher, df, name, sidecar=None):
df_list = [input_data.dataframe.copy()]
hed_strings, definitions = get_assembled(input_data, sidecar, dispatcher.hed_schema,
extra_def_dicts=None, join_columns=True,
shrink_defs=False, expand_defs=True)
shrink_defs=True, expand_defs=False)

var_manager = HedTypeManager(hed_strings, dispatcher.hed_schema, definitions)
var_manager.add_type_variable(self.type_tag.lower())
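For context on the flipped flags (a rough reading of their meaning, not stated in this PR): expand_defs=True replaces each Def/XXX tag in the assembled strings with its full Def-expand group, while shrink_defs=True collapses any expanded groups back to bare Def/XXX tags, so the type variables are now extracted from the compact form. An illustrative pair of assembled strings, with a hypothetical definition name and contents:

# Illustrative only -- the definition name and grouped tags are made up.
expanded = "Sensory-event, (Def-expand/Famous-face, (Image, Face))"  # expand_defs=True (previous behavior)
shrunk = "Sensory-event, Def/Famous-face"                            # shrink_defs=True (new behavior)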
5 changes: 2 additions & 3 deletions hed/tools/remodeling/operations/summarize_hed_type_op.py
@@ -93,8 +93,7 @@ def update_context(self, new_context):
sidecar = Sidecar(sidecar)
input_data = TabularInput(new_context['df'], sidecar=sidecar, name=new_context['name'])
hed_strings, definitions = get_assembled(input_data, sidecar, new_context['schema'],
extra_def_dicts=None, join_columns=True,
shrink_defs=False, expand_defs=True)
extra_def_dicts=None, join_columns=True, expand_defs=False)
context_manager = HedContextManager(hed_strings, new_context['schema'])
type_values = HedTypeValues(context_manager, definitions, new_context['name'], type_tag=self.type_tag)

@@ -176,4 +175,4 @@ def _level_details(level_counts, offset="", indent=""):
level_list.append(f"{offset}{indent*3}Tags: {str(details['tags'])}")
if details['description']:
level_list.append(f"{offset}{indent*3}Description: {details['description']}")
return level_list
return level_list
5 changes: 2 additions & 3 deletions tests/tools/bids/test_bids_file_group.py
@@ -34,9 +34,8 @@ def test_validator(self):
events = BidsFileGroup(self.root_path)
hed = 'https://raw.githubusercontent.com/hed-standard/hed-schemas/main/standard_schema/hedxml/HED8.0.0.xml'
hed_schema = load_schema(hed)
# TODO test after filtering.
# validation_issues = events.validate_datafiles(hed_schema, check_for_warnings=False)
# self.assertFalse(validation_issues, "BidsFileGroup should have no validation errors")
validation_issues = events.validate_datafiles(hed_schema, check_for_warnings=False)
self.assertFalse(validation_issues, "BidsFileGroup should have no validation errors")
validation_issues = events.validate_datafiles(hed_schema, check_for_warnings=True)
self.assertTrue(validation_issues, "BidsFileGroup should have validation warnings")
self.assertEqual(len(validation_issues), 6,
@@ -104,7 +104,7 @@ def test_quick3(self):
input_data = TabularInput(df, sidecar=my_sidecar)
counts = HedTagCounts('myName', 2)
summary_dict = {}
hed_strings = get_assembled(input_data, my_sidecar, my_schema, extra_def_dicts=None, join_columns=True,
hed_strings, definitions = get_assembled(input_data, my_sidecar, my_schema, extra_def_dicts=None, join_columns=True,
shrink_defs=False, expand_defs=True)
for hed in hed_strings:
counts.update_event_counts(hed, 'myName')
@@ -126,10 +128,8 @@ def test_quick4(self):
hed_strings, definitions = get_assembled(input_data, sidecar, my_schema,
extra_def_dicts=None, join_columns=True,
shrink_defs=False, expand_defs=True)
for objs in input_data.iter_dataframe(hed_ops=[my_schema], return_string_only=False,
expand_defs=True, remove_definitions=True):
x = objs['HED']
counts.update_event_counts(objs['HED'], 'myName')
for hed in hed_strings:
counts.update_event_counts(hed, 'myName')
summary_dict['myName'] = counts

def test_get_summary_details(self):
20 changes: 18 additions & 2 deletions tests/tools/remodeling/operations/test_summarize_hed_type_op.py
@@ -40,6 +40,10 @@ def setUpClass(cls):
cls.summary_path = \
os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'../../../data/remodel_tests/aomic_sub-0013_summary_all_rmdl.json'))
rel_path = '../../../data/remodel_tests/sub-002_task-FacePerception_run-1_events.tsv'
cls.events_wh = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), rel_path))
rel_side = '../../../data/remodel_tests/task-FacePerception_events.json'
cls.sidecar_path_wh = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), rel_side))

@classmethod
def tearDownClass(cls):
@@ -75,9 +79,21 @@ def test_summary(self):
self.assertEqual(len(summary2['Dataset']['Overall summary']['files']), 2)
summary2a = context2.get_summary(individual_summaries="separate")
self.assertIsInstance(summary2a["Individual files"]["run-02"], dict)

def test_text_summary_with_levels(self):
with open(self.summary_path, 'r') as fp:
parms = json.load(fp)
dispatch = Dispatcher([], data_root=None, backup_name=None, hed_versions=['8.1.0'])
df = dispatch.get_data_file(self.events_wh)
parsed_commands, errors = Dispatcher.parse_operations(parms)
sum_op = parsed_commands[2]
sum_op.do_op(dispatch, dispatch.prep_data(df), 'run-01', sidecar=self.sidecar_path_wh)
context1 = dispatch.context_dict['AOMIC_condition_variables']
text_summary1 = context1.get_text_summary()
self.assertIsInstance(text_summary1, dict)

def test_text_summary(self):
sidecar = Sidecar(self.sidecar_path, 'aomic_sidecar', hed_schema=self.hed_schema)
sidecar = Sidecar(self.sidecar_path, name='aomic_sidecar')

with open(self.summary_path, 'r') as fp:
parms = json.load(fp)
@@ -104,4 +120,4 @@


if __name__ == '__main__':
unittest.main()
unittest.main()