def test_complex_file_validation_invalid(self):
    """Validate a known-bad sidecar and events file and check the exact issue counts."""
    base_dir = os.path.dirname(os.path.abspath(__file__))
    schema_path = os.path.join(base_dir, '../data/validator_tests/bids_schema.mediawiki')
    events_path = os.path.join(base_dir, '../data/validator_tests/bids_events.tsv')
    json_path = os.path.join(base_dir, "../data/validator_tests/bids_events_bad_defs.json")

    hed_schema = schema.load_schema(schema_path)
    validator = HedValidator(hed_schema=hed_schema)

    # The bad-definitions sidecar should produce exactly 4 issues on its own.
    sidecar = Sidecar(json_path)
    sidecar_issues = sidecar.validate_entries(validators=validator, check_for_warnings=True)
    self.assertEqual(len(sidecar_issues), 4)

    # The same 4 issues should surface when validating the sidecars via the events file.
    input_file = EventsInput(events_path, sidecars=sidecar)
    issues = input_file.validate_file_sidecars(validator, check_for_warnings=True)
    self.assertEqual(len(issues), 4)

    # Full file validation yields the expected total issue count.
    issues = input_file.validate_file(validator, check_for_warnings=True)
    self.assertEqual(len(issues), 42)
def test_loading_and_reset_mapper(self):
    """Check that resetting the column mapper changes per-row column dictionary sizes."""
    base_dir = os.path.dirname(os.path.abspath(__file__))
    events_path = os.path.join(base_dir, '../data/validator_tests/bids_events.tsv')
    json_path = os.path.join(base_dir, "../data/validator_tests/bids_events.json")

    # Sanity check: this sidecar is expected to validate cleanly.
    sidecar = Sidecar(json_path)
    self.assertEqual(len(sidecar.validate_entries()), 0)

    input_file_1 = EventsInput(events_path, sidecars=sidecar)
    input_file_2 = EventsInput(events_path, sidecars=sidecar)
    input_file_2.reset_column_mapper()

    # Walk both inputs in lockstep: rows match, but the reset mapper
    # yields the full (11-column) dictionary instead of the mapped 5.
    paired_rows = zip(input_file_1.iter_dataframe(), input_file_2.iter_dataframe())
    for (row_number, column_dict), (row_number2, column_dict2) in paired_rows:
        self.assertEqual(
            row_number, row_number2,
            f"EventsInput should have row {row_number} equal to {row_number2} after reset"
        )
        self.assertTrue(
            len(column_dict) == 5,
            f"The column dictionary for row {row_number} should have the right length"
        )
        self.assertTrue(
            len(column_dict2) == 11,
            f"The reset column dictionary for row {row_number2} should have the right length"
        )
def test_file_as_string(self):
    """Verify that loading events from an in-memory string matches loading from disk.

    Bug fix: the original loop unpacked the second zip element into
    ``column_dict`` again, shadowing the first, and then asserted
    ``column_dict == column_dict`` — a self-comparison that could never
    fail.  The second element is now unpacked as ``column_dict2`` so the
    two sources are actually compared.
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    events_path = os.path.join(base_dir, '../data/validator_tests/bids_events.tsv')
    json_path = os.path.join(base_dir, "../data/validator_tests/bids_events.json")

    sidecar = Sidecar(json_path)
    self.assertEqual(len(sidecar.validate_entries(expand_defs=True)), 0)

    input_file = EventsInput(events_path, sidecars=sidecar)

    # Re-read the same events file into an in-memory buffer.
    with open(events_path) as file:
        events_file_as_string = io.StringIO(file.read())
    input_file_from_string = EventsInput(file=events_file_as_string, sidecars=sidecar)

    # Both inputs must yield identical rows and column dictionaries.
    for (row_number, column_dict), (row_number2, column_dict2) in zip(
            input_file, input_file_from_string):
        self.assertEqual(row_number, row_number2)
        self.assertEqual(column_dict, column_dict2)
def test_complex_file_validation_invalid_definitions_removed(self):
    """Verify definitions are stripped from sidecar strings before being added.

    If definitions were not removed, validation would produce extra
    errors beyond the expected counts asserted below.
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    schema_path = os.path.join(base_dir, '../data/validator_tests/bids_schema.mediawiki')
    events_path = os.path.join(base_dir, '../data/validator_tests/bids_events.tsv')
    json_path = os.path.join(base_dir, "../data/validator_tests/bids_events_bad_defs2.json")

    hed_schema = schema.load_schema(schema_path)
    sidecar = Sidecar(json_path)
    input_file = EventsInput(events_path, sidecars=sidecar)
    validator = HedValidator(hed_schema=hed_schema)

    # Sidecar-level validation: exactly 4 issues expected.
    sidecar_issues = input_file.validate_file_sidecars(validator)
    self.assertEqual(len(sidecar_issues), 4)

    # Whole-file validation: 42 issues, not more — confirming no
    # duplicate definition errors leak in.
    file_issues = input_file.validate_file(validator)
    self.assertEqual(len(file_issues), 42)