def test_validate_file_cached_hdmf_common(self):
    """Validating a cached-spec file against the hdmf-common namespace raises ValueError."""
    # TODO this error should not be different from the error when using the validate script above
    expected = "builder must have data type defined with attribute 'data_type'"
    with NWBHDF5IO('tests/back_compat/1.1.2_nwbfile.nwb', 'r') as io:
        with self.assertRaisesWith(ValueError, expected):
            validate(io, 'hdmf-common')
def main():
    """CLI entry point: validate an NWB file against the core namespace or an
    extension namespace file given via ``--nspath`` (optionally restricted to
    one namespace via ``--ns``).  Exits with status 1 if the file is missing.
    """
    ep = """
    use --nspath to validate against an extension. If --ns is not specified,
    validate against all namespaces in namespace file.
    """
    parser = ArgumentParser(description="Validate an NWB file", epilog=ep)
    parser.add_argument("path", type=str, help="the path to the NWB file")
    parser.add_argument('-p', '--nspath', type=str, help="the path to the namespace file")
    parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against")
    args = parser.parse_args()

    if not os.path.exists(args.path):
        print('%s not found' % args.path, file=sys.stderr)
        sys.exit(1)

    io = HDF5IO(args.path, get_manager(), mode='r')

    if args.nspath is not None:
        namespaces = load_namespaces(args.nspath)
        if args.ns is not None:
            # BUG FIX: was `args.ns_path`, which raised AttributeError because
            # argparse stores `--nspath` as `nspath`.  Also restrict validation
            # to the requested namespace instead of looping over all of them,
            # as the epilog documents.
            print('Validating against %s from %s.' % (args.ns, args.nspath))
            namespaces = [args.ns]
        else:
            print('Validating using namespaces in %s.' % args.nspath)
        for ns in namespaces:
            print('Validating against %s' % ns)
            errors = validate(io, ns)
            _print_errors(errors)
    else:
        errors = validate(io)
        print('Validating against core namespace')
        _print_errors(errors)
def _write(test_name, nwbfile):
    """Round-trip helper: write *nwbfile* to a versioned back-compat path,
    then re-open the file, validate it, and read it back."""
    filename = 'tests/back_compat/%s_%s.nwb' % (__version__, test_name)
    with NWBHDF5IO(filename, 'w') as writer:
        writer.write(nwbfile)
    with NWBHDF5IO(filename, 'r') as reader:
        validate(reader)
        nwbfile = reader.read()
def validate_nwbs():
    """Validate every ``*.nwb`` file in the working directory with
    ``pynwb.validate``, updating the module-level TOTAL/FAILURES/ERRORS
    counters and re-emitting any non-ignored RuntimeWarnings."""
    global TOTAL, FAILURES, ERRORS
    logging.info('running validation tests on NWB files')
    example_files = glob.glob('*.nwb')

    import pynwb

    TOTAL += len(example_files)
    for path in example_files:
        try:
            logging.info("Validating file %s" % path)
            with warnings.catch_warnings(record=True) as caught:
                with pynwb.NWBHDF5IO(path, mode='r') as io:
                    validation_errors = pynwb.validate(io)
                    if validation_errors:
                        FAILURES += 1
                        ERRORS += 1
                        for err in validation_errors:
                            print("Error: %s" % err)
            # Re-emit RuntimeWarnings that are not import-related noise
            # (warning_re matches the ones to ignore).
            relevant = [
                w for w in caught
                if isinstance(w.message, RuntimeWarning)
                and not warning_re.match(str(w.message))
            ]
            for w in relevant:
                warnings.showwarning(w.message, w.category, w.filename, w.lineno, w.line)
        except Exception:
            # Any unexpected failure counts against the file but must not
            # abort the remaining files.
            print(traceback.format_exc())
            FAILURES += 1
            ERRORS += 1
def validate(path, devel_debug=False):
    """Run validation on a file and return errors

    In case of an exception being thrown, an error message added to the
    returned list of validation errors

    Parameters
    ----------
    path: str or Path
    """
    path = str(path)  # normalize pathlib.Path to plain str
    try:
        with pynwb.NWBHDF5IO(path, "r", load_namespaces=True) as reader:
            errors = pynwb.validate(reader)
            lgr.warning(
                "pynwb validation errors for %s: %s",
                path,
                errors,
                extra={"validating": True},
            )
    except Exception as exc:
        if devel_debug:
            raise
        lgr.warning("Failed to validate %s: %s", path, exc, extra={"validating": True})
        errors = [f"Failed to validate {path}: {exc}"]

    # To overcome
    # https://github.com/NeurodataWithoutBorders/pynwb/issues/1090
    # https://github.com/NeurodataWithoutBorders/pynwb/issues/1091
    re_ok_prior_210 = re.compile(
        r"general/(experimenter|related_publications)\): "
        r"incorrect shape - expected an array of shape .\[None\].")
    try:
        version = get_nwb_version(path, sanitize=False)
    except Exception:
        # we just will not remove any errors, it is required so should be some
        pass
    else:
        if version is not None:
            # Explicitly sanitize so we collect warnings.
            # TODO: later cast into proper ERRORs
            version = _sanitize_nwb_version(version, log=errors.append)
            parsed = LooseVersion(version)
            if parsed and parsed < "2.1.0":
                n_before = len(errors)
                errors = [err for err in errors if not re_ok_prior_210.search(str(err))]
                # The filter only ever removes entries, so a length change is
                # exactly a content change.
                if len(errors) != n_before:
                    lgr.debug(
                        "Filtered out %d validation errors on %s",
                        n_before - len(errors),
                        path,
                    )
    return errors
def test_append(self):
    """Write a file, append an ElectricalSeries that reuses the linked
    electrode table region, then re-read and check the link identity."""
    proc_mod = self.nwbfile.create_processing_module(name='test_proc_mod', description='')
    lfp = LFP(name='test_proc_dset')
    proc_mod.add(lfp)
    device = self.nwbfile.create_device(name='test_device')
    group = self.nwbfile.create_electrode_group(
        name='test_electrode_group', description='', location='', device=device)
    self.nwbfile.add_electrode(
        x=0.0, y=0.0, z=0.0, imp=np.nan, location='', filtering='', group=group)
    region = self.nwbfile.create_electrode_table_region(region=[0], description='')
    series = ElectricalSeries(
        name='test_es',
        electrodes=region,
        data=np.ones(shape=(100, )),
        rate=10000.0,
    )
    lfp.add_electrical_series(series)

    with NWBHDF5IO(self.path, mode='w') as io:
        io.write(self.nwbfile, cache_spec=False)

    with NWBHDF5IO(self.path, mode='a') as io:
        nwb = io.read()
        linked = nwb.processing['test_proc_mod']['LFP'].electrical_series['test_es'].electrodes
        ts2 = ElectricalSeries(name='timeseries2', data=[4., 5., 6.], rate=1.0,
                               electrodes=linked)
        nwb.add_acquisition(ts2)
        io.write(nwb)  # also attempt to write same spec again
        self.assertIs(
            nwb.processing['test_proc_mod']['LFP'].electrical_series['test_es'].electrodes,
            nwb.acquisition['timeseries2'].electrodes)

    with NWBHDF5IO(self.path, mode='r') as io:
        nwb = io.read()
        np.testing.assert_equal(nwb.acquisition['timeseries2'].data[:], ts2.data)
        self.assertIs(
            nwb.processing['test_proc_mod']['LFP'].electrical_series['test_es'].electrodes,
            nwb.acquisition['timeseries2'].electrodes)
        errors = validate(io)
        for e in errors:
            print('ERROR', e)
def main():
    """Validate one NWB file, or every ``*.nwb`` file in a directory, and
    sanity-check the content of each readable file.  ``--modules`` names
    extension modules to import before reading."""
    parser = argparse.ArgumentParser('python test.py [options]')
    parser.add_argument('-m', '--modules', nargs='*', dest='modules',
                        help='modules to import prior to reading the file(s)')
    parser.add_argument('path', help='path to an NWB file or directory containing NWB files')
    parser.set_defaults(modules=[])
    args = parser.parse_args()

    in_path = pathlib.Path(args.path)
    if in_path.is_dir():
        files = list(in_path.glob('*.nwb'))
    elif in_path.is_file():
        files = [in_path]
    else:
        raise ValueError('%s should be a directory or an NWB file' % in_path)

    for module in args.modules:
        importlib.import_module(module)

    num_invalid_files = 0
    num_exceptions = 0
    for fi, filename in enumerate(files):
        print('%d/%d %s' % (fi + 1, len(files), filename))
        try:
            with pynwb.NWBHDF5IO(str(filename), 'r', load_namespaces=True) as io:
                errors = pynwb.validate(io)
                if errors:
                    for e in errors:
                        print('Validator Error:', e)
                    num_invalid_files += 1
                else:
                    print('Validation OK!')

                # inspect NWBFile object
                nwbfile = io.read()
                check_general(nwbfile)
                check_timeseries(nwbfile)
                check_tables(nwbfile)
                check_icephys(nwbfile)
                check_opto(nwbfile)
                check_ecephys(nwbfile)
        except Exception as ex:
            num_exceptions += 1
            print("ERROR:", ex)
        print()

    # BUG FIX: the invalid-file summary previously printed num_exceptions,
    # and "All ... validate!" printed whenever there were no exceptions even
    # if some files were invalid.
    if num_invalid_files:
        print('%d/%d files are invalid.' % (num_invalid_files, len(files)))
    if num_exceptions:
        print('%d/%d files had errors.' % (num_exceptions, len(files)))
    if not num_invalid_files and not num_exceptions:
        print('All %d files validate!' % len(files))
def test_read(self):
    """
    Attempt to read and validate all NWB files in the same folder as this file.

    The folder should contain NWB files from previous versions of NWB. See
    src/pynwb/testing/make_test_files.py for code to generate the NWB files.
    """
    here = Path(__file__).parent
    for nwb_path in here.glob('*.nwb'):
        with self.subTest(file=nwb_path.name):
            with NWBHDF5IO(str(nwb_path), 'r') as io:
                errors = validate(io)
                io.read()
                if errors:
                    for e in errors:
                        warnings.warn('%s: %s' % (nwb_path.name, e))
def test_read(self):
    """Test reading and validating all NWB files in the same folder as this file.

    This folder contains NWB files generated by previous versions of NWB using
    the script src/pynwb/testing/make_test_files.py.  Validation errors listed
    in ``self.expected_errors`` for a file are tolerated; any other error
    produces a warning.
    """
    dir_path = Path(__file__).parent
    nwb_files = dir_path.glob('*.nwb')
    for f in nwb_files:
        with self.subTest(file=f.name):
            with NWBHDF5IO(str(f), 'r') as io:
                errors = validate(io)
                io.read()
                if errors:
                    # BUG FIX: previously errors were only reported when the
                    # file had an entry in expected_errors, so errors on files
                    # without an entry were silently dropped.
                    expected = self.expected_errors.get(f.name, ())
                    for e in errors:
                        if str(e) not in expected:
                            warnings.warn('%s: %s' % (f.name, e))
def validate_nwb(filename):
    """
    If pynwb does not catch an exception then add it to the error list

    Parameters
    ----------
    filename: str
        nwb file name

    Returns
    -------
    errors: list of errors
    """
    # BUG FIX: previously a missing file skipped the whole body and the final
    # `return errors` raised UnboundLocalError; report the problem in the
    # returned error list instead, matching the documented contract.
    if not os.path.exists(filename):
        return ['%s not found' % filename]
    with NWBHDF5IO(filename, mode='r', load_namespaces=True) as io:
        try:
            errors = validate(io)
        except Exception as e:
            errors = [e]
    return errors
def validate(path):
    """Run validation on a file and return errors

    In case of an exception being thrown, an error message added to the
    returned list of validation errors

    Parameters
    ----------
    path: str or Path
    """
    path = str(path)  # Might come in as pathlib's PATH
    try:
        with pynwb.NWBHDF5IO(path, "r", load_namespaces=True) as reader:
            errors = pynwb.validate(reader)
    except Exception as exc:
        errors = [f"Failed to validate {path}: {exc}"]

    # To overcome
    # https://github.com/NeurodataWithoutBorders/pynwb/issues/1090
    # https://github.com/NeurodataWithoutBorders/pynwb/issues/1091
    # FIX: raw strings — the original non-raw literals contained the invalid
    # escape sequences `\)` and `\[` (DeprecationWarning today, SyntaxError in
    # future Python versions).
    re_ok_prior_210 = re.compile(
        r"general/(experimenter|related_publications)\): "
        r"incorrect shape - expected an array of shape .\[None\].")
    try:
        version = get_nwb_version(path)
    except Exception:
        # FIX: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.  We just will not remove any errors.
        pass
    else:
        if version and LooseVersion(version) < "2.1.0":
            errors_ = errors[:]
            errors = [e for e in errors if not re_ok_prior_210.search(str(e))]
            if errors != errors_:
                lgr.debug(
                    "Filtered out %d validation errors on %s",
                    len(errors_) - len(errors),
                    path,
                )
    return errors
def validate_nwbs():
    """Validate every ``*.nwb`` file in the working directory twice over:
    once through the Python API (``pynwb.validate``) and once per command of a
    matrix of ``python -m pynwb.validate`` CLI invocations (plain, cached,
    no-cached, and per cached namespace), updating the module-level
    TOTAL/FAILURES/ERRORS counters."""
    global TOTAL, FAILURES, ERRORS
    logging.info('running validation tests on NWB files')
    examples_nwbs = glob.glob('*.nwb')

    import pynwb

    for nwb in examples_nwbs:
        try:
            logging.info("Validating file %s" % nwb)

            ws = list()
            # record=True suppresses warning display so we can filter and
            # re-emit only the relevant ones below.
            with warnings.catch_warnings(record=True) as tmp:
                logging.info("Validating with pynwb.validate method.")
                with pynwb.NWBHDF5IO(nwb, mode='r') as io:
                    errors = pynwb.validate(io)
                    TOTAL += 1

                    if errors:
                        FAILURES += 1
                        ERRORS += 1
                        for err in errors:
                            print("Error: %s" % err)

                def get_namespaces(nwbfile):
                    """Return the namespaces cached in the file via the CLI;
                    [] if the subprocess fails.

                    NOTE(review): the `nwbfile` parameter is unused — the body
                    reads the enclosing `nwb` variable instead; confirm intent.
                    """
                    comp = run(["python", "-m", "pynwb.validate",
                                "--list-namespaces", "--cached-namespace", nwb],
                               stdout=PIPE, stderr=STDOUT, universal_newlines=True,
                               timeout=20)

                    if comp.returncode != 0:
                        return []

                    return comp.stdout.split()

                namespaces = get_namespaces(nwb)

                # A file with no cached namespaces is itself a failure.
                if len(namespaces) == 0:
                    FAILURES += 1
                    ERRORS += 1

                # Build the CLI command matrix: default, cached, no-cached,
                # plus one cached run per discovered namespace.
                cmds = []
                cmds += [["python", "-m", "pynwb.validate", nwb]]
                cmds += [["python", "-m", "pynwb.validate", "--cached-namespace", nwb]]
                cmds += [["python", "-m", "pynwb.validate", "--no-cached-namespace", nwb]]

                for ns in namespaces:
                    cmds += [["python", "-m", "pynwb.validate",
                              "--cached-namespace", "--ns", ns, nwb]]

                for cmd in cmds:
                    # Log the command without the trailing file name.
                    logging.info("Validating with \"%s\"." % (" ".join(cmd[:-1])))
                    comp = run(cmd, stdout=PIPE, stderr=STDOUT,
                               universal_newlines=True, timeout=20)
                    TOTAL += 1

                    if comp.returncode != 0:
                        FAILURES += 1
                        ERRORS += 1
                        print("Error: %s" % comp.stdout)

            for w in tmp:  # ignore RunTimeWarnings about importing
                if isinstance(w.message, RuntimeWarning) and not warning_re.match(
                        str(w.message)):
                    ws.append(w)
            for w in ws:
                warnings.showwarning(w.message, w.category, w.filename, w.lineno, w.line)
        except Exception:
            # Keep going on any unexpected failure; count it against the file.
            print(traceback.format_exc())
            FAILURES += 1
            ERRORS += 1
def write_all_blocks(self, blocks, **kwargs):
    """
    Write list of blocks to the file
    """
    # todo: allow metadata in NWBFile constructor to be taken from kwargs
    # Collect one value per global annotation: explicit kwargs win; otherwise
    # gather candidate values from every block's annotations into a set.
    annotations = defaultdict(set)
    for annotation_name in GLOBAL_ANNOTATIONS:
        if annotation_name in kwargs:
            annotations[annotation_name] = kwargs[annotation_name]
        else:
            for block in blocks:
                if annotation_name in block.annotations:
                    try:
                        annotations[annotation_name].add(
                            block.annotations[annotation_name])
                    except TypeError:
                        # Unhashable value: JSON-encode it if the field is
                        # known to hold JSON-able data, else re-raise.
                        if annotation_name in POSSIBLE_JSON_FIELDS:
                            encoded = json.dumps(
                                block.annotations[annotation_name])
                            annotations[annotation_name].add(encoded)
                        else:
                            raise
            if annotation_name in annotations:
                if len(annotations[annotation_name]) > 1:
                    raise NotImplementedError(
                        "We don't yet support multiple values for {}".
                        format(annotation_name))
                # take single value from set
                annotations[annotation_name], = annotations[
                    annotation_name]
    # Fall back to filename-derived defaults for required NWBFile metadata.
    if "identifier" not in annotations:
        annotations["identifier"] = self.filename
    if "session_description" not in annotations:
        annotations[
            "session_description"] = blocks[0].description or self.filename
        # todo: concatenate descriptions of multiple blocks if different
    if "session_start_time" not in annotations:
        raise Exception(
            "Writing to NWB requires an annotation 'session_start_time'")
    # todo: handle subject
    # todo: store additional Neo annotations somewhere in NWB file
    nwbfile = NWBFile(**annotations)

    assert self.nwb_file_mode in ('w', )  # possibly expand to 'a'ppend later
    # Overwrite mode: remove any existing file before opening for write.
    if self.nwb_file_mode == "w" and os.path.exists(self.filename):
        os.remove(self.filename)
    io_nwb = pynwb.NWBHDF5IO(self.filename, mode=self.nwb_file_mode)

    # Pre-declare the custom unit/epoch columns only if any block actually
    # contains SpikeTrains/Epochs (statistics() counts them per block).
    if sum(statistics(block)["SpikeTrain"]["count"]
           for block in blocks) > 0:
        nwbfile.add_unit_column('_name',
                                'the name attribute of the SpikeTrain')
        # nwbfile.add_unit_column('_description',
        # 'the description attribute of the SpikeTrain')
        nwbfile.add_unit_column(
            'segment',
            'the name of the Neo Segment to which the SpikeTrain belongs')
        nwbfile.add_unit_column(
            'block',
            'the name of the Neo Block to which the SpikeTrain belongs')

    if sum(statistics(block)["Epoch"]["count"] for block in blocks) > 0:
        nwbfile.add_epoch_column('_name', 'the name attribute of the Epoch')
        # nwbfile.add_epoch_column('_description', 'the description attribute of the Epoch')
        nwbfile.add_epoch_column(
            'segment', 'the name of the Neo Segment to which the Epoch belongs')
        nwbfile.add_epoch_column(
            'block', 'the name of the Neo Block to which the Epoch belongs')

    for i, block in enumerate(blocks):
        self.write_block(nwbfile, block)
    io_nwb.write(nwbfile)
    io_nwb.close()

    # Re-open read-only and validate against the core namespace; any
    # validation error aborts with an exception.
    with pynwb.NWBHDF5IO(self.filename, "r") as io_validate:
        errors = pynwb.validate(io_validate, namespace="core")
        if errors:
            raise Exception(
                f"Errors found when validating {self.filename}")
def _validate_helper(**kwargs):
    """Run validation with the given arguments, print any errors, and return
    True when at least one error was found (False otherwise)."""
    errors = validate(**kwargs)
    _print_errors(errors)
    found_errors = errors is not None and len(errors) > 0
    return found_errors
def test_validate_file_cached_bad_ns(self):
    """Validating a cached-spec file against an unknown namespace raises KeyError."""
    expected = "\"'notfound' not a namespace\""
    with NWBHDF5IO('tests/back_compat/1.1.2_nwbfile.nwb', 'r') as io:
        with self.assertRaisesWith(KeyError, expected):
            validate(io, 'notfound')
def test_validate_file_cached(self):
    """A file with cached spec validates cleanly against its own cached namespace."""
    with NWBHDF5IO('tests/back_compat/1.1.2_nwbfile.nwb', 'r') as io:
        self.assertEqual(validate(io), [])