def test_reading_same(self):
    """Check that the untargeted generic reader ('read') and the default
    single-object reader yield equivalent hierarchies for each test file."""
    for ioobj, path in self.iter_io_objects(return_path=True):
        read_via_base = create_generic_reader(ioobj, target=False)
        read_via_single = create_generic_reader(ioobj)

        result_base = read_via_base()
        result_single = read_via_single()

        try:
            assert_same_sub_schema(result_base, result_single)
        except BaseException as exc:
            # Tag the failure with the offending file name so the
            # test report identifies which file triggered it.
            exc.args += ('from ' + os.path.basename(path),)
            raise
def test_reading_same(self):
    """Verify that reading with the base (untargeted) reader and with
    the default targeted reader produces the same object schema."""
    for ioobj, path in self.iter_io_objects(return_path=True):
        readers = (
            create_generic_reader(ioobj, target=False),  # base 'read'
            create_generic_reader(ioobj),                # default target
        )
        obj_base, obj_single = (reader() for reader in readers)
        try:
            assert_same_sub_schema(obj_base, obj_single)
        except BaseException as exc:
            # Append the source file name to aid failure diagnosis.
            exc.args += ("from " + os.path.basename(path),)
            raise
def create_file_reader(self, filename=None, return_path=False, clean=False,
                       target=None, readall=False):
    """
    Build and return a reader function bound to the specified file.

    If filename is None, a default filename is generated.

    If return_path is True, return ``(reader, path)`` where ``path`` is
    the full path of the file; otherwise return just the reader.
    Default is False.

    If clean is True, try to delete existing versions of the file
    before creating the io object. Default is False.

    If target is None, use the first supported_objects from ioobj.
    If target is False, use the 'read' method.
    If target is the Block or Segment class, use read_block or
    read_segment, respectively.
    If target is a string, use 'read_' + target.

    If readall is True, use the read_all_ method instead of the
    read_ method. Default is False.
    """
    ioobj, path = self.generic_io_object(filename=filename,
                                         return_path=True, clean=clean)
    reader = create_generic_reader(ioobj, target=target, readall=readall)
    if return_path:
        return reader, path
    return reader
def test_reading_same(self):
    """Check that all reading strategies agree: read_all, the base
    'read' method, the default single read, and repeated 'next_block'
    reads must all yield equivalent object hierarchies."""
    for ioobj, path in self.iter_io_objects(return_path=True):
        read_all = create_generic_reader(ioobj, readall=True)
        read_base = create_generic_reader(ioobj, target=False)
        read_next = create_generic_reader(ioobj, target='next_block')
        read_single = create_generic_reader(ioobj)

        every_block = read_all()
        first_base = read_base()
        first_single = read_single()

        # Drain the file block-by-block; ioobj closes itself once the
        # last block has been consumed.
        iterated_blocks = []
        while True:
            iterated_blocks.append(read_next())
            if not ioobj._isopen:
                break

        try:
            assert_same_sub_schema(every_block[0], first_base)
            assert_same_sub_schema(every_block[0], first_single)
            assert_same_sub_schema(every_block, iterated_blocks)
        except BaseException as exc:
            # Record which file produced the mismatch.
            exc.args += ('from ' + os.path.basename(path),)
            raise
        self.assertEqual(len(every_block), len(iterated_blocks))
def test_reading_same(self):
    """Ensure the readall, base, single, and iterative 'next_block'
    reading modes all produce equivalent results for each test file."""
    for ioobj, path in self.iter_io_objects(return_path=True):
        # One reader per strategy, built in the original order.
        reader_all = create_generic_reader(ioobj, readall=True)
        reader_base = create_generic_reader(ioobj, target=False)
        reader_next = create_generic_reader(ioobj, target='next_block')
        reader_single = create_generic_reader(ioobj)

        all_blocks = reader_all()
        base_block = reader_base()
        single_block = reader_single()

        # First 'next' read happens unconditionally, then keep reading
        # while the underlying io object remains open.
        next_blocks = [reader_next()]
        while ioobj._isopen:
            next_blocks.append(reader_next())

        try:
            assert_same_sub_schema(all_blocks[0], base_block)
            assert_same_sub_schema(all_blocks[0], single_block)
            assert_same_sub_schema(all_blocks, next_blocks)
        except BaseException as exc:
            # Attach the file name so failures are traceable.
            exc.args += ('from ' + os.path.basename(path), )
            raise
        self.assertEqual(len(all_blocks), len(next_blocks))
def test_write_then_read(self):
    """
    Test for IO classes that are able to both write and read:

    1 - Generate a full schema with supported objects.
    2 - Write to a file
    3 - Read from the file
    4 - Check the hierarchy
    5 - Check data

    Works only for IOs whose highest supported object is Block or
    Segment (the main cases).
    """
    # FIX: the original began with ``'''...''' % self.ioclass.__name__``.
    # Because of the ``%``, that is an expression statement, not a
    # docstring: ``__doc__`` was None and the formatted string was built
    # and discarded on every call. Replaced with a real docstring (the
    # IO class is identifiable from the test class itself).
    if not self.able_to_write_or_read(writeread=True):
        return
    for cascade in self.cascade_modes:
        ioobj1 = self.generic_io_object(clean=True)
        if ioobj1 is None:
            return

        ob1 = write_generic(ioobj1, target=self.higher)
        close_object_safe(ioobj1)

        ioobj2 = self.generic_io_object()

        # Read the highest supported object from the file
        obj_reader = create_generic_reader(ioobj2, target=False)
        ob2 = obj_reader(cascade=cascade)[0]
        if self.higher == Segment:
            ob2 = ob2.segments[0]

        # some formats (e.g. elphy) do not support double floating
        # point spiketrains
        try:
            assert_same_sub_schema(ob1, ob2, True, 1e-8)
            assert_neo_object_is_compliant(ob1)
            assert_neo_object_is_compliant(ob2)
        # intercept exceptions and add more information
        except BaseException as exc:
            exc.args += ('with cascade=%s ' % cascade, )
            raise

        close_object_safe(ioobj2)
def test_write_then_read(self):
    """
    Round-trip test for IO classes that support both writing and reading:

    1 - Generate a full schema with supported objects.
    2 - Write to a file
    3 - Read from the file
    4 - Check the hierarchy
    5 - Check data

    Works only for IOs whose highest supported object is Block or
    Segment (the main cases).
    """
    # FIX: the original leading string used ``'''...''' % self.ioclass.
    # __name__`` — an expression, not a docstring — so the method had no
    # ``__doc__`` and rebuilt/discarded the string on each call.
    # Converted to a plain docstring.
    if not self.able_to_write_or_read(writeread=True):
        return
    for cascade in self.cascade_modes:
        ioobj1 = self.generic_io_object(clean=True)
        if ioobj1 is None:
            return

        ob1 = write_generic(ioobj1, target=self.higher)
        close_object_safe(ioobj1)

        ioobj2 = self.generic_io_object()

        # Read the highest supported object from the file
        obj_reader = create_generic_reader(ioobj2, target=False)
        ob2 = obj_reader(cascade=cascade)[0]
        if self.higher == Segment:
            ob2 = ob2.segments[0]

        # some formats (e.g. elphy) do not support double floating
        # point spiketrains
        try:
            assert_same_sub_schema(ob1, ob2, True, 1e-8)
            assert_neo_object_is_compliant(ob1)
            assert_neo_object_is_compliant(ob2)
        # intercept exceptions and add more information
        except BaseException as exc:
            exc.args += ('with cascade=%s ' % cascade,)
            raise

        close_object_safe(ioobj2)