def test_accumulate_files(self):
    """Accumulating two HDF files should element-wise sum their detector data.

    `plp.accumulate_HDF_files` writes an 'ADD_<first-file>' output into the
    current working directory; its 'entry1/data/hmm' array must equal the
    sum of the two inputs' arrays.
    """
    fnames = ['PLP0000708.nx.hdf', 'PLP0000709.nx.hdf']
    pths = [os.path.join(self.path, fname) for fname in fnames]
    plp.accumulate_HDF_files(pths)

    # h5py.File is a context manager; `with` guarantees all three files are
    # closed even if an assertion fails, replacing the manual None-sentinel
    # try/finally bookkeeping.
    with h5py.File(
        os.path.join(self.path, 'PLP0000708.nx.hdf'), 'r'
    ) as f8, h5py.File(
        os.path.join(self.path, 'PLP0000709.nx.hdf'), 'r'
    ) as f9, h5py.File(
        os.path.join(os.getcwd(), 'ADD_PLP0000708.nx.hdf'), 'r'
    ) as fadd:
        f8d = f8['entry1/data/hmm'][0]
        f9d = f9['entry1/data/hmm'][0]
        faddd = fadd['entry1/data/hmm'][0]
        # accumulated image must be the element-wise sum of the inputs
        assert_equal(faddd, f8d + f9d)
def test_accumulate_files_reduce(self):
    """Add a file to itself; the summed file must process, reduce, and
    give smaller statistical error bars than the single file alone."""
    fnames = ['PLP0000708.nx.hdf', 'PLP0000708.nx.hdf']
    pths = [os.path.join(self.pth, fname) for fname in fnames]
    plp.accumulate_HDF_files(pths)

    with warnings.catch_warnings():
        warnings.simplefilter('ignore', RuntimeWarning)

        summed = os.path.join(os.getcwd(), 'ADD_PLP0000708.nx.hdf')

        # the accumulated file should be processable
        fadd = PlatypusNexus(summed)
        fadd.process()

        # ...and reduceable against a direct-beam run
        reducer = PlatypusReduce(
            os.path.join(self.pth, 'PLP0000711.nx.hdf'))
        datasets, reduced = reducer.reduce(summed)
        assert_('y' in reduced)

        # doubling the counts must shrink every error bar
        datasets2, reduced2 = reducer.reduce(
            os.path.join(self.pth, 'PLP0000708.nx.hdf'))
        assert_(np.all(reduced['y_err'] < reduced2['y_err']))
def test_accumulate_files_reduce(self):
    """Add a file to itself; the summed file must process, reduce, and
    give smaller statistical error bars than the lone file."""
    fnames = ["PLP0000708.nx.hdf", "PLP0000708.nx.hdf"]
    plp.accumulate_HDF_files(
        [os.path.join(self.path, name) for name in fnames])

    summed = os.path.join(os.getcwd(), "ADD_PLP0000708.nx.hdf")

    # the accumulated file should be processable
    fadd = PlatypusNexus(summed)
    fadd.process()

    # ...and reduceable against a direct-beam run
    reducer = ReducePlatypus(os.path.join(self.path, "PLP0000711.nx.hdf"))
    reduced = reducer.reduce(summed)
    assert_("ydata" in reduced)

    # doubling the counts must shrink every error bar
    reduced2 = reducer.reduce(os.path.join(self.path, "PLP0000708.nx.hdf"))
    assert_(np.all(reduced["ydata_sd"] < reduced2["ydata_sd"]))
def test_accumulate_files(self):
    """Accumulating two HDF files should element-wise sum their detector data.

    `plp.accumulate_HDF_files` writes an 'ADD_<first-file>' output into
    `self.tmpdir`; its 'entry1/data/hmm' array must equal the sum of the
    two inputs' arrays.
    """
    fnames = ["PLP0000708.nx.hdf", "PLP0000709.nx.hdf"]
    pths = [pjoin(self.pth, fname) for fname in fnames]
    plp.accumulate_HDF_files(pths)

    # h5py.File is a context manager; `with` guarantees the files are
    # closed even if an assertion fails, replacing the manual
    # None-sentinel try/finally bookkeeping.
    with h5py.File(pjoin(self.pth, "PLP0000708.nx.hdf"), "r") as f8, \
         h5py.File(pjoin(self.pth, "PLP0000709.nx.hdf"), "r") as f9, \
         h5py.File(pjoin(self.tmpdir, "ADD_PLP0000708.nx.hdf"), "r") as fadd:
        f8d = f8["entry1/data/hmm"][0]
        f9d = f9["entry1/data/hmm"][0]
        faddd = fadd["entry1/data/hmm"][0]
        # accumulated image must be the element-wise sum of the inputs
        assert_equal(faddd, f8d + f9d)
def test_accumulate_files(self):
    """Accumulating two HDF files should element-wise sum their detector data.

    `plp.accumulate_HDF_files` writes an 'ADD_<first-file>' output into
    `self.tmpdir`; its 'entry1/data/hmm' array must equal the sum of the
    two inputs' arrays.
    """
    fnames = ['PLP0000708.nx.hdf', 'PLP0000709.nx.hdf']
    pths = [os.path.join(self.pth, fname) for fname in fnames]
    plp.accumulate_HDF_files(pths)

    # h5py.File is a context manager; `with` guarantees the files are
    # closed even if an assertion fails, replacing the manual
    # None-sentinel try/finally bookkeeping.
    with h5py.File(
        os.path.join(self.pth, 'PLP0000708.nx.hdf'), 'r'
    ) as f8, h5py.File(
        os.path.join(self.pth, 'PLP0000709.nx.hdf'), 'r'
    ) as f9, h5py.File(
        os.path.join(self.tmpdir, 'ADD_PLP0000708.nx.hdf'), 'r'
    ) as fadd:
        f8d = f8['entry1/data/hmm'][0]
        f9d = f9['entry1/data/hmm'][0]
        faddd = fadd['entry1/data/hmm'][0]
        # accumulated image must be the element-wise sum of the inputs
        assert_equal(faddd, f8d + f9d)
def test_accumulate_files_reduce(self):
    """Add a file to itself; the summed file must process, reduce, and
    give smaller statistical error bars than the single file alone."""
    fnames = ["PLP0000708.nx.hdf", "PLP0000708.nx.hdf"]
    pths = [pjoin(self.pth, fname) for fname in fnames]
    plp.accumulate_HDF_files(pths)

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", RuntimeWarning)

        summed = pjoin(os.getcwd(), "ADD_PLP0000708.nx.hdf")

        # the accumulated file should be processable
        fadd = PlatypusNexus(summed)
        fadd.process()

        # ...and reduceable against a direct-beam run
        reducer = PlatypusReduce(pjoin(self.pth, "PLP0000711.nx.hdf"))
        datasets, reduced = reducer.reduce(summed)
        assert_("y" in reduced)

        # doubling the counts must shrink every error bar
        datasets2, reduced2 = reducer.reduce(
            pjoin(self.pth, "PLP0000708.nx.hdf"))
        assert_(np.all(reduced["y_err"] < reduced2["y_err"]))