def test_event_reduction(self):
    """Event-mode reduction splits one run into multiple time slices.

    Checks that the correct number of datasets are produced, that they
    are written to disk, that their resolutions agree, and that a
    timestamp is recorded in the XML output.
    """
    # a RuntimeWarning about pixel size is expected during processing
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", RuntimeWarning)
        a = PlatypusReduce(pjoin(self.pth, "PLP0011613.nx.hdf"))
        a.reduce(
            pjoin(self.pth, "PLP0011641.nx.hdf"),
            integrate=0,
            rebin_percent=2,
            eventmode=[0, 900, 1800],
        )

    # two time slices ([0, 900] and [900, 1800]) -> two datasets
    assert_equal(a.y.shape[0], 2)

    # check that two datasets are written out.
    assert os.path.isfile("PLP0011641_0.dat")
    assert os.path.isfile("PLP0011641_1.dat")

    # check that the resolutions are pretty much the same
    assert_allclose(a.x_err[0] / a.x[0], a.x_err[1] / a.x[1], atol=0.001)

    # check that the (right?) timestamps are written into the datafile.
    # BUG FIX: the original discarded the result of the attribute lookup,
    # so an empty time string would have passed silently. Assert that a
    # non-empty time attribute is actually present.
    tree = ET.parse(pjoin(os.getcwd(), "PLP0011641_1.xml"))
    assert tree.find(".//REFentry").attrib["time"]
def test_reduction_method(self):
    """Smoke test: reduction works via both ``reduce`` and ``__call__``."""
    # the pixel-size RuntimeWarning is expected; silence it
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", RuntimeWarning)
        reducer = PlatypusReduce("PLP0000711.nx.hdf", data_folder=self.pth)

        # exercise the explicit reduce method
        reducer.reduce(
            "PLP0000708.nx.hdf",
            data_folder=self.pth,
            rebin_percent=4,
        )

        # exercise the callable interface
        reducer(
            "PLP0000708.nx.hdf",
            data_folder=self.pth,
            rebin_percent=4,
        )

    # reduction should have left output files in the current directory
    for produced in ("./PLP0000708_0.dat", "./PLP0000708_0.xml"):
        assert os.path.isfile(produced)

    # the .dat output must be readable back in
    ReflectDataset("./PLP0000708_0.dat")

    # off-specular data should also be writable
    reducer.write_offspecular("offspec.xml", 0)
def test_accumulate_files_reduce(self):
    """Accumulating a file with itself should shrink the error bars."""
    # add the same run to itself -> double the counting statistics
    source = os.path.join(self.pth, 'PLP0000708.nx.hdf')
    plp.accumulate_HDF_files([source, source])

    summed = os.path.join(os.getcwd(), 'ADD_PLP0000708.nx.hdf')
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', RuntimeWarning)
        # the accumulated file should be processable
        PlatypusNexus(summed).process()

        # it should also be reduceable
        reducer = PlatypusReduce(
            os.path.join(self.pth, 'PLP0000711.nx.hdf'))
        _, accumulated = reducer.reduce(summed)
        assert_('y' in accumulated)

        # doubled statistics -> smaller error bars than the single run
        _, single = reducer.reduce(source)
        assert_(np.all(accumulated['y_err'] < single['y_err']))
def test_free_liquids(self):
    """Smoke test that free-liquid runs can be reduced without error."""
    # pair each direct-beam run with its reflected run
    runs = [('PLP0038418.nx.hdf', 'PLP0038420.nx.hdf'),
            ('PLP0038417.nx.hdf', 'PLP0038421.nx.hdf')]
    for direct, reflected in runs:
        reducer = PlatypusReduce(direct, data_folder=self.pth)
        reducer.reduce(reflected, data_folder=self.pth, rebin_percent=4)
def test_reduction_method(self):
    """Quick smoke test that reduction runs via both entry points."""
    reducer = PlatypusReduce('PLP0000711.nx.hdf', data_folder=self.pth)

    # the explicit reduce method ...
    reducer.reduce('PLP0000708.nx.hdf', data_folder=self.pth,
                   rebin_percent=4)

    # ... and the __call__ interface
    reducer('PLP0000708.nx.hdf', data_folder=self.pth, rebin_percent=4)

    # output files should have appeared in the current directory
    for fname in ('./PLP0000708_0.dat', './PLP0000708_0.xml'):
        assert_(os.path.isfile(fname))

    # off-specular data should be writable too
    reducer.write_offspecular('offspec.xml', 0)
def test_reduction_method(self):
    """Quick smoke test that the reduction can occur."""
    reducer = PlatypusReduce('PLP0000711.nx.hdf', data_folder=self.path,
                             rebin_percent=4)

    # reduction through the explicit method ...
    reducer.reduce('PLP0000708.nx.hdf', data_folder=self.path,
                   rebin_percent=4)

    # ... and through the callable interface
    reducer('PLP0000708.nx.hdf', data_folder=self.path, rebin_percent=4)

    # a couple of files should have been saved in the current directory
    for fname in ('./PLP0000708_0.dat', './PLP0000708_0.xml'):
        assert_(os.path.isfile(fname))

    # off-specular output should also be writable
    reducer.write_offspecular('offspec.xml', 0)
def test_accumulate_files_reduce(self):
    """Adding a file to itself should give smaller statistics."""
    src = os.path.join(self.path, 'PLP0000708.nx.hdf')
    # accumulate the run with itself -> twice the counts
    plp.accumulate_HDF_files([src, src])

    combined = os.path.join(os.getcwd(), 'ADD_PLP0000708.nx.hdf')

    # the accumulated file should be processable
    fadd = PlatypusNexus(combined)
    fadd.process()

    # it should also be reduceable
    reducer = PlatypusReduce(
        os.path.join(self.path, 'PLP0000711.nx.hdf'))
    reduced = reducer.reduce(combined)
    assert_('ydata' in reduced)

    # doubled counts -> smaller error bars than the single run
    reduced2 = reducer.reduce(src)
    assert_(np.all(reduced['ydata_sd'] < reduced2['ydata_sd']))
def test_event_reduction(self):
    """Event-mode reduction produces one dataset per time slice.

    Checks the number of datasets, the output files, the resolution
    agreement between slices, and the XML timestamp.
    """
    a = PlatypusReduce(
        os.path.join(self.pth, 'PLP0011613.nx.hdf'))
    a.reduce(os.path.join(self.pth, 'PLP0011641.nx.hdf'),
             integrate=0, rebin_percent=2,
             eventmode=[0, 900, 1800])

    # two time slices ([0, 900] and [900, 1800]) -> two datasets
    assert_equal(a.y.shape[0], 2)

    # check that two datasets are written out.
    assert_(os.path.isfile('PLP0011641_0.dat'))
    assert_(os.path.isfile('PLP0011641_1.dat'))

    # check that the resolutions are pretty much the same
    assert_allclose(a.x_err[0] / a.x[0], a.x_err[1] / a.x[1],
                    atol=0.001)

    # check that the (right?) timestamps are written into the datafile.
    # BUG FIX: the original discarded the result of the attribute lookup,
    # so an empty time string would have passed silently. Assert that a
    # non-empty time attribute is actually present.
    tree = ET.parse(os.path.join(os.getcwd(), 'PLP0011641_1.xml'))
    assert_(tree.find('.//REFentry').attrib['time'])
def test_accumulate_files_reduce(self):
    """Accumulating a run with itself should shrink the error bars."""
    source = pjoin(self.pth, "PLP0000708.nx.hdf")
    # add the file to itself -> twice the counting statistics
    plp.accumulate_HDF_files([source, source])

    summed = pjoin(os.getcwd(), "ADD_PLP0000708.nx.hdf")
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", RuntimeWarning)
        # the accumulated file should be processable
        PlatypusNexus(summed).process()

        # it should also be reduceable
        reducer = PlatypusReduce(pjoin(self.pth, "PLP0000711.nx.hdf"))
        _, combined = reducer.reduce(summed)
        assert_("y" in combined)

        # doubled statistics -> smaller error bars than the single run
        _, single = reducer.reduce(source)
        assert_(np.all(combined["y_err"] < single["y_err"]))