def test_qdcd_normalisation_profile(self):
    """
    Test that qdcd normalisation changes the reflected intensities.

    Builds a spline interpolator from the qdcd normalisation file, applies
    it to a two-scan profile, and asserts that the R values actually change.
    """
    file_name = path.join(path.dirname(islatu.__file__),
                          "tests/test_files/qdcd_norm.dat")
    normalisation_metadata, normalisation_data = io.i07_dat_parser(
        file_name)
    # Spline representation of the dcd normalisation curve.
    itp = splrep(normalisation_data["qdcd_"], normalisation_data["adc2"])
    file_name1 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_a.dat")
    file_name2 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_b.dat")
    files = [file_name1, file_name2]
    r = refl_data.Profile(files, io.i07_dat_parser)
    r.crop(cropping.crop_around_peak_2d)
    r.bkg_sub(background.fit_gaussian_2d)
    r_store0 = r.scans[0].R
    r_store1 = r.scans[1].R
    r.qdcd_normalisation(itp)
    # BUG FIX: the np.any(...) results were previously discarded, so this
    # test asserted nothing. Assert that normalisation modified R.
    assert np.any(
        np.not_equal(unp.nominal_values(r.scans[0].R),
                     unp.nominal_values(r_store0)))
    assert np.any(
        np.not_equal(unp.nominal_values(r.scans[1].R),
                     unp.nominal_values(r_store1)))
def test_init_profile(self):
    """
    Check that a Profile constructed from two data files holds two scans.
    """
    test_dir = path.join(path.dirname(islatu.__file__), "tests/test_files")
    data_files = [path.join(test_dir, name)
                  for name in ("test_a.dat", "test_b.dat")]
    profile = refl_data.Profile(data_files, io.i07_dat_parser)
    assert_equal(len(profile.scans), 2)
def test_rebin_profile(self):
    """
    Check that rebinning a concatenated two-scan profile onto three q
    vectors yields arrays of the expected (binned) size.
    """
    test_dir = path.join(path.dirname(islatu.__file__), "tests/test_files")
    data_files = [path.join(test_dir, name)
                  for name in ("test_a.dat", "test_b.dat")]
    profile = refl_data.Profile(data_files, io.i07_dat_parser)
    profile.crop(cropping.crop_around_peak_2d)
    profile.bkg_sub(background.fit_gaussian_2d)
    profile.concatenate()
    profile.rebin(number_of_q_vectors=3)
    # Rebinning onto 3 q vectors leaves 2 populated bins for this data.
    assert_equal(profile.q.size, 2)
    assert_equal(profile.R.size, 2)
def test_concatentate_profile(self):
    """
    Check that concatenating two 3-point scans yields 6-element q, R,
    dq and dR arrays on the profile.
    """
    test_dir = path.join(path.dirname(islatu.__file__), "tests/test_files")
    data_files = [path.join(test_dir, name)
                  for name in ("test_a.dat", "test_b.dat")]
    profile = refl_data.Profile(data_files, io.i07_dat_parser)
    profile.crop(cropping.crop_around_peak_2d)
    profile.bkg_sub(background.fit_gaussian_2d)
    profile.concatenate()
    for attribute in (profile.q, profile.R, profile.dq, profile.dR):
        assert_equal(attribute.size, 6)
def test_crop_bkg_profile(self):
    """
    Test that cropping and background subtraction populate non-zero
    reflectivities, uncertainties and pixel counts on each scan.
    """
    file_name1 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_a.dat")
    file_name2 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_b.dat")
    files = [file_name1, file_name2]
    r = refl_data.Profile(files, io.i07_dat_parser)
    r.crop(cropping.crop_around_peak_2d)
    r.bkg_sub(background.fit_gaussian_2d)
    a2 = np.zeros((3))
    # BUG FIX: the np.any(...) results were previously discarded, so this
    # test asserted nothing. Assert the arrays differ from all-zeros.
    assert np.any(np.not_equal(unp.nominal_values(r.scans[0].R), a2))
    assert np.any(np.not_equal(unp.nominal_values(r.scans[1].R), a2))
    assert np.any(np.not_equal(unp.std_devs(r.scans[0].R), a2))
    assert np.any(np.not_equal(unp.std_devs(r.scans[1].R), a2))
    assert np.any(np.not_equal(r.scans[0].n_pixels, a2))
    assert np.any(np.not_equal(r.scans[1].n_pixels, a2))
def test_resolution_function_profile(self):
    """
    Test that the pixel resolution function leaves nominal q values
    unchanged while introducing non-zero q uncertainties.
    """
    file_name1 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_a.dat")
    file_name2 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_b.dat")
    files = [file_name1, file_name2]
    r = refl_data.Profile(files, io.i07_dat_parser)
    r.crop(cropping.crop_around_peak_2d)
    r.bkg_sub(background.fit_gaussian_2d)
    q_store0 = unp.nominal_values(r.scans[0].q)
    q_store1 = unp.nominal_values(r.scans[1].q)
    r.resolution_function(1)
    assert_equal(unp.nominal_values(r.scans[0].q), q_store0)
    # BUG FIX: the np.any(...) results were previously discarded, so the
    # uncertainty checks asserted nothing. Assert std devs are non-zero.
    assert np.any(np.not_equal(unp.std_devs(r.scans[0].q), np.zeros((3))))
    assert_equal(unp.nominal_values(r.scans[1].q), q_store1)
    assert np.any(np.not_equal(unp.std_devs(r.scans[1].q), np.zeros((3))))
def test_normalise_ter_profile(self):
    """
    Test that total external reflection normalisation keeps the array
    sizes but changes the R and dR values.
    """
    file_name1 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_a.dat")
    file_name2 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_b.dat")
    files = [file_name1, file_name2]
    r = refl_data.Profile(files, io.i07_dat_parser)
    r.crop(cropping.crop_around_peak_2d)
    r.bkg_sub(background.fit_gaussian_2d)
    r.concatenate()
    store = r.R
    dstore = r.dR
    r.normalise_ter()
    assert_equal(r.q.size, 6)
    assert_equal(r.R.size, 6)
    # BUG FIX: the np.any(...) results were previously discarded, so this
    # test asserted nothing. Assert normalisation modified R and dR.
    assert np.any(np.not_equal(r.R, store))
    assert np.any(np.not_equal(r.dR, dstore))
def test_transmission_profile(self):
    """
    Test that transmission normalisation changes the reflected
    intensities of every scan in the profile.
    """
    file_name1 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_a.dat")
    file_name2 = path.join(path.dirname(islatu.__file__),
                           "tests/test_files/test_b.dat")
    files = [file_name1, file_name2]
    r = refl_data.Profile(files, io.i07_dat_parser)
    r.crop(cropping.crop_around_peak_2d)
    r.bkg_sub(background.fit_gaussian_2d)
    r_store0 = r.scans[0].R
    r_store1 = r.scans[1].R
    r.transmission_normalisation()
    # BUG FIX: the np.any(...) results were previously discarded, so this
    # test asserted nothing. Assert that normalisation modified R.
    assert np.any(
        np.not_equal(unp.nominal_values(r.scans[0].R),
                     unp.nominal_values(r_store0)))
    assert np.any(
        np.not_equal(unp.nominal_values(r.scans[1].R),
                     unp.nominal_values(r_store1)))
def i07reduce(run_numbers, yaml_file, directory='/dls/{}/data/{}/{}/',
              title='Unknown'):
    """
    The runner that parses the yaml file and performs the data reduction.

    run_numbers (:py:attr:`list` of :py:attr:`int`): Reflectometry scans
        that make up the profile.
    yaml_file (:py:attr:`str`): File path to instruction set.
    directory (:py:attr:`str`): Outline for directory path.
    title (:py:attr:`str`): A title for the experiment.
    """
    the_boss = Foreperson(run_numbers, yaml_file, directory, title)
    files_to_reduce = the_boss.reduction.input_files

    print("-" * 10)
    print('File Parsing')
    print("-" * 10)
    refl = refl_data.Profile(
        files_to_reduce, the_boss.reduction.parser,
        the_boss.data_source.experiment.measurement.q_axis_name,
        the_boss.data_source.experiment.measurement.theta_axis_name,
        None, 0,
        the_boss.data_source.experiment.measurement.pixel_max,
        the_boss.data_source.experiment.measurement.hot_pixel_max,
        the_boss.data_source.experiment.measurement.transpose)

    print("-" * 10)
    print('Cropping')
    print("-" * 10)
    refl.crop(the_boss.reduction.crop_function,
              the_boss.reduction.crop_kwargs)

    print("-" * 10)
    print('Background Subtraction')
    print("-" * 10)
    refl.bkg_sub(the_boss.reduction.bkg_function,
                 the_boss.reduction.bkg_kwargs)
    the_boss.reduction.data_state.background = 'corrected'

    print("-" * 10)
    print('Estimating Resolution Function')
    print("-" * 10)
    refl.resolution_function(
        the_boss.data_source.experiment.measurement.qz_dimension,
        progress=True)
    the_boss.reduction.data_state.resolution = 'estimated'

    print("-" * 10)
    print('Performing Data Corrections')
    print("-" * 10)
    # The dcd normalisation step is optional and only run when a
    # normalisation file was specified in the instruction set.
    if the_boss.reduction.dcd_normalisation is not None:
        itp = corrections.get_interpolator(
            the_boss.reduction.dcd_normalisation, the_boss.reduction.parser)
        refl.qdcd_normalisation(itp)
        the_boss.reduction.data_state.dcd = 'normalised'
    refl.footprint_correction(
        the_boss.reduction.beam_width, the_boss.reduction.sample_size)
    refl.transmission_normalisation()
    the_boss.reduction.data_state.transmission = 'normalised'
    refl.concatenate()
    refl.normalise_ter()
    the_boss.reduction.data_state.intensity = 'normalised'

    if the_boss.data.rebin:
        print("-" * 10)
        print('Rebinning')
        print("-" * 10)
        if the_boss.data.q_min is None:
            refl.rebin(number_of_q_vectors=the_boss.data.n_qvectors)
        else:
            if the_boss.data.q_space == 'linear':
                spacing = np.linspace
            elif the_boss.data.q_space == 'log':
                spacing = np.logspace
            else:
                # BUG FIX: previously an unrecognised q_space fell through
                # and raised an opaque NameError on `spacing`.
                raise ValueError(
                    "q_space must be 'linear' or 'log', got "
                    "'{}'".format(the_boss.data.q_space))
            refl.rebin(new_q=spacing(refl.q.min(), refl.q.max(),
                                     the_boss.data.q_step))
        the_boss.reduction.data_state.rebinned = the_boss.data.q_shape

    the_boss.data_source.experiment.measurement.q_range = [
        str(refl.q.min()), str(refl.q.max())]
    the_boss.data.n_qvectors = str(len(refl.R))

    try:
        # column_4 is optional in the instruction set; its presence selects
        # the 4-column (q, R, dR, dq) output format.
        _ = the_boss.data.column_4
        data = np.array([refl.q, refl.R, refl.dR, refl.dq]).T
        np.savetxt(
            (the_boss.directory_path + '/processing/XRR_{}.dat'.format(
                run_numbers[0])), data,
            header='{}\n 1 2 3 4'.format(dump(vars(the_boss))))
        if the_boss.data.both:
            data = np.array([refl.q, refl.R, refl.dR]).T
            np.savetxt(
                (the_boss.directory_path +
                 '/processing/XRR_{}_3col.dat'.format(run_numbers[0])),
                data, header='{}\n 1 2 3'.format(dump(vars(the_boss))))
    # BUG FIX: was a bare `except:`, which silently swallowed every error
    # (including KeyboardInterrupt). Only a missing column_4 attribute
    # should select the 3-column fallback.
    except AttributeError:
        data = np.array([refl.q, refl.R, refl.dR]).T
        np.savetxt(
            (the_boss.directory_path + '/processing/XRR_{}.dat'.format(
                run_numbers[0])), data,
            header='{}\n 1 2 3'.format(dump(vars(the_boss))))

    print("-" * 10)
    print('Reduced Data Stored in Processing Directory')
    print("-" * 10)