def fill_table(self):
    """Populate the gamma-filter table: for each file, re-load it with the
    manual gamma filter enabled and report how many pixels were corrected.

    Column 1 shows the corrected/total pixel count, column 2 the percentage.
    """
    raw_image_size = self.raw_image_size
    total_nbr_pixels = raw_image_size[0] * raw_image_size[1]
    for _row, _file in enumerate(self.list_files):
        o_norm = Normalization()
        # manual gamma filter only; threshold comes from the UI widget
        o_norm.load(file=_file,
                    auto_gamma_filter=False,
                    manual_gamma_filter=True,
                    manual_gamma_threshold=self.__get_filtering_coefficient_value())
        _raw_data = o_norm.data['sample']['data']
        nbr_pixel_corrected = self.get_number_pixel_gamma_corrected(
            data=_raw_data)

        # number of pixel corrected
        _item = QtGui.QTableWidgetItem("{}/{}".format(
            nbr_pixel_corrected, total_nbr_pixels))
        self.ui.tableWidget.setItem(_row, 1, _item)

        # percentage of pixel corrected
        _item = QtGui.QTableWidgetItem("{:.02f}%".format(
            nbr_pixel_corrected * 100 / total_nbr_pixels))
        self.ui.tableWidget.setItem(_row, 2, _item)
def test_df_correction(self):
    """Check that dark-field correction is applied to sample and ob data."""
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample/', auto_gamma_filter=False)
    o_norm.load(folder=self.data_path + '/tif/ob/', data_type='ob',
                auto_gamma_filter=False)
    for _df_name in ('df002.tif', 'df003.tif'):
        o_norm.load(file=self.data_path + '/tif/df/' + _df_name,
                    data_type='df', auto_gamma_filter=False)

    o_norm.df_correction()

    # sample: columns 2, 3, 4 should hold 1, 2, 3 after correction
    _expected_data = np.zeros([5, 5])
    for _col, _value in zip((2, 3, 4), (1, 2, 3)):
        _expected_data[:, _col] = _value
    assert (_expected_data == o_norm.data['sample']['data'][0]).all()

    # ob: fully zeroed out by its own df correction
    assert (np.zeros([5, 5]) == o_norm.data['ob']['data'][0]).all()
def load_data(self, folder_selected):
    """Load all FITS images from *folder_selected* (TIFF as a fallback),
    then try to auto-locate the matching *_Spectra.txt file."""
    list_files = glob.glob(os.path.join(folder_selected, '*.fits'))
    if not list_files:
        # no fits files -> fall back to tif/tiff
        list_files = glob.glob(os.path.join(folder_selected, '*.tif*'))
    else:  # fits
        # keep only files of interest (skip the summed preview frame)
        list_files = [_file for _file in list_files
                      if "_SummedImg.fits" not in _file and ".fits" in _file]

    # sort list of files
    list_files.sort()

    o_norm = Normalization()
    o_norm.load(file=list_files, notebook=True)
    self.data = o_norm.data['sample']['data']
    self.list_files = o_norm.data['sample']['file_name']
    display(HTML('<span style="font-size: 20px; color:blue">' +
                 str(len(list_files)) + ' files have been loaded</span>'))

    # define time spectra file
    folder = os.path.dirname(self.list_files[0])
    spectra_file = glob.glob(os.path.join(folder, '*_Spectra.txt'))
    if spectra_file:
        self.spectra_file = spectra_file[0]
        display(HTML('<span style="font-size: 20px; color:blue"> Spectra File automatically located: ' +
                     self.spectra_file + '</span>'))
    else:  # ask for spectra file
        self.select_time_spectra_file()
def __load_files(self):
    """Load every group's images into ``self.exp_dict``, updating a
    notebook progress bar (bar + "i/n" label) as each group completes.
    """
    progress_bar_layout = widgets.Layout(border='1px solid blue')
    hbox = widgets.HBox([
        # fixed user-facing typo: was "FUll Progress"
        widgets.IntProgress(description="Full Progress",
                            layout=progress_bar_layout),
        widgets.Label(value='', layout=widgets.Layout(width='10%'))
    ])
    w = hbox.children[0]
    nbr_groups = len(self.exp_dict.keys())
    w.max = nbr_groups
    label = hbox.children[1]
    label.value = f"0/{nbr_groups}"
    display(hbox)

    for _index, _key in enumerate(self.exp_dict.keys()):
        _item = self.exp_dict[_key]
        _path = _item['folder']
        list_files = _item['list_of_files']
        full_list_files = [os.path.join(_path, _file) for _file in list_files]
        o_norm = Normalization()
        o_norm.load(file=full_list_files, notebook=True)
        _item['list_of_images'] = o_norm.data['sample']['data']
        self.exp_dict[_key] = _item
        w.value = _index + 1
        label.value = f"{_index + 1}/{nbr_groups}"

    hbox.close()
    display(widgets.Label(value="Done!"))
def load(self, list_images):
    """Load *list_images* through Normalization and cache data + names."""
    self.list_file_names = list_images
    o_norm = Normalization()
    o_norm.load(file=list_images, notebook=True)
    self.o_norm = o_norm
    self.data = o_norm.data['sample']['data']
    self.__calculate_image_dimension()
def load_data(self):
    """Load sample, ob and (optionally) df images, all with the manual
    gamma filter driven by ``self.gamma_threshold``."""
    self.o_norm = Normalization()

    # all three data types share the same gamma-filter settings
    def _load(files, data_type):
        self.o_norm.load(file=files,
                         data_type=data_type,
                         notebook=True,
                         auto_gamma_filter=False,
                         manual_gamma_filter=True,
                         manual_gamma_threshold=self.gamma_threshold)

    # sample
    list_sample = self.files.sample
    self.o_norm.load(file=list_sample,
                     notebook=True,
                     auto_gamma_filter=False,
                     manual_gamma_filter=True,
                     manual_gamma_threshold=self.gamma_threshold)
    self.data.sample = self.o_norm.data['sample']['data']
    self.list_file_names = list_sample

    # ob
    _load(self.files.ob, 'ob')
    self.data.ob = self.o_norm.data['ob']['data']

    # df (optional)
    list_df = self.files.df
    if list_df:
        _load(list_df, 'df')
        self.data.df = self.o_norm.data['df']['data']
def merging(self, output_folder):
    """combine images using algorithm provided"""
    # pick the combining algorithm selected in the UI ('mean' or add)
    algorithm = self.__mean if self.combine_method.value == 'mean' else self.__add

    # get output folder
    output_folder = os.path.abspath(output_folder)

    # dictionary describing how the images will be combined
    merging_dict = self.__create_merging_dictionary()
    self.merginc_dict_debugging = merging_dict

    # final list of files to merge
    final_dict_of_files_to_merge = self.__create_dict_of_files_to_merge(merging_dict)
    self.final_dict_of_files_to_merge_debugging = final_dict_of_files_to_merge

    final_nbr_folders = len(merging_dict.keys())
    folder_level_ui = widgets.HBox([
        widgets.Label("Folder Progress:", layout=widgets.Layout(width='20%')),
        widgets.IntProgress(max=final_nbr_folders,
                            layout=widgets.Layout(width='50%'))])
    display(folder_level_ui)
    w1 = folder_level_ui.children[1]

    file_level_ui = widgets.HBox([
        widgets.Label("File Progress:", layout=widgets.Layout(width='20%')),
        widgets.IntProgress(max=self.nbr_files_in_each_folder,
                            layout=widgets.Layout(width='50%'))])
    display(file_level_ui)
    w2 = file_level_ui.children[1]

    for _index_final_folder, _final_folder in enumerate(
            final_dict_of_files_to_merge.keys()):
        file_handler.make_or_reset_folder(
            os.path.join(output_folder, _final_folder))
        list_files_to_merge = final_dict_of_files_to_merge[_final_folder]
        for _index_files_to_merge, _files_to_merge in enumerate(list_files_to_merge):
            _files_to_merge = list(_files_to_merge)
            self.files_to_merge_for_testing = _files_to_merge
            o_load = Normalization()
            o_load.load(file=_files_to_merge)
            combined_data = self.__merging_algorithm(
                algorithm, o_load.data['sample']['data'])
            self.combined_data_for_testing = combined_data
            # merged file keeps the name of the first file of the group
            output_file_name = os.path.join(output_folder, _final_folder,
                                            os.path.basename(_files_to_merge[0]))
            file_handler.save_data(data=combined_data,
                                   filename=output_file_name)
            w2.value = _index_files_to_merge + 1
        w1.value = _index_final_folder + 1
def test_loading_list_of_files(self):
    """assert initialization using list of files"""
    list_files = [self.data_path + '/tif/sample/' + _name
                  for _name in ('image001.tif', 'image002.tif', 'image003.tif')]
    o_norm = Normalization()
    o_norm.load(file=list_files, auto_gamma_filter=False)
    assert np.shape(o_norm.data['sample']['data']) == (3, 5, 5)
def load_data(self, folder):
    """Load the files of the most dominant extension found in *folder*."""
    self.input_folder = folder
    o_list_dominant = ListMostDominantExtension(working_dir=folder)
    o_list_dominant.calculate()
    self.list_files = o_list_dominant.get_files_of_selected_ext().list_files
    o_norm = Normalization()
    o_norm.load(file=self.list_files, notebook=True)
    self.list_data = o_norm.data['sample']['data']
def test_initialization_using_array_with_ob(self):
    """assert initialization using arrays with ob"""
    ob_files = [self.data_path + '/tif/ob/ob001.tif',
                self.data_path + '/tif/ob/ob002.tif']
    data = [np.asarray(Image.open(_file)) for _file in ob_files]
    o_norm = Normalization()
    o_norm.load(data=data, data_type='ob', auto_gamma_filter=False)
    assert np.shape(o_norm.data['ob']['data']) == (2, 5, 5)
def load_images(self):
    """Load the images currently selected in the folder widget.

    Shows an error message and returns early when no selection exists.
    """
    try:
        self.list_data_files = self.folder_ui.selected
    except AttributeError:
        # widget missing / nothing selected yet.
        # Was a bare ``except:``, which also swallowed KeyboardInterrupt
        # and SystemExit; AttributeError is what a missing selection raises.
        display(HTML('<span style="font-size: 20px; color:red">Please Select a set of Images First!</span>'))
        return
    self.o_load = Normalization()
    self.o_load.load(file=self.list_data_files, notebook=True)
    self.list_data = self.o_load.data['sample']['data']
def load_images(self, list_images=None):
    """Load *list_images*; an empty/omitted list means "use the widget selection".

    Caches file count, image dimensions and the squeezed data array.
    The default was a mutable ``[]`` (shared across calls); ``None`` is the
    safe sentinel and is treated the same as an explicitly passed ``[]``.
    """
    if not list_images:
        list_images = self.list_images_ui.selected
    self.o_norm = Normalization()
    self.o_norm.load(file=list_images, notebook=True)
    self.nbr_files = len(list_images)
    [self.images_dimension['height'],
     self.images_dimension['width']] = np.shape(self.o_norm.data['sample']['data'][0])
    self.working_data = np.squeeze(self.o_norm.data['sample']['data'])
    self.list_images = list_images
def test_initialization_using_array_with_data(self):
    """assert initialization using arrays with data"""
    sample_files = [self.data_path + '/tif/sample/image001.tif',
                    self.data_path + '/tif/sample/image002.tif']
    data = [np.asarray(Image.open(_file)) for _file in sample_files]
    o_norm = Normalization()
    o_norm.load(data=data, auto_gamma_filter=False)
    assert np.shape(o_norm.data['sample']['data']) == (2, 5, 5)
def test_export_works_for_tif(self):
    '''assert the file created is correct for tif images'''
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample', auto_gamma_filter=False)
    original_first_image = o_norm.data['sample']['data'][0]
    o_norm.export(folder=self.export_folder, data_type='sample')

    # reload what was just exported and compare pixel-for-pixel
    o_norm_2 = Normalization()
    o_norm_2.load(folder=self.export_folder, auto_gamma_filter=False)
    reloaded_first_image = o_norm_2.data['sample']['data'][0]
    self.assertTrue((original_first_image == reloaded_first_image).all())
def test_export_works_for_tiff_metadata(self):
    '''assert file created using tif has the metadata as well'''
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample', auto_gamma_filter=False)
    o_norm.export(folder=self.export_folder, data_type='sample')

    o_norm_2 = Normalization()
    o_norm_2.load(folder=self.export_folder, auto_gamma_filter=False)

    # metadata must survive the export / re-import round trip
    nbr_images = len(o_norm.data['sample']['data'])
    for index in range(nbr_images):
        self.assertTrue(str(o_norm.data['sample']['metadata'][index]) ==
                        str(o_norm_2.data['sample']['metadata'][index]))
def load_data(self, folder_selected):
    """Load the sample images and try to auto-locate *_Spectra.txt."""
    self.o_norm = Normalization()
    self.load_files(data_type='sample', folder=folder_selected)

    # define time spectra file
    folder = os.path.dirname(self.o_norm.data['sample']['file_name'][0])
    spectra_file = glob.glob(os.path.join(folder, '*_Spectra.txt'))
    if not spectra_file:
        # ask for spectra file
        self.select_time_spectra_file()
        return
    self.spectra_file = spectra_file[0]
    display(HTML('<span style="font-size: 15px; color:blue"> Spectra File automatically located: ' +
                 self.spectra_file + '</span>'))
def load_images(raw_imamge_dir: str) -> Normalization:
    """Loading all Images into memory.

    Returns the populated ``Normalization`` instance.  The old annotation
    ``Type[Normalization]`` wrongly described the class object rather than
    an instance.  The parameter name keeps its historical typo ("imamge")
    so keyword callers are not broken.
    """
    import glob
    from neutronimaging.util import in_jupyter

    o_norm = Normalization()
    # gather all images, skipping the summed preview frame
    _img_names = [
        me for me in glob.glob(f"{raw_imamge_dir}/*.fits")
        if "_SummedImg" not in me
    ]
    _img_names.sort()
    o_norm.load(file=_img_names, notebook=in_jupyter())
    return o_norm
def merge(self, output_folder):
    """combine images using algorithm provided"""
    output_folder = os.path.abspath(output_folder)
    merging_list = self.master_list_images_to_combine
    algorithm = self.get_merging_algorithm()

    merging_ui = widgets.HBox([
        widgets.Label("Merging Progress", layout=widgets.Layout(width='20%')),
        widgets.IntProgress(max=len(merging_list.keys()),
                            layout=widgets.Layout(width='80%'))])
    display(merging_ui)
    progress_bar_ui = merging_ui.children[1]

    output_folder = self.make_output_folder(output_folder)

    # one progress tick per run; each run may hold several positions
    for _run_index, _run in enumerate(merging_list.keys(), start=1):
        positions_dict = merging_list[_run]
        for _position in positions_dict.keys():
            o_load = Normalization()
            o_load.load(file=positions_dict[_position]['list_of_files'],
                        notebook=True)
            combined_data = SequentialCombineImagesUsingMetadata._merging_algorithm(
                algorithm, o_load.data['sample']['data'])
            _new_name = self._define_merged_file_name(output_folder=output_folder,
                                                      run_label=_run,
                                                      position_label=_position)
            file_handler.save_data(data=combined_data,
                                   filename=os.path.join(output_folder, _new_name))
        progress_bar_ui.value = _run_index

    merging_ui.close()
    del merging_ui
    display(HTML('<span style="font-size: 20px; color:blue">Files have been created in : ' +
                 output_folder + '</span>'))
def test_normalization_ran_only_once(self):
    """assert normalization is only once if force switch not turn on"""
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample', auto_gamma_filter=False)
    o_norm.load(folder=self.data_path + '/tif/ob', data_type='ob',
                auto_gamma_filter=False)
    roi = ROI(x0=0, y0=0, x1=3, y1=2)

    o_norm.normalization(roi=roi)
    first_result = o_norm.data['sample']['data'][0]
    # second call without force=True must be a no-op
    o_norm.normalization(roi=roi)
    second_result = o_norm.data['sample']['data'][0]
    assert (first_result == second_result).all()
def load_data(self):
    """Load a fast preview subset of the sample images.

    With more than 3 files only the first, middle and last images are
    loaded; otherwise the full list is used.

    Fixes two defects:
    - ``np.int`` (removed in NumPy 1.24) replaced by floor division;
    - ``list_sample`` used to be undefined (NameError) when 3 or fewer
      files were present.
    """
    self.o_norm = Normalization()

    full_list_sample = self.files.sample
    nbr_full_list_sample = len(full_list_sample)
    if nbr_full_list_sample > 3:
        # first, middle and last image only
        list_sample = [full_list_sample[0],
                       full_list_sample[nbr_full_list_sample // 2],
                       full_list_sample[-1]]
    else:
        list_sample = full_list_sample

    self.o_norm.load(file=list_sample, notebook=True)
    self.data.sample = self.o_norm.data['sample']['data']
    self.list_file_names = list_sample
def test_normalization_ran_twice_with_force_flag(self):
    """assert normalization can be ran twice using force flag"""
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample', auto_gamma_filter=False)
    o_norm.load(folder=self.data_path + '/tif/ob', data_type='ob',
                auto_gamma_filter=False)

    o_norm.normalization(roi=ROI(x0=0, y0=0, x1=3, y1=2))
    first_result = o_norm.data['sample']['data'][0]
    # a different ROI with force=True must recompute the normalization
    o_norm.normalization(roi=ROI(x0=0, y0=0, x1=2, y1=3), force=True)
    second_result = o_norm.data['sample']['data'][0]
    assert not (first_result == second_result).all()
def test_do_nothing_if_nothing_to_export(self):
    '''assert do nothing if nothing to export'''
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample', auto_gamma_filter=False)
    o_norm.load(folder=self.data_path + '/tif/ob', data_type='ob',
                auto_gamma_filter=False)
    # no df data was loaded, so exporting df must return False
    self.assertFalse(o_norm.export(data_type='df'))
def test_normalization_works_with_2_dfs(self):
    """assert using 2 df in normalization works"""
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample/', auto_gamma_filter=False)
    o_norm.load(file=[self.data_path + '/tif/ob/ob001.tif',
                      self.data_path + '/tif/ob/ob002.tif'],
                data_type='ob', auto_gamma_filter=False)
    o_norm.load(file=[self.data_path + '/tif/df/df001.tif',
                      self.data_path + '/tif/df/df002.tif'],
                data_type='df', auto_gamma_filter=False)
    o_norm.df_correction()

    _list_roi = [ROI(x0=0, y0=0, x1=2, y1=2),
                 ROI(x0=1, y0=1, x1=3, y1=3)]
    nbr_data_before = len(o_norm.data['sample']['data'])
    o_norm.normalization(roi=_list_roi)
    # normalization must not change the number of sample images
    assert len(o_norm.data['sample']['data']) == nbr_data_before
def test_roi_type_in_normalization(self):
    """assert error is raised when type of norm roi are not ROI in normalization"""
    o_norm = Normalization()
    o_norm.load(file=self.data_path + '/tif/sample/image001.tif',
                data_type='sample', auto_gamma_filter=False)
    o_norm.load(file=self.data_path + '/tif/ob/ob001.tif',
                data_type='ob', auto_gamma_filter=False)
    # a plain dict is not a ROI instance -> ValueError expected
    bad_roi = {'x0': 0, 'y0': 0, 'x1': 2, 'y1': 2}
    with pytest.raises(ValueError):
        o_norm.normalization(roi=bad_roi)
def merging(self):
    """combine images using algorithm provided"""
    list_files = self.files_list_widget.selected

    # select the combining algorithm from the UI
    merging_algo = self.combine_method.value
    if merging_algo == 'arithmetic mean':
        algorithm = self.__arithmetic_mean
    elif merging_algo == 'geometric mean':
        algorithm = self.__geo_mean
    else:
        algorithm = self.__add

    # get output folder
    output_folder = os.path.abspath(self.output_folder_widget.selected)

    o_load = Normalization()
    o_load.load(file=list_files, notebook=True)
    _data = o_load.data['sample']['data']

    merging_ui = widgets.HBox([
        widgets.Label("Merging Progress", layout=widgets.Layout(width='20%')),
        widgets.IntProgress(max=2)])
    display(merging_ui)
    w1 = merging_ui.children[1]

    combined_data = self.__merging_algorithm(algorithm, _data)
    w1.value = 1

    _new_name = self.default_filename_ui.value + self.ext_ui.value
    output_file_name = os.path.join(output_folder, _new_name)
    file_handler.save_data(data=combined_data, filename=output_file_name)
    w1.value = 2

    display(HTML('<span style="font-size: 20px; color:blue">File created: ' +
                 os.path.basename(output_file_name) + '</span>'))
    display(HTML('<span style="font-size: 20px; color:blue">In Folder: ' +
                 output_folder + '</span>'))
def display_corrected_image(self, file_index=0):
    """Display the gamma-filtered image for *file_index* and its histogram,
    preserving the current zoom/pan state and histogram levels.
    """
    _view = self.ui.filtered_image_view.getView()
    _view_box = _view.getViewBox()
    _state = _view_box.getState()

    # on the very first display there are no saved levels to restore
    first_update = False
    if self.filtered_histogram_level == []:
        first_update = True
    _histo_widget = self.ui.filtered_image_view.getHistogramWidget()
    self.filtered_histogram_level = _histo_widget.getLevels()

    o_norm = Normalization()
    file_name = self.list_files[file_index]
    # NOTE(review): auto_gamma_filter=True here while the raw/table code
    # paths load with auto_gamma_filter=False -- confirm this is intentional
    o_norm.load(file=file_name,
                auto_gamma_filter=True,
                manual_gamma_filter=True,
                manual_gamma_threshold=self.__get_filtering_coefficient_value())
    _image = o_norm.data['sample']['data'][0]
    # transpose to match the image-view axis convention
    _image = np.transpose(_image)
    self.ui.filtered_image_view.setImage(_image)
    _view_box.setState(_state)
    self.live_filtered_image = _image

    if not first_update:
        _histo_widget.setLevels(self.filtered_histogram_level[0],
                                self.filtered_histogram_level[1])

    # histogram (locals renamed: they previously shadowed builtins min/max)
    self.ui.filtered_histogram_plot.clear()
    histo_min = 0
    histo_max = np.max(_image)
    y, x = np.histogram(_image,
                        bins=np.linspace(histo_min, histo_max + 1,
                                         self.nbr_histo_bins))
    self.ui.filtered_histogram_plot.plot(x, y,
                                         stepMode=True,
                                         fillLevel=0,
                                         brush=(0, 0, 255, 150))
def load(self):
    """Load every selected image one file at a time, with a progress bar."""
    self.list_files = self.list_images_ui.selected

    progress = widgets.IntProgress()
    progress.max = len(self.list_files)
    display(progress)

    data = []
    for _index, _file in enumerate(self.list_files):
        _o_norm = Normalization()
        _o_norm.load(file=_file)
        data.append(_o_norm.data['sample']['data'][0])
        progress.value = _index + 1

    self.data = data
    progress.close()
def display_raw_image(self, file_index):
    """Display the unfiltered image for *file_index* and its histogram,
    preserving the current zoom/pan state and histogram levels.
    """
    _view = self.ui.raw_image_view.getView()
    _view_box = _view.getViewBox()
    _state = _view_box.getState()
    self.state_of_raw = _state

    # on the very first display there are no saved levels to restore
    first_update = False
    if self.raw_histogram_level == []:
        first_update = True
    _histo_widget = self.ui.raw_image_view.getHistogramWidget()
    self.raw_histogram_level = _histo_widget.getLevels()

    o_norm = Normalization()
    file_name = self.list_files[file_index]
    o_norm.load(file=file_name, auto_gamma_filter=False)
    _image = o_norm.data['sample']['data'][0]
    # transpose to match the image-view axis convention
    _image = np.transpose(_image)
    self.ui.raw_image_view.setImage(_image)
    self.live_raw_image = _image
    self.raw_image_size = np.shape(_image)

    if not first_update:
        _histo_widget.setLevels(self.raw_histogram_level[0],
                                self.raw_histogram_level[1])

    # histogram (locals renamed: they previously shadowed builtins min/max)
    self.ui.raw_histogram_plot.clear()
    histo_min = 0
    histo_max = np.max(_image)
    y, x = np.histogram(_image,
                        bins=np.linspace(histo_min, histo_max + 1,
                                         self.nbr_histo_bins))
    self.ui.raw_histogram_plot.plot(x, y,
                                    stepMode=True,
                                    fillLevel=0,
                                    brush=(0, 0, 255, 150))
def test_error_raised_if_data_type_is_not_valid(self):
    '''assert error is raised if data_type is wrong'''
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample', auto_gamma_filter=False)
    o_norm.load(folder=self.data_path + '/tif/ob', data_type='ob',
                auto_gamma_filter=False)
    # an unknown data_type must raise a KeyError on export
    self.assertRaises(KeyError, o_norm.export, data_type='not_real_type')
def test_error_raised_if_wrong_folder(self):
    '''assert error is raised when folder does not exist'''
    o_norm = Normalization()
    o_norm.load(folder=self.data_path + '/tif/sample', auto_gamma_filter=False)
    o_norm.load(folder=self.data_path + '/tif/ob', data_type='ob',
                auto_gamma_filter=False)
    # exporting into a non-existing folder must raise an IOError
    self.assertRaises(IOError, o_norm.export,
                      folder='/unknown/', data_type='sample')