def test_set_new_data_new_dtype():
    """set_new_data with an explicit new_dtype must set it on the new image
    while leaving the source image's dtype untouched."""
    source_array = np.random.normal(5, 10, [10, 10, 5])
    source_affine = np.diag([1, 2, 3, 1])
    replacement_array = np.random.normal(5, 10, [3, 2, 4])

    source_image = nib.Nifti1Image(source_array, source_affine)
    source_image.set_data_dtype(np.float32)

    new_image = set_new_data(source_image, replacement_array, new_dtype=np.uint16)

    # The original keeps float32; the new image carries the requested uint16.
    assert_equal(source_image.get_data_dtype(), np.float32)
    assert_equal(new_image.get_data_dtype(), np.uint16)
def test_set_new_data_nan_no_nan():
    """NaNs present in the source image must not leak into the new image data.

    BUGFIX: the original assertion was ``np.nan not in im_data_2.get_data()``,
    which is vacuous - NaN never compares equal to anything (including itself),
    so ndarray membership (based on ``==``) can never find a NaN and the test
    always passed. ``np.isnan(...).any()`` is the correct detection.
    """
    data_1 = np.random.normal(5, 10, [10, 10, 5])
    affine_1 = np.diag([1, 2, 3, 1])
    # Inject NaNs into the source image only.
    data_1[2, 2, 2] = np.nan
    data_1[1, 1, 1] = np.nan
    data_2 = np.random.normal(5, 10, [3, 2, 4])

    im_data_1 = nib.Nifti1Image(data_1, affine_1)
    im_data_2 = set_new_data(im_data_1, data_2, new_dtype=np.uint16)

    # The replacement data (and the uint16 cast) must be NaN-free.
    assert_true(not np.isnan(im_data_2.get_data()).any())
def test_set_new_data_basic():
    """set_new_data must carry over the affine and header fields, take the
    dtype from the new data, and leave the source image unchanged."""
    data_1 = np.random.normal(5, 10, [10, 10, 5])
    affine_1 = np.diag([1, 2, 3, 1])
    data_2 = np.random.normal(5, 10, [3, 2, 4]).astype(np.float32)

    im_data_1 = nib.Nifti1Image(data_1, affine_1)
    im_data_1.set_data_dtype(np.uint8)
    im_data_1.header['descrip'] = 'Spam'

    im_data_2 = set_new_data(im_data_1, data_2)

    assert_array_equal(im_data_2.get_data(), data_2)
    # BUGFIX: Nifti1Image.get_affine() was deprecated and then removed from
    # nibabel (raises ExpiredDeprecationError since 3.0); the supported
    # accessor is the `.affine` attribute.
    assert_array_equal(im_data_2.affine, affine_1)
    # Header fields (other than dtype) are inherited from the source image.
    assert_equal(im_data_2.header['descrip'], b'Spam')
    # dtype of the source is preserved; the new image takes the new data's dtype.
    assert_equal(im_data_1.get_data_dtype(), np.uint8)
    assert_equal(im_data_2.get_data_dtype(), np.float32)
def write_struct(
    bruker_struct,
    pfo_output,
    fin_scan="",
    save_human_readable=True,
    save_b0_if_dwi=True,
    verbose=1,
    frame_body_as_frame_head=False,
    keep_same_det=True,
    consider_subject_position=False,
):
    """
    The core method of the converter has 2 parts.
    1) parsing the Bruker scan folder structure into an internal dictionary called struct.
    2) writing the information parsed in struct into folders.
    -------
    write_struct is the second part of the bridge -
    :param bruker_struct: output of scan2struct
    :param pfo_output: path-to-folder where the converted structure will be saved.
    :param fin_scan: filename of the scan
    :param save_human_readable: output data will be saved in .txt other than in numpy format.
    :param save_b0_if_dwi: save the first time-point if the data is a DWI.
    :param verbose: verbosity level; > 0 prints progress messages.
    :param frame_body_as_frame_head: according to the animal. If True monkey, if False rat-rabbit
    :param consider_subject_position: Attribute manually set, or left blank, by the lab experts. False by default
    :param keep_same_det: force the initial determinant to be the same as the final one
    :return: save the bruker_struct parsed in scan2struct in the specified folder, with the specified parameters.
    """
    if not os.path.isdir(pfo_output):
        raise IOError("Output folder does not exist.")

    # Nothing to write - scan2struct may legitimately return None.
    if bruker_struct is None:
        return

    if not len(bruker_struct["visu_pars_list"]) == len(bruker_struct["nib_scans_list"]):
        raise IOError(
            "Visu pars list and scans list have a different number of elements."
        )

    if fin_scan is None:
        fin_scan = ""

    # -- WRITE Additional data shared by all the sub-scans:
    # if the modality is a DtiEpi or Dwimage then save the DW directions, b values
    # and b vectors in separate csv .txt.
    # NOTE(review): DWI detection is currently hard-coded off. The original test was:
    # is_dwi = (
    #     "dtiepi" in bruker_struct["visu_pars_list"][0]['VisuAcqSequenceName'].lower()
    #     or "dwi" in bruker_struct["visu_pars_list"][0]['VisuAcqSequenceName'].lower()
    # )
    is_dwi = False

    if is_dwi:
        # File method is the same for each sub-scan. Cannot embed this in the next for cycle.
        # -- Deals with b-vector: normalise, reorient and save in external .npy/txt.
        dw_grad_vec = bruker_struct["method"]["PVM_DwGradVec"]
        assert dw_grad_vec.shape[0] == bruker_struct["method"]["PVM_DwNDiffExp"]

        # get b-vectors re-orientation matrix from visu-pars
        reorientation_matrix = obtain_b_vectors_orient_matrix(
            bruker_struct["visu_pars_list"][0]["VisuCoreOrientation"],
            bruker_struct["visu_pars_list"][0]["VisuSubjectPosition"],
            frame_body_as_frame_head=frame_body_as_frame_head,
            keep_same_det=keep_same_det,
            consider_subject_position=consider_subject_position,
        )
        # apply reorientation, then normalise
        dw_grad_vec = apply_reorientation_to_b_vects(reorientation_matrix, dw_grad_vec)
        dw_grad_vec = normalise_b_vect(dw_grad_vec)

        np.save(jph(pfo_output, fin_scan + "_DwGradVec.npy"), dw_grad_vec)
        if save_human_readable:
            np.savetxt(
                jph(pfo_output, fin_scan + "_DwGradVec.txt"), dw_grad_vec, fmt="%.14f"
            )
        if verbose > 0:
            # BUGFIX: this message previously pointed at _DwDir.npy, although the
            # gradient directions are actually saved in _DwGradVec.npy.
            msg = "Diffusion weighted directions saved in " + jph(
                pfo_output, fin_scan + "_DwGradVec.npy"
            )
            print(msg)

        b_vals = bruker_struct["method"]["PVM_DwEffBval"]
        b_vects = bruker_struct["method"]["PVM_DwDir"]

        np.save(jph(pfo_output, fin_scan + "_DwEffBval.npy"), b_vals)
        np.save(jph(pfo_output, fin_scan + "_DwDir.npy"), b_vects)
        if save_human_readable:
            np.savetxt(
                jph(pfo_output, fin_scan + "_DwEffBval.txt"), b_vals, fmt="%.14f"
            )
            np.savetxt(jph(pfo_output, fin_scan + "_DwDir.txt"), b_vects, fmt="%.14f")

        if verbose > 0:
            # BUGFIX: the b-values / b-vectors messages were swapped and pointed
            # at the wrong file names (b_vals go to _DwEffBval.npy, b_vects to
            # _DwDir.npy).
            print(
                "B-values saved in {}".format(
                    jph(pfo_output, fin_scan + "_DwEffBval.npy")
                )
            )
            print(
                "B-vectors saved in {}".format(
                    jph(pfo_output, fin_scan + "_DwDir.npy")
                )
            )

    # save the dictionary as numpy array containing the corresponding dictionaries
    # TODO use pickle instead of numpy to save the dictionaries(?)
    if not bruker_struct["acqp"] == {}:
        np.save(jph(pfo_output, fin_scan + "_acqp.npy"), bruker_struct["acqp"])
        if save_human_readable:
            from_dict_to_txt_sorted(
                bruker_struct["acqp"], jph(pfo_output, fin_scan + "_acqp.txt")
            )

    if not bruker_struct["method"] == {}:
        np.save(jph(pfo_output, fin_scan + "_method.npy"), bruker_struct["method"])
        if save_human_readable:
            from_dict_to_txt_sorted(
                bruker_struct["method"], jph(pfo_output, fin_scan + "_method.txt")
            )

    if not bruker_struct["reco"] == {}:
        np.save(jph(pfo_output, fin_scan + "_reco.npy"), bruker_struct["reco"])
        if save_human_readable:
            from_dict_to_txt_sorted(
                bruker_struct["reco"], jph(pfo_output, fin_scan + "_reco.txt")
            )

    # Visu_pars and summary info for each sub-scan:
    summary_info = {}

    for i in range(len(bruker_struct["visu_pars_list"])):
        # When there are multiple sub-scans, suffix each output with its index.
        if len(bruker_struct["nib_scans_list"]) > 1:
            i_label = "_subscan_" + str(i) + "_"
        else:
            i_label = "_"

        # NOTE(review): per-sub-scan export of visu_pars (.npy/.txt), of the
        # VisuCoreDataSlope data, and the per-sub-scan summary_info entries
        # (VisuUid, VisuCoreDataSlope, VisuCoreSize, VisuCoreOrientation,
        # VisuCorePosition, VisuCoreSlicePacksSlices, VisuCoreDiskSliceOrder,
        # VisuCreatorVersion) used to live here but had been fully commented
        # out; the dead code was removed - recover it from version control if
        # the feature is revived.

        # WRITE NIFTI IMAGES:
        if isinstance(bruker_struct["nib_scans_list"][i], list):
            # the scan had sub-volumes embedded. they are saved separately
            for sub_vol_id, subvol in enumerate(bruker_struct["nib_scans_list"][i]):
                if fin_scan == "":
                    pfi_scan = jph(
                        pfo_output,
                        "scan"
                        + i_label[:-1]
                        + "_subvol_"
                        + str(sub_vol_id)
                        + ".nii.gz",
                    )
                else:
                    pfi_scan = jph(
                        pfo_output,
                        fin_scan
                        + i_label[:-1]
                        + "_subvol_"
                        + str(sub_vol_id)
                        + ".nii.gz",
                    )
                nib.save(subvol, pfi_scan)
        else:
            if fin_scan == "":
                pfi_scan = jph(pfo_output, "scan" + i_label[:-1] + ".nii.gz")
            else:
                pfi_scan = jph(pfo_output, fin_scan + i_label[:-1] + ".nii.gz")

            nib.save(bruker_struct["nib_scans_list"][i], pfi_scan)

            if save_b0_if_dwi and is_dwi:
                # save the b0, first slice alone. Optimized if you have
                # NiftiSeg (http://cmictig.cs.ucl.ac.uk/wiki/index.php/NiftySeg) installed
                if fin_scan == "":
                    pfi_scan_b0 = jph(
                        pfo_output, "scan" + i_label[:-1] + "_b0.nii.gz"
                    )
                else:
                    pfi_scan_b0 = jph(
                        pfo_output, fin_scan + i_label[:-1] + "_b0.nii.gz"
                    )
                nib.save(
                    set_new_data(
                        bruker_struct["nib_scans_list"][i],
                        bruker_struct["nib_scans_list"][i].get_data()[..., 0],
                    ),
                    pfi_scan_b0,
                )
                if verbose > 0:
                    msg = "b0 scan saved alone in " + pfi_scan_b0
                    print(msg)

    # complete the summary info with additional information from other parameter files, if required:
    if not bruker_struct["acqp"] == {}:
        summary_info_acqp = {
            "acqp['ACQ_sw_version']": bruker_struct["acqp"]["ACQ_sw_version"],
            "acqp['NR']": bruker_struct["acqp"]["NR"],
            "acqp['NI']": bruker_struct["acqp"]["NI"],
            "acqp['ACQ_n_echo_images']": bruker_struct["acqp"]["ACQ_n_echo_images"],
            "acqp['ACQ_slice_thick']": bruker_struct["acqp"]["ACQ_slice_thick"],
        }
        summary_info.update(summary_info_acqp)

    if not bruker_struct["method"] == {}:
        summary_info_method = {
            "method['PVM_SpatDimEnum']": bruker_struct["method"]["PVM_SpatDimEnum"],
            "method['PVM_Matrix']": bruker_struct["method"]["PVM_Matrix"],
            "method['PVM_SpatResol']": bruker_struct["method"]["PVM_SpatResol"],
            "method['Method']": bruker_struct["method"]["Method"],
            "method['PVM_SPackArrSliceOrient']": bruker_struct["method"][
                "PVM_SPackArrSliceOrient"
            ],
            "method['PVM_SPackArrReadOrient']": bruker_struct["method"][
                "PVM_SPackArrReadOrient"
            ],
        }
        summary_info.update(summary_info_method)

    if not bruker_struct["reco"] == {}:
        summary_info_reco = {
            "reco['RECO_size']": bruker_struct["reco"]["RECO_size"],
            "reco['RECO_inp_order']": bruker_struct["reco"]["RECO_inp_order"],
        }
        summary_info.update(summary_info_reco)

    # Finally summary info with the updated information.
    from_dict_to_txt_sorted(summary_info, jph(pfo_output, fin_scan + "_summary.txt"))

    # Get the method name in a single .txt file.
    # BUGFIX: the original used `is not ""`, which tests object identity rather
    # than equality (a SyntaxWarning on CPython >= 3.8 and not a reliable string
    # comparison); `!=` is the correct operator. Also use a context manager so
    # the file handle is closed even if write() raises.
    if bruker_struct["acquisition_method"] != "":
        with open(jph(pfo_output, "acquisition_method.txt"), "w+") as text_file:
            text_file.write(bruker_struct["acquisition_method"])