def generate_lists(data_params):
    """Build the ADNI subject lists and persist them as a single pickle file.

    The output file lands at '<adni_data_des>/<convention_name>/List_data.pkl'.
    """
    pkl_path = (data_params['adni_data_des']
                + tls.get_convention_name(data_params)
                + '/List_data.pkl')
    # One entry per dataset; currently only ADNI-1 is generated.
    all_lists = [generate_lists_from_adni_dataset(data_params)]
    daf.save_lists_to_file(path_file=pkl_path, data_list=all_lists)
def save_data_params(data_params):
    """Serialize the data-parameters dict to '<adni_data_des>/<convention_name>/Data_params.pkl'.

    Creates the destination directory if it does not exist, then pickles
    ``data_params`` into it.
    """
    path_file = (data_params['adni_data_des']
                 + tls.get_convention_name(data_params)
                 + '/Data_params.pkl')
    # exist_ok=True replaces the manual `except OSError: if errno != EEXIST`
    # dance — same outcome, no race between the check and the create.
    os.makedirs(os.path.dirname(path_file), exist_ok=True)
    with open(path_file, 'wb') as f:
        pickle.dump(data_params, f)
def generate_data_from_lists(data_params, selected_label=None):
    """Load the previously saved ADNI-1 split lists and generate data for each split.

    Reads 'List_data.pkl' written by `generate_lists`, pairs the three
    ADNI-1 splits with their canonical names, and hands them to
    `generate_data_from_selected_dataset`.
    """
    pkl_path = (data_params['adni_data_des']
                + tls.get_convention_name(data_params)
                + '/List_data.pkl')
    adni_1_lists = daf.read_lists_from_file(pkl_path)[0]
    split_names = ['alz_ADNI_1_train', 'alz_ADNI_1_valid', 'alz_ADNI_1_test']
    lists_with_names = zip([adni_1_lists[0], adni_1_lists[1], adni_1_lists[2]],
                           split_names)
    time.sleep(1)  # brief pause before generation kicks off (kept from original)
    generate_data_from_selected_dataset(data_params, lists_with_names,
                                        selected_label)
def main():
    """Inspect the test-set pickles of the first binary class and plot each ROI pair."""
    binaries_classes = ['AD-NC', 'AD-MCI', 'MCI-NC']
    data_params = rsd.get_all_data_params()
    root_path = data_params['adni_data_des']
    selected_roi = data_params['ROI_list'][data_params['ROI_selection']]
    name_cnv = (root_path + tls.get_convention_name(data_params) + '/'
                + str(selected_roi + '/' + data_params['3D_or_2D']))
    # Only the first binary classification ('AD-NC'), test split, is inspected.
    line = name_cnv + '/' + binaries_classes[0] + '/test/'
    for pkl_file in get_pickle_from_folder(line):
        model = daf.read_model(pkl_file)
        print(" HIPP_L : {} - HIPP_R: {} - Vector: {} - Label: {}".format(
            model.hippLeft.shape, model.hippRight.shape,
            model.hippMetaDataVector, model.hippLabel))
        # Fixed slice windows for the three anatomical planes of each ROI.
        left_dims = [[13, 16], [13, 16], [13, 16]]
        right_dims = [[13, 16], [13, 16], [13, 16]]
        plot_ROI_all(model.hippLeft, model.hippRight, left_dims, right_dims)
def save_desc_table(data_params, text_data):
    """Write an ASCII demography table (AD / MCI / NC rows) for ADNI-1.

    Parameters
    ----------
    data_params : dict
        Must provide 'adni_data_des'; the convention name decides the
        destination folder.
    text_data : sequence
        One entry per class; indices 1..4 of each entry are written as the
        subject-count, sex, age and MMSE cells (index 0 is not used here).
    """
    classes = ['AD ', 'MCI', 'NC ']
    # NOTE(review): 'Desciption' typo kept on purpose — downstream consumers
    # may look the file up by this exact name.
    path_file = (data_params['adni_data_des']
                 + tls.get_convention_name(data_params)
                 + '/Desciption_ADNI_demography.txt')
    # exist_ok=True replaces the manual errno.EEXIST handling.
    os.makedirs(os.path.dirname(path_file), exist_ok=True)
    with open(path_file, 'w') as f:
        f.write("----------------------------------------------------------------------------------------------------------\n")
        f.write("|                                        ADNI-1 description                                              |\n")
        f.write("----------------------------------------------------------------------------------------------------------\n")
        # NOTE(review): the data rows below carry 5 cells (class label first)
        # while this header names only 4 columns — confirm intended layout.
        f.write("|  #Subject   |  Sex (F/M)  |  Age [min, max]/mean(std)  |  MMSE [min, max]mean/std  |\n")
        f.write("----------------------------------------------------------------------------------------------------------\n")
        for i in range(3):
            f.write("| {} | {} | {} | {} | {} |\n".format(
                classes[i], text_data[i][1], text_data[i][2],
                text_data[i][3], text_data[i][4]))
            f.write("----------------------------------------------------------------------------------------------------------\n")
        # Redundant f.close() removed: the `with` block already closes the file.
def process_extracting_3D_data(data_params, lst, data_name, label_code, indice_ROI):
    """Extract 3D ROI cubes for every subject in `lst` and pickle them as HippModel objects.

    For each input line a left/right ROI cube pair is computed, wrapped in a
    `HippModel` together with the subject's XML meta-data and numeric label,
    and saved under '<dest>/<convention>/<ROI>/3D/<binary_label>/<split>/...'.
    When ``data_params['flip']`` is truthy, a second, cross-flipped model
    (flipped right as left and vice versa) is saved as augmentation.

    Parameters
    ----------
    data_params : dict
        Pipeline configuration ('adni_data_des', 'neighbors', 'flip', ...).
    lst : sequence
        Input lines; item[0] is the class name keying `label_code`,
        item[1] a file path whose 8th path component is the subject ID
        (assumption from the split('/')[7] below — TODO confirm).
    data_name : str
        Underscore-separated descriptor; parts 3 and 4 are used as the
        split name and binary label.
    label_code : mapping
        Class name -> integer label.
    indice_ROI : str
        ROI selector; must contain "HIPP" or "PPC".
    """
    # Pick the ROI whose bounding-cube dimensions we extract.
    if ("HIPP" in indice_ROI):
        l, r = tls.get_dimensions_cubes_HIPP(
            data_params)  # extract only the Hippocampus ROI
    elif ("PPC" in indice_ROI):
        l, r = tls.get_dimensions_cubes_PPC(
            data_params)  # extract only the Posterior PC ROI
    else:
        # compute both ROIs (in future) — NOTE: l and r stay unbound here,
        # so an indice_ROI without HIPP/PPC raises NameError below.
        pass
    # Get dimensions of the selected ROI per axis (max - min); l and r are
    # presumably 6-element [min, max] * 3 bound vectors — TODO confirm.
    names = ['sag', 'cor', 'axi']
    list_cord_l = [int(l[i + 1] - l[i]) for i in range(0, 6, 2)]
    list_cord_r = [int(r[i + 1] - r[i]) for i in range(0, 6, 2)]
    # Compute the slice index windows centred on each axis midpoint;
    # `neighbors` controls how many slices around the centre are selected.
    neighbors = int(
        data_params['neighbors'])
    # NOTE(review): the dict is built only so .values() yields the three axis
    # sizes in order — relies on insertion-ordered dicts (Python 3.7+).
    sag_l, cor_l, axi_l = [[(int(i / 2) - neighbors),
                            (int(i / 2) + neighbors + 1)]
                           for i in {
                               "l_" + str(names[j]): list_cord_l[j]
                               for j in range(len(list_cord_l))
                           }.values()]
    sag_r, cor_r, axi_r = [[(int(i / 2) - neighbors),
                            (int(i / 2) + neighbors + 1)]
                           for i in {
                               "r_" + str(names[j]): list_cord_r[j]
                               for j in range(len(list_cord_r))
                           }.values()]
    # Decompose the data_name descriptor (e.g. 'alz_ADNI_1_train_AD-NC').
    # NOTE(review): data_selection is computed but never used below.
    data_selection = str(str(data_name).split('_')[1]).upper() + '_' + str(
        str(data_name).split('_')[2]).upper()
    data_set = str(data_name).split('_')[3]
    binary_label = str(data_name).split('_')[4]
    target_path = data_params['adni_data_des'] + tls.get_convention_name(
        data_params) + '/' + indice_ROI + "/3D/"
    data_size = 0  # running (shallow) byte count of the pickled models
    key = 0        # per-line counter, used in the output file names
    for input_line in lst:
        #-----------------------------------------------------------------------------------------------------------------------
        # Mean ROI (L & R)
        # data_roi_mean = prc.process_mean_hippocampus(input_line, data_params)   # mean cube
        # cross mean between cubes (in future)
        # return computed cubes ROIs Left and Right
        #-----------------------------------------------------------------------------------------------------------------------
        data_roi_left, data_roi_right = prc.process_cube_HIPP(
            input_line, data_params)  # left, right cube
        # Meta-data fields: [ID, Date, Class, Age, Sex, MMSE, GDS, CDR]
        # NOTE(review): assumes the subject ID is the 8th path component of
        # input_line[1] — brittle against directory-layout changes.
        subject_ID = str(input_line[1]).split('/')[7]
        meta_data = tls.get_meta_data_xml(data_params, subject_ID)
        # print(meta_data, binary_label, data_set, label_code[input_line[0]])
        model_object_normal = HippModel(data_roi_left, data_roi_right,
                                        meta_data,
                                        int(label_code[input_line[0]]))
        # getsizeof is shallow — this undercounts the real payload size.
        data_size += getsizeof(model_object_normal)
        model_abs_normal_path = target_path + binary_label + '/' + str(
            data_set) + '/' + str(input_line[0]) + '/' + str(key) + str(
                '_' + indice_ROI + '_').upper() + data_name + '_' + subject_ID + '_[' + str(
                    input_line[0]) + ']' + str('_normal') + '.pkl'
        # store model data
        daf.save_model(model_object_normal, model_abs_normal_path)
        if data_params['flip']:
            # Flipped Left & Right ROI (data augmentation)
            data_roi_left_flip = prc.flip_3d(data_roi_left)
            data_roi_right_flip = prc.flip_3d(data_roi_right)
            # cross-flipped model: flipped right cube becomes the left input
            # and vice versa
            model_object_fliped = HippModel(data_roi_right_flip,
                                            data_roi_left_flip, meta_data,
                                            int(label_code[input_line[0]]))
            data_size += getsizeof(model_object_fliped)
            model_abs_fliped_path = target_path + binary_label + '/' + str(
                data_set) + '/' + str(input_line[0]) + '/' + str(key) + str(
                    '_' + indice_ROI + '_').upper() + data_name + '_' + subject_ID + '_[' + str(
                        input_line[0]) + ']' + str('_fliped') + '.pkl'
            # store data model
            daf.save_model(model_object_fliped, model_abs_fliped_path)
        key += 1
        # Progress of computation (single-line carriage-return progress bar)
        print(CP.bg.RED + CP.style.BRIGHT +
              " {} % percent complete of 100% ".format(
                  round(key / len(lst) * 100, 2)) + " " + CP.style.RESET_ALL +
              CP.bg.RESET,
              end='\r')
    #==========================================================================================================================
    print("\n", end='\r')
    # 6.43 is an empirical KB->MB correction factor — TODO confirm its origin.
    print(CP.style.BRIGHT + "\n>> Data Size is: {} Mb -> {} Gb\n".format(
        round((data_size / 1024) * 6.43, 2),
        round(((data_size / 1024) * 6.43) / 1024, 2)) + CP.style.RESET_ALL)