Example #1
def events_strain_visualization_old(path_to_data_dir, list_of_test_id):
    """
	this function visualize/plot the strains for each events listed in tests in
	list_of_test_id
	
	plt.savefig
	if len(list_of_test_id) == 1:
		plt.show()
	"""
    test_id = ["test%s" % i for i in list_of_test_id]

    path_to_final_selected_events = os.path.join(path_to_data_dir, "final_selected_events.json")
    if os.path.exists(path_to_final_selected_events):
        final_selected_events = json.load(
            open(path_to_final_selected_events, "r"))
        final_interested_events = []
        for event in final_selected_events:
            if event[0] in test_id:
                final_interested_events.append(event)
        for event in final_interested_events:
            single_event_strain_visualization(path_to_data_dir, event)
    else:
        for test in list_of_test_id:
            path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test)
            path_to_test_result = path_to_test_dir + "/results"
            path_to_event_list = path_to_test_result + "/selected_events.json"
            if os.path.exists(path_to_event_list):
                event_list = json.load(open(path_to_event_list, "r"))
                for value in event_list.values():
                    event = ["test%s" % test, [value[0], value[1], value[2]]]
                    single_event_strain_visualization(path_to_data_dir, event)
            else:
                print "skip current test:", "test%s" % test, "there is no selected events"
    print "done plotting for the interested tests whose test_id is in the list", list_of_test_id
Example #2
def single_event_local_atoms(event,path_to_data_dir,model,feature,target,residual_threshold =0.5):
	"""
	this function developed correlation model between feature and target
	for a single event whose state is saved into event
	
	return:
		num_of_local_atom: a list
			num_of_local_atom for init_sad, sad_fin, init_fin
			
	"""
	if 'test' in event[0]:
		test_id = int(event[0][4:])
	else:
		test_id = int(event[0])
	path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
	
	path_to_curr_result = path_to_test_dir + "/results"
	init,sad,fin = event[1][0], event[1][1], event[1][2]
	path_to_curr_event = path_to_curr_result + "/event_" + init + "_" + sad + "_" + fin
	
	path_to_init_sad = path_to_curr_event + "/init_sad"
	path_to_sad_fin = path_to_curr_event + "/sad_fin"
	path_to_init_fin = path_to_curr_event + "/init_fin"
	if feature == "displacement" and target == "shear_strain":
		init_sad_X,init_sad_y = get_strain_disp(path_to_init_sad)
		sad_fin_X,sad_fin_y = get_strain_disp(path_to_sad_fin)
		init_fin_X,init_fin_y = get_strain_disp(path_to_init_fin)
	
	init_sad = outlier_detector(init_sad_X,init_sad_y,model,residual_threshold)
	sad_fin = outlier_detector(sad_fin_X,sad_fin_y,model,residual_threshold)
	init_fin = outlier_detector(init_fin_X,init_fin_y,model,residual_threshold)

	return [init_sad, sad_fin, init_fin]
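
The helper outlier_detector is not shown in these examples. A minimal sketch of what it might look like, assuming it fits a robust regression of target y on feature X with sklearn's RANSACRegressor and treats the outliers as the "local" atoms of the event (the model argument is ignored in this sketch):

import numpy as np
from sklearn.linear_model import RANSACRegressor

def outlier_detector(X, y, model, residual_threshold=0.5, return_index=False):
    # Hypothetical sketch: fit a robust regression of y on X and treat the
    # RANSAC outliers as the "local" atoms. The model argument is ignored here;
    # a real implementation would dispatch on it.
    X = np.asarray(X, dtype=float).reshape(-1, 1)
    y = np.asarray(y, dtype=float)
    ransac = RANSACRegressor(residual_threshold=residual_threshold)
    ransac.fit(X, y)
    outlier_mask = ~ransac.inlier_mask_
    if return_index:
        return np.where(outlier_mask)[0].tolist()
    return int(outlier_mask.sum())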
Example #3
def single_event_adder(event_state, path_to_data_dir,db):
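	"""
	this function reads the init/sad/fin dump configurations and the energies of
	the event stored in event_state, then registers the two minima and the
	transition state of the event into the database object db
	"""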
	if 'test' in event_state[0]:
		test_id = int(event_state[0][4:])
	else:
		test_id = int(event_state[0])
	path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
	path_to_curr_result = path_to_test_dir + "/results"
	
	init, sad, fin = event_state[1][0], event_state[1][1], event_state[1][2]
	path_to_curr_event = path_to_curr_result + "/event_" + init + "_" + sad + "_" + fin
	
	path_to_file_ini = path_to_test_dir + '/' + init + ".dump"
	path_to_file_sad = path_to_test_dir + '/' + sad + ".dump"
	path_to_file_fin = path_to_test_dir + '/' + fin + ".dump"

	initial_config_data = read_data_from_file(path_to_file_ini)[['x','y','z']]
	saddle_config_data = read_data_from_file(path_to_file_sad)[['x','y','z']]
	final_config_data = read_data_from_file(path_to_file_fin)[['x','y','z']]
	
	test_eng = event_energy(path_to_test_dir)
	ini_eng = test_eng[init]
	sad_eng = test_eng[sad]
	fin_eng = test_eng[fin]
	
	minimum1 = db.addMinimum(ini_eng, np.array(initial_config_data))
	minimum2 = db.addMinimum(fin_eng, np.array(final_config_data))
	db.addTransitionState(sad_eng, np.array(saddle_config_data), minimum1, minimum2)
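
The db argument is only assumed to expose addMinimum and addTransitionState; this matches, for example, the pele storage database. A hedged usage sketch (the import path and constructor below are assumptions, not confirmed by the example above):

from pele.storage import Database

db = Database(db="events.sqlite")
event_state = ["test1", ["min1000", "sad1001", "min1001"]]
single_event_adder(event_state, "/path/to/art_data_project/", db)
print "minima stored:", len(db.minima())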
Example #4
def filter_events_all_tests_stage_1(path_to_data_dir,
                                    input_param,
                                    save_results=True,
                                    re_calc=False):
    """
	this function implement stage 1 two criteria to remove a single event
	which does not satisfy stage 1 two criteria and finally save the filtered events into
	a selected_event.json
	"""
    list_of_test_id = input_param["list_of_test_id"]

    existed_tests = []
    for i in list_of_test_id:
        try:
            path_to_curr_test = data_dir_to_test_dir(path_to_data_dir, i)
            existed_tests.append(i)
        except Exception:
            pass
    list_of_test_id = existed_tests
    box_dim = input_param["box_dim"]

    # prefer the num_of_proc and re_calc given in input_param
    num_of_proc = input_param.get("num_of_proc", 1)
    re_calc = input_param.get("re_calc", re_calc)

    pool = mp.Pool(processes=num_of_proc)
    num_of_tests = len(list_of_test_id)
    test_lists = []
    for i in xrange(num_of_tests):
        path_to_curr_test = data_dir_to_test_dir(path_to_data_dir,
                                                 list_of_test_id[i])
        test_lists.append(path_to_curr_test)
    result = pool.map(
        partial(event_selection,
                box_dim=box_dim,
                save_results=True,
                re_calc=re_calc), test_lists)
    print "done filtering events for all tests in list_of_test_id for stage 1"
Example #5
def single_event_voronoi_classifier(event_state, path_to_data_dir):
    """
	this function load the calculated voronoi index results file voronoi_index_results.json
	classify the vornoi index into ICO, ICO_LIKE, GUM according to the criterion
	defined at the top level of the module
	"""
    if 'test' in event_state[0]:
        test_id = int(event_state[0][4:])
    else:
        test_id = int(event_state[0])
    path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)

    #path_to_test_dir = path_to_data_dir + event_state[0]
    path_to_curr_result = path_to_test_dir + "/results"

    init, sad, fin = event_state[1][0], event_state[1][1], event_state[1][2]
    path_to_curr_event = path_to_curr_result + "/event_" + init + "_" + sad + "_" + fin

    event_str = event_state[0] + "/event_" + init + "_" + sad + "_" + fin

    path_to_voro_results = path_to_curr_event + "/voronoi_index_results.json"
    if not os.path.exists(path_to_voro_results):
        print("the voronoi index has not been calculated for the event %s" %
              event_str)
        return None

    voronoi_index = json.load(open(path_to_voro_results, "r"))
    if voronoi_index["init"] == []:
        return None
    # classify voronoi index
    init_voronoi_class = classify_voronoi_index(voronoi_index["init"])
    sad_voronoi_class = classify_voronoi_index(voronoi_index["sad"])
    fin_voronoi_class = classify_voronoi_index(voronoi_index["fin"])

    voronoi_class = {
        "init": init_voronoi_class,
        "sad": sad_voronoi_class,
        "fin": fin_voronoi_class
    }

    # do visualization of the fraction of voronoi class
    path_to_image = path_to_curr_event + "/voronoi_hist.png"
    plot_voronoi_histogram_3(
        path_to_image,
        [init_voronoi_class, sad_voronoi_class, fin_voronoi_class])

    return voronoi_class
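
classify_voronoi_index and its criterion live at the top level of the module and are not reproduced here. A sketch under the common convention that a full icosahedron has Voronoi index <0,0,12,0>; the ICO_LIKE cutoff below is an assumption:

ICO_INDEX = [0, 0, 12, 0]  # full icosahedron <0,0,12,0>, a common convention

def classify_voronoi_index(voronoi_indices):
    # Hypothetical sketch; each index counts faces with 3, 4, 5, 6, ... edges,
    # i.e. <n3, n4, n5, n6, ...>. The ICO_LIKE rule below is an assumption; the
    # real criterion is defined at the top level of the module.
    classes = []
    for index in voronoi_indices:
        if list(index[:4]) == ICO_INDEX:
            classes.append("ICO")
        elif index[0] == 0 and index[2] >= 10:
            classes.append("ICO_LIKE")
        else:
            classes.append("GUM")
    return classes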
Example #6
def single_event_strain_stats(path_to_data_dir, event):
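    """
    look up the strain/displacement statistics tuple (init_sad, sad_fin, init_fin)
    of the given event from the events_stats.pkl file of its test
    """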
    if 'test' in event[0]:
        test_id = int(event[0][4:])
    else:
        test_id = int(event[0])
    path_to_curr_test = data_dir_to_test_dir(path_to_data_dir, test_id)

    #path_to_curr_test = path_to_data_dir + event[0]
    path_to_curr_event = path_to_curr_test + "/results/events_stats.pkl"
    init_sad, sad_fin, init_fin = None, None, None
    if os.path.exists(path_to_curr_event):
        result = pickle.load(open(path_to_curr_event, 'r'))
        for event_res in result:
            # event_res[3] is the event state; compare it to the current event
            if event_res[3] == event:
                init_sad = event_res[0]
                sad_fin = event_res[1]
                init_fin = event_res[2]
    return (init_sad, sad_fin, init_fin)
Example #7
def delete_art_tests(path_to_data_dir, central_atom_list):
    """
	file_names is a list of files to be deleted in the final art tests directory
	"""
    print "begining delete tests:", central_atom_list
    print "confirm deleting (y/n):"
    if prompt_yes_no() is True:
        for test in central_atom_list:
            try:
                path_to_test = data_dir_to_test_dir(path_to_data_dir, test)
                if os.path.isdir(path_to_test):
                    print "deleting the test %s" % path_to_test
                    shutil.rmtree(path_to_test)
                else:
                    print "test %s" % test, "does not exist in %s" % path_to_data_dir
            except Exception:
                print "test %s" % test, "does not exist in %s" % path_to_data_dir
    else:
        print "Not deleting!"
    print "done deleting all tests in specified list of tests in %s" % path_to_data_dir
Example #8
def single_event_local_atoms_index(event,path_to_data_dir,model,feature,target,residual_threshold =0.5, save_results=True, re_calc = False):
	"""
	this function developed correlation model between feature and target
	for a single event whose state is saved into event
	
	return:
		local_atom_index: a list
			a list with elements contains a list of local atom index for init_sad
			
	"""
	if 'test' in event[0]:
		test_id = int(event[0][4:])
	else:
		test_id = int(event[0])
	path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
	
	path_to_curr_result = path_to_test_dir + "/results"
	init,sad,fin = event[1][0], event[1][1], event[1][2]
	path_to_curr_event = path_to_curr_result + "/event_" + init + "_" + sad + "_" + fin
	path_to_init_sad = path_to_curr_event + "/init_sad"
	
	path_to_local_atom_index = path_to_curr_event + "/local_atoms_index.json"
	print "path_to_current_event:", path_to_curr_event
	if re_calc is False:
		if os.path.exists(path_to_local_atom_index):
			return json.load(open(path_to_local_atom_index,'r'))
	print "re_calculating"
	if feature == "displacement" and target == "shear_strain":
		init_sad_X,init_sad_y = get_strain_disp(path_to_init_sad)
	
	init_sad = outlier_detector(init_sad_X,init_sad_y,model,residual_threshold, return_index = True)
	if save_results is True:
		with open(path_to_local_atom_index, 'w') as f:
			json.dump(init_sad, f)
	return init_sad
Example #9
def refconfig_to_lammps_data(path_to_data_dir, event_state, input_param):
    """
	this function converts the configuration files of a single event stored
	in event_state from ART output refconfig type into lammps data type
	"""
    if 'test' in event_state[0]:
        test_id = int(event_state[0][4:])
    else:
        test_id = int(event_state[0])
    path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)

    init, sad, fin = event_state[1][0], event_state[1][1], event_state[1][2]
    #path_to_event = path_to_test_dir + "/results/event_" + init + "_" + sad + "_" + fin

    path_to_file_ini = path_to_test_dir + '/' + init
    path_to_file_sad = path_to_test_dir + '/' + sad
    path_to_file_fin = path_to_test_dir + '/' + fin

    ini_config_data = read_data_from_non_dump(path_to_file_ini)
    sad_config_data = read_data_from_non_dump(path_to_file_sad)
    fin_config_data = read_data_from_non_dump(path_to_file_fin)

    ini_config_data = ini_config_data[["item", "atom_id", "x", "y", "z"]]
    sad_config_data = sad_config_data[["item", "atom_id", "x", "y", "z"]]
    fin_config_data = fin_config_data[["item", "atom_id", "x", "y", "z"]]

    num_of_atoms = ini_config_data.shape[0]
    item_list = [x + 1 for x in range(num_of_atoms)]

    num_atom_types = len(ini_config_data['atom_id'].unique())
    #print "ini_config_data:", ini_config_data
    #ini_config_data.insert(loc=0, column='item', value=item_list)
    #sad_config_data.insert(loc=0, column='item', value=item_list)
    #fin_config_data.insert(loc=0, column='item', value=item_list)

    ini_file_to_save = os.path.join(path_to_test_dir, init + ".lammps")
    sad_file_to_save = os.path.join(path_to_test_dir, sad + ".lammps")
    fin_file_to_save = os.path.join(path_to_test_dir, fin + ".lammps")

    box_range = input_param["box_range"]

    with open(ini_file_to_save, 'w') as f:
        f.write(
            '# LAMMPS data file written by OVITO\n%s atoms\n%s atom types\n%s %s xlo xhi\n%s %s ylo yhi\n%s %s zlo zhi\n\nAtoms # atomic\n\n'
            % (num_of_atoms, num_atom_types, box_range[0][0], box_range[0][1],
               box_range[1][0], box_range[1][1], box_range[2][0],
               box_range[2][1]))
        ini_config_data.to_csv(f, header=False, index=False, sep=' ')
    print "saving file in %s" % ini_file_to_save

    with open(sad_file_to_save, 'w') as f:
        f.write(
            '# LAMMPS data file written by OVITO\n%s atoms\n%s atom types\n%s %s xlo xhi\n%s %s ylo yhi\n%s %s zlo zhi\n\nAtoms # atomic\n\n'
            % (num_of_atoms, num_atom_types, box_range[0][0], box_range[0][1],
               box_range[1][0], box_range[1][1], box_range[2][0],
               box_range[2][1]))
        sad_config_data.to_csv(f, header=False, index=False, sep=' ')
    print "saving file in %s" % sad_file_to_save

    with open(fin_file_to_save, 'w') as f:
        f.write(
            '# LAMMPS data file written by OVITO\n%s atoms\n%s atom types\n%s %s xlo xhi\n%s %s ylo yhi\n%s %s zlo zhi\n\nAtoms # atomic\n\n'
            % (num_of_atoms, num_atom_types, box_range[0][0], box_range[0][1],
               box_range[1][0], box_range[1][1], box_range[2][0],
               box_range[2][1]))
        fin_config_data.to_csv(f, header=False, index=False, sep=' ')
    print "saving file in %s" % fin_file_to_save
Example #10
def single_event_pn_calculator(event_state,
                               path_to_data_dir,
                               save_results=True,
                               re_calc=False,
                               is_non_affine=False):
    # go to event directory
    if 'test' in event_state[0]:
        test_id = int(event_state[0][4:])
    else:
        test_id = int(event_state[0])
    path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
    path_to_curr_result = path_to_test_dir + "/results"
    init, sad, fin = event_state[1][0], event_state[1][1], event_state[1][2]
    path_to_curr_event = path_to_curr_result + "/event_" + init + "_" + sad + "_" + fin
    if not os.path.exists(path_to_curr_event):
        os.makedirs(path_to_curr_event)
    # check if the results have already been saved
    path_pn_number_results = os.path.join(path_to_curr_event, "pn_number.json")
    path_pn_index_results = os.path.join(path_to_curr_event, "pn_index.json")

    if re_calc is False:
        if os.path.exists(path_pn_number_results) and os.path.exists(
                path_pn_index_results):
            return (json.load(open(path_pn_number_results, "r")),
                    json.load(open(path_pn_index_results, "r")))
        else:
            print "begin calculating pn number and index"
    else:
        print "re_calculating pn number and index"

    # begin working on each process of this event
    path_to_init_sad = path_to_curr_event + "/init_sad"
    path_to_sad_fin = path_to_curr_event + "/sad_fin"
    path_to_init_fin = path_to_curr_event + "/init_fin"

    if is_non_affine is False:
        # read displacement file for each event
        path_to_all_displacement_init_sad = path_to_init_sad + "/displacement_results_dict.pkl"
        path_to_all_displacement_sad_fin = path_to_sad_fin + "/displacement_results_dict.pkl"
        path_to_all_displacement_init_fin = path_to_init_fin + "/displacement_results_dict.pkl"
        if os.path.exists(
                path_to_all_displacement_init_sad) and os.path.exists(
                    path_to_all_displacement_sad_fin) and os.path.exists(
                        path_to_all_displacement_init_fin):
            all_disp_init_sad = pickle.load(
                open(path_to_all_displacement_init_sad, 'r'))
            all_disp_sad_fin = pickle.load(
                open(path_to_all_displacement_sad_fin, 'r'))
            all_disp_init_fin = pickle.load(
                open(path_to_all_displacement_init_fin, 'r'))
        else:
            raise Exception(
                "displacement results have not been found in current event:%s"
                % path_to_curr_event)
        # perform the calculations according to the formulas proposed in
        # "Local structural excitations in model glasses" Swayamjyoti et al PRB, 2014
        pn_init_sad, pn_index_init_sad = get_pn_number_index(all_disp_init_sad)
        pn_sad_fin, pn_index_sad_fin = get_pn_number_index(all_disp_sad_fin)
        pn_init_fin, pn_index_init_fin = get_pn_number_index(all_disp_init_fin)
    elif is_non_affine is True:
        path_to_all_shear_init_sad = path_to_init_sad + "/strain_results_dict.pkl"
        path_to_all_shear_sad_fin = path_to_sad_fin + "/strain_results_dict.pkl"
        path_to_all_shear_init_fin = path_to_init_fin + "/strain_results_dict.pkl"
        if os.path.exists(path_to_all_shear_init_sad) and os.path.exists(
                path_to_all_shear_sad_fin) and os.path.exists(
                    path_to_all_shear_init_fin):
            all_strains_init_sad = pickle.load(
                open(path_to_all_shear_init_sad, 'r'))
            all_strains_sad_fin = pickle.load(
                open(path_to_all_shear_sad_fin, 'r'))
            all_strains_init_fin = pickle.load(
                open(path_to_all_shear_init_fin, 'r'))

            all_shear_init_sad = extract_shear_dict(all_strains_init_sad)
            all_shear_sad_fin = extract_shear_dict(all_strains_sad_fin)
            all_shear_init_fin = extract_shear_dict(all_strains_init_fin)

        else:
            raise Exception(
                "strain results have not been found in current event:%s" %
                path_to_curr_event)
        # perform the calculations according to the formulas proposed in
        # "Local structural excitations in model glasses" Swayamjyoti et al PRB, 2014
        pn_init_sad, pn_index_init_sad = get_pn_number_index(
            all_shear_init_sad, True)
        pn_sad_fin, pn_index_sad_fin = get_pn_number_index(
            all_shear_sad_fin, True)
        pn_init_fin, pn_index_init_fin = get_pn_number_index(
            all_shear_init_fin, True)

    pn_number = {
        "init_sad": pn_init_sad,
        "sad_fin": pn_sad_fin,
        "init_fin": pn_init_fin
    }
    pn_index = {
        "init_sad": pn_index_init_sad,
        "sad_fin": pn_index_sad_fin,
        "init_fin": pn_index_init_fin
    }
    # save the calculated results into files
    if save_results is True:
        print("begin saving pn results into json files")
        with open(path_pn_number_results, 'w') as f:
            json.dump(pn_number, f)
        with open(path_pn_index_results, 'w') as f:
            json.dump(pn_index, f)
        print "pn results saved into two json files!"
    return (pn_number, pn_index)
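
get_pn_number_index is not shown in these examples. The participation number of Swayamjyoti et al. (PRB 2014) cited in the comments is commonly written as PN = (sum_i d_i^2)^2 / sum_i d_i^4 over the per-atom displacement magnitudes d_i; the sketch below assumes that form and simply takes the PN largest contributors as the participating-atom index list:

import numpy as np

def get_pn_number_index(results_dict, is_scalar=False):
    # Hypothetical sketch of the helper used above. results_dict maps an atom id
    # to its displacement vector, or to a scalar shear strain when is_scalar is True.
    atom_ids = list(results_dict.keys())
    if is_scalar:
        d = np.array([results_dict[i] for i in atom_ids], dtype=float)
    else:
        d = np.array([np.linalg.norm(results_dict[i]) for i in atom_ids])
    d2 = d ** 2
    # assumed participation number PN = (sum_i d_i^2)^2 / sum_i d_i^4
    pn = float(d2.sum() ** 2 / (d2 ** 2).sum())
    # take the round(PN) largest contributors as the participating atoms
    order = np.argsort(d)[::-1]
    pn_index = [atom_ids[k] for k in order[:int(round(pn))]]
    return pn, pn_index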
Example #11
def strain_events_stats_visualization_old(path_to_data_dir, list_of_test_id):
    """
	this function visualize the strain statistics from tests listed in
	list_of_test_id, the user can customize the tests they want to know
	their statistics, among these tests, only the one has events_stat.pkl
	file will be used for plot
	
	path_to_data_dir: str
		path to the data dir
	list_of_tests: list
		a number list showing the test id number
		e.g. list [1,2] mean that visualizing test1, test2 data
		
	"""
    # set up the statistical quantities for the tests
    disp_ave, disp_std, disp_max , disp_ave_2, disp_std_2, disp_max_2, disp_ave_3, disp_std_3, disp_max_3 = [], [], [], [], [], [], [], [], []

    shear_ave, shear_std, shear_max, shear_ave_2, shear_std_2, shear_max_2, shear_ave_3, shear_std_3, shear_max_3 = [], [], [], [], [], [], [], [], []

    vol_ave, vol_std, vol_max, vol_ave_2, vol_std_2, vol_max_2, vol_ave_3, vol_std_3, vol_max_3 = [], [], [], [], [], [], [], [], []

    for i in list_of_test_id:
        path_to_curr_test = data_dir_to_test_dir(path_to_data_dir, i)
        path_to_curr_event = path_to_curr_test + "/results/events_stats.pkl"
        # skip any test that does not have events_stats.pkl among the tests
        # specified in list_of_test_id
        if os.path.exists(path_to_curr_event):
            result = pickle.load(open(path_to_curr_event, 'r'))
            for event in result:
                init_sad = event[0]
                sad_fin = event[1]
                init_fin = event[2]
                # gather the statistics of init_sad, sad_fin and init_fin
                disp_ave.append(init_sad["ave"][2])
                disp_std.append(init_sad["std"][2])
                disp_max.append(init_sad["max"][2])

                shear_ave.append(init_sad["ave"][1])
                shear_std.append(init_sad["std"][1])
                shear_max.append(init_sad["max"][1])

                vol_ave.append(init_sad["ave"][0])
                vol_std.append(init_sad["std"][0])
                vol_max.append(init_sad["max"][0])

                disp_ave_2.append(sad_fin["ave"][2])
                disp_std_2.append(sad_fin["std"][2])
                disp_max_2.append(sad_fin["max"][2])

                shear_ave_2.append(sad_fin["ave"][1])
                shear_std_2.append(sad_fin["std"][1])
                shear_max_2.append(sad_fin["max"][1])

                vol_ave_2.append(sad_fin["ave"][0])
                vol_std_2.append(sad_fin["std"][0])
                vol_max_2.append(sad_fin["max"][0])

                disp_ave_3.append(init_fin["ave"][2])
                disp_std_3.append(init_fin["std"][2])
                disp_max_3.append(init_fin["max"][2])

                shear_ave_3.append(init_fin["ave"][1])
                shear_std_3.append(init_fin["std"][1])
                shear_max_3.append(init_fin["max"][1])

                vol_ave_3.append(init_fin["ave"][0])
                vol_std_3.append(init_fin["std"][0])
                vol_max_3.append(init_fin["max"][0])

    plot_histogram_3(path_to_data_dir + "/disp_ave.png",
                     [disp_ave, disp_ave_2, disp_ave_3])
    plot_histogram_3(path_to_data_dir + "/disp_std.png",
                     [disp_std, disp_std_2, disp_std_3])
    plot_histogram_3(path_to_data_dir + "/disp_max.png",
                     [disp_max, disp_max_2, disp_max_3])

    plot_histogram_3(path_to_data_dir + "/shear_ave.png",
                     [shear_ave, shear_ave_2, shear_ave_3])
    plot_histogram_3(path_to_data_dir + "/shear_std.png",
                     [shear_std, shear_std_2, shear_std_3])
    plot_histogram_3(path_to_data_dir + "/shear_max.png",
                     [shear_max, shear_max_2, shear_max_3])

    plot_histogram_3(path_to_data_dir + "/vol_ave.png",
                     [vol_ave, vol_ave_2, vol_ave_3])
    plot_histogram_3(path_to_data_dir + "/vol_std.png",
                     [vol_std, vol_std_2, vol_std_3])
    plot_histogram_3(path_to_data_dir + "/vol_max.png",
                     [vol_max, vol_max_2, vol_max_3])
    print "done plotting strain statistics for all interested tests!"
Example #12
def filter_events_all_tests_stage_2(path_to_data_dir,
                                    input_param,
                                    save_results=True,
                                    re_calc=False):
    """
	this function implement stage 2 criteria 3 to remove the redundancy of event pairs
	it will sort all event pair and finally save the unique events into
	a pkl file as a list of strings, such as test1/event_init_sad_fin
	
	criteria 3:
	for the remaining refined searches, any pair is redundant if 
	abs(D(fin - init)_1-D(fin-init)_2) < 0.1 (A)
	AND abs(E(fin-init)_1-E(fin-init))_2 < 0.005(eV)
	AND abs(E(sad-init)_1-E(sad-init))_2 < 0.01(eV)
	"""
    list_of_test_id = input_param["list_of_test_id"]

    existed_tests = []
    for i in list_of_test_id:
        try:
            path_to_curr_test = data_dir_to_test_dir(path_to_data_dir, i)
            existed_tests.append(i)
        except Exception:
            pass
    list_of_test_id = existed_tests

    box_dim = input_param["box_dim"]
    identical_event_criteria = input_param["identical_event_criteria"]
    if "num_of_proc" in input_param and "re_calc" in input_param:
        num_of_proc = input_param["num_of_proc"]
        re_calc = input_param["re_calc"]

    if re_calc is False:
        path_to_final_selected_events = os.path.join(
            path_to_data_dir, "final_selected_events.json")
        if os.path.exists(path_to_final_selected_events):
            print "reduadancy has already been checked, total number of final selected events:"
            final_events = json.load(open(path_to_final_selected_events, 'r'))
            print len(final_events)
            return

    all_selected_events = get_list_of_selected_events_str(
        path_to_data_dir, list_of_test_id)

    # remove the second event of each identical pair found
    num_of_selected_events = len(all_selected_events)
    print "total number of selected events:", num_of_selected_events
    removed_index = []
    pool = mp.Pool(processes=num_of_proc)
    for i in xrange(num_of_selected_events):
        if i in removed_index:
            continue
        tests_list = [
            all_selected_events[j]
            for j in xrange(i + 1, num_of_selected_events)
        ]
        result = pool.map(
            partial(identical_events,
                    path_to_data_dir=path_to_data_dir,
                    event_1=all_selected_events[i],
                    box_dim=box_dim,
                    identical_event_criteria=identical_event_criteria),
            tests_list)
        #for j in xrange(i+1,num_of_selected_events):
        #	is_same = identical_events(path_to_data_dir, all_selected_events[i], all_selected_events[j], box_dim, identical_event_criteria)
        #	if is_same:
        #		removed_index.append(j)
        for k, x in enumerate(result):
            if x is True:
                removed_index.append(k + i + 1)
    removed_index = np.unique(removed_index)
    # final_selected_events = np.delete(all_selected_events, removed_index).tolist()
    final_selected_events = [
        i for j, i in enumerate(all_selected_events) if j not in removed_index
    ]
    print "total number of selected events before redundacy check:", num_of_selected_events
    print "total number of final selected events after removing redundacy:", len(
        final_selected_events)

    #save it into a file called final_selected_events.json
    if save_results is True:
        path_to_final_selected_events = os.path.join(
            path_to_data_dir, "final_selected_events.json")
        json.dump(final_selected_events,
                  open(path_to_final_selected_events, 'w'))

    print "done redudancy check for all interested tests!"
Example #13
def identical_events(event_2,
                     path_to_data_dir,
                     event_1,
                     box_dim,
                     identical_event_criteria={
                         "D_init_fin": 0.1,
                         "E_init_fin": 0.005,
                         "E_init_sad": 0.01
                     }):
    """
	this function return True if two events are identical
	
	criteria 3:
	for the remaining refined searches, any pair is redundant if
	abs(D(fin - init)_1-D(fin-init)_2) < D_init_fin e.g. 0.1 (A) for Cu-Zr
	AND abs(E(fin-init)_1-E(fin-init)_2) < E_init_fin e.g. 0.005(eV) for Cu-Zr
	AND abs(E(sad-init)_1-E(sad-init)_2) < E_init_sad e.g. 0.01(eV) for Cu-Zr
	"""
    D_init_fin = identical_event_criteria["D_init_fin"]
    E_init_fin = identical_event_criteria["E_init_fin"]
    E_init_sad = identical_event_criteria["E_init_sad"]

    if 'test' in event_1[0]:
        test_id_1 = int(event_1[0][4:])
    else:
        test_id_1 = int(event_1[0])
    path_to_event_1_test = data_dir_to_test_dir(path_to_data_dir, test_id_1)

    if 'test' in event_2[0]:
        test_id_2 = int(event_2[0][4:])
    else:
        test_id_2 = int(event_2[0])
    path_to_event_2_test = data_dir_to_test_dir(path_to_data_dir, test_id_2)

    #path_to_event_1_test = path_to_data_dir + event_1[0]
    #path_to_event_2_test = path_to_data_dir + event_2[0]
    event_1_init, event_1_sad, event_1_fin = event_1[1][0], event_1[1][1], event_1[1][2]
    event_2_init, event_2_sad, event_2_fin = event_2[1][0], event_2[1][1], event_2[1][2]

    event_1_energy = event_energy(path_to_event_1_test)

    event_1_init_eng, event_1_sad_eng, event_1_fin_eng = (
        event_1_energy[event_1_init], event_1_energy[event_1_sad],
        event_1_energy[event_1_fin])

    event_2_energy = event_energy(path_to_event_2_test)

    event_2_init_eng, event_2_sad_eng, event_2_fin_eng = (
        event_2_energy[event_2_init], event_2_energy[event_2_sad],
        event_2_energy[event_2_fin])

    cond_1 = abs(event_1_fin_eng - event_1_init_eng -
                 (event_2_fin_eng - event_2_init_eng)) < E_init_fin

    cond_2 = abs(event_1_sad_eng - event_1_init_eng -
                 (event_2_sad_eng - event_2_init_eng)) < E_init_sad

    if not (cond_1 and cond_2):
        return False

    distance_1 = event_distance(path_to_event_1_test,
                                [event_1_init, event_1_sad, event_1_fin],
                                box_dim)

    distance_2 = event_distance(path_to_event_2_test,
                                [event_2_init, event_2_sad, event_2_fin],
                                box_dim)

    cond_3 = abs(distance_1 - distance_2) < D_init_fin

    return cond_3
Example #14
def single_event_voronoi_calculator(event_state, path_to_data_dir, box_range, cut_off, atom_list = None,max_edge_count=8, periodic = [True,True,True], save_results = True, re_calc = False):
	"""
	this function calculates the voronoi index of user specified atoms stored in atom_list
	for all configurations (init,sad,fin) in a single event that are specified in the event state
	Input:
		event_state: a list
			a list with the 1st element being the test_id, e.g. test1
			the 2nd element being a list containing the string of init, sad, fin
			configuration file str, e.g. [min1000,sad1001,min1001]
	"""
	if 'test' in event_state[0]:
		test_id = int(event_state[0][4:])
	else:
		test_id = int(event_state[0])
	path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
			
	#path_to_test_dir = path_to_data_dir + event_state[0]
	path_to_curr_result = path_to_test_dir + "/results"
	# this is redundant here; if the dir already exists, os.makedirs acting on the
	# same leaf dir may cause a race condition under parallel processing. In python3
	# this can be avoided by passing exist_ok=True; in python2, rewrite the code so
	# os.makedirs acts on different leaf dirs, or wrap the call in try/except, see
	# https://stackoverflow.com/questions/12468022/python-fileexists-error-when-making-directory
	# https://stackoverflow.com/questions/273192/how-can-i-create-a-directory-if-it-does-not-exist
	#if not os.path.exists(path_to_curr_result):
	#	os.makedirs(path_to_curr_result)
	
	init, sad, fin = event_state[1][0], event_state[1][1], event_state[1][2]
	path_to_curr_event = path_to_curr_result + "/event_" + init + "_" + sad + "_" + fin
	if not os.path.exists(path_to_curr_event):
		os.makedirs(path_to_curr_event)
	
	path_to_voro_results = path_to_curr_event + "/voronoi_index_results.json"
	if re_calc is False:
		if os.path.exists(path_to_voro_results):
			return json.load(open(path_to_voro_results,"r"))
	
	path_to_file_ini = path_to_test_dir + '/' + init + ".dump"
	path_to_file_sad = path_to_test_dir + '/' + sad + ".dump"
	path_to_file_fin = path_to_test_dir + '/' + fin + ".dump"
		
	initial_config_data = read_data_from_file(path_to_file_ini)
	saddle_config_data = read_data_from_file(path_to_file_sad)
	final_config_data = read_data_from_file(path_to_file_fin)
	
	box_dim = [box_range[0][1] - box_range[0][0], box_range[1][1] - box_range[1][0], box_range[2][1] - box_range[2][0]]
	
	path_to_local_atom_index = path_to_curr_event + "/local_atoms_index.json"
	
	if atom_list is None:
		atom_list = (initial_config_data["item"]).tolist()
	
	if atom_list == "local":
		if os.path.exists(path_to_local_atom_index):
			# for local mode of voronoi calculation
			print ("\n starting local mode voronoi calculations")
			local_atom_list = json.load(open(path_to_local_atom_index,"r"))
			if local_atom_list == []:
				return None
			atom_list = [atom + 1 for atom in local_atom_list]
		else:
			raise Exception("local_atoms_index.json does not exist in %s"%path_to_curr_event)

	init_voronoi_index = single_config_voronoi_calculator(initial_config_data, box_range, cut_off, atom_list=atom_list, max_edge_count = max_edge_count, periodic=periodic)
	sad_voronoi_index = single_config_voronoi_calculator(saddle_config_data, box_range, cut_off, atom_list=atom_list, max_edge_count = max_edge_count, periodic=periodic)
	fin_voronoi_index = single_config_voronoi_calculator(final_config_data, box_range, cut_off, atom_list=atom_list, max_edge_count = max_edge_count, periodic=periodic)
	
	voronoi_index = {"init":init_voronoi_index, "sad":sad_voronoi_index, "fin":fin_voronoi_index}
	
	if save_results is True:
		print ("begin saving voronoi results into json file")
		with open(path_to_voro_results, 'w') as f:
			json.dump(voronoi_index, f)
	return voronoi_index
Example #15
def single_event_data_extractor(event_state, path_to_data_dir, atom_list):
    # go to event directory
    if 'test' in event_state[0]:
        test_id = int(event_state[0][4:])
    else:
        test_id = int(event_state[0])
    path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
    path_to_curr_result = path_to_test_dir + "/results"
    init, sad, fin = event_state[1][0], event_state[1][1], event_state[1][2]
    path_to_curr_event = path_to_curr_result + "/event_" + init + "_" + sad + "_" + fin

    print "extracting atom_list:"
    path_to_file_ini = path_to_test_dir + '/' + init + ".dump"
    initial_config_data = read_data_from_file(path_to_file_ini)
    (atom_list_init_sad, atom_list_sad_fin,
     atom_list_init_fin) = get_list_of_atoms_from_atom_list(
         path_to_curr_event, initial_config_data, atom_list)
    atom_num_init_sad, atom_num_sad_fin, atom_num_init_fin = len(
        atom_list_init_sad), len(atom_list_sad_fin), len(atom_list_init_fin)

    print "extracting energy data:"
    event, act_eng, relax_eng = event_act_relax_energy(event_state,
                                                       path_to_data_dir)
    init_sad_eng, sad_fin_eng, init_fin_eng = act_eng, -relax_eng, act_eng - relax_eng

    print "extracting strain and displacement data:"
    # strain = {"init_sad":[], "sad_fin":[], "init_fin:[]"}
    # displacement = {"init_sad":[], "sad_fin":[], "init_fin:[]"}
    # atom_list= {"init_sad":[], "sad_fin":[], "init_fin:[]"}
    atom_list = {
        "init_sad": atom_list_init_sad,
        "sad_fin": atom_list_sad_fin,
        "init_fin": atom_list_init_fin
    }
    ave_vol_strain, ave_shear_strain, ave_disp = event_ave_strain_displacement(
        event_state, path_to_data_dir, atom_list)

    print "extracting voronoi volume data:"
    init_vol, sad_vol, fin_vol = event_voronoi_volume(event_state,
                                                      path_to_data_dir)
    init_sad_vol, sad_fin_vol, init_fin_vol = sad_vol - init_vol, fin_vol - sad_vol, fin_vol - init_vol

    event_voro_res = event_voronoi_class(event_state, path_to_data_dir)

    event_init_sad = pd.DataFrame(dtype=object)
    event_init_sad.at[0, "event_id"] = str(event_state)
    event_init_sad.at[0, "atom_list"] = str(atom_list_init_sad)
    event_init_sad.at[0, "num_of_atoms"] = str(atom_num_init_sad)
    event_init_sad.at[0, "ave_vol_strain"] = str(ave_vol_strain["init_sad"])
    event_init_sad.at[0,
                      "ave_shear_strain"] = str(ave_shear_strain["init_sad"])
    event_init_sad.at[0, "ave_disp"] = str(ave_disp["init_sad"])
    event_init_sad.at[0, "start_ICO_frac"] = str(event_voro_res["init"][0])
    event_init_sad.at[0,
                      "start_ICO_like_frac"] = str(event_voro_res["init"][1])
    event_init_sad.at[0, "start_GUM_frac"] = str(event_voro_res["init"][2])
    event_init_sad.at[0, "end_ICO_frac"] = str(event_voro_res["sad"][0])
    event_init_sad.at[0, "end_ICO_like_frac"] = str(event_voro_res["sad"][1])
    event_init_sad.at[0, "end_GUM_frac"] = str(event_voro_res["sad"][2])
    event_init_sad.at[0, "vol_diff"] = str(init_sad_vol)
    event_init_sad.at[0, "eng_diff"] = str(init_sad_eng)

    event_sad_fin = pd.DataFrame(dtype=object)
    event_sad_fin.at[0, "event_id"] = str(event_state)
    event_sad_fin.at[0, "atom_list"] = str(atom_list_sad_fin)
    event_sad_fin.at[0, "num_of_atoms"] = str(atom_num_sad_fin)
    event_sad_fin.at[0, "ave_vol_strain"] = str(ave_vol_strain["sad_fin"])
    event_sad_fin.at[0, "ave_shear_strain"] = str(ave_shear_strain["sad_fin"])
    event_sad_fin.at[0, "ave_disp"] = str(ave_disp["sad_fin"])
    event_sad_fin.at[0, "start_ICO_frac"] = str(event_voro_res["sad"][0])
    event_sad_fin.at[0, "start_ICO_like_frac"] = str(event_voro_res["sad"][1])
    event_sad_fin.at[0, "start_GUM_frac"] = str(event_voro_res["sad"][2])
    event_sad_fin.at[0, "end_ICO_frac"] = str(event_voro_res["fin"][0])
    event_sad_fin.at[0, "end_ICO_like_frac"] = str(event_voro_res["fin"][1])
    event_sad_fin.at[0, "end_GUM_frac"] = str(event_voro_res["fin"][2])
    event_sad_fin.at[0, "vol_diff"] = str(sad_fin_vol)
    event_sad_fin.at[0, "eng_diff"] = str(sad_fin_eng)

    event_init_fin = pd.DataFrame(dtype=object)
    event_init_fin.at[0, "event_id"] = str(event_state)
    event_init_fin.at[0, "atom_list"] = str(atom_list_init_fin)
    event_init_fin.at[0, "num_of_atoms"] = str(atom_num_init_fin)
    event_init_fin.at[0, "ave_vol_strain"] = str(ave_vol_strain["init_fin"])
    event_init_fin.at[0,
                      "ave_shear_strain"] = str(ave_shear_strain["init_fin"])
    event_init_fin.at[0, "ave_disp"] = str(ave_disp["init_fin"])
    event_init_fin.at[0, "start_ICO_frac"] = str(event_voro_res["init"][0])
    event_init_fin.at[0,
                      "start_ICO_like_frac"] = str(event_voro_res["init"][1])
    event_init_fin.at[0, "start_GUM_frac"] = str(event_voro_res["init"][2])
    event_init_fin.at[0, "end_ICO_frac"] = str(event_voro_res["fin"][0])
    event_init_fin.at[0, "end_ICO_like_frac"] = str(event_voro_res["fin"][1])
    event_init_fin.at[0, "end_GUM_frac"] = str(event_voro_res["fin"][2])
    event_init_fin.at[0, "vol_diff"] = str(init_fin_vol)
    event_init_fin.at[0, "eng_diff"] = str(init_fin_eng)

    #event_init_sad = = pd.DataFrame({"event_id":event_state,"atom_list":atom_list_init_sad,"num_of_atoms":atom_num_init_sad,"ave_vol_strain":ave_vol_strain["init_sad"],"ave_shear_strain":ave_shear_strain["init_sad"],"ave_disp":ave_disp["init_sad"],"start_ICO_frac":event_voro_res["init"][0],"start_ICO_like_frac":event_voro_res["init"][1],"start_GUM_frac":event_voro_res["init"][2],"end_ICO_frac":event_voro_res["sad"][0],"end_ICO_like_frac":event_voro_res["sad"][1],"end_GUM_frac":event_voro_res["sad"][2], "vol_diff":init_sad_vol,"eng_diff":init_sad_eng})
    #event_sad_fin = pd.DataFrame({"event_id":event_state,"atom_list":atom_list_sad_fin,"num_of_atoms":atom_num_sad_fin,"ave_vol_strain":ave_vol_strain["sad_fin"],"ave_shear_strain":ave_shear_strain["sad_fin"],"ave_disp":ave_disp["sad_fin"],"start_ICO_frac":event_voro_res["sad"][0],"start_ICO_like_frac":event_voro_res["sad"][1],"start_GUM_frac":event_voro_res["sad"][2],"end_ICO_frac":event_voro_res["fin"][0],"end_ICO_like_frac":event_voro_res["fin"][1],"end_GUM_frac":event_voro_res["fin"][2],"vol_diff": sad_fin_vol,"eng_diff":sad_fin_eng})
    #event_init_fin = pd.DataFrame({"event_id":event_state,"atom_list":atom_list_init_fin,"num_of_atoms":atom_num_init_fin,"ave_vol_strain":ave_vol_strain["init_fin"],"ave_shear_strain":ave_shear_strain["init_fin"],"ave_disp":ave_disp["init_fin"],"start_ICO_frac":event_voro_res["init"][0],"start_ICO_like_frac":event_voro_res["init"][1],"start_GUM_frac":event_voro_res["init"][2],"end_ICO_frac":event_voro_res["fin"][0],"end_ICO_like_frac":event_voro_res["fin"][1],"end_GUM_frac":event_voro_res["fin"][2],"vol_diff": init_fin_vol,"eng_diff":init_fin_eng})

    return (event_init_sad, event_sad_fin, event_init_fin)
Example #16
def delete_unused_events_data(path_to_data_dir, input_param):
    """
	this function delete the configuration files of un-used events data
	inside each test of an art data project to save disk space, especially
	when users has not used WRITE_REJECTED_EVENTS = .False. option in bart.sh
	
	First, it will read the final_selected_events.json, which stores all useful events.
	
	Second, it will delete all configuration files that are not saved
	in the final_selected_events.json
	"""
    path_to_final_selected_events = os.path.join(path_to_data_dir,
                                                 "final_selected_events.json")
    if os.path.isfile(path_to_final_selected_events):
        print "reading final_selected_events.json, ensure that you always get most updated final_selected_events.json by --filter --re_calc if you have calculated more tests"
        final_selected_events = json.load(
            open(path_to_final_selected_events, "r"))
    else:
        raise Exception("final_selected_events.json does not exist in %s" %
                        path_to_data_dir)

    all_tests_events = dict()
    for event in final_selected_events:
        test_id = int(event[0][4:])
        init, sad, fin = event[1][0], event[1][1], event[1][2]
        if test_id in all_tests_events:
            all_tests_events[test_id].extend([init, sad, fin])
        else:
            all_tests_events[test_id] = [init, sad, fin]
            #all_tests_id.append(test_id)

    print "confirm deleting (y/n):"
    if prompt_yes_no() is False:
        print "response received, not deleting"
        return None
    else:
        print "response received, begin deleting"

    path_to_central_atom_list = os.path.join(path_to_data_dir,
                                             "central_atom_list.json")
    if os.path.isfile(
            path_to_central_atom_list) or "central_atom_list" in input_param:
        if os.path.isfile(path_to_central_atom_list):
            print "reading central_atom_list.json"
            central_atom_list = json.load(open(path_to_central_atom_list, 'r'))
        elif "central_atom_list" in input_param:
            print "reading central_atom_list from input SETTINGs file"
            central_atom_list = input_param['central_atom_list']
        delete_tests_list = []
        saved_tests_list = []
        for test_id in central_atom_list:
            if test_id not in all_tests_events:
                delete_tests_list.append(test_id)
            else:
                saved_tests_list.append(test_id)
        delete_art_tests(path_to_data_dir, delete_tests_list)
        print ">>> confirm updating central_atom_list.json file (y/n): save original central_atom_list.json if necessary"
        if prompt_yes_no() is True:
            print "response received, updating!"
            update_central_atom_list(path_to_data_dir, saved_tests_list)
        else:
            print "response received, not updating"
    else:
        print "central_atom_list.json does not exist in %s" % path_to_data_dir
        print "central_atom_list key does not exist in input SETTING file"
        print ">>> only deleting unused events for all tests stored in final_selected_events.json"

    print "\n Now begin deleting unused events configuration files for all tests stored in final_selected_events.json"
    for test_id in all_tests_events:
        path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
        for f in os.listdir(path_to_test_dir):
            is_match_config = re.match(r"min[0-9]+", f) or re.match(
                r"sad[0-9]+", f) or re.match(r"min[0-9]+\.dump", f) or re.match(
                    r"sad[0-9]+\.dump", f)
            path_to_file = os.path.join(path_to_test_dir, f)
            is_file = os.path.isfile(path_to_file)
            if is_match_config and is_file:
                if f.endswith('.dump'):
                    config_id = f[:-5]
                else:
                    config_id = f
                if config_id not in all_tests_events[test_id]:
                    print "deleting the file %s" % path_to_file
                    os.remove(path_to_file)
    print "done deleting unused events data!"
Example #17
def check_tests_status(path_to_data_dir, input_param):
    """
	check the status of each test of central_atom_list in input_param under path_to_data_dir
	as finished test or unfinished test
	
	This function will always overwrite the most update test status result
	into input SETTINGS file input_tests_done.json and input_tests_undone.json
	for the following processing
	
	Return:
		tests_undone: list
			the list of test id that has not been done yet
	"""

    # read the Max_Number_Events value from bart.sh in path_to_input_files
    path_to_input_files = input_param['path_to_input_files']
    path_to_bart = os.path.join(path_to_input_files, "bart.sh")

    with open(path_to_bart, 'r') as f:
        data = f.read()
    pattern = "(setenv[\s]+Max_Number_Events[\s]+)([\d]+)"
    match = re.search(pattern, data)
    max_num_events = int(match.group(2))

    #list_of_test_id = input_param["list_of_test_id"]
    central_atom_list = input_param['central_atom_list']
    tests_not_done = []
    tests_done = []
    for test in central_atom_list:
        try:
            path_to_test = data_dir_to_test_dir(path_to_data_dir, test)
        except Exception:
            tests_not_done.append(test)
            continue
        # check if this test contains the final configuration file
        # based on max_num_events
        final_min_id = 1000 + max_num_events
        path_to_final_min = os.path.join(path_to_test, "min%s" % final_min_id)
        if os.path.isfile(path_to_final_min):
            tests_done.append(test)
        else:
            tests_not_done.append(test)
    print "In %s , finished tests ids in central_atom_list of current input SETTINGS file are:" % path_to_data_dir, tests_done
    print "In %s , un-finished tests ids in central_atom_list of current input SETTINGS file are:" % path_to_data_dir, tests_not_done
    print "In %s, start creating art_data input SETTINGs files for finished tests (input_tests_done.json) and unfinished tests(input_tests_undone.json):" % path_to_data_dir

    path_to_tests_done = os.path.join(path_to_data_dir,
                                      "input_tests_done.json")
    path_to_tests_undone = os.path.join(path_to_data_dir,
                                        "input_tests_undone.json")

    input_param_tests_done = copy.deepcopy(input_param)
    input_param_tests_undone = copy.deepcopy(input_param)
    input_param_tests_done["central_atom_list"] = tests_done
    input_param_tests_undone["central_atom_list"] = tests_not_done

    input_param_tests_done["list_of_test_id"] = tests_done
    input_param_tests_undone["list_of_test_id"] = tests_not_done

    with open(path_to_tests_done, 'w') as f:
        json.dump(input_param_tests_done, f, indent=2)
    with open(path_to_tests_undone, 'w') as f:
        json.dump(input_param_tests_undone, f, indent=2)

    print "\n"
    print "For finished tests:"
    print "Now user can check only the results of finished tests by using art_data -s input_tests_done.json --filter, art_data -s input_tests_done.json --eng --calc etc"
    print "For unfinished tests:"
    print "Now user can choose to delete these unfinished tests completely by using art_data -s input_tests_undone.json --art --delete_tests"
    print "Then user can continue to run these unfinished tests from the beginning by using art_data -s input_tests_undone.json --art --run"
    print "test status check done!"
    return tests_not_done
Example #18
def single_event_strain_visualization(path_to_data_dir, event):
    """
	this function plot the shear strain volumetric strain and displacement for a single event 
	"""
    if 'test' in event[0]:
        test_id = int(event[0][4:])
    else:
        test_id = int(event[0])
    path_to_test_dir = data_dir_to_test_dir(path_to_data_dir, test_id)
    path_to_test_result = path_to_test_dir + "/results"
    init, sad, fin = event[1][0], event[1][1], event[1][2]
    path_to_curr_event = path_to_test_result + "/event_" + init + "_" + sad + "_" + fin
    path_to_init_sad = path_to_curr_event + "/init_sad"
    path_to_sad_fin = path_to_curr_event + "/sad_fin"
    path_to_init_fin = path_to_curr_event + "/init_fin"

    path_to_init_sad_strain = path_to_init_sad + "/strain_results_dict.pkl"
    path_to_init_sad_displacement = path_to_init_sad + "/displacement_results_dict.pkl"

    path_to_sad_fin_strain = path_to_sad_fin + "/strain_results_dict.pkl"
    path_to_sad_fin_displacement = path_to_sad_fin + "/displacement_results_dict.pkl"

    path_to_init_fin_strain = path_to_init_fin + "/strain_results_dict.pkl"
    path_to_init_fin_displacement = path_to_init_fin + "/displacement_results_dict.pkl"

    init_sad_strain = pickle.load(open(path_to_init_sad_strain, 'r'))
    init_sad_displacement = pickle.load(
        open(path_to_init_sad_displacement, 'r'))

    sad_fin_strain = pickle.load(open(path_to_sad_fin_strain, 'r'))
    sad_fin_displacement = pickle.load(open(path_to_sad_fin_displacement, 'r'))

    init_fin_strain = pickle.load(open(path_to_init_fin_strain, 'r'))
    init_fin_displacement = pickle.load(
        open(path_to_init_fin_displacement, 'r'))

    init_sad_vol_strain, init_sad_shear_strain, init_sad_disp = event_strain_disp(
        init_sad_strain, init_sad_displacement)
    sad_fin_vol_strain, sad_fin_shear_strain, sad_fin_disp = event_strain_disp(
        sad_fin_strain, sad_fin_displacement)
    init_fin_vol_strain, init_fin_shear_strain, init_fin_disp = event_strain_disp(
        init_fin_strain, init_fin_displacement)

    path_to_init_sad_disp_strain = path_to_init_sad + '/disp_shear_strain.png'
    plot_2d_shear(path_to_init_sad_disp_strain, init_sad_disp,
                  init_sad_shear_strain)

    path_to_init_sad_disp_vol_strain = path_to_init_sad + '/disp_vol_strain.png'
    plot_2d_vol(path_to_init_sad_disp_vol_strain, init_sad_disp,
                init_sad_vol_strain)

    path_to_sad_fin_disp_strain = path_to_sad_fin + '/disp_shear_strain.png'
    plot_2d_shear(path_to_sad_fin_disp_strain, sad_fin_disp,
                  sad_fin_shear_strain)

    path_to_sad_fin_disp_vol_strain = path_to_sad_fin + '/disp_vol_strain.png'
    plot_2d_vol(path_to_sad_fin_disp_vol_strain, sad_fin_disp,
                sad_fin_vol_strain)

    path_to_init_fin_disp_strain = path_to_init_fin + '/disp_shear_strain.png'
    plot_2d_shear(path_to_init_fin_disp_strain, init_fin_disp,
                  init_fin_shear_strain)

    path_to_init_fin_disp_vol_strain = path_to_init_fin + '/disp_vol_strain.png'
    plot_2d_vol(path_to_init_fin_disp_vol_strain, init_fin_disp,
                init_fin_vol_strain)

    plot_histogram_3(path_to_curr_event + "/disp_histogram.png",
                     [init_sad_disp, sad_fin_disp, init_fin_disp])
    plot_histogram_3(
        path_to_curr_event + "/shear_strain_histogram.png",
        [init_sad_shear_strain, sad_fin_shear_strain, init_fin_shear_strain])
    plot_histogram_3(
        path_to_curr_event + "/vol_strain_histogram.png",
        [init_sad_vol_strain, sad_fin_vol_strain, init_fin_vol_strain])

    print "done plotting for the current event:" + event[
        0] + "/event_" + init + "_" + sad + "_" + fin
Example #19
def archived_file_names(path_to_data_dir):
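	"""
	collect the [file_name, path_to_file] pairs to archive for an art data project:
	final_selected_events.json, the other top-level files, the configuration files
	of every final selected event, and the results files of each involved test
	"""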
	path_to_final_selected_events = os.path.join(path_to_data_dir,"final_selected_events.json")
	#path_to_central_atom_list = os.path.join(path_to_data_dir,"central_atom_list.json")
	#path_to_int_atom_list = os.path.join(path_to_data_dir, "interested_atom_list.json")
	
	print "reading final_selected_events.json"
	if os.path.exists(path_to_final_selected_events):
		print "loading final_selected_events.json, ensure that you always get most updated final_selected_events.json by --filter --re_calc if you have calculated more tests"
		final_selected_events = json.load(open(path_to_final_selected_events,"r"))
	else:
		raise Exception("final_selected_events.json does not exist in %s"%path_to_data_dir)

	list_of_file_names = [["final_selected_events.json",path_to_final_selected_events]]
	
	#if os.path.exists(path_to_central_atom_list):
	#	print "archiving central_atom_list.json, ensure central_atom_list.json is updated by --update_input if using --art --run_more"
	#	central_file = ["central_atom_list.json",path_to_central_atom_list]
	#	list_of_file_names.append(central_file)
	
	#if os.path.exists(path_to_int_atom_list):
	#	print "archiving interested_atom_list.json"
	#	int_file = ["interested_atom_list.json",path_to_int_atom_list]
	#	list_of_file_names.append(int_file)
	file_dirs = os.listdir(path_to_data_dir)
	for f in file_dirs:
		if f == "final_selected_events.json" or f == "art_data_project.zip" or f == "central_atom_list.json":
			continue
		path_to_file = os.path.join(path_to_data_dir,f)
		if os.path.isfile(path_to_file):
			list_of_file_names.append([f,path_to_file])
			
	all_tests_id = []
	for event in final_selected_events:
		test_id = int(event[0][4:])
		all_tests_id.append(test_id)
		path_to_test_dir = data_dir_to_test_dir(path_to_data_dir,test_id)
		init, sad, fin = event[1][0],event[1][1],event[1][2]
		#path_to_init = os.path.join(path_to_test_dir,init)
		#path_to_sad = os.path.join(path_to_test_dir,sad)
		#path_to_fin = os.path.join(path_to_test_dir,fin)
		init_file = return_file_names(path_to_test_dir, init)
		sad_file = return_file_names(path_to_test_dir, sad)
		fin_file = return_file_names(path_to_test_dir, fin)
		list_of_file_names.append(init_file)
		list_of_file_names.append(sad_file)
		list_of_file_names.append(fin_file)
	
	final_tests_id = list(set(all_tests_id))
	# update central_atom_list.json
	print "update central_atom_list.json to contain all test ids saved in final_selected_events.json"
	print ">>> confirm updating central_atom_list.json file (y/n): save original central_atom_list.json if necessary"
	if prompt_yes_no() is True:
		print "response received, updating!"
		update_central_atom_list(path_to_data_dir,final_tests_id)
	else:
		print "response received, not updating"
	list_of_file_names.append(["central_atom_list.json", os.path.join(path_to_data_dir, "central_atom_list.json")])
	
	for test in final_tests_id:
		path_to_test_dir = data_dir_to_test_dir(path_to_data_dir,test)
		test_file = return_test_results_file_name(path_to_test_dir)
		list_of_file_names.extend(test_file)
	return list_of_file_names
Example #20
def strain_calculator_run_all_tests_mp(path_to_data_dir, input_param):
	"""
	this function run strain calculations on all tests under multiple processors
	"""
	cut_off_distance = input_param["cut_off"]
	box_dim = input_param['box_dim']
	#num_of_tests = input_param['num_of_tests']
	list_of_test_id = input_param['list_of_test_id']
	num_of_proc = input_param['num_of_proc']
	re_calc = input_param["re_calc"]
	# default None calculates all atoms;
	# for local mode, make it a dict like {"local": 8}; this is only used after
	# calculating all-atom strains and performing correlation modeling to find
	# the local atoms, then rerunning in local mode
	atom_list = input_param["atom_list"]
	
	tests_list = []
	#for i in xrange(num_of_tests+1):
	for i in list_of_test_id:
		try:
			path_to_curr_test = data_dir_to_test_dir(path_to_data_dir, i)
			tests_list.append(path_to_curr_test)
		except Exception:
			pass
	disp_ave, disp_std, disp_max , disp_ave_2, disp_std_2, disp_max_2,disp_ave_3, disp_std_3, disp_max_3 = [], [], [], [], [], [], [], [], []
	
	shear_ave, shear_std, shear_max, shear_ave_2, shear_std_2, shear_max_2,shear_ave_3, shear_std_3, shear_max_3 = [], [], [], [], [], [], [], [], []
	
	vol_ave, vol_std, vol_max, vol_ave_2, vol_std_2, vol_max_2,vol_ave_3, vol_std_3, vol_max_3 = [], [], [], [], [], [], [], [], []
	
	pool = mp.Pool(processes = num_of_proc)
	# pool.map(partial(func, const_kwarg=value), input_list) binds the constant
	# keyword arguments (see the sketch after this example);
	# pool.map preserves the order of its input
	result_list = pool.map(partial(strain_calculator_run_single_test,cut_off_distance=cut_off_distance, atom_list=atom_list, box_dim=box_dim, re_calc=re_calc), tests_list)
    
	for curr_test in result_list:
		# if this test does not have data and the strain calculation gives None,
		# skip this test
		if curr_test is None:
			continue
		for event in curr_test:
			init_sad = event[0]
			sad_fin = event[1]
			init_fin = event[2]
			# gather the statistics of init_sad, sad_fin and init_fin
			disp_ave.append(init_sad["ave"][2])
			disp_std.append(init_sad["std"][2])
			disp_max.append(init_sad["max"][2])
			
			shear_ave.append(init_sad["ave"][1])
			shear_std.append(init_sad["std"][1])
			shear_max.append(init_sad["max"][1])
			
			vol_ave.append(init_sad["ave"][0])
			vol_std.append(init_sad["std"][0])
			vol_max.append(init_sad["max"][0])
			
			disp_ave_2.append(sad_fin["ave"][2])
			disp_std_2.append(sad_fin["std"][2])
			disp_max_2.append(sad_fin["max"][2])
			
			shear_ave_2.append(sad_fin["ave"][1])
			shear_std_2.append(sad_fin["std"][1])
			shear_max_2.append(sad_fin["max"][1])
			
			vol_ave_2.append(sad_fin["ave"][0])
			vol_std_2.append(sad_fin["std"][0])
			vol_max_2.append(sad_fin["max"][0])
			
			disp_ave_3.append(init_fin["ave"][2])
			disp_std_3.append(init_fin["std"][2])
			disp_max_3.append(init_fin["max"][2])
			
			shear_ave_3.append(init_fin["ave"][1])
			shear_std_3.append(init_fin["std"][1])
			shear_max_3.append(init_fin["max"][1])
			
			vol_ave_3.append(init_fin["ave"][0])
			vol_std_3.append(init_fin["std"][0])
			vol_max_3.append(init_fin["max"][0])
			
	pickle.dump({"ave":disp_ave,"std":disp_std,"max":disp_max}, open(path_to_data_dir+"/init_sad_disp_stats.pkl",'w'))
	pickle.dump({"ave":shear_ave,"std":shear_std,"max":shear_max}, open(path_to_data_dir+"/init_sad_shear_stats.pkl",'w'))
	pickle.dump({"ave":vol_ave,"std":vol_std,"max":vol_max}, open(path_to_data_dir+"/init_sad_vol_stats.pkl",'w'))
	
	pickle.dump({"ave":disp_ave_2,"std":disp_std_2,"max":disp_max_2}, open(path_to_data_dir+"/sad_fin_disp_stats.pkl",'w'))
	pickle.dump({"ave":shear_ave_2,"std":shear_std_2,"max":shear_max_2}, open(path_to_data_dir+"/sad_fin_shear_stats.pkl",'w'))
	pickle.dump({"ave":vol_ave_2,"std":vol_std_2,"max":vol_max_2}, open(path_to_data_dir+"/sad_fin_vol_stats.pkl",'w'))
	
	pickle.dump({"ave":disp_ave_3,"std":disp_std_3,"max":disp_max_3}, open(path_to_data_dir+"/init_fin_disp_stats.pkl",'w'))
	pickle.dump({"ave":shear_ave_3,"std":shear_std_3,"max":shear_max_3}, open(path_to_data_dir+"/init_fin_shear_stats.pkl",'w'))
	pickle.dump({"ave":vol_ave_3,"std":vol_std_3,"max":vol_max_3}, open(path_to_data_dir+"/init_fin_vol_stats.pkl",'w'))
	
	plot_histogram_3(path_to_data_dir+"/disp_ave.png", [disp_ave,disp_ave_2,disp_ave_3])
	plot_histogram_3(path_to_data_dir+"/disp_std.png", [disp_std,disp_std_2,disp_std_3])
	plot_histogram_3(path_to_data_dir+"/disp_max.png", [disp_max,disp_max_2,disp_max_3])
	
	plot_histogram_3(path_to_data_dir+"/shear_ave.png", [shear_ave,shear_ave_2,shear_ave_3])
	plot_histogram_3(path_to_data_dir+"/shear_std.png", [shear_std,shear_std_2,shear_std_3])
	plot_histogram_3(path_to_data_dir+"/shear_max.png", [shear_max,shear_max_2,shear_max_3])
	
	plot_histogram_3(path_to_data_dir+"/vol_ave.png", [vol_ave,vol_ave_2,vol_ave_3])
	plot_histogram_3(path_to_data_dir+"/vol_std.png", [vol_std,vol_std_2,vol_std_3])
	plot_histogram_3(path_to_data_dir+"/vol_max.png", [vol_max,vol_max_2,vol_max_3])	
	print "done!"