def avg_feature(timestep):
    """Calculate the average RDF and structure factor over all dump files
    for a particular timestep.

    @param :timestep: timestep of interest (0..50000 for the 'young' data
        set, 4000000..5000000 for the 'old' data set)
    @return: tuple (rdf, SF) of numpy arrays, each averaged over all
        matching dump files
    @raises ValueError: if no data exists for the timestep, the timestep is
        not a multiple of the dump interval, or no dump files are found
    """
    # initialization: map the absolute timestep onto the matching data set
    if timestep >= 0 and timestep <= 50000:
        iterations = 50000
        dump_interval = 50
        directory = ".\\dump\\young\\"
        extension = ".YOUNG"
    elif timestep >= 4000000 and timestep <= 5000000:
        iterations = 1000000
        dump_interval = 1000
        # shift into the local frame of the 'old' dump files
        timestep -= 4000000
        directory = ".\\dump\\old\\"
        extension = ".OLD"
    else:
        raise ValueError("No data available for this timestep")
    if timestep % dump_interval != 0:
        raise ValueError("Timestep has to be dividible by the dump_interval")
    # convert the timestep into a dump-frame index
    timestep = int(timestep / dump_interval)
    amount = 0
    rdf = None
    SF = None
    # iterate through dump files
    for file in os.listdir(os.fsencode(directory)):
        filename = os.fsdecode(file)
        if filename.endswith(extension):
            # progress indicator, overwritten in place via '\r'
            print("Calculating file " + str(amount) + ' for timestep ' +
                  str(timestep), sep=' ', end='\r', file=sys.stdout,
                  flush=False)
            _, types, _, _, _, _, Data = read_data(
                directory + filename, iterations, dump_interval)
            rdf_new = np.asarray(calc_rdf(Data[0, timestep], types[timestep]))
            if amount == 0:
                rdf = rdf_new
                SF = np.asarray(calc_SF(rdf_new))
            else:
                rdf += rdf_new
                # BUG FIX: the structure factor must be derived from the
                # current file's rdf (rdf_new), not from the running sum of
                # all rdfs seen so far
                SF += np.asarray(calc_SF(rdf_new))
            amount += 1
    # previously an empty directory produced an UnboundLocalError /
    # ZeroDivisionError here; fail with a clear message instead
    if amount == 0:
        raise ValueError("No dump files found in " + directory)
    rdf /= amount
    SF /= amount
    return rdf, SF
def avg_voronoi(timestep):
    """Return the voronoi area and the amount of voronoi edges for each
    particle, averaged over all dump files for the given timestep.

    @param :timestep: timestep of interest (0..50000 for the 'young' data
        set, 4000000..5000000 for the 'old' data set)
    @return: tuple (voronoi_area, voronoi_amount) of numpy arrays averaged
        over all matching dump files
    @raises ValueError: if no data exists for the timestep, the timestep is
        not a multiple of the dump interval, or no dump files are found
    """
    # initialization: map the absolute timestep onto the matching data set
    if timestep >= 0 and timestep <= 50000:
        iterations = 50000
        dump_interval = 50
        directory = ".\\dump\\young\\"
        extension = ".YOUNG"
    elif timestep >= 4000000 and timestep <= 5000000:
        # shift into the local frame of the 'old' dump files
        timestep -= 4000000
        iterations = 1000000
        dump_interval = 1000
        directory = ".\\dump\\old\\"
        extension = ".OLD"
    else:
        raise ValueError("No data available for this timestep")
    if timestep % dump_interval != 0:
        raise ValueError("Timestep has to be dividible by the dump_interval")
    # convert the timestep into a dump-frame index
    timestep = int(timestep / dump_interval)
    amount = 0
    voronoi_area = None
    voronoi_amount = None
    # iterate through dump files
    for file in os.listdir(os.fsencode(directory)):
        filename = os.fsdecode(file)
        if filename.endswith(extension):
            # progress indicator, overwritten in place via '\r'
            print("Calculating file " + str(amount) + ' for timestep ' +
                  str(timestep * dump_interval), sep=' ', end='\r',
                  file=sys.stdout, flush=False)
            _, types, _, _, vor_area, vor_amn, Data = read_data(
                directory + filename, iterations, dump_interval)
            if amount == 0:
                # copy into float arrays: the original aliased read_data's
                # buffers (so += mutated them in place) and the final
                # in-place /= would fail on integer dtype
                voronoi_area = np.asarray(vor_area[timestep], dtype=float).copy()
                voronoi_amount = np.asarray(vor_amn[timestep], dtype=float).copy()
            else:
                voronoi_area += vor_area[timestep]
                voronoi_amount += vor_amn[timestep]
            amount += 1
    # previously an empty directory produced an UnboundLocalError here;
    # fail with a clear message instead
    if amount == 0:
        raise ValueError("No dump files found in " + directory)
    voronoi_area /= amount
    voronoi_amount /= amount
    return voronoi_area, voronoi_amount
def calc_rdf_cutoff(age):
    """Compute the mean RDF-minimum cutoff distances for the AA, BB and AB
    particle pairs, averaged over all timesteps and all dump files of one
    age group.

    @param :age: data set selector, either 'young' or 'old'
    @return: tuple (AA_cutoff, BB_cutoff, AB_cutoff), each the mean RDF
        minimum bin index scaled by the bin width params['dr']
    @raises ValueError: if age is neither 'young' nor 'old'
    """
    dr = params['dr']
    if age == 'young':
        directory = ".\\dump\\young\\"
        file_ending = ".YOUNG"
        iterations = 50000
        dump_interval = 50
    elif age == 'old':
        directory = ".\\dump\\old\\"
        file_ending = ".OLD"
        iterations = 1000000
        dump_interval = 1000
    else:
        # previously an unknown age fell through with unbound locals and
        # raised an opaque NameError below
        raise ValueError("age must be 'young' or 'old'")
    grAA, grBB, grAB = [], [], []
    for file in os.listdir(os.fsencode(directory)):
        filename = os.fsdecode(file)
        if filename.endswith(file_ending):
            print('calculating', filename)
            # load data -- read_data returns 7 values (see avg_feature /
            # avg_voronoi); the previous 5-value unpack would have raised
            # a ValueError on every call
            _, types, _, _, _, _, Data = read_data(
                directory + filename, iterations, dump_interval)
            posData = Data[0]
            # get minimum for each timestep
            grAA_amin, grBB_amin, grAB_amin = calc_rdf_minimum(posData, types)
            # get mean of cutoff for different times
            grAA.append(grAA_amin.mean())
            grBB.append(grBB_amin.mean())
            grAB.append(grAB_amin.mean())
    grAA = np.array(grAA)
    grBB = np.array(grBB)
    grAB = np.array(grAB)
    # get mean of cutoff for different files, converted from bin index to
    # distance via the bin width dr
    AA_cutoff = np.mean(grAA) * dr
    BB_cutoff = np.mean(grBB) * dr
    AB_cutoff = np.mean(grAB) * dr
    return AA_cutoff, BB_cutoff, AB_cutoff
# NOTE(review): this script fragment is truncated -- the trailing
# "else: if rdf:" branch is cut off mid-statement, so the non-first-file
# accumulation path is missing from this view; left byte-identical rather
# than guessing at the missing body. Also `iterations` and `dump_interval`
# are not defined anywhere in this fragment -- presumably module-level
# globals; TODO confirm against the full file.
rdf = True q6 = False directory = ".\\dump\\young\\" extension = ".YOUNG" amount = 0 # iterate through all dump files for file in os.listdir(os.fsencode(directory)): filename = os.fsdecode(file) if filename.endswith(extension): savename = './/saves//' + filename.split(".")[0] + '-rdf.npy' # retrieve and process data timesteps, types, q6_re, q6_im, Data = read_data(directory + filename, iterations, dump_interval) Data = {'position': Data[0, :1], 'force': Data[1], 'q6_re': q6_re, 'q6_im': q6_im} types = types[:1] # declare variable for the first run if amount == 0: if rdf: gr = save_load(lambda: calc_avg_rdf(Data['position'], types), savename) if q6: mean_q6 = np.asarray((calc_mean(Data['q6_re']), calc_mean(Data['q6_im']))) var_q6 = np.asarray((calc_variance(Data['q6_re'], mean_q6[0]), calc_variance(Data['q6_im'], mean_q6[1]))) else: if rdf:
def extract_features(directory, filename, iterations, dump_interval):
    """Build the per-particle feature matrix for a single dump file.

    Reads the dump file, derives statistical descriptors (nearest-neighbour
    distance/amount, force, q6 order parameter, voronoi area/edge-count and
    voronoi peaks) and stacks them column-wise into one array.

    @param :directory: folder containing the dump file
    @param :filename: name of the dump file inside directory
    @param :iterations: number of iterations covered by the file
    @param :dump_interval: interval between dumps
    @return: 2D numpy array -- one row per particle, one column per feature
    """
    timesteps, types, q6_re, q6_im, vor_area, vor_amn, Data = read_data(
        directory + filename, iterations, dump_interval)
    position = Data[0]
    force = Data[1]
    savename = './/saves//' + filename.split(".")[0]

    # mean and variance of the nearest-neighbour distance and neighbour
    # amount per timestep; both are cached on disk through save_load
    mnn_distance, mnn_amount = save_load(
        lambda: mean_nn(position, 1), savename + '-mean_nn.npy')
    vnn_distance, vnn_amount = save_load(
        lambda: variance_nn(position, mnn_distance, mnn_amount, 1),
        savename + '-var_nn.npy')

    # mean and variance of the force norm
    mean_force = calc_mean(force)
    variance_force = calc_variance(force, mean_force)

    # mean and variance of the real and imaginary q6 parameters
    mean_q6_re = calc_mean(q6_re)
    variance_q6_re = calc_variance(q6_re, mean_q6_re)
    mean_q6_im = calc_mean(q6_im)
    variance_q6_im = calc_variance(q6_im, mean_q6_im)

    # mean and variance of the voronoi area and of the voronoi side amount
    mean_vor_area = calc_mean(vor_area)
    variance_vor_area = calc_variance(vor_area, mean_vor_area)
    mean_vor_amn = calc_mean(vor_amn)
    variance_vor_amn = calc_variance(vor_amn, mean_vor_amn)

    # magnitude and count of the two largest peaks of the voronoi area and
    # voronoi amount distributions
    (area_peak1_count, area_peak2_count, area_peak1_mag, area_peak2_mag,
     amount_peak1_count, amount_peak2_count, amount_peak1_mag,
     amount_peak2_mag) = calc_voronoi_peaks(timesteps, vor_area, vor_amn)

    # assemble every descriptor into a single feature matrix; the column
    # order is part of the contract with downstream consumers
    return np.column_stack([
        mnn_distance, vnn_distance, mean_force, variance_force, mnn_amount,
        vnn_amount, mean_q6_re, variance_q6_re, mean_q6_im, variance_q6_im,
        mean_vor_area, variance_vor_area, mean_vor_amn, variance_vor_amn,
        area_peak1_count, area_peak2_count, area_peak1_mag, area_peak2_mag,
        amount_peak1_count, amount_peak2_count, amount_peak1_mag,
        amount_peak2_mag
    ])