def GENERATE_RANDOM_EXPANDERS(K, size_H, EPSILON, samples):
    NAME = '[RANDOM' + str(K) + ']'
    print NAME + " Generating " + str(samples) + " H (adjacency list matrices) of size " + str(size_H) + " x " + str(K) + " ... "
    print "\n"

    # Average the second highest eigenvalue over all sampled random graphs
    eigenvalue = 0
    for sampling in range(samples):
        print NAME + " ## " + str(sampling) + " // " + str(samples) + " ## "
        H = generate_expander(K, size_H)
        eigenvalue_aux = helpers.generate_eigenvalue(H, size_H, K, EPSILON, NAME)
        eigenvalue += eigenvalue_aux

    eigenvalue = eigenvalue / float(samples)  # float() guards against Python 2 integer division
    print NAME + " Calculated average of second highest eigenvalue for " + str(samples) + " matrices H."
    helpers.write_result(NAME, size_H, K, eigenvalue)
    helpers.cleanup(".aux")
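# ----------------------------------------------------------------------------
# Illustrative sketch only: the real generate_expander() used above is defined
# elsewhere in this repo. Assuming it builds a random K-regular multigraph by
# stacking K random perfect matchings on size_H nodes (size_H must be even,
# which holds since callers pass 2 * size), a minimal version could look like
# this; numpy is assumed to be imported at module level.
def generate_expander_sketch(K, size_H):
    H = numpy.empty(shape=(size_H, K), dtype=numpy.int32)
    for k in range(K):
        perm = numpy.random.permutation(size_H)  # random pairing of the nodes
        for idx in range(0, size_H, 2):
            a, b = perm[idx], perm[idx + 1]
            H[a][k] = b  # a's k-th neighbour is b
            H[b][k] = a  # and vice-versa
    return H
# ----------------------------------------------------------------------------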
def GENERATE_ANGLUIN_EXPANDERS(size, A_indices, n, EPSILON):
    # NAME and K are module-level constants (this construction wires K = 3
    # neighbours per node).
    size_H = 2 * size
    print NAME + " Generating H (adjacency list matrix) of size " + str(size_H) + " x " + str(K) + " ... "
    H = numpy.empty(shape=(size_H, K), dtype=numpy.int32)  # Generate H, empty adjacency list matrix

    for row in A_indices:
        for element_index in row:
            # Recover the tuple (x0, y0) from the matrix of indices (A)
            x0 = element_index / n  # Grab first value (Python 2 integer division)
            y0 = element_index % n  # Grab second value
            i = element_index       # Grab the index of the (x0, y0) element

            # connect to (x, y) in B
            x = x0
            y = y0
            j = (x * n + y % n) + size  # add the shift into B's half of H
            H[i][0] = j  # node with index i is connected to node with index j
            H[j][0] = i  # vice-versa

            # connect to (x + y, y) in B
            x = (x0 + y0) % n
            y = y0
            j = (x * n + y % n) + size
            H[i][1] = j
            H[j][1] = i

            # connect to (y + 1, -x) in B
            x = (y0 + 1) % n
            y = (-x0) % n
            j = (x * n + y % n) + size
            H[i][2] = j
            H[j][2] = i

    print NAME + " Generated adjacency list matrix H."
    print NAME + " Calculating second highest eigenvalue of H ... "
    eigenvalue = helpers.generate_eigenvalue(H, size_H, K, EPSILON, NAME)
    print NAME + " Calculated second highest eigenvalue of H."
    helpers.write_result(NAME, size_H, K, eigenvalue)
    helpers.cleanup(".aux")
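# ----------------------------------------------------------------------------
# Worked example (illustrative, not part of the pipeline): for n = 3, the node
# with index 5 in A corresponds to the pair (x0, y0) = (5 / 3, 5 % 3) = (1, 2),
# and its first neighbour lives in B's half of H, shifted by size = n * n.
n_example = 3
size_example = n_example * n_example
element_index = 5
x0 = element_index / n_example  # 1 (Python 2 integer division)
y0 = element_index % n_example  # 2
j = (x0 * n_example + y0 % n_example) + size_example  # (1*3 + 2) + 9 = 14
assert (x0, y0, j) == (1, 2, 14)
# ----------------------------------------------------------------------------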
def main():
    logger.info("Starting data extraction for self-stabilization overhead experiment")
    exec_time_series = api.get_time_series_for_q(Q_EXEC_TIME)
    msgs_sent_time_series = api.get_time_series_for_q(Q_MSGS_SENT)
    bytes_sent_time_series = api.get_time_series_for_q(Q_BYTES_SENT)

    if len(exec_time_series) == 0:
        logger.warning("No results found, quitting")
        return

    data_points = transform(exec_time_series, msgs_sent_time_series, bytes_sent_time_series)
    csv_path = helpers.write_to_csv(EXPERIMENT, data_points)
    snapshot_path = helpers.get_snapshot()
    helpers.collect_to_res_folder(EXPERIMENT, [csv_path, snapshot_path])
    helpers.cleanup()
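# ----------------------------------------------------------------------------
# Illustrative sketch only: the real transform() for this experiment is
# defined alongside main(). Assuming the three time series align index by
# index (as the convergence-latency transform below does), it would zip them
# into the dict shape that helpers.write_to_csv() expects; the key names here
# are placeholders.
def transform_sketch(exec_ts, msgs_ts, bytes_ts):
    data_points = []
    for i in range(len(exec_ts)):
        data_points.append({
            "exec_time": float(exec_ts[i]["value"][1]),
            "msgs_sent": int(msgs_ts[i]["value"][1]),
            "bytes_sent": int(bytes_ts[i]["value"][1]),
        })
    return data_points
# ----------------------------------------------------------------------------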
def run():
    """
    Main entry point: downloads each country's daily wind forecast CSV from
    the FTP server, converts it to JSON and publishes it to Kafka.
    """
    logging.info('Wind Forecast Process started')
    # file_date = str(datetime.date.today())
    file_date = str(datetime.date.today() - datetime.timedelta(1))  # yesterday's file
    ftp = FTP(settings.FTP_HOST, settings.FTP_USER, settings.FTP_PASS)
    logging.info('Wind Forecast Process: FTP Connection Established')
    ftp.cwd(settings.FTP_DIRECTORY)
    files_prefix_country = settings.FILES_PREFIX

    for file_prefix_country in files_prefix_country:
        file_prefix = file_prefix_country.split(':')[0]
        file_name = file_prefix + file_date + ".CSV"
        country = file_prefix_country.split(':')[1]
        status = 1
        while status == 1:
            if file_name in ftp.nlst():
                logging.info('Wind Forecast Process: ' + file_name + ' is available in ftp server')
                helpers.download_from_ftp(ftp, file_name)
                formatted_json = helpers.convert_csv_to_json(file_name, country)
                helpers.produce_msg_to_kafka(settings.BOOTSTRAP_SERVER,
                                             settings.KAFKA_TOPIC,
                                             formatted_json)
                helpers.cleanup(file_name)
                status = 0
            else:
                logging.info('Wind Forecast Process: ' + file_name + ' is not available in ftp server.. will check again')
                time.sleep(settings.SLEEPER_TIME)
                # Re-queue the full "prefix:country" entry for a later pass;
                # appending only the prefix (as the code previously did) would
                # crash on split(':')[1] when the entry comes around again.
                files_prefix_country.append(file_prefix_country)
                status = 0

    ftp.close()
    logging.info('Wind Forecast Process: FTP Connection Closed')
    logging.info('Wind Forecast Process finished')
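# ----------------------------------------------------------------------------
# Illustrative sketch only: the real helpers.convert_csv_to_json() lives in
# helpers.py. Assuming the downloaded CSV has a header row, a minimal version
# would tag each row with its country and serialise the batch to JSON.
import csv
import json

def convert_csv_to_json_sketch(file_name, country):
    with open(file_name) as f:
        rows = [dict(row, country=country) for row in csv.DictReader(f)]
    return json.dumps(rows)
# ----------------------------------------------------------------------------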
    for i in range(len(conv_lat_asc)):
        # if int(conv_lat_asc[i]["metric"]["view"]) != int(msgs_sent_asc[i]["metric"]["exp_param"]):
        #     raise ValueError("Results not matching")
        conv_lat = str(float(conv_lat_asc[i]["value"][1])).replace(".", ",")  # decimal comma for the CSV
        view = int(conv_lat_asc[i]["metric"]["view"])
        msgs_sent = int(msgs_sent_asc[i]["value"][1])
        bts_sent = int(bts_sent_asc[i]["value"][1])
        # Build key:val pairs for the data point
        data_points.append({
            "old_view": view,
            "conv_lat": conv_lat,
            "msgs_sent": msgs_sent,
            "bytes_sent": bts_sent
        })
    return data_points


if __name__ == "__main__":
    logger.info("Starting data extraction for convergence latency experiment")
    conv_lat_time_series = api.get_time_series_for_q(Q_CONV_LAT)
    msgs_sent_time_series = api.get_time_series_for_q(Q_MSGS_SENT)
    bytes_sent_time_series = api.get_time_series_for_q(Q_BYTES_SENT)
    data_points = transform(conv_lat_time_series, msgs_sent_time_series, bytes_sent_time_series)
    csv_path = helpers.write_to_csv(EXPERIMENT, data_points)
    snapshot_path = helpers.get_snapshot()
    helpers.collect_to_res_folder(EXPERIMENT, [csv_path, snapshot_path])
    helpers.cleanup()
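# ----------------------------------------------------------------------------
# Illustrative only: the per-element shape transform() assumes for each time
# series, matching the Prometheus instant-query result format ("value" is a
# [timestamp, string_value] pair and labels live under "metric"); the label
# values below are placeholders.
example_element = {
    "metric": {"view": "3", "exp_param": "3"},
    "value": [1653489121.0, "42.7"],
}
assert str(float(example_element["value"][1])).replace(".", ",") == "42,7"
# ----------------------------------------------------------------------------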
# random5_results = { RANDOM_5: {} }

print "Starting main program ... \n\n"

# Run only if there is any algorithm configured to run
if check_configured_run(methods.ANGLUIN, methods.MARGULIS, methods.AJTAI,
                        methods.RANDOM_3, methods.RANDOM_5):

    # Load configuration parameters
    config_file = open("config.yaml", "r")
    config_vars = yaml.safe_load(config_file)
    config_file.close()

    # Clean existing .results files
    if config_vars['params']['clear_results_files'] == True:
        cleanup(".results")

    for v in VALUES:
        config_vars['params']['n'] = v  # update the new n value

        # Write the updated yaml dictionary back to the config file
        config_file = open("config.yaml", "w")
        config_file.write(yaml.dump(config_vars, default_flow_style=False))
        config_file.close()

        # Call the main method that runs the generating algorithms
        print "\n\n--------------------------------------------------"
        print "**** n = " + str(v) + " ****\n"
        generate_expanders()
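# ----------------------------------------------------------------------------
# Illustrative config.yaml shape this script reads and rewrites. Values are
# placeholders and the algorithm key names depend on the methods.* constants;
# only keys referenced here and in generate_expanders() are shown:
#
# params:
#   n: 10
#   epsilon: 0.01
#   c: 2
#   cleanup: true
#   clear_results_files: true
#   output_indices_matrices: false
#   output_initializer_matrices: false
#   random_graphs_samples: 5
# algorithms:
#   angluin: true
#   margulis: true
#   ajtai: false
#   random_3: true
#   random_5: false
# ----------------------------------------------------------------------------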
def generate_expanders():
    # Read n from configuration file
    config_file = open("config.yaml", "r")
    config_vars = yaml.safe_load(config_file)
    config_file.close()

    # Clean existing .out files
    if config_vars['params']['cleanup'] == True:
        helpers.cleanup(".out")

    n = config_vars['params']['n']
    EPSILON = config_vars['params']['epsilon']

    # Generate Z(n)
    # Z_n = list(xrange(n))
    size = n * n

    # Generate matrices A and B only if the algorithms that require them are
    # configured to run
    if helpers.check_configured_run(methods.ANGLUIN, methods.MARGULIS):
        print "Generating matrices A and B with n = " + str(n) + " ... "

        # Generate the elements of A and B using indices from the cross product
        indices_of_pairs = numpy.arange(size)  # array of indices of the cross product
        A_indices = numpy.random.permutation(indices_of_pairs).reshape((n, n))  # randomize into matrix positions
        B_indices = numpy.random.permutation(indices_of_pairs).reshape((n, n))  # randomize into matrix positions

        if config_vars['params']['output_indices_matrices'] == True:
            helpers.write_indices_matrices(A_indices, B_indices)

        if config_vars['params']['output_initializer_matrices'] == True:
            A, B = helpers.generate_pair_matrices(A_indices, B_indices, n)
            helpers.write_pair_matrices(A, B)

        print "Generated matrices A and B."

    # EXPLICIT ALGORITHMS
    if config_vars['algorithms'][methods.ANGLUIN] == True:
        print ''
        algorithms.EXPLICIT_METHOD(method_name=methods.ANGLUIN, size=size,
                                   A_indices=A_indices, n=n, EPSILON=EPSILON)

    if config_vars['algorithms'][methods.MARGULIS] == True:
        print ''
        algorithms.EXPLICIT_METHOD(method_name=methods.MARGULIS, size=size,
                                   A_indices=A_indices, n=n, EPSILON=EPSILON)

    if config_vars['algorithms'][methods.AJTAI] == True:
        print ''
        c = config_vars['params']['c']
        s = c * numpy.log(n)
        algorithms.EXPLICIT_METHOD(method_name=methods.AJTAI, size=2 * size,
                                   EPSILON=EPSILON, s=s)

    # RANDOM ALGORITHMS
    if config_vars['algorithms'][methods.RANDOM_3] == True:
        print ''
        samples = config_vars['params']['random_graphs_samples']
        algorithms.RANDOM_METHOD(methods.RANDOM_3, 2 * size, EPSILON, samples)

    if config_vars['algorithms'][methods.RANDOM_5] == True:
        print ''
        samples = config_vars['params']['random_graphs_samples']
        algorithms.RANDOM_METHOD(methods.RANDOM_5, 2 * size, EPSILON, samples)
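# ----------------------------------------------------------------------------
# Illustrative sketch only: one plausible shape for algorithms.EXPLICIT_METHOD,
# dispatching on method_name to the matching GENERATE_* function above (the
# real dispatcher lives in algorithms.py; the MARGULIS and AJTAI branches
# would be analogous).
def EXPLICIT_METHOD_sketch(method_name, **kwargs):
    if method_name == methods.ANGLUIN:
        GENERATE_ANGLUIN_EXPANDERS(kwargs['size'], kwargs['A_indices'],
                                   kwargs['n'], kwargs['EPSILON'])
    # elif method_name == methods.MARGULIS: ...
    # elif method_name == methods.AJTAI: ...
# ----------------------------------------------------------------------------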
def cleanup():
    helpers.cleanup()