import argparse
import copy
import json

import numpy as np
import pandas as pd
from funcx import FuncXClient

# Helpers assumed from elsewhere in this module:
# get_random_str, get_function_ids, wait_batch, plot.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', required=True)
    parser.add_argument('--span', default=1.0, type=float,
                        help='Parameter sweep range (should be even)')
    parser.add_argument('--delta', default=0.1, type=float,
                        help='Size of parameter sweep increments')
    parser.add_argument('--significant_digits', default=1, type=int,
                        help='Number of decimal digits kept in beamx/beamy sweep values')
    args = parser.parse_args()

    significant_digits = args.significant_digits
    delta = args.delta
    span = args.span
    span_half = span / 2

    with open(args.config) as config_json:
        config = json.load(config_json)
    beamx = float(config['beamx'])
    beamy = float(config['beamy'])
    endpoint = config['endpoint']
    endpoint_local = config['endpoint_local']

    # Build the sweep grid centered on (beamx, beamy).
    x_values = np.round(np.arange(beamx - span_half, beamx + span_half + delta, delta),
                        decimals=significant_digits)
    y_values = np.round(np.arange(beamy - span_half, beamy + span_half + delta, delta),
                        decimals=significant_digits)

    # One config per (x, y) grid point, each tagged with a unique suffix.
    phil_data = []
    for x in x_values:
        for y in y_values:
            new_config = copy.deepcopy(config)
            new_config['beamx'] = x
            new_config['beamy'] = y
            new_config['suffix'] = get_random_str()
            phil_data.append(new_config)

    fxc = FuncXClient()
    fxc.throttling_enabled = False
    fxid_create_phil, fxid_stills_process, fxid_count_ints = get_function_ids(fxc)

    # Phil files
    print("Running funcx_create_phil")
    phil_batch = fxc.create_batch()
    for phil in phil_data:
        phil_batch.add(phil, endpoint_id=endpoint_local, function_id=fxid_create_phil)
    phil_job = fxc.batch_run(phil_batch)
    wait_batch(fxc, phil_job)

    # Stills process
    print("\nRunning funcx_stills_process")
    stills_batch = fxc.create_batch()
    for phil in phil_data:
        stills_batch.add(phil, endpoint_id=endpoint, function_id=fxid_stills_process)
    stills_job = fxc.batch_run(stills_batch)
    wait_batch(fxc, stills_job)

    # Count ints
    print("\nRunning funcx_count_ints")
    combined_df = pd.DataFrame()
    count_batch = fxc.create_batch()
    for phil in phil_data:
        count_batch.add(phil, endpoint_id=endpoint, function_id=fxid_count_ints)
    count_job = fxc.batch_run(count_batch)
    count_results = wait_batch(fxc, count_job)

    # Create CSV and heatmap; each result tuple carries its DataFrame at index 2.
    for df in count_results:
        if combined_df.empty:
            combined_df = df[2]
        else:
            combined_df = pd.concat([combined_df, df[2]], axis=0)
    combined_df.sort_values(['X', 'Y'], ascending=[True, True], inplace=True)
    combined_df.to_csv("ints.csv", index=False)
    plot(combined_df)
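# --- Sketch (assumption) ----------------------------------------------
# wait_batch is defined elsewhere in this repo; a minimal version,
# assuming the get_batch_result/get_result calls used in the later
# examples, could look like this:
import time


def wait_batch(fxc, task_ids, poll_interval=5):
    """Block until every task in the batch has finished, then return results."""
    while True:
        statuses = fxc.get_batch_result(task_ids)
        if not any(s.get('pending', False) for s in statuses.values()):
            break
        time.sleep(poll_interval)
    return [fxc.get_result(t) for t in task_ids]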
import time

from funcx import FuncXClient

fx = FuncXClient()
ep_id = '4b116d3c-1703-4f8f-9f6f-39921e5864df'
print("FN_UUID : ", func_ids)

start = time.time()
task_count = 5
batch = fx.create_batch()
for func_id in func_ids:
    for i in range(task_count):
        batch.add(i, i + 1, c=i + 2, d=i + 3,
                  endpoint_id=ep_id, function_id=func_id)
task_ids = fx.batch_run(batch)
delta = time.time() - start
print("Time to launch {} tasks: {:8.3f} s".format(task_count * len(func_ids), delta))
print("Got {} task_ids".format(len(task_ids)))

for i in range(10):
    x = fx.get_batch_status(task_ids)
    # A task is complete once its status is no longer pending.
    complete_count = sum(
        1 for t in task_ids if t in x and not x[t].get('pending', False))
    print("Batch status : {}/{} complete".format(complete_count, len(task_ids)))
    if complete_count == len(task_ids):
        print(x)
        break
    time.sleep(2)  # brief pause between polls
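# --- Sketch (assumption) ----------------------------------------------
# func_ids above is assumed to hold UUIDs of previously registered
# functions. Registration might look like this; add4 is a hypothetical
# function matching the batch.add(i, i + 1, c=i + 2, d=i + 3) call:
def add4(a, b, c=0, d=0):
    return a + b + c + d


func_ids = [fx.register_function(add4)]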
import time

import numpy as np
import proxystore as ps

# Assumed from earlier in the script: fxc (a FuncXClient), args (parsed
# CLI options: proxy, redis_port, num_arrays, size, funcx_endpoint),
# and the registered function UUIDs double_uuid and sum_uuid.

if args.proxy:
    store = ps.store.init_store(
        "redis",
        hostname="127.0.0.1",
        port=args.redis_port,
    )

batch = fxc.create_batch()
for _ in range(args.num_arrays):
    x = np.random.rand(args.size, args.size)
    if args.proxy:
        # Pass a lightweight proxy instead of shipping the array inline.
        x = store.proxy(x)
    batch.add(x, endpoint_id=args.funcx_endpoint, function_id=double_uuid)
batch_res = fxc.batch_run(batch)

# Poll until every task in the batch has finished (the status dicts must
# be re-fetched each iteration, or a pending task would spin forever).
mapped_results = fxc.get_batch_result(batch_res)
while any(res["pending"] for res in mapped_results.values()):
    time.sleep(0.1)
    mapped_results = fxc.get_batch_result(batch_res)

mapped_results = [fxc.get_result(task_id) for task_id in mapped_results]

if args.proxy:
    mapped_results = store.proxy(mapped_results)

# fxc.run returns a task id; the final sum would be fetched with fxc.get_result.
total = fxc.run(
    mapped_results,
    endpoint_id=args.funcx_endpoint,
    function_id=sum_uuid,
)
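# --- Sketch (assumption) ----------------------------------------------
# The functions behind double_uuid and sum_uuid are not shown; they
# might be registered like this. ProxyStore proxies resolve
# transparently when the remote function first uses them, so the same
# function bodies work with and without --proxy.
def double(x):
    return 2 * x


def sum_arrays(arrays):
    import numpy as np
    return np.sum([np.asarray(a) for a in arrays])


double_uuid = fxc.register_function(double)
sum_uuid = fxc.register_function(sum_arrays)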