Example No. 1
import argparse
import copy
import json

import numpy as np
import pandas as pd
from funcx import FuncXClient

# get_function_ids, get_random_str, wait_batch, and plot are helper functions
# defined elsewhere in the original script; an illustrative sketch of two of
# them follows the example.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', required=True)
    parser.add_argument('--span',
                        default=1.0,
                        type=float,
                        help='Parameter sweep range (should be even)')
    parser.add_argument('--delta',
                        default=0.1,
                        type=float,
                        help='Size of parameter sweep increments')
    parser.add_argument('--significant_digits',
                        default=1,
                        type=int,
                        help='Decimal places to round beamx/beamy sweep values to')
    args = parser.parse_args()

    significant_digits = args.significant_digits
    delta = args.delta
    span = args.span
    span_half = span / 2

    with open(args.config) as config_json:
        config = json.load(config_json)
        beamx = float(config['beamx'])
        beamy = float(config['beamy'])
        endpoint = config['endpoint']
        endpoint_local = config['endpoint_local']

    # Grid of beam-center positions to sweep, rounded to the requested precision
    x_values = np.round(np.arange(beamx - span_half, beamx + span_half + delta,
                                  delta),
                        decimals=significant_digits)
    y_values = np.round(np.arange(beamy - span_half, beamy + span_half + delta,
                                  delta),
                        decimals=significant_digits)

    # One sweep configuration per (x, y) grid point
    phil_data = []

    for x in x_values:
        for y in y_values:
            new_config = copy.deepcopy(config)
            new_config['beamx'] = x
            new_config['beamy'] = y
            new_config['suffix'] = get_random_str()
            phil_data.append(new_config)

    fxc = FuncXClient()
    fxc.throttling_enabled = False
    fxid_create_phil, fxid_stills_process, fxid_count_ints = get_function_ids(
        fxc)

    # Phil files
    print("Running funcx_create_phil")
    phil_batch = fxc.create_batch()
    for phil in phil_data:
        phil_batch.add(phil,
                       endpoint_id=endpoint_local,
                       function_id=fxid_create_phil)
    phil_job = fxc.batch_run(phil_batch)
    wait_batch(fxc, phil_job)

    # Stills process
    print("\nRunning funcx_stills_process")
    stills_batch = fxc.create_batch()
    for phil in phil_data:
        stills_batch.add(phil,
                         endpoint_id=endpoint,
                         function_id=fxid_stills_process)
    stills_job = fxc.batch_run(stills_batch)
    wait_batch(fxc, stills_job)

    # Count ints
    print("\nRunning funcx_count_ints")
    count_batch = fxc.create_batch()
    for phil in phil_data:
        count_batch.add(phil,
                        endpoint_id=endpoint,
                        function_id=fxid_count_ints)
    count_job = fxc.batch_run(count_batch)
    count_results = wait_batch(fxc, count_job)

    # Create CSV and heatmap from the per-task DataFrames
    # (the third element of each result)
    combined_df = pd.DataFrame()
    for df in count_results:
        if combined_df.empty:
            combined_df = df[2]
        else:
            combined_df = pd.concat([combined_df, df[2]], axis=0)

    combined_df.sort_values(['X', 'Y'], ascending=[True, True], inplace=True)
    combined_df.to_csv("ints.csv", index=False)
    plot(combined_df)
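The example relies on several helpers (get_function_ids, get_random_str, wait_batch, plot) that are defined elsewhere in the original script. A minimal sketch of two of them, assuming wait_batch simply polls fxc.get_batch_result until no task is pending and then collects the results, might look like this:

import random
import string
import time


def get_random_str(length=8):
    # Hypothetical helper: random suffix to keep per-sweep output distinct.
    return ''.join(random.choices(string.ascii_lowercase, k=length))


def wait_batch(fxc, task_ids, poll_interval=5):
    # Hypothetical helper: block until every task in the batch has finished,
    # then return the results in the same order as task_ids.
    while any(s['pending'] for s in fxc.get_batch_result(task_ids).values()):
        time.sleep(poll_interval)
    return [fxc.get_result(task_id) for task_id in task_ids]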
Example No. 2
import time

from funcx import FuncXClient

# The original snippet presumably creates the client earlier; it is added here
# so the example is self-contained.
fx = FuncXClient()


def test_batch3(a, b, c=2, d=2):
    return a + 2 * b + 3 * c + 4 * d
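

# test_batch1 and test_batch2 are not shown in the original example; the
# definitions below are hypothetical stand-ins that simply match the
# (a, b, c=..., d=...) calling convention used when the batch is built.
def test_batch1(a, b, c=0, d=0):
    return a + b + c + d


def test_batch2(a, b, c=1, d=1):
    return a * b + c * d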


funcs = [test_batch1, test_batch2, test_batch3]
func_ids = []
for func in funcs:
    func_ids.append(fx.register_function(func, description='test'))

ep_id = '4b116d3c-1703-4f8f-9f6f-39921e5864df'
print("FN_UUID : ", func_ids)

start = time.time()
task_count = 5
batch = fx.create_batch()
for func_id in func_ids:
    for i in range(task_count):
        batch.add(i,
                  i + 1,
                  c=i + 2,
                  d=i + 3,
                  endpoint_id=ep_id,
                  function_id=func_id)

task_ids = fx.batch_run(batch)

delta = time.time() - start
print("Time to launch {} tasks: {:8.3f} s".format(task_count * len(func_ids),
                                                  delta))
print("Got {} tasks_ids ".format(len(task_ids)))
Example No. 3
    )
    args = parser.parse_args()

    fxc = FuncXClient()

    double_uuid = fxc.register_function(app_double)
    sum_uuid = fxc.register_function(app_sum)

    if args.proxy:
        store = ps.store.init_store(
            "redis",
            hostname="127.0.0.1",
            port=args.redis_port,
        )

    batch = fxc.create_batch()
    for _ in range(args.num_arrays):
        x = np.random.rand(args.size, args.size)
        if args.proxy:
            x = store.proxy(x)
        batch.add(x, endpoint_id=args.funcx_endpoint, function_id=double_uuid)

    batch_res = fxc.batch_run(batch)

    # Poll until every task in the batch has finished; get_batch_result
    # returns a dict mapping each task id to a status record with a
    # "pending" flag.
    mapped_results = fxc.get_batch_result(batch_res)
    while any(status["pending"] for status in mapped_results.values()):
        time.sleep(0.1)
        mapped_results = fxc.get_batch_result(batch_res)

    mapped_results = [fxc.get_result(task_id) for task_id in mapped_results]
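Example No. 3 is truncated at the top, so the imports, the app_double/app_sum definitions, and the argument parser are not shown. Based only on the attributes the snippet reads from args and on the ps.store call, the missing setup might look roughly like this (flag names, defaults, and the two function bodies are assumptions):

import argparse
import time

import numpy as np
import proxystore as ps
from funcx import FuncXClient


def app_double(x):
    # Hypothetical: double each element of the array on the remote endpoint.
    return 2 * x


def app_sum(x):
    # Hypothetical: reduce the array to a scalar on the remote endpoint.
    return x.sum()


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--funcx-endpoint', required=True)
    parser.add_argument('--num-arrays', type=int, default=10)
    parser.add_argument('--size', type=int, default=100)
    parser.add_argument('--proxy', action='store_true')
    parser.add_argument('--redis-port', type=int, default=6379)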