Example #1
0
def master_job(nslaves, resume=False):
    """Drive the interpolation step: build the tile task list, hand it to
    the slaves through barriers, then gather the per-tile results.

    Parameters
    ----------
    nslaves : int
        Number of slave processes coordinated by the master.
    resume : bool
        When True, only tiles whose "CT" profile file is missing on disk
        are (re)processed.
    """
    # the master director synchronizes the slaves via numbered barriers
    master = mns.Master(nslaves)

    argo = tools.read_argodb()  # read for consistency with the slaves (unused here)
    bb = tiles.read_tiles()
    keys = list(bb.keys())

    work = workload(bb)
    if resume:
        # keep only the tiles whose "CT" profile file is not on disk yet
        keys = [
            k for k in keys
            if not os.path.exists(
                interp.tiles_profiles_file % (interp.var_dir["CT"], k))
        ]
    else:
        #tiles.split_global_into_tiles(bb, argo)
        pass

    # heaviest tiles first: indices sorted by decreasing workload
    weight = [work[k] for k in keys]
    tasks = np.argsort(weight)[::-1]

    print(tasks)
    pd.to_pickle(keys, file_tiles_to_interpolate)

    # master defines the tasks
    master.barrier(0)

    # slaves work
    master.async_distrib(tasks)

    master.barrier(1)

    # gather DataFrame
    tiles.gather_global_from_tiles()
    # gather profiles
    interp.gather_global_from_tiles()

    # master gathers the dataframes
    master.barrier(2)
Example #2
0
def master_job(nslaves):
    """Drive the atlas step: create the output folders, distribute one
    task per subdomain to the slaves, then merge the subdomain files.

    Parameters
    ----------
    nslaves : int
        Number of slave processes coordinated by the master.
    """
    # the master director synchronizes the slaves via numbered barriers
    master = mns.Master(nslaves, verbose=False)

    atlas.create_folders()

    # one task index per subdomain (the enumerate/unused-value form was
    # just a roundabout range over the list length)
    tasks = list(range(len(atlas.subd_list)))

    # master defines the tasks
    master.barrier(0)

    # slaves work
    master.async_distrib(tasks)

    master.barrier(1)

    # merge the per-subdomain outputs into the global file
    atlas.write_global_from_subd()
    # master gathers the dataframes
    master.barrier(2)
Example #3
0
def master_job(nslaves, resume=False):
    """Drive the statistics step: broadcast the task list and the tile
    definitions to every rank, then synchronize the slaves.

    Parameters
    ----------
    nslaves : int
        Number of slave processes coordinated by the master.
    resume : bool
        Forwarded to define_tasks to restart an interrupted run.
    """
    master = mns.Master(nslaves, verbose=False)

    stats.create_folders()

    # broadcast the task list and the matching keys to every rank
    tasks, keys = mns.bcast(define_tasks(resume=resume))

    # broadcast the tile definitions as well
    bb = mns.bcast(tiles.read_tiles())

    # master defines the tasks
    master.barrier(0)

    # slaves work
    master.async_distrib(tasks)

    master.barrier(1)

    # master gathers the dataframes
    master.barrier(2)
Example #4
0
def master_job(nslaves, debug=False):
    """Build the global Argo database: update it with new floats, let the
    slaves process their share, concatenate their results into
    argo_global.pkl, define the tiles and clean the work directory.

    Parameters
    ----------
    nslaves : int
        Number of slave processes; must not exceed the number of wmos.
    debug : bool
        Currently unused (kept for backward compatibility).

    Raises
    ------
    ValueError
        If there are fewer wmos than slaves.
    FileNotFoundError
        If a slave's result file is missing when gathering.
    """
    # define the master director
    master = mns.Master(nslaves)

    init.init()

    dacs, wmos = tools.get_all_wmos()
    argo = tools.read_argodb()

    known_wmos = set(argo.WMO)

    # new floats appeared since the last run: refresh the database first
    if len(wmos) > len(known_wmos):
        update_with_new_wmos()

    nwmos = len(wmos)
    # each slave needs at least one float to work on; raise explicitly
    # (an assert would vanish under `python -O`)
    if nwmos < nslaves:
        raise ValueError("fewer wmos (%i) than slaves (%i)" % (nwmos, nslaves))
    print("number of nwmos: %i" % nwmos)
    # define tasks
    print("define the tasks")

    # master defines the tasks
    master.barrier(0)

    print("slaves are working")
    # slaves work

    master.barrier(1)
    print("gather the results")

    # gather DataFrame: each slave wrote one pickled frame per task
    argos = []
    for itask in range(nslaves):
        f = result_file % itask
        if not os.path.exists(f):
            raise FileNotFoundError(f)
        argos.append(pd.read_pickle(f))
    argo = pd.concat(argos)

    print("number of profiles in the database: %i" % len(argo))
    print("write argo_global.pkl")

    tools.write_argodb(argo)

    print("define tiles")

    tiles.define_tiles()

    # clean up workdir
    print("clean up %s" % work_dir)
    os.system("rm -Rf %s/*.pkl" % work_dir)

    # master gathers the dataframes
    master.barrier(2)