예제 #1
0
 def test_singularity(self):
     """Execute the container-detection script inside the test's Singularity image."""
     cmd = 'python3.6 ./funlib/tests/am_i_in_a_container.py'
     run_singularity(
         cmd,
         self.singularity_image,
         self.working_dir,
         self.mount_dirs,
         execute=True,
     )
예제 #2
0
def start_worker(
    predict_config,
    worker_config,
    data_config,
    graph_config,
    solve_config,
    queue,
    singularity_container,
    mount_dirs,
    solve_block,
    solve_setup_dir,
):
    """Launch the solve step for one daisy worker.

    Builds the ``solve_block`` command line from the given config files and
    runs it on a cluster queue, inside a Singularity container, or directly
    in the current environment (depending on ``queue`` and
    ``singularity_container``, which use the string "None" as a sentinel),
    with stdout/stderr redirected to per-worker logs under
    ``solve_setup_dir``.
    """
    worker_id = daisy.Context.from_env().worker_id

    # Per-worker stdout/stderr log files, keyed by the daisy worker id.
    log_out = os.path.join(solve_setup_dir, f"{worker_id}_worker.out")
    log_err = os.path.join(solve_setup_dir, f"{worker_id}_worker.err")

    command = (
        f"python -u {solve_block} {predict_config} {worker_config} "
        f"{data_config} {graph_config} {solve_config}"
    )

    if queue != "None":
        # Submit to the cluster scheduler (optionally containerized).
        logger.info(
            f"Running block on queue {queue} and container {singularity_container}"
        )
        cmd = run(
            command=command,
            queue=queue,
            num_gpus=0,
            num_cpus=1,
            singularity_image=singularity_container,
            mount_dirs=mount_dirs,
            execute=False,
            expand=False,
            batch=True,
        )
    elif singularity_container != "None":
        # No queue: run locally, but inside the Singularity container.
        logger.warning("Running block **locally**, no queue provided.")
        cmd = run_singularity(
            command,
            singularity_container,
            mount_dirs=mount_dirs,
            execute=False,
            expand=False,
        )
    else:
        # Neither queue nor container: run directly in the current environment.
        logger.warning("Running block **locally**, no queue provided.")
        logger.warning(
            "Running block in current environment, no singularity image provided."
        )
        cmd = [command]

    daisy.call(cmd, log_out=log_out, log_err=log_err)

    logger.info("Solve worker finished")
예제 #3
0
def predict_worker(train_setup_dir, predict_setup_dir, predict_number,
                   train_number, experiment, iteration, in_container,
                   in_dataset, out_container, db_host, db_name, queue,
                   singularity_container, num_cpus, num_cache_workers,
                   mount_dirs):
    """Run prediction for one daisy block.

    Serializes a per-worker JSON instruction file describing the job, then
    executes ``predict_block.py`` on it — directly, inside a Singularity
    container, or via a cluster queue (``queue`` and
    ``singularity_container`` use the string "None" as a sentinel) — with
    stdout/stderr redirected to per-worker log files under
    ``<predict_setup_dir>/worker_files``.
    """

    predict_block = os.path.join(predict_setup_dir, 'predict_block.py')

    # Scheduling options, embedded in the worker instruction file.
    run_instruction = {
        'queue': queue,
        'num_cpus': num_cpus,
        'num_cache_workers': num_cache_workers,
        'singularity': singularity_container
    }

    # Everything predict_block.py needs to locate the model and the data.
    worker_instruction = {
        'train_setup_dir': train_setup_dir,
        'iteration': iteration,
        'in_container': in_container,
        'in_dataset': in_dataset,
        'out_container': out_container,
        'db_host': db_host,
        'db_name': db_name,
        'run_instruction': run_instruction
    }

    worker_id = daisy.Context.from_env().worker_id
    worker_dir = os.path.join(predict_setup_dir, "worker_files")
    # exist_ok=True replaces the previous bare try/except, which silently
    # swallowed *any* error (e.g. permission failures), not just "already
    # exists", and was race-prone besides.
    os.makedirs(worker_dir, exist_ok=True)

    worker_instruction_file = os.path.join(
        worker_dir, '{}_worker_instruction.json'.format(worker_id))
    log_out = os.path.join(worker_dir, '{}_worker.out'.format(worker_id))
    log_err = os.path.join(worker_dir, '{}_worker.err'.format(worker_id))

    with open(worker_instruction_file, 'w') as f:
        json.dump(worker_instruction, f)

    logger.info(
        'Running block for prediction (e:{}, t:{}, i:{}, p:{}) and worker instruction {}...'
        .format(experiment, train_number, iteration, predict_number,
                worker_id))

    base_command = "python -u {} {}".format(predict_block,
                                            worker_instruction_file)
    if queue == "None":
        logger.warning("Running block **locally**, no queue provided.")
        if singularity_container == "None":
            logger.warning(
                "Running block in current environment, no singularity image provided."
            )
            cmd = base_command
        else:
            cmd = run_singularity(base_command,
                                  singularity_container,
                                  mount_dirs=mount_dirs,
                                  execute=False,
                                  expand=False)
    else:
        logger.info("Running block on queue {} and container {}".format(
            queue, singularity_container))
        cmd = run(command=base_command,
                  queue=queue,
                  num_gpus=1,
                  num_cpus=num_cpus,
                  singularity_image=singularity_container,
                  mount_dirs=mount_dirs,
                  execute=False,
                  expand=False)

    daisy.call(cmd, log_out=log_out, log_err=log_err)

    logger.info('Predict worker finished')
예제 #4
0
"""Build the network (mknet) and run the blockwise prediction, timing it."""
import json
import logging
import os
import time
from subprocess import check_call

from funlib.run import run, run_singularity

from micron import read_predict_config, read_worker_config, read_data_config

predict_config = read_predict_config("predict_config.ini")
worker_config = read_worker_config("worker_config.ini")
data_config = read_data_config("data_config.ini")

start = time.time()

# Build the network inside the configured Singularity container, or directly
# in the current environment when no container is configured ("None").
if worker_config["singularity_container"] != "None":
    run_singularity("python mknet.py",
                    singularity_image=worker_config["singularity_container"],
                    mount_dirs=worker_config["mount_dirs"],
                    execute=True)

else:
    check_call("python mknet.py", shell=True)

# Launch the blockwise prediction with absolute config paths, so the
# blockwise script can run from a different working directory.
# Fix: `os` was used here but never imported, which raised a NameError.
check_call("python {} {} {} {}".format(predict_config["blockwise"],
                                       os.path.abspath("predict_config.ini"),
                                       os.path.abspath("worker_config.ini"),
                                       os.path.abspath("data_config.ini")),
           shell=True)

end = time.time()

# Record wall-clock prediction time next to the config files.
with open("./time_prediction.json", "w") as f:
    json.dump({"t_predict": end - start}, f)
예제 #5
0
"""Run the graph-building step locally (in Singularity) or on a queue."""
import os

from funlib.run import run, run_singularity

from micron import read_predict_config, read_worker_config, read_data_config, read_graph_config

predict_config = read_predict_config("predict_config.ini")
worker_config = read_worker_config("worker_config.ini")
data_config = read_data_config("data_config.ini")
graph_config = read_graph_config("graph_config.ini")

# Command that runs the graph-building script with absolute config paths.
# Fix: `os`, `run` and `run_singularity` were used below but never imported,
# which raised a NameError at runtime.
base_cmd = "python {} {} {} {} {}".format(
    graph_config["build_graph"], os.path.abspath("predict_config.ini"),
    os.path.abspath("worker_config.ini"), os.path.abspath("data_config.ini"),
    os.path.abspath("graph_config.ini"))

if worker_config["singularity_container"] != "None" and worker_config[
        "queue"] == "None":
    # Container but no queue: run locally inside Singularity.
    run_singularity(base_cmd,
                    singularity_image=worker_config["singularity_container"],
                    mount_dirs=worker_config["mount_dirs"],
                    execute=True)

elif worker_config["singularity_container"] != "None" and worker_config[
        "queue"] != "None":
    # Container and queue: submit as a batch job.
    run(base_cmd,
        singularity_image=worker_config["singularity_container"],
        mount_dirs=worker_config["mount_dirs"],
        queue=worker_config["queue"],
        num_cpus=worker_config["num_cpus"],
        num_gpus=0,
        batch=True,
        execute=True)

else:
    # NOTE(review): when no container is configured, base_cmd is never
    # executed on this path — confirm whether a bare local run was intended.
    assert (worker_config["singularity_container"] == "None")
예제 #6
0
import os
from subprocess import check_call
from funlib.run import run, run_singularity
import logging
from micron import read_worker_config, read_train_config
import sys

# Training iteration count, passed as the first CLI argument.
iteration = int(sys.argv[1])
worker_config = read_worker_config("worker_config.ini")

# Command that runs the training pipeline for the requested iteration.
base_cmd = "python {} {}".format("train_pipeline.py", iteration)

# Build the network first, inside the container when one is configured
# ("None" is the no-container sentinel).
# NOTE(review): this call hard-codes mount_dirs instead of using
# worker_config["mount_dirs"] as the sibling scripts do — confirm intentional.
if worker_config["singularity_container"] != "None":
    run_singularity("python mknet.py",
                    singularity_image=worker_config["singularity_container"],
                    mount_dirs=["/groups", "/nrs", "/scratch", "/misc"],
                    execute=True)

else:
    check_call("python mknet.py", shell=True)

# Container but no queue: run the training pipeline locally inside Singularity.
if worker_config["singularity_container"] != "None" and worker_config[
        "queue"] == "None":
    run_singularity(base_cmd,
                    singularity_image=worker_config["singularity_container"],
                    mount_dirs=worker_config["mount_dirs"],
                    execute=True)

elif worker_config["singularity_container"] != "None" and worker_config[
        "queue"] != "None":
    run(base_cmd,