def test_factory_1():

    config = {
        "sites": [{
            "site": "RCC_Midway_Remote",
            "auth": {
                "channel": "ssh",
                "hostname": "swift.rcc.uchicago.edu",
                "username": "******"
            },
            "execution": {
                "executor": "ipp",
                "provider": "slurm",  # LIKELY SHOULD BE BOUND TO SITE
                "scriptDir": ".scripts",
                "block": {  # Definition of a block
                    "nodes": 1,  # of nodes in that block
                    "taskBlocks": 1,  # total tasks in a block
                    "walltime": "00:05:00",
                    "Options": {
                        "partition": "debug",
                        "account": "pi-wilde",
                        "overrides": "#SBATCH--constraint=haswell"
                    }
                }
            }
        }],
        "globals": {
            "lazyErrors": True
        }
    }

    epf = ExecProviderFactory()
    epf.make(config)
Example #2
    def __init__(self, config=None, executors=None, lazy_fail=True,
                 rundir=None, fail_retries=2):
        """ Initialize the DataFlowKernel

        Please note that keyword args passed to the DFK here will always override
        options passed in via the config.

        KWargs:
            config (Dict) : A single data object encapsulating all config attributes
            executors (list of Executor objs): Optional, kept for (somewhat) backward compatibility with 0.2.0
            lazy_fail(Bool) : Default=True, determine failure behavior
            rundir (str) : Path to run directory. Defaults to ./runinfo/runNNN
            fail_retries(int): Default=2, Set the number of retry attempts in case of failure

        Returns:
            DataFlowKernel object
        """
        # Create run dirs for this run
        self.rundir = make_rundir(config=config, path=rundir)

        # Update config with defaults
        self._config = update_config(config, self.rundir)

        # Start the anonymized usage tracker and send init msg
        self.usage_tracker = UsageTracker(self)
        self.usage_tracker.send_message()

        if self._config:
            self._executors_managed = True
            # Create the executors
            epf = EPF()
            self.executors = epf.make(self.rundir, self._config)

            # Set global vars from config
            self.lazy_fail = self._config["globals"].get("lazyFail", lazy_fail)
            self.fail_retries = self._config["globals"].get("fail_retries", fail_retries)
            self.flowcontrol = FlowControl(self, self._config)
        else:
            self._executors_managed = False
            self.fail_retries = fail_retries
            self.lazy_fail = lazy_fail
            self.executors = {i: x for i, x in enumerate(executors)}
            self.flowcontrol = FlowNoControl(self, None)

        self.task_count = 0
        self.fut_task_lookup = {}
        self.tasks = {}


        logger.debug("Using executors: {0}".format(self.executors))
        atexit.register(self.cleanup)
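The two branches above define two construction paths. A minimal sketch of both, assuming a config dict like those in the factory tests on this page; the ThreadPoolExecutor import path and its max_threads argument are assumptions borrowed from later Parsl versions, not verified against this one:

# Path 1: config-driven; executors are built internally via EPF().make()
dfk = DataFlowKernel(config=config, fail_retries=3)

# Path 2: pass pre-built executors directly; the DFK indexes them as
# {0: executor, ...} and uses FlowNoControl instead of FlowControl.
from parsl.executors.threads import ThreadPoolExecutor  # import path assumed
dfk = DataFlowKernel(executors=[ThreadPoolExecutor(max_threads=4)])

Example #3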
def test_factory_2():

    config = {
        "site": "ipp_local",
        "execution": {
            "executor": "ipp",
            "provider": "local",
            "channel": "None",
            "options": {
                "initParallelism": 2,
                "maxParallelism": 2,
                "minParallelism": 0,
                "walltime": "00:25:00"
            }
        }
    }

    epf = ExecProviderFactory()
    executor = epf.make(config)
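Example #4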
def test_factory_3():

    config = {
        "sites": [{
            "site": "Local",
            "auth": {
                "channel": None
            },
            "execution": {
                "executor": "threads",
                "provider": None,  # LIKELY SHOULD BE BOUND TO SITE
                "maxThreads": 4
            }
        }],
        "globals": {
            "lazyErrors": True
        }
    }

    epf = ExecProviderFactory()
    epf.make(config)
Example #5
    def __init__(self,
                 config=None,
                 executors=None,
                 lazyErrors=True,
                 appCache=True,
                 rundir=None,
                 retries=0,
                 checkpointFiles=None,
                 checkpointMode=None,
                 data_manager=None):
        """ Initialize the DataFlowKernel.

        Please note that keyword args passed to the DFK here will always override
        options passed in via the config.

        KWargs:
            - config (dict) : A single data object encapsulating all config attributes
            - executors (list of Executor objs): Optional, kept for (somewhat) backward compatibility with 0.2.0
            - lazyErrors(bool) : Default=True, allow workflow to continue on app failures.
            - appCache (bool) :Enable caching of apps
            - rundir (str) : Path to run directory. Defaults to ./runinfo/runNNN
            - retries(int): Default=0, Set the number of retry attempts in case of failure
            - checkpointFiles (list of str): List of filepaths to checkpoint files
            - checkpointMode (None, 'dfk_exit', 'task_exit', 'periodic'): Method to use.
            - data_manager (DataManager): User created DataManager
        Returns:
            DataFlowKernel object
        """
        # Create run dirs for this run
        self.rundir = make_rundir(config=config, path=rundir)
        parsl.set_file_logger("{}/parsl.log".format(self.rundir),
                              level=logging.DEBUG)

        logger.info("Parsl version: {}".format(parsl.__version__))
        logger.info("Libsubmit version: {}".format(libsubmit.__version__))

        # Update config with defaults
        self._config = update_config(config, self.rundir)

        # Set the data manager
        if data_manager:
            self.data_manager = data_manager
        else:
            self.data_manager = DataManager(config=self._config)

        # Start the anonymized usage tracker and send init msg
        self.usage_tracker = UsageTracker(self)
        self.usage_tracker.send_message()

        # Load checkpoints if any
        cpts = self.load_checkpoints(checkpointFiles)
        # Initialize the memoizer
        self.memoizer = Memoizer(self, memoize=appCache, checkpoint=cpts)
        self.checkpointed_tasks = 0
        self._checkpoint_timer = None

        if self._config:
            self._executors_managed = True
            # Create the executors
            epf = EPF()
            self.executors = epf.make(self.rundir, self._config)

            # set global vars from config
            self.lazy_fail = self._config["globals"].get(
                "lazyErrors", lazyErrors)
            self.fail_retries = self._config["globals"].get("retries", retries)
            self.flowcontrol = FlowControl(self, self._config)
            self.checkpoint_mode = self._config["globals"].get(
                "checkpointMode", checkpointMode)
            if self.checkpoint_mode == "periodic":
                period = self._config["globals"].get("checkpointPeriod",
                                                     "00:30:00")
                try:
                    h, m, s = map(int, period.split(':'))
                    checkpoint_period = (h * 3600) + (m * 60) + s
                    self._checkpoint_timer = Timer(self.checkpoint,
                                                   interval=checkpoint_period)
                except Exception:
                    logger.error(
                        "Invalid checkpointPeriod provided: {0}; expected HH:MM:SS. "
                        "Defaulting to 30 minutes.".format(period))
                    self._checkpoint_timer = Timer(self.checkpoint,
                                                   interval=(30 * 60))

        else:
            self._executors_managed = False
            self.fail_retries = retries
            self.lazy_fail = lazyErrors
            self.executors = {i: x for i, x in enumerate(executors)}
            self.flowcontrol = FlowNoControl(self, None)
            self.checkpoint_mode = checkpointMode

        self.task_count = 0
        self.fut_task_lookup = {}
        self.tasks = {}
        self.task_launch_lock = threading.Lock()

        logger.debug("Using executors: {0}".format(self.executors))
        atexit.register(self.cleanup)
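The periodic branch above reads checkpointMode and checkpointPeriod from the "globals" block. A minimal config sketch with illustrative values, reusing the local threads site from test_factory_3:

config = {
    "sites": [{
        "site": "Local",
        "auth": {"channel": None},
        "execution": {"executor": "threads", "provider": None, "maxThreads": 2}
    }],
    "globals": {
        "lazyErrors": True,
        "checkpointMode": "periodic",   # None, 'dfk_exit', 'task_exit', or 'periodic'
        "checkpointPeriod": "01:00:00"  # HH:MM:SS, parsed above into 3600 seconds
    }
}
dfk = DataFlowKernel(config=config)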
Example #6
    def __init__(self,
                 config=None,
                 executors=None,
                 lazyErrors=True,
                 appCache=True,
                 rundir=None,
                 retries=0,
                 checkpointFiles=None):
        """ Initialize the DataFlowKernel

        Please note that keyword args passed to the DFK here will always override
        options passed in via the config.

        KWargs:
            - config (Dict) : A single data object encapsulating all config attributes
            - executors (list of Executor objs): Optional, kept for (somewhat) backward compatibility with 0.2.0
            - lazyErrors(Bool) : Default=True, allow workflow to continue on app failures.
            - appCache (Bool) :Enable caching of apps
            - rundir (str) : Path to run directory. Defaults to ./runinfo/runNNN
            - retries(int): Default=0, Set the number of retry attempts in case of failure
            - checkpointFiles (list of str): List of filepaths to checkpoint files

        Returns:
            DataFlowKernel object
        """
        # Create run dirs for this run
        self.rundir = make_rundir(config=config, path=rundir)
        parsl.set_file_logger("{}/parsl.log".format(self.rundir),
                              level=logging.INFO)

        logger.info("Parsl version: {}".format(parsl.__version__))
        logger.info("Libsubmit version: {}".format(libsubmit.__version__))

        # Update config with defaults
        self._config = update_config(config, self.rundir)

        # Start the anonymized usage tracker and send init msg
        self.usage_tracker = UsageTracker(self)
        self.usage_tracker.send_message()

        # Load checkpoints if any
        cpts = self.load_checkpoints(checkpointFiles)
        # Initialize the memoizer
        self.memoizer = Memoizer(self, memoize=appCache, checkpoint=cpts)

        if self._config:
            self._executors_managed = True
            # Create the executors
            epf = EPF()
            self.executors = epf.make(self.rundir, self._config)

            # set global vars from config
            self.lazy_fail = self._config["globals"].get(
                "lazyErrors", lazyErrors)
            self.fail_retries = self._config["globals"].get("retries", retries)
            self.flowcontrol = FlowControl(self, self._config)
        else:
            self._executors_managed = False
            self.fail_retries = retries
            self.lazy_fail = lazyErrors
            self.executors = {i: x for i, x in enumerate(executors)}
            self.flowcontrol = FlowNoControl(self, None)

        self.task_count = 0
        self.fut_task_lookup = {}
        self.tasks = {}
        self.task_launch_lock = threading.Lock()

        logger.debug("Using executors: {0}".format(self.executors))
        atexit.register(self.cleanup)
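This variant accepts checkpointFiles but no checkpointMode, so checkpoints are only loaded, never written periodically. A sketch of resuming from a previous run; the checkpoint path is hypothetical:

# Hypothetical path: checkpoints live under an earlier run's rundir
dfk = DataFlowKernel(config=config,
                     appCache=True,
                     retries=2,
                     checkpointFiles=["runinfo/000/checkpoint"])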
Example #7
                "walltime": "00:10:00",
                "account": "pi-chard",
                "submit_script_dir": ".scripts"
                }
           }}


import subprocess
import random
print("starting ipcontroller")

rand_port = random.randint(55000, 59000)
p = subprocess.Popen(['ipcontroller', '--reuse',
                      '--port={0}'.format(rand_port), '--ip=*'], stdout=None, stderr=None)

epf = ExecProviderFactory()
resource = epf.make(config)
dfk = DataFlowKernel(resource)
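The script above never stops the ipcontroller subprocess it launched. A small sketch of explicit cleanup at interpreter exit, mirroring the atexit pattern used in the DFK examples; this is an addition, not part of the original:

import atexit
atexit.register(p.terminate)  # stop the ipcontroller subprocess on exit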


from jinja2 import Environment, FileSystemLoader


# Helper function to create an input file from a template
def create_template(template_name, output_name, contents):
    fs_loader = FileSystemLoader('templates')
    env = Environment(loader=fs_loader)
    template = env.get_template(template_name)
    with open(output_name, 'w') as packmol_file:
        packmol_file.write(template.render(contents))
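Hypothetical usage of the helper, assuming a Jinja2 template templates/packmol.template whose placeholders include tolerance and output:

create_template('packmol.template', 'packmol.inp',
                {'tolerance': 2.0, 'output': 'mixture.pdb'})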


@App('bash', dfk)