def __init__(self, config, channel_script_dir=None, channel=None):
    '''Initialize the local provider.

    This provider is unique because the `LocalChannel` is simple enough
    that a default can be provided. For this reason users can pass
    `channel=None`, and a default `LocalChannel` will be created.

    Args:
        - config (dict): Dictionary with all the config options.
        - channel_script_dir (str): Script directory which will be passed
          to the default `LocalChannel` (this will have no effect if a
          `channel` is not None)
        - channel (Channel): Channel to use; if none is provided, a
          default one will be created
    '''
    # Pick the explicit channel when given; otherwise build a default
    # LocalChannel, honouring the optional script directory.
    if channel is not None:
        self.channel = channel
    elif channel_script_dir is not None:
        self.channel = LocalChannel(scriptDir=channel_script_dir)
    else:
        self.channel = LocalChannel()

    self.config = config
    self.sitename = config['site']
    self.current_blocksize = 0

    execution_cfg = self.config["execution"]
    self.scriptDir = execution_cfg["scriptDir"]

    block_cfg = execution_cfg["block"]
    self.taskBlocks = block_cfg.get("taskBlocks", 1)
    self.launcher = Launchers.get(block_cfg.get("launcher", "singleNode"), None)

    # Dictionary that keeps track of jobs, keyed on job_id
    self.resources = {}
def test_local():
    '''Smoke test: run a trivial shell command through a LocalChannel.'''
    chan = LocalChannel(None, None)
    exit_code, stdout, stderr = chan.execute_wait('echo "pwd: $PWD"', 2)
    assert exit_code == 0, "Channel execute failed"
    print("Stdout: ", stdout)
    print("Stderr: ", stderr)
def test_env():
    ''' Regression testing for issue #27 '''
    channel = LocalChannel()
    rc, stdout, stderr = channel.execute_wait("env", 1)

    # Inspect the environment reported by the remote side line by line.
    env_lines = stdout.split('\n')

    path_matches = [line for line in env_lines if line.startswith("PATH=")]
    assert path_matches, "PATH not found"

    home_matches = [line for line in env_lines if line.startswith("HOME=")]
    assert home_matches, "HOME not found"

    # Original printed the split list, so keep printing the list here.
    print("RC:{} \nSTDOUT:{} \nSTDERR:{}".format(rc, env_lines, stderr))
def test_env_mod():
    ''' Testing for env update at execute time. '''
    channel = LocalChannel()
    rc, stdout, stderr = channel.execute_wait("env", 1, {'TEST_ENV': 'fooo'})

    env_lines = stdout.split('\n')

    # The baseline environment must still be present...
    assert [line for line in env_lines if line.startswith("PATH=")], "PATH not found"
    assert [line for line in env_lines if line.startswith("HOME=")], "HOME not found"

    # ...and the per-call override must have been injected.
    assert [line for line in env_lines if line.startswith("TEST_ENV=fooo")], "User set env missing"
def __init__(self,
             channel=None,
             label='local',
             script_dir='parsl_scripts',
             tasks_per_node=1,
             nodes_per_block=1,
             launcher='single_node',
             init_blocks=4,
             min_blocks=0,
             max_blocks=10,
             walltime="00:15:00",
             parallelism=1):
    ''' Initialize the local provider.

    Args:
        - channel (Channel): Channel to use; when None (the default) a fresh
          `LocalChannel` is created for this instance.
        - label (str): Human-readable label for this provider.
        - script_dir (str): Directory for intermediate scripts; created if
          it does not already exist.
        - tasks_per_node (int): Tasks per node.
        - nodes_per_block (int): Nodes per block.
        - launcher (str): Launcher name.
        - init_blocks (int): Blocks to provision at start.
        - min_blocks (int): Minimum blocks outstanding at any time.
        - max_blocks (int): Maximum blocks outstanding at any time.
        - walltime (str): Walltime as "HH:MM:SS".
        - parallelism (float): Parallelism setting used for scaling.
    '''
    # BUG FIX: the original signature used `channel=LocalChannel()`, which is
    # evaluated ONCE at function-definition time, so every instance built with
    # the default shared one channel object (mutable-default pitfall).
    # `None` now means "make me a fresh default"; explicit callers see no change.
    self.channel = channel if channel is not None else LocalChannel()
    self.label = label

    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(script_dir, exist_ok=True)
    self.script_dir = script_dir

    self.provisioned_blocks = 0
    self.nodes_per_block = nodes_per_block
    self.tasks_per_node = tasks_per_node
    self.launcher = launcher
    self.init_blocks = init_blocks
    self.min_blocks = min_blocks
    self.max_blocks = max_blocks
    self.parallelism = parallelism
    self.walltime = walltime

    # Dictionary that keeps track of jobs, keyed on job_id
    self.resources = {}
def __init__(self,
             partition,
             label='slurm',
             channel=None,
             script_dir='parsl_scripts',
             nodes_per_block=1,
             tasks_per_node=1,
             init_blocks=1,
             min_blocks=0,
             max_blocks=10,
             parallelism=1,
             walltime="00:10:00",
             overrides='',
             launcher='single_node'):
    ''' Initialize the Slurm provider.

    Args:
        - partition (str): Slurm partition to submit jobs to.
        - label (str): Human-readable label for this provider.
        - channel (Channel): Channel to use; when None (the default) a fresh
          `LocalChannel` is created for this instance.
        - script_dir (str): Directory for intermediate scripts.
        - nodes_per_block / tasks_per_node / init_blocks / min_blocks /
          max_blocks / parallelism / walltime / launcher: forwarded to the
          base provider unchanged.
        - overrides (str): Extra text injected into the submit script.
    '''
    # BUG FIX: the original signature used `channel=LocalChannel()`, which is
    # evaluated once at definition time and shared by every instance built
    # with the default (mutable-default pitfall). Build a fresh one instead.
    if channel is None:
        channel = LocalChannel()

    # NOTE(review): `label` is passed positionally before `channel` here,
    # while the Local provider in this file declares `channel` before
    # `label` — confirm the base class signature expects this order.
    super().__init__(label, channel, script_dir, nodes_per_block,
                     tasks_per_node, init_blocks, min_blocks, max_blocks,
                     parallelism, walltime, launcher)

    self.partition = partition
    self.overrides = overrides
""" ================== Block | ++++++++++++++ | Node | | | | | | Task | | . . . | | | | | ++++++++++++++ | ================== """ from libsubmit.channels.local.local import LocalChannel from libsubmit.providers.grid_engine.grid_engine import GridEngine from parsl.config import Config from parsl.executors.ipp import IPyParallelExecutor from parsl.tests.user_opts import user_opts from parsl.tests.utils import get_rundir config = Config(executors=[ IPyParallelExecutor( label='cc_in2p3_local_single_node', provider=GridEngine( channel=LocalChannel( script_dir=user_opts['cc_in2p3']['script_dir']), nodes_per_block=1, tasks_per_node=1, init_blocks=1, max_blocks=1, overrides=user_opts['cc_in2p3']['overrides'], )) ], run_dir=get_rundir())
class Local(ExecutionProvider):
    ''' Local Execution Provider

    This provider is used to launch IPP engines on the localhost.

    .. warning::
        Please note that in the config documented below, description and values
        are placed inside a schema that is delimited by #{ schema.. }

    Here's the scheme for the Local provider:

    .. code-block:: python

        { "execution" : { # Definition of all execution aspects of a site
              "executor" : #{Description: Define the executor used as task executor,
                           # Type : String,
                           # Expected : "ipp",
                           # Required : True},
              "provider" : #{Description : The provider name, in this case local
                           # Type : String,
                           # Expected : "local",
                           # Required :  True },
              "scriptDir" : #{Description : Relative or absolute path to a
                            # directory in which intermediate scripts are placed
                            # Type : String,
                            # Default : "./.scripts"},
              "block" : { # Definition of a block
                  "initBlocks" : #{Description : # of blocks to provision at the start of
                                 # the DFK
                                 # Type : Integer
                                 # Default : ?
                                 # Required :    },
                  "minBlocks" : #{Description : Minimum # of blocks outstanding at any time
                                # WARNING :: Not Implemented
                                # Type : Integer
                                # Default : 0 },
                  "maxBlocks" : #{Description : Maximum # Of blocks outstanding at any time
                                # WARNING :: Not Implemented
                                # Type : Integer
                                # Default : ? },
                  "taskBlocks" : #{Description : workers to launch per request
                                 # Type : Integer
                                 # Default : 1 },
              }
          }
        }
    '''

    def __repr__(self):
        return "<Local Execution Provider for site:{0}>".format(self.sitename)

    def __init__(self, config, channel_script_dir=None, channel=None):
        ''' Initialize the local provider class

        This provider is unique because the `LocalChannel` is simple enough
        that a default can be provided. For this reason users can pass
        `channel=None`, and a default `LocalChannel` will be created.

        Args:
            - config (dict): Dictionary with all the config options.
            - channel_script_dir (str): Script directory which will be passed
              to the default `LocalChannel` (this will have no effect if a
              `channel` is not None)
            - channel (Channel): Channel to use; if none is provided, a
              default one will be created
        '''
        if channel is None:
            if channel_script_dir is None:
                self.channel = LocalChannel()
            else:
                self.channel = LocalChannel(scriptDir=channel_script_dir)
        else:
            self.channel = channel

        self.config = config
        self.sitename = config['site']
        self.current_blocksize = 0
        self.scriptDir = self.config["execution"]["scriptDir"]
        self.taskBlocks = self.config["execution"]["block"].get("taskBlocks", 1)
        launcher_name = self.config["execution"]["block"].get("launcher", "singleNode")
        self.launcher = Launchers.get(launcher_name, None)

        # Dictionary that keeps track of jobs, keyed on job_id
        self.resources = {}

    @property
    def channels_required(self):
        ''' Returns Bool on whether a channel is required.

        BUG FIX: this property was previously defined twice on the class;
        the later (identical) definition silently shadowed this one. The
        duplicate was removed with no behavior change — both returned False.
        '''
        return False

    def status(self, job_ids):
        ''' Get the status of a list of jobs identified by their ids.

        Args:
            - job_ids (List of ids) : List of identifiers for the jobs

        Returns:
            - List of status codes.
        '''
        # BUG FIX: use the module logger rather than the root `logging`
        # module, consistent with the rest of this class.
        logger.debug("Checking status of : {0}".format(job_ids))
        for job_id in self.resources:
            if self.resources[job_id]['status'] in ['COMPLETED', 'FAILED']:
                # Terminal states never change; skip polling.
                continue

            poll_code = self.resources[job_id]['proc'].poll()
            if poll_code is None:
                self.resources[job_id]['status'] = 'RUNNING'
            elif poll_code == 0:
                # BUG FIX: the original guarded this branch (and FAILED) with
                # `status != 'RUNNING'`, so a job once marked RUNNING could
                # never transition to a terminal state, and positive non-zero
                # exit codes were never classified at all.
                self.resources[job_id]['status'] = 'COMPLETED'
            else:
                # Non-zero exit; negative values indicate death by signal.
                self.resources[job_id]['status'] = 'FAILED'

        return [self.resources[jid]['status'] for jid in job_ids]

    def _write_submit_script(self, script_string, script_filename):
        ''' Write the generated submit script out to a file.

        Args:
            - script_string (string) : The script content to write
            - script_filename (string) : Name of the submit script

        Returns:
            - True: on success

        Raises:
            SchedulerMissingArgs : If template is missing args
            ScriptPathError : Unable to write submit script out
        '''
        try:
            with open(script_filename, 'w') as f:
                f.write(script_string)
        except KeyError as e:
            logger.error("Missing keys for submit script : %s", e)
            raise (ep_error.SchedulerMissingArgs(e.args, self.sitename))
        except IOError as e:
            logger.error("Failed writing to submit script: %s", script_filename)
            raise (ep_error.ScriptPathError(script_filename, e))

        return True

    def submit(self, cmd_string, blocksize, job_name="parsl.auto"):
        ''' Submits the cmd_string onto an Local Resource Manager job of
        blocksize parallel elements. Submit returns an ID that corresponds to
        the task that was just submitted.

        If tasks_per_node <  1 : 1/tasks_per_node is provisioned
        If tasks_per_node == 1 : A single node is provisioned
        If tasks_per_node >  1 : tasks_per_node * blocksize number of nodes
        are provisioned.

        Args:
            - cmd_string :(String) Commandline invocation to be made on the
              remote side.
            - blocksize :(float) - Not really used for local

        Kwargs:
            - job_name (String): Name for job, must be unique

        Returns:
            - None: At capacity, cannot provision more
            - job_id: (string) Identifier for the job
        '''
        # Make the job name unique by appending the submission timestamp.
        job_name = "{0}.{1}".format(job_name, time.time())

        # Set script path
        script_path = "{0}/{1}.sh".format(self.scriptDir, job_name)
        script_path = os.path.abspath(script_path)

        wrap_cmd_string = self.launcher(cmd_string, taskBlocks=self.taskBlocks)
        self._write_submit_script(wrap_cmd_string, script_path)

        job_id, proc = self.channel.execute_no_wait('bash {0}'.format(script_path), 3)
        self.resources[job_id] = {'job_id': job_id,
                                  'status': 'RUNNING',
                                  'blocksize': blocksize,
                                  'proc': proc}

        return job_id

    def cancel(self, job_ids):
        ''' Cancels the jobs specified by a list of job ids

        Args:
            job_ids : [<job_id> ...]

        Returns:
            [True/False...] : If the cancel operation fails the entire list
            will be False.
        '''
        for job in job_ids:
            logger.debug("Terminating job/proc_id : {0}".format(job))
            # Here we are assuming that for local, the job_ids are the
            # process id's of the launched processes; terminate the whole
            # process group so launcher-spawned children die too.
            proc = self.resources[job]['proc']
            os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
            self.resources[job]['status'] = 'CANCELLED'

        rets = [True for i in job_ids]
        return rets

    @property
    def scaling_enabled(self):
        # This provider supports elastic scaling.
        return True

    @property
    def current_capacity(self):
        # Number of jobs currently tracked by this provider.
        return len(self.resources)
from parsl.executors.ipp_controller import Controller
from libsubmit.channels.local.local import LocalChannel
from libsubmit.providers.condor.condor import Condor
from parsl.config import Config
from parsl.executors.ipp import IPyParallelExecutor
from parsl.tests.user_opts import user_opts
from parsl.tests.utils import get_rundir

# Condor provider driven over a local channel against the OSG submit host.
config = Config(
    executors=[
        IPyParallelExecutor(
            label='osg_local_ipp',
            provider=Condor(
                channel=LocalChannel(
                    username=user_opts['osg']['username'],
                    script_dir=user_opts['osg']['script_dir'],
                ),
                nodes_per_block=1,
                tasks_per_node=1,
                init_blocks=4,
                max_blocks=4,
                overrides='Requirements = OSGVO_OS_STRING == "RHEL 6" && Arch == "X86_64" && HAS_MODULES == True',
                worker_setup='module load python/3.5.2; python3 -m venv parsl_env; source parsl_env/bin/activate; pip3 install ipyparallel',
            ),
        ),
    ],
    controller=Controller(public_ip='192.170.227.195'),
    run_dir=get_rundir(),
)