def grid_only_execute(pipeline, options):
    """Submit the pipeline server itself to a PBS/Torque queue, then return.

    Only queue_type 'pbs' is supported; anything else raises ValueError.
    NOTE(review): the `pipeline` argument is not used here — scripts are
    generated from `options` and the command line only.
    """
    queue_type = options.execution.queue_type
    if queue_type != 'pbs':
        raise ValueError("currently we only support submitting the server to PBS/Torque systems")
    queue_runner = runOnQueueingSystem(options, sys.argv)
    queue_runner.createAndSubmitPbsScripts()
    # TODO: make the local server create the directories (first time only) OR
    # create them before submitting OR submit a separate stage?
    # NOTE we can't add a stage to the pipeline at this point since the pipeline
    # doesn't support any sort of incremental recomputation ...
    logger.info("Finished submitting PBS job scripts...quitting")
def main():
    """Command-line entry point for launching pipeline executors.

    Parses executor options (optionally seeded from a config file named by
    the PYDPIPER_CONFIG_FILE environment variable) and then either launches
    executors locally, submits a server job, or submits executor jobs to a
    queueing system, depending on the parsed options.
    """
    # command line option handling
    # use an environment variable to look for a default config file
    # Alternately, we could use a default location for the file
    # (say `files = ['/etc/pydpiper.cfg', '~/pydpiper.cfg', './pydpiper.cfg']`)
    # TODO this logic is duplicated in application.py
    default_config_file = os.getenv("PYDPIPER_CONFIG_FILE")
    if default_config_file is not None:
        try:
            # BUG FIX: previously opened the undefined name PYDPIPER_CONFIG_FILE
            # (a NameError) instead of the path held in default_config_file, so
            # the "can't be opened" warning path crashed instead of warning.
            with open(default_config_file):
                pass
        except OSError:  # narrowed from a bare `except:`
            warnings.warn(f"PYDPIPER_CONFIG_FILE is set to '{default_config_file}', which can't be opened.")
        files = [default_config_file]
    else:
        files = []
    from pydpiper.core.arguments import _mk_execution_parser
    parser = ArgParser(default_config_files=files)
    _mk_execution_parser(parser)
    # using parse_known_args instead of parse_args is a hack since we
    # currently send ALL arguments from the main program to the executor.
    # Alternately, we could keep a copy of the executor parser around
    # when constructing the executor shell command
    options, _ = parser.parse_known_args()
    ensure_exec_specified(options.num_exec)

    def local_launch(options):
        # didn't parse application options so don't have a --pipeline-name
        # FIXME - I doubt missing the other options even works, otherwise we could change the executor interface!!
        pe = pipelineExecutor(options=options, uri_file=options.urifile,
                              pipeline_name="anon-executor")
        # executors don't use any shared-memory constructs, so OK to copy
        ps = [Process(target=launchExecutor, args=(pe,))
              for _ in range(options.num_exec)]
        for p in ps:
            p.start()
        for p in ps:
            p.join()

    if options.local:
        local_launch(options)
    elif options.submit_server:
        roq = q.runOnQueueingSystem(options, sysArgs=sys.argv)
        for i in range(options.num_exec):
            roq.createAndSubmitExecutorJobFile(i, after=None,
                                               time=q.timestr_to_secs(options.time))
    elif options.queue_type is not None:
        for i in range(options.num_exec):
            pe = pipelineExecutor(options=options, uri_file=options.urifile,
                                  pipeline_name="anon-executor")
            pe.submitToQueue(1)  # TODO is there a reason why we have logic for submitting `i` executors again here?
    else:
        local_launch(options)
def main():
    """Command-line entry point for launching pipeline executors.

    NOTE(review): this appears to be a duplicate definition of ``main`` also
    present earlier in the file; in Python the later definition wins —
    consider removing one copy.

    Parses executor options (optionally seeded from a config file named by
    the PYDPIPER_CONFIG_FILE environment variable) and then either launches
    executors locally, submits a server job, or submits executor jobs to a
    queueing system, depending on the parsed options.
    """
    # command line option handling
    # use an environment variable to look for a default config file
    # Alternately, we could use a default location for the file
    # (say `files = ['/etc/pydpiper.cfg', '~/pydpiper.cfg', './pydpiper.cfg']`)
    # TODO this logic is duplicated in application.py
    default_config_file = os.getenv("PYDPIPER_CONFIG_FILE")
    if default_config_file is not None:
        try:
            # BUG FIX: previously opened the undefined name PYDPIPER_CONFIG_FILE
            # (a NameError) instead of the path held in default_config_file, so
            # the "can't be opened" warning path crashed instead of warning.
            with open(default_config_file):
                pass
        except OSError:  # narrowed from a bare `except:`
            warnings.warn(
                f"PYDPIPER_CONFIG_FILE is set to '{default_config_file}', which can't be opened."
            )
        files = [default_config_file]
    else:
        files = []
    from pydpiper.core.arguments import _mk_execution_parser
    parser = ArgParser(default_config_files=files)
    _mk_execution_parser(parser)
    # using parse_known_args instead of parse_args is a hack since we
    # currently send ALL arguments from the main program to the executor.
    # Alternately, we could keep a copy of the executor parser around
    # when constructing the executor shell command
    options, _ = parser.parse_known_args()
    ensure_exec_specified(options.num_exec)

    def local_launch(options):
        # didn't parse application options so don't have a --pipeline-name
        # FIXME - I doubt missing the other options even works, otherwise we could change the executor interface!!
        pe = pipelineExecutor(
            options=options, uri_file=options.urifile, pipeline_name="anon-executor"
        )
        # executors don't use any shared-memory constructs, so OK to copy
        ps = [
            Process(target=launchExecutor, args=(pe,))
            for _ in range(options.num_exec)
        ]
        for p in ps:
            p.start()
        for p in ps:
            p.join()

    if options.local:
        local_launch(options)
    elif options.submit_server:
        roq = q.runOnQueueingSystem(options, sysArgs=sys.argv)
        for i in range(options.num_exec):
            roq.createAndSubmitExecutorJobFile(
                i, after=None, time=q.timestr_to_secs(options.time)
            )
    elif options.queue_type is not None:
        for i in range(options.num_exec):
            pe = pipelineExecutor(options=options, uri_file=options.urifile,
                                  pipeline_name="anon-executor")
            pe.submitToQueue(
                1
            )  # TODO is there a reason why we have logic for submitting `i` executors again here?
    else:
        local_launch(options)