Example #1
def runfile(script=None,
            raw_args='',
            wdir='.',
            code=None,
            kernel=None,
            **kwargs):
    # This is related to how parse_known_args handles unknown options when
    # the parser also defines an optional "workflow" positional argument.
    # That is to say,
    #
    #   --rep 3
    #
    # would be parsed as
    #
    #   args.workflow='3', unknown=['--rep']
    #
    # instead of
    #
    #   args.workflow=None, unknown=['--rep', '3']
    #
    # so we have to switch to a parser without the workflow positional when
    # no workflow name is given (a standalone demonstration follows Example #1).
    raw_args = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
    if (script is None and code is None) or '-h' in raw_args:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.print_help()
        return
    if raw_args and raw_args[0].lstrip().startswith('-'):
        parser = get_run_parser(interactive=True, with_workflow=False)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(raw_args)
        args.workflow = None
    else:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(raw_args)

    # for reporting purposes
    sys.argv = ['%run'] + raw_args

    env.verbosity = args.verbosity
    if kernel and not isinstance(env.logger.handlers[0],
                                 NotebookLoggingHandler):
        env.logger.handlers = []
        levels = {
            0: logging.ERROR,
            1: logging.WARNING,
            2: logging.INFO,
            3: logging.DEBUG,
            4: logging.TRACE,
            None: logging.INFO
        }
        env.logger.addHandler(
            NotebookLoggingHandler(levels[env.verbosity],
                                   kernel,
                                   title=' '.join(sys.argv)))
    else:
        env.logger.handlers[0].setTitle(' '.join(sys.argv))

    dt = datetime.datetime.now().strftime('%m%d%y_%H%M')
    if args.__dag__ is None:
        args.__dag__ = f'workflow_{dt}.dot'
    elif args.__dag__ == '':
        args.__dag__ = None

    if args.__report__ is None:
        args.__report__ = f'workflow_{dt}.html'
    elif args.__report__ == '':
        args.__report__ = None

    if args.__remote__:
        from sos.utils import load_config_files
        cfg = load_config_files(args.__config__)
        env.sos_dict.set('CONFIG', cfg)

        # if executing on a remote host...
        from sos.hosts import Host
        host = Host(args.__remote__)
        #
        if script is None:
            if not code.strip():
                return
            script = os.path.join('.sos', '__interactive__.sos')
            with open(script, 'w') as s:
                s.write(code)

        # copy script to remote host...
        host.send_to_host(script)
        from sos.utils import remove_arg
        argv = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
        argv = remove_arg(argv, '-r')
        argv = remove_arg(argv, '-c')
        # execute the command on remote host
        try:
            with kernel.redirect_sos_io():
                ret = host._host_agent.run_command(['sos', 'run', script] +
                                                   argv,
                                                   wait_for_task=True,
                                                   realtime=True)
            if ret:
                kernel.send_response(
                    kernel.iopub_socket, 'stream',
                    dict(name='stderr',
                         text=f'remote execution of workflow exited with code {ret}'))
        except Exception as e:
            if kernel:
                kernel.send_response(kernel.iopub_socket, 'stream', {
                    'name': 'stdout',
                    'text': str(e)
                })
        return

    if args.__bin_dirs__:
        for d in args.__bin_dirs__:
            if d == '~/.sos/bin' and not os.path.isdir(os.path.expanduser(d)):
                os.makedirs(os.path.expanduser(d), exist_ok=True)
        os.environ['PATH'] = os.pathsep.join(
            [os.path.expanduser(x)
             for x in args.__bin_dirs__]) + os.pathsep + os.environ['PATH']

    # clear __step_input__, __step_output__ etc because there is
    # no concept of passing input/outputs across cells.
    env.sos_dict.set('__step_output__', sos_targets([]))
    for k in [
            '__step_input__', '__default_output__', 'step_input',
            'step_output', 'step_depends', '_input', '_output', '_depends'
    ]:
        env.sos_dict.pop(k, None)

    try:
        if script is None:
            if not code.strip():
                return
            if kernel is None:
                script = SoS_Script(content=code)
            else:
                if kernel._workflow_mode:
                    # in workflow mode, the content is sent by magics %run and %sosrun
                    script = SoS_Script(content=code)
                else:
                    # this is a scratch step...
                    # if there is no section header, add a header so that the block
                    # appears to be a SoS script with one section
                    if not any([
                            SOS_SECTION_HEADER.match(line)
                            or line.startswith('%from')
                            or line.startswith('%include')
                            for line in code.splitlines()
                    ]):
                        code = '[scratch_0]\n' + code
                        script = SoS_Script(content=code)
                    else:
                        #kernel.send_frontend_msg('stream',
                        #                         {'name': 'stdout', 'text': 'Workflow cell can only be executed with magic %run or %sosrun.'},
                        #                         title='# SoS warning')
                        return
        else:
            script = SoS_Script(filename=script)
        workflow = script.workflow(args.workflow,
                                   use_default=not args.__targets__)
        env.config: DefaultDict[str, Union[None, bool, str]] = defaultdict(str)
        executor = Interactive_Executor(
            workflow,
            args=workflow_args,
            config={
                'config_file': args.__config__,
                'output_dag': args.__dag__,
                'output_report': args.__report__,
                'sig_mode': 'ignore' if args.dryrun else args.__sig_mode__,
                'default_queue': '' if args.__queue__ is None else args.__queue__,
                # wait if -w or in dryrun mode, do not wait if -W,
                # otherwise use the queue default
                'wait_for_task': True if args.__wait__ is True or args.dryrun else
                                 (False if args.__no_wait__ else None),
                'resume_mode': kernel is not None and kernel._resume_execution,
                'run_mode': 'dryrun' if args.dryrun else 'interactive',
                'verbosity': args.verbosity,
                'max_procs': args.__max_procs__,
                'max_running_jobs': args.__max_running_jobs__,
                # for information and resume only
                'workdir': os.getcwd(),
                'script': "interactive",
                'workflow': args.workflow,
                'targets': args.__targets__,
                'bin_dirs': args.__bin_dirs__,
                'workflow_args': workflow_args
            })
        return executor.run(args.__targets__)['__last_res__']
    except PendingTasks:
        raise
    except SystemExit:
        # this happens when the executor is in resume mode but nothing
        # needs to be resumed, so we simply return
        return
    except Exception:
        if args.verbosity and args.verbosity > 2:
            sys.stderr.write(get_traceback())
        raise
    finally:
        env.config['sig_mode'] = 'ignore'
        env.verbosity = 2
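
The long comment at the top of runfile describes argparse behaviour rather than anything SoS-specific. The snippet below is a minimal, standalone illustration of it, assuming (as the code suggests) that get_run_parser(interactive=True, with_workflow=True) defines workflow as an optional positional argument; it is not part of the SoS code above.

import argparse

# Stand-in for get_run_parser(interactive=True, with_workflow=True): the real
# parser is assumed to define an optional positional "workflow" argument.
with_workflow = argparse.ArgumentParser()
with_workflow.add_argument('workflow', nargs='?')

args, unknown = with_workflow.parse_known_args(['--rep', '3'])
print(args.workflow, unknown)   # 3 ['--rep'] -- the value meant for --rep is
                                # swallowed by the workflow positional

# Stand-in for get_run_parser(interactive=True, with_workflow=False): with no
# positional defined, the unknown option keeps its value.
without_workflow = argparse.ArgumentParser()
args, unknown = without_workflow.parse_known_args(['--rep', '3'])
print(getattr(args, 'workflow', None), unknown)   # None ['--rep', '3']

This is why runfile switches to the with_workflow=False parser whenever the first token of raw_args starts with '-'.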
Example #2
def runfile(script=None,
            raw_args='',
            wdir='.',
            code=None,
            kernel=None,
            **kwargs):
    # This is related to how parse_known_args handles unknown options when
    # the parser also defines an optional "workflow" positional argument.
    # That is to say,
    #
    #   --rep 3
    #
    # would be parsed as
    #
    #   args.workflow='3', unknown=['--rep']
    #
    # instead of
    #
    #   args.workflow=None, unknown=['--rep', '3']
    #
    # so we have to switch to a parser without the workflow positional when
    # no workflow name is given (see the demonstration after Example #1).
    args = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
    if (script is None and code is None) or '-h' in args:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.print_help()
        return
    if args and args[0].lstrip().startswith('-'):
        parser = get_run_parser(interactive=True, with_workflow=False)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(args)
        args.workflow = None
    else:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(args)

    # no multi-processing in interactive mode
    env.max_jobs = 1
    env.verbosity = args.verbosity

    if args.__queue__ == '':
        from sos.hosts import list_queues
        list_queues(args.__config__, args.verbosity)
        return

    if args.__remote__ is not None:
        from sos.utils import load_config_files
        cfg = load_config_files(args.__config__)
        env.sos_dict.set('CONFIG', cfg)
        # -r without a host name lists the configured queues
        if args.__remote__ == '':
            from sos.hosts import list_queues
            list_queues(cfg, args.verbosity)
            return

        # if executing on a remote host...
        from sos.hosts import Host
        host = Host(args.__remote__)
        #
        if script is None:
            if not code.strip():
                return
            script = os.path.join('.sos', '__interactive__.sos')
            with open(script, 'w') as s:
                s.write(code)

        # copy script to remote host...
        host.send_to_host(script)
        from sos.utils import remove_arg
        argv = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
        argv = remove_arg(argv, '-r')
        argv = remove_arg(argv, '-c')
        # execute the command on remote host
        try:
            with kernel.redirect_sos_io():
                ret = host._host_agent.run_command(['sos', 'run', script] +
                                                   argv,
                                                   wait_for_task=True,
                                                   realtime=True)
            if ret:
                kernel.send_response(
                    kernel.iopub_socket, 'stream',
                    dict(name='stderr',
                         text=f'remote execution of workflow exited with code {ret}'))
        except Exception as e:
            if kernel:
                kernel.send_response(kernel.iopub_socket, 'stream', {
                    'name': 'stdout',
                    'text': str(e)
                })
        return

    if args.__bin_dirs__:
        import fasteners
        for d in args.__bin_dirs__:
            if d == '~/.sos/bin' and not os.path.isdir(os.path.expanduser(d)):
                with fasteners.InterProcessLock(
                        os.path.join(tempfile.gettempdir(), 'sos_lock_bin')):
                    os.makedirs(os.path.expanduser(d))
            elif not os.path.isdir(os.path.expanduser(d)):
                raise ValueError(f'directory does not exist: {d}')
        os.environ['PATH'] = os.pathsep.join(
            [os.path.expanduser(x)
             for x in args.__bin_dirs__]) + os.pathsep + os.environ['PATH']

    # clear __step_input__, __step_output__ etc because there is
    # no concept of passing input/outputs across cells.
    env.sos_dict.set('__step_output__', [])
    for k in ['__step_input__', '__default_output__', 'input', 'output',
              'depends', '_input', '_output', '_depends']:
        env.sos_dict.pop(k, None)

    try:
        if script is None:
            if not code.strip():
                return
            if kernel is None:
                script = SoS_Script(content=code)
            else:
                if kernel._workflow_mode:
                    # in workflow mode, the content is sent by magics %run and %sosrun
                    script = SoS_Script(content=code)
                else:
                    # this is a scratch step...
                    # if there is no section header, add a header so that the block
                    # appears to be a SoS script with one section
                    if not any([
                            SOS_SECTION_HEADER.match(line)
                            or line.startswith('%from')
                            or line.startswith('%include')
                            for line in code.splitlines()
                    ]):
                        code = '[scratch_0]\n' + code
                        script = SoS_Script(content=code)
                    else:
                        if kernel.cell_idx == -1:
                            kernel.send_frontend_msg(
                                'stream',
                                {'name': 'stdout',
                                 'text': 'Workflow can only be executed with magic %run or %sosrun.'})
                        return
        else:
            script = SoS_Script(filename=script)
        workflow = script.workflow(args.workflow)
        executor = Interactive_Executor(
            workflow,
            args=workflow_args,
            config={
                'config_file': args.__config__,
                'output_dag': args.__dag__,
                'sig_mode': args.__sig_mode__,
                'default_queue': '' if args.__queue__ is None else args.__queue__,
                # wait if -w or in dryrun mode, do not wait if -W,
                # otherwise use the queue default
                'wait_for_task': True if args.__wait__ is True or args.dryrun else
                                 (False if args.__no_wait__ else None),
                'resume_mode': kernel is not None and kernel._resume_execution,
                'run_mode': 'dryrun' if args.dryrun else 'interactive',
                'verbosity': args.verbosity,
                # no multi-processing in interactive mode
                'max_procs': 1,
                'max_running_jobs': args.__max_running_jobs__,
                # for information and resume only
                'workdir': os.getcwd(),
                'script': "interactive",
                'workflow': args.workflow,
                'targets': args.__targets__,
                'bin_dirs': args.__bin_dirs__,
                'workflow_args': workflow_args
            })
        return executor.run(args.__targets__)
    except PendingTasks:
        raise
    except SystemExit:
        # this happens when the executor is in resume mode but nothing
        # needs to be resumed, so we simply return
        return
    except Exception:
        if args.verbosity and args.verbosity > 2:
            sys.stderr.write(get_traceback())
        raise
    finally:
        env.config['sig_mode'] = 'ignore'
        env.verbosity = 2
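
In the remote-execution branch, both examples call sos.utils.remove_arg to strip -r and -c (and their values) from the argument list before replaying it on the remote host with `sos run`. The helper below is a hypothetical stand-in that illustrates the idea, assuming each option takes exactly one value; it is not the actual sos.utils.remove_arg implementation.

def drop_option(argv, flag):
    """Hypothetical illustration: return argv without `flag` and its single value."""
    out, skip = [], False
    for item in argv:
        if skip:
            skip = False            # this token was the value of the dropped flag
        elif item == flag:
            skip = True             # drop the flag and remember to drop its value
        else:
            out.append(item)
    return out

argv = ['-r', 'cluster', '-c', 'myconfig.yml', '--cutoff', '0.5']
argv = drop_option(argv, '-r')
argv = drop_option(argv, '-c')
print(argv)   # ['--cutoff', '0.5'] -- only the workflow arguments are replayed remotely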