Code Example #1
File: preview.py  Project: pgcudahy/sos
# This excerpt additionally relies on two helpers from sos.utils:
from sos.utils import env, dot_to_gif

def preview_dot(filename, kernel=None, style=None):
    data = dot_to_gif(filename,
                      warn=kernel.warn if kernel else env.logger.warning)
    # according to https://github.com/ipython/ipython/issues/10045
    # I have to use 'image/png' instead of 'image/gif' to get the gif displayed.
    return {"image/png": data}
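A minimal usage sketch, assuming a Jupyter session and a hypothetical Graphviz file "pipeline.dot"; display(..., raw=True) is the standard IPython call for rendering a raw MIME bundle like the one returned above:

from IPython.display import display

bundle = preview_dot("pipeline.dot")  # hypothetical input file
display(bundle, raw=True)             # frontend renders the image/png entry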
Code Example #2
# Module-level context assumed by this excerpt: os, glob and sys from the
# standard library, plus env, DSC_CACHE, plain_remove and remove from the
# enclosing DSC module.
def execute(args, unknown_args):
    if args.to_remove:
        if args.target is None and args.to_remove not in ('obsolete', 'all'):
            raise ValueError("``-d`` must be specified with ``--target``.")
        rm_objects = args.target
        args.target = None
    if args.target:
        env.logger.info("Load command line DSC sequence: ``{}``".\
                        format(' '.join(', '.join(args.target).split())))
    # Import packages
    import platform
    from .utils import workflow2html, dsc2html, transcript2html
    from sos import execute_workflow
    from .dsc_parser import DSC_Script, DSC_Pipeline, remote_config_parser
    from .dsc_translator import DSC_Translator
    # Parse DSC script
    script = DSC_Script(args.dsc_file,
                        output=args.output,
                        sequence=args.target,
                        global_params=unknown_args,
                        truncate=args.truncate,
                        replicate=1 if args.truncate else args.replicate)
    script.init_dsc(env)
    pipeline_obj = DSC_Pipeline(script).pipelines
    # Apply clean-up
    if args.to_remove:
        if args.to_remove == 'all':
            plain_remove(script.runtime.output)
        else:
            remove(pipeline_obj, {
                **script.runtime.concats,
                **script.runtime.groups
            }, rm_objects, script.runtime.output, args.to_remove == 'obsolete')
        return
    db = os.path.basename(script.runtime.output)
    # Archive scripts
    lib_content = [(f"From <code>{k}</code>", sorted(glob.glob(f"{k}/*.*")))
                   for k in script.runtime.options['lib_path'] or []]
    exec_content = [(k, script.modules[k].exe)
                    for k in script.runtime.sequence_ordering]
    dsc2html('\n'.join(script.transcript), script.runtime.output,
             script.runtime.sequence, exec_content, lib_content,
             script.print_help(to_html=True))
    env.logger.info(f"DSC script exported to ``{script.runtime.output}.html``")
    if args.debug:
        workflow2html(f'{DSC_CACHE}/{db}_workflow.html', pipeline_obj,
                      list(script.dump().values()))
    # Resolve executable paths
    # FIXME: args.host is always assumed to be a Linux machine; this is never checked
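    # Build candidate tool directories: prefer a 'mac' or 'linux'
    # subdirectory of each configured exec_path ('mac' only when running
    # locally on Darwin), then fall back to the bare paths, and keep only
    # directories that actually exist.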
    exec_path = [
        os.path.join(
            k, 'mac' if platform.system() == 'Darwin' and args.host is None
            else 'linux') for k in (script.runtime.options['exec_path'] or [])
    ] + (script.runtime.options['exec_path'] or [])
    exec_path = [x for x in exec_path if os.path.isdir(x)]
    # Generate remote job configuration settings
    if args.host:
        conf = remote_config_parser(args.host, exec_path)
        conf_tpl = {'localhost': 'localhost', 'hosts': conf['DSC']}
    else:
        conf = conf_tpl = dict()
    # Obtain pipeline scripts
    pipeline = DSC_Translator(
        pipeline_obj, script.runtime, args.__construct__ == "none",
        args.__max_jobs__, False,
        None if len(conf) == 0 else
        {k: v for k, v in conf.items() if k != 'DSC'},
        args.debug and args.verbosity == 0)
    # Generate DSC meta databases
    env.logger.info(f"Constructing DSC from ``{args.dsc_file}`` ...")
    script_prepare = pipeline.get_pipeline("prepare", args.debug)
    settings = {
        'sig_mode': 'default',
        'workflow_vars': {
            '__bin_dirs__': exec_path
        },
        'max_running_jobs': None if args.host else args.__max_jobs__,
        'worker_procs': args.__max_jobs__,
    }
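    # args.__construct__ selects an SoS signature mode below: 'force'
    # ignores saved signatures and reruns everything, 'build' re-creates
    # signatures from existing outputs without rerunning, and 'skip'
    # trusts existing outputs as-is.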
    if args.__construct__ == "none":
        settings['sig_mode'] = "force"
    # Get mapped IO database
    settings['verbosity'] = args.verbosity if args.debug else 0
    status = execute_workflow(script_prepare,
                              workflow='deploy',
                              options=settings)
    env.verbosity = args.verbosity
    if args.__construct__ == "existing":
        settings['sig_mode'] = "build"
    if args.__construct__ == "lenient":
        settings['sig_mode'] = "skip"
    # Get DSC meta database
    env.logger.info("Building DSC database ...")
    status = execute_workflow(script_prepare,
                              workflow='build',
                              options=settings)
    if args.__construct__ == "all":
        return
    # Get the executed pipeline
    pipeline.filter_execution(args.debug)
    script_run = pipeline.get_pipeline("run", args.debug)
    if args.debug:
        if args.host:
            import yaml
            with open(f'{DSC_CACHE}/{db}_remote_config.yml', 'w') as cfg:
                yaml.safe_dump(conf_tpl, cfg, default_flow_style=False)
        return
    env.logger.debug(f"Running command ``{' '.join(sys.argv)}``")
    env.logger.info(f"Building execution graph & running DSC ...")
    try:
        settings['verbosity'] = args.verbosity if args.host else max(
            0, args.verbosity - 1)
        settings['output_dag'] = f'{db}.dot' if args.__dag__ else None
        status = execute_workflow(script_run,
                                  workflow='DSC',
                                  options=settings,
                                  config=conf_tpl)
        env.verbosity = args.verbosity
    except Exception:
        if args.host is None:
            transcript2html('.sos/transcript.txt',
                            f'{db}.scripts.html',
                            title=db)
            env.logger.warning(f"Please examine ``stderr`` files below and/or run commands ``in green`` to reproduce" \
                               "the errors;\nadditional scripts upstream of the error can be found in " \
                               f"``{db}.scripts.html``.\n" + '=' * 75)
        raise  # re-raise with the original traceback intact
    # Plot DAG
    if args.__dag__:
        from sos.utils import dot_to_gif
        try:
            env.logger.info(
                "Generating DAG animation for the benchmark (may take a while; can be interrupted if no longer wanted) ..."
            )
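            # dot_to_gif returns the animation as a base64 string; it is
            # embedded below as an image/png data URI (see Code Example #1
            # for why 'png' is used for GIF data).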
            dag = dot_to_gif(f"{db}.dot")
            with open(f'{db}_DAG.html', 'w') as f:
                f.write(
                    f'<img class="dag-image" src="data:image/png;base64,{dag}">'
                )
            env.logger.info(f"Execution graph saved to ``{db}_DAG.html``")
        except Exception as e:
            env.logger.warning(f'Failed to generate execution graph: {e}')
    env.logger.info("DSC complete!")
Code Example #3
# Module-level context assumed by this excerpt: os and sys from the standard
# library, plus env, DSC_CACHE, plain_remove and remove from the enclosing
# DSC module.
def execute(args, unknown_args):
    if args.to_remove:
        if args.target is None and args.to_remove not in ('obsolete', 'all'):
            raise ValueError("``-d`` must be specified with ``--target``.")
        rm_objects = args.target
        args.target = None
    if args.target:
        env.logger.info("Load command line DSC sequence: ``{}``".\
                        format(' '.join(', '.join(args.target).split())))
    # Import packages
    from .utils import workflow2html, transcript2html
    from sos import execute_workflow
    from .dsc_parser import DSC_Script, DSC_Pipeline, remote_config_parser
    from .dsc_translator import DSC_Translator
    # Parse DSC script
    script = DSC_Script(args.dsc_file,
                        output=args.output,
                        sequence=args.target,
                        global_params=unknown_args,
                        truncate=args.truncate,
                        replicate=1 if args.truncate else args.replicate,
                        host=args.host,
                        debug=args.debug)
    script.init_dsc(env)
    pipeline_obj = DSC_Pipeline(script).pipelines
    db = os.path.basename(script.runtime.output)
    # Apply clean-up
    if args.to_remove:
        if args.to_remove == 'all':
            plain_remove(script.runtime.output)
        else:
            remove(pipeline_obj, {
                **script.runtime.concats,
                **script.runtime.groups
            }, rm_objects, script.runtime.output, args.to_remove == 'obsolete')
        return
    # Archive scripts
    script.to_html()
    env.logger.info(f"DSC script exported to ``{script.runtime.output}.html``")
    if args.debug:
        workflow2html(f'{DSC_CACHE}/{db}_workflow.html', pipeline_obj,
                      list(script.dump().values()))
    # Generate remote job configuration settings
    if args.host:
        conf = remote_config_parser(args.host)
        conf_tpl = {'localhost': 'localhost', 'hosts': conf['DSC']}
    else:
        conf = conf_tpl = dict()
    # Obtain pipeline scripts
    pipeline = DSC_Translator(
        pipeline_obj, script.runtime, args.__construct__ == "none",
        args.__max_jobs__, False,
        None if len(conf) == 0 else
        {k: v for k, v in conf.items() if k != 'DSC'},
        args.debug and args.verbosity == 0)
    # Generate DSC meta databases
    env.logger.info(f"Constructing DSC from ``{args.dsc_file}`` ...")
    script_prepare = pipeline.get_pipeline("prepare", args.debug)
    settings = {
        'sig_mode': 'default',
        'error_mode': 'default',
        'max_running_jobs': None if args.host else args.__max_jobs__,
        'worker_procs': args.__max_jobs__,
    }
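    # 'error_mode' (new in this variant) tells SoS how to react when a step
    # fails; it is overridden from args.error_mode further below unless the
    # special 'ignore-safe' value is requested.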
    if args.__construct__ == "none":
        settings['sig_mode'] = "force"
    # Get mapped IO database
    settings['verbosity'] = args.verbosity if args.debug else 1
    status = execute_workflow(script_prepare,
                              workflow='deploy',
                              options=settings)
    env.verbosity = args.verbosity
    if args.__construct__ == "existing":
        settings['sig_mode'] = "build"
    if args.__construct__ == "lenient":
        settings['sig_mode'] = "skip"
    # Get DSC meta database
    env.logger.info("Building DSC database ...")
    status = execute_workflow(script_prepare,
                              workflow='build',
                              options=settings)
    if args.__construct__ == "all":
        return
    # Get the executed pipeline
    pipeline.filter_execution(args.debug)
    script_run = pipeline.get_pipeline("run", args.debug)
    if args.debug:
        if args.host:
            import yaml
            with open(f'{DSC_CACHE}/{db}_remote_config.yml', 'w') as cfg:
                yaml.safe_dump(conf_tpl, cfg, default_flow_style=False)
        return
    env.logger.debug(f"Running command ``{' '.join(sys.argv)}``")
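    # Remove any stale transcript so that, on failure, the generated scripts
    # report (see the except clause below) reflects this run only.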
    if os.path.isfile(f'{env.exec_dir}/transcript.txt'):
        os.remove(f'{env.exec_dir}/transcript.txt')
    env.logger.info(f"Building execution graph & running DSC ...")
    # making verbosity level consistent with before SoS version 0.21.2
    verbosity_map = {0: 1, 1: 0, 2: 2 if args.host else 0, 3: 3, 4: 4}
    try:
        if not args.error_mode == 'ignore-safe':
            settings['error_mode'] = args.error_mode
        settings['verbosity'] = verbosity_map[args.verbosity]
        settings['output_dag'] = f'{db}.dot' if args.__dag__ else None
        status = execute_workflow(script_run,
                                  workflow='DSC',
                                  options=settings,
                                  config=conf_tpl)
        env.verbosity = args.verbosity
    except Exception:
        if args.host is None:
            transcript2html(f'{env.exec_dir}/transcript.txt',
                            f'{db}.scripts.html',
                            title=db)
            env.logger.error(f"Please examine ``stderr`` files below and/or run commands ``in green`` to reproduce " \
                               "the errors;\nadditional scripts upstream of the error can be found in " \
                               f"``{db}.scripts.html``.\n" + '=' * 75)
        raise  # re-raise with the original traceback intact
    # Plot DAG
    if args.__dag__:
        from sos.utils import dot_to_gif
        try:
            env.logger.info(
                "Generating DAG animation for the benchmark (may take a while; can be interrupted if no longer wanted) ..."
            )
            dag = dot_to_gif(f"{db}.dot")
            with open(f'{db}_DAG.html', 'w') as f:
                f.write(
                    f'<img class="dag-image" src="data:image/png;base64,{dag}">'
                )
            env.logger.info(f"Execution graph saved to ``{db}_DAG.html``")
        except Exception as e:
            env.logger.warning(f'Failed to generate execution graph: {e}')
    env.logger.info("DSC complete!")