Example #1
def job_resource_create_and_setup_logs(job_root_dir, bg, task_opts, workflow_level_opts, ep_d):
    """
    Create job resource dirs and set up log handlers

    :type job_root_dir: str
    :type bg: BindingsGraph
    :type task_opts: dict
    :type workflow_level_opts: WorkflowLevelOptions
    :type ep_d: dict
    """

    job_resources = to_job_resources_and_create_dirs(job_root_dir)

    pb_log_path = os.path.join(job_resources.logs, 'pbsmrtpipe.log')
    master_log_path = os.path.join(job_resources.logs, "master.log")
    master_log_level = logging.INFO
    stdout_level = logging.INFO
    if workflow_level_opts.debug_mode:
        master_log_level = logging.DEBUG
        stdout_level = logging.DEBUG

    setup_internal_logs(master_log_path, master_log_level, pb_log_path, stdout_level)

    log.info("Starting pbsmrtpipe v{v}".format(v=pbsmrtpipe.get_version()))
    log.info("\n" + _log_pbsmrptipe_header())

    BU.write_binding_graph_images(bg, job_resources.workflow)

    write_entry_points_json(job_resources.entry_points_json, ep_d)

    # Need to map entry points to a FileType and store in the DataStore? or
    # does DataStore only represent outputs?
    smrtpipe_log_df = DataStoreFile(str(uuid.uuid4()), "pbsmrtpipe::pbsmrtpipe.log", FileTypes.LOG.file_type_id, pb_log_path)
    master_log_df = DataStoreFile(str(uuid.uuid4()), "pbsmrtpipe::master.log", FileTypes.LOG.file_type_id, master_log_path)
    ds = write_and_initialize_data_store_json(job_resources.datastore_json, [smrtpipe_log_df, master_log_df])
    slog.info("successfully initialized datastore.")

    write_workflow_settings(workflow_level_opts, os.path.join(job_resources.workflow, 'options-workflow.json'))
    log.info("Workflow Options:")
    log.info(pprint.pformat(workflow_level_opts.to_dict(), indent=4))

    task_opts_path = os.path.join(job_resources.workflow, 'options-task.json')
    with open(task_opts_path, 'w') as f:
        f.write(json.dumps(task_opts, sort_keys=True, indent=4))

    env_path = os.path.join(job_resources.workflow, '.env.json')
    IO.write_env_to_json(env_path)

    try:
        sa_system, sa_components = IO.get_smrtanalysis_system_and_components_from_env()
        log.info(sa_system)
        for c in sa_components:
            log.info(c)
    except Exception:
        # Intentionally swallow any exception; the SMRT Analysis version is
        # informational only and must not fail job setup.
        log.warning("unable to determine SMRT Analysis version.")

    slog.info("completed setting up job directory resources and logs in {r}".format(r=job_root_dir))
    return job_resources, ds
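
The call to setup_internal_logs above is pbsmrtpipe-internal and its body is not shown in these examples. As a rough stdlib-only analogue of the configuration it implies (a master log file at the computed level, an INFO-level pbsmrtpipe.log, and a stdout handler), one might sketch:

import logging
import sys

def setup_logs_sketch(master_log_path, master_log_level, pb_log_path, stdout_level):
    # Illustrative analogue only; the real setup_internal_logs may differ.
    fmt = logging.Formatter("[%(levelname)s] %(asctime)s %(name)s %(message)s")
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)  # emit everything; handlers filter by level

    master_handler = logging.FileHandler(master_log_path)
    master_handler.setLevel(master_log_level)

    pb_handler = logging.FileHandler(pb_log_path)
    pb_handler.setLevel(logging.INFO)

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(stdout_level)

    for h in (master_handler, pb_handler, stdout_handler):
        h.setFormatter(fmt)
        root.addHandler(h)

In debug mode, the example above would then amount to setup_logs_sketch(master_log_path, logging.DEBUG, pb_log_path, logging.DEBUG).
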
Example #2
def job_resource_create_and_setup_logs(job_root_dir, bg, task_opts, workflow_level_opts, ep_d):
    """
    Create job resource dirs and set up log handlers

    :type job_root_dir: str
    :type bg: BindingsGraph
    :type task_opts: dict
    :type workflow_level_opts: WorkflowLevelOptions
    :type ep_d: dict
    """

    job_resources = to_job_resources_and_create_dirs(job_root_dir)

    pb_log_path = os.path.join(job_resources.logs, 'pbsmrtpipe.log')
    master_log_path = os.path.join(job_resources.logs, "master.log")
    master_log_level = logging.INFO
    stdout_level = logging.INFO
    if workflow_level_opts.debug_mode:
        master_log_level = logging.DEBUG
        stdout_level = logging.DEBUG

    setup_internal_logs(master_log_path, master_log_level, pb_log_path, stdout_level)

    log.info("Starting pbsmrtpipe v{v}".format(v=pbsmrtpipe.get_version()))
    log.info("\n" + _log_pbsmrptipe_header())

    BU.write_binding_graph_images(bg, job_resources.workflow)

    write_entry_points_json(job_resources.entry_points_json, ep_d)

    # Need to map entry points to a FileType
    ds = write_and_initialize_data_store_json(job_resources.datastore_json, [])
    slog.info("successfully initialized datastore.")

    write_workflow_settings(workflow_level_opts, os.path.join(job_resources.workflow, 'options-workflow.json'))
    log.info("Workflow Options:")
    log.info(pprint.pformat(workflow_level_opts.to_dict(), indent=4))

    task_opts_path = os.path.join(job_resources.workflow, 'options-task.json')
    with open(task_opts_path, 'w') as f:
        f.write(json.dumps(task_opts, sort_keys=True, indent=4))

    env_path = os.path.join(job_resources.workflow, '.env.json')
    IO.write_env_to_json(env_path)

    try:
        sa_system, sa_components = IO.get_smrtanalysis_system_and_components_from_env()
        log.info(sa_system)
        for c in sa_components:
            log.info(c)
    except Exception:
        # Intentionally swallow any exception; the SMRT Analysis version is
        # informational only and must not fail job setup.
        log.warning("unable to determine SMRT Analysis version.")

    slog.info("completed setting up job directory resources and logs in {r}".format(r=job_root_dir))
    return job_resources, ds
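
This variant seeds the datastore with an empty file list instead of registering the log files. write_and_initialize_data_store_json is also pbsmrtpipe-internal; a hypothetical sketch of writing such a skeleton follows, with the caveat that the real datastore schema is defined by pbsmrtpipe and the field names here are guesses:

import json
import uuid
from datetime import datetime

def write_datastore_skeleton(path, files=None):
    # Hypothetical layout; pbsmrtpipe's actual datastore JSON schema may differ.
    d = {
        "uniqueId": str(uuid.uuid4()),
        "createdAt": datetime.utcnow().isoformat(),
        "files": files if files is not None else [],
    }
    with open(path, "w") as f:
        f.write(json.dumps(d, sort_keys=True, indent=4))
    return d
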
Example #3
def job_resource_create_and_setup_logs(job_root_dir, bg, task_opts,
                                       workflow_level_opts, ep_d):
    """
    Create job resource dirs and set up log handlers

    :type job_root_dir: str
    :type bg: BindingsGraph
    :type task_opts: dict
    :type workflow_level_opts: WorkflowLevelOptions
    :type ep_d: dict
    """

    job_resources = to_job_resources_and_create_dirs(job_root_dir)

    pb_log_path = os.path.join(job_resources.logs, 'pbsmrtpipe.log')
    master_log_path = os.path.join(job_resources.logs, "master.log")
    master_log_level = logging.INFO
    stdout_level = logging.INFO
    if workflow_level_opts.debug_mode:
        master_log_level = logging.DEBUG
        stdout_level = logging.DEBUG

    setup_internal_logs(master_log_path, master_log_level, pb_log_path,
                        stdout_level)

    log.info("Starting pbsmrtpipe v{v}".format(v=pbsmrtpipe.get_version()))
    log.info("\n" + _log_pbsmrptipe_header())

    BU.write_binding_graph_images(bg, job_resources.workflow)

    write_entry_points_json(job_resources.entry_points_json, ep_d)

    # Need to map entry points to a FileType and store in the DataStore? or
    # does DataStore only represent outputs?
    smrtpipe_log_df = DataStoreFile(str(uuid.uuid4()),
                                    "pbsmrtpipe::pbsmrtpipe.log",
                                    FileTypes.LOG.file_type_id,
                                    pb_log_path,
                                    name="Analysis Log",
                                    description="pbsmrtpipe log")
    master_log_df = DataStoreFile(str(uuid.uuid4()),
                                  "pbsmrtpipe::master.log",
                                  FileTypes.LOG.file_type_id,
                                  master_log_path,
                                  name="Master Log",
                                  description="Master log")
    ds = write_and_initialize_data_store_json(job_resources.datastore_json,
                                              [smrtpipe_log_df, master_log_df])
    slog.info("successfully initialized datastore.")

    write_workflow_settings(
        workflow_level_opts,
        os.path.join(job_resources.workflow, 'options-workflow.json'))
    if workflow_level_opts.system_message is not None:
        slog.info("Command: {m}".format(m=workflow_level_opts.system_message))

    slog.info("Entry Points:")
    slog.info("\n" + pprint.pformat(ep_d, indent=4))

    slog.info("Workflow Options:")
    slog.info("\n" + pprint.pformat(workflow_level_opts.to_dict(), indent=4))

    slog.info("Task Options:")
    slog.info("\n" + pprint.pformat(task_opts, indent=4))

    task_opts_path = os.path.join(job_resources.workflow, 'options-task.json')
    with open(task_opts_path, 'w') as f:
        f.write(json.dumps(task_opts, sort_keys=True, indent=4))

    env_path = os.path.join(job_resources.workflow, '.env.json')
    IO.write_env_to_json(env_path)
    log.info("wrote current env to {e}".format(e=env_path))

    try:
        sa_system, sa_components = IO.get_smrtanalysis_system_and_components_from_env()
        log.info(sa_system)
        for c in sa_components:
            log.info(c)
    except Exception:
        # Intentionally swallow any exception; the SMRT Analysis version is
        # informational only and must not fail job setup.
        log.warning("unable to determine SMRT Analysis version.")

    slog.info(
        "completed setting up job directory resources and logs in {r}".format(
            r=job_root_dir))
    return job_resources, ds, master_log_df
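
Relative to the earlier variants, this one passes name and description keywords to DataStoreFile and also returns master_log_df to the caller. For readers without pbsmrtpipe installed, here is a small stand-in that mirrors just the arguments used at this call site (the real class lives in the pbsmrtpipe/pbcommand codebase and carries more behavior):

import uuid
from collections import namedtuple

# Stand-in mirroring the call above; field names beyond the call site are assumptions.
DataStoreFileSketch = namedtuple(
    "DataStoreFileSketch",
    ["uuid", "source_id", "file_type_id", "path", "name", "description"])

master_log_sketch = DataStoreFileSketch(
    str(uuid.uuid4()),
    "pbsmrtpipe::master.log",
    "PacBio.FileTypes.log",  # assumed value of FileTypes.LOG.file_type_id
    "/path/to/logs/master.log",
    "Master Log",
    "Master log")
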
Example #4
def job_resource_create_and_setup_logs(job_root_dir, bg, task_opts,
                                       workflow_level_opts, ep_d):
    """
    Create job resource dirs and set up log handlers

    :type job_root_dir: str
    :type bg: BindingsGraph
    :type task_opts: dict
    :type workflow_level_opts: WorkflowLevelOptions
    :type ep_d: dict
    """

    job_resources = to_job_resources_and_create_dirs(job_root_dir)

    pb_log_path = os.path.join(job_resources.logs, 'pbsmrtpipe.log')
    master_log_path = os.path.join(job_resources.logs, "master.log")
    master_log_level = logging.INFO
    stdout_level = logging.INFO
    if workflow_level_opts.debug_mode:
        master_log_level = logging.DEBUG
        stdout_level = logging.DEBUG

    setup_internal_logs(master_log_path, master_log_level, pb_log_path,
                        stdout_level)

    log.info("Starting pbsmrtpipe {v}".format(v=pbsmrtpipe.get_version()))
    log.info("\n" + _log_pbsmrptipe_header())

    BU.write_binding_graph_images(bg, job_resources.workflow)

    write_entry_points_json(job_resources.entry_points_json, ep_d)

    # Need to map entry points to a FileType and store in the DataStore? or
    # does DataStore only represent outputs?

    # For historical reasons, this is a bit non-obvious. The "master" log now lives at
    # the SMRT Link level, so we've promoted the pbsmrtpipe "master" log (i.e., master.log)
    # to be the Analysis Details Log, using the pbsmrtpipe::pbsmrtpipe.log source Id.
    # There's also a friction point of marketing using "Analysis" vs "pbsmrtpipe", which
    # has generated some inconsistency.
    smrtpipe_log_df = DataStoreFile(str(uuid.uuid4()),
                                    GlobalConstants.SOURCE_ID_INFO_LOG,
                                    FileTypes.LOG.file_type_id,
                                    pb_log_path,
                                    name="Analysis Log",
                                    description="pbsmrtpipe INFO log")
    master_log_df = DataStoreFile(str(uuid.uuid4()),
                                  GlobalConstants.SOURCE_ID_MASTER_LOG,
                                  FileTypes.LOG.file_type_id,
                                  master_log_path,
                                  name="Analysis Details Log",
                                  description="Analysis Details log")

    ds = write_and_initialize_data_store_json(job_resources.datastore_json,
                                              [smrtpipe_log_df, master_log_df])
    slog.info("successfully initialized datastore.")

    write_workflow_settings(
        workflow_level_opts,
        os.path.join(job_resources.workflow, 'options-workflow.json'))
    if workflow_level_opts.system_message is not None:
        slog.info("Command: {m}".format(m=workflow_level_opts.system_message))

    slog.info("Entry Points:")
    slog.info("\n" + pprint.pformat(ep_d, indent=4))

    slog.info("Workflow Options:")
    slog.info("\n" + pprint.pformat(workflow_level_opts.to_dict(), indent=4))

    slog.info("Task Options:")
    slog.info("\n" + pprint.pformat(task_opts, indent=4))

    task_opts_path = os.path.join(job_resources.workflow, 'options-task.json')
    with open(task_opts_path, 'w') as f:
        f.write(json.dumps(task_opts, sort_keys=True, indent=4))

    env_path = os.path.join(job_resources.workflow, '.env.json')
    IO.write_env_to_json(env_path)
    log.info("wrote current env to {e}".format(e=env_path))

    slog.info(
        "completed setting up job directory resources and logs in {r}".format(
            r=job_root_dir))
    return job_resources, ds, master_log_df
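
IO.write_env_to_json is never shown in these examples either. A minimal stdlib sketch in the same spirit (a hypothetical helper, not pbsmrtpipe's implementation) would simply snapshot the process environment:

import json
import os

def write_env_to_json_sketch(path):
    # Dump the current process environment as sorted, pretty-printed JSON.
    with open(path, "w") as f:
        f.write(json.dumps(dict(os.environ), sort_keys=True, indent=4))
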