Example #1
import pbsmrtpipe.schema_opt_utils as OP


def _get_opts():
    oid = OP.to_opt_id('dev.hello_message')
    return {
        oid:
        OP.to_option_schema(oid, "string", 'Hello Message',
                            "Hello Message for dev Task.", "Default Message")
    }
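
The returned dict maps the fully qualified option id to its JSON schema. As a quick sanity check, the schemas can be fed straight to jsonschema, mirroring the Draft4Validator call in Example #5; the snippet below is a sketch that assumes _get_opts from the example above is in scope.

import jsonschema

# sketch: each schema returned by _get_opts() should be a valid Draft 4
# JSON schema whose 'properties' entry is keyed by the option id
for oid, schema in _get_opts().items():
    jsonschema.Draft4Validator.check_schema(schema)  # raises if the schema is malformed
    assert oid in schema['properties']
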
Example #2
import pbsmrtpipe.schema_opt_utils as OP


def _get_simple_opts():
    # Util func to create a task option id 'pbsmrtpipe.task_options.dev.hello_message'
    oid = OP.to_opt_id('dev.hello_message')
    return {
        oid:
        OP.to_option_schema(oid, "string", 'Hello Message',
                            "Hello Message for dev Task.", "Default Message")
    }
Example #3
import pbsmrtpipe.schema_opt_utils as OP


def _get_opts():
    oid = OP.to_opt_id('dev.hello_message')
    return {oid: OP.to_option_schema(oid, "string", 'Hello Message', "Hello Message for dev Task.", "Default Message")}
Example #4
import pbsmrtpipe.schema_opt_utils as OP


def _get_simple_opts():
    # Util func to create a task option id 'pbsmrtpipe.task_options.dev.hello_message'
    oid = OP.to_opt_id('dev.hello_message')
    return {oid: OP.to_option_schema(oid, "string", 'Hello Message', "Hello Message for dev Task.", "Default Message")}
Example #5
def register_workflow_option(func):
    # decorator: func returns a single-option schema that is added to the registry
    s = func()
    _ = jsonschema.Draft4Validator(s)
    oid = s['properties'].keys()[0]
    REGISTERED_WORKFLOW_OPTIONS[oid] = s

    return func


def register_validation_func(option_id):
    def wrapper(func):
        OPTION_VALIDATORS[option_id].append(func)
        return func

    return wrapper


@register_validation_func(OP.to_opt_id('tmp_dir'))
def validator(value):
    if os.path.isdir(value):
        return value
    raise ValueError(
        "Option id 'tmp_dir' invalid. Unable to find {v}".format(v=value))


@register_workflow_option
def _to_max_chunks_option():
    return OP.to_option_schema(
        _to_wopt_id("max_nchunks"), "integer", "Max Number of Chunks",
        "Max Number of chunks that a file will be scattered into",
        GlobalConstants.MAX_NCHUNKS)
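
The two registries above work together: REGISTERED_WORKFLOW_OPTIONS maps an option id to its schema, while OPTION_VALIDATORS maps an option id to a list of callables that return the value or raise ValueError. A hypothetical helper (not part of pbsmrtpipe) that runs a user-supplied value through every registered validator might look like this:

def validate_workflow_option_value(oid, value):
    # hypothetical helper: look up the option, then apply each registered
    # validator in turn; validators return the value or raise ValueError
    if oid not in REGISTERED_WORKFLOW_OPTIONS:
        raise KeyError("Unknown workflow option '{o}'".format(o=oid))
    for validation_func in OPTION_VALIDATORS.get(oid, []):
        value = validation_func(value)
    return value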

Example #6
import pbsmrtpipe.schema_opt_utils as OP
# generate a task option id
oid = OP.to_opt_id('dev.hello_message')
# generate a schema
s = OP.to_option_schema(oid, "string", 'Hello Message',
                        "Hello Message for dev Task.", "Default Message")
print({oid: s})
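
Example #5 feeds these same schemas to jsonschema.Draft4Validator, so the generated schema can also be used to check a candidate value. The snippet below is a sketch that assumes the schema is a standard Draft 4 object schema with a single property keyed by oid.

import jsonschema

# sketch: validate a candidate option value against the generated schema
validator = jsonschema.Draft4Validator(s)
validator.validate({oid: "a custom hello message"})
# a non-string value, e.g. {oid: 1234}, should raise jsonschema.ValidationError
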
Example #7
import logging

from pbsmrtpipe.schema_opt_utils import to_opt_id


log = logging.getLogger(__name__)

_FILTER_OPTS_NAMES = 'filter_trim filter_artifact_score use_subreads ' \
                     'filter_read_score filter_min_read_length ' \
                     'filter_max_read_length filter_min_subread_length ' \
                     'filter_max_subread_length ' \
                     'filter_whitelist filter_min_snr'.split()

_FILTER_OPTS = [to_opt_id(s) for s in _FILTER_OPTS_NAMES]
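
_FILTER_OPTS is just the list of fully qualified ids for the filter-related task options. A hypothetical use (not from pbsmrtpipe) is pulling those options out of a larger task-options dict:

def select_filter_opts(task_options):
    # hypothetical helper: keep only the filter-related options from a
    # dict keyed by fully qualified option ids
    return {k: v for k, v in task_options.items() if k in _FILTER_OPTS}
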
Example #8
def register_workflow_option(func):
    # decorator: func returns a single-option schema that is added to the registry
    s = func()
    _ = jsonschema.Draft4Validator(s)
    oid = s["properties"].keys()[0]
    REGISTERED_WORKFLOW_OPTIONS[oid] = s

    return func


def register_validation_func(option_id):
    def wrapper(func):
        OPTION_VALIDATORS[option_id].append(func)
        return func

    return wrapper


@register_validation_func(OP.to_opt_id("tmp_dir"))
def validator(value):
    if os.path.isdir(value):
        return value
    raise ValueError("Option id 'tmp_dir' invalid. Unable to find {v}".format(v=value))


@register_workflow_option
def _to_max_chunks_option():
    return OP.to_option_schema(
        _to_wopt_id("max_nchunks"),
        "integer",
        "Max Number of Chunks",
        "Max Number of chunks that a file will be scattered into",
        GlobalConstants.MAX_NCHUNKS,
    )
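
Registering an additional workflow-level option follows the same pattern as _to_max_chunks_option above. The example below is hypothetical (the display name, description, and default are made up), but it reuses the _to_wopt_id and OP.to_option_schema calls shown in this excerpt:

@register_workflow_option
def _to_tmp_dir_option():
    # hypothetical option; the call mirrors _to_max_chunks_option above
    return OP.to_option_schema(
        _to_wopt_id("tmp_dir"),
        "string",
        "Temp Dir",
        "Root directory for temporary task files",
        "/tmp",
    )
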
Example #9
def register_workflow_option(func):
    # decorator: func returns a single-option schema that is added to the registry
    s = func()
    _ = jsonschema.Draft4Validator(s)
    oid = s['properties'].keys()[0]
    REGISTERED_WORKFLOW_OPTIONS[oid] = s

    return func


def register_validation_func(option_id):
    def wrapper(func):
        OPTION_VALIDATORS[option_id].append(func)
        return func
    return wrapper


@register_validation_func(OP.to_opt_id('tmp_dir'))
def validator(value):
    if os.path.isdir(value):
        return value
    raise ValueError("Option id 'tmp_dir' invalid. Unable to find {v}".format(v=value))


@register_workflow_option
def _to_max_chunks_option():
    return OP.to_option_schema(_to_wopt_id("max_nchunks"), "integer", "Max Number of Chunks",
                               "Max Number of chunks that a file will be scattered into", GlobalConstants.MAX_NCHUNKS)


@register_workflow_option
def _to_max_nproc_option():
    return OP.to_option_schema(_to_wopt_id("max_nproc"), "integer",
Example #10
class TestTestDevSimpleTask(TestDevSimpleTaskDefaults):
    # Test a different set of options
    TASK_OPTIONS = {to_opt_id('dev.hello_message'): "Custom Message"}
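
Because to_opt_id prefixes the short name with the task-option namespace (see the comment in Example #2), the override above is roughly equivalent to the fully qualified form below.

# assuming the 'pbsmrtpipe.task_options.' namespace shown in Example #2
TASK_OPTIONS = {'pbsmrtpipe.task_options.dev.hello_message': "Custom Message"}
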
Example #11
class _TestBase(unittest.TestCase):
    NTASKS = 10
    NFILES = 12
    EPOINTS = 1
    PB_PIPELINE_ID = to_pipeline_ns("rs_fetch_1")
    # this will be created from EPOINTS_NAMES in class setup
    EPOINTS_D = {}
    EPOINTS_NAMES = {'eid_input_xml': "_entry_point.xml"}
    TASK_OPTIONS = {to_opt_id('filter_artifact_score'): -7,
                    to_opt_id('filter_max_read_length'): 10000,
                    to_opt_id('filter_min_read_length'): 1000}

    # this object could be created manually
    PRESET_XML = 'cli_preset_01.xml'

    @classmethod
    def setUpClass(cls):
        pipeline = REGISTERED_PIPELINES[cls.PB_PIPELINE_ID]
        log.debug(pipeline)

        cls.bindings = pipeline.all_bindings
        cls.EPOINTS_D = {k: get_temp_file(v) for k, v in cls.EPOINTS_NAMES.iteritems()}

        log.debug(pprint.pformat(cls.bindings, indent=4))
        log.debug("Number of registered tasks {n}".format(n=len(REGISTERED_TASKS)))

        cls.bgraph = B.binding_strs_to_binding_graph(REGISTERED_TASKS, cls.bindings)
        d = os.path.expanduser('~/scratch/tmp_pbsmrtpipe') if getpass.getuser() == 'mkocher' else None
        cls.output_dir = tempfile.mkdtemp(prefix='job_test_', dir=d)

        preset_record = IO.parse_pipeline_preset_xml(os.path.join(TEST_DATA_DIR, cls.PRESET_XML))
        cls.workflow_options = preset_record.to_workflow_level_opt()

        # leave this for now
        cls.envs = []
        cls.cluster_engine = C.load_installed_cluster_templates_by_name("sge")

    @classmethod
    def tearDownClass(cls):
        if not DEEP_DEBUG:
            if hasattr(cls, 'output_dir'):
                if os.path.exists(cls.output_dir):
                    shutil.rmtree(cls.output_dir)

    def test_validate_bindings_graph(self):
        emsg = "Invalid workflow with id '{x}'".format(x=self.PB_PIPELINE_ID)
        self.assertTrue(B.validate_binding_graph_integrity(self.bgraph), emsg)

    def test_number_of_entry_point_nodes(self):
        """Basic running test"""
        n = len(self.bgraph.entry_point_nodes())
        self.assertEqual(self.EPOINTS, n)

    def test_number_of_tasks(self):
        n = len(self.bgraph.task_nodes())
        self.assertEqual(self.NTASKS, n)

    def test_is_validate_binding_graph(self):
        self.assertTrue(B.validate_binding_graph_integrity(self.bgraph))

    def test_is_validate_binding_types(self):
        self.assertTrue(B.validate_compatible_binding_file_types(self.bgraph))

    def test_n_files(self):
        self.assertEqual(self.NFILES, len(self.bgraph.file_nodes()))

    @unittest.skip("mock runner test disabled")
    def test_mock_runner(self):
        B.resolve_entry_points(self.bgraph, self.EPOINTS_D)
        state = M.mock_workflow_runner(self.bgraph, {},
                                       self.output_dir,
                                       self.workflow_options,
                                       self.TASK_OPTIONS,
                                       REGISTERED_FILE_TYPES,
                                       self.cluster_engine, self.envs)

        _ = B.get_tasks_by_state(self.bgraph, B.TaskStates.SUCCESSFUL)

        if state is False:
            log.debug(B.to_binding_graph_summary(self.bgraph))

        self.assertTrue(state)
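
New test cases can reuse this fixture the same way Example #10 does, by subclassing and overriding the class-level constants. The subclass below is hypothetical and only illustrates the pattern:

class TestFetchWithStrictFilters(_TestBase):
    # hypothetical subclass: same pipeline and preset, different filter values
    TASK_OPTIONS = {to_opt_id('filter_min_read_length'): 5000,
                    to_opt_id('filter_max_read_length'): 50000}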