def test_convert_to_d_and_load(self): import pbsmrtpipe.loader as L import pprint rtasks, rfile_types, chunk_operators, pipelines = L.load_all() for pipeline in pipelines.values(): pipeline_d = IO.pipeline_template_to_dict(pipeline, rtasks) #print "Raw Pipeline converted to dict" #print pprint.pformat(pipeline_d) pipeline_loaded = IO.load_pipeline_template_from(pipeline_d) #print "Pipeline task options loaded from dict" #print pprint.pformat(pipeline_loaded.task_options) self.assertEqual(pipeline.idx, pipeline_loaded.idx) self.assertEqual(pipeline.display_name, pipeline_loaded.display_name) self.assertEqual(len(pipeline.all_bindings), len(pipeline_loaded.all_bindings)) self.assertEqual(len(pipeline.entry_bindings), len(pipeline_loaded.entry_bindings)) # note that the internally registered pipeline does not necessarily # have any task_options at this point, so we can't simply test for # equality. however after another cycle they should be identical if len(pipeline.task_options) > 0: self.assertGreater(len(pipeline_loaded.task_options), 0) pipeline_d2 = IO.pipeline_template_to_dict( pipeline_loaded, rtasks) #print pprint.pformat(pipeline_d) pipeline_loaded2 = IO.load_pipeline_template_from(pipeline_d2) self.assertEqual(len(pipeline_loaded.task_options), len(pipeline_loaded2.task_options)) n = 1 for k, v in pipeline.task_options.iteritems(): if k in pipeline_loaded2.task_options: v2 = pipeline_loaded2.task_options[k] msg = "task option #{n} {k} expected '{v}' got '{x}'".format( k=k, v=v, x=v2, n=n) self.assertEqual(v, v2, msg) print "Valid " + msg n += 1
def test_convert_to_d_and_load(self):
    """Check that converting a pipeline to a dict and loading it back
    preserves the id, display name, and the number of bindings."""
    import pbsmrtpipe.loader as L
    rtasks, rfile_types, chunk_operators, pipelines = L.load_all()
    for original in pipelines.values():
        as_dict = IO.pipeline_template_to_dict(original, rtasks)
        restored = IO.load_pipeline_template_from(as_dict)
        self.assertEqual(original.idx, restored.idx)
        self.assertEqual(original.display_name, restored.display_name)
        self.assertEqual(len(original.all_bindings),
                         len(restored.all_bindings))
        self.assertEqual(len(original.entry_bindings),
                         len(restored.entry_bindings))
def test_convert_to_d_and_load(self):
    """Round-trip each registered pipeline via dict serialization and
    verify the id, display name, binding counts, and (after a second
    cycle) the task options are preserved."""
    import pbsmrtpipe.loader as L
    rtasks, rfile_types, chunk_operators, pipelines = L.load_all()
    for original in pipelines.values():
        as_dict = IO.pipeline_template_to_dict(original, rtasks)
        restored = IO.load_pipeline_template_from(as_dict)
        self.assertEqual(original.idx, restored.idx)
        self.assertEqual(original.display_name, restored.display_name)
        self.assertEqual(len(original.all_bindings),
                         len(restored.all_bindings))
        self.assertEqual(len(original.entry_bindings),
                         len(restored.entry_bindings))
        # note that the internally registered pipeline does not necessarily
        # have any task_options at this point, so we can't simply test for
        # equality. however after another cycle they should be identical
        if len(original.task_options) > 0:
            self.assertGreater(len(restored.task_options), 0)
            as_dict2 = IO.pipeline_template_to_dict(restored, rtasks)
            restored2 = IO.load_pipeline_template_from(as_dict2)
            self.assertEqual(len(restored.task_options),
                             len(restored2.task_options))
            for opt_id, opt_value in original.task_options.iteritems():
                if opt_id in restored2.task_options:
                    self.assertEqual(opt_value,
                                     restored2.task_options[opt_id])
def _load_resolved_pipeline_template_json_from_dir(dir_name):
    """Load every Resolved Pipeline Template ``*.json`` found in a directory.

    Files that fail to load are skipped with a warning naming the specific
    file; a missing directory is also warned about and yields an empty list.

    :param dir_name: directory to scan for ``.json`` template files
    :rtype: list[Pipeline]
    """
    import pbsmrtpipe.pb_io as IO
    pipelines = []
    if os.path.exists(dir_name):
        for file_name in os.listdir(dir_name):
            if file_name.endswith(".json"):
                path = os.path.join(dir_name, file_name)
                try:
                    pipelines.append(IO.load_pipeline_template_from(path))
                except Exception as e:
                    # report the exact file that failed, not just the directory
                    log.warn("Unable to load Resolved Pipeline Template from {}. {}".format(path, str(e)))
    else:
        log.warn("Unable to load Resolved Pipeline Template from {}. Path does not exist.".format(dir_name))
    return pipelines
def load_resolved_pipeline_template_jsons_from_dir(dir_name):
    """Load every Resolved Pipeline Template ``*.json`` found in a directory.

    Files that fail to load are skipped with a warning naming the specific
    file; a missing directory is also warned about and yields an empty list.

    :param dir_name: directory to scan for ``.json`` template files
    :rtype: list[Pipeline]
    """
    import pbsmrtpipe.pb_io as IO
    pipelines = []
    if os.path.exists(dir_name):
        for file_name in os.listdir(dir_name):
            if file_name.endswith(".json"):
                path = os.path.join(dir_name, file_name)
                try:
                    pipelines.append(IO.load_pipeline_template_from(path))
                except Exception as e:
                    # report the exact file that failed, not just the directory
                    log.warn("Unable to load Resolved Pipeline Template from {}. {}".format(path, str(e)))
    else:
        log.warn("Unable to load Resolved Pipeline Template from {}. Path does not exist.".format(dir_name))
    return pipelines
def test_load_pipeline_template_json(self):
    """Loading the example pipeline template JSON yields one task option."""
    json_path = os.path.join(TEST_DATA_DIR,
                             "example_pipeline_template_01.json")
    template = IO.load_pipeline_template_from(json_path)
    self.assertEqual(len(template.task_options), 1)