def test_01(self):
    """Validate a single example pipeline template JSON file against the schema."""
    template_path = get_data_file('example_pipeline_template_01.json')
    with open(template_path, 'r') as f:
        # json.load reads directly from the file handle
        template_d = json.load(f)
    is_valid = validate_pipeline_template(template_d)
    self.assertTrue(is_valid, "pipeline template {p} is NOT valid.".format(p=template_path))
def test_all_sane(self):
    """Test that all pipelines are well defined.

    For every registered pipeline: convert it to a dict, validate it with the
    Avro schema, check binding-graph integrity and file-type compatibility,
    validate entry points, round-trip it through JSON and Avro on disk, and
    confirm the reloaded Avro yields a dict. Failures are collected (not
    raised immediately) so every pipeline is checked in one run; the test
    fails at the end if any errors accumulated.
    """
    errors = []
    rtasks, rfiles_types, chunk_operators, pipelines = L.load_all()
    for pipeline_id, pipeline in pipelines.items():
        emsg = "Pipeline {p} is not valid.".format(p=pipeline_id)
        log.debug("Checking Sanity of registered Pipeline {i}".format(
            i=pipeline_id))
        log.info(pipeline_id)
        log.debug(pipeline)
        try:
            # Validate with Avro
            d = pipeline_template_to_dict(pipeline, rtasks)
            _ = validate_pipeline_template(d)
            name = pipeline_id + "_pipeline_template.avro"
            output_file = get_temp_file(suffix=name)
            log.info(
                "{p} converted to avro successfully".format(p=pipeline_id))
            # Structural checks on the task binding graph
            bg = BU.binding_strs_to_binding_graph(rtasks, pipeline.all_bindings)
            BU.validate_binding_graph_integrity(bg)
            BU.validate_compatible_binding_file_types(bg)
            validate_entry_points(d)
            # pprint.pprint(d) # for debugging purposes
            output_json = output_file.replace(".avro", '.json')
            log.info("writing pipeline to {p}".format(p=output_json))
            with open(output_json, 'w') as j:
                j.write(json.dumps(d, sort_keys=True, indent=4))
            log.info(
                "writing pipeline template to {o}".format(o=output_file))
            # Test writing to avro if the pipeline is actually valid
            write_pipeline_template_to_avro(pipeline, rtasks, output_file)
            log.info("Pipeline {p} is valid.".format(p=pipeline_id))

            log.info("Loading avro {i} from {p}".format(i=pipeline_id,
                                                        p=output_file))
            pipeline_d = load_pipeline_template_from_avro(output_file)
            self.assertIsInstance(pipeline_d, dict)
        except Exception as e:
            # BUG FIX: Exception.message does not exist on Python 3 (and was
            # deprecated since 2.6); accessing it raised AttributeError and
            # masked the real validation failure. str(e) is portable.
            m = emsg + " Error: " + str(e)
            log.error(m)
            errors.append(m)
            log.error(emsg)
            log.error(e)

    msg = "\n".join(errors) if errors else ""
    self.assertEqual([], errors, msg)