Example #1
0
    def test_serialization(self):
        """Round-trip several example workflows through Helper.serialize /
        Helper.unserialize and assert that attributes survive the trip."""
        local_examples = workflow_local.WorkflowExamplesLocal()
        transfer_examples = workflow_transfer.WorkflowExamplesTransfer()
        shared_examples = workflow_shared.WorkflowExamplesShared()
        # Register the example output directories for harness cleanup.
        self.temporaries.extend([
            local_examples.output_dir,
            transfer_examples.output_dir,
            shared_examples.output_dir,
        ])
        workflows = [
            ("multiple", local_examples.example_multiple()),
            ("special_command", local_examples.example_special_command()),
            ("mutiple_transfer", transfer_examples.example_multiple()),
            ("special_command_transfer",
             transfer_examples.example_special_command()),
            ("special_transfer",
             transfer_examples.example_special_transfer()),
            ("mutiple_srp", shared_examples.example_multiple()),
            ("special_command_srp",
             shared_examples.example_special_command()),
        ]

        for name, workflow in workflows:
            print("Testing", name)

            # mkstemp returns (fd, path); we only need the path, so close
            # the descriptor immediately.
            fd, path = tempfile.mkstemp(prefix="json_", suffix=name + ".wf")
            os.close(fd)
            Helper.serialize(path, workflow)

            restored = Helper.unserialize(path)

            self.assertTrue(
                restored.attributs_equal(workflow),
                "Serialization failed for workflow %s" % name)

            # Best-effort cleanup of the temporary serialization file.
            try:
                os.remove(path)
            except IOError:
                pass
Example #2
0
    def test_serialization(self):
        """Serialize and unserialize every example workflow and verify that
        the round trip preserves workflow attributes."""
        wf_local = workflow_local.WorkflowExamplesLocal()
        wf_transfer = workflow_transfer.WorkflowExamplesTransfer()
        wf_shared = workflow_shared.WorkflowExamplesShared()
        # Output directories are removed by the harness afterwards.
        for example_set in (wf_local, wf_transfer, wf_shared):
            self.temporaries.append(example_set.output_dir)
        cases = [
            ("multiple", wf_local.example_multiple()),
            ("special_command", wf_local.example_special_command()),
            ("mutiple_transfer", wf_transfer.example_multiple()),
            ("special_command_transfer",
             wf_transfer.example_special_command()),
            ("special_transfer", wf_transfer.example_special_transfer()),
            ("mutiple_srp", wf_shared.example_multiple()),
            ("special_command_srp", wf_shared.example_special_command()),
        ]

        for case_name, wf in cases:
            print("Testing", case_name)

            # Create a unique temp file; keep the path, drop the descriptor.
            descriptor, tmp_path = tempfile.mkstemp(
                prefix="json_", suffix=case_name + ".wf")
            os.close(descriptor)
            Helper.serialize(tmp_path, wf)

            reloaded = Helper.unserialize(tmp_path)

            self.assertTrue(
                reloaded.attributs_equal(wf),
                "Serialization failed for workflow %s" % case_name)

            # Ignore a failed removal: cleanup is best-effort only.
            try:
                os.remove(tmp_path)
            except IOError:
                pass
Example #3
0
    def test_serialization(self):
        """Serialize each example workflow to a file under /tmp, read it
        back with Helper.unserialize, and assert that the attributes survive
        the round trip.

        Unlike the tempfile-based variants of this test, the target path is
        a fixed name under ``/tmp`` (one file per workflow example).
        """
        directory = "/tmp/"

        simple_wf_examples = workflow_local.WorkflowExamplesLocal()
        tr_wf_examples = workflow_transfer.WorkflowExamplesTransfer()
        srp_wf_examples = workflow_shared.WorkflowExamplesShared()
        workflows = []
        workflows.append(("multiple", simple_wf_examples.example_multiple()))
        workflows.append(("special_command",
                          simple_wf_examples.example_special_command()))

        workflows.append(("mutiple_transfer",
                          tr_wf_examples.example_multiple()))
        workflows.append(("special_command_transfer",
                          tr_wf_examples.example_special_command()))
        workflows.append(("special_transfer",
                          tr_wf_examples.example_special_transfer()))

        workflows.append(("mutiple_srp", srp_wf_examples.example_multiple()))
        workflows.append(("special_command_srp",
                          srp_wf_examples.example_special_command()))

        for workflow_name, workflow in workflows:
            # BUG FIX: this line was a Python 2 print statement
            # (`print "Testing", workflow_name`), which is a SyntaxError
            # under Python 3; every sibling version of this test already
            # uses the print() function.
            print("Testing", workflow_name)

            file_path = os.path.join(directory,
                                     "json_" + workflow_name + ".wf")
            Helper.serialize(file_path, workflow)

            new_workflow = Helper.unserialize(file_path)

            self.assertTrue(new_workflow.attributs_equal(workflow),
                            "Serialization failed for workflow %s" %
                            workflow_name)

            # Best-effort cleanup; a missing file is not a test failure.
            try:
                os.remove(file_path)
            except IOError:
                pass
Example #4
0
    def test_serialization(self):
        """Round-trip every example workflow through a file in /tmp and
        check that unserialization restores equal attributes."""
        directory = "/tmp/"

        local_set = workflow_local.WorkflowExamplesLocal()
        transfer_set = workflow_transfer.WorkflowExamplesTransfer()
        shared_set = workflow_shared.WorkflowExamplesShared()
        workflows = [
            ("multiple", local_set.example_multiple()),
            ("special_command", local_set.example_special_command()),
            ("mutiple_transfer", transfer_set.example_multiple()),
            ("special_command_transfer",
             transfer_set.example_special_command()),
            ("special_transfer", transfer_set.example_special_transfer()),
            ("mutiple_srp", shared_set.example_multiple()),
            ("special_command_srp", shared_set.example_special_command()),
        ]

        for label, wf in workflows:
            print("Testing", label)

            # Fixed, human-readable file name under /tmp (one per example).
            target = os.path.join(directory, "json_" + label + ".wf")
            Helper.serialize(target, wf)

            round_tripped = Helper.unserialize(target)

            self.assertTrue(
                round_tripped.attributs_equal(wf),
                "Serialization failed for workflow %s" % label)

            # Best-effort cleanup of the serialized file.
            try:
                os.remove(target)
            except IOError:
                pass
Example #5
0
            # NOTE(review): fragment — the enclosing function/conditional is
            # not visible in this chunk, and the final for-loop body is cut
            # off. Comments only; code left byte-identical.
            logger.info("epd_to_deploy " + repr(options.epd_to_deploy)) 
            logger.info("untar_directory " + repr(options.untar_directory))
            # Scheduler polls every second; the per-job retry count comes
            # from the command-line options.
            sch = MPIScheduler(comm, interval=1, nb_attempt_per_job=options.nb_attempt_per_job)

            config.disable_queue_limits()    

            workflow_engine = ConfiguredWorkflowEngine(database_server,
                                                       sch,
                                                       config)
            # Submit a new workflow when a workflow file was given and exists.
            if options.workflow_file and os.path.exists(options.workflow_file):
                workflow_file = options.workflow_file  
                logger.info(" ")
                logger.info("******* submission of worklfow **********")
                logger.info("workflow file: " + repr(workflow_file))

                workflow = Helper.unserialize(workflow_file)
                workflow_engine.submit_workflow(workflow,
                                                expiration_date=None,
                                                name=None,
                                                queue=None)
            # Optionally restart a previously submitted workflow by id.
            # NOTE(review): "!= None" should be "is not None", and the
            # "workflow if:" log text looks like a typo for "workflow id:" —
            # both left untouched in this comments-only pass.
            if options.wf_id_to_restart != None:
                workflow_id = options.wf_id_to_restart
                logger.info(" ")
                logger.info("******* restart worklfow **********")
                logger.info("workflow if: " + repr(workflow_id))
                workflow_engine.stop_workflow(workflow_id)
                workflow_engine.restart_workflow(workflow_id, queue=None)
     
            # Poll until every job and workflow managed by the engine is done.
            while not workflow_engine.engine_loop.are_jobs_and_workflow_done():
                time.sleep(2)
            # Presumably iterates over the slave MPI ranks (1..size-1) to
            # shut them down — TODO confirm; the loop body lies beyond this
            # chunk (truncated here).
            for slave in range(1, comm.size):
            # NOTE(review): fragment — appears to be a reformatted duplicate
            # of the preceding driver code; the enclosing function is not
            # visible and the final for-loop body is cut off. Comments only;
            # code left byte-identical.
            logger.info("untar_directory " + repr(options.untar_directory))
            # Scheduler polls every second; retry count from the options.
            sch = MPIScheduler(comm,
                               interval=1,
                               nb_attempt_per_job=options.nb_attempt_per_job)

            config.disable_queue_limits()

            workflow_engine = ConfiguredWorkflowEngine(database_server, sch,
                                                       config)
            # Submit a new workflow when a workflow file was given and exists.
            if options.workflow_file and os.path.exists(options.workflow_file):
                workflow_file = options.workflow_file
                logger.info(" ")
                logger.info("******* submission of worklfow **********")
                logger.info("workflow file: " + repr(workflow_file))

                workflow = Helper.unserialize(workflow_file)
                workflow_engine.submit_workflow(workflow,
                                                expiration_date=None,
                                                name=None,
                                                queue=None)
            # Optionally restart a previously submitted workflow by id.
            # NOTE(review): "!= None" should be "is not None"; "workflow if:"
            # looks like a typo for "workflow id:" — left untouched here.
            if options.wf_id_to_restart != None:
                workflow_id = options.wf_id_to_restart
                logger.info(" ")
                logger.info("******* restart worklfow **********")
                logger.info("workflow if: " + repr(workflow_id))
                workflow_engine.stop_workflow(workflow_id)
                workflow_engine.restart_workflow(workflow_id, queue=None)

            # Poll until every job and workflow managed by the engine is done.
            while not workflow_engine.engine_loop.are_jobs_and_workflow_done():
                time.sleep(2)
            # Presumably notifies each slave MPI rank (1..size-1) — TODO
            # confirm; the loop body lies beyond this chunk (truncated here).
            for slave in range(1, comm.size):