Example #1
0
    def run(self):

        job_type = get_jobtype(self.jobid)

        exec_file = "cactus_build_dare.py"
        cmd = os.path.join(DARECACTUS_HOME, "darecactus", "lib", "DARE", "examples", exec_file)
        print cmd, " -c ", self.conffile
        p1 = Popen(["python", cmd, "-c", self.conffile], stdout=PIPE)

        update_job_pid(self.jobid, p1.pid)
        update_job_status(self.jobid, RUNNING)
        print "p1.pid", p1.pid
        p1.wait()

        update_job_status(self.jobid, RUNNING)
        exec_file = "cactus_dare.py"
        p2 = Popen(
            [
                "python",
                os.path.join(DARECACTUS_HOME, "darecactus", "lib", "DARE", "examples", exec_file),
                "-c",
                self.conffile,
            ],
            stdout=PIPE,
        )
        print "p2.pid", p2.pid
        update_job_pid(self.jobid, p2.pid)
        p2.wait()
        # update_job_status(jobid, 4)
        print "Job %s  Started  " % self.jobid
Example #2
0
    def run(self):

        #create a logfile
        LOG_FILENAME = self.job_conf["log_filename"]
        print LOG_FILENAME
        self.logger = logging.getLogger('dare_multijob')
        hdlr = logging.FileHandler(LOG_FILENAME)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        hdlr.setFormatter(formatter)
        self.logger.addHandler(hdlr)
        self.logger.setLevel(logging.INFO)

        #first info in log file
        self.logger.info("Job id  is " + self.job_conf["jobid"])
        self.logger.info("RESOURCES used are " + self.job_conf["num_resources"])

        try:
            #get resource info
            #start the big job agents
            resource_list = []
            self.mjs = []
            for i in range(0, int(self.job_conf["num_resources"])):
                resource_list.append([])
                resource_list[i].append(dict_section(self.config, "resource_" + str(i)))
                #create multiple manyjobs
                print "Create manyjob service "
                self.mjs.append(many_job_service(resource_list[i], COORDINATION_URL))
            total_number_of_jobs = 0
            ### run the step
            wus_count = 0
            for STEP in range(0, int(self.job_conf["num_steps"])):
                starttime = time.time()

                #job started update status
                if (self.load_update_env == "true"):
                    update_job_detail_status(self.job_conf["jobid"], "In step " + str(STEP + 1))

                step_wus = self.job_conf["step_" + str(STEP)].split(',')
                if ("step_" + str(STEP)) not in self.job_conf["ft_steps"].split(','):
                    ### submit the each step wus to bigjob
                    for wu_count in range(0, len(step_wus)):
                        wu = dict_section(self.config, step_wus[wu_count].strip())
                        wus_count = wus_count + 1
                        self.submit_wu(wu)
                    self.wait_for_wus(wus_count)
                else:
                    #time.sleep(10)
                    for wu_count in range(0, len(step_wus)):
                        fs = dict_section(self.config, step_wus[wu_count].strip())
                        self.submit_fs(fs)

                runtime = time.time() - starttime

                self.logger.info("STEP" + str(STEP) + " Runtime: " + str(runtime))

            #all jobs done update status
            if (self.load_update_env == "true"):
                update_job_detail_status(self.job_conf["jobid"], "")
                update_job_status(self.job_conf["jobid"], 4)

            for i in range(0, int(self.job_conf["num_resources"])):
                self.mjs[i].cancel()

        except:
            traceback.print_exc(file=sys.stdout)
            try:
                for i in range(0, int(self.job_conf["num_resources"])):
                    self.mjs[i].cancel()
            except:
                sys.exit()