Code example #1
0
    def start(self):
        """Run the user's test/job command and write Gazebo bookkeeping files.

        The command's stdout/stderr are appended to self.job_log_file;
        progress markers ("<nodes>", "<end>") are printed to stdout.
        After the command finishes, runs the epilog and cleanup steps and
        records trend data for Gazebo compatibility.
        """

        # create own unique working space for this run
        #self.setup_working_space()

        # print the common log settings here right after the job is started
        self.save_common_settings()

        # store some info into ENV variables that jobs may need to use later on.
        self.setup_job_info()

        # what node(s) is this job running on...
        nodes = platform.node().partition('.')[0]
        os.environ['PV_NODES'] = nodes
        os.environ['GZ_NODES'] = os.environ['PV_NODES']
        print("<nodes> " + nodes + "\n")

        self.logger.info(self.lh + " : args=" + str(self.configs['run']['test_args']))

        # build the exact command to run
        cmd = "cd " + os.environ['PV_RUNHOME'] + "; " + \
            os.environ['PVINSTALL'] + "/PAV/scripts/mytime ./" + self.configs['run']['cmd']
        print("\n ->  RawJobController: invoke %s" % cmd)

        # Get any buffered output into the output file now
        # so that the order doesn't look all mucked up
        sys.stdout.flush()

        # Invoke the cmd and send the output to the file set up when
        # the object was instantiated
        self.logger.info(self.lh + " run: " + cmd)
        p = subprocess.Popen(cmd, stdout=self.job_log_file, stderr=self.job_log_file, shell=True)
        # wait for the subprocess to finish
        output, errors = p.communicate()

        # NOTE: stdout/stderr are redirected to job_log_file above, so
        # communicate() returns (None, None); the return code is the only
        # reliable failure signal here.  (The old check concatenated the
        # None "errors" into the log string, raising TypeError on failure.)
        if p.returncode:
            print("Error: something went wrong!")
            print([p.returncode, errors, output])
            self.logger.info(self.lh + " run error: return code " + str(p.returncode))

        # The post_complete file needs to be placed in the results dir
        # for Gazebo compatibility.  Use a context manager so the file is
        # closed even if run_epilog()/cleanup() raise.
        pcf = os.environ["PV_JOB_RESULTS_LOG_DIR"] + "/post_complete"
        with open(pcf, "w") as text_file:
            text_file.write("command complete\n")
            self.run_epilog()
            text_file.write("epilog complete\n")
            self.cleanup()
            text_file.write("cleanup complete\n")

        print("<end> " + str(self.now()))

        # The trend_data file needs to be placed in the results dir
        # for Gazebo compatibility
        JobController.process_trend_data()
Code example #2
0
    def start(self):
        """Run the user's test/job command and write Gazebo bookkeeping files.

        The command's stdout/stderr are appended to self.job_log_file;
        progress markers ("<nodes>", "<end>") are printed to stdout.
        After the command finishes, runs the epilog and cleanup steps and
        records trend data for Gazebo compatibility.
        """

        # create own unique working space for this run
        #self.setup_working_space()

        # print the common log settings here right after the job is started
        self.save_common_settings()

        # store some info into ENV variables that jobs may need to use later on.
        self.setup_job_info()

        # what node(s) is this job running on...
        nodes = platform.node().partition('.')[0]
        os.environ['PV_NODES'] = nodes
        os.environ['GZ_NODES'] = os.environ['PV_NODES']
        print("<nodes> " + nodes + "\n")

        self.logger.info(self.lh + " : args=" + str(self.configs['run']['test_args']))

        # build the exact command to run
        cmd = "cd " + os.environ['PV_RUNHOME'] + "; " + \
            os.environ['PVINSTALL'] + "/PAV/scripts/mytime ./" + self.configs['run']['cmd']
        print("\n ->  RawJobController: invoke %s" % cmd)

        # Get any buffered output into the output file now
        # so that the order doesn't look all mucked up
        sys.stdout.flush()

        # Invoke the cmd and send the output to the file set up when
        # the object was instantiated
        self.logger.info(self.lh + " run: " + cmd)
        p = subprocess.Popen(cmd, stdout=self.job_log_file, stderr=self.job_log_file, shell=True)
        # wait for the subprocess to finish
        output, errors = p.communicate()

        # NOTE: stdout/stderr are redirected to job_log_file above, so
        # communicate() returns (None, None); the return code is the only
        # reliable failure signal here.  (The old check concatenated the
        # None "errors" into the log string, raising TypeError on failure.)
        if p.returncode:
            print("Error: something went wrong!")
            print([p.returncode, errors, output])
            self.logger.info(self.lh + " run error: return code " + str(p.returncode))

        # The post_complete file needs to be placed in the results dir
        # for Gazebo compatibility.  Use a context manager so the file is
        # closed even if run_epilog()/cleanup() raise.
        pcf = os.environ["PV_JOB_RESULTS_LOG_DIR"] + "/post_complete"
        with open(pcf, "w") as text_file:
            text_file.write("command complete\n")
            self.run_epilog()
            text_file.write("epilog complete\n")
            self.cleanup()
            text_file.write("cleanup complete\n")

        print("<end> " + str(self.now()))

        # The trend_data file needs to be placed in the results dir
        # for Gazebo compatibility
        JobController.process_trend_data()
Code example #3
0
def main():
    """
    Call the user's job script/program, redirecting its output to the
    per-job results log.  Will also start LDMS if requested via the
    LDMS_START_CMD environment variable.
    """

    cmd = "cd " + os.environ['PV_RUNHOME'] + "; " + \
        os.environ['PVINSTALL'] + "/PAV/scripts/mytime " + os.environ['USER_CMD']

    nodes = get_node_list()
    os.environ['PV_NODES'] = nodes
    os.environ['GZ_NODES'] = os.environ['PV_NODES']
    job_log_file = os.environ["PV_JOB_RESULTS_LOG"]

    with open(job_log_file, 'a') as lf:
        with stdout_redirected(lf):

            # redirect STDERR to the same file
            sys.stderr = lf

            print("<nodes> " + nodes + "\n")
            print("slurm_job_handler: ")

            # start LDMS here if requested!  The start command ought to be
            # defined, so let's go!
            try:
                if os.environ['LDMS_START_CMD']:
                    print("start ldms! ")
                    LDMS.start()
            except KeyError:
                # LDMS_START_CMD not set -> LDMS not requested; nothing to do
                pass

            print("<start> " + str(now()))
            print("  start job with: \n    " + cmd)
            lf.flush()

            # Call the command that runs the user's test/job
            subprocess.call(cmd, stdout=lf, stderr=lf, shell=True)

            # The post_complete file needs to be placed in the results dir
            # for Gazebo compatibility.  The context manager guarantees it
            # is closed even if run_epilog()/cleanup() raise.
            pcf = os.environ["PV_JOB_RESULTS_LOG_DIR"] + "/post_complete"
            with open(pcf, "w") as text_file:
                text_file.write("command complete\n")
                JobController.run_epilog()
                text_file.write("epilog complete\n")
                JobController.cleanup()
                text_file.write("cleanup complete\n")

            print("<end> " + str(now()))
            lf.flush()

            # The trend_data file needs to be placed in the results dir
            # for Gazebo compatibility
            JobController.process_trend_data()
Code example #4
0
def main():
    """
    Call the user's job script/program, redirecting its output to the
    per-job results log.  Will also start LDMS if requested via the
    LDMS_START_CMD environment variable.
    """

    cmd = "cd " + os.environ['PV_RUNHOME'] + "; " + \
        os.environ['PVINSTALL'] + "/PAV/scripts/mytime " + os.environ['USER_CMD']

    nodes = get_node_list()
    os.environ['PV_NODES'] = nodes
    os.environ['GZ_NODES'] = os.environ['PV_NODES']
    job_log_file = os.environ["PV_JOB_RESULTS_LOG"]

    with open(job_log_file, 'a') as lf:
        with stdout_redirected(lf):

            # redirect STDERR to the same file
            sys.stderr = lf

            print("<nodes> " + nodes + "\n")
            print("slurm_job_handler: ")

            # start LDMS here if requested!  The start command ought to be
            # defined, so let's go!
            try:
                if os.environ['LDMS_START_CMD']:
                    print("start ldms! ")
                    LDMS.start()
            except KeyError:
                # LDMS_START_CMD not set -> LDMS not requested; nothing to do
                pass

            print("<start> " + str(now()))
            print("  start job with: \n    " + cmd)
            lf.flush()

            # Call the command that runs the user's test/job
            subprocess.call(cmd, stdout=lf, stderr=lf, shell=True)

            # The post_complete file needs to be placed in the results dir
            # for Gazebo compatibility.  The context manager guarantees it
            # is closed even if run_epilog()/cleanup() raise.
            pcf = os.environ["PV_JOB_RESULTS_LOG_DIR"] + "/post_complete"
            with open(pcf, "w") as text_file:
                text_file.write("command complete\n")
                JobController.run_epilog()
                text_file.write("epilog complete\n")
                JobController.cleanup()
                text_file.write("cleanup complete\n")

            print("<end> " + str(now()))
            lf.flush()

            # The trend_data file needs to be placed in the results dir
            # for Gazebo compatibility
            JobController.process_trend_data()