def main(): """ calls the user's job script/program. Will also start LDMS if requested. """ cmd = "cd " + os.environ['PV_RUNHOME'] + "; " + \ os.environ['PVINSTALL'] + "/PAV/scripts/mytime " + os.environ['USER_CMD'] nodes = get_node_list() os.environ['PV_NODES'] = nodes os.environ['GZ_NODES'] = os.environ['PV_NODES'] job_log_file = os.environ["PV_JOB_RESULTS_LOG"] with open(job_log_file, 'a') as lf: with stdout_redirected(lf): #redirect STDERR to the same file sys.stderr = lf print "<nodes> " + nodes + "\n" print "slurm_job_handler: " # start LDMS here if requested! The start command ought to be # defined, so let's go! try: if os.environ['LDMS_START_CMD']: print "start ldms! " LDMS.start() except KeyError: #print 'I got a KeyError - no: "%s"' % str(e) pass print "<start>", now() print " start job with: \n " + cmd lf.flush() # Call the command that runs the users test/job # This works with job_out_file = /users/cwi/mystdout #subprocess.call(cmd1, stdout=job_out_file, shell=True) subprocess.call(cmd, stdout=lf, stderr=lf, shell=True) # The post_complete file needs to be placed in the results dir # for Gazebo compatibility pcf = os.environ["PV_JOB_RESULTS_LOG_DIR"] + "/post_complete" text_file = open(pcf, "w") text_file.write("{}\n".format("command complete")) JobController.run_epilog() text_file.write("{}\n".format("epilog complete")) JobController.cleanup() text_file.write("{}\n".format("cleanup complete")) text_file.close() print "<end>", now() lf.flush() # The trend_data file needs to be placed in the results dir # for Gazebo compatibility JobController.process_trend_data()
def main(): """ calls the user's job script/program. Will also start LDMS if requested. """ cmd = "cd " + os.environ['PV_RUNHOME'] + "; " + \ os.environ['PVINSTALL'] + "/PAV/scripts/mytime " + os.environ['USER_CMD'] nodes = get_node_list() os.environ['PV_NODES'] = nodes os.environ['GZ_NODES'] = os.environ['PV_NODES'] job_log_file = os.environ["PV_JOB_RESULTS_LOG"] with open(job_log_file, 'a') as lf: with stdout_redirected(lf): #redirect STDERR to the same file sys.stderr = lf print "<nodes> " + nodes + "\n" print "slurm_job_handler: " # start LDMS here if requested! The start command ought to be # defined, so let's go! try: if os.environ['LDMS_START_CMD']: print "start ldms! " LDMS.start() except KeyError, e: #print 'I got a KeyError - no: "%s"' % str(e) pass print "<start>", now() print " start job with: \n " + cmd lf.flush() # Call the command that runs the users test/job # This works with job_out_file = /users/cwi/mystdout #subprocess.call(cmd1, stdout=job_out_file, shell=True) subprocess.call(cmd, stdout=lf, stderr=lf, shell=True) # The post_complete file needs to be placed in the results dir # for Gazebo compatibility pcf = os.environ["PV_JOB_RESULTS_LOG_DIR"] + "/post_complete" text_file = open(pcf, "w") text_file.write("{}\n".format("command complete")) JobController.run_epilog() text_file.write("{}\n".format("epilog complete")) JobController.cleanup() text_file.write("{}\n".format("cleanup complete")) text_file.close() print "<end>", now() lf.flush() # The trend_data file needs to be placed in the results dir # for Gazebo compatibility JobController.process_trend_data()
def prep_ldms(self):
    """Set up LDMS for this job handler (LDMS works under Moab)."""
    message = 'setup LDMS for this job (' + self.handle + ') type'
    self.logger.info(message)
    # Constructing the LDMS object performs the setup for this handler.
    LDMS(self)