def test_exec_env(self):
    """An EXEC_ENV configured job must see its settings in the exec_env json."""
    with TestAreaContext("exec_env_test"):
        # Script asserts on the exec_env file the job runner writes for it.
        with open("exec_env.py", "w") as fh:
            fh.write("""#!/usr/bin/env python\nimport os
import json
with open("exec_env_exec_env.json") as f:
    exec_env = json.load(f)
assert exec_env["TEST_ENV"] == "123"
assert exec_env["NOT_SET"] is None
""")
        os.chmod("exec_env.py", stat.S_IEXEC + stat.S_IREAD)

        with open("EXEC_ENV", "w") as fh:
            fh.writelines(["EXECUTABLE exec_env.py\n",
                           "EXEC_ENV TEST_ENV 123\n",
                           "EXEC_ENV NOT_SET"])

        joblist = ExtJoblist()
        joblist.add_job("EXEC_ENV", ExtJob("EXEC_ENV", False))
        fm = ForwardModel(joblist)
        fm.add_job("EXEC_ENV")
        fm.formatted_fprintf("run_id", None, "data_root",
                             SubstitutionList(), 0, EnvironmentVarlist())

        manager = JobManager(json_file="jobs.json")
        first_job = manager[0]
        self.assertEqual(2, len(first_job.get("exec_env")))
        exit_status, msg = manager.runJob(first_job)
        self.assertEqual(exit_status, 0)
def test_jobs_py(self):
    """Loading jobs from a jobs.py module: missing, broken and valid files."""
    with TestAreaContext("no_jobs_py"):
        # No such module file at all.
        with self.assertRaises(IOError):
            jobm = JobManager(module_file="Does/not/exist")

    with TestAreaContext("invalid_jobs"):
        # Syntax error
        with open("jobs.py", "w") as fh:
            fh.write("Hello - this is not valid Python code ...")
        with self.assertRaises(ImportError):
            jobm = JobManager()

        # Missing jobList attribute
        if os.path.isfile("jobs.pyc"):
            os.unlink("jobs.pyc")
        with open("jobs.py", "w") as fh:
            fh.write("A = 1")
        with self.assertRaises(AttributeError):
            jobm = JobManager()

        create_jobs_py([{'name': 'COPY_FILE', 'executable': 'XYZ'}])
        manager = JobManager()
        self.assertEqual(1, len(manager))
        job0 = manager[0]
        with self.assertRaises(IndexError):
            _ = manager[1]
        job0 = manager["COPY_FILE"]
        with self.assertRaises(KeyError):
            _ = manager["NO-SUCH-JOB"]
        self.assertIn("COPY_FILE", manager)
        self.assertNotIn("COPY_FILEX", manager)
def test_get_env(self):
    """global_environment / global_update_path must reach the job environment."""
    with TestAreaContext("job_manager_get_env"):
        # The script itself asserts on the environment it receives.
        script_lines = [
            "#!/usr/bin/env python\n",
            "import os\n",
            "assert(os.environ['KEY_ONE'] == 'FirstValue')\n",
            "assert(os.environ['KEY_TWO'] == 'SecondValue')\n",
            "assert(os.environ['PATH104'] == 'NewPath')\n",
            "assert(os.environ['KEY_THREE'] == 'FourthValue:ThirdValue')\n",
            "assert(os.environ['KEY_FOUR'] == 'FifthValue:SixthValue:ThirdValue:FourthValue')\n",
        ]
        with open("x.py", "w") as fh:
            fh.writelines(script_lines)
        os.chmod("x.py", stat.S_IEXEC + stat.S_IREAD)

        job = {
            "name": "TEST_GET_ENV1",
            "executable": "./x.py",
            "stdout": "outfile.stdout",
            "stderr": "outfile.stderr",
            "argList": [],
        }
        data = {
            "umask": "0000",
            "global_environment": {
                "KEY_ONE": "FirstValue",
                "KEY_TWO": "SecondValue",
                "KEY_THREE": "ThirdValue",
                "KEY_FOUR": "ThirdValue:FourthValue",
            },
            "global_update_path": {
                "PATH104": "NewPath",
                "KEY_THREE": "FourthValue",
                "KEY_FOUR": "FifthValue:SixthValue",
            },
            "DATA_ROOT": "/path/to/data",
            "jobList": [job, job],
        }
        with open(os.path.join(os.getcwd(), "jobs.json"), "w") as fh:
            fh.write(json.dumps(data))

        manager = JobManager()
        # Both copies of the job must succeed.
        for index in (0, 1):
            exit_status, msg = manager.runJob(manager[index])
            self.assertEqual(exit_status, 0)
def _test_run_multiple_OK(self, create_jobs):
    """Run five mkdir jobs; each must succeed and leave renamed output files."""
    with TestAreaContext("mkdir"):
        dir_list = ["1", "2", "3", "4", "5"]
        joblist = [
            {
                "name": "MKDIR",
                "executable": "/bin/mkdir",
                "stdout": "mkdir_out",
                "stderr": "mkdir_err",
                "argList": ["-p", "-v", d],
            }
            for d in dir_list
        ]
        create_jobs(joblist)
        manager = JobManager()
        for index, job in enumerate(manager):
            exit_status, msg = manager.runJob(job)
            self.assertEqual(exit_status, 0)
            self.assertTrue(os.path.isdir(dir_list[index]))
            err_file = "mkdir_err.%d" % index
            self.assertTrue(os.path.isfile("mkdir_out.%d" % index))
            self.assertTrue(os.path.isfile(err_file))
            # A clean run writes nothing on stderr.
            self.assertEqual(0, os.path.getsize(err_file))
def test1(self):
    """A RUNTIME_INT argument that does not parse as int must fail the job."""
    with TestAreaContext("job_manager_runtime_int_kw"):
        job_0 = {
            "name": "JOB_1",
            "executable": "echo",
            "stdout": "outfile.stdout.1",
            "stderr": None,
            # '5.12' is declared RUNTIME_INT but is not an integer.
            "argList": ['a_file', '5.12'],
            "min_arg": 1,
            "max_arg": 2,
            "arg_types": ['STRING', 'RUNTIME_INT'],
        }
        data = {
            "umask": "0000",
            "DATA_ROOT": "/path/to/data",
            "jobList": [job_0],
        }
        with open(os.path.join(os.getcwd(), "jobs.json"), "w") as fh:
            fh.write(json.dumps(data))
        manager = JobManager()
        exit_status, msg = manager.runJob(manager[0])
        self.assertEqual(exit_status, 1)
def test_fsInfo(self):
    """mountPoint and fsInfo must agree on the file-server address."""
    # Unknown paths get the '?' placeholder tuple.
    self.assertEqual(('?', '?.?.?.?'), JobManager.mountPoint('/no/such/path/'))
    for mnt_point in ('/project/res', '/prog/ecl'):
        file_server, ip = JobManager.mountPoint(mnt_point)
        self.assert_ip_address(ip)
        (file_server, isilon_addr), _ = JobManager.fsInfo(path=mnt_point)
        self.assertEqual(isilon_addr, ip)
def test_data_from_forward_model_json(self):
    """Without DATA_ROOT in jobs.json, data_root() is None and env is untouched."""
    with TestAreaContext("json_from_forward_model_NO_DATA_ROOT"):
        with open("jobs.json", "w") as fh:
            fh.write(JSON_STRING_NO_DATA_ROOT)
        manager = JobManager()
        self.assertIsNone(manager.data_root())
        self.assertNotIn("DATA_ROOT", os.environ)
def _test_runtime(self, create_jobs):
    """getRuntime() must reflect wall-clock time since the manager started."""
    with TestAreaContext(gen_area_name("runtime", create_jobs)):
        create_jobs([{'name': 'COPY_FILE', 'executable': 'XYZ'}])
        manager = JobManager()
        start_time = manager.getStartTime()
        time.sleep(5)
        # After sleeping 5s at least 5s of runtime must have accumulated.
        self.assertTrue(manager.getRuntime() > 5)
def test_data_from_forward_model_json(self):
    """With DATA_ROOT present, data_root() and os.environ must both expose it.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """
    with TestAreaContext("json_from_forward_model"):
        with open("jobs.json", "w") as f:
            f.write(JSON_STRING)
        jobm = JobManager()
        self.assertEqual("PERLIN", jobm[0]["name"])
        self.assertEqual("/path/to/data", jobm.data_root())
        self.assertEqual("/path/to/data", os.environ["DATA_ROOT"])
def test_complete(self):
    """complete() must persist a status with sane start/end timestamps."""
    with TestAreaContext("json_from_forward_model_NO_DATA_ROOT"):
        with open("jobs.json", "w") as fh:
            fh.write(JSON_STRING_NO_DATA_ROOT)
        manager = JobManager()
        manager.complete()

        status = ForwardModelStatus.load(os.getcwd())
        self.assertTrue(isinstance(status.end_time, datetime.datetime))
        self.assertTrue(isinstance(status.start_time, datetime.datetime))
        self.assertTrue(status.start_time <= status.end_time)
        # The whole thing just happened, so start_time is recent.
        elapsed = datetime.datetime.now() - status.start_time
        self.assertTrue(elapsed.total_seconds() < 5)
def test_job_list_to_log_ordering(self):
    """_ordered_job_map_values() must follow job_list order, not dict order."""
    with TestAreaContext("json_from_forward_model"):
        with open("jobs.json", "w") as fh:
            fh.write(JSON_STRING)
        manager = JobManager()
        names = ['b', 'd', 'a', 'c']
        manager.job_list = [{'name': n} for n in names]
        manager._job_map = {n: {'name': n} for n in names}
        ordered = manager._ordered_job_map_values()
        # Verify _job_map.values() is indeed giving us values in not wanted order
        self.assertNotEqual(manager.job_list, manager._job_map.values())
        # Verify _ordered_job_map_values is giving us values in same order as job_list
        self.assertEqual(manager.job_list, ordered)
def test_repr(self):
    """repr() must expose the class name and the job count."""
    with TestAreaContext("jobman_repr"):
        self.assert_clean_slate()
        create_jobs_json([{'name': 'COPY_FILE', 'executable': 'XYZ'}])
        text = repr(JobManager())
        self.assertTrue(text.startswith('JobManager('))
        self.assertIn('len=1', text)
def test_data_from_forward_model_json(self):
    """The first job parsed from JSON_STRING must be the PERLIN job.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """
    with TestAreaContext("json_from_forward_model"):
        with open("jobs.json", "w") as f:
            f.write(JSON_STRING)
        jobm = JobManager()
        self.assertEqual("PERLIN", jobm[0]["name"])
def test_logged_fields(self):
    """The information dict must carry the expected version fields."""
    with TestAreaContext("jobman_repr"):
        self.assert_clean_slate()
        create_jobs_json([{'name': 'COPY_FILE', 'executable': 'XYZ'}])
        info = JobManager().information
        for key in ('kernel_version', 'res_version', 'ecl_version'):
            self.assertIn(key, info)
def test_post_error(self):
    """postError() must accept a job dict with existing stdout/stderr files."""
    with TestAreaContext(gen_area_name("test_post_error", create_jobs_json)):
        create_jobs_json([{'name': 'COPY_FILE', 'executable': 'XYZ'}])
        manager = JobManager()
        job = {
            "name": "TESTING",
            "executable": "/bin/testing/path",
            "argList": "arg1 arg2 arg3",
            "stderr": "stderr.txt",
            "stdout": "stdout.txt",
        }
        # Give postError real files to pick up.
        with open("stderr.txt", "w") as fh:
            fh.write("stderr: %s\n" % datetime.datetime.now())
        with open("stdout.txt", "w") as fh:
            fh.write("stdout: %s\n" % datetime.datetime.now())
        manager.postError(job, "TESTING: Error message")
def test_missing_joblist_json(self):
    """jobs.json without a jobList key must be rejected with IOError."""
    with TestAreaContext("missing_joblist_json"):
        self.assert_clean_slate()
        with open("jobs.json", "w") as fh:
            json.dump({"umask": "0000"}, fh)
        with self.assertRaises(IOError):
            JobManager()
def test_invalid_jobs_json(self):
    """A jobs.json that is not valid JSON must be rejected with IOError."""
    with TestAreaContext("invalid_jobs_json"):
        self.assert_clean_slate()
        # Syntax error
        with open("jobs.json", "w") as fh:
            fh.write("Hello - this is not valid JSON ...")
        with self.assertRaises(IOError):
            JobManager()
def checkFileServerBlackList(job_manager):
    """Warn — and optionally abort — when the runpath is served from a
    blacklisted file server.

    Fixes:
      * bare ``except:`` narrowed to ``except Exception:`` (the report post
        is best-effort, but a bare except also swallows SystemExit and
        KeyboardInterrupt);
      * local variable ``stat`` renamed to ``response`` so it no longer
        shadows the ``stat`` module used elsewhere in this file;
      * ``print`` statements converted to the single-argument call form,
        which behaves identically and is Python 3 compatible.
    """
    fs_info, usage = JobManager.fsInfo()
    file_server = fs_info[0]
    if file_server in FILE_SERVER_BLACKLIST:
        msg = """************************************************************************
You are now running a forward model simulation in a runpath directory:
which is served from the file server: %s

This file server is not suitable for large scale FMU usage. Please use
a different RUNPATH setting in your ert configuration file.

Please contact Ketil Nummedal if you do not understand how to proceed.
************************************************************************
""" % job_manager.file_server
        payload = {
            "user": job_manager.user,
            "ert_job": "FILE_SERVER_CHECK",
            "executable": "/bin/???",
            "arg_list": "--",
            "error_msg": "Simulation started on blacklisted file_server:%s" % job_manager.file_server,
            "cwd": os.getcwd(),
            "file_server": job_manager.isilon_node,
            "node": job_manager.node,
            "fs_use": "%s / %s / %s" % job_manager.fs_use,
            "stderr": "???",
            "stdout": "???",
        }
        print(json.dumps(payload))
        try:
            response = requests.post(ERROR_URL,
                                     headers={"Content-Type": "application/json"},
                                     data=json.dumps(payload))
            print(response.text)
        except Exception:
            # Reporting is best-effort only; never let it break the run.
            pass

        with open("WARNING-ILLEGAL-FILESERVER.txt", "w") as f:
            f.write(msg)

        if block_illegal_fileserver:
            illegal_fileserver_exit(msg, job_manager.user)
def test_aaaa(self):
    """A jobs.json holding only unrelated keys must raise IOError."""
    with TestAreaContext("evil_by_name"):
        with open(os.path.join(os.getcwd(), "jobs.json"), "w") as fh:
            json.dump({"A": 1}, fh)
        with self.assertRaises(IOError):
            JobManager()
def test_missing_umask_json(self):
    """jobs.json without a umask entry must be rejected with IOError.

    Fix: removed a leftover Python 2 debug statement (``print os.getcwd()``)
    that only cluttered the test output.
    """
    with TestAreaContext("test_missing_umask_json"):
        self.assert_clean_slate()
        with open("jobs.json", "w") as f:
            f.write(json.dumps({"jobList": "[]"}))
        with self.assertRaises(IOError):
            jobm = JobManager()
def test_run_job(self):
    """A job whose script exits 1 must report exit status 1."""
    with TestAreaContext(gen_area_name("run_job_fail", create_jobs_json)):
        # A minimal shell script that always fails.
        with open("run.sh", "w") as fh:
            fh.writelines(["#!/bin/sh\n", "exit 1\n"])
        os.chmod("run.sh", os.stat("run.sh").st_mode | stat.S_IEXEC)

        script = os.path.join(os.getcwd(), "run.sh")
        create_jobs_json([{"name": "TEST_JOB",
                           "executable": script,
                           "argList": ["A", "B"]}])
        manager = JobManager()
        self.assertTrue(os.path.isfile(script))
        exit_status, msg = manager.runJob(manager[0])
        self.assertEqual(exit_status, 1)
def test_run_multiple_fail(self):
    """Each job exits with its own code and writes something on stderr."""
    with TestAreaContext(gen_area_name("exit", create_jobs_json)):
        joblist = [
            {
                "name": "exit",
                "executable": "/bin/bash",
                "stdout": "exit_out",
                "stderr": "exit_err",
                # produces something on stderr, and exits with exit_code=index
                "argList": ["-c", "echo \"failed with %s\" 1>&2 ; exit %s" % (index, index)],
            }
            for index in range(1, 6)
        ]
        create_jobs_json(joblist)
        manager = JobManager()
        for index, job in enumerate(manager):
            exit_status, msg = manager.runJob(job)
            self.assertEqual(exit_status, index + 1)
            self.assertTrue(os.path.getsize("exit_err.%d" % index) > 0)
def _test_statusfile(self, create_jobs):
    """JobManager() clears stale OK/EXIT files but keeps a STATUS file."""
    with TestAreaContext(gen_area_name("status_test", create_jobs)):
        # Pre-create all three status files as empty stale leftovers.
        for fname in (JobManager.STATUS_file,
                      JobManager.OK_file,
                      JobManager.EXIT_file):
            open(fname, "w").close()

        create_jobs([{'name': 'COPY_FILE', 'executable': 'XYZ'}])
        manager = JobManager()
        for fname in [JobManager.EXIT_file, JobManager.OK_file]:
            self.assertFalse(os.path.exists(fname))
        self.assertTrue(os.path.exists(manager.STATUS_file))

        manager.sleep_time = 0
        manager.createOKFile()
        self.assertTrue(os.path.exists(manager.OK_file))
def test_run_output_rename(self):
    """stdout/stderr names must get a per-job index suffix appended."""
    with TestAreaContext(gen_area_name("output_rename", create_jobs_json)):
        job = {"name": "TEST_JOB",
               "executable": "/bin/mkdir",
               "stdout": "out",
               "stderr": "err"}
        create_jobs_json([job] * 5)
        manager = JobManager()
        for index, entry in enumerate(manager):
            self.assertEqual("err.%d" % index, entry["stderr"])
            self.assertEqual("out.%d" % index, entry["stdout"])
def _test_run_multiple_fail(self, create_jobs):
    """Every mkdir job must fail when its parent directory is not writable.

    Fix: the ``read-only`` directory's write permission is restored in a
    ``finally`` block — the original left it read-only, which can make the
    test-area cleanup fail when it tries to remove the directory tree.
    """
    with TestAreaContext(gen_area_name("mkdir", create_jobs)):
        dir_list = ["1", "2", "3", "4", "5"]
        joblist = [
            {
                "name": "MKDIR",
                "executable": "/bin/mkdir",
                "stdout": "mkdir_out",
                "stderr": "mkdir_err",
                "argList": ["-p", "-v", "read-only/%s" % d],
            }
            for d in dir_list
        ]
        create_jobs(joblist)
        jobm = JobManager()
        os.mkdir("read-only")
        os.chmod("read-only", stat.S_IRUSR + stat.S_IXUSR)
        try:
            for index, job in enumerate(jobm):
                exit_status, msg = jobm.runJob(job)
                self.assertEqual(exit_status, 1)
                self.assertTrue(os.path.getsize("mkdir_err.%d" % index) > 0)
        finally:
            # Restore write permission so the test area can be removed.
            os.chmod("read-only", stat.S_IRWXU)
def test_indexing_json(self):
    """Index, name lookup and membership tests on a one-job manager."""
    with TestAreaContext("indexing_json"):
        self.assert_clean_slate()
        create_jobs_json([{'name': 'COPY_FILE', 'executable': 'XYZ'}])
        manager = JobManager()
        self.assertEqual(1, len(manager))
        job0 = manager[0]
        with self.assertRaises(IndexError):
            _ = manager[1]
        job0 = manager["COPY_FILE"]
        with self.assertRaises(KeyError):
            _ = manager["NO-SUCH-JOB"]
        self.assertIn("COPY_FILE", manager)
        self.assertNotIn("COPY_FILEX", manager)
def test_mountpoint(self):
    """fsInfo rejects relative/non-existing/special paths, accepts /prog/ecl."""
    with TestAreaContext("mount_test"):
        os.makedirs("path/to/test/dir")
        for bad_path in ("path/to/test/dir",
                         "/path/does/not/exist",
                         "/scratch"):
            with self.assertRaises(ValueError):
                JobManager.fsInfo(bad_path)

        (server, ip), fs_use = JobManager.fsInfo("/prog/ecl")
        self.assert_ip_address(ip)
        self.assertEqual(3, len(fs_use))
        for entry in fs_use:
            self.assertTrue(entry[0].isdigit())
def test_json_over_py(self):
    """When both jobs.py and jobs.json exist, the JSON definition wins.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """
    with TestAreaContext("json_wins"):
        jobListPy = [{
            "name": "PYTHON_JOB",
            "executable": "/bin/mkdir",
            "stdout": "out",
            "stderr": "err"
        }]
        jobListJson = [{
            "name": "JSON_JOB",
            "executable": "/bin/mkdir",
            "stdout": "out",
            "stderr": "err"
        }]
        create_jobs_py(jobListPy)
        create_jobs_json(jobListJson)
        jobm = JobManager()
        self.assertEqual(jobListJson[0]["name"], jobm[0]["name"])
def main(argv):
    # Entry point for the forward-model job runner.
    #
    # Usage:  <prog> [run_path [job_name ...]]
    #   - with only run_path: batch mode, run every job in order;
    #   - with job names after run_path: interactive mode, run only those.
    #
    # NOTE(review): `argv` is never used — the body reads sys.argv directly.
    if len(sys.argv) >= 2:
        run_path = sys.argv[1]
        if not os.path.exists( run_path ):
            sys.stderr.write("*****************************************************************\n")
            sys.stderr.write("** FATAL Error: Could not find directory: %s \n" % run_path)
            sys.stderr.write("** CWD: %s\n" % os.getcwd())
            sys.stderr.write("*****************************************************************\n")
            sys.exit(-1)
        os.chdir( run_path )
    #################################################################
    # 1. Modify the sys.path variable to include the runpath
    # 2. Import the jobs module.
    #################################################################
    random.seed()
    check_version()
    max_runtime = 0  # NOTE(review): assigned but never read in this function.
    job_manager = JobManager(error_url=LOG_URL, log_url=LOG_URL)
    # Abort/warn early if the runpath lives on a blacklisted file server.
    checkFileServerBlackList(job_manager)
    if len(sys.argv) <= 2:
        # Normal batch run.
        # Set this to true to ensure that empty job lists come out successfully.
        OK = True
        for job in job_manager:
            job_manager.startStatus( job )
            (OK , exit_status, error_msg) = run_one( job_manager , job)
            job_manager.completeStatus(exit_status, error_msg, job=job)
            if not OK:
                # A failing job writes the EXIT file and stops the run.
                job_manager.exit( job, exit_status , error_msg )
        if OK:
            job_manager.createOKFile( )
    else:
        #Interactive run
        for job_name in sys.argv[2:]:
            # This is totally unpredictable if there more jobs with
            # the same name.
            if job_name in job_manager:
                job = job_manager[job_name]
                # Trailing comma: Python 2 print without a newline.
                print "Running job: %s ... " % job_name,
                sys.stdout.flush()
                (OK , exit_status, error_msg) = run_one( job_manager, job )
                if OK:
                    print "OK"
                else:
                    print "failed ...."
                    print "-----------------------------------------------------------------"
                    if job.get("stderr"):
                        print "Error:%s " % error_msg
                        if os.path.exists(job["stderr"]):
                            # Dump the job's stderr file for diagnosis.
                            fileH = open(job["stderr"],"r")
                            for line in fileH.readlines():
                                print line,
                            fileH.close()
                    print "-----------------------------------------------------------------"
                    sys.exit()
            else:
                print "Job: %s does not exist. Available jobs:" % job_name
                # NOTE(review): `jobs` is not defined in this function —
                # presumably a module-level import of the jobs module; verify.
                for j in jobs.jobList:
                    print " %s" % j["name"]
def test_no_jobs_json(self):
    """With neither a module file nor a json file, construction fails."""
    with TestAreaContext("no_jobs_json"):
        self.assert_clean_slate()
        with self.assertRaises(IOError):
            JobManager(module_file="Does/not/exist",
                       json_file="Neither/does/this/one")