def test_serialize_object_file_exists(self):
    filename = "temp.bin"
    obj = [1, 2, 3]

    # serialise once so that the target file already exists
    Serialization.serialize_object(obj, filename)

    # serialising again should simply overwrite the existing file
    Serialization.serialize_object(obj, filename)
    self.assertTrue(os.path.exists(filename))
    os.remove(filename)
def test_serialize_object_file_not_exists(self):
    filename = "temp.bin"
    obj = [1, 2, 3]

    # make sure the target file does not exist yet
    try:
        os.remove(filename)
    except OSError:
        pass

    Serialization.serialize_object(obj, filename)
    self.assertTrue(os.path.exists(filename))
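# For reference, a minimal sketch of what a pickle-based serialize_object could
# look like. This is an assumption for illustration only, not the package's
# actual implementation, and the helper name _example_serialize_object is made up.
import pickle

def _example_serialize_object(obj, filename):
    # write a pickled copy of obj, overwriting any existing file
    with open(filename, "wb") as f:
        pickle.dump(obj, f)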
def submit_wrapped_pbs_job(self, wrapped_job, job_name):
    job_folder = self.get_job_foldername(job_name)
    job_filename = self.get_job_filename(job_name)
    logger.info("Creating job with file %s" % job_filename)

    # try to create the job folder if it does not exist yet
    try:
        makedirs(job_folder)
    except OSError:
        pass

    Serialization.serialize_object(wrapped_job, job_filename)

    # allow the queue to process things
    time.sleep(self.submission_delay)

    dispatcher_string = self._get_dispatcher_string(job_filename)

    # get computing resource constraints from the job
    walltime, memory, nodes = wrapped_job.get_walltime_mem_nodes()
    job_string = self.create_batch_script(job_name, dispatcher_string,
                                          walltime, memory, nodes)

    # put the custom parameter string in front if it exists, but not as the
    # first line, to avoid clashing with the #!/bin/bash shebang
    if self.batch_parameters.parameter_prefix != "":
        lines = job_string.split(os.linesep)
        job_string = os.linesep.join(
            [lines[0], self.batch_parameters.parameter_prefix] + lines[1:])

    # write the batch script to the job folder
    f = open(job_folder + os.sep +
             BatchClusterComputationEngine.batch_script_filename, "w")
    f.write(job_string)
    f.close()

    job_id = self.submit_to_batch_system(job_string)
    if job_id == "":
        raise RuntimeError("Could not parse job_id. "
                           "Something went wrong with the job submission")

    # store the batch system's job id next to the script
    f = open(job_folder + os.sep +
             BatchClusterComputationEngine.job_id_filename, 'w')
    f.write(job_id + os.linesep)
    f.close()

    if not isinstance(wrapped_job, FireAndForgetJob):
        # track submitted (and unfinished) jobs and their start time
        self._insert_job_time_sorted(job_name, job_id)
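# Standalone illustration (not part of the engine) of the parameter-prefix
# insertion above: batch scripts start with a shebang, so an extra scheduler
# directive has to go after the first line rather than in front of it. The
# script content and the "#PBS -q long" directive below are made-up examples.
def _example_insert_parameter_prefix():
    import os

    script = os.linesep.join(["#!/bin/bash",
                              "cd $PBS_O_WORKDIR",
                              "python run_job.py"])
    parameter_prefix = "#PBS -q long"  # hypothetical extra directive

    lines = script.split(os.linesep)
    # keep the shebang first, then the directive, then the rest of the script
    return os.linesep.join([lines[0], parameter_prefix] + lines[1:])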