def process(self, item):
    """Post-stage a finished job's output files from the SAGA filesystem.

    Copies the remote stdout/stderr (and, if the job declared an output
    sandbox, the sandbox archive) into the job's local output workspace,
    unpacks and removes the archive, and advances the job status to
    'completing' then 'completed' (or 'failed' on error).

    Args:
        item: queue item carrying the job in its ``jobObj`` attribute.

    Returns:
        True always — the item is considered consumed either way.
    """
    job = item.jobObj

    # The job's downloading task may already have been created and
    # assigned in a previous monitoring loop; ignore such cases.
    if job.status in ['completing', 'completed', 'failed']:
        return True

    job.updateStatus('completing')
    try:
        logger.info("poststaging output sandbox files")

        # Hoist the repeated workspace-path lookup; getPath() is assumed
        # to end with a path separator (stdout/stderr are appended
        # directly) — TODO confirm against the workspace implementation.
        outpath = job.getOutputWorkspace().getPath()

        logger.info(" * %s", saga.url(job.backend.saga_job_out).url)
        stdout = saga.filesystem.file(saga.url(job.backend.saga_job_out))
        stdout.copy("file://localhost/" + outpath + "stdout")

        logger.info(" * %s", saga.url(job.backend.saga_job_err).url)
        stderr = saga.filesystem.file(saga.url(job.backend.saga_job_err))
        stderr.copy("file://localhost/" + outpath + "stderr")

        if len(job.outputsandbox) > 0:
            output_sandbox = saga.url(job.backend.filesystem_url + "/"
                                      + job.backend.workdir_uuid
                                      + "/_output_sandbox.tgz")
            logger.info(" * %s", output_sandbox.url)
            osb = saga.filesystem.file(output_sandbox)
            osb.copy("file://localhost/" + outpath)

            # Unpack the output sandbox and delete the archive.  Use the
            # same osbpath for both the existence check and the tar call
            # (the original rebuilt the tar argument with an extra "/").
            # NOTE(review): os.system with an interpolated path breaks on
            # paths containing spaces/shell metacharacters — consider
            # subprocess.run([...], shell=False).
            osbpath = outpath + "_output_sandbox.tgz"
            if os.path.exists(osbpath):
                if os.system("tar -C %s -xzf %s" % (outpath, osbpath)) != 0:
                    job.updateStatus('failed')
                    # fixed typo: 'upack' -> 'unpack'
                    raise Exception('cannot unpack output sandbox')
                os.remove(osbpath)

        job.updateStatus('completed')
    except saga.exception as e:
        logger.error('exception caught while poststaging: %s',
                     e.get_full_message())
        job.updateStatus('failed')

    return True
def setupworkdir(self, path):
    """Ensure the job's local input/output workspace directories exist.

    The ``getPath()`` calls create the workspace directories as a side
    effect if they do not already exist.  ``path`` is unused but kept
    for interface compatibility with callers.

    Returns:
        True always.
    """
    # Removed unused local imports (shutil, errno) from the original.
    job = self.getJobObject()
    input_wd_path = job.getInputWorkspace().getPath()
    output_wd_path = job.getOutputWorkspace().getPath()
    logger.debug("local workspace - input dir: %s", input_wd_path)
    logger.debug("local workspace - output dir: %s", output_wd_path)
    return True
def setupworkdir(self, path):
    """Ensure the job's local input/output workspace directories exist.

    Calling ``getPath()`` on each workspace creates the directory as a
    side effect if it is missing.  ``path`` is unused but retained so
    the caller-facing signature is unchanged.

    Returns:
        True always.
    """
    # Removed unused local imports (shutil, errno) from the original.
    job = self.getJobObject()
    input_wd_path = job.getInputWorkspace().getPath()
    output_wd_path = job.getOutputWorkspace().getPath()
    logger.debug('local workspace - input dir: %s', input_wd_path)
    logger.debug('local workspace - output dir: %s', output_wd_path)
    return True
def process(self, item):
    """Download a job's output files (stdout, stderr, output sandbox).

    Stages the remote files into the local output workspace via the SAGA
    filesystem API, unpacking and deleting the sandbox archive when one
    was produced.  Moves the job through 'completing' to 'completed', or
    to 'failed' if any staging step raises.

    Args:
        item: queue item whose ``jobObj`` attribute is the job.

    Returns:
        True always.
    """
    job = item.jobObj

    # A downloading task may already have been created for this job in a
    # previous monitoring loop; skip such cases.
    if job.status in ['completing', 'completed', 'failed']:
        return True

    job.updateStatus('completing')
    try:
        logger.info("poststaging output sandbox files")

        # Look the workspace path up once.  It is presumably terminated
        # with a separator, since filenames are appended directly —
        # verify against the workspace implementation.
        wd = job.getOutputWorkspace().getPath()

        logger.info(" * %s", saga.url(job.backend.saga_job_out).url)
        stdout = saga.filesystem.file(saga.url(job.backend.saga_job_out))
        stdout.copy("file://localhost/" + wd + "stdout")

        logger.info(" * %s", saga.url(job.backend.saga_job_err).url)
        stderr = saga.filesystem.file(saga.url(job.backend.saga_job_err))
        stderr.copy("file://localhost/" + wd + "stderr")

        if len(job.outputsandbox) > 0:
            output_sandbox = saga.url(job.backend.filesystem_url + "/"
                                      + job.backend.workdir_uuid
                                      + "/_output_sandbox.tgz")
            logger.info(" * %s", output_sandbox.url)
            osb = saga.filesystem.file(output_sandbox)
            osb.copy("file://localhost/" + wd)

            # Unpack and delete the archive.  The existence check and the
            # tar invocation now use the SAME path (the original built the
            # tar argument separately, with an extra "/").
            # NOTE(review): os.system with interpolated paths is fragile
            # for paths containing spaces — subprocess.run would be safer.
            osbpath = wd + "_output_sandbox.tgz"
            if os.path.exists(osbpath):
                if os.system("tar -C %s -xzf %s" % (wd, osbpath)) != 0:
                    job.updateStatus('failed')
                    # fixed typo: 'upack' -> 'unpack'
                    raise Exception('cannot unpack output sandbox')
                os.remove(osbpath)

        job.updateStatus('completed')
    except saga.exception as e:
        logger.error('exception caught while poststaging: %s',
                     e.get_full_message())
        job.updateStatus('failed')

    return True