def _iterate_tasks(self):
    """Launch and monitor the execution of the workflow's tasks.

    For each task of the workflow: if all of its inputs are available
    (either as literal values or as result-file references) the task is
    either polled for status (when already submitted) or submitted as a
    new sub-job.  Raises WorkflowJobError when a sub-job submission fails.
    """
    # Accumulates whether any sub-job produced a status update this pass.
    # NOTE(review): job_signal is computed but never returned or stored in
    # this block — presumably a `return job_signal` is expected by callers;
    # verify against the rest of the class.
    job_signal = False
    # starting jobs
    for t in self.workflow.tasks:
        # all data entries feeding this task's input parameters
        input_entries = [entry for entry in self.data if (entry['type']=='task_input' and entry['task'].id==t.id)]
        # if data available and not already running: every input entry must
        # carry either an inline 'value' or a result-file 'srcFileName'
        if (len(input_entries)==len([entry for entry in input_entries if entry.has_key('value') or entry.has_key('srcFileName')])):
            # bookkeeping entry for this task's sub-job (assumed to exist;
            # IndexError here would mean sub_jobs was not pre-populated)
            job_entry = [job_entry for job_entry in self.sub_jobs if job_entry['task'].id==t.id][0]
            if job_entry.has_key('job_id'):
                # job already submitted: poll it unless it has finished
                job_id = job_entry['job_id']
                if not(job_entry['job_status'].isEnded()):
                    j = Mobyle.JobFacade.JobFacade.getFromJobId(job_id)
                    su_signal = self._process_subjob_status_update(j, job_entry, t)
                    job_signal = job_signal or su_signal
            else:
                # if job is not running, start it
                log.debug('starting job for task %s' % t.id)
                log.debug('registry.getProgramUrl(t.service = %s,t.server= %s)' %(t.service , t.server) )
                if t.server is None:
                    t.server = 'local'
                job_parameters = {}
                # Dispatch on service kind: try to resolve the task's service
                # as a program first; on failure fall back to resolving it as
                # a (sub-)workflow.  NOTE(review): the bare except also hides
                # unrelated errors raised by getProgramUrl — confirm intended.
                try:
                    url = registry.getProgramUrl(t.service,t.server)
                    j = Mobyle.JobFacade.JobFacade.getFromService(programUrl=url, workflowId=self.id)
                    job_parameters['programName'] = url
                except:
                    url = registry.getWorkflowUrl(t.service,t.server)
                    j = Mobyle.JobFacade.JobFacade.getFromService(workflowUrl=url, workflowId=self.id)
                    job_parameters['workflowUrl'] = url
                job_signal = True
                # Build the submission form: inline values are passed
                # directly; file inputs are passed as references to result
                # files of upstream jobs ('result' mode).
                for i_e in input_entries:
                    if i_e.has_key('value'):
                        job_parameters[i_e['parameter_id']]=i_e['value']
                    else:
                        job_parameters[i_e['parameter_id']+'.src']=i_e['src']
                        job_parameters[i_e['parameter_id']+'.srcFileName']=i_e['srcFileName']
                        job_parameters[i_e['parameter_id']+'.mode']='result'
                        job_parameters[i_e['parameter_id']+'.name']=i_e['parameter_id']+'.data'
                job_parameters['email'] = self.email
                try:
                    resp = j.create(request_dict=job_parameters)
                except Exception, e:
                    # NOTE(review): the original exception `e` is discarded;
                    # its message/traceback might be worth including.
                    raise WorkflowJobError("error during submission of task %s(%s)" \
                        %(t.id, t.description))
                job_entry['job_id'] = resp['id']
                # the remote side may report submission errors in the
                # response payload rather than by raising
                if resp.has_key('errorparam') or resp.has_key('errormsg'):
                    raise WorkflowJobError("error during submission of task %s(%s).\n job %s message: %s: %s." \
                        %(t.id, t.description,job_entry['job_id'],resp.get('errorparam'),resp.get('errormsg')))
                # persist the task -> job mapping before polling its status
                self.jobState.setTaskJob(t,job_entry['job_id'])
                self.jobState.commit()
                su_signal = self._process_subjob_status_update(j, job_entry, t)
                job_signal = job_signal or su_signal
                log.debug('job for task %s: %s' % (t.id, job_entry['job_id']))
def dump(self, type, dir, registry=registry):
    """Build and pickle the index for one category of services.

    Iterates over ``registry.<type>s`` (e.g. "program" -> registry.programs),
    parses each service definition, and maps the service URL to the entry
    produced by ``self.indexClass.getIndexEntry``.  Entries that fail to
    parse are logged and skipped (best effort).  The resulting dict is
    pickled to ``<dir>/<type><indexFileName>``.

    :param type: service category prefix ("program", "workflow", "viewer")
    :param dir: destination directory for the pickled index file
    :param registry: service registry to enumerate (defaults to the
                     module-level registry)
    """
    idx = {}
    for s in getattr(registry, type + "s"):
        try:
            doc = etree.parse(s.path)
            doc.xinclude()
            if isinstance(s, ProgramDef):
                idx[registry.getProgramUrl(s.name, s.server.name)] = \
                    self.indexClass.getIndexEntry(doc, s)
            elif isinstance(s, WorkflowDef):
                idx[registry.getWorkflowUrl(s.name, s.server.name)] = \
                    self.indexClass.getIndexEntry(doc, s)
            elif isinstance(s, ViewerDef):
                idx[registry.getViewerUrl(s.name)] = \
                    self.indexClass.getIndexEntry(doc, s)
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; a broken service description must not
        # abort the whole index generation, so log it and carry on.
        except Exception:
            i_log.error(
                "Error while generating %s entry for %s/%s"
                % (self.indexClass.__name__, s.server.name, s.name),
                exc_info=True,
            )
            i_log.error(s.name)
            i_log.error(s.path)
            i_log.error(s.url)
    # Open the output file only once the index is fully built (the old code
    # truncated it up front, leaving an empty file if the build crashed),
    # and make sure the handle is closed even if pickling fails.
    output = open(os.path.join(dir, type + self.indexClass.indexFileName), "wb")
    try:
        # protocol 2: highest protocol available to Python 2 readers
        pickle.dump(idx, output, 2)
    finally:
        output.close()