def convert_old_workflow_to_new_workflow(data):
    """
    Migrate a request's old-style workflow in place to the new object model.

    Walks every work template in ``data['request_metadata']['workflow']`` and
    replaces any dict-style collection/processing entry with a proper
    ``Collection`` / ``Processing`` instance, preserving the original
    ``internal_id`` keys.

    :param data: request dict; may or may not carry ``request_metadata``.
    :returns: the same ``data`` object, mutated in place.
    """
    if ('request_metadata' in data and data['request_metadata']
            and 'workflow' in data['request_metadata']):
        workflow = data['request_metadata']['workflow']
        if workflow:
            for work_key in workflow.works_template:
                work = workflow.works_template[work_key]
                for coll_key in work.collections:
                    coll = work.collections[coll_key]
                    # isinstance (not exact-type check) so Collection
                    # subclasses are also recognized as already converted.
                    if isinstance(coll, Collection):
                        continue
                    # Old format: a plain dict. Everything except
                    # scope/name becomes the new collection's metadata.
                    coll_metadata = copy.copy(coll)
                    coll_metadata.pop('scope', None)
                    coll_metadata.pop('name', None)
                    new_coll = Collection(scope=coll['scope'],
                                          name=coll['name'],
                                          coll_metadata=coll_metadata)
                    new_coll.internal_id = coll_key
                    work.collections[coll_key] = new_coll
                for proc_key in work.processings:
                    proc = work.processings[proc_key]
                    if isinstance(proc, Processing):
                        continue
                    proc_metadata = proc['processing_metadata']
                    new_proc = Processing(processing_metadata=proc_metadata)
                    new_proc.internal_id = proc_key
                    # Only set external_id when a rule id exists, matching
                    # the old converter's behavior.
                    if 'rule_id' in proc_metadata:
                        new_proc.external_id = proc_metadata['rule_id']
                    work.processings[proc_key] = new_proc
    return data
def create_processing(self, input_output_maps=None):
    """
    *** Function called by Transformer agent.

    Create a Processing bound to this work's PanDA task id and register it
    as an active processing.

    :param input_output_maps: new maps from inputs to outputs (unused in
        this implementation; kept for interface compatibility). The default
        was changed from a shared mutable ``[]`` to ``None``.
    :returns: the newly created Processing.
    """
    processing_metadata = {'panda_task_id': self.panda_task_id}
    proc = Processing(processing_metadata=processing_metadata)
    proc.workload_id = self.panda_task_id
    self.add_processing_to_processings(proc)
    self.active_processings.append(proc.internal_id)
    return proc
def create_processing(self, input_output_maps=None):
    """
    Create and register a Processing describing the Rucio rule backing this
    work (source/destination RSE, lifetime, rule id).

    :param input_output_maps: new maps from inputs to outputs (unused in
        this implementation; kept for interface compatibility). The default
        was changed from a shared mutable ``[]`` to ``None``.
    :returns: the newly created Processing.
    """
    processing_metadata = {'src_rse': self.src_rse,
                           'dest_rse': self.dest_rse,
                           'life_time': self.life_time,
                           'rule_id': self.rule_id}
    proc = Processing(processing_metadata=processing_metadata)
    proc.external_id = self.rule_id
    if self.rule_id:
        # A rule already exists, so the processing counts as submitted now.
        # NOTE: utcnow() returns a naive datetime; kept deliberately to
        # preserve the original (naive-UTC) behavior.
        proc.submitted_at = datetime.datetime.utcnow()
    self.add_processing_to_processings(proc)
    self.active_processings.append(proc.internal_id)
    return proc
def create_processing(self, input_output_maps=None):
    """
    *** Function called by Transformer agent.

    Build the PanDA task parameter map for this work and register a new
    Processing that carries it.

    :param input_output_maps: new maps from inputs to outputs (unused in
        this implementation; kept for interface compatibility). The default
        was changed from a shared mutable ``[]`` to ``None``.
    :returns: the newly created Processing.
    """
    # Avoid duplicated task names by suffixing the work id.
    # NOTE(review): this mutates self.task_name, so a second call would
    # append the suffix again — confirm this method runs at most once.
    self.task_name = self.task_name + "_" + str(self.get_work_id())

    in_files = [job['name'] for job in self.dependency_map]
    task_param_map = self._build_task_param_map(in_files)

    processing_metadata = {'task_param': task_param_map}
    proc = Processing(processing_metadata=processing_metadata)
    proc.workload_id = None
    self.add_processing_to_processings(proc)
    self.active_processings.append(proc.internal_id)
    return proc

def _build_task_param_map(self, in_files):
    """Assemble the PanDA task parameter dictionary for *in_files*."""
    task_param_map = {}
    task_param_map['vo'] = 'wlcg'
    if self.queue and len(self.queue) > 0:
        task_param_map['site'] = self.queue
    task_param_map['workingGroup'] = 'lsst'
    # One pre-staged input file per job; inputs are passed as a PFN list.
    task_param_map['nFilesPerJob'] = 1
    task_param_map['nFiles'] = len(in_files)
    task_param_map['noInput'] = True
    task_param_map['pfnList'] = in_files
    task_param_map['taskName'] = self.task_name
    task_param_map['userName'] = '******'
    task_param_map['taskPriority'] = 900
    task_param_map['architecture'] = ''
    task_param_map['transUses'] = ''
    task_param_map['transHome'] = None
    if self.encode_command_line:
        # Encoded variant of the bash-c wrapper for command lines that
        # cannot be passed verbatim.
        task_param_map['transPath'] = 'https://atlpan.web.cern.ch/atlpan/bash-c-enc'
        task_param_map['encJobParams'] = True
    else:
        task_param_map['transPath'] = 'https://atlpan.web.cern.ch/atlpan/bash-c'
    task_param_map['processingType'] = self.processingType
    task_param_map['prodSourceLabel'] = self.prodSourceLabel
    task_param_map['taskType'] = self.task_type
    task_param_map['coreCount'] = self.core_count
    task_param_map['skipScout'] = True
    task_param_map['cloud'] = self.task_cloud
    if self.task_rss and self.task_rss > 0:
        task_param_map['ramCount'] = self.task_rss
        task_param_map['ramUnit'] = 'MB'
    task_param_map['inputPreStaging'] = True
    # NOTE(review): hard-coded rule id / chunk count carried over from the
    # original implementation — confirm these are intentional.
    task_param_map['prestagingRuleID'] = 123
    task_param_map['nChunksToWait'] = 1
    task_param_map['maxCpuCount'] = self.maxWalltime
    task_param_map['maxWalltime'] = self.maxWalltime
    task_param_map['maxFailure'] = self.maxAttempt
    task_param_map['maxAttempt'] = self.maxAttempt
    task_param_map['log'] = self.task_log
    task_param_map['jobParameters'] = [
        {
            'type': 'constant',
            'value': self.executable,  # noqa: E501
        },
    ]
    return task_param_map
def create_processing(self, input_output_maps):
    """
    Create an empty-metadata Processing, register it with this work, and
    mark it active.

    :param input_output_maps: new maps from inputs to outputs (not used by
        this implementation).
    :returns: the newly created Processing.
    """
    processing = Processing(processing_metadata={})
    self.add_processing_to_processings(processing)
    self.active_processings.append(processing.internal_id)
    return processing
def create_processing(self, input_output_maps=None):
    """
    Create and register a Processing that carries the number of points to
    generate for this work.

    :param input_output_maps: new maps from inputs to outputs (unused in
        this implementation; kept for interface compatibility). The default
        was changed from a shared mutable ``[]`` to ``None``.
    :returns: the newly created Processing.
    """
    processing_metadata = {'points_to_generate': self.points_to_generate}
    proc = Processing(processing_metadata=processing_metadata)
    self.add_processing_to_processings(proc)
    self.active_processings.append(proc.internal_id)
    return proc