def add_initial_tasks(sobject, pipeline_code=None, processes=None, contexts=None, skip_duplicate=True, mode='standard', start_offset=0):
    '''Add initial tasks based on the pipeline of the sobject.

    @params:
    sobject - the sobject the tasks are created for
    pipeline_code - explicit pipeline code; defaults to the sobject's
        "pipeline_code" value.  '' or '__default__' builds a virtual
        single-process ("publish") pipeline
    processes - optional list of process names to restrict task creation to
    contexts - optional list of contexts to restrict task creation to; in
        'context' mode each entry is a "process:context" combo
    skip_duplicate - when True, processes/contexts that already have tasks
        are skipped
    mode - 'standard', 'context' (explicit process:context combos) or
        'simple process' (one context per process, named after the process)
    start_offset - days from today at which the first task starts

    @return:
    list of newly created task sobjects
    '''
    from pipeline import Pipeline

    # avoid the mutable-default-argument pitfall and make 'context' mode
    # safe to iterate when the caller passes nothing
    processes = processes or []
    contexts = contexts or []

    def _get_context(existing_task_dict, process_name, context=None):
        # Determine the context for a new task.  If tasks already exist for
        # this process:context, append a /NNN suffix one higher than the
        # largest suffix seen so far (e.g. "model" -> "model/002").
        existed = False
        if not existing_task_dict:
            if not context:
                context = process_name
        else:
            compare_key = "%s:%s" % (process_name, context)
            max_num = 0
            for item in existing_task_dict.keys():
                # strip any trailing /NNN suffix before comparing
                item_stripped = re.sub(r'/\d+$', '', item)
                #if item.startswith(compare_key):
                if item_stripped == compare_key:
                    existing_context = item.replace('%s:' % process_name, '')
                    suffix = existing_context.split('/')[-1]
                    try:
                        num = int(suffix)
                    except ValueError:
                        # non-numeric suffix (or no suffix at all)
                        num = 0
                    if num > max_num:
                        max_num = num
                    existed = True
            if existed:
                context = "%s/%0.3d" % (context, max_num + 1)
        return context

    # get pipeline
    if not pipeline_code:
        pipeline_code = sobject.get_value("pipeline_code")

    if pipeline_code in ['', '__default__']:
        pipeline = SearchType.create("sthpw/pipeline")
        pipeline.set_value("code", "__default__")
        pipeline.set_value("pipeline", '''
<pipeline>
    <process name='publish'/>
</pipeline>
''')
        # FIXME: HACK to initialize virtual pipeline
        pipeline.set_pipeline(pipeline.get_value("pipeline"))
    else:
        pipeline = Pipeline.get_by_code(pipeline_code)
        if not pipeline:
            # single-argument print form works in both Python 2 and 3
            print("WARNING: pipeline '%s' does not exist" % pipeline_code)
            return []

    #TODO: add recursive property here
    if processes:
        process_names = processes
    else:
        process_names = pipeline.get_process_names(recurse=True)

    # remember which process:context combos already have tasks
    existing_tasks = Task.get_by_sobject(sobject, order=False)
    existing_task_dict = {}
    for x in existing_tasks:
        key1 = '%s:%s' % (x.get_value('process'), x.get_value("context"))
        existing_task_dict[key1] = True
        # for backward compatibility, if the process has been created, we
        # will skip later below.  We may remove this in the future
        #key2 = '%s' %(x.get_value('process'))
        #existing_task_dict[key2] = True

    # create all of the tasks
    description = ""
    tasks = []

    start_date = Date()
    start_date.add_days(start_offset)

    bid_duration_unit = ProdSetting.get_value_by_key("bid_duration_unit")
    if not bid_duration_unit:
        bid_duration_unit = 'hour'

    # that's the date range in 5 days (not hours)
    default_duration = 5
    default_bid_duration = 8
    if bid_duration_unit == 'minute':
        default_bid_duration = 60

    last_task = None

    # this is the explicit mode for creating task for a specific
    # process:context combo
    if mode == 'context':
        for context_combo in contexts:
            process_name, context = context_combo.split(':')

            # depend_id is None since these are arbitrary tasks
            depend_id = None

            # first check if it already exists when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            task_existed = False
            for item in existing_task_dict:
                if item.startswith(key1):
                    task_existed = True
                    break
            if skip_duplicate and task_existed:
                continue

            process_obj = pipeline.get_process(process_name)
            if not process_obj:
                continue

            context = _get_context(existing_task_dict, process_name, context)
            pipe_code = process_obj.get_task_pipeline()

            attrs = process_obj.get_attributes()
            duration = attrs.get("duration")
            if duration:
                duration = int(duration)
            else:
                duration = default_duration
            bid_duration = attrs.get("bid_duration")
            if not bid_duration:
                bid_duration = default_bid_duration
            else:
                bid_duration = int(bid_duration)

            end_date = start_date.copy()
            # for a task to be x days long, we need duration x-1
            end_date.add_days(duration - 1)

            start_date_str = start_date.get_db_date()
            end_date_str = end_date.get_db_date()

            # Create the task
            last_task = Task.create(sobject, process_name, description,
                    depend_id=depend_id, pipeline_code=pipe_code,
                    start_date=start_date_str, end_date=end_date_str,
                    context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for a process connecting to
            # multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            # for backward compatibility, if the process has been created,
            # we will skip later below
            existing_task_dict[new_key] = True

            tasks.append(last_task)

            # start the next task the day after this one ends
            start_date = end_date.copy()
            start_date.add_days(1)

        return tasks

    for process_name in process_names:
        if last_task:
            depend_id = last_task.get_id()
        else:
            depend_id = None

        process_obj = pipeline.get_process(process_name)
        if not process_obj:
            continue

        attrs = process_obj.get_attributes()
        duration = attrs.get("duration")
        if duration:
            duration = int(duration)
        else:
            duration = default_duration
        bid_duration = attrs.get("bid_duration")
        if not bid_duration:
            bid_duration = default_bid_duration
        else:
            bid_duration = int(bid_duration)

        end_date = start_date.copy()
        if duration >= 1:
            # for a task to be x days long, we need duration x-1
            end_date.add_days(duration - 1)

        # output contexts could be duplicated from 2 different output
        # processes
        if mode == 'simple process':
            output_contexts = [process_name]
        else:
            output_contexts = pipeline.get_output_contexts(process_obj.get_name(), show_process=False)

        pipe_code = process_obj.get_task_pipeline()
        start_date_str = start_date.get_db_date()
        end_date_str = end_date.get_db_date()

        for context in output_contexts:
            # first check if it already exists when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            task_existed = False
            for item in existing_task_dict:
                if item.startswith(key1):
                    task_existed = True
                    break
            if skip_duplicate and task_existed:
                continue

            if contexts and context not in contexts:
                continue

            context = _get_context(existing_task_dict, process_name, context)

            last_task = Task.create(sobject, process_name, description,
                    depend_id=depend_id, pipeline_code=pipe_code,
                    start_date=start_date_str, end_date=end_date_str,
                    context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for a process connecting to
            # multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            # for backward compatibility, if the process has been created,
            # we will skip later below
            existing_task_dict[new_key] = True

            tasks.append(last_task)

        # start the next process's task the day after this one ends
        start_date = end_date.copy()
        start_date.add_days(1)

    return tasks
def add_initial_tasks(sobject, pipeline_code=None, processes=None, contexts=None, skip_duplicate=True, mode='standard', start_offset=0):
    '''Add initial tasks based on the pipeline of the sobject.

    NOTE(review): this redefinition shadows an earlier, nearly identical
    definition of add_initial_tasks in this file; the only behavioral
    difference is that this version restricts pipeline processes to the
    "node" and "approval" types.  The duplication should eventually be
    collapsed into one function.

    @params:
    sobject - the sobject the tasks are created for
    pipeline_code - explicit pipeline code; defaults to the sobject's
        "pipeline_code" value.  '' or '__default__' builds a virtual
        single-process ("publish") pipeline
    processes - optional list of process names to restrict task creation to
    contexts - optional list of contexts to restrict task creation to; in
        'context' mode each entry is a "process:context" combo
    skip_duplicate - when True, processes/contexts that already have tasks
        are skipped
    mode - 'standard', 'context' (explicit process:context combos) or
        'simple process' (one context per process, named after the process)
    start_offset - days from today at which the first task starts

    @return:
    list of newly created task sobjects
    '''
    from pipeline import Pipeline

    # avoid the mutable-default-argument pitfall and make 'context' mode
    # safe to iterate when the caller passes nothing
    processes = processes or []
    contexts = contexts or []

    def _get_context(existing_task_dict, process_name, context=None):
        # Determine the context for a new task.  If tasks already exist for
        # this process:context, append a /NNN suffix one higher than the
        # largest suffix seen so far (e.g. "model" -> "model/002").
        existed = False
        if not existing_task_dict:
            if not context:
                context = process_name
        else:
            compare_key = "%s:%s" % (process_name, context)
            max_num = 0
            for item in existing_task_dict.keys():
                # strip any trailing /NNN suffix before comparing
                item_stripped = re.sub(r'/\d+$', '', item)
                #if item.startswith(compare_key):
                if item_stripped == compare_key:
                    existing_context = item.replace('%s:' % process_name, '')
                    suffix = existing_context.split('/')[-1]
                    try:
                        num = int(suffix)
                    except ValueError:
                        # non-numeric suffix (or no suffix at all)
                        num = 0
                    if num > max_num:
                        max_num = num
                    existed = True
            if existed:
                context = "%s/%0.3d" % (context, max_num + 1)
        return context

    # get pipeline
    if not pipeline_code:
        pipeline_code = sobject.get_value("pipeline_code")

    if pipeline_code in ['', '__default__']:
        pipeline = SearchType.create("sthpw/pipeline")
        pipeline.set_value("code", "__default__")
        pipeline.set_value("pipeline", '''
<pipeline>
    <process name='publish'/>
</pipeline>
''')
        # FIXME: HACK to initialize virtual pipeline
        pipeline.set_pipeline(pipeline.get_value("pipeline"))
    else:
        pipeline = Pipeline.get_by_code(pipeline_code)
        if not pipeline:
            # single-argument print form works in both Python 2 and 3
            print("WARNING: pipeline '%s' does not exist" % pipeline_code)
            return []

    #TODO: add recursive property here
    if processes:
        process_names = processes
    else:
        # only real work nodes get tasks: filter to node/approval processes
        process_names = pipeline.get_process_names(recurse=True, type=["node", "approval"])

    # remember which process:context combos already have tasks
    existing_tasks = Task.get_by_sobject(sobject, order=False)
    existing_task_dict = {}
    for x in existing_tasks:
        key1 = '%s:%s' % (x.get_value('process'), x.get_value("context"))
        existing_task_dict[key1] = True
        # for backward compatibility, if the process has been created, we
        # will skip later below.  We may remove this in the future
        #key2 = '%s' %(x.get_value('process'))
        #existing_task_dict[key2] = True

    # create all of the tasks
    description = ""
    tasks = []

    start_date = Date()
    start_date.add_days(start_offset)

    bid_duration_unit = ProdSetting.get_value_by_key("bid_duration_unit")
    if not bid_duration_unit:
        bid_duration_unit = 'hour'

    # that's the date range in 5 days (not hours)
    default_duration = 5
    default_bid_duration = 8
    if bid_duration_unit == 'minute':
        default_bid_duration = 60

    last_task = None

    # this is the explicit mode for creating task for a specific
    # process:context combo
    if mode == 'context':
        for context_combo in contexts:
            process_name, context = context_combo.split(':')

            # depend_id is None since these are arbitrary tasks
            depend_id = None

            # first check if it already exists when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            task_existed = False
            for item in existing_task_dict:
                if item.startswith(key1):
                    task_existed = True
                    break
            if skip_duplicate and task_existed:
                continue

            process_obj = pipeline.get_process(process_name)
            if not process_obj:
                continue

            context = _get_context(existing_task_dict, process_name, context)
            pipe_code = process_obj.get_task_pipeline()

            attrs = process_obj.get_attributes()
            duration = attrs.get("duration")
            if duration:
                duration = int(duration)
            else:
                duration = default_duration
            bid_duration = attrs.get("bid_duration")
            if not bid_duration:
                bid_duration = default_bid_duration
            else:
                bid_duration = int(bid_duration)

            end_date = start_date.copy()
            # for a task to be x days long, we need duration x-1
            end_date.add_days(duration - 1)

            start_date_str = start_date.get_db_date()
            end_date_str = end_date.get_db_date()

            # Create the task
            last_task = Task.create(sobject, process_name, description,
                    depend_id=depend_id, pipeline_code=pipe_code,
                    start_date=start_date_str, end_date=end_date_str,
                    context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for a process connecting to
            # multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            # for backward compatibility, if the process has been created,
            # we will skip later below
            existing_task_dict[new_key] = True

            tasks.append(last_task)

            # start the next task the day after this one ends
            start_date = end_date.copy()
            start_date.add_days(1)

        return tasks

    for process_name in process_names:
        if last_task:
            depend_id = last_task.get_id()
        else:
            depend_id = None

        process_obj = pipeline.get_process(process_name)
        if not process_obj:
            continue

        attrs = process_obj.get_attributes()
        duration = attrs.get("duration")
        if duration:
            duration = int(duration)
        else:
            duration = default_duration
        bid_duration = attrs.get("bid_duration")
        if not bid_duration:
            bid_duration = default_bid_duration
        else:
            bid_duration = int(bid_duration)

        end_date = start_date.copy()
        if duration >= 1:
            # for a task to be x days long, we need duration x-1
            end_date.add_days(duration - 1)

        # output contexts could be duplicated from 2 different output
        # processes
        if mode == 'simple process':
            output_contexts = [process_name]
        else:
            output_contexts = pipeline.get_output_contexts(process_obj.get_name(), show_process=False)

        pipe_code = process_obj.get_task_pipeline()
        start_date_str = start_date.get_db_date()
        end_date_str = end_date.get_db_date()

        for context in output_contexts:
            # first check if it already exists when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            task_existed = False
            for item in existing_task_dict:
                if item.startswith(key1):
                    task_existed = True
                    break
            if skip_duplicate and task_existed:
                continue

            if contexts and context not in contexts:
                continue

            context = _get_context(existing_task_dict, process_name, context)

            last_task = Task.create(sobject, process_name, description,
                    depend_id=depend_id, pipeline_code=pipe_code,
                    start_date=start_date_str, end_date=end_date_str,
                    context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for a process connecting to
            # multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            # for backward compatibility, if the process has been created,
            # we will skip later below
            existing_task_dict[new_key] = True

            tasks.append(last_task)

        # start the next process's task the day after this one ends
        start_date = end_date.copy()
        start_date.add_days(1)

    return tasks