def add_initial_tasks(sobject, pipeline_code=None, processes=None, contexts=None, skip_duplicate=True, mode='standard', start_offset=0):
    '''Add initial tasks based on the pipeline of the sobject.

    sobject        - parent sobject the new tasks are attached to
    pipeline_code  - explicit pipeline code; falls back to the sobject's
                     "pipeline_code" value, then to a virtual "__default__"
                     pipeline containing a single publish process
    processes      - optional subset of process names to create tasks for
    contexts       - in mode 'context', a list of "process:context" combos;
                     otherwise an optional whitelist of contexts
    skip_duplicate - when True, skip process:context combos that already
                     have tasks
    mode           - 'standard', 'context' or 'simple process'
    start_offset   - days from today for the first task's start date

    Returns the list of newly created task sobjects.
    '''
    from pipeline import Pipeline

    # Mutable default arguments are shared across calls; normalize None here
    # instead (backward compatible: both were only read, never mutated).
    if processes is None:
        processes = []
    if contexts is None:
        contexts = []

    def _get_context(existing_task_dict, process_name, context=None):
        '''Return a unique context name, appending a /NNN suffix when the
        process:context combo already has existing tasks.'''
        existed = False
        if not existing_task_dict:
            if not context:
                context = process_name
        else:
            compare_key = "%s:%s" % (process_name, context)
            max_num = 0
            for item in existing_task_dict.keys():
                # strip any trailing /NNN suffix before comparing
                item_stripped = re.sub(r'/\d+$', '', item)
                if item_stripped == compare_key:
                    existing_context = item.replace('%s:' % process_name, '')
                    suffix = existing_context.split('/')[-1]
                    try:
                        num = int(suffix)
                    except ValueError:
                        # non-numeric suffix: treat as the base context
                        num = 0
                    if num > max_num:
                        max_num = num
                    existed = True
            if existed:
                context = "%s/%0.3d" % (context, max_num + 1)
        return context

    def _task_exists(key):
        '''True when some existing task key starts with this process:context key.'''
        for item in existing_task_dict:
            if item.startswith(key):
                return True
        return False

    def _get_durations(process_obj):
        '''Return (duration, bid_duration) from the process attributes,
        falling back to the defaults when unset.'''
        attrs = process_obj.get_attributes()
        duration = attrs.get("duration")
        if duration:
            duration = int(duration)
        else:
            duration = default_duration
        bid_duration = attrs.get("bid_duration")
        if bid_duration:
            bid_duration = int(bid_duration)
        else:
            bid_duration = default_bid_duration
        return duration, bid_duration

    # get pipeline
    if not pipeline_code:
        pipeline_code = sobject.get_value("pipeline_code")

    if pipeline_code in ['', '__default__']:
        pipeline = SearchType.create("sthpw/pipeline")
        pipeline.set_value("code", "__default__")
        pipeline.set_value("pipeline", '''
<pipeline>
    <process name='publish'/>
</pipeline>
        ''')
        # FIXME: HACK to initialize virtual pipeline
        pipeline.set_pipeline(pipeline.get_value("pipeline"))
    else:
        pipeline = Pipeline.get_by_code(pipeline_code)
        if not pipeline:
            print("WARNING: pipeline '%s' does not exist" % pipeline_code)
            return []

    #TODO: add recursive property here
    if processes:
        process_names = processes
    else:
        process_names = pipeline.get_process_names(recurse=True)

    # remember which process:context combos already have tasks
    existing_tasks = Task.get_by_sobject(sobject, order=False)
    existing_task_dict = {}
    for x in existing_tasks:
        key1 = '%s:%s' % (x.get_value('process'), x.get_value("context"))
        existing_task_dict[key1] = True

    # create all of the tasks
    description = ""
    tasks = []
    start_date = Date()
    start_date.add_days(start_offset)

    bid_duration_unit = ProdSetting.get_value_by_key("bid_duration_unit")
    if not bid_duration_unit:
        bid_duration_unit = 'hour'

    # that's the date range in 5 days (not hours)
    default_duration = 5
    default_bid_duration = 8
    if bid_duration_unit == 'minute':
        default_bid_duration = 60

    last_task = None

    # explicit mode: create a task for each specific process:context combo
    if mode == 'context':
        for context_combo in contexts:
            process_name, context = context_combo.split(':')
            # depend_id is None since these are arbitrary tasks
            depend_id = None

            # skip combos that already have a task when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            if skip_duplicate and _task_exists(key1):
                continue

            process_obj = pipeline.get_process(process_name)
            if not process_obj:
                continue

            context = _get_context(existing_task_dict, process_name, context)
            pipe_code = process_obj.get_task_pipeline()
            duration, bid_duration = _get_durations(process_obj)

            end_date = start_date.copy()
            # for a task to be x days long, we need duration x-1.
            end_date.add_days(duration - 1)

            start_date_str = start_date.get_db_date()
            end_date_str = end_date.get_db_date()

            # Create the task
            last_task = Task.create(sobject, process_name, description, depend_id=depend_id, pipeline_code=pipe_code, start_date=start_date_str, end_date=end_date_str, context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for process connecting to multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            existing_task_dict[new_key] = True
            tasks.append(last_task)

            start_date = end_date.copy()
            # start the day after
            start_date.add_days(1)
        return tasks

    # standard / 'simple process' modes: walk the pipeline processes in order,
    # chaining each task to the previous via depend_id
    for process_name in process_names:
        if last_task:
            depend_id = last_task.get_id()
        else:
            depend_id = None

        process_obj = pipeline.get_process(process_name)
        if not process_obj:
            continue

        duration, bid_duration = _get_durations(process_obj)

        end_date = start_date.copy()
        if duration >= 1:
            # for a task to be x days long, we need duration x-1.
            end_date.add_days(duration - 1)

        # output contexts could be duplicated from 2 different output processes
        if mode == 'simple process':
            output_contexts = [process_name]
        else:
            output_contexts = pipeline.get_output_contexts(process_obj.get_name(), show_process=False)

        pipe_code = process_obj.get_task_pipeline()
        start_date_str = start_date.get_db_date()
        end_date_str = end_date.get_db_date()

        for context in output_contexts:
            # skip contexts that already have a task when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            if skip_duplicate and _task_exists(key1):
                continue
            # honor the optional context whitelist
            if contexts and context not in contexts:
                continue

            context = _get_context(existing_task_dict, process_name, context)
            last_task = Task.create(sobject, process_name, description, depend_id=depend_id, pipeline_code=pipe_code, start_date=start_date_str, end_date=end_date_str, context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for process connecting to multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            existing_task_dict[new_key] = True
            tasks.append(last_task)

        start_date = end_date.copy()
        # start the day after
        start_date.add_days(1)

    return tasks
def get_display(self):
    '''Build and return the task-list widget: a Filterbox holding the task
    filters (milestone, process, user, task-property search, retired, date
    range, sub-task checkbox, task status), followed by a table of tasks for
    the assets matching the current filter values.'''
    web = WebContainer.get_web()
    widget = Widget()

    # fall back to the widget options when no search type was set explicitly
    if not self.search_type:
        self.search_type = self.options.get("search_type")
    assert self.search_type

    sobject_filter = self.sobject_filter
    # record the referenced search type in the shared web state
    web_state = WebState.get()
    web_state.add_state("ref_search_type", self.search_type)

    div = FilterboxWdg()
    widget.add(div)

    # add the sobject filter
    if self.sobject_filter:
        div.add(self.sobject_filter)

    # add a milestone filter
    milestone_filter = FilterSelectWdg("milestone_filter", label="Milestone: ")
    milestones = Search("sthpw/milestone").get_sobjects()
    milestone_filter.set_sobjects_for_options(milestones, "code", "code")
    milestone_filter.add_empty_option(label='-- Any Milestones --')
    milestone_filter.set_submit_onchange(False)
    milestone = milestone_filter.get_value()
    div.add_advanced_filter(milestone_filter)

    # add a process filter
    process_filter = ProcessFilterSelectWdg(name=self.process_filter_name, label='Process: ')
    process_filter.set_search_type(self.search_type)
    process_filter.set_submit_onchange(False)
    div.add_advanced_filter(process_filter)

    user_filter = None
    user = Environment.get_user_name()
    # it has a special column 'assigned'
    if not UserFilterWdg.has_restriction():
        user_filter = UserFilterWdg()
        user_filter.set_search_column('assigned')
        user = user_filter.get_value()
        div.add_advanced_filter(user_filter)

    # add a task properties search
    search_columns = ['status', 'description']
    task_search_filter = SearchFilterWdg(name='task_prop_search', \
            columns=search_columns, label='Task Search: ')
    div.add_advanced_filter(task_search_filter)

    # add a retired filter
    retired_filter = RetiredFilterWdg()
    div.add_advanced_filter(retired_filter)

    # set a limit to only see set amount of sobjects at a time
    search_limit = SearchLimitWdg()
    search_limit.set_limit(50)
    search_limit.set_style(SearchLimitWdg.LESS_DETAIL)
    div.add_bottom(search_limit)

    div.add_advanced_filter(HtmlElement.br(2))

    start_date_wdg = CalendarInputWdg("start_date_filter", label="From: ", css='med')
    start_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(start_date_wdg)
    start_date = start_date_wdg.get_value()

    # these dates are actually used for search filtering
    processed_start_date = None
    processed_end_date = None
    if start_date:
        date = Date(db_date=start_date)
        # this guarantees a valid date (today) if invalid input is detected
        processed_start_date = date.get_db_date()
        # round-trip mismatch means the raw input was not a valid db date
        if start_date != processed_start_date:
            start_date_wdg.set_value(self.INVALID)

    # add hints
    hint = HintWdg("The 'From' and 'To' dates apply to bid dates.")
    #span.add(hint)

    end_date_wdg = CalendarInputWdg("end_date_filter", label="To: ", css='med')
    end_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(end_date_wdg)
    div.add_advanced_filter(hint)
    end_date = end_date_wdg.get_value()
    if end_date:
        date = Date(db_date=end_date)
        processed_end_date = date.get_db_date()
        if end_date != processed_end_date:
            end_date_wdg.set_value(self.INVALID)

    # show sub task checkbox
    sub_task_cb = FilterCheckboxWdg('show_sub_tasks', label='show sub tasks', css='med')
    div.add_advanced_filter(sub_task_cb)

    div.add_advanced_filter(HtmlElement.br(2))

    task_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(task_filter)

    # shot-specific status filter only applies to the prod/shot search type
    shot_filter = None
    if self.search_type == 'prod/shot':
        shot_filter = SObjectStatusFilterWdg()
        div.add_advanced_filter(shot_filter)

    # add refresh icon
    '''
    refresh = IconRefreshWdg(long=False)
    calendar_div.add(refresh)
    calendar_div.add(SpanWdg(' ', css='small'))
    '''

    # get all of the assets
    search = Search(self.search_type)
    if sobject_filter:
        sobject_filter.alter_search(search)
    if shot_filter:
        shot_statuses = shot_filter.get_statuses()
        shot_statuses_selected = shot_filter.get_values()
        # only constrain the search when the user deselected some statuses
        if shot_statuses != shot_statuses_selected:
            search.add_filters("status", shot_filter.get_values() )
    assets = search.get_sobjects()
    if not assets:
        # drawing the empty table prevents the loss of some prefs data
        table = TableWdg("sthpw/task", self.task_view)
        #widget.add(HtmlElement.h3("No assets found"))
        widget.add(table)
        return widget

    # this assumes looking at one project only
    project_search_type = assets[0].get_search_type()
    ids = SObject.get_values(assets, 'id')

    # get all of the tasks
    search = Search("sthpw/task")
    # apply the validated date range against bid and actual dates,
    # skipping filters whose raw input was flagged invalid above
    if processed_start_date and start_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_start_date >= '%s' or actual_start_date >='%s')" \
            % (processed_start_date, processed_start_date))
    if processed_end_date and end_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_end_date <= '%s' or actual_end_date <='%s')" \
            % (processed_end_date, processed_end_date))

    # filter out sub pipeline tasks
    if not sub_task_cb.is_checked():
        search.add_regex_filter('process', '/', op='NEQ')

    search.add_filter("search_type", project_search_type)
    search.add_filters("search_id", ids )
    # order by the search ids of the asset as they were defined in the
    # previous search
    search.add_enum_order_by("search_id", ids)

    if user != "":
        search.add_filter("assigned", user)
    if milestone != "":
        search.add_filter("milestone_code", milestone)
    process_filter.alter_search(search)
    task_search_filter.alter_search(search)

    if not self.show_all_task_approvals:
        #task_filter = TaskStatusFilterWdg(task_pipeline="task")
        #widget.add(task_filter)
        task_statuses = task_filter.get_processes()
        task_statuses_selected = task_filter.get_values()
        # one way to show tasks with obsolete statuses when the user
        # check all the task status checkboxes
        if task_statuses != task_statuses_selected:
            search.add_filters("status", task_filter.get_values() )

    # filter for retired ...
    # NOTE: this must be above the search limit filter
    # because it uses a get count which commits the retired flag
    if retired_filter.get_value() == 'true':
        search.set_show_retired(True)

    # alter_search() will run set_search() implicitly
    search_limit.alter_search(search)

    # define the table
    table = TableWdg("sthpw/task", self.task_view)

    # get all of the tasks
    tasks = search.get_sobjects()
    sorted_tasks = self.process_tasks(tasks, search)

    widget.add( HtmlElement.br() )
    table.set_sobjects(sorted_tasks)

    # make some adjustments to the calendar widget
    # NOTE(review): assumes the task view contains a "schedule" widget —
    # confirm against the view definition
    calendar_wdg = table.get_widget("schedule")
    for name,value in self.calendar_options.items():
        calendar_wdg.set_option(name, value)

    widget.add(table)
    return widget
def get_display(self):
    '''Build and return the task-list widget: a Filterbox of task filters
    followed by a table of tasks for the assets matching the filter values.

    NOTE(review): this method appears to be a near-verbatim duplicate of
    another get_display in this file — confirm whether both are needed.'''
    web = WebContainer.get_web()
    widget = Widget()

    # fall back to the widget options when no search type was set explicitly
    if not self.search_type:
        self.search_type = self.options.get("search_type")
    assert self.search_type

    sobject_filter = self.sobject_filter
    # record the referenced search type in the shared web state
    web_state = WebState.get()
    web_state.add_state("ref_search_type", self.search_type)

    div = FilterboxWdg()
    widget.add(div)

    # add the sobject filter
    if self.sobject_filter:
        div.add(self.sobject_filter)

    # add a milestone filter
    milestone_filter = FilterSelectWdg("milestone_filter", label="Milestone: ")
    milestones = Search("sthpw/milestone").get_sobjects()
    milestone_filter.set_sobjects_for_options(milestones, "code", "code")
    milestone_filter.add_empty_option(label='-- Any Milestones --')
    milestone_filter.set_submit_onchange(False)
    milestone = milestone_filter.get_value()
    div.add_advanced_filter(milestone_filter)

    # add a process filter
    process_filter = ProcessFilterSelectWdg(name=self.process_filter_name, label='Process: ')
    process_filter.set_search_type(self.search_type)
    process_filter.set_submit_onchange(False)
    div.add_advanced_filter(process_filter)

    user_filter = None
    user = Environment.get_user_name()
    # it has a special column 'assigned'
    if not UserFilterWdg.has_restriction():
        user_filter = UserFilterWdg()
        user_filter.set_search_column('assigned')
        user = user_filter.get_value()
        div.add_advanced_filter(user_filter)

    # add a task properties search
    search_columns = ['status', 'description']
    task_search_filter = SearchFilterWdg(name='task_prop_search', \
            columns=search_columns, label='Task Search: ')
    div.add_advanced_filter(task_search_filter)

    # add a retired filter
    retired_filter = RetiredFilterWdg()
    div.add_advanced_filter(retired_filter)

    # set a limit to only see set amount of sobjects at a time
    search_limit = SearchLimitWdg()
    search_limit.set_limit(50)
    search_limit.set_style(SearchLimitWdg.LESS_DETAIL)
    div.add_bottom(search_limit)

    div.add_advanced_filter(HtmlElement.br(2))

    start_date_wdg = CalendarInputWdg("start_date_filter", label="From: ", css='med')
    start_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(start_date_wdg)
    start_date = start_date_wdg.get_value()

    # these dates are actually used for search filtering
    processed_start_date = None
    processed_end_date = None
    if start_date:
        date = Date(db_date=start_date)
        # this guarantees a valid date (today) if invalid input is detected
        processed_start_date = date.get_db_date()
        # round-trip mismatch means the raw input was not a valid db date
        if start_date != processed_start_date:
            start_date_wdg.set_value(self.INVALID)

    # add hints
    hint = HintWdg("The 'From' and 'To' dates apply to bid dates.")
    #span.add(hint)

    end_date_wdg = CalendarInputWdg("end_date_filter", label="To: ", css='med')
    end_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(end_date_wdg)
    div.add_advanced_filter(hint)
    end_date = end_date_wdg.get_value()
    if end_date:
        date = Date(db_date=end_date)
        processed_end_date = date.get_db_date()
        if end_date != processed_end_date:
            end_date_wdg.set_value(self.INVALID)

    # show sub task checkbox
    sub_task_cb = FilterCheckboxWdg('show_sub_tasks', label='show sub tasks', css='med')
    div.add_advanced_filter(sub_task_cb)

    div.add_advanced_filter(HtmlElement.br(2))

    task_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(task_filter)

    # shot-specific status filter only applies to the prod/shot search type
    shot_filter = None
    if self.search_type == 'prod/shot':
        shot_filter = SObjectStatusFilterWdg()
        div.add_advanced_filter(shot_filter)

    # add refresh icon
    '''
    refresh = IconRefreshWdg(long=False)
    calendar_div.add(refresh)
    calendar_div.add(SpanWdg(' ', css='small'))
    '''

    # get all of the assets
    search = Search(self.search_type)
    if sobject_filter:
        sobject_filter.alter_search(search)
    if shot_filter:
        shot_statuses = shot_filter.get_statuses()
        shot_statuses_selected = shot_filter.get_values()
        # only constrain the search when the user deselected some statuses
        if shot_statuses != shot_statuses_selected:
            search.add_filters("status", shot_filter.get_values())
    assets = search.get_sobjects()
    if not assets:
        # drawing the empty table prevents the loss of some prefs data
        table = TableWdg("sthpw/task", self.task_view)
        #widget.add(HtmlElement.h3("No assets found"))
        widget.add(table)
        return widget

    # this assumes looking at one project only
    project_search_type = assets[0].get_search_type()
    ids = SObject.get_values(assets, 'id')

    # get all of the tasks
    search = Search("sthpw/task")
    # apply the validated date range against bid and actual dates,
    # skipping filters whose raw input was flagged invalid above
    if processed_start_date and start_date_wdg.get_value( True) != self.INVALID:
        search.add_where("(bid_start_date >= '%s' or actual_start_date >='%s')" \
            % (processed_start_date, processed_start_date))
    if processed_end_date and end_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_end_date <= '%s' or actual_end_date <='%s')" \
            % (processed_end_date, processed_end_date))

    # filter out sub pipeline tasks
    if not sub_task_cb.is_checked():
        search.add_regex_filter('process', '/', op='NEQ')

    search.add_filter("search_type", project_search_type)
    search.add_filters("search_id", ids)
    # order by the search ids of the asset as they were defined in the
    # previous search
    search.add_enum_order_by("search_id", ids)

    if user != "":
        search.add_filter("assigned", user)
    if milestone != "":
        search.add_filter("milestone_code", milestone)
    process_filter.alter_search(search)
    task_search_filter.alter_search(search)

    if not self.show_all_task_approvals:
        #task_filter = TaskStatusFilterWdg(task_pipeline="task")
        #widget.add(task_filter)
        task_statuses = task_filter.get_processes()
        task_statuses_selected = task_filter.get_values()
        # one way to show tasks with obsolete statuses when the user
        # check all the task status checkboxes
        if task_statuses != task_statuses_selected:
            search.add_filters("status", task_filter.get_values())

    # filter for retired ...
    # NOTE: this must be above the search limit filter
    # because it uses a get count which commits the retired flag
    if retired_filter.get_value() == 'true':
        search.set_show_retired(True)

    # alter_search() will run set_search() implicitly
    search_limit.alter_search(search)

    # define the table
    table = TableWdg("sthpw/task", self.task_view)

    # get all of the tasks
    tasks = search.get_sobjects()
    sorted_tasks = self.process_tasks(tasks, search)

    widget.add(HtmlElement.br())
    table.set_sobjects(sorted_tasks)

    # make some adjustments to the calendar widget
    # NOTE(review): assumes the task view contains a "schedule" widget —
    # confirm against the view definition
    calendar_wdg = table.get_widget("schedule")
    for name, value in self.calendar_options.items():
        calendar_wdg.set_option(name, value)

    widget.add(table)
    return widget
def add_initial_tasks(sobject, pipeline_code=None, processes=None, contexts=None, skip_duplicate=True, mode='standard', start_offset=0):
    '''Add initial tasks based on the pipeline of the sobject.

    Only "node" and "approval" type processes receive tasks when the
    process list is derived from the pipeline.

    sobject        - parent sobject the new tasks are attached to
    pipeline_code  - explicit pipeline code; falls back to the sobject's
                     "pipeline_code" value, then to a virtual "__default__"
                     pipeline containing a single publish process
    processes      - optional subset of process names to create tasks for
    contexts       - in mode 'context', a list of "process:context" combos;
                     otherwise an optional whitelist of contexts
    skip_duplicate - when True, skip process:context combos that already
                     have tasks
    mode           - 'standard', 'context' or 'simple process'
    start_offset   - days from today for the first task's start date

    Returns the list of newly created task sobjects.
    '''
    from pipeline import Pipeline

    # Mutable default arguments are shared across calls; normalize None here
    # instead (backward compatible: both were only read, never mutated).
    if processes is None:
        processes = []
    if contexts is None:
        contexts = []

    def _get_context(existing_task_dict, process_name, context=None):
        '''Return a unique context name, appending a /NNN suffix when the
        process:context combo already has existing tasks.'''
        existed = False
        if not existing_task_dict:
            if not context:
                context = process_name
        else:
            compare_key = "%s:%s" % (process_name, context)
            max_num = 0
            for item in existing_task_dict.keys():
                # strip any trailing /NNN suffix before comparing
                item_stripped = re.sub(r'/\d+$', '', item)
                if item_stripped == compare_key:
                    existing_context = item.replace('%s:' % process_name, '')
                    suffix = existing_context.split('/')[-1]
                    try:
                        num = int(suffix)
                    except ValueError:
                        # non-numeric suffix: treat as the base context
                        num = 0
                    if num > max_num:
                        max_num = num
                    existed = True
            if existed:
                context = "%s/%0.3d" % (context, max_num + 1)
        return context

    def _task_exists(key):
        '''True when some existing task key starts with this process:context key.'''
        for item in existing_task_dict:
            if item.startswith(key):
                return True
        return False

    def _get_durations(process_obj):
        '''Return (duration, bid_duration) from the process attributes,
        falling back to the defaults when unset.'''
        attrs = process_obj.get_attributes()
        duration = attrs.get("duration")
        if duration:
            duration = int(duration)
        else:
            duration = default_duration
        bid_duration = attrs.get("bid_duration")
        if bid_duration:
            bid_duration = int(bid_duration)
        else:
            bid_duration = default_bid_duration
        return duration, bid_duration

    # get pipeline
    if not pipeline_code:
        pipeline_code = sobject.get_value("pipeline_code")

    if pipeline_code in ['', '__default__']:
        pipeline = SearchType.create("sthpw/pipeline")
        pipeline.set_value("code", "__default__")
        pipeline.set_value("pipeline", '''
<pipeline>
    <process name='publish'/>
</pipeline>
        ''')
        # FIXME: HACK to initialize virtual pipeline
        pipeline.set_pipeline(pipeline.get_value("pipeline"))
    else:
        pipeline = Pipeline.get_by_code(pipeline_code)
        if not pipeline:
            print("WARNING: pipeline '%s' does not exist" % pipeline_code)
            return []

    #TODO: add recursive property here
    if processes:
        process_names = processes
    else:
        # only node and approval processes get initial tasks
        process_names = pipeline.get_process_names(recurse=True, type=["node","approval"])

    # remember which process:context combos already have tasks
    existing_tasks = Task.get_by_sobject(sobject, order=False)
    existing_task_dict = {}
    for x in existing_tasks:
        key1 = '%s:%s' % (x.get_value('process'), x.get_value("context"))
        existing_task_dict[key1] = True

    # create all of the tasks
    description = ""
    tasks = []
    start_date = Date()
    start_date.add_days(start_offset)

    bid_duration_unit = ProdSetting.get_value_by_key("bid_duration_unit")
    if not bid_duration_unit:
        bid_duration_unit = 'hour'

    # that's the date range in 5 days (not hours)
    default_duration = 5
    default_bid_duration = 8
    if bid_duration_unit == 'minute':
        default_bid_duration = 60

    last_task = None

    # explicit mode: create a task for each specific process:context combo
    if mode == 'context':
        for context_combo in contexts:
            process_name, context = context_combo.split(':')
            # depend_id is None since these are arbitrary tasks
            depend_id = None

            # skip combos that already have a task when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            if skip_duplicate and _task_exists(key1):
                continue

            process_obj = pipeline.get_process(process_name)
            if not process_obj:
                continue

            context = _get_context(existing_task_dict, process_name, context)
            pipe_code = process_obj.get_task_pipeline()
            duration, bid_duration = _get_durations(process_obj)

            end_date = start_date.copy()
            # for a task to be x days long, we need duration x-1.
            end_date.add_days(duration - 1)

            start_date_str = start_date.get_db_date()
            end_date_str = end_date.get_db_date()

            # Create the task
            last_task = Task.create(sobject, process_name, description, depend_id=depend_id, pipeline_code=pipe_code, start_date=start_date_str, end_date=end_date_str, context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for process connecting to multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            existing_task_dict[new_key] = True
            tasks.append(last_task)

            start_date = end_date.copy()
            # start the day after
            start_date.add_days(1)
        return tasks

    # standard / 'simple process' modes: walk the pipeline processes in order,
    # chaining each task to the previous via depend_id
    for process_name in process_names:
        if last_task:
            depend_id = last_task.get_id()
        else:
            depend_id = None

        process_obj = pipeline.get_process(process_name)
        if not process_obj:
            continue

        duration, bid_duration = _get_durations(process_obj)

        end_date = start_date.copy()
        if duration >= 1:
            # for a task to be x days long, we need duration x-1.
            end_date.add_days(duration - 1)

        # output contexts could be duplicated from 2 different output processes
        if mode == 'simple process':
            output_contexts = [process_name]
        else:
            output_contexts = pipeline.get_output_contexts(process_obj.get_name(), show_process=False)

        pipe_code = process_obj.get_task_pipeline()
        start_date_str = start_date.get_db_date()
        end_date_str = end_date.get_db_date()

        for context in output_contexts:
            # skip contexts that already have a task when skip_duplicate is True
            key1 = '%s:%s' % (process_name, context)
            if skip_duplicate and _task_exists(key1):
                continue
            # honor the optional context whitelist
            if contexts and context not in contexts:
                continue

            context = _get_context(existing_task_dict, process_name, context)
            last_task = Task.create(sobject, process_name, description, depend_id=depend_id, pipeline_code=pipe_code, start_date=start_date_str, end_date=end_date_str, context=context, bid_duration=bid_duration)

            # this avoids duplicated tasks for process connecting to multiple processes
            new_key = '%s:%s' % (last_task.get_value('process'), last_task.get_value("context"))
            existing_task_dict[new_key] = True
            tasks.append(last_task)

        start_date = end_date.copy()
        # start the day after
        start_date.add_days(1)

    return tasks