def postprocess(my):
    """Create the initial tasks for my.sobject, but only when the edit
    widget's value is set (truthy)."""
    flag = my.get_value()
    if not flag:
        return
    from pyasm.biz import Task
    Task.add_initial_tasks(my.sobject)
def _test_parent_search(self):
    """Verify that add_relationship_search_filter() returns the same parent
    tasks as add_relationship_filters() given a person search."""
    from pyasm.biz import Task
    person = SearchType.create('unittest/person')
    person.set_value('name_first', 'burt')
    person.commit()
    person2 = SearchType.create('unittest/person')
    person2.set_value('name_first', 'sean')
    person2.commit()

    person_search = Search('unittest/person')
    person_search.add_filters('id', [person.get_id(), person2.get_id()])

    # BUG FIX: the original wrote "for xrange in (1, 50)", which iterates
    # over the 2-tuple (1, 50) (and shadows the xrange builtin), creating
    # only 2 tasks per person.  The intent was to loop 49 times.
    for _ in range(1, 50):
        Task.create(person, 'process_CC', 'some task', 'admin', context='process_CC')
    for _ in range(1, 50):
        Task.create(person2, 'process_DD', 'some task', 'admin')

    # find parent of tasks
    search2 = Search('sthpw/task')
    search2.add_relationship_filters([person, person2])
    tasks = search2.get_sobjects()

    search3 = Search('sthpw/task')
    search3.add_relationship_search_filter(person_search)
    tasks2 = search3.get_sobjects()

    self.assertEquals(SObject.get_values(tasks, 'id'), SObject.get_values(tasks2, 'id'))
def execute(my):
    """On insert of a non-sthpw sobject whose pipeline has
    autocreate_tasks enabled, create the pipeline's initial tasks."""
    trigger_input = my.get_input()
    search_key = trigger_input.get("search_key")
    update_data = trigger_input.get("update_data")

    # only fire for project sobjects, on insert
    if not search_key or search_key.startswith('sthpw/'):
        return
    if trigger_input.get("mode") not in ['insert']:
        return

    sobject = my.get_caller()
    pipeline_code = sobject.get_value("pipeline_code", no_exception=True)
    if not pipeline_code:
        return

    from pyasm.biz import Pipeline, Task
    from pyasm.search import SearchType
    pipeline = Pipeline.get_by_code(pipeline_code)
    if not pipeline:
        return

    # the pipeline must explicitly opt in to autocreation
    autocreate = pipeline.get_value("autocreate_tasks", no_exception=True)
    if autocreate not in ['true', True]:
        return

    Task.add_initial_tasks(sobject, pipeline_code=pipeline_code,
            skip_duplicate=True, mode='standard')
def postprocess(self):
    """Create the initial tasks for self.sobject, but only when the edit
    widget's value is set (truthy)."""
    flag = self.get_value()
    if not flag:
        return
    from pyasm.biz import Task
    Task.add_initial_tasks(self.sobject)
def execute(my):
    """Sample command: create a turntable render under the previous
    command's sobject, annotate each line of the model's maya file,
    check the result in, and mark the process approved."""
    print("EXECUTING sample command")

    # create the render
    render = SearchType.create("prod/render")
    render.set_parent(my.prev_command.sobject)
    render.set_value("pipeline_code", "turntable")
    render.commit()
    Task.add_initial_tasks(render)

    prev_sobject = my.prev_command.sobject
    prev_process = "model"
    this_sobject = my.prev_command.sobject
    this_process = "turntable"

    # get the deliverable
    snapshot = Snapshot.get_latest_by_sobject(prev_sobject, prev_process)
    if not snapshot:
        return

    # once we have this snapshot, open the file and process
    lib_dir = snapshot.get_lib_dir()
    file_name = snapshot.get_name_by_type("maya")
    file_path = "%s/%s" % (lib_dir, file_name)
    # FIX: use context managers so the file handles are closed even if an
    # exception is raised mid-read/write
    with open(file_path, 'r') as f:
        lines = f.readlines()

    tmp_dir = Environment.get_tmp_dir()
    new_file_name = "whatever.new"
    new_file_path = "%s/%s" % (tmp_dir, new_file_name)
    with open(new_file_path, 'wb') as f2:
        # prefix every line with its index
        for i, line in enumerate(lines):
            f2.write("%s - %s" % (i, line))

    file_paths = [new_file_path]
    file_types = ['maya']
    from pyasm.checkin import FileCheckin
    checkin = FileCheckin.get(this_sobject, file_paths, file_types, context=this_process)
    checkin.execute()

    my.set_event_name("task/approved")
    my.set_process("preprocess")
    my.set_pipeline_code("turntable")
    my.sobjects = [render]
    # ???
    my.sobject = render
    my.set_as_approved()
def postprocess(my):
    """Create initial tasks for the sobject when the "add_initial_tasks"
    checkbox was checked on the submitted form."""
    web = WebContainer.get_web()
    if web.get_form_value("add_initial_tasks") != "on":
        return
    Task.add_initial_tasks(my.sobject)
def postprocess(self):
    """Create initial tasks for the sobject when the "add_initial_tasks"
    checkbox was checked on the submitted form."""
    web = WebContainer.get_web()
    if web.get_form_value("add_initial_tasks") != "on":
        return
    Task.add_initial_tasks(self.sobject)
def execute(my):
    """Sample command: create a turntable render under the previous
    command's sobject, annotate each line of the model's maya file,
    check the result in, and mark the process approved."""
    print("EXECUTING sample command")

    # create the render
    render = SearchType.create("prod/render")
    render.set_parent(my.prev_command.sobject)
    render.set_value("pipeline_code", "turntable")
    render.commit()
    Task.add_initial_tasks(render)

    prev_sobject = my.prev_command.sobject
    prev_process = "model"
    this_sobject = my.prev_command.sobject
    this_process = "turntable"

    # get the deliverable
    snapshot = Snapshot.get_latest_by_sobject(prev_sobject, prev_process)
    if not snapshot:
        return

    # once we have this snapshot, open the file and process
    lib_dir = snapshot.get_lib_dir()
    file_name = snapshot.get_name_by_type("maya")
    file_path = "%s/%s" % (lib_dir, file_name)
    # FIX: use context managers so the file handles are closed even if an
    # exception is raised mid-read/write
    with open(file_path, 'r') as f:
        lines = f.readlines()

    tmp_dir = Environment.get_tmp_dir()
    new_file_name = "whatever.new"
    new_file_path = "%s/%s" % (tmp_dir, new_file_name)
    with open(new_file_path, 'wb') as f2:
        # prefix every line with its index
        for i, line in enumerate(lines):
            f2.write("%s - %s" % (i, line))

    file_paths = [new_file_path]
    file_types = ['maya']
    from pyasm.checkin import FileCheckin
    checkin = FileCheckin.get(this_sobject, file_paths, file_types, context=this_process)
    checkin.execute()

    my.set_event_name("task/approved")
    my.set_process("preprocess")
    my.set_pipeline_code("turntable")
    my.sobjects = [render]
    # ???
    my.sobject = render
    my.set_as_approved()
def execute(self):
    """Create two unittest persons (triggers suppressed) and give each a
    task in the unittest_person process."""
    for first_name in ('john', 'zoe'):
        person = SearchType.create('unittest/person')
        person.set_value('name_first', first_name)
        person.commit(triggers=False)
        task = Task.create(person, "unittest_person", "hello")
def execute(my):
    """Create two unittest persons (triggers suppressed) and give each a
    task in the unittest_person process."""
    for first_name in ("john", "zoe"):
        person = SearchType.create("unittest/person")
        person.set_value("name_first", first_name)
        person.commit(triggers=False)
        task = Task.create(person, "unittest_person", "hello")
def _test_child_search(self):
    """Verify that add_relationship_search_filter() agrees with
    add_relationship_filters() when searching children (persons via their
    tasks), including after retiring tasks, and verify the SQL statement
    produced by an empty add_filters() list."""
    from pyasm.biz import Task
    person = SearchType.create('unittest/person')
    person.set_value('name_first', 'pete')
    person.commit()
    # BUG FIX: the original wrote "for xrange in (1, 50)", which iterates
    # over the 2-tuple (1, 50) (and shadows the xrange builtin), creating
    # only 2 tasks.  The intent was to loop 49 times.
    for _ in range(1, 50):
        Task.create(person, 'process_AA', 'some task', 'admin', context='process_AA')

    person2 = SearchType.create('unittest/person')
    person2.set_value('name_first', 'jamie')
    person2.commit()
    person2_tasks = []
    for _ in range(1, 50):
        person2_tasks.append(Task.create(person2, 'process_BB', 'some task', 'admin', context='process_BB'))

    task_search = Search('sthpw/task')
    task_search.add_filters('process', ['process_AA', 'process_BB'])
    tasks = task_search.get_sobjects()

    search2 = Search('unittest/person')
    search2.add_relationship_filters(tasks)
    persons = search2.get_sobjects()

    search3 = Search('unittest/person')
    search3.add_relationship_search_filter(task_search)
    persons_fast = search3.get_sobjects()

    self.assertEquals(SObject.get_values(persons, 'id'), SObject.get_values(persons_fast, 'id'))
    self.assertEquals(SObject.get_values(persons_fast, 'name_first'), ['pete', 'jamie'])

    # if I retire all the tasks for person2
    for task in tasks:
        if task.get_value('process') == 'process_BB':
            task.retire()

    task_search = Search('sthpw/task')
    task_search.add_filters('process', ['process_AA', 'process_BB'])
    tasks = task_search.get_sobjects()

    search4 = Search('unittest/person')
    search4.add_relationship_search_filter(task_search)
    persons_fast = search4.get_sobjects()

    search2 = Search('unittest/person')
    search2.add_relationship_filters(tasks)
    persons = search2.get_sobjects()

    self.assertEquals(SObject.get_values(persons, 'id'), SObject.get_values(persons_fast, 'id'))
    self.assertEquals(SObject.get_values(persons_fast, 'name_first'), ['pete'])

    # test add_filters() with an empty array
    task_search = Search('sthpw/task')
    task_search.add_filters('process', [])
    tasks = task_search.get_sobjects()
    self.assertEquals(tasks, [])
    expected = '''SELECT %s"task".* FROM %s"task" WHERE "task"."id" is NULL AND ("task"."s_status" != 'retired' or 
"task"."s_status" is NULL) ORDER BY "task"."search_type", "task"."search_code"''' % (self.sthpw_prefix, self.sthpw_prefix)
    statement = task_search.get_statement()
    self.assertEquals(statement, expected)
def execute(my):
    """Add initial tasks to every sobject in search_key_list, honouring the
    skip-duplicate checkbox and the selected pipeline mode from the form."""
    my.search_key_list = my.kwargs.get('search_key_list')
    web = WebContainer.get_web()
    skip_duplicated = web.get_form_value('skip_duplicated') == 'on'
    pipeline_mode = web.get_form_value('pipeline_mode')
    sobjects = SearchKey.get_by_search_keys(my.search_key_list)

    count = 0
    offset = 0
    for sobject in sobjects:
        # creating a task on a task is not supported
        if isinstance(sobject, Task):
            raise TacticException('Creation of task for [Task] is not allowed')
        sk = SearchKey.get_by_sobject(sobject)

        if sobject.has_value('pipeline_code'):
            pipeline_code = sobject.get_value('pipeline_code')
        else:
            # fall back to the default pipeline rather than failing
            pipeline_code = '__default__'
            sobject.set_value("pipeline_code", pipeline_code)

        input_name = '%s|task_process' % pipeline_code
        contexts = []
        process_names = [name for name in web.get_form_values(input_name) if name]
        if pipeline_mode == 'context':
            # when pipeline_mode is context, we only specify contexts
            # in add_initial_tasks
            contexts = process_names[:]
            process_names = []

        tasks = Task.add_initial_tasks(sobject, sobject.get_value('pipeline_code'),
                processes=process_names,
                contexts=contexts,
                skip_duplicate=skip_duplicated,
                mode=pipeline_mode,
                start_offset=offset)
        count += len(tasks)
        offset += 5

    my.add_description("%s Tasks added in total." % count)
def execute(self):
    """Add initial tasks to every sobject in search_key_list, honouring the
    skip-duplicate checkbox and the selected pipeline mode from the form."""
    self.search_key_list = self.kwargs.get('search_key_list')
    web = WebContainer.get_web()
    skip_duplicated = web.get_form_value('skip_duplicated') == 'on'
    pipeline_mode = web.get_form_value('pipeline_mode')
    sobjects = SearchKey.get_by_search_keys(self.search_key_list)

    count = 0
    offset = 0
    for sobject in sobjects:
        # creating a task on a task is not supported
        if isinstance(sobject, Task):
            raise TacticException('Creation of task for [Task] is not allowed')
        sk = SearchKey.get_by_sobject(sobject)

        if sobject.has_value('pipeline_code'):
            pipeline_code = sobject.get_value('pipeline_code')
        else:
            # fall back to the default pipeline rather than failing
            pipeline_code = '__default__'
            sobject.set_value("pipeline_code", pipeline_code)

        input_name = '%s|task_process' % pipeline_code
        contexts = []
        process_names = [name for name in web.get_form_values(input_name) if name]
        if pipeline_mode == 'context':
            # when pipeline_mode is context, we only specify contexts
            # in add_initial_tasks
            contexts = process_names[:]
            process_names = []

        tasks = Task.add_initial_tasks(sobject, sobject.get_value('pipeline_code'),
                processes=process_names,
                contexts=contexts,
                skip_duplicate=skip_duplicated,
                mode=pipeline_mode,
                start_offset=offset)
        count += len(tasks)
        offset += 5

    self.add_description("%s Tasks added in total." % count)
def _test_child_search(my):
    """Verify that add_relationship_search_filter() agrees with
    add_relationship_filters() when searching children (persons via their
    tasks), including after retiring tasks."""
    from pyasm.biz import Task
    person = SearchType.create('unittest/person')
    person.set_value('name_first', 'pete')
    person.commit()
    # BUG FIX: the original wrote "for xrange in (1, 50)", which iterates
    # over the 2-tuple (1, 50) (and shadows the xrange builtin), creating
    # only 2 tasks.  The intent was to loop 49 times.
    for _ in range(1, 50):
        Task.create(person, 'process_AA', 'some task', 'admin', context='process_AA')

    person2 = SearchType.create('unittest/person')
    person2.set_value('name_first', 'jamie')
    person2.commit()
    person2_tasks = []
    for _ in range(1, 50):
        person2_tasks.append(Task.create(person2, 'process_BB', 'some task', 'admin', context='process_BB'))

    task_search = Search('sthpw/task')
    task_search.add_filters('process', ['process_AA', 'process_BB'])
    tasks = task_search.get_sobjects()

    search2 = Search('unittest/person')
    search2.add_relationship_filters(tasks)
    persons = search2.get_sobjects()

    search3 = Search('unittest/person')
    search3.add_relationship_search_filter(task_search)
    persons_fast = search3.get_sobjects()

    my.assertEquals(SObject.get_values(persons, 'id'), SObject.get_values(persons_fast, 'id'))
    my.assertEquals(SObject.get_values(persons_fast, 'name_first'), ['pete', 'jamie'])

    # if I retire all the tasks for person2
    for task in tasks:
        if task.get_value('process') == 'process_BB':
            task.retire()

    task_search = Search('sthpw/task')
    task_search.add_filters('process', ['process_AA', 'process_BB'])
    tasks = task_search.get_sobjects()

    search4 = Search('unittest/person')
    search4.add_relationship_search_filter(task_search)
    persons_fast = search4.get_sobjects()

    search2 = Search('unittest/person')
    search2.add_relationship_filters(tasks)
    persons = search2.get_sobjects()

    my.assertEquals(SObject.get_values(persons, 'id'), SObject.get_values(persons_fast, 'id'))
    my.assertEquals(SObject.get_values(persons_fast, 'name_first'), ['pete'])
def set_all_tasks(my, sobject, process, status):
    """Set every task of sobject in the given process to a display-formatted
    version of the given status (dashes/underscores become spaces)."""
    title = Common.get_display_title(status.replace("-", " ").replace("_", " "))
    for task in Task.get_by_sobject(sobject, process=process):
        task.set_value("status", title)
        task.commit()
def _test_approval(my):
    """An approval process gets a task created when upstream goes pending;
    approving that task completes the downstream processes."""
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="approval" name="b"/>
      <process type="action" name="c"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # there should be no tasks on the approval process yet
    tasks = Task.get_by_sobject(sobject, process="b")
    my.assertEquals(0, len(tasks))

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
    }
    Trigger.call(my, "process|pending", output)

    # the approval process should now have exactly one task
    tasks = Task.get_by_sobject(sobject, process="b")
    my.assertEquals(1, len(tasks))
    task = tasks[0]
    my.assertEquals("b", task.get("process"))

    # approve the task; both downstream process states should complete
    task.set_value("status", "approved")
    task.commit()
    my.assertEquals("complete", sobject.get_value("b"))
    my.assertEquals("complete", sobject.get_value("c"))
def handle_td(my, td):
    """Color the status cell background with the default task status color,
    then defer to the base widget."""
    sobject = my.get_current_sobject()
    status = my.get_value()
    color = Task.get_default_color(status)
    if color:
        td.add_style("background-color: %s" % color)
    super(SObjectStatusElementWdg, my).handle_td(td)
def execute(self):
    """Create tasks for the trigger's configured output processes on the
    firing task's parent, skipping processes that already have one.
    Optionally gated on a source status in the trigger data."""
    input = self.get_input()
    search_key = input.get("search_key")
    task = Search.get_by_search_key(search_key)
    parent = task.get_parent()
    if not parent:
        raise TacticException("Task parent not found.")

    # get the definition of the trigger
    trigger_sobj = self.get_trigger_sobj()
    data = trigger_sobj.get_value("data")
    try:
        data = jsonloads(data)
    except:
        raise TacticException("Incorrect formatting of trigger [%s]." % trigger_sobj.get_value("code"))

    # check against source status if present
    src_status = data.get("src_status")
    if src_status and task.get_value("status") != src_status:
        return

    process_names = data.get("output")
    if not process_names:
        return

    # only create new task if another of the same
    # process does not already exist
    search = Search("sthpw/task")
    search.add_filters("process", process_names)
    search.add_parent_filter(parent)
    search.add_project_filter()
    existing_processes = set(x.get_value("process") for x in search.get_sobjects())

    for process in process_names:
        if process not in existing_processes:
            Task.create(parent, process, start_date=None, end_date=None)
def set_as_approved(my):
    '''convenience function that sets every task for this process to Approved'''
    my.set_event_name("task/approved")
    # update the status of the tasks associated with this process
    for task in Task.get_by_sobjects(my.sobjects, my.process_name):
        task.set_value("status", "Approved")
        task.commit()
def execute(my):
    """On insert of a non-sthpw sobject whose pipeline has autocreate_tasks
    enabled, create initial tasks for every process of that pipeline."""
    trigger_input = my.get_input()
    search_key = trigger_input.get("search_key")
    update_data = trigger_input.get("update_data")

    # only fire for project sobjects, on insert
    if not search_key or search_key.startswith('sthpw/'):
        return
    if trigger_input.get("mode") not in ['insert']:
        return

    sobject = my.get_caller()
    pipeline_code = sobject.get_value("pipeline_code", no_exception=True)
    if not pipeline_code:
        return

    from pyasm.biz import Pipeline, Task
    from pyasm.search import SearchType
    pipeline = Pipeline.get_by_code(pipeline_code)
    if not pipeline:
        return

    # the pipeline must explicitly opt in to autocreation
    if pipeline.get_value("autocreate_tasks", no_exception=True) not in ['true', True]:
        return

    processes = pipeline.get_process_names()
    Task.add_initial_tasks(sobject, pipeline_code=pipeline_code,
            processes=processes, skip_duplicate=True, mode='standard')
def execute(my):
    """Create a task for the trigger's configured output process on the
    parent of the task that fired this trigger."""
    input = my.get_input()
    search_key = input.get("search_key")
    task = Search.get_by_search_key(search_key)
    parent = task.get_parent()

    # get the definition of the trigger
    trigger_sobj = my.get_trigger_sobj()
    data = jsonloads(trigger_sobj.get_value("data"))
    process = data.get("output")

    description = ""
    # FIXME:
    # find out if there is already a task of that process
    Task.create(parent, process, description, start_date=None, end_date=None)
def get_by_code(cls, code, allow_default=False):
    '''it is fatal not to have a pipeline, so put a default'''
    # Resolution order: project-specific config/pipeline entry, then the
    # base (sthpw) pipeline table.  Failing both, may fabricate an
    # in-memory "task" pipeline, or (allow_default=True) find/create and
    # commit a "default" pipeline.
    if not code:
        return None

    # first look at project specific pipeline
    pipeline = Search.get_by_code("config/pipeline", code)
    if not pipeline:
        pipeline = super(Pipeline,cls).get_by_code(code)

    if not pipeline and code == 'task':
        # Create a default task pipeline; note it is NOT committed, so it
        # exists only for this call
        pipeline = SearchType.create("sthpw/pipeline")
        pipeline.set_value("code", "task")
        from pyasm.biz import Task
        xml = Task.get_default_task_xml()
        pipeline.set_value("pipeline", xml)
        pipeline.set_pipeline(xml)
        pipeline.set_value("search_type", "sthpw/task")
        #pipeline.commit()

    if not pipeline and allow_default:
        # look for an existing [default] pipeline first
        search = Search(cls)
        search.add_filter('code', 'default')
        pipeline = search.get_sobject()
        if not pipeline:
            # autogenerate a [default] pipeline with a single process and
            # commit it
            pipeline = cls.create('default', \
                'default pipeline', '')
            xml = pipeline.get_xml_value("pipeline")

            # create a default process for the table
            root = xml.get_root_node()
            element = xml.create_element("process")
            Xml.set_attribute(element,"name", "default_process")
            Xml.append_child(root, element)

            pipeline.set_value('pipeline', xml.get_xml())
            pipeline.commit()

            # set the pipeline
            pipeline.set_pipeline(pipeline.get_value('pipeline'))
            Environment.add_warning("pipeline autogenerated", \
                "[default] pipeline has just been created.")

    # Sometimes, a pipeline is instantiated without calling set_pipeline()
    # to be looked into
    if pipeline and not pipeline.get_processes():
        pipeline.set_pipeline(pipeline.get_value('pipeline'))

    return pipeline
def get_by_code(cls, code, allow_default=False):
    '''it is fatal not to have a pipeline, so put a default'''
    # Resolution order: project-specific config/pipeline entry, then the
    # base (sthpw) pipeline table.  Failing both, may fabricate an
    # in-memory "task" pipeline, or (allow_default=True) find/create and
    # commit a "default" pipeline.
    if not code:
        return None

    # first look at project specific pipeline
    pipeline = Search.get_by_code("config/pipeline", code)
    if not pipeline:
        pipeline = super(Pipeline, cls).get_by_code(code)

    if not pipeline and code == 'task':
        # Create a default task pipeline; note it is NOT committed, so it
        # exists only for this call
        pipeline = SearchType.create("sthpw/pipeline")
        pipeline.set_value("code", "task")
        from pyasm.biz import Task
        xml = Task.get_default_task_xml()
        pipeline.set_value("pipeline", xml)
        pipeline.set_pipeline(xml)
        pipeline.set_value("search_type", "sthpw/task")
        #pipeline.commit()

    if not pipeline and allow_default:
        # look for an existing [default] pipeline first
        search = Search(cls)
        search.add_filter('code', 'default')
        pipeline = search.get_sobject()
        if not pipeline:
            # autogenerate a [default] pipeline with a single process and
            # commit it
            pipeline = cls.create('default', \
                'default pipeline', '')
            xml = pipeline.get_xml_value("pipeline")

            # create a default process for the table
            root = xml.get_root_node()
            element = xml.create_element("process")
            Xml.set_attribute(element, "name", "default_process")
            Xml.append_child(root, element)

            pipeline.set_value('pipeline', xml.get_xml())
            pipeline.commit()

            # set the pipeline
            pipeline.set_pipeline(pipeline.get_value('pipeline'))
            Environment.add_warning("pipeline autogenerated", \
                "[default] pipeline has just been created.")

    # Sometimes, a pipeline is instantiated without calling set_pipeline()
    # to be looked into
    if pipeline and not pipeline.get_processes():
        pipeline.set_pipeline(pipeline.get_value('pipeline'))

    return pipeline
def _test_multi_task(self):
    """With two tasks in process "a", the process only completes (firing
    "b"'s on_complete) after BOTH tasks are marked complete."""
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")
    pipeline_xml = '''
    <pipeline>
      <process name="a"/>
      <process type="action" name="b"/>
      <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # each process records its own name on the sobject when it completes
    for process_name, process in processes.items():
        process.set_json_value("workflow", {
            'on_complete': '''
            sobject.set_value('name_first', '%s')
            ''' % process_name,
        })
        process.commit()

    task = Task.create(sobject, process="a", description="Test Task")
    task2 = Task.create(sobject, process="a", description="Test Task 2")

    # completing only one of the two tasks must not advance the pipeline
    task.set_value("status", "complete")
    task.commit()
    self.assertEquals(False, "b" == sobject.get_value("name_first"))

    # completing the second task advances the pipeline through "b"
    task2.set_value("status", "complete")
    task2.commit()
    self.assertEquals(True, "b" == sobject.get_value("name_first"))
def execute(self):
    """On insert of a non-sthpw sobject whose pipeline has
    autocreate_tasks enabled, create the pipeline's initial tasks."""
    trigger_input = self.get_input()
    search_key = trigger_input.get("search_key")
    update_data = trigger_input.get("update_data")

    # only fire for project sobjects, on insert
    if not search_key or search_key.startswith('sthpw/'):
        return
    if trigger_input.get("mode") not in ['insert']:
        return

    sobject = self.get_caller()
    pipeline_code = sobject.get_value("pipeline_code", no_exception=True)
    if not pipeline_code:
        return

    from pyasm.biz import Pipeline, Task
    from pyasm.search import SearchType
    pipeline = Pipeline.get_by_code(pipeline_code)
    if not pipeline:
        return

    # the pipeline must explicitly opt in to autocreation
    autocreate = pipeline.get_value("autocreate_tasks", no_exception=True)
    if autocreate not in ['true', True]:
        return

    Task.add_initial_tasks(sobject, pipeline_code=pipeline_code,
            skip_duplicate=True, mode='standard')
def _test_multi_task(self):
    """With two tasks in process "a", the process only completes (firing
    "b"'s on_complete) after BOTH tasks are marked complete."""
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")
    pipeline_xml = '''
    <pipeline>
      <process name="a"/>
      <process type="action" name="b"/>
      <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # each process records its own name on the sobject when it completes
    for process_name, process in processes.items():
        process.set_json_value("workflow", {
            'on_complete': '''
            sobject.set_value('name_first', '%s')
            ''' % process_name,
        })
        process.commit()

    task = Task.create(sobject, process="a", description="Test Task")
    task2 = Task.create(sobject, process="a", description="Test Task 2")

    # completing only one of the two tasks must not advance the pipeline
    task.set_value("status", "complete")
    task.commit()
    self.assertEquals(False, "b" == sobject.get_value("name_first"))

    # completing the second task advances the pipeline through "b"
    task2.set_value("status", "complete")
    task2.commit()
    self.assertEquals(True, "b" == sobject.get_value("name_first"))
def _get_updates(self):
    '''Create an sObject and tasks that we will test to receive updates on.
    The current transaction is committed in _test_insert.'''
    # Returns a dict of update specifications keyed "001".."006", each
    # exercising a different combination of update parameters.
    Transaction.get(create=True)
    from pyasm.search import SearchType
    sobj = SearchType.create("prod/asset")
    sobj.set_defaults()
    sobj.commit()
    search_key = sobj.get_search_key()
    # remember the keys for later assertions in the calling test
    self.search_key = search_key
    search_type = sobj.get_search_type()
    search_code = sobj.get_value('code')

    tasks = Task.add_initial_tasks(sobj, pipeline_code='__default__')
    first_task = tasks[0]
    task_sk = first_task.get_search_key()
    self.task_sk = task_sk

    # dummy client-side callback used by some update specs
    script = '''console.log('hello world.')'''
    updates = {}

    # Expression counts the number of incomplete tasks for sobject
    expr = '''@COUNT(@SOBJECT(prod/asset['code', '%s'].sthpw/task['status', 'NEQ', 'complete']))''' % search_code
    # Compare is True iff all tasks are all complete.
    compare = '''@COUNT(@SOBJECT(sthpw/task['status', 'NEQ', 'complete'])) < 1'''

    # Test expression by itself
    updates["001"] = {'expression': expr}
    # Test search_key and column
    updates["002"] = {'search_key': task_sk, 'column': 'status'}
    # Test compare and search_key
    updates["003"] = {'search_key': search_key, 'compare': compare, 'cbjs_action': script}
    # Test listen for search_type
    updates["004"] = {'search_type': search_type, 'value': True, 'cbjs_action': script}
    # Test expr_key and compare
    updates["005"] = {'expr_key': search_key, 'compare': compare}
    # Test search_key and expression
    expression = '''@COUNT(@SOBJECT(sthpw/task['status', 'NEQ', 'complete']))'''
    updates["006"] = {'search_key': search_key, 'expression': expression}

    return updates
def handle_td(my, td): sobject = my.get_current_sobject() # find the pipeline code of the task pipeline_code = sobject.get_value('pipeline_code', no_exception=True) parent_pipeline_code = '' if my.parent: parent_pipeline_code = my.parent.get_value('pipeline_code', no_exception=True) # if not find the pipeline of the parent and match the process if not pipeline_code: task_process = sobject.get_value("process") if task_process: parent = my.parent if parent: parent_pipeline_code = parent.get_value('pipeline_code', no_exception=True) pipeline = Pipeline.get_by_code(parent_pipeline_code) if pipeline: attributes = pipeline.get_process_attrs(task_process) pipeline_code = attributes.get('task_pipeline') value = my.get_value() color = Task.get_default_color(value) # If task status pipeline is chosen, # use color attribute from status (process) if pipeline_code: td.set_attr("spt_pipeline_code", pipeline_code) pipeline = Pipeline.get_by_code(pipeline_code) if pipeline: #attributes = pipeline.get_process_attrs(value) #color = attributes.get("color") process = pipeline.get_process(value) if process: color = process.get_color() if not color: process_sobject = pipeline.get_process_sobject(value) if process_sobject: color = process_sobject.get_value("color") if color: td.add_style("background-color: %s" % color) if parent_pipeline_code: td.set_attr("spt_parent_pipeline_code", parent_pipeline_code) super(TaskStatusElementWdg, my).handle_td(td)
def handle_pending(my):
    """When a process goes pending: create its tasks if none exist (using
    the workflow's assigned login), otherwise set them to pending; then
    fire the process action."""
    my.log_message(my.sobject, my.process, "pending")

    # look up the process entry for this pipeline
    search = Search("config/process")
    search.add_filter("process", my.process)
    search.add_filter("pipeline_code", my.pipeline.get_code())
    process_sobj = search.get_sobject()

    workflow = process_sobj.get_json_value("workflow")
    assigned = workflow.get("assigned") if workflow else None

    # check to see if the tasks exist and if they don't then create one
    tasks = Task.get_by_sobject(my.sobject, process=my.process)
    if not tasks:
        tasks = Task.add_initial_tasks(my.sobject, processes=[my.process], assigned=assigned)
    else:
        my.set_all_tasks(my.sobject, my.process, "pending")

    Trigger.call(my, "process|action", my.input)
def preprocess(self):
    """Group the tasks of self.sobjects by their parent's search key
    ("search_type|search_id") into self.tasks_dict."""
    self.tasks_dict = {}
    for task in Task.get_by_sobjects(self.sobjects):
        key = "%s|%s" % (task.get_value("search_type"), task.get_value("search_id"))
        self.tasks_dict.setdefault(key, []).append(task)
def preprocess(my):
    """Group the tasks of my.sobjects by their parent's search key
    ("search_type|search_id") into my.tasks_dict."""
    my.tasks_dict = {}
    for task in Task.get_by_sobjects(my.sobjects):
        key = "%s|%s" % (task.get_value("search_type"), task.get_value("search_id"))
        my.tasks_dict.setdefault(key, []).append(task)
def get_to(my):
    """Add the assigned user and supervisor of each related task to the
    recipient set from the base handler."""
    recipients = super(TaskStatusEmailHandler, my).get_to()
    sobj = my.sobject
    # the sobject could be a task itself, or the parent of tasks
    if isinstance(sobj, Task):
        tasks = [sobj]
    else:
        tasks = Task.get_by_sobject(sobj)

    for task in tasks:
        for login_name in (task.get_assigned(), task.get_supervisor()):
            login = my._get_login(login_name)
            if login:
                recipients.add(login)

    return recipients
def handle_sobject(my, sobject, command):
    """When the triggering task passes the check, reset every non-Pending
    'compositing' task on its parent back to Pending."""
    # the sobject here is a task
    if not my.check(sobject):
        return

    # the parent is the asset or shot
    parent = sobject.get_parent()
    print("Check finished")

    tasks = Task.get_by_sobject(parent, 'compositing')

    # about to commit
    task_ids = []
    for task in tasks:
        if task.get_value('status') != 'Pending':
            task.set_value('status', 'Pending')
            task.commit()
            task_ids.append(task.get_id())
    # FIX: removed the stray trailing apostrophe from the original message
    print("Changed task status to [Pending] for task id %s" % str(task_ids))
def handle_sobject(self, sobject, command):
    """When the triggering task passes the check, reset every non-Pending
    'compositing' task on its parent back to Pending."""
    # the sobject here is a task
    if not self.check(sobject):
        return

    # the parent is the asset or shot
    parent = sobject.get_parent()
    print("Check finished")

    tasks = Task.get_by_sobject(parent, 'compositing')

    # about to commit
    task_ids = []
    for task in tasks:
        if task.get_value('status') != 'Pending':
            task.set_value('status', 'Pending')
            task.commit()
            task_ids.append(task.get_id())
    # FIX: removed the stray trailing apostrophe from the original message
    print("Changed task status to [Pending] for task id %s" % str(task_ids))
def handle_sobject(my, sobject, command):
    """When the triggering task passes the check, reset every non-Pending
    'compositing' task on its parent back to Pending."""
    # the sobject here is a task
    if not my.check(sobject):
        return

    # the parent is the asset or shot
    parent = sobject.get_parent()
    print("Check finished")

    tasks = Task.get_by_sobject(parent, "compositing")

    # about to commit
    task_ids = []
    for task in tasks:
        if task.get_value("status") != "Pending":
            task.set_value("status", "Pending")
            task.commit()
            task_ids.append(task.get_id())
    # FIX: removed the stray trailing apostrophe from the original message
    print("Changed task status to [Pending] for task id %s" % str(task_ids))
def get_display(self):
    """Build the select options of candidate parent tasks: all sibling
    tasks of this task's sobject, excluding the task itself and any task
    that already depends on it (direct circular dependency)."""
    web = WebContainer.get_web()
    task = self.get_current_sobject()
    id = task.get_id()
    if task.is_insert():
        return HtmlElement.i("Dependency on insert not supported yet.")

    # get the sobject this task belongs to
    sobject = task.get_parent()
    if not sobject:
        return "No parent"

    tasks = []
    for candidate in Task.get_by_sobject(sobject):
        # skip the task itself
        if candidate.get_id() == id:
            continue
        # prevent direct circular dependencies
        if candidate.get_value("depend_id") == id:
            continue
        tasks.append(candidate)

    ids = [x.get_id() for x in tasks]
    labels = []
    for task in tasks:
        process = task.get_value("process")
        description = task.get_value("description")
        if len(description) > 30:
            description = description[0:30] + "..."
        labels.append("%s - %s" % (process, description))

    self.set_option("empty", "true")
    self.set_option("labels", labels)
    self.set_option("values", ids)
    return super(TaskParentInputWdg,self).get_display()
def get_display(my):
    """Build the select options of candidate parent tasks: all sibling
    tasks of this task's sobject, excluding the task itself and any task
    that already depends on it (direct circular dependency)."""
    web = WebContainer.get_web()
    task = my.get_current_sobject()
    id = task.get_id()
    if task.is_insert():
        return HtmlElement.i("Dependency on insert not supported yet.")

    # get the sobject this task belongs to
    sobject = task.get_parent()
    if not sobject:
        return "No parent"

    tasks = []
    for candidate in Task.get_by_sobject(sobject):
        # skip the task itself
        if candidate.get_id() == id:
            continue
        # prevent direct circular dependencies
        if candidate.get_value("depend_id") == id:
            continue
        tasks.append(candidate)

    ids = [x.get_id() for x in tasks]
    labels = []
    for task in tasks:
        process = task.get_value("process")
        description = task.get_value("description")
        if len(description) > 30:
            description = description[0:30] + "..."
        labels.append("%s - %s" % (process, description))

    my.set_option("empty", "true")
    my.set_option("labels", labels)
    my.set_option("values", ids)
    return super(TaskParentInputWdg,my).get_display()
def execute(self):
    """Copy the selected template tasks onto each selected shot."""
    left_cb , right_cb = self.get_checkbox_cols()
    web = WebContainer.get_web()

    select_shots = web.get_form_values(right_cb.CB_NAME)
    if not select_shots:
        return
    select_tasks = web.get_form_values("sthpw_task")
    if not select_tasks:
        return

    # resolve the selected shots and tasks from their search keys
    shots = [Search.get_by_search_key(sk) for sk in select_shots]
    tasks = [Search.get_by_search_key(sk) for sk in select_tasks]

    # copy these tasks into the selected shots
    from pyasm.biz import Task
    for shot in shots:
        for task in tasks:
            # extract the properties of each task
            process = task.get_value("process")
            description = task.get_value("description")
            assigned = task.get_value("assigned")
            supe = task.get_value("supervisor")

            # create the new task on this shot
            new_task = Task.create(shot, process, description,
                    assigned=assigned, supervisor=supe)
            self.sobjects.append(new_task)

    self.add_description("Added template tasks to shot(s)")
def execute(my):
    """Copy the selected template tasks onto each selected shot."""
    left_cb , right_cb = my.get_checkbox_cols()
    web = WebContainer.get_web()

    select_shots = web.get_form_values(right_cb.CB_NAME)
    if not select_shots:
        return
    select_tasks = web.get_form_values("sthpw_task")
    if not select_tasks:
        return

    # resolve the selected shots and tasks from their search keys
    shots = [Search.get_by_search_key(sk) for sk in select_shots]
    tasks = [Search.get_by_search_key(sk) for sk in select_tasks]

    # copy these tasks into the selected shots
    from pyasm.biz import Task
    for shot in shots:
        for task in tasks:
            # extract the properties of each task
            process = task.get_value("process")
            description = task.get_value("description")
            assigned = task.get_value("assigned")
            supe = task.get_value("supervisor")

            # create the new task on this shot
            new_task = Task.create(shot, process, description,
                    assigned=assigned, supervisor=supe)
            my.sobjects.append(new_task)

    my.add_description("Added template tasks to shot(s)")
def _test_task(self):
    '''Completing the task on process "a" should fire the on_complete
    workflow hook of the downstream process, writing its name into the
    parent sobject.'''
    # dummy parent to hang the task off of
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
      <process name="a"/>
      <process type="action" name="b"/>
      <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # each process records its own name on completion
    hook_template = '''
    sobject.set_value('name_first', '%s')
    '''
    for process_name, process in processes.items():
        process.set_json_value("workflow", {
            'on_complete': hook_template % process_name,
        })
        process.commit()

    task = Task.create(sobject, process="a", description="Test Task")

    # TODO: not quite sure if this should be related to "action"
    #task.set_value("status", "in_progress")
    #task.commit()
    #self.assertEquals("in_progress", sobject.get_value("name_first"))

    task.set_value("status", "complete")
    task.commit()

    # the workflow advanced to "b", whose hook set name_first
    self.assertEquals("b", sobject.get_value("name_first"))
def _test_task(my):
    '''Create a task on process "a" and complete it; the pipeline's
    workflow hooks should then run and write into the parent sobject.'''
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")
    pipeline_xml = '''
    <pipeline>
        <process name="a"/>
        <process type="action" name="b"/>
        <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code() )
    sobject.commit()

    # each process's on_complete hook records the process's own name
    # in the person's name_first column
    for process_name, process in processes.items():
        process.set_json_value("workflow", {
            #'on_in_progress': '''
            #sobject.set_value('name_first', '%s')
            #''' % process_name,
            'on_complete': '''
            sobject.set_value('name_first', '%s')
            ''' % process_name,
        } )
        process.commit()

    task = Task.create(sobject, process="a", description="Test Task")

    # TODO: not quite sure if this should be related to "action"
    #task.set_value("status", "in_progress")
    #task.commit()
    #my.assertEquals( "in_progress", sobject.get_value("name_first"))

    # completing the task should advance the workflow to "b", whose
    # on_complete hook sets name_first
    task.set_value("status", "complete")
    task.commit()
    my.assertEquals( "b", sobject.get_value("name_first"))
def preprocess(self):
    '''Group the tasks belonging to self.sobjects by their parent's
    search key in self.data, and track the widest pipeline seen in
    self.max_count.'''
    if not self.sobjects:
        return

    tasks = Task.get_by_sobjects(self.sobjects, self.process_names)
    for task in tasks:
        # bucket the task under its parent's "<search_type>|<search_id>" key
        key = "%s|%s" % (task.get_value("search_type"),
                         task.get_value("search_id"))
        self.data.setdefault(key, []).append(task)

        # the widest pipeline determines how many process columns are needed
        pipeline = task.get_attr("status").get_pipeline()
        self.max_count = max(self.max_count, len(pipeline.get_processes()))
def preprocess(my):
    '''Build my.data, a mapping of parent search key to the list of its
    tasks, and record the largest pipeline process count in my.max_count.'''
    if not my.sobjects:
        return

    for task in Task.get_by_sobjects(my.sobjects, my.process_names):
        # key the task by its parent sobject's type and id
        parent_key = "%s|%s" % (
            task.get_value("search_type"),
            task.get_value("search_id"))

        bucket = my.data.get(parent_key)
        if not bucket:
            bucket = my.data[parent_key] = []
        bucket.append(task)

        # remember the widest pipeline across all tasks
        num_processes = len(
            task.get_attr("status").get_pipeline().get_processes())
        if num_processes > my.max_count:
            my.max_count = num_processes
def delete(my, log=True):
    '''Delete this shot (used for undo).

    A shot may only be deleted when it has no non-icon snapshots and no
    assigned tasks; any icon snapshots and unassigned tasks are deleted
    along with it.

    @param log: when False, skip all checks and delegate straight to the
        base class (undo replay path)
    @raise TacticException: if real snapshots exist or tasks are assigned
    '''
    # TODO: this should probably be clearer!!!!
    if log == False:
        super(Shot, my).delete(log)
        return

    # a shot can only be deleted if only icon snapshots exist
    snapshots = Snapshot.get_by_sobject(my)
    only_icons = True
    for snapshot in snapshots:
        context = snapshot.get_value("context")
        if context != my.get_icon_context():
            only_icons = False
    if not only_icons:
        raise TacticException("Cannot delete because snapshots exist")

    # only delete if no tasks have been assigned
    tasks = Task.get_by_sobject(my)
    has_assigned = False
    for task in tasks:
        assigned = task.get_value("assigned")
        if assigned != "" and assigned != "None":
            has_assigned = True
    if has_assigned:
        raise TacticException(
            "Cannot delete because tasks have been assigned")

    # delete tasks and icons
    for snapshot in snapshots:
        snapshot.delete()
    for task in tasks:
        task.delete()

    # BUG FIX: get_search_type was referenced without calling it, which
    # formatted the bound-method repr into the description instead of
    # the actual search type string
    my.description = "Deleted '%s', search_type '%s'" % (
        my.get_code(), my.get_search_type())

    super(Shot, my).delete(log)
def delete(my, log=True):
    '''Delete this asset (used for undo).

    An asset may only be deleted when it has no non-icon snapshots and
    no assigned tasks; any icon snapshots and unassigned tasks are
    deleted along with it.

    @param log: when False, skip all checks and delegate straight to the
        base class (undo replay path)
    @raise TacticException: if real snapshots exist or tasks are assigned
    '''
    # TODO: this should probably be clearer!!!!
    if log == False:
        super(Asset,my).delete(log)
        return

    # An asset can only be deleted if only icon snapshots exist
    snapshots = Snapshot.get_by_sobject(my)
    only_icons = True
    for snapshot in snapshots:
        context = snapshot.get_value("context")
        if context != my.get_icon_context():
            only_icons = False
    if not only_icons:
        raise TacticException("Cannot delete because snapshots exist")

    # only delete if no tasks have been assigned
    tasks = Task.get_by_sobject(my)
    has_assigned = False
    for task in tasks:
        assigned = task.get_value("assigned")
        if assigned != "" and assigned != "None":
            has_assigned = True
    if has_assigned:
        raise TacticException("Cannot delete because tasks have been assigned")

    # delete tasks and icons
    for snapshot in snapshots:
        snapshot.delete()
    for task in tasks:
        task.delete()

    # BUG FIX: get_search_type was referenced without calling it, which
    # formatted the bound-method repr into the description instead of
    # the actual search type string
    my.description = "Deleted '%s', search_type '%s'" % (
        my.get_code(), my.get_search_type())

    super(Asset,my).delete(log)
def get_display(my):
    '''Render a table of the tasks attached to the sobject identified
    by the search_type/search_id arguments in the URL.'''
    args = WebContainer.get_web().get_form_args()

    # the target sobject is addressed by its search key
    sobject = Search.get_by_search_key(
        "%s|%s" % (args['search_type'], args['search_id']))

    widget = DivWdg()
    widget.add_style("width: 95%")
    widget.add_style("margin-left: 20px")

    from pyasm.biz import Task
    table = TableWdg("sthpw/task", "layout_right")
    table.set_sobjects(Task.get_by_sobject(sobject))
    table.set_show_property(False)
    widget.add(table)

    return widget
def get_display(my):
    '''Build a div containing a task table for the sobject named in the
    URL form arguments.'''
    # get the args in the URL
    web_args = WebContainer.get_web().get_form_args()
    search_type = web_args['search_type']
    search_id = web_args['search_id']

    target = Search.get_by_search_key("%s|%s" % (search_type, search_id))

    container = DivWdg()
    for style in ("width: 95%", "margin-left: 20px"):
        container.add_style(style)

    task_table = TableWdg("sthpw/task", "layout_right")

    from pyasm.biz import Task
    task_table.set_sobjects(Task.get_by_sobject(target))
    task_table.set_show_property(False)

    container.add(task_table)
    return container
def _test_multi_db_subselect(self):
    '''Exercise relationship search filters across databases, with and
    without the multidb subselect optimization, and verify the generated
    cross-database join statement where the backend supports it.'''
    from pyasm.biz import Task

    # three people, each with one "subselect" task of a different status
    person = SearchType.create('unittest/person')
    person.set_value('name_first','carin')
    person.commit()
    Task.create(person, 'subselect','some task', 'admin', status="a")

    person = SearchType.create('unittest/person')
    person.set_value('name_first','carin2')
    person.commit()
    Task.create(person, 'subselect','some task', 'admin', status="b")

    person = SearchType.create('unittest/person')
    person.set_value('name_first','carin3')
    person.commit()
    Task.create(person, 'subselect','some task', 'admin', status="c")

    # find people that have a task "subselect" (no multidb)
    search_person = Search("unittest/person")
    search_task = Search("sthpw/task")
    search_person.add_relationship_search_filter(search_task, use_multidb=False)
    search_task.add_filter("process", "subselect")
    sobjects = search_person.get_sobjects()
    self.assertEquals(len(sobjects), 3)

    # same query, using a subselect
    search_person = Search("unittest/person")
    search_task = Search("sthpw/task")
    search_person.add_relationship_search_filter(search_task, use_multidb=True)
    search_task.add_filter("process", "subselect")
    sobjects = search_person.get_sobjects()
    self.assertEquals(len(sobjects), 3)

    # find the tasks that carin has
    search_task = Search("sthpw/task")
    search_person = Search("unittest/person")
    search_person.add_filter('name_first','carin')
    search_task.add_relationship_search_filter(search_person, use_multidb=True)
    sobjects = search_task.get_sobjects()

    # test an order by on a task status (cross database join)
    # Note this will not work on Postgres or SQLite which do not
    # support cross database joins
    search_person = Search("unittest/person")
    search_person.add_order_by("sthpw/task.status", direction="desc")
    statement = search_person.get_statement()

    can_join = DatabaseImpl.can_search_types_join(
        "unittest/person", "sthpw/task")
    if can_join:
        expected = '''SELECT %s"person".* FROM %s"person" LEFT OUTER JOIN %s"task" ON "person"."code" = "task"."search_code" WHERE "task"."search_type" = 'unittest/person?project=unittest' ORDER BY "task"."status" desc''' % (self.prefix, self.prefix, self.sthpw_prefix)
        self.assertEquals(expected, statement)

        # statuses c, b, a sorted desc give carin3, carin2, carin
        sobjects = search_person.get_sobjects()
        names = [x.get_value("name_first") for x in sobjects]
        # BUG FIX: expected name was misspelled "carint2"; the person
        # created above is "carin2", so the assertion could never pass
        expected = ['carin3','carin2','carin']
        self.assertEquals(expected, names)
def get_tasks_wdg(my):
    '''Build the "Tasks" tab widget: a filter box plus a table of the
    current user's assigned tasks, optionally restricted to a date range
    taken from the calendar controls.

    @return: the assembled Widget
    '''
    widget = Widget()
    help = HelpItemWdg('Tasks', 'Tasks tab lets users view tasks assigned to him from multiple projects. For convenience, you can select a Time Preset like [this week] or [this month] to view what tasks fall within the chosen time range. Alternatively, you can click on the year, month, or week labels of the calendar to set a time range.')
    widget.add(help)

    search = Search("sthpw/task")
    search.add_order_by("bid_start_date")

    # NOTE(review): the DivWdg is immediately overwritten by FilterboxWdg;
    # the first assignment appears to be dead code
    div = DivWdg(css="filter_box")
    div = FilterboxWdg()

    week_filter = my.get_week_filter()
    div.add(week_filter)

    # when checked, the date-range restriction below is skipped entirely
    range_checkbox = FilterCheckboxWdg("all_tasks", label="Show All Assigned Tasks")
    range_flag = range_checkbox.get_value()
    div.add(range_checkbox)
    hint = HintWdg("If not checked, only the tasks that fall within the defined date range are displayed")
    div.add(hint)

    project_filter = ProjectFilterWdg()
    project_filter.get_navigator().set_submit_onchange(False)
    project_code = project_filter.get_value()
    div.add_advanced_filter(project_filter)
    div.add_advanced_filter(HtmlElement.br())

    task_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(task_filter)

    search.add_project_filter(project_code)

    task_statuses = task_filter.get_processes()
    task_statuses_selected = task_filter.get_values()
    # one way to show tasks with obsolete statuses when the user
    # check all the task status checkboxes
    if task_statuses != task_statuses_selected:
        search.add_filters("status", task_filter.get_values() )
    widget.add(div)

    # only tasks assigned to the logged-in user
    user = Environment.get_user_name()
    search.add_filter("assigned", user)

    # add a date filter
    # TODO: should somehow get this from CalendarBarWdg
    if not range_flag:
        from pyasm.widget import CalendarBarWdg

        # persisted calendar bounds, stored as "<year>:<month-name>"
        left_bound_hid = HiddenWdg('cal_left_control_hid')
        left_bound_hid.set_persistence()
        cal_left = left_bound_hid.get_value()
        right_bound_hid = HiddenWdg('cal_right_control_hid')
        right_bound_hid.set_persistence()
        cal_right = right_bound_hid.get_value()

        if not cal_left or not cal_right:
            # TODO: should be this month
            start_year = "2007"
            start_month_str = "Jan"
            end_year = "2007"
            end_month_str = "Dec"
        else:
            start_year, start_month_str = cal_left.split(":")
            end_year, end_month_str = cal_right.split(":")

        months = CalendarBarWdg.MONTHS
        start_month = 1
        end_month = 12
        if not start_year:
            date = Date()
            start_year = date.get_year()
            end_year = date.get_year()

        # map month names to 1-based numbers; end month is exclusive
        # (hence +2), falling back to the defaults on unknown names
        try:
            start_month = months.index(start_month_str)+1
        except ValueError:
            pass
        try:
            end_month = months.index(end_month_str)+2
        except ValueError:
            pass
        if end_month == 13:
            # December rolls the exclusive end bound into January of next year
            end_month = 1
            end_year = int(end_year)+1

        start_date = "%s-%0.2d-01" % (start_year, start_month)
        end_date = "%s-%0.2d-01" % (end_year, end_month)

        # a preset week narrows the range to that single week
        preset_week = HiddenWdg('cal_week_hid').get_value()
        if preset_week:
            # handle cross-year scenario
            if int(preset_week) == 1 and start_month == 12:
                start_year = int(start_year) + 1
            day_list = Calendar.get_monthday_time(
                start_year, int(preset_week), month_digit=True)[0]
            year = day_list[2][0]
            month = int(day_list[0])
            month_day = day_list[1]
            start_date = "%s-%0.2d-%s" % (year, month, month_day)
            start_date_obj = Date(db_date=start_date)
            start_date_obj.add_days(7)
            end_date = start_date_obj.get_db_date()

        # keep tasks that start in, end in, or span the date range
        search.add_where(''' ( (bid_start_date >= '%s' and bid_start_date <= '%s') or (bid_end_date >= '%s' and bid_end_date <= '%s') or (bid_start_date <= '%s' and bid_end_date >='%s')) ''' % (start_date, end_date, start_date, end_date, start_date, end_date) )

    table = TableWdg("sthpw/task", "my_task")
    sobjects = search.get_sobjects()
    sorted_tasks = Task.sort_tasks(sobjects)
    table.set_sobjects(sorted_tasks)
    widget.add(table)
    return widget