def postprocess(self):
    value = self.get_value()
    if not value:
        return

    from pyasm.biz import Task
    Task.add_initial_tasks(self.sobject)
def execute(my): print "EXECUTING sample command" # create the render render = SearchType.create("prod/render") render.set_parent(my.prev_command.sobject) render.set_value("pipeline_code", "turntable") render.commit() Task.add_initial_tasks(render) prev_sobject = my.prev_command.sobject prev_process = "model" this_sobject = my.prev_command.sobject this_process = "turntable" # get the deliverable snapshot = Snapshot.get_latest_by_sobject(prev_sobject, prev_process) if not snapshot: return # once we have this snapshot, open the file and process lib_dir = snapshot.get_lib_dir() file_name = snapshot.get_name_by_type("maya") file_path = "%s/%s" % (lib_dir, file_name) f = open(file_path, 'r') lines = f.readlines() f.close() tmp_dir = Environment.get_tmp_dir() new_file_name = "whatever.new" new_file_path = "%s/%s" % (tmp_dir, new_file_name) f2 = open(new_file_path, 'wb') for i, line in enumerate(lines): line = "%s - %s" % (i, line) f2.write(line) f2.close() file_paths = [new_file_path] file_types = ['maya'] from pyasm.checkin import FileCheckin checkin = FileCheckin.get(this_sobject, file_paths, file_types, context=this_process) checkin.execute() my.set_event_name("task/approved") my.set_process("preprocess") my.set_pipeline_code("turntable") my.sobjects = [render] # ??? my.sobject = render my.set_as_approved()
def postprocess(self):
    from pyasm.web import WebContainer
    from pyasm.biz import Task

    web = WebContainer.get_web()
    add_initial_tasks = web.get_form_value("add_initial_tasks")
    if add_initial_tasks != "on":
        return

    Task.add_initial_tasks(self.sobject)
def execute(my): print "EXECUTING sample command" # create the render render = SearchType.create("prod/render") render.set_parent(my.prev_command.sobject) render.set_value("pipeline_code", "turntable") render.commit() Task.add_initial_tasks(render) prev_sobject = my.prev_command.sobject prev_process = "model" this_sobject = my.prev_command.sobject this_process = "turntable" # get the deliverable snapshot = Snapshot.get_latest_by_sobject(prev_sobject, prev_process) if not snapshot: return # once we have this snapshot, open the file and process lib_dir = snapshot.get_lib_dir() file_name = snapshot.get_name_by_type("maya") file_path = "%s/%s" % (lib_dir, file_name) f = open( file_path, 'r') lines = f.readlines() f.close() tmp_dir = Environment.get_tmp_dir() new_file_name = "whatever.new" new_file_path = "%s/%s" % (tmp_dir, new_file_name) f2 = open( new_file_path, 'wb') for i, line in enumerate(lines): line = "%s - %s" % ( i,line) f2.write(line) f2.close() file_paths = [new_file_path] file_types = ['maya'] from pyasm.checkin import FileCheckin checkin = FileCheckin.get(this_sobject, file_paths, file_types, context=this_process) checkin.execute() my.set_event_name("task/approved") my.set_process("preprocess") my.set_pipeline_code("turntable") my.sobjects = [render] # ??? my.sobject = render my.set_as_approved()
def execute(self):
    self.search_key_list = self.kwargs.get('search_key_list')
    web = WebContainer.get_web()
    skip_duplicated = web.get_form_value('skip_duplicated') == 'on'
    pipeline_mode = web.get_form_value('pipeline_mode')

    sobjects = SearchKey.get_by_search_keys(self.search_key_list)
    count = 0
    offset = 0
    for sobject in sobjects:
        if isinstance(sobject, Task):
            raise TacticException('Creation of task for [Task] is not allowed')

        sk = SearchKey.get_by_sobject(sobject)
        if not sobject.has_value('pipeline_code'):
            #raise TacticException('Creation of task is not allowed for item with no pipeline_code attribute.')
            pipeline_code = '__default__'
            sobject.set_value("pipeline_code", pipeline_code)
        else:
            pipeline_code = sobject.get_value('pipeline_code')

        input_name = '%s|task_process' % pipeline_code
        contexts = []
        process_names = web.get_form_values(input_name)
        process_names = [name for name in process_names if name]

        if pipeline_mode == 'context':
            # when pipeline_mode is context, we only specify contexts
            # in add_initial_tasks
            contexts = process_names[:]
            process_names = []

        tasks = Task.add_initial_tasks(sobject, sobject.get_value('pipeline_code'),
                processes=process_names, contexts=contexts,
                skip_duplicate=skip_duplicated, mode=pipeline_mode,
                start_offset=offset)
        count += len(tasks)
        offset += 5

    self.add_description("%s Tasks added in total." % count)
def execute(my):
    input = my.get_input()
    search_key = input.get("search_key")
    update_data = input.get("update_data")
    if not search_key or search_key.startswith('sthpw/'):
        return

    mode = input.get("mode")
    if mode not in ['insert']:
        return

    sobject = my.get_caller()
    pipeline_code = sobject.get_value("pipeline_code", no_exception=True)
    if not pipeline_code:
        return

    from pyasm.biz import Pipeline, Task
    from pyasm.search import SearchType
    pipeline = Pipeline.get_by_code(pipeline_code)
    if not pipeline:
        return

    if pipeline.get_value("autocreate_tasks", no_exception=True) not in ['true', True]:
        return

    processes = pipeline.get_process_names()

    #search = Search("config/process")
    #search.add_filter("pipeline_code", pipeline_code)
    #processes = search.get_sobjects()

    #import time
    #start = time.time()
    Task.add_initial_tasks(sobject, pipeline_code=pipeline_code,
            processes=processes, skip_duplicate=True, mode='standard')
def execute(self):
    input = self.get_input()
    search_key = input.get("search_key")
    update_data = input.get("update_data")
    if not search_key or search_key.startswith('sthpw/'):
        return

    mode = input.get("mode")
    if mode not in ['insert']:
        return

    sobject = self.get_caller()
    pipeline_code = sobject.get_value("pipeline_code", no_exception=True)
    if not pipeline_code:
        return

    from pyasm.biz import Pipeline, Task
    from pyasm.search import SearchType
    pipeline = Pipeline.get_by_code(pipeline_code)
    if not pipeline:
        return

    if pipeline.get_value("autocreate_tasks", no_exception=True) not in ['true', True]:
        return

    #import time
    #start = time.time()
    Task.add_initial_tasks(sobject, pipeline_code=pipeline_code,
            skip_duplicate=True, mode='standard')
def _get_updates(self):
    '''Create sObject and tasks that we will test to receive updates on.
    Current transaction is committed in _test_insert.'''
    Transaction.get(create=True)

    from pyasm.search import SearchType
    sobj = SearchType.create("prod/asset")
    sobj.set_defaults()
    sobj.commit()
    search_key = sobj.get_search_key()
    self.search_key = search_key
    search_type = sobj.get_search_type()
    search_code = sobj.get_value('code')

    tasks = Task.add_initial_tasks(sobj, pipeline_code='__default__')
    first_task = tasks[0]
    task_sk = first_task.get_search_key()
    self.task_sk = task_sk

    script = '''console.log('hello world.')'''

    updates = {}

    # Expression counts the number of incomplete tasks for sobject
    expr = '''@COUNT(@SOBJECT(prod/asset['code', '%s'].sthpw/task['status', 'NEQ', 'complete']))''' % search_code
    # Compare is True iff all tasks are complete.
    compare = '''@COUNT(@SOBJECT(sthpw/task['status', 'NEQ', 'complete'])) < 1'''

    # Test expression by itself
    updates["001"] = {'expression': expr}
    # Test search_key and column
    updates["002"] = {'search_key': task_sk, 'column': 'status'}
    # Test compare and search_key
    updates["003"] = {'search_key': search_key, 'compare': compare, 'cbjs_action': script}
    # Test listen for search_type
    updates["004"] = {'search_type': search_type, 'value': True, 'cbjs_action': script}
    # Test expr_key and compare
    updates["005"] = {'expr_key': search_key, 'compare': compare}
    # Test search_key and expression
    expression = '''@COUNT(@SOBJECT(sthpw/task['status', 'NEQ', 'complete']))'''
    updates["006"] = {'search_key': search_key, 'expression': expression}

    return updates
def handle_pending(my):
    my.log_message(my.sobject, my.process, "pending")

    search = Search("config/process")
    search.add_filter("process", my.process)
    search.add_filter("pipeline_code", my.pipeline.get_code())
    process_sobj = search.get_sobject()

    workflow = process_sobj.get_json_value("workflow")
    if workflow:
        assigned = workflow.get("assigned")
    else:
        assigned = None

    # check to see if the tasks exist and if they don't then create one
    tasks = Task.get_by_sobject(my.sobject, process=my.process)
    if not tasks:
        tasks = Task.add_initial_tasks(my.sobject, processes=[my.process], assigned=assigned)
    else:
        my.set_all_tasks(my.sobject, my.process, "pending")

    Trigger.call(my, "process|action", my.input)
def execute(my):
    # set all tasks to pending
    pipeline = my.input.get("pipeline")
    process = my.input.get("process")
    sobject = my.input.get("sobject")

    process_obj = pipeline.get_process(process)
    node_type = process_obj.get_type()

    #print "pending: ", process, node_type
    my.run_callback(pipeline, process, "pending")

    if node_type not in ["node", "manual"]:
        my.set_all_tasks(sobject, process, "pending")

    if node_type in ["action", "condition"]:
        Trigger.call(my, "process|action", output=my.input)

    elif node_type in ["approval"]:
        # check to see if the tasks exist and if they don't then create one
        tasks = Task.get_by_sobject(sobject, process=process)
        if not tasks:
            tasks = Task.add_initial_tasks(sobject, processes=[process])
        else:
            my.set_all_tasks(sobject, process, "pending")

    elif node_type in ["hierarchy"]:
        search = Search("config/process")
        search.add_filter("pipeline_code", pipeline.get_code())
        search.add_filter("process", process)
        process_sobj = search.get_sobject()

        process_code = process_sobj.get_code()

        search = Search("sthpw/pipeline")
        search.add_filter("parent_process", process_code)
        subpipeline = search.get_sobject()
        if not subpipeline:
            return

        child_processes = subpipeline.get_processes()

        #child_pipeline = process_obj.get_child_pipeline()
        #child_processes = child_pipeline.get_processes()
        if child_processes:
            first_process = child_processes[0]
            first_name = first_process.get_name()

            input = {
                'pipeline': subpipeline,
                'sobject': sobject,
                'process': first_process.get_name(),
            }

            event = "process|pending"
            Trigger.call(my, event, input)
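Taken together, the examples above cover the keyword arguments that Task.add_initial_tasks accepts: pipeline_code, processes, contexts, skip_duplicate, mode, start_offset, and assigned. The sketch below is not from the original sources; it is a minimal consolidation of the call pattern seen in the insert-trigger examples, and it assumes a running TACTIC environment in which the "prod/asset" search type exists.

# Minimal sketch, assuming a running TACTIC environment with the
# "prod/asset" search type registered; the '__default__' pipeline
# fallback mirrors the examples above.
from pyasm.search import SearchType
from pyasm.biz import Task

sobject = SearchType.create("prod/asset")
sobject.set_defaults()
sobject.commit()

# Create one task per process in the sobject's pipeline, skipping
# any that already exist.
tasks = Task.add_initial_tasks(sobject, pipeline_code='__default__',
        skip_duplicate=True, mode='standard')
print("%s tasks created" % len(tasks))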