def handle_revise(my):
    my.log_message(my.sobject, my.process, "revise")
    my.run_callback(my.pipeline, my.process, "revise")

    # set all tasks in the process to revise
    my.set_all_tasks(my.sobject, my.process, "revise")

    process_obj = my.pipeline.get_process(my.process)

    # send a revise signal to the previous processes
    input_processes = my.pipeline.get_input_processes(my.process)
    for input_process in input_processes:
        input_process = input_process.get_name()
        if my.process_parts:
            input_process = "%s.%s" % (my.process_parts[0], input_process)

        input = {
            'pipeline': my.pipeline,
            'sobject': my.sobject,
            'process': input_process
        }

        event = "process|revise"
        Trigger.call(my, event, input)
def execute(my):
    process = my.input.get("process")
    sobject = my.input.get("sobject")
    pipeline = my.input.get("pipeline")

    process_obj = pipeline.get_process(process)
    node_type = process_obj.get_type()

    print "Revise: ", process, node_type

    my.run_callback(pipeline, process, "revise")

    if node_type in ['manual', 'node', 'action']:
        my.set_all_tasks(sobject, process, "revise")

    if node_type in ['approval', 'action', 'condition']:
        input_processes = pipeline.get_input_processes(process)
        for input_process in input_processes:
            input_process = input_process.get_name()

            input = {
                'pipeline': pipeline,
                'sobject': sobject,
                'process': input_process
            }

            event = "process|revise"
            Trigger.call(my, event, input)
def _test_js(my):
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    process = processes.get("a")
    process.set_json_value("workflow", {
        'cbjs_action': '''
console.log("This is javascript");
console.log(input);
return false
'''
    })
    process.commit()

    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
        "status": "pending"
    }

    import time
    start = time.time()

    Trigger.call(my, "process|pending", output)
def _test_custom_status(self):
    task_pipeline_xml = '''
    <pipeline>
      <process name="Pending"/>
      <process name="Do It"/>
      <process name="Fix it" mapping="revise"/>
      <process name="Push Back" mapping="reject"/>
      <process name="Revise"/>
      <process name="Go to Do It" direction="output" status="Do It"/>
      <process name="Accept" mapping="complete"/>
    </pipeline>
    '''
    task_pipeline, task_processes = self.get_pipeline(task_pipeline_xml)
    task_pipeline.set_value("code", "custom_task")
    task_pipeline.commit()

    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")

    pipeline_xml = '''
    <pipeline>
      <process task_pipeline="custom_task" type="manual" name="a"/>
      <process task_pipeline="custom_task" type="action" name="b"/>
      <process type="action" name="c"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code())

    # Run the pipeline
    process = "b"
    status = "Push Back"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
        "status": status
    }
    Trigger.call(self, "process|custom", output)

    self.assertEquals("reject", sobject.get_value("b"))
    self.assertEquals("revise", sobject.get_value("a"))

    # Run the pipeline
    process = "a"
    status = "Go to Do It"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
        "status": status
    }
    Trigger.call(self, "process|custom", output)

    self.assertEquals("Do It", sobject.get_value("b"))
def _test_manual(self):
    print "test manual"

    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")
    sobject.set_value("a", False)
    sobject.set_value("b", False)

    pipeline_xml = '''
    <pipeline>
      <process name="a"/>
      <process type="action" name="b"/>
      <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    # Run the pipeline
    process = "a"
    output = {"pipeline": pipeline, "sobject": sobject, "process": process}
    Trigger.call(self, "process|pending", output)

    # nothing should have run
    self.assertEquals("pending", sobject.get_value("a"))
    self.assertEquals(False, sobject.get_value("b"))
def execute(my):
    process = my.input.get("process")
    sobject = my.input.get("sobject")
    pipeline = my.input.get("pipeline")

    process_obj = pipeline.get_process(process)
    node_type = process_obj.get_type()

    my.run_callback(pipeline, process, "revise")

    if node_type in ["condition", "action", "approval"]:
        my.set_all_tasks(sobject, process, "")

        input_processes = pipeline.get_input_processes(process)
        for input_process in input_processes:
            input_process = input_process.get_name()

            input = {
                'pipeline': pipeline,
                'sobject': sobject,
                'process': input_process
            }

            event = "process|revise"
            Trigger.call(my, event, input)

    else:
        my.set_all_tasks(sobject, process, my.get_status())
def _test_messaging(self):
    # create a dummy sobject
    city = SearchType.create("unittest/city")

    city_pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="action" name="b"/>
      <process type="action" name="c"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
    </pipeline>
    '''
    city_pipeline, city_processes = self.get_pipeline(city_pipeline_xml)
    city.set_value("pipeline_code", city_pipeline.get_code())
    city.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": city_pipeline,
        "sobject": city,
        "process": process
    }
    Trigger.call(self, "process|pending", output)

    for process in city_processes:
        key = "%s|%s|status" % (city.get_search_key(), process)
        search = Search("sthpw/message")
        search.add_filter("code", key)
        sobject = search.get_sobject()
        message = sobject.get_value("message")
        self.assertEquals("complete", message)
def _test_manual(my):
    print "test manual"

    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")
    sobject.set_value("a", False)
    sobject.set_value("b", False)

    pipeline_xml = '''
    <pipeline>
      <process name="a"/>
      <process type="action" name="b"/>
      <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)

    # nothing should have run
    my.assertEquals("pending", sobject.get_value("a"))
    my.assertEquals(False, sobject.get_value("b"))
def handle_complete(my):
    # run a node's complete trigger
    status = "complete"
    my.log_message(my.sobject, my.process, status)
    my.run_callback(my.pipeline, my.process, status)

    process_obj = my.pipeline.get_process(my.process)

    # call the process|pending event for all output processes
    output_processes = my.pipeline.get_output_processes(my.process)
    for output_process in output_processes:
        output_process = output_process.get_name()
        if my.process_parts:
            output_process = "%s.%s" % (my.process_parts[0], output_process)

        output = {
            'pipeline': my.pipeline,
            'sobject': my.sobject,
            'process': output_process
        }

        event = "process|pending"
        Trigger.call(my, event, output)
def _test_multi_input(my):
    # Disabled for now
    return

    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    code = "test%s" % Common.generate_alphanum_key()
    sobject.set_value("code", code)

    #search = Search("sthpw/message")
    #sobjects = search.get_sobjects()
    #for sobject in sobjects:
    #    sobject.delete()

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="action" name="b1"/>
      <process type="action" name="b2"/>
      <process type="action" name="b3"/>
      <process type="action" name="b4"/>
      <process type="action" name="c"/>
      <process type="action" name="d"/>
      <connect from="a" to="b1"/>
      <connect from="a" to="b2"/>
      <connect from="a" to="b3"/>
      <connect from="a" to="b4"/>
      <connect from="b1" to="c"/>
      <connect from="b2" to="c"/>
      <connect from="b3" to="c"/>
      <connect from="b4" to="c"/>
      <connect from="c" to="d"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    process = processes.get("c")
    process.set_json_value("workflow", {
        'on_action': '''
print "c: running action"
'''
    })
    process.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)
def _test_multi_input(my):
    # Disabled for now.  This is not working
    #return

    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    code = "test%s" % Common.generate_alphanum_key()
    sobject.set_value("code", code)

    #search = Search("sthpw/message")
    #sobjects = search.get_sobjects()
    #for sobject in sobjects:
    #    sobject.delete()

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="action" name="b1"/>
      <process type="action" name="b2"/>
      <process type="action" name="b3"/>
      <process type="action" name="b4"/>
      <process type="action" name="c"/>
      <process type="action" name="d"/>
      <connect from="a" to="b1"/>
      <connect from="a" to="b2"/>
      <connect from="a" to="b3"/>
      <connect from="a" to="b4"/>
      <connect from="b1" to="c"/>
      <connect from="b2" to="c"/>
      <connect from="b3" to="c"/>
      <connect from="b4" to="c"/>
      <connect from="c" to="d"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    process = processes.get("c")
    process.set_json_value("workflow", {
        'on_action': '''
print "c: running action"
'''
    })
    process.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)
def execute(my):
    process = my.input.get("process")
    sobject = my.input.get("sobject")
    pipeline = my.input.get("pipeline")

    process_obj = pipeline.get_process(process)
    node_type = process_obj.get_type()
    #print "complete: ", process, node_type

    status = my.get_status()

    # run a node's complete trigger
    #event = "process|complete|%s" % process
    #Trigger.call(my, event, output=my.input)
    my.run_callback(pipeline, process, status)

    if node_type in ["action", "approval", "manual", "node", "hierarchy"]:
        # call the process|pending event for all output processes
        output_processes = pipeline.get_output_processes(process)
        for output_process in output_processes:
            output_process = output_process.get_name()

            output = {
                'pipeline': pipeline,
                'sobject': sobject,
                'process': output_process
            }

            event = "process|pending"
            Trigger.call(my, event, output)

    if node_type in ["action", "condition"]:
        my.set_all_tasks(sobject, process, "complete")

    parent_process = pipeline.get_value("parent_process")
    #print "parent: ", parent_process
    if parent_process:
        output_processes = pipeline.get_output_processes(process)
        if not output_processes:
            # look at the parent pipeline
            parent_process_sobj = Search.get_by_code("config/process", parent_process)
            parent_pipeline_code = parent_process_sobj.get_value("pipeline_code")
            parent_pipeline = Search.get_by_code("sthpw/pipeline", parent_pipeline_code)
            parent_process = parent_process_sobj.get_value("process")

            output = {
                'pipeline': parent_pipeline,
                'sobject': sobject,
                'process': parent_process,
            }

            event = "process|complete"
            Trigger.call(my, event, output)
def _test_hierarchy(self):
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="hierarchy" name="b"/>
      <process type="hierarchy" name="c"/>
      <process type="action" name="d"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
      <connect from="c" to="d"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    # create the sub pipeline
    subpipeline_xml = '''
    <pipeline>
      <process type="input" name="start"/>
      <process type="action" name="suba"/>
      <process type="action" name="subb"/>
      <process type="action" name="subc"/>
      <process type="output" name="end"/>
      <connect from="start" to="suba"/>
      <connect from="suba" to="subb"/>
      <connect from="subb" to="subc"/>
      <connect from="subc" to="end"/>
    </pipeline>
    '''
    subpipeline, subprocesses = self.get_pipeline(subpipeline_xml)
    #subpipeline.set_value("parent_process", parent_process.get_code())
    subpipeline.commit()
    subpipeline_code = subpipeline.get_code()

    p = processes.get("b")
    p.set_value("subpipeline_code", subpipeline_code)
    p.commit()

    p = processes.get("c")
    p.set_value("subpipeline_code", subpipeline_code)
    p.commit()

    # Run the pipeline
    process = "a"
    output = {"pipeline": pipeline, "sobject": sobject, "process": process}
    Trigger.call(self, "process|pending", output)

    self.assertEquals("complete", sobject.get_value("a"))
    self.assertEquals("complete", sobject.get_value("b"))
    self.assertEquals("complete", sobject.get_value("c"))

    self.assertEquals("complete", sobject.get_value("start"))
    self.assertEquals("complete", sobject.get_value("suba"))
    self.assertEquals("complete", sobject.get_value("subb"))
    self.assertEquals("complete", sobject.get_value("subc"))
    self.assertEquals("complete", sobject.get_value("end"))
def _test_progress_reject(self):
    # FIXME: it is not completely clear what should happen when a progress
    # node receives a revise message.
    return

    # create a dummy sobject
    city = SearchType.create("unittest/city")
    people = []

    person_pipeline_xml = '''
    <pipeline>
      <process type="action" name="p1"/>
    </pipeline>
    '''
    person_pipeline, person_processes = self.get_pipeline(
        person_pipeline_xml, search_type="unittest/person")
    person_pipeline_code = person_pipeline.get_value("code")

    city_pipeline_xml = '''
    <pipeline>
      <process type="progress" name="c1" pipeline_code="%s" search_type="unittest/person" process="p1" status="complete"/>
      <process type="approval" name="c2"/>
      <connect from="c1" to="c2"/>
    </pipeline>
    ''' % person_pipeline_code
    city_pipeline, city_processes = self.get_pipeline(
        city_pipeline_xml, search_type="unittest/city")
    city.set_value("pipeline_code", city_pipeline.get_code())
    city.commit()

    from pyasm.common import Container
    Container.put("process_listeners", None)

    for name in ['Beth', 'Cindy', 'John']:
        person = SearchType.create("unittest/person")
        person.set_value("name_first", name)
        person.set_value("pipeline_code", person_pipeline.get_code())
        person.set_value("city_code", city.get_code())
        person.commit()

        person.set_value("p1", "complete")
        people.append(person)

    process = "c2"
    output = {
        "pipeline": city_pipeline,
        "sobject": city,
        "process": process
    }
    Trigger.call(self, "process|reject", output)

    for person in people:
        self.assertEquals("revise", person.get_value("p1"))
def _test_choice(my):
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")
    sobject.set_value("a", False)
    sobject.set_value("b", False)
    sobject.set_value("c", False)
    sobject.set_value("d", False)
    sobject.set_value("e", False)

    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="condition" name="b"/>
      <process type="action" name="c"/>
      <process type="action" name="d"/>
      <process type="action" name="e"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c" from_attr="stream1"/>
      <connect from="b" to="d" from_attr="stream2"/>
      <connect from="b" to="e" from_attr="stream3"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    process = processes.get("b")
    process.set_json_value("workflow", {
        'on_action': '''
# ... some code to determine True or False
return ['stream1', 'stream3']
''',
        'on_complete': '''
sobject.set_value('b', "complete")
'''
    })
    process.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)

    my.assertEquals("complete", sobject.get_value("a"))
    my.assertEquals("complete", sobject.get_value("b"))
    my.assertEquals("complete", sobject.get_value("c"))
    my.assertEquals(False, sobject.get_value("d"))
    my.assertEquals("complete", sobject.get_value("e"))
def handle_pending(my):
    # DISABLE for now
    #if not my.check_inputs():
    #    return

    # simply calls action
    my.log_message(my.sobject, my.process, "pending")
    my.set_all_tasks(my.sobject, my.process, "pending")
    my.run_callback(my.pipeline, my.process, "pending")

    Trigger.call(my, "process|action", output=my.input)
def execute(my): key = "enable_workflow_engine" from prod_setting import ProdSetting setting = ProdSetting.get_value_by_key(key) if setting not in [True, 'true']: return # find the node in the pipeline task = my.get_caller() sobject = task.get_parent() if not sobject: return pipeline = None process_code = task.get_value("process_code", no_exception=True) if process_code: process_sobj = Search.get_by_code("config/process", process_code) if process_sobj: pipeline_code = process_sobj.get_value("pipeline_code") pipeline = Pipeline.get_by_code("sthpw/pipeline", pipeline_code) if not pipeline: pipeline = Pipeline.get_by_sobject(sobject) if not pipeline: return process_name = task.get_value("process") status = task.get_value("status") process = pipeline.get_process(process_name) if not process: # we don't have enough info here return node_type = process.get_type() process_name = process.get_name() event = "process|%s" % status.lower() output = { 'sobject': sobject, 'pipeline': pipeline, 'process': process_name, } Trigger.call(task, event, output=output)
def _test_hierarchy(my):
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="hierarchy" name="b"/>
      <process type="action" name="c"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)
    parent_process = processes.get("b")
    print "parent: ", pipeline.get_code()
    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # create the sub pipeline
    subpipeline_xml = '''
    <pipeline>
      <process type="action" name="suba"/>
      <process type="action" name="subb"/>
      <process type="action" name="subc"/>
      <connect from="suba" to="subb"/>
      <connect from="subb" to="subc"/>
    </pipeline>
    '''
    subpipeline, subprocesses = my.get_pipeline(subpipeline_xml)
    subpipeline.set_value("parent_process", parent_process.get_code())
    subpipeline.commit()
    print "sub: ", subpipeline.get_code()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)

    my.assertEquals("complete", sobject.get_value("a"))
    my.assertEquals("complete", sobject.get_value("b"))
    my.assertEquals("complete", sobject.get_value("c"))

    my.assertEquals("complete", sobject.get_value("suba"))
    my.assertEquals("complete", sobject.get_value("subb"))
    my.assertEquals("complete", sobject.get_value("subc"))
def _test_multi_input(self):
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    code = "test%s" % Common.generate_alphanum_key()
    sobject.set_value("code", code)

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="action" name="b1"/>
      <process type="action" name="b2"/>
      <process type="action" name="b3"/>
      <process type="action" name="b4"/>
      <process type="action" name="c"/>
      <process type="action" name="d"/>
      <connect from="a" to="b1"/>
      <connect from="a" to="b2"/>
      <connect from="a" to="b3"/>
      <connect from="a" to="b4"/>
      <connect from="b1" to="c"/>
      <connect from="b2" to="c"/>
      <connect from="b3" to="c"/>
      <connect from="b4" to="c"/>
      <connect from="c" to="d"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    process = processes.get("c")
    process.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(self, "process|pending", output)

    self.assertEquals("complete", sobject.get_value("a"))
    self.assertEquals("complete", sobject.get_value("b1"))
    self.assertEquals("complete", sobject.get_value("b2"))
    self.assertEquals("complete", sobject.get_value("b3"))
    self.assertEquals("complete", sobject.get_value("b4"))
    self.assertEquals("complete", sobject.get_value("c"))
    self.assertEquals("complete", sobject.get_value("d"))
def add(key, cache):
    CACHE[key] = cache

    # when adding a new cache, register its refresh events
    events = cache.get_refresh_events()
    from pyasm.command import Trigger
    for event in events:
        #print "registering: ", event
        trigger = SearchType.create("sthpw/trigger")
        trigger.set_value("event", event)
        trigger.set_value("class_name", "pyasm.command.SearchTypeCacheTrigger")
        Trigger.append_static_trigger(trigger)
def execute(self):
    web = WebContainer.get_web()

    # get the input names
    input_names = web.get_form_value(SerialStatusWdg.STATUS_CMD_INPUT).split('|')

    values = []
    for input_name in input_names:
        value = web.get_form_value(input_name)
        if value:
            values.append(web.get_form_value(input_name))

    # FIXME: HARDCODED Value for status column!!!!
    column = "status"

    for value in values:
        # get the sobject to be updated
        search_type, id, status = value.split("|")
        search = Search(search_type)
        search.add_id_filter(id)
        self.sobject = search.get_sobject()

        status_attr = self.sobject.get_attr(column)
        cur_status = status_attr.get_current_process()
        if cur_status == status:
            continue

        status_attr.set_status(status)

        update_column = 'time_update'
        if update_column in self.sobject.get_attr_names():
            self.sobject.set_value(update_column, Sql.get_timestamp_now(), quoted=False)
        self.sobject.commit()

        # if this is successful, then store it in the status_log
        status_log = SObjectFactory.create("sthpw/status_log")
        status_log.set_value("login", Environment.get_user_name())
        status_log.set_value("search_type", search_type)
        status_log.set_value("search_id", id)
        #status_log.set_value("status", "%s to %s" % (cur_status, status))
        status_log.set_value("from_status", cur_status)
        status_log.set_value("to_status", status)
        status_log.commit()

        # call the trigger for this status
        Trigger.call(self, status)
def call_triggers(my):
    # call the done trigger for checkin
    from pyasm.command import Trigger
    output = {}
    snapshot = my.get_snapshot()
    output['search_key'] = SearchKey.build_by_sobject(snapshot)
    output['update_data'] = snapshot.data.copy()
    output['snapshot'] = snapshot.get_sobject_dict()
    output['files'] = [x.get_sobject_dict() for x in my.file_objects]

    # DEPRECATED
    #Trigger.call(my, "checkin/done", output)

    prefix = my.get_trigger_prefix()

    # Add the checkin triggers
    base_search_type = my.sobject.get_base_search_type()
    Trigger.call(my, prefix, output)
    Trigger.call(my, "%s|%s" % (prefix, base_search_type), output)
    Trigger.call(my, "%s|%s|%s" % (prefix, base_search_type, my.context), output)

    # get the process (assumption here)
    Trigger.call(my, "%s|%s" % (prefix, base_search_type), output, process=my.process)
def __init__(self, **kwargs):
    super(TransactionQueueManager, self).__init__(**kwargs)

    trigger = TransactionQueueServersTrigger()
    trigger.execute()
    self.servers = Container.get("TransactionQueueServers")

    # add a static trigger
    event = "change|sthpw/sync_server"
    trigger = SearchType.create("sthpw/trigger")
    trigger.set_value("event", event)
    trigger.set_value("class_name", "tactic.command.TransactionQueueServersTrigger")
    trigger.set_value("mode", "same process,same transaction")
    Trigger.append_static_trigger(trigger, startup=True)
def execute(my):
    web = WebContainer.get_web()

    # get the input names
    input_names = web.get_form_value(SerialStatusWdg.STATUS_CMD_INPUT).split('|')

    values = []
    for input_name in input_names:
        value = web.get_form_value(input_name)
        if value:
            values.append(web.get_form_value(input_name))

    # FIXME: HARDCODED Value for status column!!!!
    column = "status"

    for value in values:
        # get the sobject to be updated
        search_type, id, status = value.split("|")
        search = Search(search_type)
        search.add_id_filter(id)
        my.sobject = search.get_sobject()

        status_attr = my.sobject.get_attr(column)
        cur_status = status_attr.get_current_process()
        if cur_status == status:
            continue

        status_attr.set_status(status)

        update_column = 'time_update'
        if update_column in my.sobject.get_attr_names():
            my.sobject.set_value(update_column, Sql.get_timestamp_now(), quoted=False)
        my.sobject.commit()

        # if this is successful, then store it in the status_log
        status_log = SObjectFactory.create("sthpw/status_log")
        status_log.set_value("login", Environment.get_user_name())
        status_log.set_value("search_type", search_type)
        status_log.set_value("search_id", id)
        #status_log.set_value("status", "%s to %s" % (cur_status, status))
        status_log.set_value("from_status", cur_status)
        status_log.set_value("to_status", status)
        status_log.commit()

        # call the trigger for this status
        Trigger.call(my, status)
def _test_multi_input_complete(self):
    # DISABLE until check_inputs is called
    return

    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    code = "test%s" % Common.generate_alphanum_key()
    sobject.set_value("code", code)

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="manual" name="b1"/>
      <process type="manual" name="b2"/>
      <process type="manual" name="b3"/>
      <process type="action" name="c"/>
      <connect from="a" to="b1"/>
      <connect from="a" to="b2"/>
      <connect from="a" to="b3"/>
      <connect from="b1" to="c"/>
      <connect from="b2" to="c"/>
      <connect from="b3" to="c"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    process = processes.get("c")
    process.commit()

    # Run the pipeline
    process = "a"
    output = {"pipeline": pipeline, "sobject": sobject, "process": process}
    Trigger.call(self, "process|pending", output)

    # Run the pipeline
    process = "b1"
    output = {"pipeline": pipeline, "sobject": sobject, "process": process}
    Trigger.call(self, "process|complete", output)

    self.assertEquals("complete", sobject.get_value("a"))
    self.assertEquals("complete", sobject.get_value("b1"))
    self.assertEquals("pending", sobject.get_value("b2"))
    self.assertEquals("pending", sobject.get_value("b3"))

    # THIS WILL FAIL until we implement this correctly
    self.assertEquals("pending", sobject.get_value("c"))
def _test_trigger(my):
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    process = processes.get("a")
    process.set_value("workflow", "")
    process.commit()

    folder = Common.generate_alphanum_key()

    Trigger.clear_db_cache()

    event = "process|action"
    trigger = SearchType.create("config/trigger")
    trigger.set_value("event", event)
    trigger.set_value("process", process.get_code())
    trigger.set_value("mode", "same process,same transaction")
    trigger.set_value("script_path", "%s/process_trigger" % folder)
    trigger.commit()

    script = SearchType.create("config/custom_script")
    script.set_value("folder", folder)
    script.set_value("title", "process_trigger")
    script.set_value("script", '''
print "---"
for key, value in input.items():
    print key, value
print "---"
print "process: ", input.get("process")
''')
    script.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)
def _test_action_process(my):
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")
    sobject.set_value("a", False)
    sobject.set_value("b", False)
    sobject.set_value("c", False)
    sobject.set_value("d", False)
    sobject.set_value("e", False)

    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="action" name="b"/>
      <process type="action" name="c"/>
      <process type="action" name="d"/>
      <process type="action" name="e"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
      <connect from="b" to="d"/>
      <connect from="c" to="e"/>
      <connect from="d" to="e"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
        "status": "pending"
    }

    import time
    start = time.time()

    Trigger.call(my, "process|pending", output)
    #print "time: ", time.time() - start

    my.assertEquals("complete", sobject.get_value("a"))
    my.assertEquals("complete", sobject.get_value("b"))
    my.assertEquals("complete", sobject.get_value("c"))
    my.assertEquals("complete", sobject.get_value("d"))

    # TODO: this got called twice ... not what we want: fix later
    my.assertEquals("complete", sobject.get_value("e"))
def _test_input(my):
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="condition" name="b"/>
      <process type="action" name="c"/>
      <process type="action" name="d"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c" from_attr="success"/>
      <connect from="b" to="d" from_attr="success"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    # check input values
    process = processes.get("b")
    process.set_json_value("workflow", {
        'on_action': '''
inputs = input.get("inputs")
sobject.set_value("b_input", inputs[0])

outputs = input.get("outputs")
sobject.set_value("b_output", ",".join(outputs))

sobject.set_value("test", "test")
'''
    })
    process.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)

    # make sure we have the same sobject
    my.assertEquals("test", sobject.get_value("test"))

    my.assertEquals("a", sobject.get_value("b_input"))
    my.assertEquals("c,d", sobject.get_value("b_output"))
def call_triggers(self):
    # call the done trigger for checkin
    from pyasm.command import Trigger
    output = {}
    snapshot = self.get_snapshot()
    output['search_key'] = SearchKey.build_by_sobject(snapshot)
    output['update_data'] = snapshot.data.copy()
    output['snapshot'] = snapshot.get_sobject_dict()
    output['files'] = [x.get_sobject_dict() for x in self.file_objects]

    # DEPRECATED
    #Trigger.call(self, "checkin/done", output)

    prefix = self.get_trigger_prefix()

    # Add the checkin triggers
    base_search_type = self.sobject.get_base_search_type()
    Trigger.call(self, prefix, output)
    Trigger.call(self, "%s|%s" % (prefix, base_search_type), output)
    Trigger.call(self, "%s|%s|%s" % (prefix, base_search_type, self.context), output)

    # get the process (assumption here) and call both on process and process code
    process = self.process
    pipeline = None
    if process:
        Trigger.call(self, "%s|%s" % (prefix, base_search_type), output, process=process)

        pipeline_code = self.sobject.get_value("pipeline_code", no_exception=True)
        if pipeline_code:
            pipeline = Pipeline.get_by_code(pipeline_code)

    if pipeline and process:
        search = Search("config/process")
        search.add_filter("pipeline_code", pipeline_code)
        search.add_filter("process", process)
        process_sobj = search.get_sobject()
        if process_sobj:
            process_code = process_sobj.get_code()
            Trigger.call(self, "%s|%s" % (prefix, base_search_type), output, process=process_code)
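# A minimal sketch (not part of the original source) of attaching a handler
# to the check-in events built above, using the same config/trigger
# registration pattern as _test_trigger above.  It assumes get_trigger_prefix()
# returns "checkin" for a standard check-in; the search type and script path
# are hypothetical.
def _example_register_checkin_trigger():
    trigger = SearchType.create("config/trigger")
    trigger.set_value("event", "checkin|unittest/person")     # prefix|base_search_type
    trigger.set_value("script_path", "triggers/on_checkin")   # hypothetical script
    trigger.commit()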
def handle_action(my):
    my.log_message(my.sobject, my.process, "in_progress")

    process_obj = my.pipeline.get_process(my.process)

    # get the node's triggers
    search = Search("config/process")
    search.add_filter("process", my.process)
    search.add_filter("pipeline_code", my.pipeline.get_code())
    process_sobj = search.get_sobject()
    #process_sobj = my.pipeline.get_process_sobject(my.process)

    triggers = {}
    if process_sobj:
        triggers = process_sobj.get_json_value("workflow")
    if not triggers:
        triggers = {}

    action = triggers.get("on_action")
    cbjs_action = triggers.get("cbjs_action")
    action_path = triggers.get("on_action_path")

    kwargs, input = my.build_trigger_input()

    if action or action_path:
        from tactic.command import PythonCmd
        if action:
            cmd = PythonCmd(code=action, input=input, **kwargs)
        else:
            cmd = PythonCmd(script_path=action_path, input=input, **kwargs)
        ret_val = cmd.execute()

    elif cbjs_action:
        from tactic.command import JsCmd
        cmd = JsCmd(code=cbjs_action, input=input, **kwargs)
        ret_val = cmd.execute()

    else:
        # or call an action trigger
        Trigger.call(my, "process|action", input, process=process_sobj.get_code())

    Trigger.call(my, "process|complete", my.input)
def add_static_triggers(cls):
    # event sthpw/trigger
    from pyasm.command import Trigger

    event = "change|sthpw/task"
    trigger = SearchType.create("sthpw/trigger")
    trigger.set_value("event", event)
    #trigger.set_value("mode", "same process,same transaction")
    trigger.set_value("class_name", "tactic.command.RelatedTaskUpdateTrigger")
    Trigger.append_static_trigger(trigger)

    event = "change|sthpw/task|status"
    trigger = SearchType.create("sthpw/trigger")
    trigger.set_value("event", event)
    #trigger.set_value("mode", "same process,same transaction")
    trigger.set_value("class_name", "tactic.command.TaskCompleteTrigger")
    Trigger.append_static_trigger(trigger)
def _test_approval(my):
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="approval" name="b"/>
      <process type="action" name="c"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # ensure there are no tasks
    tasks = Task.get_by_sobject(sobject, process="b")
    my.assertEquals(0, len(tasks))

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)

    # ensure a task was created for the approval node
    tasks = Task.get_by_sobject(sobject, process="b")
    my.assertEquals(1, len(tasks))
    task = tasks[0]
    my.assertEquals("b", task.get("process"))

    # approve the task
    task.set_value("status", "approved")
    task.commit()

    my.assertEquals("complete", sobject.get_value("b"))
    my.assertEquals("complete", sobject.get_value("c"))
def handle_pending(my):
    my.log_message(my.sobject, my.process, "pending")

    search = Search("config/process")
    search.add_filter("pipeline_code", my.pipeline.get_code())
    search.add_filter("process", my.process)
    process_sobj = search.get_sobject()
    process_code = process_sobj.get_code()

    # use child process
    subpipeline_code = process_sobj.get_value("subpipeline_code")
    if subpipeline_code:
        subpipeline = Search.get_by_code("sthpw/pipeline", subpipeline_code)
    else:
        search = Search("sthpw/pipeline")
        search.add_filter("parent_process", process_code)
        subpipeline = search.get_sobject()

    if not subpipeline:
        return

    # get the input nodes
    child_processes = subpipeline.get_processes(type=['input'])
    if not child_processes:
        child_processes = subpipeline.get_processes()

    if child_processes:
        first_process = child_processes[0]
        first_name = first_process.get_name()
        full_name = "%s.%s" % (my.process, first_name)

        input = {
            'pipeline': subpipeline,
            'sobject': my.sobject,
            'process': full_name,
        }

        event = "process|pending"
        Trigger.call(my, event, input)
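# Note on naming (not in the original source): the composite process name
# built above, e.g. "b.suba" for input node "suba" of a sub-pipeline under
# parent process "b", is what lets handlers such as handle_revise and
# handle_complete split my.process_parts on "." and route events back to the
# parent pipeline when the sub-pipeline starts or finishes.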
def run_callback(my, pipeline, process, status):
    # get the node's triggers
    # TODO: make this more efficient
    search = Search("config/process")
    search.add_filter("pipeline_code", pipeline.get_code())
    search.add_filter("process", process)
    process_sobj = search.get_sobject()
    #print "callback process: ", process, pipeline.get_code()

    if not process_sobj:
        raise TacticException('Process item [%s] has not been created. Please save your pipeline in the Project Workflow Editor to refresh the processes.' % process)

    triggers = {}
    if process_sobj:
        triggers = process_sobj.get_json_value("workflow")
    if not triggers:
        triggers = {}

    ret_val = None

    action = triggers.get("on_%s" % status)
    js_action = triggers.get("cbjs_%s" % status)
    action_path = triggers.get("on_%s_path" % status)

    kwargs, input = my.build_trigger_input()

    if action or action_path:
        from tactic.command import PythonCmd
        if action:
            cmd = PythonCmd(code=action, input=input, **kwargs)
        else:
            cmd = PythonCmd(script_path=action_path, input=input, **kwargs)
        ret_val = cmd.execute()

    elif js_action:
        from tactic.command import JsCmd
        cmd = JsCmd(code=js_action, input=input, **kwargs)
        ret_val = cmd.execute()

    else:
        # or call a trigger
        event = "process|%s" % status

        # how to get the value here?
        process_code = process_sobj.get_code()
        triggers = Trigger.call(my, event, kwargs, process=process_code)
        if triggers:
            ret_val = triggers[0].get_ret_val()

    return ret_val
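# A hedged sketch (not part of the original source) of the "workflow" JSON
# that run_callback consumes.  For a given status it looks up "on_<status>"
# (inline Python), "cbjs_<status>" (inline JavaScript), or "on_<status>_path"
# (a path to a Python script).  The script path and column name below are
# hypothetical; the callbacks only illustrate the naming convention.
def _example_workflow_callbacks(process):
    process.set_json_value("workflow", {
        'on_pending': '''
sobject.set_value("note", "entered pending")
''',
        'cbjs_complete': '''
console.log("complete: " + input.process);
''',
        'on_revise_path': "triggers/on_revise",  # hypothetical script path
    })
    process.commit()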
def __init__(my):
    my.job = None
    my.jobs = []

    my.check_interval = 1
    my.max_jobs = 2

    trigger = TransactionQueueServersTrigger()
    trigger.execute()
    my.servers = Container.get("TransactionQueueServers")

    # add a static trigger
    event = "change|sthpw/sync_server"
    trigger = SearchType.create("sthpw/trigger")
    trigger.set_value("event", event)
    trigger.set_value("class_name", "tactic.command.TransactionQueueServersTrigger")
    trigger.set_value("mode", "same process,same transaction")
    Trigger.append_static_trigger(trigger, startup=True)

    super(TransactionQueueManager, my).__init__()
def execute(my):
    # get the cache list
    login_cache = Cache.get("logins")
    logins = login_cache.get_attr("logins")
    print logins

    # in memory triggers?
    events = login_cache.get_events()
    for event in events:
        trigger_sobj = SearchType.create("sthpw/trigger")
        trigger_sobj.set_value("event", event)
        trigger_sobj.set_value("class_name", "pyasm.search.cache.CacheTrigger")
        Trigger.append_static_trigger(trigger_sobj)

    login = logins[0]
    print "email [%s]" % login.get_value("email")
    login.set_value("email", "*****@*****.**")
    print "email [%s]" % login.get_value("email")
    login.commit()
def run_callback(my, pipeline, process, status):
    # get the node's triggers
    # TODO: make this more efficient
    search = Search("config/process")
    search.add_filter("pipeline_code", pipeline.get_code())
    search.add_filter("process", process)
    process_sobj = search.get_sobject()
    print "callback process: ", process, pipeline.get_code()
    assert(process_sobj)

    triggers = {}
    if process_sobj:
        triggers = process_sobj.get_json_value("workflow")
    if not triggers:
        triggers = {}

    ret_val = None

    action = triggers.get("on_%s" % status)
    js_action = triggers.get("cbjs_%s" % status)
    action_path = triggers.get("on_%s_path" % status)

    kwargs, input = my.build_trigger_input()

    if action or action_path:
        from tactic.command import PythonCmd
        if action:
            cmd = PythonCmd(code=action, input=input, **kwargs)
        else:
            cmd = PythonCmd(script_path=action_path, input=input, **kwargs)
        ret_val = cmd.execute()

    elif js_action:
        from tactic.command import JsCmd
        cmd = JsCmd(code=js_action, input=input, **kwargs)
        ret_val = cmd.execute()

    else:
        # or call a trigger
        event = "process|%s" % status

        # how to get the value here?
        process_code = process_sobj.get_code()
        triggers = Trigger.call(my, event, kwargs, process=process_code)
        if triggers:
            ret_val = triggers[0].get_ret_val()

    return ret_val
def _test_multi_input_reject(self):
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    code = "test%s" % Common.generate_alphanum_key()
    sobject.set_value("code", code)
    sobject.set_value("a1", "complete")
    sobject.set_value("a2", "complete")
    sobject.set_value("a3", "complete")
    sobject.set_value("b", "pending")

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a1"/>
      <process type="action" name="a2"/>
      <process type="action" name="a3"/>
      <process type="approval" name="b"/>
      <connect from="a1" to="b"/>
      <connect from="a2" to="b"/>
      <connect from="a3" to="b"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    # Run the pipeline
    process = "b"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
        "reject_process": ['a1', 'a3']
    }
    Trigger.call(self, "process|reject", output)

    self.assertEquals("revise", sobject.get_value("a1"))
    self.assertEquals("complete", sobject.get_value("a2"))
    self.assertEquals("revise", sobject.get_value("a3"))
def call_triggers(my):
    # call the done trigger for checkin
    from pyasm.command import Trigger
    output = {}
    snapshot = my.get_snapshot()
    output['search_key'] = SearchKey.build_by_sobject(snapshot)
    output['update_data'] = snapshot.data.copy()
    output['snapshot'] = snapshot.get_sobject_dict()
    output['files'] = [x.get_sobject_dict() for x in my.file_objects]

    # DEPRECATED
    #Trigger.call(my, "checkin/done", output)

    prefix = my.get_trigger_prefix()

    # Add the checkin triggers
    base_search_type = my.sobject.get_base_search_type()
    Trigger.call(my, prefix, output)
    Trigger.call(my, "%s|%s" % (prefix, base_search_type), output)
    Trigger.call(my, "%s|%s|%s" % (prefix, base_search_type, my.context), output)

    # get the process (assumption here) and call both on process and process code
    process = my.process
    pipeline = None
    if process:
        Trigger.call(my, "%s|%s" % (prefix, base_search_type), output, process=process)

        pipeline_code = my.sobject.get_value("pipeline_code", no_exception=True)
        if pipeline_code:
            pipeline = Pipeline.get_by_code(pipeline_code)

    if pipeline and process:
        search = Search("config/process")
        search.add_filter("pipeline_code", pipeline_code)
        search.add_filter("process", process)
        process_sobj = search.get_sobject()
        if process_sobj:
            process_code = process_sobj.get_code()
            Trigger.call(my, "%s|%s" % (prefix, base_search_type), output, process=process_code)
def execute(self):
    # get the cache list
    login_cache = Cache.get("logins")
    logins = login_cache.get_attr("logins")
    print logins

    # in memory triggers?
    events = login_cache.get_events()
    for event in events:
        trigger_sobj = SearchType.create("sthpw/trigger")
        trigger_sobj.set_value("event", event)
        trigger_sobj.set_value("class_name", "pyasm.search.cache.CacheTrigger")
        Trigger.append_static_trigger(trigger_sobj)

    login = logins[0]
    print "email [%s]" % login.get_value("email")
    login.set_value("email", "*****@*****.**")
    print "email [%s]" % login.get_value("email")
    login.commit()
def handle_complete(my):
    my.log_message(my.sobject, my.process, "complete")
    my.run_callback(my.pipeline, my.process, "complete")

    search = Search("config/process")
    search.add_filter("subpipeline_code", my.pipeline.get_code())
    if my.process_parts:
        search.add_filter("process", my.process_parts[0])
    supprocess_sobj = search.get_sobject()

    suppipeline_code = supprocess_sobj.get_value("pipeline_code")
    supprocess = supprocess_sobj.get_value("process")
    suppipeline = Search.get_by_code("sthpw/pipeline", suppipeline_code)

    output = {
        'pipeline': suppipeline,
        'sobject': my.sobject,
        'process': supprocess
    }

    event = "process|complete"
    Trigger.call(my, event, output)
def _test_messaging(my):
    # create a dummy sobject
    city = SearchType.create("unittest/city")

    city_pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="action" name="b"/>
      <process type="action" name="c"/>
      <connect from="a" to="b"/>
      <connect from="b" to="c"/>
    </pipeline>
    '''
    city_pipeline, city_processes = my.get_pipeline(city_pipeline_xml)
    city.set_value("pipeline_code", city_pipeline.get_code())
    city.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": city_pipeline,
        "sobject": city,
        "process": process
    }
    Trigger.call(my, "process|pending", output)

    for process in city_processes:
        key = "%s|%s|status" % (city.get_search_key(), process)
        search = Search("sthpw/message")
        search.add_filter("code", key)
        sobject = search.get_sobject()
        message = sobject.get_value("message")
        my.assertEquals("complete", message)
def execute(my): event = "change|config/widget_config" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.SidebarTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) event = "change|sthpw/schema" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.SidebarTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) # when the palette column of the project changes event = "change|sthpw/project|palette" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.SidebarTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) # when the palette column of the project changes event = "change|sthpw/pref_setting" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.SidebarTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) event = "change|sthpw/login_in_group" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.SidebarTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) event = "change|sthpw/login_group" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.SidebarTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) # FIXME: should this really be a web_init trigger. This needs # to be run even from batch commands event = "change|sthpw/task|status" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.StatusLogTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) event = "insert|sthpw/login" trigger = SearchType.create("sthpw/trigger") trigger.set_value("event", event) trigger.set_value("class_name", "pyasm.web.web_init.DisplayNameTrigger") trigger.set_value("mode", "same process,same transaction") Trigger.append_static_trigger(trigger, startup=True) #from tactic.command.queue import JobTask #JobTask.start() from pyasm.biz import Snapshot Snapshot.add_integral_trigger()
def execute(self):
    file_path = self.kwargs.get("path")
    site = self.kwargs.get("site")
    project_code = self.kwargs.get("project_code")
    base_dir = self.kwargs.get("base_dir")
    search_type = self.kwargs.get("search_type")
    process = self.kwargs.get("process")
    watch_script_path = self.kwargs.get("script_path")
    if not process:
        process = "publish"

    basename = os.path.basename(file_path)

    context = self.kwargs.get("context")
    if not context:
        context = '%s/%s' % (process, basename)

    # find the relative_dir and relative_path
    relative_path = file_path.replace("%s/" % base_dir, "")
    relative_dir = os.path.dirname(relative_path)

    file_name = os.path.basename(file_path)
    log_path = '%s/TACTIC_log.txt' % (base_dir)
    self.create_checkin_log()

    # define the asset type of the file
    asset_type = self.get_asset_type(file_path)
    description = "drop folder check-in of %s" % file_name

    from client.tactic_client_lib import TacticServerStub
    server = TacticServerStub.get(protocol='local')
    server.set_project(project_code)

    transaction = Transaction.get(create=True)
    server.start(title='Check-in of media', description='Check-in of media')

    server_return_value = {}

    try:
        filters = [
            ['name', '=', file_name],
            #['relative_dir', '=', relative_dir]
        ]
        sobj = server.query(search_type, filters=filters, single=True)

        if not sobj:
            # create the sobject if it does not yet exist
            sobj = SearchType.create(search_type)
            if SearchType.column_exists(search_type, "name"):
                sobj.set_value("name", basename)
            if SearchType.column_exists(search_type, "media_type"):
                sobj.set_value("media_type", asset_type)
            if SearchType.column_exists(search_type, "relative_dir"):
                sobj.set_value("relative_dir", relative_dir)
            if SearchType.column_exists(search_type, "keywords"):
                keywords = Common.extract_keywords_from_path(relative_path)
                keywords = " ".join(keywords)
                sobj.set_value("keywords", keywords)
            sobj.commit()
            search_key = sobj.get_search_key()
        else:
            search_key = sobj.get("__search_key__")

        #task = server.create_task(sobj.get('__search_key__'), process='publish')
        #server.update(task, {'status': 'New'})

        """
        #TEST: simulate different check-in durations
        from random import randint
        sec = randint(1, 5)
        print "checking in for ", sec, "sec"
        server.eval("@SOBJECT(sthpw/login)")

        import shutil
        dir_name, base_name = os.path.split(file_path)
        dest_dir = 'C:/ProgramData/Southpaw/watch_temp'
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        shutil.move(file_path, '%s/%s' % (dest_dir, base_name))
        time.sleep(sec)

        # move back the file in a few seconds
        shutil.move('%s/%s' % (dest_dir, base_name), file_path)
        """

        server_return_value = server.simple_checkin(
            search_key, context, file_path,
            description=description, mode='move')

        if watch_script_path:
            cmd = PythonCmd(
                script_path=watch_script_path,
                search_type=search_type,
                drop_path=file_path,
                search_key=search_key)
            cmd.execute()

    except Exception as e:
        print "Error occurred", e
        error_message = str(e)

        import traceback
        tb = sys.exc_info()[2]
        stacktrace = traceback.format_tb(tb)
        stacktrace_str = "".join(stacktrace)
        print "-" * 50
        print stacktrace_str

        version_num = 'Error:'
        system_time = strftime("%Y/%m/%d %H:%M", gmtime())
        pre_log = file_name + (50 - len(file_name)) * ' ' \
            + system_time + (33 - len(system_time)) * ' ' \
            + version_num + (15 - len(version_num)) * ' ' \
            + error_message + '\n' \
            + stacktrace_str + '\n' + watch_script_path

        # write the data into the TACTIC_log file under /tmp/drop
        f = open(log_path, 'a')
        f.write(pre_log)
        f.close()

        #server.abort()
        transaction.rollback()
        raise

    else:
        transaction.commit()
        #server.finish()

        if server_return_value:
            # write a line to the TACTIC_log file to record every check-in.
            # Search for all required data
            checkin_time = server_return_value.get('timestamp')
            version_nu = server_return_value.get('version')
            version_num = str(version_nu)
            try:
                value = parser.parse(checkin_time)
                value = value.strftime("%Y/%m/%d %H:%M")
            except:
                value = checkin_time

            pre_log = file_name + (50 - len(file_name)) * ' ' \
                + value + (33 - len(value)) * ' ' \
                + version_num + (15 - len(version_num)) * ' ' + 'ok\n'

            # write the data into the TACTIC_log file under /tmp/drop
            f = open(log_path, 'a')
            f.write(pre_log)
            f.close()

            # invoke any queued triggers
            from pyasm.command import Trigger
            Trigger.call_all_triggers()

            # delete the source file after the check-in step
            print "File handled."
            if os.path.exists(file_path):
                if os.path.isdir(file_path):
                    import shutil
                    shutil.rmtree(file_path)
                else:
                    os.unlink(file_path)
                print "Source file [%s] deleted: " % file_name
def handle_condition_node(my, sobject, pipeline, process, triggers):
    ret_val = my.run_callback(pipeline, process, "action")

    # if a None return value was given, then probably no condition exists
    # yet, so just let it flow through
    if ret_val == None:
        ret_val = True

    # run the completion trigger for this node
    Trigger.call(my, "process|complete", my.input)

    if ret_val == True:
        success_cbk = triggers.get("on_success")
        if success_cbk:
            cmd = PythonCmd(code=success_cbk, sobject=sobject)
            cmd.execute()
            return
        else:
            event = "process|pending"
            attr = "success"
            direction = "output"
            processes = pipeline.get_output_processes(process, from_attr=attr)
            if not processes:
                attr = None

    elif ret_val == False:
        fail_cbk = triggers.get("on_fail")
        if fail_cbk:
            cmd = PythonCmd(code=fail_cbk, sobject=sobject)
            cmd.execute()
            return
        else:
            event = "process|revise"

            # check to see if there is an output process
            attr = "fail"
            processes = pipeline.get_output_processes(process, from_attr=attr)
            if processes:
                direction = "output"
            else:
                direction = "input"
                attr = None

    else:
        event = "process|pending"

        if isinstance(ret_val, basestring):
            ret_val = [ret_val]

        output_processes = []
        for attr in ret_val:
            outputs = pipeline.get_output_processes(process, from_attr=attr)
            if outputs:
                output_processes.extend(outputs)

        # if there are no output attrs, then check the node names
        if not output_processes:
            outputs = pipeline.get_output_processes(process)
            for output in outputs:
                if output.get_name() in ret_val:
                    output_processes.append(output)

        for output_process in output_processes:
            output_process_name = output_process.get_name()

            output = {
                'sobject': sobject,
                'pipeline': pipeline,
                'process': output_process_name,
            }
            Trigger.call(my, event, output)

        return

    # by default, go back to the incoming or outgoing processes
    if direction == "input":
        processes = pipeline.get_input_processes(process, to_attr=attr)
    else:
        processes = pipeline.get_output_processes(process, from_attr=attr)

    for process in processes:
        process_name = process.get_name()

        output = {
            'sobject': sobject,
            'pipeline': pipeline,
            'process': process_name,
        }
        Trigger.call(my, event, output)
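# A hedged example (not part of the original source) of a condition node's
# "workflow" entry, matching the branches above: on_action decides the
# outcome, and on_success/on_fail, when present, replace the default routing
# along the "success" and "fail" connector attributes.  Returning a list of
# stream names instead, as in _test_choice above, fans out to the named
# output connectors.  The column names used here are illustrative only.
def _example_condition_workflow(process):
    process.set_json_value("workflow", {
        'on_action': '''
# return True, False, or a list of output stream names
return sobject.get_value("status") != "rejected"
''',
        'on_success': '''
sobject.set_value("note", "condition passed")
''',
        'on_fail': '''
sobject.set_value("note", "condition failed")
''',
    })
    process.commit()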