def create(my): project = Project.get_by_code(my.project_code) if project: my.delete() print "Setting up a basic Sample3d project" # create the project create_cmd = CreateProjectCmd( project_code=my.project_code, project_title="Sample 3D") #, project_type="unittest") create_cmd.execute() # install the unittest plugin installer = PluginInstaller(relative_dir="TACTIC/internal/sample3d", verbose=False) installer.execute() # add 30 shots for x in xrange(30): shot = SearchType.create("prod/shot") shot.set_value('name', 'shot%s' % x) shot.set_value('sequence_code', 'SEQ_01') shot.commit(triggers=False) seq = SearchType.create("prod/sequence") seq.set_value('code', 'SEQ_01') seq.commit(triggers=False)
def create(self): project = Project.get_by_code(self.project_code) if project: self.delete() print "Setting up a basic Sample3d project" # create the project create_cmd = CreateProjectCmd(project_code=self.project_code, project_title="Sample 3D") #, project_type="unittest") create_cmd.execute() # install the unittest plugin installer = PluginInstaller(relative_dir="TACTIC/internal/sample3d", verbose=False) installer.execute() # add 30 shots for x in xrange(30): shot = SearchType.create("prod/shot") shot.set_value('name','shot%s'%x) shot.set_value('sequence_code','SEQ_01') shot.commit(triggers=False) if not Search.eval("@SOBJECT(prod/sequence['code','SEQ_01'])"): seq = SearchType.create("prod/sequence") seq.set_value('code','SEQ_01') seq.commit(triggers=False)
def _test_base_dir_alias(my):
    """Verify that named asset_base_dir aliases are honoured: a naming
    entry with base_dir_alias must route a checkin into that root."""
    # register three asset roots: the default plus two named aliases
    Config.set_value("checkin", "asset_base_dir", {
        'default': '/tmp/tactic/default',
        'alias': '/tmp/tactic/alias',
        'alias2': '/tmp/tactic/alias2',
    })
    asset_dict = Environment.get_asset_dirs()
    default_dir = asset_dict.get("default")
    my.assertEquals("/tmp/tactic/default", default_dir)

    aliases = asset_dict.keys()
    # "plugins" is assumed in some branch
    if 'plugins' in aliases:
        my.assertEquals(4, len(aliases))
    else:
        my.assertEquals(3, len(aliases))
    # NOTE(review): ("alias" in aliases) is a bool and never None, so this
    # assertion always passes; assertEquals(True, ...) was likely intended
    my.assertNotEquals(None, "alias" in aliases)

    # create a naming: checkins for context "alias" go to the "alias" root
    naming = SearchType.create("config/naming")
    naming.set_value("search_type", "unittest/person")
    naming.set_value("context", "alias")
    naming.set_value("dir_naming", "alias")
    naming.set_value("file_naming", "text.txt")
    naming.set_value("base_dir_alias", "alias")
    naming.commit()

    # create 2nd naming for context "alias2" (auto checkin_type)
    naming = SearchType.create("config/naming")
    naming.set_value("search_type", "unittest/person")
    naming.set_value("context", "alias2")
    naming.set_value("dir_naming", "alias2")
    naming.set_value("base_dir_alias", "alias2")
    naming.set_value("file_naming", "text.txt")
    naming.set_value("checkin_type", "auto")
    naming.commit()

    # drop any cached naming so the new entries take effect
    my.clear_naming()

    # create a new test.txt file and check it in under each alias context
    for context in ['alias', 'alias2']:
        file_path = "./test.txt"
        file = open(file_path, 'w')
        file.write("whatever")
        file.close()

        checkin = FileCheckin(my.person, file_path, context=context)
        checkin.execute()

        # the snapshot must land under the aliased root
        snapshot = checkin.get_snapshot()
        lib_dir = snapshot.get_lib_dir()
        expected = "/tmp/tactic/%s/%s" % (context, context)
        my.assertEquals(expected, lib_dir)

        # and the renamed file (file_naming -> text.txt) must exist there
        path = "%s/text.txt" % (lib_dir)
        exists = os.path.exists(path)
        my.assertEquals(True, exists)
def _test_time(self):
    ''' test timezone related behavior: a naive local time survives the
    API round trip while the database stores the GMT-shifted value'''
    sobject = SearchType.create('sthpw/task')
    sobject.set_value('project_code', 'unittest')
    sobject.set_value('bid_start_date', '2014-11-11 05:00:00')
    # value is unchanged in memory both before and after the commit
    time = sobject.get_value('bid_start_date')
    self.assertEquals(time, '2014-11-11 05:00:00')
    sobject.commit()
    time = sobject.get_value('bid_start_date')
    self.assertEquals(time, '2014-11-11 05:00:00')

    # peek directly at the stored column, bypassing the API conversion
    from pyasm.search import DbContainer
    sql = DbContainer.get('sthpw')
    db_value = sql.do_query(
        'SELECT bid_start_date from task where id = %s' % sobject.get_id())
    # 2014-11-11 00:00:00 is actually written to the database
    self.assertEquals(db_value[0][0].strftime('%Y-%m-%d %H:%M:%S %Z'),
                      '2014-11-11 00:00:00 ')

    # an sType specified without a project but with an id could be a common human error
    # but it should handle that fine (none of these should raise)
    obj1 = Search.eval(
        '@SOBJECT(unittest/person?project=unittest["id", "%s"])' % sobject.get_id(),
        single=True)
    obj2 = Search.eval('@SOBJECT(unittest/person?id=2["id", "%s"])' % sobject.get_id(),
                       single=True)
    obj3 = Search.eval('@SOBJECT(sthpw/task?id=2["id", "%s"])' % sobject.get_id(),
                       single=True)
    task = Search.eval('@SOBJECT(sthpw/task["id", "%s"])' % sobject.get_id(),
                       single=True)
    # EST and GMT diff is 5 hours
    self.assertEquals(task.get_value('bid_start_date'), '2014-11-11 05:00:00')

    # test NOW() auto conversion: timestamp is empty until commit
    sobj = SearchType.create('sthpw/note')
    sobj.set_value('process', 'TEST')
    sobj.set_value('note', '123')
    self.assertEquals(sobj.get_value('timestamp'), "")
    sobj.commit()

    # this is local commited time converted back to GMT
    committed_time = sobj.get_value('timestamp')
    from dateutil import parser
    committed_time = parser.parse(committed_time)
    from pyasm.common import SPTDate
    now = SPTDate.now()
    diff = now - committed_time
    # should be roughly the same minute, not hours apart
    self.assertEquals(diff.seconds < 60, True)
def _test_progress_reject(self):
    """Disabled test: would check that rejecting downstream of a progress
    node propagates "revise" to the tracked sobjects."""
    # FIXME: it is not completely clear what should happen when a progress
    # node receives a revise message.
    return
    # NOTE: everything below is currently dead code, kept for when the
    # behavior above is decided.

    # create a dummy sobject
    city = SearchType.create("unittest/city")
    people = []

    # person pipeline: a single action node "p1"
    person_pipeline_xml = '''
    <pipeline>
    <process type="action" name="p1"/>
    </pipeline>
    '''
    person_pipeline, person_processes = self.get_pipeline(
        person_pipeline_xml, search_type="unittest/person")
    person_pipeline_code = person_pipeline.get_value("code")

    # city pipeline: progress node "c1" tracks p1 of the person pipeline
    city_pipeline_xml = '''
    <pipeline>
    <process type="progress" name="c1" pipeline_code="%s" search_type="unittest/person" process="p1" status="complete"/>
    <process type="approval" name="c2"/>
    <connect from="c1" to="c2"/>
    </pipeline>
    ''' % person_pipeline_code
    city_pipeline, city_processes = self.get_pipeline(
        city_pipeline_xml, search_type="unittest/city")
    city.set_value("pipeline_code", city_pipeline.get_code())
    city.commit()

    # clear any process listeners registered by earlier tests
    from pyasm.common import Container
    Container.put("process_listeners", None)

    # three people attached to the city, all with p1 complete
    for name in ['Beth', 'Cindy', 'John']:
        person = SearchType.create("unittest/person")
        person.set_value("name_first", name)
        person.set_value("pipeline_code", person_pipeline.get_code())
        person.set_value("city_code", city.get_code())
        person.commit()
        person.set_value("p1", "complete")
        people.append(person)

    # reject at c2; expectation: every tracked person falls back to revise
    process = "c2"
    output = {
        "pipeline": city_pipeline,
        "sobject": city,
        "process": process
    }
    Trigger.call(self, "process|reject", output)

    for person in people:
        self.assertEquals("revise", person.get_value("p1"))
def _test_base_dir_alias(my):
    """Checkins routed through config/naming entries with base_dir_alias
    must land in the matching aliased asset root."""
    # three asset roots: default + two named aliases
    Config.set_value("checkin", "asset_base_dir", {
        'default': '/tmp/tactic/default',
        'alias': '/tmp/tactic/alias',
        'alias2': '/tmp/tactic/alias2',
    })

    dirs = Environment.get_asset_dirs()
    my.assertEquals("/tmp/tactic/default", dirs.get("default"))

    alias_names = dirs.keys()
    # "plugins" is assumed in some branch
    expected_count = 4 if 'plugins' in alias_names else 3
    my.assertEquals(expected_count, len(alias_names))
    my.assertNotEquals(None, "alias" in alias_names)

    # one naming entry per alias context; the second uses auto checkin
    for ctx, extra in [("alias", {}), ("alias2", {"checkin_type": "auto"})]:
        entry = SearchType.create("config/naming")
        entry.set_value("search_type", "unittest/person")
        entry.set_value("context", ctx)
        entry.set_value("dir_naming", ctx)
        entry.set_value("file_naming", "text.txt")
        entry.set_value("base_dir_alias", ctx)
        for opt_name, opt_value in extra.items():
            entry.set_value(opt_name, opt_value)
        entry.commit()

    # flush cached namings so the entries above are picked up
    my.clear_naming()

    # check a fresh test.txt in under each aliased context
    for ctx in ['alias', 'alias2']:
        src_path = "./test.txt"
        handle = open(src_path, 'w')
        handle.write("whatever")
        handle.close()

        checkin = FileCheckin(my.person, src_path, context=ctx)
        checkin.execute()

        # snapshot must live under the aliased root
        lib_dir = checkin.get_snapshot().get_lib_dir()
        my.assertEquals("/tmp/tactic/%s/%s" % (ctx, ctx), lib_dir)

        # and the renamed file must exist on disk there
        target = "%s/text.txt" % (lib_dir)
        my.assertEquals(True, os.path.exists(target))
def get_message(my):
    """Return the notification body: the evaluated "message" expression
    when one is configured, otherwise a generic transaction report."""
    stype_obj = my.sobject.get_search_type_obj()
    title = stype_obj.get_title()
    subject = my.get_subject()

    template = my.notification.get_value("message")
    if not template:
        # no expression configured -- fall back to the canned report
        header = "%s %s" % (title, my.sobject.get_name())
        return '%s\n\nReport from transaction:\n%s\n' % (header, subject)

    # elevate privileges while evaluating the expression
    sudo = Sudo()
    parser = ExpressionParser()

    def as_virtual(data_dict):
        # wrap a plain dict into a sthpw/virtual sobject (id column removed)
        virtual = SearchType.create("sthpw/virtual")
        id_col = virtual.get_id_col()
        if id_col:
            del virtual.data[id_col]
        if data_dict:
            for key, val in data_dict.items():
                if val != None:
                    virtual.set_value(key, val)
        return virtual

    env_sobjects = {}
    snapshot = my.input.get('snapshot')
    if snapshot:
        env_sobjects = {'snapshot': snapshot}
    # expose prev/update data from the input to the expression language
    env_sobjects['prev_data'] = as_virtual(my.input.get("prev_data"))
    env_sobjects['update_data'] = as_virtual(my.input.get("update_data"))

    result = parser.eval(template, my.sobject,
                         env_sobjects=env_sobjects, mode='string')
    del sudo
    return result
def execute(my):
    """Reconstruct a sthpw/transaction_log sobject from the incoming
    "transaction_xml" kwarg (dict, SObject, or raw xml) and commit it."""
    import types
    transaction_xml = my.kwargs.get("transaction_xml")
    file_mode = my.kwargs.get("file_mode")
    if not file_mode:
        file_mode = 'delayed'

    # if the first argument is a dictionary, then the whole
    # transaction sobject was passed through
    # NOTE: this is now the default
    if type(transaction_xml) == types.DictType:
        transaction_dict = transaction_xml
        transaction_xml = transaction_dict.get("transaction")
        timestamp = transaction_dict.get("timestamp")
        login = transaction_dict.get("login")

        # recreate the transaction, skipping private ("__") keys, the id
        # column and empty values
        transaction = SearchType.create("sthpw/transaction_log")
        for name, value in transaction_dict.items():
            if name.startswith("__"):
                continue
            if name == 'id':
                continue
            if value == None:
                continue
            transaction.set_value(name, value)
    elif isinstance(transaction_xml, SObject):
        # already a full sobject -- use as is
        transaction = transaction_xml
    else:
        # Create a fake transaction.
        # This is only used for test purposes
        transaction = SearchType.create("sthpw/transaction_log")
        if transaction_xml:
            transaction.set_value("transaction", transaction_xml)
        else:
            print "WARNING: transaction xml is empty"
        transaction.set_value("login", "admin")

    # commit the new transaction. This is the only case where
    # a transaction will not have a code, so it has to be committed.
    # The other case do not need to be committed because they will
    # already have codes and the transaction is committed in
    # RedoCmd
    try:
        transaction.commit()
    except Exception, e:
        # best-effort: a duplicate transaction is skipped, not fatal
        print "Failed to commit transaction [%s]: It may already exist. Skipping." % transaction.get_code( )
        print str(e)
        return
def get_message(my):
    """Build the notification message text.

    If the notification has a "message" expression it is evaluated with
    snapshot/prev_data/update_data exposed as environment sobjects;
    otherwise a plain transaction report string is returned.
    """
    search_type_obj = my.sobject.get_search_type_obj()
    title = search_type_obj.get_title()
    subject = my.get_subject()
    notification_message = my.notification.get_value("message")
    if notification_message:
        # parse it through the expression (with elevated privileges)
        sudo = Sudo()
        parser = ExpressionParser()
        snapshot = my.input.get('snapshot')
        env_sobjects = {}

        # turn prev_data and update_data from input into sobjects
        prev_data = SearchType.create("sthpw/virtual")
        id_col = prev_data.get_id_col()
        if id_col:
            # virtual sobjects should not carry an id column
            del prev_data.data[id_col]
        prev_dict = my.input.get("prev_data")
        if prev_dict:
            for name, value in prev_dict.items():
                if value != None:
                    prev_data.set_value(name, value)

        update_data = SearchType.create("sthpw/virtual")
        id_col = update_data.get_id_col()
        if id_col:
            del update_data.data[id_col]
        update_dict = my.input.get("update_data")
        if update_dict:
            for name, value in update_dict.items():
                if value != None:
                    update_data.set_value(name, value)

        # expose the environment sobjects to the expression language
        if snapshot:
            env_sobjects = {'snapshot': snapshot}
        env_sobjects['prev_data'] = prev_data
        env_sobjects['update_data'] = update_data

        notification_message = parser.eval(notification_message, my.sobject,
                                           env_sobjects=env_sobjects, mode='string')
        del sudo
        return notification_message

    # fallback: generic "<title> <name>" report with the subject appended
    message = "%s %s" % (title, my.sobject.get_name())
    message = '%s\n\nReport from transaction:\n%s\n' % (message, subject)
    return message
def execute(my): import types transaction_xml = my.kwargs.get("transaction_xml") file_mode = my.kwargs.get("file_mode") if not file_mode: file_mode = 'delayed' # if the first argument is a dictionary, then the whole # transaction sobject was passed through # NOTE: this is now the default if type(transaction_xml) == types.DictType: transaction_dict = transaction_xml transaction_xml = transaction_dict.get("transaction") timestamp = transaction_dict.get("timestamp") login = transaction_dict.get("login") # recreate the transaction transaction = SearchType.create("sthpw/transaction_log") for name, value in transaction_dict.items(): if name.startswith("__"): continue if name == 'id': continue if value == None: continue transaction.set_value(name, value) elif isinstance(transaction_xml, SObject): transaction = transaction_xml else: # Create a fake transaction. # This is only used for test purposes transaction = SearchType.create("sthpw/transaction_log") if transaction_xml: transaction.set_value("transaction", transaction_xml) else: print "WARNING: transaction xml is empty" transaction.set_value("login", "admin") # commit the new transaction. This is the only case where # a transaction will not have a code, so it has to be committed. # The other case do not need to be committed because they will # already have codes and the transaction is committed in # RedoCmd # try: transaction.commit() except Exception, e: print "Failed to commit transaction [%s]: It may already exist. Skipping." % transaction.get_code() print str(e) return
def create(name, desc, search_type, xml=None, code=None, color=None):
    '''Create a new pipeline sobject with one config/process entry per
    process node.

    If a pipeline with the given code already exists it is returned
    unchanged -- no update is performed.  (The previous docstring
    claimed it "just updates"; the code never did.)
    '''
    if code:
        sobject = Pipeline.get_by_code(code)
    else:
        sobject = None

    # BUGFIX: identity comparison with None (was "== None", which can be
    # hijacked by a custom __eq__)
    if sobject is None:
        #sobject = Pipeline( Pipeline.SEARCH_TYPE )
        sobject = SearchType.create(Pipeline.SEARCH_TYPE)
    else:
        # existing pipeline: return as-is
        return sobject

    # default to an empty <pipeline/> document
    if not xml:
        xml = Xml()
        xml.create_doc('pipeline')
    # a raw xml string is parsed into an Xml object
    if isinstance(xml, basestring):
        xml_string = xml
        xml = Xml()
        xml.read_string(xml_string)

    sobject.set_value("pipeline", xml.get_xml())
    sobject.set_pipeline(xml.to_string())
    sobject.set_value('timestamp', Sql.get_default_timestamp_now(), quoted=False)
    if code:
        sobject.set_value('code', code.strip())
    sobject.set_value('name', name.strip())
    sobject.set_value('search_type', search_type)
    sobject.set_value('description', desc)
    if color:
        sobject.set_value("color", color)
    sobject.commit()

    # mirror each process node as a config/process entry, preserving order
    process_names = sobject.get_process_names()
    for i, process_name in enumerate(process_names):
        process = SearchType.create("config/process")
        process.set_value("pipeline_code", sobject.get_code())
        process.set_value("process", process_name)
        process.set_value("sort_order", i)
        process.set_value("subcontext_options", "(main)")
        process.commit()

    return sobject
def get_pipeline(my, pipeline_xml, add_tasks=False):
    """Create a throwaway sthpw/pipeline from *pipeline_xml* plus a
    config/process entry (with workflow callbacks) for each process.

    Returns (pipeline, processes_dict) where processes_dict maps
    process name -> config/process sobject.
    """
    pipeline = SearchType.create("sthpw/pipeline")
    pipeline.set_pipeline(pipeline_xml)
    pipeline_id = random.randint(0, 10000000)
    #pipeline.set_value("code", "test%s" % pipeline_id)
    #pipeline.set_id(pipeline_id)
    #pipeline.set_value("id", pipeline_id)
    pipeline.set_value("pipeline", pipeline_xml)
    pipeline.commit()

    process_names = pipeline.get_process_names()

    # delete any pre-existing processes with the same names
    search = Search("config/process")
    search.add_filters("process", process_names)
    processes = search.get_sobjects()
    for process in processes:
        process.delete()

    # create new processes with on_complete/on_approve workflow hooks
    processes_dict = {}
    for process_name in process_names:
        process = SearchType.create("config/process")
        process.set_value("process", process_name)
        process.set_value("pipeline_code", pipeline.get_code())
        process.set_json_value("workflow", {
            'on_complete': '''
            sobject.set_value('%s', "complete")
            ''' % process_name,
            'on_approve': '''
            sobject.set_value('%s', "approve")
            ''' % process_name,
        })
        process.commit()
        processes_dict[process_name] = process

        # Note: we don't have an sobject yet
        if add_tasks:
            # BUGFIX: was "SaerchType.create", which raised NameError.
            # FIXME: "sobject" is still undefined in this scope, so
            # add_tasks=True cannot work until a parent is passed in.
            task = SearchType.create("sthpw/task")
            task.set_parent(sobject)
            task.set_value("process", process_name)
            task.commit()

    return pipeline, processes_dict
def get_pipeline(my, pipeline_xml, add_tasks=False):
    """Build a temporary sthpw/pipeline from *pipeline_xml* and register
    one config/process (with workflow callbacks) per process node.

    Returns (pipeline, processes_dict): process name -> config/process.
    """
    pipeline = SearchType.create("sthpw/pipeline")
    pipeline.set_pipeline(pipeline_xml)
    pipeline_id = random.randint(0, 10000000)
    #pipeline.set_value("code", "test%s" % pipeline_id)
    #pipeline.set_id(pipeline_id)
    #pipeline.set_value("id", pipeline_id)
    pipeline.set_value("pipeline", pipeline_xml)
    pipeline.commit()

    process_names = pipeline.get_process_names()

    # delete the processes left over from previous runs
    search = Search("config/process")
    search.add_filters("process", process_names)
    processes = search.get_sobjects()
    for process in processes:
        process.delete()

    # create new processes
    processes_dict = {}
    for process_name in process_names:
        # define the process nodes
        process = SearchType.create("config/process")
        process.set_value("process", process_name)
        process.set_value("pipeline_code", pipeline.get_code())
        process.set_json_value("workflow", {
            'on_complete': '''
            sobject.set_value('%s', "complete")
            ''' % process_name,
            'on_approve': '''
            sobject.set_value('%s', "approve")
            ''' % process_name,
        })
        process.commit()
        processes_dict[process_name] = process

        # Note: we don't have an sobject yet
        if add_tasks:
            # BUGFIX: fixed "SaerchType" typo that raised NameError.
            # FIXME: "sobject" is undefined here (see note above), so the
            # add_tasks branch still needs a parent sobject to function.
            task = SearchType.create("sthpw/task")
            task.set_parent(sobject)
            task.set_value("process", process_name)
            task.commit()

    return pipeline, processes_dict
def _test_sobject_hierarchy(my):
    """Disabled: snapshot_type hierarchy lookup across sthpw/prod."""
    # FIXME: this functionality has been disabled until further notice
    return

    # dead code below, kept for when the feature returns: create the
    # same snapshot_type code at both levels and look it up
    for stype in ("sthpw/snapshot_type", "prod/snapshot_type"):
        entry = SearchType.create(stype)
        entry.set_value("code", "maya_model")
        entry.commit()

    snapshot_type = SnapshotType.get_by_code("maya_model")
def _test_time(my):
    ''' test timezone related behavior: naive local input round-trips
    through the API while the database holds the GMT-shifted value'''
    sobject = SearchType.create('sthpw/task')
    sobject.set_value('project_code','unittest')
    sobject.set_value('bid_start_date', '2014-11-11 05:00:00')
    # unchanged in memory before and after commit
    time = sobject.get_value('bid_start_date')
    my.assertEquals(time, '2014-11-11 05:00:00')
    sobject.commit()
    time = sobject.get_value('bid_start_date')
    my.assertEquals(time, '2014-11-11 05:00:00')

    # read the raw column, bypassing the API's timezone conversion
    from pyasm.search import DbContainer
    sql = DbContainer.get('sthpw')
    db_value = sql.do_query('SELECT bid_start_date from task where id = %s'%sobject.get_id())
    # 2014-11-11 00:00:00 is actually written to the database
    my.assertEquals(db_value[0][0].strftime('%Y-%m-%d %H:%M:%S %Z'), '2014-11-11 00:00:00 ')

    # an sType specified without a project but with an id could be a common human error
    # but it should handle that fine (none of these should raise)
    obj1 = Search.eval('@SOBJECT(unittest/person?project=unittest["id", "%s"])'%sobject.get_id(), single=True)
    obj2= Search.eval('@SOBJECT(unittest/person?id=2["id", "%s"])'%sobject.get_id(), single=True)
    obj3 = Search.eval('@SOBJECT(sthpw/task?id=2["id", "%s"])'%sobject.get_id(), single=True)
    task = Search.eval('@SOBJECT(sthpw/task["id", "%s"])'%sobject.get_id(), single=True)
    # EST and GMT diff is 5 hours
    my.assertEquals(task.get_value('bid_start_date'), '2014-11-11 05:00:00')

    # test NOW() auto conversion: timestamp stays empty until commit
    sobj = SearchType.create('sthpw/note')
    sobj.set_value('process','TEST')
    sobj.set_value('note','123')
    my.assertEquals(sobj.get_value('timestamp'), "")
    sobj.commit()
    # this is local commited time converted back to GMT
    committed_time = sobj.get_value('timestamp')
    from dateutil import parser
    committed_time = parser.parse(committed_time)
    from pyasm.common import SPTDate
    now = SPTDate.now()
    diff = now - committed_time
    # should be roughly the same minute, not hours apart
    my.assertEquals(diff.seconds < 60, True)
def _test_trigger(my):
    """Register a config/trigger + custom_script pair on a process and
    fire the pipeline to make sure the script path is resolved and run."""
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")
    pipeline_xml = '''
    <pipeline>
    <process type="action" name="a"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)
    process = processes.get("a")
    # strip the default workflow so only the db trigger below fires
    process.set_value("workflow", "")
    process.commit()

    # random folder keeps this run's script isolated from previous runs
    folder = Common.generate_alphanum_key()
    Trigger.clear_db_cache()
    event = "process|action"

    # trigger: on "process|action" for this process, run the script below
    trigger = SearchType.create("config/trigger")
    trigger.set_value("event", event)
    trigger.set_value("process", process.get_code())
    trigger.set_value("mode", "same process,same transaction")
    trigger.set_value("script_path", "%s/process_trigger" % folder)
    trigger.commit()

    # the script just dumps its input dict
    script = SearchType.create("config/custom_script")
    script.set_value("folder", folder)
    script.set_value("title", "process_trigger")
    script.set_value("script", '''
    print "---"
    for key, value in input.items():
        print key, value
    print "---"
    print "process: ", input.get("process")
    ''')
    script.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)
def _test_js(my):
    """Drive a single-node pipeline whose workflow is a javascript
    (cbjs_action) callback."""
    # dummy sobject to run the pipeline against
    dummy = SearchType.create("sthpw/virtual")
    dummy.set_value("code", "test")

    # simple condition
    pipeline_xml = '''
    <pipeline>
    <process type="action" name="a"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    # replace the default workflow with a js callback
    node = processes.get("a")
    node.set_json_value("workflow", {
        'cbjs_action': '''
        console.log("This is javascript");
        console.log(input);
        return false
        '''
    })
    node.commit()

    trigger_input = {
        "pipeline": pipeline,
        "sobject": dummy,
        "process": "a",
        "status": "pending"
    }

    import time
    start = time.time()
    Trigger.call(my, "process|pending", trigger_input)
def postprocess(self):
    """After an edit, connect the freshly inserted sobject to its parent
    through an instance sobject (self-relating via the "path" option)."""
    web = WebContainer.get_web()

    # nothing submitted for this input: nothing to do
    submitted = web.get_form_value(self.get_input_name())
    if not submitted:
        return

    # get all fo the sobjects from the search keys
    instance_type = self.get_option("instance_type")
    # path is used for self-relating in an instance table
    src_path = self.get_option("path")

    # re-fetch the edited sobject from the database
    lookup = Search(self.sobject.get_search_type())
    lookup.add_id_filter(self.sobject.get_id())
    src_sobject = lookup.get_sobject()

    # this is passed in from EditCmd in insert mode; in some rare cases
    # project is the parent_key, which we skip
    parent_key = self.get_option('parent_key')
    if not parent_key or not self.is_insert or 'sthpw/project' in parent_key:
        return

    # this is the parent; relate the new sobject to it via an instance
    dst_sobject = SearchKey.get_by_search_key(parent_key)
    instance = SearchType.create(instance_type)
    instance.add_related_connection(src_sobject, dst_sobject, src_path=src_path)
    instance.commit()
def create(sobject, value, prev_value=None):
    """Record a status transition for *sobject* in sthpw/status_log.

    No entry is written when the status did not actually change.
    Returns the committed status_log sobject (or None on a no-op).
    """
    if prev_value == value:
        return

    entry = SearchType.create("sthpw/status_log")
    entry.set_value("login", Environment.get_user_name())
    # records search_type/search_code/search_id of the target sobject
    entry.set_sobject_value(sobject)

    if prev_value:
        entry.set_value("from_status", prev_value)
    entry.set_value("to_status", value)

    entry.set_value("project_code", Project.get_project_name())
    entry.commit(cache=False)

    return entry
def _test_custom_status(self):
    """Custom task statuses map to internal events through the "mapping"
    and direction/status attributes of the task status pipeline."""
    task_pipeline_xml = '''
    <pipeline>
    <process name="Pending"/>
    <process name="Do It"/>
    <process name="Fix it" mapping="revise"/>
    <process name="Push Back" mapping="reject"/>
    <process name="Revise"/>
    <process name="Go to Do It" direction="output" status="Do It"/>
    <process name="Accept" mapping="complete"/>
    </pipeline>
    '''
    custom_pipeline, _ = self.get_pipeline(task_pipeline_xml)
    custom_pipeline.set_value("code", "custom_task")
    custom_pipeline.commit()

    # dummy sobject driven through a 3-node pipeline; the first two
    # nodes use the custom task status pipeline above
    dummy = SearchType.create("sthpw/virtual")
    dummy.set_value("code", "test")
    pipeline_xml = '''
    <pipeline>
    <process task_pipeline="custom_task" type="manual" name="a"/>
    <process task_pipeline="custom_task" type="action" name="b"/>
    <process type="action" name="c"/>
    <connect from="a" to="b"/>
    <connect from="b" to="c"/>
    </pipeline>
    '''
    pipeline, _ = self.get_pipeline(pipeline_xml)
    dummy.set_value("pipeline_code", pipeline.get_code())

    # "Push Back" on b maps to reject: b rejected, a sent back to revise
    Trigger.call(self, "process|custom", {
        "pipeline": pipeline,
        "sobject": dummy,
        "process": "b",
        "status": "Push Back"
    })
    self.assertEquals("reject", dummy.get_value("b"))
    self.assertEquals("revise", dummy.get_value("a"))

    # "Go to Do It" on a is an output-direction status: b becomes "Do It"
    Trigger.call(self, "process|custom", {
        "pipeline": pipeline,
        "sobject": dummy,
        "process": "a",
        "status": "Go to Do It"
    })
    self.assertEquals("Do It", dummy.get_value("b"))
def _test_manual(self): print "test manual" # create a dummy sobject sobject = SearchType.create("sthpw/virtual") sobject.set_value("code", "test") sobject.set_value("a", False) sobject.set_value("b", False) pipeline_xml = ''' <pipeline> <process name="a"/> <process type="action" name="b"/> <connect from="a" to="b"/> </pipeline> ''' pipeline, processes = self.get_pipeline(pipeline_xml) # Run the pipeline process = "a" output = {"pipeline": pipeline, "sobject": sobject, "process": process} Trigger.call(self, "process|pending", output) # nothing should have run self.assertEquals("pending", sobject.get_value("a")) self.assertEquals(False, sobject.get_value("b"))
def get_display(self):
    """Render a week-view TableLayoutWdg inside a panel div."""
    top = DivWdg()
    self.set_as_panel(top)

    # NOTE(review): this virtual sobject is built but never attached to
    # the table below -- looks unfinished; confirm intent
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("mon", "3")
    sobject.set_value("tue", "2")
    sobject.set_value("wed", "5")

    # NOTE(review): this widget config is never registered or passed to
    # TableLayoutWdg; the 'week' view is presumably defined elsewhere -- verify
    config = '''
    <week>
    <element name="week"/>
    <element name="parent"/>
    <element name="category"/>
    <element name="description"/>
    <element name="mon"/>
    <element name="tue"/>
    <element name="wed"/>
    <element name="thu"/>
    <element name="fri"/>
    <element name="total"/>
    </week>
    '''

    table = TableLayoutWdg(search_type='sthpw/virtual', view='week')
    top.add(table)
    return top
def _test_messaging(self):
    """After a 3-step action pipeline runs, each process must have
    published a "complete" status entry in sthpw/message."""
    # dummy sobject wired to a linear a -> b -> c action pipeline
    town = SearchType.create("unittest/city")
    pipeline_xml = '''
    <pipeline>
    <process type="action" name="a"/>
    <process type="action" name="b"/>
    <process type="action" name="c"/>
    <connect from="a" to="b"/>
    <connect from="b" to="c"/>
    </pipeline>
    '''
    town_pipeline, town_processes = self.get_pipeline(pipeline_xml)
    town.set_value("pipeline_code", town_pipeline.get_code())
    town.commit()

    # kick the pipeline off at the first node
    Trigger.call(self, "process|pending", {
        "pipeline": town_pipeline,
        "sobject": town,
        "process": "a"
    })

    # every process should have broadcast completion on its message key
    for proc_name in town_processes:
        key = "%s|%s|status" % (town.get_search_key(), proc_name)
        lookup = Search("sthpw/message")
        lookup.add_filter("code", key)
        entry = lookup.get_sobject()
        self.assertEquals("complete", entry.get_value("message"))
def execute(my): my.init() # create the filters my.filters = [] """ for element_name in my.config.get_element_names(): filter = my.config.get_display_widget(element_name) my.filters.append(filter) # make sure there is at least one filter defined assert my.filters """ config = "<config>\n" config += "<filter>\n" # get all of the serialized versions of the filters """ for filter in my.filters: config += filter.serialize() + "\n" """ filter_data = FilterData.get() json = filter_data.serialize() value_type = "json" config += "<values type='%s'>%s</values>\n" % (value_type, json) config += "</filter>\n" config += "</config>\n" # format the xml xml = Xml() xml.read_string(config) if not my.view: saved_view = "saved_search:%s" % my.search_type else: saved_view = my.view # if my.view.startswith("saved_search:"): # saved_view = my.view # else: # saved_view = "saved_search:%s" % my.view # use widget config instead search = Search("config/widget_config") search.add_filter("view", saved_view) search.add_filter("search_type", my.search_type) if my.personal: search.add_user_filter() config = search.get_sobject() if not config: config = SearchType.create("config/widget_config") config.set_value("view", saved_view) config.set_value("search_type", my.search_type) if my.personal: config.set_user() config.set_value("config", xml.to_string()) config.commit()
def handle_config2(my):
    """for db column search config stuff, not used yet: store the column
    definition (data_type/nullable) for my.element_name in the
    SearchTypeSchema "definition" widget config."""
    web = WebContainer.get_web()

    search_type = "SearchTypeSchema"
    view = "definition"

    # find (or lazily create) the widget config row for this view
    config_search_type = "config/widget_config"
    search = Search(config_search_type)
    search.add_filter("search_type", search_type)
    search.add_filter("view", view)
    config = search.get_sobject()
    if not config:
        config = SearchType.create(config_search_type)
        config.set_value("search_type", search_type)
        config.set_value("view", view)

    xml = config.get_xml_value("config", "config")
    root = xml.get_root_node()
    # reinitialize
    config._init()

    # build a new config
    view_node = xml.create_element(view)
    root.appendChild(view_node)

    config_mode = web.get_form_value("config_mode")
    if config_mode == "advanced":
        # advanced mode: the raw element xml comes straight from the form
        config_string = web.get_form_value("config_xml")
    else:
        config_data_type = web.get_form_value("config_data_type")
        if config_data_type == "Other...":
            config_data_type = web.get_form_value("config_data_type_custom")
        config_nullable = web.get_form_value("config_nullable")

        # TAKEN FROM API: should be centralized or something
        from tactic.ui.panel import SideBarBookmarkMenuWdg
        config_view = SideBarBookmarkMenuWdg.get_config(search_type, view)
        node = config_view.get_element_node(my.element_name)
        if node:
            # element already defined: update its attributes in place
            config_xml = config_view.get_xml()
            node = config_view.get_element_node(my.element_name)
            Xml.set_attribute(node, "data_type", config_data_type)
            Xml.set_attribute(node, "nullable", config_nullable)
            Xml.set_attribute(node, "new", "True")
            config_string = config_xml.to_string(node)
        else:
            # element not defined yet: synthesize a fresh definition
            config_string = """
            <element name="%s" data_type="%s" nullable="%s" new="True"/>
            """ % (my.element_name, config_data_type, config_nullable, )

    config.append_xml_element(my.element_name, config_string)
    config.commit_config()
def _test_time(my):
    """ test timezone related behavior: the naive EST input survives the
    API round-trip while the database stores the GMT-shifted value"""
    item = SearchType.create("sthpw/task")
    item.set_value("project_code", "unittest")
    item.set_value("bid_start_date", "2014-11-11 05:00:00")

    # value is unchanged in memory, before and after commit
    my.assertEquals(item.get_value("bid_start_date"), "2014-11-11 05:00:00")
    item.commit()
    my.assertEquals(item.get_value("bid_start_date"), "2014-11-11 05:00:00")

    # read the raw column, bypassing the API timezone conversion
    from pyasm.search import DbContainer
    sql = DbContainer.get("sthpw")
    rows = sql.do_query("SELECT bid_start_date from task where id = %s" % item.get_id())
    # 2014-11-11 00:00:00 is actually written to the database
    my.assertEquals(rows[0][0].strftime("%Y-%m-%d %H:%M:%S %Z"), "2014-11-11 00:00:00 ")

    # an sType specified without a project but with an id could be a common
    # human error, but it should handle that fine (none of these may raise)
    item_id = item.get_id()
    obj1 = Search.eval('@SOBJECT(unittest/person?project=unittest["id", "%s"])' % item_id, single=True)
    obj2 = Search.eval('@SOBJECT(unittest/person?id=2["id", "%s"])' % item_id, single=True)
    obj3 = Search.eval('@SOBJECT(sthpw/task?id=2["id", "%s"])' % item_id, single=True)
    fetched = Search.eval('@SOBJECT(sthpw/task["id", "%s"])' % item_id, single=True)

    # EST and GMT diff is 5 hours
    my.assertEquals(fetched.get_value("bid_start_date"), "2014-11-11 05:00:00")
def create(sobject, value, prev_value=None):
    """Write a sthpw/status_log entry recording *sobject* changing status
    from prev_value to value.  No-ops (returns None) when unchanged."""
    if prev_value == value:
        return

    # if this is successful, the store it in the status_log
    #search_type = sobject.get_search_type()
    #search_id = sobject.get_id()
    #search_code = sobject.get_value("code")
    status_log = SearchType.create("sthpw/status_log")
    status_log.set_value("login", Environment.get_user_name() )
    # records the target's search_type/search_code/search_id columns
    status_log.set_sobject_value(sobject)
    #status_log.set_value("search_type", search_type)
    #status_log.set_value("search_code", search_id, no_exception=True)
    #status_log.set_value("search_id", search_code, no_exception=True)

    # from_status is only set when there was a previous status
    if prev_value:
        status_log.set_value("from_status", prev_value)
    status_log.set_value("to_status", value)

    project_code = Project.get_project_name()
    status_log.set_value("project_code", project_code)

    # cache=False: always hit the database for this insert
    status_log.commit(cache=False)

    return status_log
def execute(my): my.init() # create the filters my.filters = [] """ for element_name in my.config.get_element_names(): filter = my.config.get_display_widget(element_name) my.filters.append(filter) # make sure there is at least one filter defined assert my.filters """ config = "<config>\n" config += "<filter>\n" # get all of the serialized versions of the filters """ for filter in my.filters: config += filter.serialize() + "\n" """ filter_data = FilterData.get() json = filter_data.serialize() value_type = "json" config += "<values type='%s'>%s</values>\n" % (value_type, json) config += "</filter>\n" config += "</config>\n" # format the xml xml = Xml() xml.read_string(config) if not my.view: saved_view = "saved_search:%s" % my.search_type else: saved_view = my.view # if my.view.startswith("saved_search:"): # saved_view = my.view # else: # saved_view = "saved_search:%s" % my.view # use widget config instead search = Search('config/widget_config') search.add_filter("view", saved_view) search.add_filter("search_type", my.search_type) if my.personal: search.add_user_filter() config = search.get_sobject() if not config: config = SearchType.create('config/widget_config') config.set_value("view", saved_view) config.set_value("search_type", my.search_type) if my.personal: config.set_user() config.set_value("config", xml.to_string()) config.commit()
def _test_manual(my):
    """A manual process should go 'pending' without triggering its successor."""
    # build a dummy sobject to run the pipeline against
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")
    sobject.set_value("a", False)
    sobject.set_value("b", False)

    # "a" has no type (manual); "b" is an action process fed by "a"
    pipeline_xml = '''
    <pipeline>
      <process name="a"/>
      <process type="action" name="b"/>
      <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    # kick off the pipeline at the manual process
    start_process = "a"
    Trigger.call(my, "process|pending", {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": start_process,
    })

    # nothing should have run: "a" parks at pending, "b" is untouched
    my.assertEquals("pending", sobject.get_value("a"))
    my.assertEquals(False, sobject.get_value("b"))
def execute(my):
    """Recreate transaction logs from an uploaded file.

    The uploaded file holds one XML transaction per paragraph (blocks
    separated by blank lines).  Each block becomes a
    sthpw/transaction_log entry.
    """
    import os

    path = my.kwargs.get("path")
    path = path.replace("\\", "/")
    basename = os.path.basename(path)

    # the file has been placed in the standard upload directory
    upload_dir = Environment.get_upload_dir()
    path = "%s/%s" % (upload_dir, basename)

    # split the file into transactions on blank lines
    transactions = []
    xml = []
    f = open(path, 'r')
    try:
        for line in f:
            if line == '\n':
                transactions.append(xml)
                xml = []
                continue
            xml.append(line.strip())
        transactions.append(xml)
    finally:
        # BUG FIX: the file handle was never closed
        f.close()

    for transaction in transactions:
        value = "\n".join(transaction)

        # we have the transaction
        # recreate the log
        # BUG FIX: was "transacton_log" (NameError on the next line) and
        # stored the raw list instead of the joined xml string
        transaction_log = SearchType.create("sthpw/transaction_log")
        transaction_log.set_value("transaction", value)
        # NOTE(review): original code breaks after the first transaction
        # and never commits the entry — presumably work in progress;
        # behavior preserved here, confirm intent before extending.
        break
def create(cls, sobject, process, description="", assigned="", supervisor="",\
        status=None, depend_id=None, project_code=None, pipeline_code='', \
        start_date=None, end_date=None, context='', bid_duration=8):
    """Create, commit and return a new task attached to sobject.

    When no status is given, the first process of the task's pipeline
    is used.  A StatusLog entry is written for the initial status.
    """
    task = SearchType.create(cls.SEARCH_TYPE)
    task.set_parent(sobject)
    task.set_value("process", process)

    if description:
        task.set_value("description", description)
    # note: empty string is a valid assignment, only None is skipped
    if assigned != None:
        task.set_value("assigned", assigned)
    if supervisor != None:
        task.set_value("supervisor", supervisor)

    # default the project to the parent sobject's project
    if not project_code:
        project_code = sobject.get_project_code()
    task.set_value("project_code", project_code)
    task.set_value("pipeline_code", pipeline_code)

    # default the status to the first process of the pipeline
    if not status:
        pipeline = task.get_pipeline()
        process_names = pipeline.get_process_names()
        if process_names:
            status = process_names[0]
    if status:
        task.set_value("status", status)

    if bid_duration:
        task.set_value("bid_duration", bid_duration)
    if start_date:
        task.set_value("bid_start_date", start_date)
    if end_date:
        task.set_value("bid_end_date", end_date)

    # auto map context as process as the default
    #if not context:
    #    context = process
    # let get_defaults() set the context properly instead of auto-map
    if context:
        task.set_value("context", context)

    # DEPRECATED
    if depend_id:
        task.set_value("depend_id", depend_id)

    # created by
    if task.has_value('login'):
        task.set_value('login', Environment.get_user_name())

    task.commit(triggers=True)

    # log the status creation event
    StatusLog.create(task, status)

    return task
def postprocess(my):
    """On insert, connect the edited sobject to its parent through an
    instance sobject of the configured instance_type."""
    web = WebContainer.get_web()
    value = web.get_form_value(my.get_input_name())
    if not value:
        return

    # get all fo the sobjects from the search keys
    instance_type = my.get_option("instance_type")
    # path is used for self-relating in an instance table
    src_path = my.get_option("path")

    # re-fetch the sobject fresh from the database rather than using
    # my.sobject directly
    search = Search(my.sobject.get_search_type())
    search.add_id_filter(my.sobject.get_id())
    src_sobject = search.get_sobject()

    # this is passed in from EditCmd in insert mode
    parent_key = my.get_option('parent_key')
    if not parent_key or not my.is_insert:
        return
    # in some rare cases we have project as the parent_key — skip those
    if 'sthpw/project' in parent_key:
        return

    # this is the parent
    dst_sobject = SearchKey.get_by_search_key(parent_key)

    # add the new connection instance
    instance = SearchType.create(instance_type)
    instance.add_related_connection(src_sobject, dst_sobject, src_path=src_path)
    instance.commit()
def execute(self):
    """Persist the local server prefix and create a sync share entry."""
    # save prefix
    local_prefix = self.get_value("local_prefix")
    self.server_prefix = Config.get_value("install", "server")

    if not local_prefix and not self.server_prefix:
        raise TacticException("Cannot have empty local server prefix")

    # only rewrite the config when the prefix actually changed
    if local_prefix and local_prefix != self.server_prefix:
        Config.set_value("install", "server", local_prefix)
        Config.save_config()

    # default the project to the current one
    self.project_code = self.get_value("project") or Project.get_project_code()

    # create a share
    share = SearchType.create("sthpw/sync_server")
    self.handle_info(share)
    self.handle_sync_mode(share)
    share.commit()
def copy_sobject(my, sobject, dst_search_type, context=None, checkin_mode='inplace'): new_sobject = SearchType.create(dst_search_type) search_type = SearchType.get(dst_search_type) columns = SearchType.get_columns(dst_search_type) data = sobject.get_data() for name, value in data.items(): if name in ['id', 'code', 'pipeline_code']: continue if name not in columns: continue if not value: continue new_sobject.set_value(name, value) new_sobject.commit() # get all of the current snapshots and file paths associated if not context: snapshots = Snapshot.get_all_current_by_sobject(sobject) else: snapshots = [Snapshot.get_current_by_sobject(sobject, context)] if not snapshots: return msgs = [] for snapshot in snapshots: #file_paths = snapshot.get_all_lib_paths() file_paths_dict = snapshot.get_all_paths_dict() file_types = file_paths_dict.keys() if not file_types: continue # make sure the paths match the file_types file_paths = [file_paths_dict.get(x)[0] for x in file_types] mode = checkin_mode # checkin the files (inplace) try: context = snapshot.get_value('context') checkin = FileCheckin(new_sobject, context=context, file_paths=file_paths, file_types=file_types, mode=mode) checkin.execute() #print "done: ", context, new_sobject.get_related_sobjects("sthpw/snapshot") except CheckinException, e: msgs.append('Post-process Check-in Error for %s: %s ' % (context, e.__str__()))
def copy_sobject(my, sobject, dst_search_type, context=None, checkin_mode='inplace'):
    """Copy an sobject's data into dst_search_type (regenerating the code),
    then re-checkin its current snapshots against the new sobject."""
    new_sobject = SearchType.create(dst_search_type)
    search_type = SearchType.get(dst_search_type)
    columns = SearchType.get_columns(dst_search_type)
    data = sobject.get_data()
    for name, value in data.items():
        # identity columns never carry over
        if name in ['id','pipeline_code']:
            continue
        if name not in columns:
            continue
        if not value:
            continue
        # the code is regenerated for the destination, not copied verbatim
        if name == "code":
            value = Common.get_next_sobject_code(sobject, 'code')
            if not value:
                continue
        new_sobject.set_value(name, value)
    # stamp the current project when the destination table supports it
    if SearchType.column_exists(dst_search_type, "project_code"):
        project_code = Project.get_project_code()
        new_sobject.set_value("project_code", project_code)
    new_sobject.commit()

    # get all of the current snapshots and file paths associated
    if not context:
        snapshots = Snapshot.get_all_current_by_sobject(sobject)
    else:
        snapshots = [Snapshot.get_current_by_sobject(sobject, context)]
    if not snapshots:
        return

    msgs = []
    for snapshot in snapshots:
        #file_paths = snapshot.get_all_lib_paths()
        file_paths_dict = snapshot.get_all_paths_dict()
        file_types = file_paths_dict.keys()
        if not file_types:
            continue

        # make sure the paths match the file_types
        file_paths = [file_paths_dict.get(x)[0] for x in file_types]

        mode = checkin_mode

        # checkin the files (inplace); errors are collected, not raised
        try:
            context = snapshot.get_value('context')
            checkin = FileCheckin(new_sobject, context=context, file_paths=file_paths, file_types=file_types, mode=mode)
            checkin.execute()
            #print "done: ", context, new_sobject.get_related_sobjects("sthpw/snapshot")
        except CheckinException, e:
            msgs.append('Post-process Check-in Error for %s: %s ' %(context, e.__str__()))
def execute(my):
    """Create or update the HelpWdg widget config for the given view,
    wrapping the supplied html content in a CDATA section."""
    view = my.kwargs.get("view")
    content = my.kwargs.get("content")

    # look for an existing help config for this view
    search = Search("config/widget_config")
    search.add_filter("category", "HelpWdg")
    search.add_filter("view", view)
    config = search.get_sobject()

    if not config:
        config = SearchType.create("config/widget_config")
        config.set_value("category", "HelpWdg")
        config.set_value("view", view)

    # the content is embedded verbatim inside a CDATA block
    config_xml = '''
<config>
  <%s>
  <html><div><![CDATA[
  %s
  ]]></div></html>
  </%s>
</config>
''' % (view, content, view)

    config.set_value("config", config_xml)
    config.commit()
def get_display(my):
    """Build a week view panel backed by a virtual sobject."""
    top = DivWdg()
    my.set_as_panel(top)

    # dummy data for the weekday columns
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("mon", "3")
    sobject.set_value("tue", "2")
    sobject.set_value("wed", "5")

    # NOTE(review): this config string is built but never handed to the
    # table below — presumably the 'week' view is resolved elsewhere.
    config = '''
    <week>
      <element name="week"/>
      <element name="parent"/>
      <element name="category"/>
      <element name="description"/>
      <element name="mon"/>
      <element name="tue"/>
      <element name="wed"/>
      <element name="thu"/>
      <element name="fri"/>
      <element name="total"/>
    </week>
    '''

    table = TableLayoutWdg(search_type='sthpw/virtual', view='week')
    top.add(table)
    return top
def get_display(self):
    """Build the ingestion tool panel: resolves (or creates) the ingest
    session, then lays out the nav bar and a resizable two-pane body."""
    top = self.top
    top.add_class("spt_ingestion_top")
    self.set_as_panel(top)

    inner = DivWdg()
    top.add(inner)
    inner.add_color("background", "background")
    inner.add_border()
    inner.add_style("padding: 10px")

    # resolve the session: load by code when given, otherwise create a
    # default one under a fixed code
    self.session_code = self.get_value("session_code")
    if self.session_code:
        self.session = Search.get_by_code("config/ingest_session", self.session_code)
    else:
        self.session_code = "session101"
        self.session = SearchType.create("config/ingest_session")
        self.session.set_value("code", self.session_code)
        base_dir = self.get_value("base_dir")
        if base_dir:
            self.session.set_value("base_dir", base_dir)
        location = self.get_value("location")
        if location:
            self.session.set_value("location", location)
        else:
            # default to a local ingest when no location was supplied
            self.session.set_value("location", "local")
        self.session.commit()

    self.paths = self.get_value("paths")

    # navigation bar on top, then a horizontal rule
    nav_div = DivWdg()
    inner.add(nav_div)
    nav_div.add(self.get_nav_wdg())
    inner.add("<hr/>")

    # two resizable panes: session on the left, content on the right
    table = ResizableTableWdg()
    inner.add(table)
    table.add_color("color", "color")

    left = table.add_cell()
    left_div = DivWdg()
    left.add(left_div)
    left_div.add(self.get_session_wdg())
    left_div.add_style("padding: 10px")
    left_div.add_style("height: 100%")
    left_div.add_style("min-height: 500px")
    left_div.add_border()
    left_div.set_round_corners(corners=["TL", "TR"])
    left_div.add_class("SPT_RESIZABLE")

    right = table.add_cell()
    right.add(self.get_content_wdg())
    return top
def import_default_side_bar(self): code = Search.eval( "@GET(config/widget_config['code','WIDGET_CONFIG000000'].code)", single=True) if code: print "Default side bar already exists!" return project_code = self.kwargs.get('project_code') # It looks like project=XXX on SearchType.create does not work Project.set_project(project_code) config = SearchType.create("config/widget_config?project=%s" % project_code) config.set_value("code", "WIDGET_CONFIG000000") config.set_value("category", "SideBarWdg") config.set_value("search_type", "SideBarWdg") config.set_value("view", "project_view") xml = '''<?xml version='1.0' encoding='UTF-8'?> <config> <project_view> <element name='_home' title='Examples'/> </project_view> </config> ''' config.set_value("config", xml) config.commit()
def test_time(): from pyasm.search import SearchType sobj = SearchType.create('sthpw/note') sobj.set_value('process', 'TEST') sobj.set_value('note', '123') sobj.commit() sobj.set_value('note', 'new note') sobj.commit() # check change_timestamp change_t = Search.eval( "@SOBJECT(sthpw/change_timestamp['search_type','sthpw/note']['search_code','%s'])" % sobj.get_code(), single=True) if change_t: change_t_timestamp = change_t.get('timestamp') change_t_timestamp = parser.parse(change_t_timestamp) from pyasm.common import SPTDate now = SPTDate.now() diff = now - change_t_timestamp # should be roughly the same minute, not hours apart print "Change timestamp diff is ", diff.seconds
def _test_task(self):
    """A naive timestamp written to a task must survive a commit/reload
    round-trip unchanged (naive times are assumed to be GMT)."""
    project = Project.get()

    # create a new task
    task = SearchType.create("sthpw/task")
    task.set_parent(project)
    task.set_value("code", "XXX001")
    task.set_value("process", "unittest")
    task.set_value("description", "unittest")

    # set a time with no timezone. A timestamp with no timezone should
    # assume GMT.
    test_time = '2011-11-11 00:00:00'
    task.set_value("timestamp", test_time)

    # assert that the time has not changed before commit
    self.assertEquals(task.get_value("timestamp"), test_time)
    task.commit()

    # get the task back from the database
    search = Search("sthpw/task")
    search.add_filter("code", "XXX001")
    reloaded = search.get_sobject()

    # make sure the time has not changed. This value is assumed
    # to be in GMT
    self.assertEquals(reloaded.get_value("timestamp"), test_time)

    reloaded.delete()
def add(command, kwargs, queue_type, priority, description, message_code=None):
    """Create, commit and return a sthpw/queue job for the given command.

    kwargs is json-serialized into the job's data column.  Falsy
    queue_type/priority fall back to "default"/9999 and an empty
    description is skipped, matching the guarded add() variant elsewhere
    in this file.
    """
    queue = SearchType.create("sthpw/queue")
    queue.set_value("project_code", Project.get_project_code())
    #queue.set_sobject_value(sobject)

    # ROBUSTNESS: never write empty queue/priority values (consistent
    # with the sibling add() implementation)
    if not queue_type:
        queue_type = "default"
    queue.set_value("queue", queue_type)
    queue.set_value("state", "pending")
    queue.set_value("login", Environment.get_user_name())
    queue.set_value("command", command)

    data = jsondumps(kwargs)
    queue.set_value("data", data)

    if message_code:
        queue.set_value("message_code", message_code)

    if not priority:
        priority = 9999
    queue.set_value("priority", priority)
    if description:
        queue.set_value("description", description)

    queue.set_user()
    queue.commit()

    return queue
def _test_task(my):
    """A naive timestamp written to a task must survive a commit/reload
    round-trip unchanged (naive times are assumed to be GMT)."""
    project = Project.get()

    # create a new task
    task = SearchType.create("sthpw/task")
    task.set_parent(project)
    task.set_value("code", "XXX001")
    task.set_value("process", "unittest")
    task.set_value("description", "unittest")

    # set a time with no timezone. A timestamp with no timezone should
    # assume GMT.
    test_time = '2011-11-11 00:00:00'
    task.set_value("timestamp", test_time)

    # assert that the time has not changed before commit
    my.assertEquals(task.get_value("timestamp"), test_time)
    task.commit()

    # get the task back from the database
    search = Search("sthpw/task")
    search.add_filter("code", "XXX001")
    reloaded = search.get_sobject()

    # make sure the time has not changed. This value is assumed
    # to be in GMT
    my.assertEquals(reloaded.get_value("timestamp"), test_time)

    reloaded.delete()
def postprocess(self):
    """Create a 'reference' connection from every selected source sobject
    to the sobject being edited."""
    web = WebContainer.get_web()
    values = web.get_form_values("select_key")
    # the form posts [''] when nothing was selected
    if not values or values == ['']:
        return

    dst_sobject = self.sobject
    project_code = Project.get_project_code()

    for search_key in values:
        src_sobject = Search.get_by_search_key(search_key)
        if not src_sobject:
            continue

        connection = SearchType.create("sthpw/connection")
        connection.set_value("src_search_type", src_sobject.get_search_type())
        connection.set_value("src_search_id", src_sobject.get_id())
        connection.set_value("dst_search_type", dst_sobject.get_search_type())
        connection.set_value("dst_search_id", dst_sobject.get_id())
        connection.set_value("context", "reference")
        connection.set_value("project_code", project_code)
        connection.commit()
def execute(self): # disabling for now print "caching user ..." web = WebContainer.get_web() login = self.kwargs.get("login") login_sobj = Search.eval("@SOBJECT(sthpw/login['login','%s'])" % login, show_retired=True) if login_sobj: print "login %s already exists" % login return # cache the user try: from ad_authenticate import ADAuthenticate authenticate = ADAuthenticate() login_sobj = SearchType.create("sthpw/login") login_sobj.set_value("login", login) authenticate.add_user_info(login_sobj, password=None) login_sobj.commit() except Exception, e: print "Error: ", str(e)
def setUp(my):
    """Test fixture: boots a batch environment, creates the sample3d and
    unittest projects, and forces the unittest project to use the Prod
    dir/file naming classes (original values saved for tearDown)."""
    # start batch environment
    Batch()
    from pyasm.web.web_init import WebInit
    WebInit().execute()

    my.sample3d_env = Sample3dEnvironment(project_code='sample3d')
    my.sample3d_env.create()

    my.test_env = UnittestEnvironment()
    my.test_env.create()

    # set up the proper project_type, with the use the ProdDirNaming and ProdFileNaming
    search = Search('sthpw/project')
    search.add_filter('code', 'unittest')
    my.sobj = search.get_sobject()

    # store the original setting in your database (restored in tearDown,
    # presumably — confirm the teardown actually uses this dict)
    my.original_proj_type_dict = {'dir_naming_cls': my.sobj.get_value('dir_naming_cls'),
            'file_naming_cls': my.sobj.get_value('file_naming_cls')}
    #my.transaction = Transaction.get(create=True)
    if my.sobj:
        my.sobj.set_value('dir_naming_cls', 'pyasm.prod.biz.ProdDirNaming')
        my.sobj.set_value('file_naming_cls', 'pyasm.prod.biz.ProdFileNaming')
        my.sobj.commit()
    else:
        # no unittest project row: create a project_type entry instead
        my.sobj = SearchType.create('sthpw/project_type')
        my.sobj.set_value('dir_naming_cls', 'pyasm.prod.biz.ProdDirNaming')
        my.sobj.set_value('file_naming_cls', 'pyasm.prod.biz.ProdFileNaming')
        my.sobj.set_value('code', 'unittest')
        my.sobj.commit()
def postprocess(self):
    """Mirror the edited value onto a related sobject, deleting the
    related entry when the value is cleared."""
    search_type = self.get_option("search_type")
    column = self.get_option("column")
    # NOTE(review): the two option reads above are dead — they are
    # immediately overwritten by the hard-coded values below.  This
    # looks like debug/leftover hard-coding for one project; confirm
    # whether the options were meant to take precedence.
    search_type = "construction/login_in_trade"
    column = "trade_code"

    value = self.get_value(self.name)
    sobject = self.sobject

    # find (or lazily create) the single related sobject
    search = Search(search_type)
    search.add_relationship_filter(sobject)
    related = search.get_sobject()
    if not related:
        related = SearchType.create(search_type)
        related.set_parent(sobject)

    # empty value means: remove the related entry entirely
    if not value:
        related.delete()
    else:
        related.set_value(column, value)
        related.commit()
def on_insertX(self):
    """Mirror this pipeline into the config/pipeline table.

    Currently that table is not being used, however, pipelines really
    should be defined there.  It is an unfortunate historical wart that
    pipelines are stored in the sthpw database.  In some future release
    of TACTIC, the pipeline table in the sthpw database will be
    deprecated; this copy ensures that over time, the impact of the
    move-over is minimized.
    """
    # nothing to mirror when this already is a config/pipeline
    if self.SEARCH_TYPE == "config/pipeline":
        return

    search = Search("config/pipeline")
    search.add_filter("code", self.get_code())
    pipeline = search.get_sobject()
    if not pipeline:
        pipeline = SearchType.create("config/pipeline")

    # copy every meaningful column, skipping internals and identity
    for name, value in self.get_data().items():
        if name.startswith("__") or name in ["id", "project_code"]:
            continue
        if not value:
            continue
        pipeline.set_value(name, value)

    # triggers="none": the mirror copy must not fire triggers again
    pipeline.commit(triggers="none")
def import_default_side_bar(self): code = Search.eval("@GET(config/widget_config['code','WIDGET_CONFIG000000'].code)", single=True) if code: print "Default side bar already exists!" return project_code = self.kwargs.get('project_code') # It looks like project=XXX on SearchType.create does not work Project.set_project(project_code) config = SearchType.create("config/widget_config?project=%s" % project_code) config.set_value("code", "WIDGET_CONFIG000000") config.set_value("category", "SideBarWdg") config.set_value("search_type", "SideBarWdg") config.set_value("view", "project_view") xml = '''<?xml version='1.0' encoding='UTF-8'?> <config> <project_view> <element name='_home' title='Examples'/> </project_view> </config> ''' config.set_value("config", xml) config.commit()
def add(command, kwargs, queue_type, priority, description, message_code=None):
    """Create, commit and return a sthpw/queue job for the given command.

    kwargs is json-serialized into the data column.  Falsy queue_type
    and priority fall back to "default" and 9999; an empty description
    is skipped.
    """
    job = SearchType.create("sthpw/queue")
    job.set_value("project_code", Project.get_project_code())
    #job.set_sobject_value(sobject)

    job.set_value("queue", queue_type if queue_type else "default")
    job.set_value("state", "pending")
    job.set_value("login", Environment.get_user_name())
    job.set_value("command", command)
    job.set_value("data", jsondumps(kwargs))

    if message_code:
        job.set_value("message_code", message_code)

    job.set_value("priority", priority if priority else 9999)
    if description:
        job.set_value("description", description)

    job.set_user()
    job.commit()

    return job
def register(my, name, db_resource, tables=None): db_name = db_resource.get_database() project_code = "db_resource/%s" % db_name # create a virtual project project = SearchType.create("sthpw/project") # FIXME: what is this conflicts with an existing project project.set_value("code", project_code) project.set_value("db_resource", db_resource) # put this sobject into the cache from pyasm.biz import Project key = "sthpw/project|%s" % project_code Project.cache_sobject(key, project) if tables == None: # introspect and resister all of the tables sql = DbContainer.get(db_resource) table_info = sql.get_table_info() if table_info.get("spt_search_type"): print "has search_type" tables = table_info.keys() for table in tables: # FIXME: how is this created search_type = "table/%s?project=%s" % (table, db_name) #search_type_obj = SearchType.create("config/search_type") search_type_obj = SearchType.create("sthpw/search_type") search_type_obj.set_value("search_type", "table/%s" % table) search_type_obj.set_value("title", Common.get_display_title(table) ) search_type_obj.set_value("table_name", table) search_type_obj.set_value("database", db_name) SearchType.set_virtual(search_type, search_type_obj) my.db_resources[name] = db_resource
def _test_multi_input(my):
    """Diamond pipeline test (a -> b1..b4 -> c -> d).

    NOTE: deliberately disabled — the early return below makes the rest
    of the body unreachable.  Kept for when the multi-input behavior is
    re-enabled.
    """
    # Disabled for now
    return

    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    code = "test%s" % Common.generate_alphanum_key()
    sobject.set_value("code", code)

    #search = Search("sthpw/message")
    #sobjects = search.get_sobjects()
    #for sobject in sobjects:
    #    sobject.delete()

    # simple condition
    pipeline_xml = '''
    <pipeline>
      <process type="action" name="a"/>
      <process type="action" name="b1"/>
      <process type="action" name="b2"/>
      <process type="action" name="b3"/>
      <process type="action" name="b4"/>
      <process type="action" name="c"/>
      <process type="action" name="d"/>
      <connect from="a" to="b1"/>
      <connect from="a" to="b2"/>
      <connect from="a" to="b3"/>
      <connect from="a" to="b4"/>
      <connect from="b1" to="c"/>
      <connect from="b2" to="c"/>
      <connect from="b3" to="c"/>
      <connect from="b4" to="c"/>
      <connect from="c" to="d"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)

    # attach an action to the join process "c"
    process = processes.get("c")
    process.set_json_value("workflow", {
        'on_action': '''
        print "c: running action"
        '''
    })
    process.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)
def add_static_triggers(cls):
    """Register the built-in static triggers for task changes."""
    # event sthpw/trigger
    from pyasm.command import Trigger

    # any change to a task updates its related tasks
    related_trigger = SearchType.create("sthpw/trigger")
    related_trigger.set_value("event", "change|sthpw/task")
    #related_trigger.set_value("mode", "same process,same transaction")
    related_trigger.set_value("class_name", "tactic.command.RelatedTaskUpdateTrigger")
    Trigger.append_static_trigger(related_trigger)

    # a status change specifically fires the completion trigger
    status_trigger = SearchType.create("sthpw/trigger")
    status_trigger.set_value("event", "change|sthpw/task|status")
    #status_trigger.set_value("mode", "same process,same transaction")
    status_trigger.set_value("class_name", "tactic.command.TaskCompleteTrigger")
    Trigger.append_static_trigger(status_trigger)
def execute(my):
    """Set up a recurring aggregate job for one column of a search type:
    ensures an interval queue entry exists and adds the aggregate column
    to the table (ALTER TABLE side effect)."""
    my.search_type = my.kwargs.get("search_type")
    my.element_name = my.kwargs.get("element_name")
    assert my.search_type
    assert my.element_name

    # interval in seconds between aggregate runs; default 120
    interval = my.kwargs.get('interval')
    if not interval:
        interval = 120
    data_type = my.kwargs.get('data_type')
    if not data_type:
        data_type = 'float'

    class_name = 'tactic.ui.app.aggregate_wdg.AggregateCmd'
    priority = None
    user = Environment.get_user_name()

    # these interval jobs need to have a specific code
    code = "aggregate|%s|%s" % (my.search_type, my.element_name)

    # check to see if the job exists
    job = Search.get_by_code("sthpw/queue", code)
    if not job:
        job = SearchType.create("sthpw/queue")
        job.set_value("code", code)
        job.set_value("project_code", Project.get_project_code())
        job.set_value("class_name", class_name)
        job.set_value("command", class_name)
        job.set_value("login", user)
        job.set_value("queue", 'interval')
        # this is meaningless
        job.set_value("priority", 9999)
        # not sure what to do here if it already exists
        job.set_value("state", 'pending')

    # add a column to the table
    from pyasm.command import ColumnAddCmd
    from pyasm.search import AlterTable
    column_name = my.element_name
    cmd = ColumnAddCmd(my.search_type, column_name, data_type)
    cmd.execute()

    # modify the table
    #alter = AlterTable(my.search_type)
    #alter.modify(my.search_type, data_type)
    #print alter.get_statements()

    # the kwargs are stringified so the job can be re-run later
    job.set_value("serialized", str(my.kwargs))
    job.set_value("interval", interval)
    job.commit()
def execute(my):
    """Start a chat between the current user and the given users, unless
    an equivalent subscription set already exists."""
    login = Environment.get_user_name()
    users = my.kwargs.get("users")

    # everyone in the chat, including the initiator
    everyone = [login]
    everyone.extend(users)

    # find out if there already is a subscription between this user
    # and the others
    search = Search("sthpw/subscription")
    search.add_filter("login", login)
    search.add_filter("category", "chat")
    login_subscriptions = search.get_sobjects()
    keys = [sub.get_value("message_code") for sub in login_subscriptions]

    # a chat already exists when every other user subscribes to one of
    # the same message codes
    create = True
    for user in users:
        search = Search("sthpw/subscription")
        search.add_filters("message_code", keys)
        search.add_filter("login", user)
        if search.get_sobjects():
            create = False

    if not create:
        return

    # create a new chat message under a random key
    key = Common.generate_random_key()
    message = SearchType.create("sthpw/message")
    message.set_value("code", key)
    message.set_value("login", login)
    message.set_value("category", "chat")
    message.set_value("message", "Welcome!!!")
    message.commit()

    # create a subscription for each person
    for person in everyone:
        subscription = SearchType.create("sthpw/subscription")
        subscription.set_value("message_code", key)
        subscription.set_value("login", person)
        subscription.set_value("category", "chat")
        subscription.commit()