def create(self): project = Project.get_by_code(self.project_code) if project: self.delete() print "Setting up a basic Sample3d project" # create the project create_cmd = CreateProjectCmd(project_code=self.project_code, project_title="Sample 3D") #, project_type="unittest") create_cmd.execute() # install the unittest plugin installer = PluginInstaller(relative_dir="TACTIC/internal/sample3d", verbose=False) installer.execute() # add 30 shots for x in xrange(30): shot = SearchType.create("prod/shot") shot.set_value('name','shot%s'%x) shot.set_value('sequence_code','SEQ_01') shot.commit(triggers=False) if not Search.eval("@SOBJECT(prod/sequence['code','SEQ_01'])"): seq = SearchType.create("prod/sequence") seq.set_value('code','SEQ_01') seq.commit(triggers=False)
def copy_sobject(my, sobject, dst_search_type, context=None, checkin_mode='inplace'):
    """Copy `sobject` into a new sobject of `dst_search_type`, then re-check-in
    its current snapshot files onto the copy.

    Args:
        sobject: the source sobject to copy.
        dst_search_type: search type of the copy to create.
        context: when given, only the current snapshot of this context is
            copied; otherwise all current snapshots are.
        checkin_mode: mode passed to FileCheckin (default 'inplace').

    NOTE(review): check-in errors are accumulated in `msgs` but the list is
    never returned or logged from here -- confirm whether callers expect it.
    """
    new_sobject = SearchType.create(dst_search_type)
    search_type = SearchType.get(dst_search_type)
    columns = SearchType.get_columns(dst_search_type)
    data = sobject.get_data()
    for name, value in data.items():
        # never copy the primary key or pipeline assignment
        if name in ['id','pipeline_code']:
            continue
        # skip columns the destination type does not have
        if name not in columns:
            continue
        if not value:
            continue
        if name == "code":
            # codes must stay unique: generate the next available one
            value = Common.get_next_sobject_code(sobject, 'code')
            if not value:
                continue
        new_sobject.set_value(name, value)
    # stamp the copy with the current project when the column exists
    if SearchType.column_exists(dst_search_type, "project_code"):
        project_code = Project.get_project_code()
        new_sobject.set_value("project_code", project_code)
    new_sobject.commit()

    # get all of the current snapshots and file paths associated
    if not context:
        snapshots = Snapshot.get_all_current_by_sobject(sobject)
    else:
        snapshots = [Snapshot.get_current_by_sobject(sobject, context)]
    if not snapshots:
        return

    msgs = []
    for snapshot in snapshots:
        #file_paths = snapshot.get_all_lib_paths()
        file_paths_dict = snapshot.get_all_paths_dict()
        file_types = file_paths_dict.keys()
        if not file_types:
            continue
        # make sure the paths match the file_types
        file_paths = [file_paths_dict.get(x)[0] for x in file_types]
        mode = checkin_mode
        # checkin the files (inplace)
        try:
            context = snapshot.get_value('context')
            checkin = FileCheckin(new_sobject, context=context, file_paths=file_paths, file_types=file_types, mode=mode)
            checkin.execute()
            #print "done: ", context, new_sobject.get_related_sobjects("sthpw/snapshot")
        except CheckinException, e:
            msgs.append('Post-process Check-in Error for %s: %s ' %(context, e.__str__()))
def __init__(my, search_type, config_base, input_prefix='', config=None):
    """Resolve `search_type` (string, SearchType sobject, or SObject
    subclass) and initialize the common layout-widget state.

    Raises:
        LayoutException: when search_type is none of the accepted forms.
    """
    if type(search_type) in types.StringTypes:
        # a plain search type string, e.g. "prod/shot"
        my.search_type_obj = SearchType.get(search_type)
        my.search_type = search_type
    elif isinstance(search_type, SearchType):
        # already a SearchType sobject
        my.search_type_obj = search_type
        my.search_type = search_type.get_base_key()
    elif inspect.isclass(search_type) and issubclass(search_type, SObject):
        # an SObject class: use its declared SEARCH_TYPE constant
        my.search_type_obj = SearchType.get(search_type.SEARCH_TYPE)
        my.search_type = my.search_type_obj.get_base_key()
    else:
        raise LayoutException('search_type must be a string or an sobject')

    my.config_base = config_base
    my.input_prefix = input_prefix
    my.config = config

    my.element_names = []
    my.element_titles = []

    from pyasm.web import DivWdg
    my.top = DivWdg()

    # layout widgets compartmentalize their widgets in sections for drawing
    my.sections = {}

    super(BaseConfigWdg, my).__init__()
def _test_base_dir_alias(my):
    """Check that `base_dir_alias` naming entries redirect check-ins into
    their aliased asset base directories."""
    Config.set_value("checkin", "asset_base_dir", {
        'default': '/tmp/tactic/default',
        'alias': '/tmp/tactic/alias',
        'alias2': '/tmp/tactic/alias2',
    })

    asset_dict = Environment.get_asset_dirs()
    default_dir = asset_dict.get("default")
    my.assertEquals("/tmp/tactic/default", default_dir)

    aliases = asset_dict.keys()
    # "plugins" is assumed in some branch
    if 'plugins' in aliases:
        my.assertEquals(4, len(aliases))
    else:
        my.assertEquals(3, len(aliases))

    # BUGFIX: was assertNotEquals(None, "alias" in aliases), which always
    # passed because a boolean is never None. Assert membership directly.
    my.assertTrue("alias" in aliases)

    # create a naming
    naming = SearchType.create("config/naming")
    naming.set_value("search_type", "unittest/person")
    naming.set_value("context", "alias")
    naming.set_value("dir_naming", "alias")
    naming.set_value("file_naming", "text.txt")
    naming.set_value("base_dir_alias", "alias")
    naming.commit()

    # create 2nd naming
    naming = SearchType.create("config/naming")
    naming.set_value("search_type", "unittest/person")
    naming.set_value("context", "alias2")
    naming.set_value("dir_naming", "alias2")
    naming.set_value("base_dir_alias", "alias2")
    naming.set_value("file_naming", "text.txt")
    naming.set_value("checkin_type", "auto")
    naming.commit()

    my.clear_naming()

    # create a new test.txt file and check it in under each alias context
    for context in ['alias', 'alias2']:
        file_path = "./test.txt"
        # don't shadow the builtin `file`
        f = open(file_path, 'w')
        f.write("whatever")
        f.close()

        checkin = FileCheckin(my.person, file_path, context=context)
        checkin.execute()

        # the snapshot must land in the aliased base dir
        snapshot = checkin.get_snapshot()
        lib_dir = snapshot.get_lib_dir()
        expected = "/tmp/tactic/%s/%s" % (context, context)
        my.assertEquals(expected, lib_dir)

        path = "%s/text.txt" % (lib_dir)
        exists = os.path.exists(path)
        my.assertEquals(True, exists)
def get_message(my):
    """Build the notification message body.

    If the notification row defines a message expression, evaluate it (under
    Sudo) with snapshot/prev_data/update_data exposed as env sobjects;
    otherwise fall back to a generic "<title> <name>" plus the subject.
    """
    search_type_obj = my.sobject.get_search_type_obj()
    title = search_type_obj.get_title()
    subject = my.get_subject()
    notification_message = my.notification.get_value("message")
    if notification_message:
        # parse it through the expression
        sudo = Sudo()
        parser = ExpressionParser()
        snapshot = my.input.get('snapshot')
        env_sobjects = {}

        # turn prev_data and update_data from input into sobjects
        prev_data = SearchType.create("sthpw/virtual")
        id_col = prev_data.get_id_col()
        # presumably virtual sobjects carry a default id column that must
        # not leak into the data -- TODO confirm
        if id_col:
            del prev_data.data[id_col]
        prev_dict = my.input.get("prev_data")
        if prev_dict:
            for name, value in prev_dict.items():
                if value != None:
                    prev_data.set_value(name, value)

        update_data = SearchType.create("sthpw/virtual")
        id_col = update_data.get_id_col()
        if id_col:
            del update_data.data[id_col]
        update_dict = my.input.get("update_data")
        if update_dict:
            for name, value in update_dict.items():
                if value != None:
                    update_data.set_value(name, value)

        if snapshot:
            env_sobjects = { 'snapshot': snapshot }
        env_sobjects['prev_data'] = prev_data
        env_sobjects['update_data'] = update_data

        notification_message = parser.eval(notification_message, my.sobject, env_sobjects=env_sobjects, mode='string')
        # drop elevated privileges before returning
        del sudo
        return notification_message

    # no expression defined: generic fallback message
    message = "%s %s" % (title, my.sobject.get_name())
    message = '%s\n\nReport from transaction:\n%s\n' % (message, subject)
    return message
def execute(my):
    """Attach the sobjects in kwargs['search_keys'] to the collection in
    kwargs['collection_key'], merging the collection's keywords onto each
    newly attached sobject."""
    collection_key = my.kwargs.get("collection_key")
    search_keys = my.kwargs.get("search_keys")

    collection = Search.get_by_search_key(collection_key)
    if not collection:
        raise Exception("Collection does not exist")

    search_type = collection.get_base_search_type()
    parts = search_type.split("/")
    collection_type = "%s/%s_in_%s" % (parts[0], parts[1], parts[1])

    # codes already attached to this collection
    item_search = Search(collection_type)
    item_search.add_filter("parent_code", collection.get_code())
    existing_codes = set(
        item.get_value("search_code") for item in item_search.get_sobjects()
    )

    has_keywords = SearchType.column_exists(search_type, "keywords")
    if has_keywords:
        raw = collection.get_value("keywords", no_exception=True)
        collection_keywords = set(raw.split(" "))

    # create the new membership rows
    for sobject in Search.get_by_search_keys(search_keys):
        if sobject.get_code() in existing_codes:
            continue

        new_item = SearchType.create(collection_type)
        new_item.set_value("parent_code", collection.get_code())
        new_item.set_value("search_code", sobject.get_code())
        new_item.commit()

        # copy the metadata of the collection onto the member
        if has_keywords:
            merged = set(sobject.get_value("keywords").split(" "))
            merged = merged.union(collection_keywords)
            sobject.set_value("keywords", " ".join(merged))
            sobject.commit()
def execute(my):
    """Add the columns described in kwargs['values'] to kwargs['search_type']
    and register a FormatElementWdg entry for each in the definition config.

    Raises:
        TacticException: on non-ASCII column names.
        CommandException: when a column already exists.
    """
    search_type = my.kwargs.get("search_type")
    column_info = SearchType.get_column_info(search_type)
    values = my.kwargs.get("values")

    # fetch, or lazily create, the definition config for this search_type
    from pyasm.search import WidgetDbConfig
    config = WidgetDbConfig.get_by_search_type(search_type, "definition")
    if not config:
        config = SearchType.create("config/widget_config")
        config.set_value("search_type", search_type)
        config.set_value("view", "definition")
        config.commit()
        config._init()

    for data in values:
        name = data.get("name").strip()
        if not name:
            continue

        # column names must be plain ASCII
        try:
            name.encode('ascii')
        except UnicodeEncodeError:
            raise TacticException('Column name needs to be in English. Non-English characters can be used in Title when performing [Edit Column Definition] afterwards.')

        if column_info.get(name):
            raise CommandException("Column [%s] is already defined" % name)

        col_format = data.get("format")
        fps = data.get("fps")
        data_type = data.get("data_type")

        # add the physical database column
        from pyasm.command import ColumnAddCmd
        ColumnAddCmd(search_type, name, data_type).execute()

        # register the display widget for the new column
        widget_class = 'tactic.ui.table.FormatElementWdg'
        options = {
            'format': col_format,
            'type': data_type,
            'fps': fps,
        }
        config.append_display_element(name, widget_class, options=options)
        config.commit_config()
def set_templates(self):
    """Point the global "project" template at the current web context,
    falling back to the default project for the "default" context."""
    code = WebContainer.get_web().get_full_context_name()
    if code == "default":
        code = Project.get_default_project()

    try:
        SearchType.set_global_template("project", code)
    except SecurityException as e:
        # non-fatal: just report the denied template switch
        print("WARNING: ", e)
def execute(my):
    """Recreate and commit a sthpw/transaction_log entry from
    kwargs['transaction_xml'], which may be a dict (the default), an SObject,
    or a raw xml string (test-only path).
    """
    import types
    transaction_xml = my.kwargs.get("transaction_xml")
    file_mode = my.kwargs.get("file_mode")
    if not file_mode:
        file_mode = 'delayed'

    # if the first argument is a dictionary, then the whole
    # transaction sobject was passed through
    # NOTE: this is now the default
    if type(transaction_xml) == types.DictType:
        transaction_dict = transaction_xml
        transaction_xml = transaction_dict.get("transaction")
        timestamp = transaction_dict.get("timestamp")
        login = transaction_dict.get("login")

        # recreate the transaction
        transaction = SearchType.create("sthpw/transaction_log")
        for name, value in transaction_dict.items():
            # skip internal keys, the primary key, and nulls
            if name.startswith("__"):
                continue
            if name == 'id':
                continue
            if value == None:
                continue
            transaction.set_value(name, value)
    elif isinstance(transaction_xml, SObject):
        # already a transaction sobject: use it directly
        transaction = transaction_xml
    else:
        # Create a fake transaction.
        # This is only used for test purposes
        transaction = SearchType.create("sthpw/transaction_log")
        if transaction_xml:
            transaction.set_value("transaction", transaction_xml)
        else:
            print "WARNING: transaction xml is empty"
        transaction.set_value("login", "admin")

    # commit the new transaction. This is the only case where
    # a transaction will not have a code, so it has to be committed.
    # The other case do not need to be committed because they will
    # already have codes and the transaction is committed in
    # RedoCmd
    try:
        transaction.commit()
    except Exception, e:
        # best-effort: an existing code means the transaction was
        # already synced, so skip instead of failing
        print "Failed to commit transaction [%s]: It may already exist. Skipping." % transaction.get_code()
        print str(e)
        return
def set_templates(my): if my.context: context = my.context else: context = WebContainer.get_web().get_full_context_name() try: SearchType.set_global_template("project", context) except SecurityException, e: print "WARNING: ", e
def check(my):
    """Validate that the sType about to be operated on belongs to the
    current project (or the sthpw admin database).

    Populates my.search_type, my.values, my.db_resource, my.database and
    my.search_type_obj for use by execute().

    Returns:
        True when the operation may proceed.
    Raises:
        TacticException: when the sType lives in another project's database.
    """
    my.search_type = my.kwargs.get("search_type")
    my.values = my.kwargs.get("values")
    my.db_resource = SearchType.get_db_resource_by_search_type(my.search_type)
    my.database = my.db_resource.get_database()
    my.search_type_obj = SearchType.get(my.search_type)

    if my.database != Project.get_project_code() and my.database != 'sthpw':
        # BUGFIX: an unreachable `return False` used to follow this raise;
        # it has been removed.
        raise TacticException('You are not allowed to delete the sType [%s] from another project [%s].' % (my.search_type, my.database))

    return True
def check(self):
    """Validate that the sType about to be operated on belongs to the
    current project (or the sthpw admin database).

    Populates self.search_type, self.values, self.db_resource, self.database
    and self.search_type_obj for use by execute().

    Returns:
        True when the operation may proceed.
    Raises:
        TacticException: when the sType lives in another project's database.
    """
    self.search_type = self.kwargs.get("search_type")
    self.values = self.kwargs.get("values")
    self.db_resource = SearchType.get_db_resource_by_search_type(self.search_type)
    self.database = self.db_resource.get_database()
    self.search_type_obj = SearchType.get(self.search_type)

    if self.database != Project.get_project_code() and self.database != 'sthpw':
        # BUGFIX: an unreachable `return False` used to follow this raise;
        # it has been removed.
        raise TacticException('You are not allowed to delete the sType [%s] from another project [%s].' % (self.search_type, self.database))

    return True
def do_search(my):
    '''this widget has its own search mechanism: resolve the sobject to be
    edited (or a fresh one for insert mode) and bind it to the widgets.'''
    web = WebContainer.get_web()

    # get the sobject that is to be edited
    id = my.search_id

    # if no id is given, then create a new one for insert
    search = None
    sobject = None
    search_type_base = SearchType.get(my.search_type).get_base_key()
    if my.mode == "insert":
        sobject = SearchType.create(my.search_type)
        my.current_id = -1

        # prefilling default values if available
        value_keys = web.get_form_keys()
        if value_keys:
            for key in value_keys:
                value = web.get_form_value(key)
                sobject.set_value(key, value)
    else:
        search = Search(my.search_type)

        # figure out which id to search for
        if web.get_form_value("do_edit") == "Edit/Next":
            # "Edit/Next": advance to the next id in the posted id list,
            # wrapping around at the end
            search_ids = web.get_form_value("%s_search_ids" % search_type_base)
            if search_ids == "":
                my.current_id = id
            else:
                search_ids = search_ids.split("|")
                next = search_ids.index(str(id)) + 1
                if next == len(search_ids):
                    next = 0
                my.current_id = search_ids[next]

                # remember the sobject we are leaving
                last_search = Search(my.search_type)
                last_search.add_id_filter( id )
                my.last_sobject = last_search.get_sobject()
        else:
            my.current_id = id

        search.add_id_filter( my.current_id )
        sobject = search.get_sobject()

        if not sobject and my.current_id != -1:
            raise EditException("No SObject found")

    # set all of the widgets to contain this sobject
    my.set_sobjects( [sobject], search )
def get_pipeline(my, pipeline_xml, add_tasks=False):
    """Create a transient sthpw/pipeline from `pipeline_xml` and (re)create
    a config/process entry for each of its process nodes.

    Args:
        pipeline_xml: the <pipeline> xml definition.
        add_tasks: when True, also create an sthpw/task per process.
            NOTE(review): this path references `sobject`, which is not
            defined in this scope (the original comment acknowledges it);
            enabling add_tasks still raises NameError -- confirm the
            intended parent before relying on it.

    Returns:
        (pipeline, processes_dict) where processes_dict maps each process
        name to its config/process sobject.
    """
    pipeline = SearchType.create("sthpw/pipeline")
    pipeline.set_pipeline(pipeline_xml)
    pipeline_id = random.randint(0, 10000000)
    #pipeline.set_value("code", "test%s" % pipeline_id)
    #pipeline.set_id(pipeline_id)
    #pipeline.set_value("id", pipeline_id)
    pipeline.set_value("pipeline", pipeline_xml)
    pipeline.commit()

    process_names = pipeline.get_process_names()

    # delete any stale processes with the same names
    search = Search("config/process")
    search.add_filters("process", process_names)
    processes = search.get_sobjects()
    for process in processes:
        process.delete()

    # create new processes
    processes_dict = {}
    for process_name in process_names:
        # define the process nodes
        process = SearchType.create("config/process")
        process.set_value("process", process_name)
        process.set_value("pipeline_code", pipeline.get_code())
        process.set_json_value("workflow", {
            'on_complete': '''
            sobject.set_value('%s', "complete")
            ''' % process_name,
            'on_approve': '''
            sobject.set_value('%s', "approve")
            ''' % process_name,
        })
        process.commit()
        processes_dict[process_name] = process

        # Note: we don't have an sobject yet
        if add_tasks:
            # BUGFIX: was "SaerchType" (NameError on this code path)
            task = SearchType.create("sthpw/task")
            task.set_parent(sobject)
            task.set_value("process", process_name)
            task.commit()

    return pipeline, processes_dict
def get_logins_by_id(note_id):
    """Return the Login sobjects subscribed, through their login groups,
    to the notification id(s) in `note_id` (a single int or a list)."""
    login_in_group = SearchType.get(LoginInGroup.SEARCH_TYPE)
    group_note = SearchType.get(GroupNotification.SEARCH_TYPE)

    search = Search(Login.SEARCH_TYPE)

    # build the "notification_id" predicate for one id or many
    if isinstance(note_id, list):
        id_csv = ",".join([str(nid) for nid in note_id])
        query_str = "in (%s)" % id_csv
    else:
        query_str = "= %d" % note_id

    search.add_where('''"login" in (select "login" from "%s" where "login_group" in (select "login_group" from "%s" where "notification_id" %s)) ''' % (login_in_group.get_table(), group_note.get_table(), query_str))
    return search.get_sobjects()
def _test_sobject_hierarchy(my):
    # FIXME: this functionality has been disabled until further notice
    # NOTE: everything below this early return is intentionally dead code,
    # kept for when the feature is re-enabled.
    return

    # create the same snapshot_type code in both the sthpw and prod
    # namespaces, then read it back
    snapshot_type = SearchType.create("sthpw/snapshot_type")
    snapshot_type.set_value("code", "maya_model")
    snapshot_type.commit()

    snapshot_type = SearchType.create("prod/snapshot_type")
    snapshot_type.set_value("code", "maya_model")
    snapshot_type.commit()

    snapshot_type = SnapshotType.get_by_code("maya_model")
def get_display(my):
    """Render the search-type filter box: a persistent "show all types"
    checkbox and a search-type select whose option list depends on it.

    NOTE(review): `widget` is created but never used, and the only visible
    return is inside the except handler -- this block may be truncated in
    this view; confirm against the full file.
    """
    widget = Widget()
    div = DivWdg(css="filter_box")

    # persistent checkbox toggling project-only vs all search types
    show_span = SpanWdg(css="med")
    show_span.add("Show All Types: ")
    checkbox = FilterCheckboxWdg("show_all_types")
    checkbox.set_persistence()
    show_span.add(checkbox)
    show_all_types = checkbox.get_value()
    div.add(show_span)

    span = SpanWdg(css="med")
    span.add("Search Type: ")
    select = SelectWdg("filter|search_type")
    select.add_empty_option("-- Select --")
    project = Project.get()
    project_type = project.get_base_type()
    search = Search("sthpw/search_object")
    if show_all_types:
        # project-type namespace, project namespace, plus tasks
        search.add_where( """ namespace = '%s' or namespace = '%s' or search_type in ('sthpw/task') """ % (project_type, project.get_code()) )
    else:
        # show only the custom ones
        search.add_filter("namespace", project.get_code())
    search.add_order_by("title")
    sobjects = search.get_sobjects()
    select.set_sobjects_for_options(sobjects, "search_type", "title")
    # select.set_option("query", "sthpw/search_object|search_type|title")
    select.set_persistence()
    select.add_event("onchange", "document.form.submit()")
    search_type = select.get_value()
    span.add(select)
    div.add(span)

    # make sure the current selection exists
    try:
        SearchType.get(search_type)
    except SearchException, e:
        return div
def _test_time(my):
    ''' test timezone related behavior: naive timestamps must round-trip
    unchanged through set_value/commit, while the database stores the
    GMT-converted value; NOW() defaults convert back from local time.'''
    sobject = SearchType.create('sthpw/task')
    sobject.set_value('project_code','unittest')
    sobject.set_value('bid_start_date', '2014-11-11 05:00:00')
    time = sobject.get_value('bid_start_date')
    my.assertEquals(time, '2014-11-11 05:00:00')

    sobject.commit()
    # still unchanged after commit
    time = sobject.get_value('bid_start_date')
    my.assertEquals(time, '2014-11-11 05:00:00')

    from pyasm.search import DbContainer
    sql = DbContainer.get('sthpw')
    db_value = sql.do_query('SELECT bid_start_date from task where id = %s'%sobject.get_id())

    # 2014-11-11 00:00:00 is actually written to the database
    my.assertEquals(db_value[0][0].strftime('%Y-%m-%d %H:%M:%S %Z'), '2014-11-11 00:00:00 ')

    # an sType specified without a project but with an id could be a common human error
    # but it should handle that fine
    obj1 = Search.eval('@SOBJECT(unittest/person?project=unittest["id", "%s"])'%sobject.get_id(), single=True)
    obj2= Search.eval('@SOBJECT(unittest/person?id=2["id", "%s"])'%sobject.get_id(), single=True)
    obj3 = Search.eval('@SOBJECT(sthpw/task?id=2["id", "%s"])'%sobject.get_id(), single=True)
    task = Search.eval('@SOBJECT(sthpw/task["id", "%s"])'%sobject.get_id(), single=True)

    # EST and GMT diff is 5 hours
    my.assertEquals(task.get_value('bid_start_date'), '2014-11-11 05:00:00')

    # test NOW() auto conversion
    sobj = SearchType.create('sthpw/note')
    sobj.set_value('process','TEST')
    sobj.set_value('note','123')
    # no timestamp until commit fills in the default
    my.assertEquals(sobj.get_value('timestamp'), "")
    sobj.commit()

    # this is local commited time converted back to GMT
    committed_time = sobj.get_value('timestamp')

    from dateutil import parser
    committed_time = parser.parse(committed_time)
    from pyasm.common import SPTDate
    now = SPTDate.now()
    diff = now - committed_time
    # should be roughly the same minute, not hours apart
    my.assertEquals(diff.seconds < 60, True)
def _test_trigger(my):
    """Register a script trigger on a process and fire a pipeline event."""
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
    <process type="action" name="a"/>
    </pipeline>
    '''
    pipeline, processes = my.get_pipeline(pipeline_xml)
    process = processes.get("a")
    # clear the workflow so only the explicit trigger below fires
    process.set_value("workflow", "")
    process.commit()

    folder = Common.generate_alphanum_key()

    Trigger.clear_db_cache()
    event = "process|action"
    trigger = SearchType.create("config/trigger")
    trigger.set_value("event", event)
    trigger.set_value("process", process.get_code())
    trigger.set_value("mode", "same process,same transaction")
    trigger.set_value("script_path", "%s/process_trigger" % folder)
    trigger.commit()

    # the script the trigger points at: dumps its input dict
    script = SearchType.create("config/custom_script")
    script.set_value("folder", folder)
    script.set_value("title", "process_trigger")
    script.set_value("script", '''
    print "---"
    for key, value in input.items():
        print key, value
    print "---"
    print "process: ", input.get("process")
    ''')
    script.commit()

    # Run the pipeline
    # NOTE(review): the trigger is registered for "process|action" but the
    # event fired here is "process|pending" -- confirm this is intended
    # (the workflow engine may promote pending to action).
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process
    }
    Trigger.call(my, "process|pending", output)
def execute(my): assert my.db_resource assert my.table database = my.db_resource.get_database() from pyasm.search import Insert, Select, DbContainer, Search, Sql # get the data if not my.sobjects: search = Search("sthpw/search_object") # BAD assumption #search.add_filter("table", my.table) # create a search_type. This is bad assumption cuz it assumes project-specific search_type # should call set_search_type() if not my.search_type: my.search_type = "%s/%s" % (my.database, my.table) search.add_filter("search_type", my.search_type) my.search_type_obj = search.get_sobject() if not my.search_type_obj: if my.no_exception == False: raise SqlException("Table [%s] does not have a corresponding search_type" % my.table) else: return search_type = my.search_type_obj.get_base_key() search = Search(my.search_type) search.set_show_retired(True) my.sobjects = search.get_sobjects() # get the info for the table column_info = SearchType.get_column_info(my.search_type) for sobject in my.sobjects: print my.delimiter insert = Insert() insert.set_database(my.database) insert.set_table(my.table) data = sobject.data for name, value in data.items(): if name in my.ignore_columns: continue if not my.include_id and name == "id": insert.set_value("id", '"%s_id_seq".nextval' % table, quoted=False) #insert.set_value(name, value, quoted=False) elif value == None: continue else: # replace all of the \ with double \\ insert.set_value(name, value) print "%s" % insert.get_statement() print my.end_delimiter print
def get_display(self):
    """Populate the label/value options with the current project prefix
    and delegate rendering to the base widget."""
    project = SearchType.get_project()
    self.set_option("labels", "%s|general" % project)
    self.set_option("values", "%s|gen" % project)
    return super(AssetLibraryTypeWdg, self).get_display()
def get_by_search_type(cls, search_type, project_code=''):
    """Return (and cache, per search/code) the pipelines registered for
    `search_type` within `project_code`; [] when none exist."""
    # make sure a search type was actually given
    assert search_type
    search_type_obj = SearchType.get(search_type)
    if not search_type_obj:
        return []
    search_type = search_type_obj.get_base_key()

    cache_key = "%s|%s" % (search_type, project_code)

    # commenting out until we have a full implementation of
    # project pipelines
    #search = Search("config/pipeline")
    #if search_type:
    #    search.add_filter("search_type", search_type)
    #search.add_project_filter(project_code)
    #pipelines = cls.get_by_search(search, cache_key, is_multi=True)

    search = Search("sthpw/pipeline")
    if search_type:
        search.add_filter("search_type", search_type)
    search.add_project_filter(project_code)
    pipelines = cls.get_by_search(search, cache_key, is_multi=True)
    if not pipelines:
        return []

    # prime the per-code cache as well
    for pipe in pipelines:
        cls.cache_sobject('sthpw/pipeline|%s' % pipe.get_code(), pipe)

    return pipelines
def has_table(my, search_type): if isinstance(search_type, basestring): search_type = SearchType.get(search_type) # in search type database == project project_code = search_type.get_project_code() # get the db_resource for this project db_resource = my.get_project_db_resource() # get the table table = search_type.get_table() if not table: return False try: # looking up a database's tables other than the current one sql = DbContainer.get(db_resource) tables = sql.get_tables() has_table = table in tables except Exception, e: print "WARNING: in Project.has_table(): table [%s] not found" % table print "Message: ", e has_table = False
def get_child_codes(self, parent_collection_code, search_type):
    ''' All of the children's codes down the relationship tree of the
    collection will be returned. '''
    from pyasm.biz import Project

    project = Project.get()
    sql = project.get_sql()
    impl = project.get_database_impl()

    parts = search_type.split("/")
    collection_type = "%s/%s_in_%s" % (parts[0], parts[1], parts[1])

    # Check if connection between asset and asset_in_asset is in place
    if collection_type not in SearchType.get_related_types(search_type):
        return []

    # the db impl generates a CTE statement that walks the collection tree
    stmt = impl.get_child_codes_cte(collection_type, search_type, parent_collection_code)
    results = sql.do_query(stmt)

    # each row holds the code fragments of one child
    return ["".join(row) for row in results]
def add(command, kwargs, queue_type, priority, description, message_code=None):
    """Insert and return a pending sthpw/queue entry for `command`.

    `kwargs` is JSON-encoded into the data column; queue_type defaults to
    "default" and priority to 9999 (lowest) when not given.
    """
    queue = SearchType.create("sthpw/queue")
    queue.set_value("project_code", Project.get_project_code())
    #queue.set_sobject_value(sobject)

    queue.set_value("queue", queue_type or "default")
    queue.set_value("state", "pending")
    queue.set_value("login", Environment.get_user_name())
    queue.set_value("command", command)
    queue.set_value("data", jsondumps(kwargs))

    if message_code:
        queue.set_value("message_code", message_code)

    queue.set_value("priority", priority or 9999)

    if description:
        queue.set_value("description", description)

    queue.set_user()
    queue.commit()
    return queue
def get_columns(my, required_only=False):
    """Return the column names for my.search_type.

    With required_only=True, only the NOT NULL columns are returned
    (falling back to all columns when none are required). Returns [] for
    virtual search types and for missing tables.
    """
    if my.search_type == 'sthpw/virtual':
        return []

    search_type_obj = SearchType.get(my.search_type)
    table = search_type_obj.get_table()

    from pyasm.biz import Project
    db_resource = Project.get_db_resource_by_search_type(my.search_type)
    database_name = db_resource.get_database()
    db = DbContainer.get(db_resource)

    # the table may not exist
    try:
        all_columns = db.get_columns(table)
        if required_only:
            nullables = db.get_column_nullables(table)
            required = [col for col in all_columns if not nullables.get(col)]
            # if there are no required columns, fall back to all of them
            columns = required or all_columns
        else:
            columns = all_columns
    except SqlException:
        Environment.add_warning('missing table', 'Table [%s] does not exist in database [%s]' % (table, database_name))
        return []

    return columns
def execute(my):
    """Create or update the HelpWdg widget_config entry for kwargs['view'],
    wrapping kwargs['content'] in the expected html/CDATA envelope."""
    view = my.kwargs.get("view")
    content = my.kwargs.get("content")

    # look for an existing help config for this view
    search = Search("config/widget_config")
    search.add_filter("category", "HelpWdg")
    search.add_filter("view", view)
    config = search.get_sobject()
    if not config:
        config = SearchType.create("config/widget_config")
        config.set_value("category", "HelpWdg")
        config.set_value("view", view)

    config_xml = '''
<config>
<%s>
<html><div><![CDATA[
%s
]]></div></html>
</%s>
</config>
''' % (view, content, view)

    config.set_value("config", config_xml)
    config.commit()
def setUp(my):
    """Prepare the batch environment, create the sample3d and unittest
    test projects, and force the unittest project to use the Prod
    dir/file naming classes (saving the originals for tearDown)."""
    # start batch environment
    Batch()
    from pyasm.web.web_init import WebInit
    WebInit().execute()

    my.sample3d_env = Sample3dEnvironment(project_code='sample3d')
    my.sample3d_env.create()

    my.test_env = UnittestEnvironment()
    my.test_env.create()

    # set up the proper project_type, with the use the ProdDirNaming and ProdFileNaming
    search = Search('sthpw/project')
    search.add_filter('code', 'unittest')
    my.sobj = search.get_sobject()

    # store the original setting in your database
    # NOTE(review): my.sobj is dereferenced here BEFORE the `if my.sobj:`
    # check below -- if the unittest project row is missing these
    # get_value calls raise and the else branch can never run. Confirm
    # the intended ordering.
    my.original_proj_type_dict = {'dir_naming_cls': my.sobj.get_value('dir_naming_cls'),
            'file_naming_cls': my.sobj.get_value('file_naming_cls') }

    #my.transaction = Transaction.get(create=True)
    if my.sobj:
        my.sobj.set_value('dir_naming_cls', 'pyasm.prod.biz.ProdDirNaming')
        my.sobj.set_value('file_naming_cls', 'pyasm.prod.biz.ProdFileNaming')
        my.sobj.commit()
    else:
        # fallback: register a project_type row carrying the naming classes
        my.sobj = SearchType.create('sthpw/project_type')
        my.sobj.set_value('dir_naming_cls', 'pyasm.prod.biz.ProdDirNaming')
        my.sobj.set_value('file_naming_cls', 'pyasm.prod.biz.ProdFileNaming')
        my.sobj.set_value('code', 'unittest')
        my.sobj.commit()
def postprocess(self):
    """Maintain the related construction/login_in_trade row for this
    sobject: set its trade_code from the submitted value, or delete the
    relationship when the value is empty."""
    # NOTE(review): the options are read here and then immediately
    # overwritten with hard-coded values below, so the configured
    # search_type/column are ignored -- confirm whether this hard-coding
    # is intentional.
    search_type = self.get_option("search_type")
    column = self.get_option("column")

    search_type = "construction/login_in_trade"
    column = "trade_code"

    value = self.get_value(self.name)
    sobject = self.sobject

    # find the related row keyed to this parent, or create one
    search = Search(search_type)
    search.add_relationship_filter(sobject)
    related = search.get_sobject()
    if not related:
        related = SearchType.create(search_type)
        related.set_parent(sobject)

    # empty value means the relationship should be removed entirely
    if not value:
        related.delete()
    else:
        related.set_value(column, value)
        related.commit()
def _test_task(my):
    """A timestamp written without a timezone must round-trip unchanged
    through commit and re-query (stored value assumed GMT)."""
    project = Project.get()

    # create a new task parented to the project
    task = SearchType.create("sthpw/task")
    task.set_parent(project)
    task.set_value("code", "XXX001")
    task.set_value("process", "unittest")
    task.set_value("description", "unittest")

    # set a time with no timezone. A timestamp with no timezone should
    # assume GMT.
    test_time = '2011-11-11 00:00:00'
    task.set_value("timestamp", test_time)

    # the in-memory value must be untouched
    my.assertEquals(test_time, task.get_value("timestamp"))

    task.commit()

    # read the task back from the database
    search = Search("sthpw/task")
    search.add_filter("code", "XXX001")
    task = search.get_sobject()

    # still unchanged: the stored value is assumed to be in GMT
    my.assertEquals(test_time, task.get_value("timestamp"))

    task.delete()
def _test_time(my):
    ''' test timezone related behavior: naive timestamps must round-trip
    unchanged through set_value/commit while the database stores the
    GMT-converted value.'''
    sobject = SearchType.create('sthpw/task')
    sobject.set_value('project_code', 'unittest')
    sobject.set_value('bid_start_date', '2014-11-11 05:00:00')
    time = sobject.get_value('bid_start_date')
    my.assertEquals(time, '2014-11-11 05:00:00')

    sobject.commit()
    # still unchanged after commit
    time = sobject.get_value('bid_start_date')
    my.assertEquals(time, '2014-11-11 05:00:00')

    from pyasm.search import DbContainer
    sql = DbContainer.get('sthpw')
    db_value = sql.do_query( 'SELECT bid_start_date from task where id = %s' % sobject.get_id())

    # 2014-11-11 00:00:00 is actually written to the database
    my.assertEquals(db_value[0][0].strftime('%Y-%m-%d %H:%M:%S %Z'), '2014-11-11 00:00:00 ')

    # an sType specified without a project but with an id could be a common human error
    # but it should handle that fine
    obj1 = Search.eval( '@SOBJECT(unittest/person?project=unittest["id", "%s"])' % sobject.get_id(), single=True)
    obj2 = Search.eval('@SOBJECT(unittest/person?id=2["id", "%s"])' % sobject.get_id(), single=True)
    obj3 = Search.eval('@SOBJECT(sthpw/task?id=2["id", "%s"])' % sobject.get_id(), single=True)
    task = Search.eval('@SOBJECT(sthpw/task["id", "%s"])' % sobject.get_id(), single=True)

    # EST and GMT diff is 5 hours
    my.assertEquals(task.get_value('bid_start_date'), '2014-11-11 05:00:00')
def get_text_new_context_menu(self):
    """Build the right-click context menu for a new text line.

    Currently holds a single debug "Update line" action that outlines the
    two previous and two next .spt_line elements in red and alerts the
    content of the nearest previous line.
    """
    search_type_obj = SearchType.get(self.search_type)
    title = search_type_obj.get_title()  # NOTE: currently unused here

    menu = Menu(width=180)
    menu.set_allow_icons(False)

    menu_item = MenuItem(type='title', label='Actions')
    menu.add(menu_item)

    menu_item = MenuItem(type='action', label="Update line")
    menu.add(menu_item)
    menu_item.add_behavior({
        'type': 'click_up',
        'cbjs_action': '''
        var activator = spt.smenu.get_activator(bvr);
        var line_el = activator.getParent(".spt_new_line");

        var prev_line_el = line_el.getPrevious(".spt_line");
        prev_line_el.setStyle("border", "solid 1px red");
        var content = prev_line_el.getElement(".spt_line_content");
        alert(content.innerHTML);
        var prev_line_el = prev_line_el.getPrevious(".spt_line");
        prev_line_el.setStyle("border", "solid 1px red");

        var next_line_el = line_el.getNext(".spt_line");
        next_line_el.setStyle("border", "solid 1px red");
        var next_line_el = next_line_el.getNext(".spt_line");
        next_line_el.setStyle("border", "solid 1px red");
        '''
    })

    return menu
def execute(self):
    """Write this element's submitted value onto self.sobject.

    An 'expression' option takes over the whole update; a 'parent_key'
    option of 'true' stores the value as a parent relationship instead of
    a plain column write.
    """
    column = self.get_option("column") or self.name

    # get the value
    # NOTE: not sure why this was "column". The value will come
    # through with the name of the element. The "column" option
    # tells the action which column to set the value to
    #value = self.get_value(column)
    value = self.get_value(self.name)

    # check if there is an expression on the update
    expr = self.get_option("expression")

    # check for parent action to save search_type and search_id or
    # parent_code etc separately (usually already handled in EditCmd)
    parent_key_action = self.get_option('parent_key') == 'true'

    if expr:
        Search.eval(expr, self.sobject, vars={'VALUE': value})
    else:
        search_type = self.sobject.get_search_type()
        col_type = SearchType.get_tactic_type(search_type, column)
        value = self.convert_value(col_type, value)

        if value == None:
            pass
        elif parent_key_action:
            self.sobject.add_relationship(value)
        else:
            self.sobject.set_value(column, value)

    if self.commit_flag == True:
        self.sobject.commit()
def _test_approval(self):
    """An approval node must create a task when reached and, once that
    task is approved, push 'complete' through the downstream processes."""
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
    <process type="action" name="a"/>
    <process type="approval" name="b"/>
    <process type="action" name="c"/>
    <connect from="a" to="b"/>
    <connect from="b" to="c"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # no approval task exists before the pipeline runs
    tasks = Task.get_by_sobject(sobject, process="b")
    self.assertEquals(0, len(tasks))

    # run the pipeline from process "a"
    output = {"pipeline": pipeline, "sobject": sobject, "process": "a"}
    Trigger.call(self, "process|pending", output)

    # the approval node must now have created exactly one task
    tasks = Task.get_by_sobject(sobject, process="b")
    self.assertEquals(1, len(tasks))
    task = tasks[0]
    self.assertEquals("b", task.get("process"))

    # approve the task: both b and the downstream c should complete
    task.set_value("status", "approved")
    task.commit()

    self.assertEquals("complete", sobject.get_value("b"))
    self.assertEquals("complete", sobject.get_value("c"))
def get_sobjects(my, group_names):
    """Return the project's search-type sobjects, prefixed with a virtual
    "ALL PROJECTS" entry, with one boolean column ("_<group>") per group
    indicating whether that group has a search_type access rule for the
    sobject's code."""
    # get the project sobjects
    sobjects = Project.get().get_search_types()

    # synthetic "all projects" row, inserted first
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("title", "ALL PROJECTS")
    sobject.set_value("_extra_data", {"is_all": True})
    sobject.set_value("id", 1)
    sobject.set_value("code", "*")
    sobjects.insert(0, sobject)

    # process all of the groups and find out which sobjects
    # NOTE(review): security is obtained but never used here
    security = Environment.get_security()

    # cache each group's parsed access_rules xml across sobjects
    rules_dict = {}
    for sobject in sobjects:
        for group_name in group_names:
            access_rules = rules_dict.get(group_name)
            if access_rules == None:
                #!!!!!!
                #group = LoginGroup.get_by_group_name(group_name)
                # NOTE(review): groups come from my.servers instead of a
                # LoginGroup lookup -- presumably pre-fetched; confirm
                group = my.servers.get(group_name)
                access_rules = group.get_xml_value("access_rules")
                rules_dict[group_name] = access_rules

            # a matching rule node means the group covers this search type
            node = access_rules.get_node(
                "rules/rule[@group='search_type' and @code='%s']"
                % sobject.get_code())
            if node is not None:
                sobject.set_value("_%s" % group_name, True)
            else:
                sobject.set_value("_%s" % group_name, False)

    return sobjects
def set_sobjects(self, sobjects):
    """Cache the sobject list and derive the database / search-type state
    (db_resource, search_type, search_type_obj, table) from the first one."""
    from pyasm.search import Search, SObject, Insert, SearchType
    from pyasm.biz import Project

    self.sobjects = sobjects
    lead = sobjects[0]

    # resolve the database resource through the sobject's project
    code = lead.get_project_code()
    proj = Project.get_by_code(code)
    if not proj:
        raise Exception(
            "SObject [%s] has a project_code [%s] that does not exist"
            % (lead.get_search_key(), code))
    self.db_resource = proj.get_project_db_resource()

    # derive the search-type info and backing table
    self.search_type = lead.get_base_search_type()
    self.search_type_obj = SearchType.get(self.search_type)
    self.table = self.search_type_obj.get_table()
def create(pipeline_name, desc, pipe_search_type):
    '''Create a pipeline with the given name if it does not already exist.
    NOTE(review): despite the original claim of "updating", an existing
    pipeline is returned unchanged.'''
    sobject = Pipeline.get_by_name(pipeline_name)
    if sobject == None:
        #sobject = Pipeline( Pipeline.SEARCH_TYPE )
        sobject = SearchType.create(Pipeline.SEARCH_TYPE)
    else:
        # already exists: return as-is without modifying it
        return sobject

    # start with an empty <pipeline/> document
    xml = Xml()
    xml.create_doc('pipeline')
    root = xml.get_root_node()
    #Xml.set_attribute(root, 'type', type)

    sobject.set_value("pipeline", xml.get_xml())
    # unquoted so the database default-timestamp expression is evaluated
    # server-side rather than stored as a literal string
    sobject.set_value('timestamp', Sql.get_default_timestamp_now(),
            quoted=False)
    sobject.set_value('code', pipeline_name)
    sobject.set_value('search_type', pipe_search_type)
    sobject.set_value('description', desc)
    sobject.commit()

    return sobject
def test_all(self): authenticate = ADAuthenticate() # put in a valid user login_name = 'supervisor' password = '******' exists = authenticate.verify(login_name, password) self.assertEquals(exists, True) login = SearchType.create("sthpw/login") login.set_value("login", login_name) authenticate.add_user_info(login, password) # check the user data display_name = authenticate.get_user_data("display_name") self.assertEquals("Smith, Joe", display_name) license_type = authenticate.get_user_data("license_type") #self.assertEquals("user", license_type) # check the login sobject license_type = login.get_value("license_type")
def get_display(my):
    """Return the element value formatted for display; timestamp values
    are parsed and shifted to GMT before being stringified."""
    sobject = my.get_current_sobject()
    name = my.get_name()
    value = my.get_value()
    # without an sobject there is no schema to query: assume plain text
    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(),
                name)
    else:
        data_type = 'text'

    if data_type == "timestamp" or my.name == "timestamp":
        if value == 'now':
            # 'now' is a fill-in-at-commit placeholder: display blank
            value = ''
        elif value:
            date = parser.parse(value)
            value = SPTDate.add_gmt_timezone(date)
            value = str(value)
        else:
            value = ''
    return value
def execute(self):
    """Add the database column(s) backing a custom attribute.

    A "Date Range" attribute gets two columns (<name>_start_date and
    <name>_end_date); any other non-empty attribute type gets a single
    column named after the attribute.
    """
    search_type_obj = SearchType.get(self.search_type)

    db_resource = Project.get_db_resource_by_search_type(self.search_type)
    sql = DbContainer.get(db_resource)
    impl = sql.get_database_impl()

    data_type = self.get_data_type(self.search_type, self.attr_type)

    # if there is no type, then no column is created for widget_config
    if self.attr_type == "Date Range":
        column1 = "%s_start_date" % self.attr_name
        column2 = "%s_end_date" % self.attr_name
        self._add_column(column1, data_type)
        self._add_column(column2, data_type)
    elif self.attr_type != "":
        # BUG FIX: was `elif type != "":` which compared the *builtin*
        # `type` (always truthy), so the empty-type guard never applied
        self._add_column(self.attr_name, data_type)

    self.add_description("Added attribute '%s' of type '%s'"
            % (self.attr_name, self.attr_type))
def get_display(self, widget):
    """run through the full web app pipeline"""
    if widget == None:
        raise WebAppException("No top level widget defined")

    # add to the access log
    # FIXME: this does not get committed if there is an exception.  The
    # transaction will back out.
    # NOTE(review): access_log_flag is hard-coded False, so both
    # access-log branches below are currently dead code
    access_log_flag = False
    access_log = None
    if access_log_flag:
        access_log = SearchType.create("sthpw/access_log")
        access_log.set_value("url", "www.yahoo.com")
        # unquoted so the DB timestamp expression is evaluated server-side
        access_log.set_value("start_time", Sql.get_timestamp_now(),
                quoted=False)
        access_log.commit()
    start = time.time()

    # do a security check on the widget
    # DEPRECATED
    widget.check_security()

    # draw all of the widgets
    widget = widget.get_display()
    if widget:
        Widget.get_display(widget)

    if access_log_flag:
        access_log.set_value("end_time", Sql.get_timestamp_now(),
                quoted=False)
        duration = time.time() - start
        # truncate to millisecond precision
        duration = float(int(duration * 1000)) / 1000
        access_log.set_value("duration", str(duration))
        access_log.commit()
def _test_multi_input_reject(self):
    """Rejecting an approval with multiple inputs should set 'revise'
    only on the processes listed in reject_process."""
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    code = "test%s" % Common.generate_alphanum_key()
    sobject.set_value("code", code)
    # three completed inputs feeding a pending approval
    sobject.set_value("a1", "complete")
    sobject.set_value("a2", "complete")
    sobject.set_value("a3", "complete")
    sobject.set_value("b", "pending")

    # simple condition
    pipeline_xml = '''
    <pipeline>
        <process type="action" name="a1"/>
        <process type="action" name="a2"/>
        <process type="action" name="a3"/>
        <process type="approval" name="b"/>
        <connect from="a1" to="b"/>
        <connect from="a2" to="b"/>
        <connect from="a3" to="b"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    # Run the pipeline: reject "b" back to a1 and a3 only
    process = "b"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
        "reject_process": ['a1', 'a3']
    }
    Trigger.call(self, "process|reject", output)

    # only the targeted processes go back to revise; a2 stays complete
    self.assertEquals("revise", sobject.get_value("a1"))
    self.assertEquals("complete", sobject.get_value("a2"))
    self.assertEquals("revise", sobject.get_value("a3"))
def execute(my): keys = ["#:", "msgid", "msgstr"] code_line = "" msgid = "" msgstr = "" file = open(my.path, 'r') for line in file.readlines(): line = line.rstrip() if line.startswith("#:") and line != "#:": tmp, code_line = line.split(" ", 1) continue elif line.startswith("msgid"): tmp, msgid = line.split("msgid", 1) msgid = msgid.lstrip(' "') msgid = msgid.rstrip('"') if not msgid: continue # look up the string first search = Search("sthpw/translation") search.add_filter("msgid", msgid) search.add_filter("language", my.language) translation = search.get_sobject() if not translation: print "New: ", msgid translation = SearchType.create("sthpw/translation") translation.set_value("msgid", msgid) translation.set_value("language", my.language) code_line = code_line.replace('\\', '/') translation.set_value("line", code_line) translation.commit()
def _test_multi_task(self):
    """Two tasks on the same process: the pipeline only advances once
    *all* tasks of that process are complete."""
    # create a dummy sobject
    sobject = SearchType.create("unittest/person")

    pipeline_xml = '''
    <pipeline>
        <process name="a"/>
        <process type="action" name="b"/>
        <connect from="a" to="b"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)
    sobject.set_value("pipeline_code", pipeline.get_code())
    sobject.commit()

    # record the completing process name on the sobject so the assertions
    # below can tell which process last fired
    for process_name, process in processes.items():
        process.set_json_value("workflow", {
            'on_complete': '''
            sobject.set_value('name_first', '%s')
            ''' % process_name,
        })
        process.commit()

    task = Task.create(sobject, process="a", description="Test Task")
    task2 = Task.create(sobject, process="a", description="Test Task 2")

    # completing only one of the two tasks must NOT advance to "b"
    task.set_value("status", "complete")
    task.commit()
    self.assertEquals(False, "b" == sobject.get_value("name_first"))

    # completing the second task advances the pipeline to "b"
    task2.set_value("status", "complete")
    task2.commit()
    self.assertEquals(True, "b" == sobject.get_value("name_first"))
def postprocess(my):
    """Create a "reference" connection from each selected source sobject
    (submitted via the select_key form values) to my.sobject."""
    web = WebContainer.get_web()
    selected = web.get_form_values("select_key")
    # nothing selected (or an empty submission): nothing to connect
    if not selected or selected == ['']:
        return

    target = my.sobject
    project_code = Project.get_project_code()

    for search_key in selected:
        source = Search.get_by_search_key(search_key)
        if not source:
            continue

        link = SearchType.create("sthpw/connection")
        link.set_value("src_search_type", source.get_search_type())
        link.set_value("dst_search_type", target.get_search_type())
        link.set_value("src_search_id", source.get_id())
        link.set_value("dst_search_id", target.get_id())
        link.set_value("context", "reference")
        link.set_value("project_code", project_code)
        link.commit()
def execute(self):
    """Create or update a config/ingest_rule entry from self.kwargs."""
    rule_code = self.kwargs.get("rule_code")

    if rule_code:
        # a code was supplied: fetch the existing rule to update
        lookup = Search("config/ingest_rule")
        lookup.add_filter("code", rule_code)
        rule = lookup.get_sobject()
    else:
        rule = SearchType.create("config/ingest_rule")

    # explicitly save the columns
    for column in ('base_dir', 'rule', 'title'):
        rule.set_value(column, self.kwargs.get(column))

    # not sure if we want to save the entire kwargs??
    rule.set_value("data", jsondumps(self.kwargs))

    rule.commit()
    return
def get_dirs_with_naming(search_key=None):
    """Return the naming-convention relative directory for each process of
    the first pipeline of the sobject identified by search_key."""
    from pyasm.biz import Snapshot
    from pyasm.biz import Project
    from pyasm.search import SearchType
    # NOTE(review): relies on an ambient `server` global -- confirm scope
    sobjects = server.server._get_sobjects(search_key)
    sobject = sobjects[0]
    # dummy file object, only needed to satisfy the naming evaluation
    file_object = SearchType.create('sthpw/file')

    from pyasm.biz import Pipeline
    pipelines = Pipeline.get_by_search_type(sobject.get_base_search_type())
    processes = pipelines[0].get_process_names()
    dir_naming = Project.get_dir_naming()
    dir_naming.set_sobject(sobject)
    dir_naming.set_file_object(file_object)
    dirs_list = []
    for process in processes:
        # uncommitted virtual snapshot (version -1) per process, used only
        # to evaluate the directory naming for that context
        snapshot = Snapshot.create(sobject, snapshot_type='file',
                context=process, commit=False, version=-1)
        dir_naming.set_snapshot(snapshot)
        dirs_list.append(dir_naming.get_dir('relative'))
    return dirs_list
def prod_render(my):
    """Build a render file name of the form
    <parent>[_<file_type>]_v###.####<ext>.

    NOT IMPLEMENTED because my.get_ext() can't handle frame ranges
    e.g. code XG001_BG1_anim_v001.png.0006
    """
    parts = []

    parent = my.sobject.get_parent()
    parent_code = parent.get_code()

    search_type = SearchType.get(my.sobject.get_value("search_type"))
    base_search_type = search_type.get_base_search_type()
    if base_search_type == 'prod/layer':
        # layer name is sufficient
        parent_code = parent.get_value('name')
    parts.append(parent_code)

    '''
    parent_snapshot_code = my.sobject.get_value("snapshot_code")
    if parent_snapshot_code:
        parent_snapshot = Snapshot.get_by_code(parent_snapshot_code)
        context = parent_snapshot.get_value("context")
        parts.append(context)
    '''

    # web/icon variants carry the file type in the name
    file_type = my.get_file_type()
    if file_type in ['web', 'icon']:
        parts.append(file_type)

    version = my.snapshot.get_value("version")
    if not version:
        version = 1
    # zero-padded to 3 digits, e.g. v001
    version = "v%0.3d" % int(version)

    ext = my.get_ext()

    filename = "_".join(parts)
    # "####" is the frame-number placeholder
    filename = "%s_%s.####%s" % (filename, version, ext)

    return filename
def _test_messaging(my):
    """Running a pipeline to completion should write a "complete"
    sthpw/message row per process, keyed <search_key>|<process>|status."""
    # create a dummy sobject
    city = SearchType.create("unittest/city")

    city_pipeline_xml = '''
    <pipeline>
        <process type="action" name="a"/>
        <process type="action" name="b"/>
        <process type="action" name="c"/>
        <connect from="a" to="b"/>
        <connect from="b" to="c"/>
    </pipeline>
    '''
    city_pipeline, city_processes = my.get_pipeline(city_pipeline_xml)
    city.set_value("pipeline_code", city_pipeline.get_code())
    city.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": city_pipeline,
        "sobject": city,
        "process": process
    }
    Trigger.call(my, "process|pending", output)

    # every process should have posted a "complete" status message
    for process in city_processes:
        key = "%s|%s|status" % (city.get_search_key(), process)
        search = Search("sthpw/message")
        search.add_filter("code", key)
        sobject = search.get_sobject()
        message = sobject.get_value("message")
        my.assertEquals("complete", message)
def is_new_group(self, prev_sobj, sobj):
    '''check if this task belong to a new parent '''
    # the very first row always starts a new group
    if not prev_sobj:
        return True

    # let the widget determine if it is new
    verdict = self.widget.is_new_group(prev_sobj, sobj)
    if verdict != None:
        return verdict

    # if it is None, fall back to comparing the grouping column values
    prev_value = prev_sobj.get_value(self.column)
    curr_value = sobj.get_value(self.column, no_exception=True)

    # Now check for timestamp values and if found group by each calendar
    # day (as default behavior), otherwise you get one grouping for each
    # row if the time part of the timestamp is not the same ...
    st = sobj.get_search_type()
    if SearchType.get_tactic_type(st, self.column) == 'timestamp':
        from dateutil import parser
        prev_date = parser.parse(prev_value)    # returns a datetime object
        curr_date = parser.parse(curr_value)    # returns a datetime object
        prev_value = "%s-%s-%s" % (prev_date.year,
                str(prev_date.month).zfill(2), str(prev_date.day).zfill(2))
        curr_value = "%s-%s-%s" % (curr_date.year,
                str(curr_date.month).zfill(2), str(curr_date.day).zfill(2))

    # a differing value means this row starts a new group
    return prev_value != curr_value
def get_test_report_wdg(my):
    """Build a test report widget: a search-type filter text box plus a
    table of prod/asset sobjects rendered with a hard-coded config."""
    widget = Widget()
    search_type_filter = TextWdg("search_type_filter")
    widget.add("Search Type: ")
    widget.add(search_type_filter)

    # look at the config file and generate one
    search_type = "prod/asset"
    search_type_obj = SearchType.get(search_type)
    config = WidgetConfigView.get_by_search_type(search_type_obj, "table")
    element_names = config.get_element_names()
    print "element: ", element_names

    # NOTE(review): the config view fetched above is immediately
    # discarded and replaced by this hard-coded one
    config = '''
    <config>
        <table>
            <element name="code"/>
            <element name="name"/>
            <element name="description"/>
        </table>
    </config>
    '''
    xml = Xml()
    xml.read_string(config)
    config = WidgetConfig(view="table", xml=xml)

    table = TableWdg(search_type)
    search = Search(search_type)
    sobjects = search.get_sobjects()
    table.set_sobjects(sobjects)
    widget.add(table)

    return widget
def execute(my):
    """Persist the local server prefix if it changed, resolve the project
    code, then create and commit a sthpw/sync_server share entry."""
    # save prefix
    prefix = my.get_value("local_prefix")
    my.server_prefix = Config.get_value("install", "server")
    if not prefix and not my.server_prefix:
        raise TacticException("Cannot have empty local server prefix")

    # only rewrite the config when the prefix actually changed
    if prefix and prefix != my.server_prefix:
        Config.set_value("install", "server", prefix)
        Config.save_config()

    # fall back to the current project when none was submitted
    my.project_code = my.get_value("project") or Project.get_project_code()

    # create a share
    share = SearchType.create("sthpw/sync_server")
    my.handle_info(share)
    my.handle_sync_mode(share)
    share.commit()
def _test_js(self):
    """A process whose workflow defines cbjs_action should execute the
    javascript hook when the process goes pending."""
    # create a dummy sobject
    sobject = SearchType.create("sthpw/virtual")
    sobject.set_value("code", "test")

    # simple condition
    pipeline_xml = '''
    <pipeline>
        <process type="action" name="a"/>
    </pipeline>
    '''
    pipeline, processes = self.get_pipeline(pipeline_xml)

    # attach a javascript hook to process "a"
    process = processes.get("a")
    process.set_json_value("workflow", {
        'cbjs_action': '''
        console.log("This is javascript");
        console.log(input);
        return false
        '''
    })
    process.commit()

    # Run the pipeline
    process = "a"
    output = {
        "pipeline": pipeline,
        "sobject": sobject,
        "process": process,
        "status": "pending"
    }

    import time
    start = time.time()

    Trigger.call(self, "process|pending", output)
def get_display(my):
    """Return the element value formatted for display; timestamp/time
    values are converted to the server's local timezone so the display
    matches what is stored in the database."""
    sobject = my.get_current_sobject()
    name = my.get_name()
    value = my.get_value()
    # without an sobject there is no schema to query: assume plain text
    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(),
                name)
    else:
        data_type = 'text'

    if data_type in ["timestamp", "time"] or my.name == "timestamp":
        if value == 'now':
            # 'now' is a fill-in-at-commit placeholder: display blank
            value = ''
        elif value:
            # NOTE(review): `date` is parsed but unused on this path
            date = parser.parse(value)
            # we want to match what's in the db which is server local
            # timezone
            if not SPTDate.has_timezone(value):
                value = SPTDate.convert_to_local(value)
            #value = SPTDate.add_gmt_timezone(date)
            value = str(value)
        else:
            value = ''
    return value
def execute(self):
    """Read a dump of transaction XML blocks from the upload directory and
    recreate sthpw/transaction_log entries.

    Blank lines in the dump file delimit individual transactions.
    """
    import os
    path = self.kwargs.get("path")
    path = path.replace("\\", "/")
    basename = os.path.basename(path)

    # the file is expected to already be in the upload directory
    upload_dir = Environment.get_upload_dir()
    path = "%s/%s" % (upload_dir, basename)

    # split the file into per-transaction line groups on blank lines
    transactions = []
    xml = []
    f = open(path, 'r')
    try:
        for line in f:
            if line == '\n':
                transactions.append(xml)
                xml = []
                continue
            xml.append(line.strip())
        transactions.append(xml)
    finally:
        # BUG FIX: file handle was never closed
        f.close()

    for transaction in transactions:
        value = "\n".join(transaction)

        # we have the transaction
        # recreate the log
        # BUG FIX: was "transacton_log = ..." (typo) followed by
        # transaction_log.set_value(...) -- a guaranteed NameError; also
        # the joined string `value` was computed but the raw list stored
        transaction_log = SearchType.create("sthpw/transaction_log")
        transaction_log.set_value("transaction", value)
        # NOTE(review): only the first transaction is processed and the
        # log is never committed -- preserved from the original, which
        # looks unfinished; confirm intent
        break
def _test_create(self):
    """Snapshot.create should stamp version, search_type and the parent's
    code/id onto the new snapshot, and resolve back to its parent."""
    search = Search("unittest/person")
    persons = search.get_sobjects()
    person = persons[0]

    snapshot_type = "file"
    snapshot = Snapshot.create(person, context="publish",
            snapshot_type=snapshot_type)

    # first snapshot in this context starts at version 1
    version = snapshot.get_value("version")
    self.assertEquals(1, version)

    search_type = snapshot.get_value("search_type")
    self.assertEquals(search_type, person.get_search_type())
    search_code = snapshot.get_value("search_code")
    self.assertEquals(search_code, person.get_value("code"))

    # also check search_id
    if SearchType.column_exists("sthpw/snapshot", "search_id"):
        # NOTE(review): variable name reuse -- this actually holds the
        # search_id, not a code
        search_code = snapshot.get_value("search_id")
        self.assertEquals(search_code, person.get_value("id"))

    # the snapshot should resolve back to its parent sobject
    test_person = snapshot.get_sobject()
    self.assertEquals(test_person.get_code(), person.get_code())
def get_columns(self, required_only=False):
    """Return the column names of this search type's backing table.

    With required_only, only non-nullable columns are returned, falling
    back to all columns when none are required.  Returns [] for virtual
    search types or when the table does not exist.
    """
    # virtual search types have no backing table
    if self.search_type == 'sthpw/virtual':
        return []

    table = SearchType.get(self.search_type).get_table()

    from pyasm.biz import Project
    db_resource = Project.get_db_resource_by_search_type(self.search_type)
    database_name = db_resource.get_database()
    db = DbContainer.get(db_resource)

    # table may not exist
    try:
        all_columns = db.get_columns(table)
        if not required_only:
            return all_columns

        nullables = db.get_column_nullables(table)
        required = [c for c in all_columns if not nullables.get(c)]
        # if there are no required columns, fall back to every column
        return required if required else all_columns
    except SqlException:
        Environment.add_warning(
            'missing table',
            'Table [%s] does not exist in database [%s]'
            % (table, database_name))
        return []
def _test_time():
    """Committing a note twice should produce a sthpw/change_timestamp row
    whose timestamp is close to "now"."""
    from pyasm.search import SearchType
    sobj = SearchType.create('sthpw/note')
    sobj.set_value('process', 'TEST')
    sobj.set_value('note', '123')
    sobj.commit()

    # second commit updates the note and should touch change_timestamp
    sobj.set_value('note', 'new note')
    sobj.commit()

    # check change_timestamp
    change_t = Search.eval("@SOBJECT(sthpw/change_timestamp['search_type','sthpw/note']['search_code','%s'])" % sobj.get_code(), single=True)
    if change_t:
        change_t_timestamp = change_t.get('timestamp')
        change_t_timestamp = parser.parse(change_t_timestamp)

        from pyasm.common import SPTDate
        now = SPTDate.now()
        diff = now - change_t_timestamp
        # should be roughly the same minute, not hours apart
        print "Change timestamp diff is ", diff.seconds