def preprocess(my): # protect against the case where there is a single sobject that # is an insert (often seen in "insert") if my.is_preprocessed == True: return skip = False if len(my.sobjects) == 1: if not my.sobjects[0].has_value("search_type"): skip = True if not skip: search_types = SObject.get_values(my.sobjects, 'search_type', unique=True) try: search_codes = SObject.get_values(my.sobjects, 'search_code', unique=True) search_ids = None except Exception, e: print "WARNING: ", e search_ids = SObject.get_values(my.sobjects, 'search_id', unique=True) search_codes = None
def preprocess(self): search_type_list = SObject.get_values(self.sobjects, 'search_type', unique=True) search_id_dict = {} self.ref_sobject_cache = {} # initialize the search_id_dict for type in search_type_list: search_id_dict[type] = [] # cache it first for sobject in self.sobjects: search_type = sobject.get_value('search_type') search_id_list = search_id_dict.get(search_type) search_id_list.append(sobject.get_value('search_id')) from pyasm.search import SearchException for key, value in search_id_dict.items(): try: ref_sobjects = Search.get_by_id(key, value) sobj_dict = SObject.get_dict(ref_sobjects) except SearchException, e: print "WARNING: search_type [%s] with id [%s] does not exist" % ( key, value) print str(e) sobj_dict = {} # store a dict of dict with the search_type as key self.ref_sobject_cache[key] = sobj_dict
def preprocess(my): search_type_list = SObject.get_values(my.sobjects, 'search_type', unique=True) search_id_dict = {} my.ref_sobject_cache = {} # initialize the search_id_dict for type in search_type_list: search_id_dict[type] = [] # cache it first for sobject in my.sobjects: search_type = sobject.get_value('search_type') search_id_list = search_id_dict.get(search_type) search_id_list.append(sobject.get_value('search_id')) from pyasm.search import SearchException for key, value in search_id_dict.items(): try: ref_sobjects = Search.get_by_id(key, value) sobj_dict = SObject.get_dict(ref_sobjects) except SearchException, e: print "WARNING: search_type [%s] with id [%s] does not exist" % (key, value) print str(e) sobj_dict = {} # store a dict of dict with the search_type as key my.ref_sobject_cache[key] = sobj_dict
def get_display(my):
    '''Render the view selector for a link: a select listing every saved
    widget config view for my.search_type, with "table" prepended as the
    default choice.'''
    if my.refresh:
        widget = Widget()
    else:
        widget = DivWdg(id='link_view_select')
        widget.add_class("link_view_select")
        my.set_as_panel(widget)

    views = []
    if my.search_type:
        from pyasm.search import WidgetDbConfig
        search = Search(WidgetDbConfig.SEARCH_TYPE)
        search.add_filter("search_type", my.search_type)
        # internal link_search:/saved_search: views are not user-selectable
        search.add_regex_filter("view", "link_search:|saved_search:", op="NEQI")
        search.add_order_by('view')
        widget_dbs = search.get_sobjects()
        views = SObject.get_values(widget_dbs, 'view')

    labels = list(views)
    views.insert(0, 'table')
    labels.insert(0, 'table (Default)')

    st_select = SelectWdg('new_link_view', label='View: ')
    st_select.set_option('values', views)
    st_select.set_option('labels', labels)
    widget.add(st_select)

    return widget
def preprocess(self):
    '''Cache the shot instances (and their shots) for the assets shown.

    Populates:
      self.asset_instances - all matching prod/shot_instance sobjects
      self.instances       - dict: asset_code -> list of its instances
      self.shots           - dict: shot code -> prod/shot sobject
      self.shots_list      - list of the prod/shot sobjects
    '''
    self.is_preprocessed = True

    # get all of the instances
    search = Search("prod/shot_instance")

    # if not used in a TableWdg, only get the shot instances for one asset
    if not self.parent_wdg:
        search.add_filter('asset_code', self.get_current_sobject().get_code())
    search.add_order_by("shot_code")
    instances = search.get_sobjects()
    self.asset_instances = instances

    # group instances by asset_code (setdefault avoids shadowing builtin list)
    self.instances = {}
    for instance in instances:
        asset_code = instance.get_value("asset_code")
        self.instances.setdefault(asset_code, []).append(instance)

    search = Search("prod/shot")
    search.add_filters("code", [x.get_value('shot_code') for x in instances])
    shots = search.get_sobjects()
    self.shots = SObject.get_dict(shots, ["code"])
    self.shots_list = shots
def get_display(my):
    '''Wrap the formatted value in a div: numeric types float right,
    others float left; date/time values (except day-only columns) are
    converted to local time before formatting.'''
    top = DivWdg()

    value = my.get_value()
    widget_type = my.get_option("type")

    if widget_type in ['integer', 'float', 'timecode', 'currency']:
        my.justify = "right"
        top.add_style("float: right")
    elif widget_type in ['date', 'time']:
        name = my.get_name()
        # day-only columns stay as stored; others shift to local time
        if value and not SObject.is_day_column(name):
            value = SPTDate.convert_to_local(value)
            value = str(value)
    else:
        my.justify = "left"
        top.add_style("float: left")

    top.add_style("padding-right: 3px")
    top.add_style("min-height: 15px")

    fmt = my.get_option('format')
    top.add(my.get_format_value(value, fmt))

    return top
def preprocess(my):
    '''Cache the shot instances (and their shots) for the assets shown.

    Populates:
      my.asset_instances - all matching prod/shot_instance sobjects
      my.instances       - dict: asset_code -> list of its instances
      my.shots           - dict: shot code -> prod/shot sobject
      my.shots_list      - list of the prod/shot sobjects
    '''
    my.is_preprocessed = True

    # get all of the instances
    search = Search("prod/shot_instance")

    # if not used in a TableWdg, only get the shot instances for one asset
    if not my.parent_wdg:
        search.add_filter('asset_code', my.get_current_sobject().get_code())
    search.add_order_by("shot_code")
    instances = search.get_sobjects()
    my.asset_instances = instances

    # group instances by asset_code (setdefault avoids shadowing builtin list)
    my.instances = {}
    for instance in instances:
        asset_code = instance.get_value("asset_code")
        my.instances.setdefault(asset_code, []).append(instance)

    search = Search("prod/shot")
    search.add_filters("code", [x.get_value('shot_code') for x in instances])
    shots = search.get_sobjects()
    my.shots = SObject.get_dict(shots, ["code"])
    my.shots_list = shots
def get_display(my):
    '''Render the view selector for a link: a select listing every saved
    widget config view for my.search_type, with "table" prepended as the
    default choice.'''
    if my.refresh:
        widget = Widget()
    else:
        widget = DivWdg(id='link_view_select')
        widget.add_class("link_view_select")
        my.set_as_panel(widget)

    views = []
    if my.search_type:
        from pyasm.search import WidgetDbConfig
        search = Search(WidgetDbConfig.SEARCH_TYPE)
        search.add_filter("search_type", my.search_type)
        # internal link_search:/saved_search: views are not user-selectable
        search.add_regex_filter("view", "link_search:|saved_search:", op="NEQI")
        search.add_order_by('view')
        widget_dbs = search.get_sobjects()
        views = SObject.get_values(widget_dbs, 'view')

    labels = list(views)
    views.insert(0, 'table')
    labels.insert(0, 'table (Default)')

    st_select = SelectWdg('new_link_view', label='View: ')
    st_select.set_option('values', views)
    st_select.set_option('labels', labels)
    widget.add(st_select)

    return widget
def get_tasks(my, sobjects=None):
    '''Return the tasks relevant to the current user.

    @param sobjects: optional list of sobjects to restrict the tasks to.
        Default changed from the mutable [] to None — the body only ever
        truth-tests and reads it, so callers see identical behavior.
    @return: list of sthpw/task sobjects; [] when no status filter is set
    '''
    # get all of the relevant tasks to the user
    task_search = Search("sthpw/task")
    task_search.add_column("search_id", distinct=True)

    if sobjects:
        task_search.add_filter("search_type", sobjects[0].get_search_type())
        sobject_ids = SObject.get_values(sobjects, "id", unique=True)
        task_search.add_filters("search_id", sobject_ids)

    # only look at this project
    search_type = SearchType.get(my.search_type).get_full_key()
    task_search.add_filter("search_type", search_type)

    my.process_filter.alter_search(task_search)

    if isinstance(my.user_filter, UserFilterWdg):
        my.user_filter.alter_search(task_search)
    else:
        # no user filter widget: restrict to the current user's tasks
        user = Environment.get_user_name()
        task_search.add_filter('assigned', user)

    status_filters = my.task_status_filter.get_values()
    if not status_filters:
        return []
    task_search.add_filters("status", status_filters)

    tasks = task_search.get_sobjects()
    return tasks
def get_display(my):
    '''Populate the select with search types.

    ALL_BUT_STHPW mode (or no mode) lists every non-sthpw type plus a few
    core sthpw ones; CURRENT_PROJECT mode lists only types in the current
    project's namespace.  'CustomLayoutWdg' is always appended.
    '''
    #defining init is better than get_display() for this kind of SelectWdg
    search = Search(SearchType.SEARCH_TYPE)

    # identity test for None; 'core_filter' avoids shadowing builtin filter()
    if my.mode is None or my.mode == my.ALL_BUT_STHPW:
        # always add the login / login group search types
        core_filter = search.get_regex_filter(
            "search_type", "login|task|note|timecard|trigger|milestone", "EQ")
        no_sthpw_filter = search.get_regex_filter("search_type", "^(sthpw).*", "NEQ")
        search.add_where('%s or %s' % (core_filter, no_sthpw_filter))
    elif my.mode == my.CURRENT_PROJECT:
        project = Project.get()
        project_code = project.get_code()
        #project_type = project.get_project_type().get_type()
        project_type = project.get_value("type")
        search.add_where("\"namespace\" in ('%s','%s') " % (project_type, project_code))

    search.add_order_by("search_type")
    search_types = search.get_sobjects()

    values = SObject.get_values(search_types, 'search_type')
    labels = [x.get_label() for x in search_types]
    values.append('CustomLayoutWdg')
    labels.append('CustomLayoutWdg')
    my.set_option('values', values)
    my.set_option('labels', labels)
    #my.set_search_for_options(search, "search_type", "get_label()")
    my.add_empty_option(label='-- Select Search Type --')

    return super(SearchTypeSelectWdg, my).get_display()
def get_display(my):
    '''Populate the select with search types.

    ALL_BUT_STHPW mode (or no mode) lists every non-sthpw type plus a few
    core sthpw ones; CURRENT_PROJECT mode lists only types in the current
    project's namespace.  'CustomLayoutWdg' is always appended.
    '''
    #defining init is better than get_display() for this kind of SelectWdg
    search = Search(SearchType.SEARCH_TYPE)

    # identity test for None; 'core_filter' avoids shadowing builtin filter()
    if my.mode is None or my.mode == my.ALL_BUT_STHPW:
        # always add the login / login group search types
        core_filter = search.get_regex_filter(
            "search_type", "login|task|note|timecard|trigger|milestone", "EQ")
        no_sthpw_filter = search.get_regex_filter("search_type", "^(sthpw).*", "NEQ")
        search.add_where('%s or %s' % (core_filter, no_sthpw_filter))
    elif my.mode == my.CURRENT_PROJECT:
        project = Project.get()
        project_code = project.get_code()
        #project_type = project.get_project_type().get_type()
        project_type = project.get_value("type")
        search.add_where("\"namespace\" in ('%s','%s') " % (project_type, project_code))

    search.add_order_by("search_type")
    search_types = search.get_sobjects()

    values = SObject.get_values(search_types, 'search_type')
    labels = [x.get_label() for x in search_types]
    values.append('CustomLayoutWdg')
    labels.append('CustomLayoutWdg')
    my.set_option('values', values)
    my.set_option('labels', labels)
    #my.set_search_for_options(search, "search_type", "get_label()")
    my.add_empty_option(label='-- Select Search Type --')

    return super(SearchTypeSelectWdg, my).get_display()
def preprocess(my): '''determine if this is for EditWdg or EDIT ROW of a table''' # get the number of task pipelines needed for EditWdg, which is one # for the EDIT ROW , there could be more than 1 my.task_mapping = None from tactic.ui.panel import EditWdg if hasattr(my, 'parent_wdg') and isinstance(my.get_parent_wdg(), EditWdg): task = my.get_current_sobject() task_pipe_code = task.get_value('pipeline_code') # if the current task has no pipeline, then search for # any task pipeline if not task_pipe_code: # just use the default task_pipe_code = 'task' pipeline = Pipeline.get_by_code(task_pipe_code) if not pipeline: pipeline = Pipeline.get_by_code('task') my.task_pipelines = [pipeline] else: # get all of the pipelines for tasks search = Search('sthpw/pipeline') search.add_regex_filter('search_type', 'sthpw/task') my.task_pipelines = search.get_sobjects() # get all of the pipelines for the current search_type search_type = my.state.get("search_type") search = Search('sthpw/pipeline') if search_type: search.add_filter('search_type', search_type) my.sobject_pipelines = search.get_sobjects() # insert the default task pipeline if not overridden in the db default_task_found = False pipeline_codes = SObject.get_values(my.task_pipelines, 'code') if 'task' in pipeline_codes: default_task_found = True if not default_task_found: default_pipe = Pipeline.get_by_code('task') my.task_pipelines.append(default_pipe) my.task_mapping = {} # the following works for insert but on edit, it should read from pipeline_code attribute for pipeline in my.sobject_pipelines: processes = pipeline.get_process_names() for process in processes: attrs = pipeline.get_process_attrs(process) task_pipeline = attrs.get('task_pipeline') if task_pipeline: key = '%s|%s' % (pipeline.get_code(), process) my.task_mapping[key] = task_pipeline my.is_preprocess = True
def get_mail_users(my, column):
    '''Resolve the notification column (e.g. mail_to / mail_cc) into a set
    of recipients.

    Each newline-separated entry may be an expression (@.../{...}), a bare
    email address (contains '@'), or a login group code.  When the column
    is empty, recipients come from the notification's login groups.

    @return: set of Login sobjects and/or raw email-address strings
    '''
    # mail groups
    recipients = set()

    expr = my.notification.get_value(column, no_exception=True)
    if expr:
        # elevated permissions while evaluating user-authored expressions
        sudo = Sudo()

        # Introduce an environment that can be reflected
        env = {
            'sobject': my.sobject
        }
        #if expr.startswith("@"):
        #    logins = Search.eval(expr, list=True, env_sobjects=env)
        #else:
        parts = expr.split("\n")

        # go through each login and evaluate each
        logins = []
        for part in parts:
            if part.startswith("@") or part.startswith("{"):
                results = Search.eval(part, list=True, env_sobjects=env)
                # clear the container after each expression eval
                ExpressionParser.clear_cache()

                # these can just be login names, get the actual Logins
                if results:
                    if isinstance(results[0], basestring):
                        login_sobjs = Search.eval("@SOBJECT(sthpw/login['login','in','%s'])" %'|'.join(results), list=True)
                        login_list = SObject.get_values(login_sobjs, 'login')
                        for result in results:
                            # the original result could be an email address already
                            if result not in login_list:
                                logins.append(result)
                        if login_sobjs:
                            logins.extend( login_sobjs )
                    else:
                        # expression already returned sobjects
                        logins.extend(results)
            elif part.find("@") != -1:
                # this is just an email address
                logins.append( part )
            elif part:
                # this is a group
                group = LoginGroup.get_by_code(part)
                if group:
                    logins.extend( group.get_logins() )
        # drop elevated permissions
        del sudo
    else:
        # no explicit list: fall back to the notification's login groups
        notification_id = my.notification.get_id()
        logins = GroupNotification.get_logins_by_id(notification_id)

    for login in logins:
        recipients.add(login)
    return recipients
def get_display(my):
    '''Render the "[ projects ]" link with its hidden popup listing every
    project the current user may view, plus home/admin shortcuts.'''
    widget = Widget()
    span = SpanWdg('[ projects ]', css='hand')
    span.add_style('color','white')
    # clicking the label reveals the popup div below
    span.add_event('onclick',"spt.show_block('%s')" %my.WDG_ID)
    widget.add(span)

    # add the popup
    div = DivWdg(id=my.WDG_ID, css='popup_wdg')
    widget.add(div)
    div.add_style('width', '80px')
    div.add_style('display', 'none')

    title_div = DivWdg()
    div.add(title_div)
    title = FloatDivWdg(' ', width='60px')
    title.add_style('margin-right','2px')
    title_div.add_style('padding-bottom', '4px')
    title_div.add(title)
    title_div.add(CloseWdg(my.get_off_script(), is_absolute=False))
    div.add(HtmlElement.br())

    # all project codes except the internal sthpw/admin projects
    search = Search(Project)
    search.add_where("\"code\" not in ('sthpw','admin')")
    search.add_column('code')
    projects = search.get_sobjects()
    values = SObject.get_values(projects, 'code')

    web = WebContainer.get_web()
    root = web.get_site_root()
    security = Environment.get_security()
    for value in values:
        # only list projects the user can view
        if not security.check_access("project", value, "view"):
            continue
        script = "location.href='/%s/%s'"%(root, value)
        sub_div = DivWdg(HtmlElement.b(value), css='selection_item')
        sub_div.add_event('onclick', script)
        div.add(sub_div)

    div.add(HtmlElement.hr())

    if security.check_access("project", 'default', "view"):
        script = "location.href='/%s'" % root
        sub_div = DivWdg('home', css='selection_item')
        sub_div.add_event('onclick', script)
        div.add(sub_div)

    if security.check_access("project", "admin", "view"):
        script = "location.href='/%s/admin/'" %root
        sub_div = DivWdg('admin', css='selection_item')
        sub_div.add_event('onclick', script)
        div.add(sub_div)

    return widget
def build_cache_by_column(self, column):
    '''Build (once) a cache of self.sobjects keyed by the given column,
    stored in self.caches[column].

    @param column: column name to key the cache by
    @return: the new cache dict, or None when it was already built
    '''
    # do not build if it already exists ('in' replaces removed dict.has_key)
    if column in self.caches:
        return

    # build a search_key cache
    column_cache = SObject.get_dict(self.sobjects, key_cols=[column])
    self.caches[column] = column_cache
    return column_cache
def build_cache_by_column(my, column):
    '''Build (once) a cache of my.sobjects keyed by the given column,
    stored in my.caches[column].

    @param column: column name to key the cache by
    @return: the new cache dict, or None when it was already built
    '''
    # do not build if it already exists ('in' replaces removed dict.has_key)
    if column in my.caches:
        return

    # build a search_key cache
    column_cache = SObject.get_dict(my.sobjects, key_cols=[column])
    my.caches[column] = column_cache
    return column_cache
def get_display(my):
    '''Render a single cell value: wiki-convert strings, localize and
    format timestamps, and right-align numeric values in a div.

    @return: a string, or a Widget/DivWdg for widget and numeric values
    '''
    sobject = my.get_current_sobject()
    # an explicit 'column' kwarg overrides the widget's own name
    column = my.kwargs.get('column')
    if column:
        name = column
    else:
        name = my.get_name()
    value = my.get_value(name=name)
    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(), name)
    else:
        data_type = 'text'
    if type(value) in types.StringTypes:
        # expand wiki markup in plain strings
        wiki = WikiUtil()
        value = wiki.convert(value)
    if name == 'id' and value == -1:
        # -1 id marks an uncommitted sobject; show nothing
        value = ''
    elif data_type == "timestamp" or name == "timestamp":
        if value == 'now':
            value = ''
        elif value:
            # This date is assumed to be GMT
            date = parser.parse(value)
            # convert to local
            if not SObject.is_day_column(name):
                date = SPTDate.convert_to_local(date)
            try:
                # decode via the current locale; fall back to raw bytes
                encoding = locale.getlocale()[1]
                value = date.strftime("%b %d, %Y - %H:%M").decode(encoding)
            except:
                value = date.strftime("%b %d, %Y - %H:%M")
        else:
            value = ''
    else:
        if isinstance(value, Widget):
            return value
        elif not isinstance(value, basestring):
            # duck-type check: numeric values support + 1
            try:
                value + 1
            except TypeError:
                value = str(value)
            else:
                # numeric: right-align inside a div
                value_wdg = DivWdg()
                value_wdg.add_style("float: right")
                value_wdg.add_style("padding-right: 3px")
                value_wdg.add(str(value))
                return value_wdg
    return value
def get_display(my):
    '''Render a single cell value: wiki-convert strings, localize and
    format timestamps, and right-align numeric values in a div.

    @return: a string, or a Widget/DivWdg for widget and numeric values
    '''
    sobject = my.get_current_sobject()
    # an explicit 'column' kwarg overrides the widget's own name
    column = my.kwargs.get('column')
    if column:
        name = column
    else:
        name = my.get_name()
    value = my.get_value(name=name)
    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(), name)
    else:
        data_type = 'text'
    if type(value) in types.StringTypes:
        # expand wiki markup in plain strings
        wiki = WikiUtil()
        value = wiki.convert(value)
    if name == 'id' and value == -1:
        # -1 id marks an uncommitted sobject; show nothing
        value = ''
    elif data_type == "timestamp" or name == "timestamp":
        if value == 'now':
            value = ''
        elif value:
            # This date is assumed to be GMT
            date = parser.parse(value)
            # convert to local
            if not SObject.is_day_column(name):
                date = SPTDate.convert_to_local(date)
            try:
                # decode via the current locale; fall back to raw bytes
                encoding = locale.getlocale()[1]
                value = date.strftime("%b %d, %Y - %H:%M").decode(encoding)
            except:
                value = date.strftime("%b %d, %Y - %H:%M")
        else:
            value = ''
    else:
        if isinstance(value, Widget):
            return value
        elif not isinstance(value, basestring):
            # duck-type check: numeric values support + 1
            try:
                value + 1
            except TypeError:
                value = str(value)
            else:
                # numeric: right-align inside a div
                value_wdg = DivWdg()
                value_wdg.add_style("float: right")
                value_wdg.add_style("padding-right: 3px")
                value_wdg.add( str(value) )
                return value_wdg
    return value
def get_info(self):
    '''For each submission sobject, build a dict describing its target
    sobject.

    @return: list of dicts with keys 'info' (target display data) and
             'search_key' ("<search_type>:<search_id>")
    '''
    # check if the sobj type is the same
    search_types = SObject.get_values(self.sobjs, 'search_type', unique=True)
    search_ids = SObject.get_values(self.sobjs, 'search_id', unique=False)
    infos = []

    # this doesn't really work if the same asset is submitted multiple times
    if len(search_types) == 1 and len(search_ids) == len(self.sobjs):
        # fast path: one query fetches every referenced asset
        assets = []
        if search_types[0]:
            assets = Search.get_by_id(search_types[0], search_ids)
        asset_dict = SObject.get_dict(assets)
        # 'asset_id' avoids shadowing the builtin id()
        for asset_id in search_ids:
            asset = asset_dict.get(asset_id)
            aux_dict = {}
            aux_dict['info'] = SubmissionInfo._get_target_sobject_data(asset)
            aux_dict['search_key'] = '%s:%s' % (search_types[0], asset_id)
            infos.append(aux_dict)
    else:
        # TODO: this is a bit database intensive, mixed search_types not
        # recommended
        search_types = SObject.get_values(self.sobjs, 'search_type', unique=False)
        # enumerate replaces the xrange index loop
        for idx, search_type in enumerate(search_types):
            aux_dict = {}
            aux_dict['info'] = ''
            aux_dict['search_key'] = ''
            if search_type:
                asset = Search.get_by_id(search_type, search_ids[idx])
                aux_dict['info'] = SubmissionInfo._get_target_sobject_data(asset)
                aux_dict['search_key'] = '%s:%s' % (search_types[idx], search_ids[idx])
            infos.append(aux_dict)
    return infos
def get_display(self):
    '''Render the "[ projects ]" link and its hidden popup listing every
    project the current user may view, plus home/admin shortcuts.'''
    widget = Widget()

    label = SpanWdg('[ projects ]', css='hand')
    label.add_style('color', 'white')
    label.add_event('onclick', "spt.show_block('%s')" % self.WDG_ID)
    widget.add(label)

    # the popup itself, hidden until the label is clicked
    popup = DivWdg(id=self.WDG_ID, css='popup_wdg')
    widget.add(popup)
    popup.add_style('width', '80px')
    popup.add_style('display', 'none')

    title_div = DivWdg()
    popup.add(title_div)
    title = FloatDivWdg(' ', width='60px')
    title.add_style('margin-right', '2px')
    title_div.add_style('padding-bottom', '4px')
    title_div.add(title)
    title_div.add(CloseWdg(self.get_off_script(), is_absolute=False))
    popup.add(HtmlElement.br())

    # every project code except the internal sthpw/admin projects
    search = Search(Project)
    search.add_where("\"code\" not in ('sthpw','admin')")
    search.add_column('code')
    project_codes = SObject.get_values(search.get_sobjects(), 'code')

    root = WebContainer.get_web().get_site_root()
    security = Environment.get_security()

    for code in project_codes:
        if not security.check_access("project", code, "view"):
            continue
        entry = DivWdg(HtmlElement.b(code), css='selection_item')
        entry.add_event('onclick', "location.href='/%s/%s'" % (root, code))
        popup.add(entry)

    popup.add(HtmlElement.hr())

    if security.check_access("project", 'default', "view"):
        entry = DivWdg('home', css='selection_item')
        entry.add_event('onclick', "location.href='/%s'" % root)
        popup.add(entry)

    if security.check_access("project", "admin", "view"):
        entry = DivWdg('admin', css='selection_item')
        entry.add_event('onclick', "location.href='/%s/admin/'" % root)
        popup.add(entry)

    return widget
def get_registered_hours(search_key, week, weekday, year, desc=None, login=None, project=None):
    ''' get the total registered hours for the week. ADD YEAR!!!'''
    timecards = Timecard.get(search_key, week, year, desc, login, project)
    hours = SObject.get_values(timecards, weekday, unique=False)
    # empty/None entries contribute nothing; 0.0 start keeps a float result
    return sum((float(hour) for hour in hours if hour), 0.0)
def get_tasks(self, sobject):
    '''Return every sthpw/task attached to the prod/shot children of the
    given episode sobject.'''
    search_type = SearchType.get("prod/shot").get_full_key()

    # collect the ids of all shots in the episode
    shots = sobject.get_all_children("prod/shot")
    shot_ids = SObject.get_values(shots, "id")

    task_search = Search("sthpw/task")
    task_search.add_filter("search_type", search_type)
    task_search.add_filters("search_id", shot_ids)
    return task_search.get_sobjects()
def get_tasks(my, sobject):
    '''Return every sthpw/task attached to the prod/shot children of the
    given episode sobject.'''
    search_type = SearchType.get("prod/shot").get_full_key()

    # collect the ids of all shots in the episode
    shots = sobject.get_all_children("prod/shot")
    shot_ids = SObject.get_values(shots, "id")

    task_search = Search("sthpw/task")
    task_search.add_filter("search_type", search_type)
    task_search.add_filters("search_id", shot_ids)
    return task_search.get_sobjects()
def get_info(self):
    '''For each submission sobject, build a dict describing its target
    sobject.

    @return: list of dicts with keys 'info' (target display data) and
             'search_key' ("<search_type>:<search_id>")
    '''
    # check if the sobj type is the same
    search_types = SObject.get_values(self.sobjs, 'search_type', unique=True)
    search_ids = SObject.get_values(self.sobjs, 'search_id', unique=False)
    infos = []

    # this doesn't really work if the same asset is submitted multiple times
    if len(search_types) == 1 and len(search_ids) == len(self.sobjs):
        # fast path: one query fetches every referenced asset
        assets = []
        if search_types[0]:
            assets = Search.get_by_id(search_types[0], search_ids)
        asset_dict = SObject.get_dict(assets)
        # 'asset_id' avoids shadowing the builtin id()
        for asset_id in search_ids:
            asset = asset_dict.get(asset_id)
            aux_dict = {}
            aux_dict['info'] = SubmissionInfo._get_target_sobject_data(asset)
            aux_dict['search_key'] = '%s:%s' % (search_types[0], asset_id)
            infos.append(aux_dict)
    else:
        # TODO: this is a bit database intensive, mixed search_types not
        # recommended
        search_types = SObject.get_values(self.sobjs, 'search_type', unique=False)
        # enumerate replaces the xrange index loop
        for idx, search_type in enumerate(search_types):
            aux_dict = {}
            aux_dict['info'] = ''
            aux_dict['search_key'] = ''
            if search_type:
                asset = Search.get_by_id(search_type, search_ids[idx])
                aux_dict['info'] = SubmissionInfo._get_target_sobject_data(asset)
                aux_dict['search_key'] = '%s:%s' % (search_types[idx], search_ids[idx])
            infos.append(aux_dict)
    return infos
def get_display(my):
    '''Render the formatted value (numeric types right-aligned, date/time
    localized) and register a dynamic update for the underlying column.'''
    top = DivWdg()

    value = my.get_value()
    widget_type = my.get_option("type")

    if widget_type in ['integer', 'float', 'timecode', 'currency']:
        my.justify = "right"
        top.add_style("float: right")
    elif widget_type in ['date', 'time']:
        name = my.get_name()
        # day-only columns stay as stored; others shift to local time
        if value and not SObject.is_day_column(name):
            value = my.get_timezone_value(value)
            value = str(value)
    else:
        my.justify = "left"
        top.add_style("float: left")

    top.add_style("padding-right: 3px")
    top.add_style("min-height: 15px")

    fmt = my.get_option('format')
    top.add(my.get_format_value(value, fmt))

    sobject = my.get_current_sobject()
    if sobject:
        column = my.kwargs.get('column')
        name = column if column else my.get_name()
        top.add_update({
            'search_key': sobject.get_search_key(),
            'column': name,
            'format': fmt,
        })

    return top
def get_display(my):
    '''Render the formatted value (numeric types right-aligned, date/time
    localized) and register a dynamic update for the underlying column.'''
    top = DivWdg()

    value = my.get_value()
    widget_type = my.get_option("type")

    if widget_type in ['integer', 'float', 'timecode', 'currency']:
        my.justify = "right"
        top.add_style("float: right")
    elif widget_type in ['date', 'time']:
        name = my.get_name()
        # day-only columns stay as stored; others shift to local time
        if value and not SObject.is_day_column(name):
            value = my.get_timezone_value(value)
            value = str(value)
    else:
        my.justify = "left"
        top.add_style("float: left")

    top.add_style("padding-right: 3px")
    top.add_style("min-height: 15px")

    fmt = my.get_option('format')
    top.add(my.get_format_value(value, fmt))

    sobject = my.get_current_sobject()
    if sobject:
        column = my.kwargs.get('column')
        name = column if column else my.get_name()
        top.add_update({
            'search_key': sobject.get_search_key(),
            'column': name,
            'format': fmt,
        })

    return top
def update_process_table(my):
    ''' make sure to update process table

    Creates a config/process row for every process of this pipeline that
    does not exist yet (with color and sort_order), then deletes rows for
    processes no longer in the pipeline.
    '''
    process_names = my.get_process_names()
    pipeline_code = my.get_code()
    search = Search("config/process")
    search.add_filter("pipeline_code", pipeline_code)
    process_sobjs = search.get_sobjects()
    existing_names = SObject.get_values(process_sobjs, 'process')

    # sort_order counter; NOTE(review): another copy of this method in
    # this file starts the counter at 1 — confirm which is intended
    count = 0
    for process_name in process_names:
        exists = False
        for process_sobj in process_sobjs:
            # if it already exist, then update
            if process_sobj.get_value("process") == process_name:
                exists = True
                break

        if not exists:
            # create the missing config/process entry
            process_sobj = SearchType.create("config/process")
            process_sobj.set_value("pipeline_code", pipeline_code)
            process_sobj.set_value("process", process_name)
            attrs = my.get_process_attrs(process_name)
            color = attrs.get('color')
            if color:
                process_sobj.set_value("color", color)
            process_sobj.set_value("sort_order", count)
            process_sobj.commit()
        count += 1

    # delete obsolete
    obsolete = set(existing_names) - set(process_names)
    if obsolete:
        for obsolete_name in obsolete:
            for process_sobj in process_sobjs:
                # delete it
                if process_sobj.get_value("process") == obsolete_name:
                    process_sobj.delete()
                    break
def get_text_value(my):
    '''Return the value as formatted text; date/time values (except
    day-only columns) are shifted to the local timezone first.'''
    value = my.get_value()

    widget_type = my.get_option("type")
    if widget_type in ['date', 'time']:
        name = my.get_name()
        if not SObject.is_day_column(name):
            value = my.get_timezone_value(value)
            value = str(value)

    fmt = my.get_option('format')
    if fmt == 'Checkbox':
        value = str(value)
    else:
        value = my.get_format_value(value, fmt)

    return value
def _get_bins(self):
    '''Return up to self.display_limit bins of each distinct bin type,
    fetched with a single UNION ALL statement.

    @return: list of Bin sobjects; [] when no bins exist yet
    '''
    # get all the types in the Bin table
    type_search = Search(Bin)
    type_search.add_column('type')
    type_search.add_group_by('type')
    types = SObject.get_values(type_search.get_sobjects(), 'type')

    # a persisted widget value may override the default limit
    select = SelectWdg('display_limit')
    select.set_option('persist', 'true')
    display_limit = select.get_value()
    if display_limit:
        self.display_limit = display_limit

    # by default, get 10 for each type
    # ('bin_type' avoids shadowing builtin type(); a distinct name for the
    # Search avoids reusing 'select' for two unrelated objects)
    joined_statements = []
    for bin_type in types:
        # TODO: fix this sql to run through search
        bin_search = Search('prod/bin')
        bin_search.add_filter("type", bin_type)
        bin_search.set_show_retired(False)
        bin_search.add_order_by("code")
        bin_search.add_limit(self.display_limit)
        joined_statements.append(bin_search.get_statement())

    if len(joined_statements) > 1:
        joined_statements = ["(%s)" % x for x in joined_statements]
        statement = ' union all '.join(joined_statements)
    elif len(joined_statements) == 1:
        statement = joined_statements[0]
    else:
        # no bins created yet
        return []

    return Bin.get_by_statement(statement)
def postprocess(my): super(FlashAssetPublishCmd, my).postprocess() # parse the introspect file code = my.sobject.get_code() upload_dir = my.get_upload_dir() introspect_path = "%s/%s.xml" % (upload_dir, code) xml = Xml() xml.read_file(introspect_path) flash_layer_names = xml.get_values("introspect/layers/layer/@name") if not flash_layer_names: return # extract the layers from the flash layer_names layer_names = [] for flash_layer_name in flash_layer_names: if flash_layer_name.find(":") == -1: continue layer_name, instance_name = flash_layer_name.split(":") # make sure it is unique if layer_name not in layer_names: layer_names.append(layer_name) base_key = my.sobject.get_search_type_obj().get_base_key() # TODO: make the flash shot tab run FlashShotPublishCmd instead # and move this postprocess there # this is not meant for flash/asset, but for flash/shot if base_key == 'flash/asset' or not layer_names: return # get all of the layers in this shot and compare to the session existing_layers = my.sobject.get_all_children("prod/layer") existing_layer_names = SObject.get_values(existing_layers,"name") for layer_name in layer_names: if layer_name not in existing_layer_names: print "creating ", layer_name Layer.create(layer_name, code)
def _get_bins(my):
    '''Return up to my.display_limit bins of each distinct bin type,
    fetched with a single UNION ALL statement.

    @return: list of Bin sobjects; [] when no bins exist yet
    '''
    # get all the types in the Bin table
    type_search = Search(Bin)
    type_search.add_column('type')
    type_search.add_group_by('type')
    types = SObject.get_values(type_search.get_sobjects(), 'type')

    # a persisted widget value may override the default limit
    select = SelectWdg('display_limit')
    select.set_option('persist', 'true')
    display_limit = select.get_value()
    if display_limit:
        my.display_limit = display_limit

    # by default, get 10 for each type
    # ('bin_type' avoids shadowing builtin type(); a distinct name for the
    # Search avoids reusing 'select' for two unrelated objects)
    joined_statements = []
    for bin_type in types:
        # TODO: fix this sql to run through search
        bin_search = Search('prod/bin')
        bin_search.add_filter("type", bin_type)
        bin_search.set_show_retired(False)
        bin_search.add_order_by("code")
        bin_search.add_limit(my.display_limit)
        joined_statements.append(bin_search.get_statement())

    if len(joined_statements) > 1:
        joined_statements = ["(%s)" % x for x in joined_statements]
        statement = ' union all '.join(joined_statements)
    elif len(joined_statements) == 1:
        statement = joined_statements[0]
    else:
        # no bins created yet
        return []

    return Bin.get_by_statement(statement)
def postprocess(my): super(FlashAssetPublishCmd, my).postprocess() # parse the introspect file code = my.sobject.get_code() upload_dir = my.get_upload_dir() introspect_path = "%s/%s.xml" % (upload_dir, code) xml = Xml() xml.read_file(introspect_path) flash_layer_names = xml.get_values("introspect/layers/layer/@name") if not flash_layer_names: return # extract the layers from the flash layer_names layer_names = [] for flash_layer_name in flash_layer_names: if flash_layer_name.find(":") == -1: continue layer_name, instance_name = flash_layer_name.split(":") # make sure it is unique if layer_name not in layer_names: layer_names.append(layer_name) base_key = my.sobject.get_search_type_obj().get_base_key() # TODO: make the flash shot tab run FlashShotPublishCmd instead # and move this postprocess there # this is not meant for flash/asset, but for flash/shot if base_key == 'flash/asset' or not layer_names: return # get all of the layers in this shot and compare to the session existing_layers = my.sobject.get_all_children("prod/layer") existing_layer_names = SObject.get_values(existing_layers, "name") for layer_name in layer_names: if layer_name not in existing_layer_names: print "creating ", layer_name Layer.create(layer_name, code)
def update_process_table(my):
    ''' make sure to update process table

    Creates a config/process row for every process of this pipeline that
    does not exist yet (with color and sort_order), then deletes rows for
    processes no longer in the pipeline.
    '''
    process_names = my.get_process_names()
    pipeline_code = my.get_code()
    search = Search("config/process")
    search.add_filter("pipeline_code", pipeline_code)
    process_sobjs = search.get_sobjects()
    existing_names = SObject.get_values(process_sobjs, 'process')

    # sort_order counter; NOTE(review): another copy of this method in
    # this file starts the counter at 0 — confirm which is intended
    count = 1
    for process_name in process_names:
        exists = False
        for process_sobj in process_sobjs:
            # if it already exist, then update
            if process_sobj.get_value("process") == process_name:
                exists = True
                break

        if not exists:
            # create the missing config/process entry
            process_sobj = SearchType.create("config/process")
            process_sobj.set_value("pipeline_code", pipeline_code)
            process_sobj.set_value("process", process_name)
            attrs = my.get_process_attrs(process_name)
            color = attrs.get('color')
            if color:
                process_sobj.set_value("color", color)
            process_sobj.set_value("sort_order", count)
            process_sobj.commit()
        count += 1

    # delete obsolete
    obsolete = set(existing_names) - set(process_names)
    if obsolete:
        for obsolete_name in obsolete:
            for process_sobj in process_sobjs:
                # delete it
                if process_sobj.get_value("process") == obsolete_name:
                    process_sobj.delete()
                    break
def get_text_value(my):
    '''Return the value as formatted text; date/time values (except
    day-only columns) are shifted to the local timezone first.'''
    value = my.get_value()

    widget_type = my.get_option("type")
    if widget_type in ['date', 'time']:
        name = my.get_name()
        if not SObject.is_day_column(name):
            value = my.get_timezone_value(value)
            value = str(value)

    fmt = my.get_option('format')
    if fmt == 'Checkbox':
        value = str(value)
    else:
        value = my.get_format_value(value, fmt)

    return value
def get_behavior(cls, sobject):
    '''it takes sobject as an argument and turn it into a dictionary to
    pass to NotificationTestCmd

    @param sobject: the notification sobject to test
    @return: a click_up behavior dict whose cbjs_action runs
             NotificationTestCmd server-side and shows the result popup
    '''
    # popup background matches the current palette
    pal = WebContainer.get_web().get_palette()
    bg_color = pal.color('background')
    sobj_dict = SObject.get_sobject_dict(sobject)
    sobj_json = jsondumps(sobj_dict)
    bvr = {
        'type': 'click_up',
        'cbjs_action': '''
        var server = TacticServerStub.get();
        var rtn = {};
        var msg = '';
        try {
            spt.app_busy.show( 'Email Test', 'Waiting for email server response...' );
            rtn = server.execute_cmd('tactic.command.NotificationTestCmd', args={'sobject_dict': %s});
            msg = rtn.description;
            msg += '\\nYou can also review the notification in the Notification Log.'
        } catch(e) {
            msg = 'Error found in this notification:\\n\\n' + spt.exception.handler(e);
        }
        //console.log(msg)
        spt.app_busy.hide();
        var popup_id = 'Notification test result';
        var class_name = 'tactic.ui.panel.CustomLayoutWdg';
        msg= msg.replace(/\\n/g, '<br/>');
        var options = { 'html': '<div><div style="background:%s; padding: 5px">' + msg + '</div></div>'};
        var kwargs = {'width':'600px'};
        spt.panel.load_popup(popup_id, class_name, options, kwargs);
        ''' % (sobj_json, bg_color)
    }
    return bvr
def get_behavior(cls, sobject):
    '''Build a click behavior that runs NotificationTestCmd on the server.

    The given sobject is serialized to a dict/JSON and embedded into the
    client-side callback, which reports the command result in a popup.
    '''
    # popup background follows the current palette
    pal = WebContainer.get_web().get_palette()
    bg_color = pal.color('background')
    sobj_dict = SObject.get_sobject_dict(sobject)
    sobj_json = jsondumps(sobj_dict)
    # %s placeholders are filled with the sobject JSON and the palette
    # background color below
    bvr = {'type': 'click_up',
        'cbjs_action': '''
        var server = TacticServerStub.get();
        var rtn = {};
        var msg = '';
        try {
            spt.app_busy.show( 'Email Test', 'Waiting for email server response...' );
            rtn = server.execute_cmd('tactic.command.NotificationTestCmd', args={'sobject_dict': %s});
            msg = rtn.description;
            msg += '\\nYou can also review the notification in the Notification Log.'
        } catch(e) {
            msg = 'Error found in this notification:\\n\\n' + spt.exception.handler(e);
        }
        //console.log(msg)
        spt.app_busy.hide();
        var popup_id = 'Notification test result';
        var class_name = 'tactic.ui.panel.CustomLayoutWdg';
        msg= msg.replace(/\\n/g, '<br/>');
        var options = { 'html': '<div><div style="background:%s; padding: 5px">' + msg + '</div></div>'};
        var kwargs = {'width':'600px'};
        spt.panel.load_popup(popup_id, class_name, options, kwargs);
        ''' %(sobj_json, bg_color)}
    return bvr
def get_tasks(my, sobjects=None):
    '''Return the tasks relevant to the current user and filters.

    Args:
        sobjects: optional list of sobjects; when given, only tasks attached
            to those sobjects (by search_type/search_id) are returned.
            FIX: the default was a mutable `[]`, which is shared across
            calls; `None` is used instead (truthiness check is unchanged,
            so callers are unaffected).

    Returns:
        list of sthpw/task sobjects; empty list when no status filter
        is selected.
    '''
    task_search = Search("sthpw/task")
    task_search.add_column("search_id", distinct=True)

    if sobjects:
        # narrow to tasks attached to the given sobjects
        task_search.add_filter("search_type", sobjects[0].get_search_type())
        sobject_ids = SObject.get_values(sobjects, "id", unique=True)
        task_search.add_filters("search_id", sobject_ids)

    # only look at this project
    search_type = SearchType.get(my.search_type).get_full_key()
    task_search.add_filter("search_type", search_type)

    my.process_filter.alter_search(task_search)

    if isinstance(my.user_filter, UserFilterWdg):
        my.user_filter.alter_search(task_search)
    else:
        # restricted view: only the current user's assignments
        user = Environment.get_user_name()
        task_search.add_filter('assigned', user)

    status_filters = my.task_status_filter.get_values()
    if not status_filters:
        # nothing selected means nothing to show
        return []
    task_search.add_filters("status", status_filters)

    tasks = task_search.get_sobjects()
    return tasks
def execute(self):
    '''Create an sthpw/annotation entry from the submitted web form.'''
    web = WebContainer.get_web()

    # bail out unless the "Add Annotation" button was actually pressed
    if web.get_form_value("Add Annotation") == "":
        return

    # an empty message is not worth storing
    message = web.get_form_value("annotate_msg")
    if message == "":
        return

    annotation = SObject("sthpw/annotation")
    annotation.set_value("message", message)
    annotation.set_value("xpos", web.get_form_value("mouse_xpos"))
    annotation.set_value("ypos", web.get_form_value("mouse_ypos"))
    annotation.set_value("login", web.get_user_name())
    annotation.set_value("file_code", web.get_form_value("file_code"))
    annotation.commit()

    self.description = "Added annotation '%s'" % message
def get_display(self):
    '''Render one table cell for the current sobject/column.

    Returns either a DivWdg wrapping the formatted value (when the column
    exists on the search type), a placeholder widget for empty editable
    cells, or the raw value itself.
    '''
    sobject = self.get_current_sobject()
    # an explicit 'column' kwarg overrides the element name
    column = self.kwargs.get('column')
    if column:
        name = column
    else:
        name = self.get_name()
    value = self.get_value(name=name)

    # empty editable cell: show a centered "--Select--" placeholder
    empty = self.get_option("empty")
    if empty and self.is_editable() and not value:
        from pyasm.web import SpanWdg
        div = DivWdg()
        div.add_style("text-align: center")
        div.add_style("width: 100%")
        div.add_style("white-space: nowrap")
        if empty in [True, 'true']:
            div.add("--Select--")
            div.add_style("opacity: 0.5")
        return div

    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(), name)
    else:
        data_type = 'text'

    # strings may contain wiki markup (Python 2: types.StringTypes)
    if type(value) in types.StringTypes:
        wiki = WikiUtil()
        value = wiki.convert(value)
    if name == 'id' and value == -1:
        # -1 is the sentinel for "not yet inserted"
        value = ''
    elif data_type in ["timestamp", "time"] or name == "timestamp":
        if value == 'now':
            value = ''
        elif value:
            # This date is assumed to be GMT
            date = parser.parse(value)
            # convert to user timezone, except for pure day columns
            if not SObject.is_day_column(name):
                date = self.get_timezone_value(date)
            try:
                # decode using the locale encoding; fall back to the raw
                # strftime result if the locale is not set up
                encoding = locale.getlocale()[1]
                value = date.strftime("%b %d, %Y - %H:%M").decode(encoding)
            except:
                value = date.strftime("%b %d, %Y - %H:%M")
        else:
            value = ''
    else:
        # widgets are returned directly; non-string, non-numeric values
        # are stringified (the `value + 1` probe detects numerics)
        if isinstance(value, Widget):
            return value
        elif not isinstance(value, basestring):
            try:
                value + 1
            except TypeError:
                value = str(value)
            #else:
            #    value_wdg.add_style("float: right")
            #    value_wdg.add_style("padding-right: 3px")

    if sobject and SearchType.column_exists(sobject.get_search_type(), name):
        value_wdg = DivWdg()
        self.add_value_update(value_wdg, sobject, name)
        # don't call str() to prevent utf-8 encode error
        value_wdg.add(value)
        value_wdg.add_style("overflow-x: hidden")
        value_wdg.add_style("text-overflow: ellipsis")

        # some display properties
        min_height = 25
        value_wdg.add_style("min-height: %spx" % min_height)
        single_line = self.get_option("single_line") or False
        if single_line in ["true", True]:
            value_wdg.add_style("line-height: %spx" % min_height)
            value_wdg.add_style("white-space: nowrap")
        #value_wdg.add_style("overflow-y: hidden")
        #value_wdg.add_class("spt_scrollable")
        #value_wdg.add_attr("title", value)

        # optional: make the cell a link that opens the sobject detail
        link_expression = self.get_option("link_expression")
        if link_expression:
            value_wdg.add_class("tactic_new_tab")
            value_wdg.add_style("display: inline-block")
            value_wdg.add_attr("search_key", sobject.get_search_key())
            value_wdg.add_style("text-decoration: underline")
            #value_wdg.add_attr("spt_class_name", "tactic.ui.tools.SObjectDetailWdg")
            value_wdg.add_class("hand")
        return value_wdg

    # column does not exist on the search type: return the bare value
    return value
def get_display(my):
    ''' this does not run do_search.

    Builds a filter panel (process, user, task status, search limit) plus a
    table of sobjects, optionally restricted to those that have matching
    tasks.
    '''
    # options may override the instance defaults
    search_type = my.options.get("search_type")
    if search_type:
        my.search_type = search_type
    view = my.options.get("view")
    if view:
        my.view = view
    search_type = my.search_type
    pipeline_name = my.pipeline_name
    sobject_filter = my.sobject_filter
    assert search_type != None

    search = Search(search_type)
    widget = Widget()
    div = FilterboxWdg()
    widget.add(div)

    my.process_filter = ProcessFilterSelectWdg(label="Process: ", \
        search_type=search_type, css='med', name=my.process_filter_name)
    my.process_filter.set_submit_onchange(False)

    # checkbox to include assets that have no tasks at all
    taskless_filter = FilterCheckboxWdg('show_taskless_assets', \
        label='Show Taskless Assets', css='small')
    taskless_filter.set_submit_onchange(False)

    # add in the asset filter
    if sobject_filter:
        sobject_filter.alter_search(search)
        div.add(sobject_filter)

    # append the process filter and user filter
    div.add_advanced_filter(my.process_filter)

    # add a hint
    hint = HintWdg('You can select a single process or the << heading >> '\
        'which will select the group of processes it contains.')
    div.add_advanced_filter(hint)

    # restricted users are pinned to their own login via a hidden widget
    if UserFilterWdg.has_restriction():
        user = Environment.get_user_name()
        my.user_filter = HiddenWdg('user_filter', user)
        my.user_filter.set_persistence()
    else:
        # it has a special column 'assigned'
        my.user_filter = UserFilterWdg(['user_filter', 'Assigned: '])
        my.user_filter.set_search_column('assigned')
    div.add_advanced_filter(my.user_filter)

    # add the show assets with no task option
    div.add_advanced_filter(taskless_filter)

    # add the task filter
    my.task_status_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(my.task_status_filter)
    div.add_advanced_filter(HtmlElement.br())

    # shots additionally get a status filter
    if search_type == 'prod/shot':
        shot_filter = SObjectStatusFilterWdg()
        div.add_advanced_filter(shot_filter)
        shot_statuses = shot_filter.get_statuses()
        if shot_statuses:
            search.add_filters("status", shot_filter.get_values())

    # add search limit
    search_limit = SearchLimitWdg()
    search_limit.set_limit(my.search_limit)
    div.add_bottom(search_limit)

    # only show shots that match the task filter
    if not taskless_filter.is_checked(False):
        # filter out the tasks
        search.add_column("id")
        tmp_sobjects = search.get_sobjects()
        sobjects = []
        if tmp_sobjects:
            # get all of the sobject ids corresponding to these tasks,
            # then re-search restricted to those ids
            tasks = my.get_tasks(tmp_sobjects)
            sobject_ids = SObject.get_values(tasks, "search_id", unique=True)
            search = Search(search_type)
            search.add_filters("id", sobject_ids)
            search_limit.alter_search(search)
            sobjects = search.get_sobjects()
    else:
        search_limit.alter_search(search)
        tmp_sobjects = search.get_sobjects()
        sobjects = tmp_sobjects

    table = TableWdg(search_type, my.view)
    widget.add(HtmlElement.br())
    table.set_sobjects(sobjects)
    widget.add(table)
    return widget
def get_display(my):
    '''Build the check-in panel: process/context filters, check-in options,
    and a table of the session's instances ready for check-in.
    '''
    my.search_type = my.kwargs.get('search_type')
    my.texture_search_type = my.kwargs.get('texture_search_type')
    assert my.search_type
    app_name = WebContainer.get_web().get_selected_app()

    # add an outside box
    top = DivWdg(css='spt_view_panel')
    #div = DivWdg(css="maq_search_bar")
    div = DivWdg()
    div.add_color("background", "background2", -15)
    my.set_as_panel(top)
    top.add(div)
    div.add_style("margin: 5px")
    div.add_style("padding: 10px")
    div.add_style("font-style: bold")

    process_div = DivWdg()
    process_div.add_style("padding-left: 10px")
    div.add(process_div)
    process_div.add(my.get_process_wdg(my.search_type))
    process_div.add(my.get_context_filter_wdg())
    process_div.add(HtmlElement.br(clear="all"))

    div.add(HtmlElement.br())

    # collapsible "Check in Options" section
    checkin_options = DivWdg(css='spt_ui_options')
    checkin_options.add_style("padding: 10px")
    swap = SwapDisplayWdg()
    #swap.set_off()
    title = SpanWdg("Check in Options")
    SwapDisplayWdg.create_swap_title(title, swap, checkin_options, is_open=False)
    div.add(swap)
    div.add(title)

    checkin_options.add(my.get_file_type_wdg())
    checkin_options.add(my.get_snapshot_type_wdg())
    checkin_options.add(HtmlElement.br(1))
    checkin_options.add(my.get_export_method_wdg())
    checkin_options.add(my.get_checkin_as_wdg())
    #my.add( my.get_render_icon_wdg() )

    # For different export methods
    checkin_options.add(my.get_currency_wdg())
    checkin_options.add(my.get_reference_option())
    checkin_options.add(my.get_auto_version_wdg())
    checkin_options.add(my.get_texture_option(app=app_name))
    checkin_options.add(my.get_handoff_wdg())

    # NOTE(review): this early return yields None rather than a widget;
    # presumably the framework tolerates it — confirm against callers
    if not my.context_select.get_value(for_display=True):
        my.add(DivWdg('A context must be selected.', css='warning'))
        return

    div.add(checkin_options)
    top.add(my.get_introspect_wdg())
    top.add(HtmlElement.br(2))

    # create the interface
    table = Table()
    table.set_max_width()
    #table.set_class("table")
    table.add_color('background', 'background2')
    #table.add_style('line-height','3.0em')
    #table.add_row(css='smaller')
    tr = table.add_row(css='smaller')
    tr.add_style('height', '3.5em')
    table.add_header("&nbsp;")
    table.add_header("&nbsp;")
    th = table.add_header("Instance")
    th.add_style('text-align: left')
    table.add_header(my.get_checkin())
    table.add_header("Sandbox")
    tr.add_color('background', 'background2', -15)

    # get session and handle case where there is no session
    my.session = SessionContents.get()
    if my.session == None:
        instance_names = []
        asset_codes = []
        node_names = []
    else:
        instance_names = my.session.get_instance_names()
        asset_codes = my.session.get_asset_codes()
        node_names = my.session.get_node_names()

    # get all of the possible assets based on the asset codes
    search = Search(my.search_type)
    search.add_filters("code", asset_codes)
    assets = search.get_sobjects()
    assets_dict = SObject.get_dict(assets, ["code"])

    if my.session:
        my.add("Current Project: <b>%s</b>" % my.session.get_project_dir())
    else:
        my.add("Current Project: Please press 'Introspect'")

    count = 0
    for i in range(0, len(node_names)):
        node_name = node_names[i]
        # only tactic nodes and transform/objectSet nodes are listed
        if not my.session.is_tactic_node(node_name) and \
                not my.session.get_node_type(node_name) in ['transform','objectSet']:
            continue
        instance_name = instance_names[i]

        # backwards compatible: older sessions may lack asset codes
        try:
            asset_code = asset_codes[i]
        except IndexError, e:
            asset_code = instance_name

        # skip if this is a reference
        if my.list_references == False and \
                my.session.is_reference(node_name):
            continue

        table.add_row()

        # check that this asset exists
        asset = assets_dict.get(asset_code)
        if not asset:
            continue

        # list items if it is a set
        if asset.get_value('asset_type', no_exception=True) in ["set", "section"]:
            my.current_sobject = asset
            my.handle_set(table, instance_name, asset, instance_names)
            count += 1

        # if this asset is in the database, then allow it to checked in
        if asset:
            # already checked in as a set snapshot: skip
            if my.session.get_snapshot_code(instance_name, snapshot_type='set'):
                continue
            # hack remember this
            my.current_sobject = asset
            my.handle_instance(table, instance_name, asset, node_name)
        else:
            table.add_blank_cell()
            table.add_cell(instance_name)
        count += 1
    # NOTE(review): no `return top` is visible at the end of this chunk —
    # confirm against the original file
continue elif len(search_types) == 1: search_type = my.sobjects[0].get_value("search_type") try: if search_codes != None: ref_sobjs = Search.get_by_code(search_type, search_codes) else: ref_sobjs = Search.get_by_id(search_type, search_ids) except SearchException, e: # skips unknown search_type/project print e.__str__() pass # TODO: None defaults to search_key, should be empty my.ref_sobj_dict = SObject.get_dict(ref_sobjs, None) # when drawn as part of a TbodyWdg, we want to disable the calculation # of most things so that it will not try to display a prev row if my.get_option('disable') == 'true': my.ref_sobj_dict = None my.empty = True my.is_preprocessed = True #def handle_td(my, td): # td.add_class("task_spacer_column") # td.add_style("font-weight: bold") # if my.empty: # td.add_style("border-top: 0px")
def get_display(self):
    '''Build the task-overview panel: filters (milestone, process, user,
    dates, status), then a table of tasks for the matching assets.
    '''
    web = WebContainer.get_web()
    widget = Widget()
    if not self.search_type:
        self.search_type = self.options.get("search_type")
    assert self.search_type
    sobject_filter = self.sobject_filter
    web_state = WebState.get()
    web_state.add_state("ref_search_type", self.search_type)

    div = FilterboxWdg()
    widget.add(div)

    # add the sobject filter
    if self.sobject_filter:
        div.add(self.sobject_filter)

    # add a milestone filter
    milestone_filter = FilterSelectWdg("milestone_filter", label="Milestone: ")
    milestones = Search("sthpw/milestone").get_sobjects()
    milestone_filter.set_sobjects_for_options(milestones, "code", "code")
    milestone_filter.add_empty_option(label='-- Any Milestones --')
    milestone_filter.set_submit_onchange(False)
    milestone = milestone_filter.get_value()
    div.add_advanced_filter(milestone_filter)

    # add a process filter
    process_filter = ProcessFilterSelectWdg(name=self.process_filter_name, label='Process: ')
    process_filter.set_search_type(self.search_type)
    process_filter.set_submit_onchange(False)
    div.add_advanced_filter(process_filter)

    user_filter = None
    user = Environment.get_user_name()
    # it has a special column 'assigned'; restricted users cannot pick
    # another login
    if not UserFilterWdg.has_restriction():
        user_filter = UserFilterWdg()
        user_filter.set_search_column('assigned')
        user = user_filter.get_value()
        div.add_advanced_filter(user_filter)

    # add a task properties search
    search_columns = ['status', 'description']
    task_search_filter = SearchFilterWdg(name='task_prop_search', \
        columns=search_columns, label='Task Search: ')
    div.add_advanced_filter(task_search_filter)

    # add a retired filter
    retired_filter = RetiredFilterWdg()
    div.add_advanced_filter(retired_filter)

    # set a limit to only see set amount of sobjects at a time
    search_limit = SearchLimitWdg()
    search_limit.set_limit(50)
    search_limit.set_style(SearchLimitWdg.LESS_DETAIL)
    div.add_bottom(search_limit)

    div.add_advanced_filter(HtmlElement.br(2))

    start_date_wdg = CalendarInputWdg("start_date_filter", label="From: ", css='med')
    start_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(start_date_wdg)
    start_date = start_date_wdg.get_value()

    # these dates are actually used for search filtering
    processed_start_date = None
    processed_end_date = None
    if start_date:
        date = Date(db_date=start_date)
        # this guarantees a valid date( today ) if invalid input is detected
        processed_start_date = date.get_db_date()
        if start_date != processed_start_date:
            start_date_wdg.set_value(self.INVALID)

    # add hints
    hint = HintWdg("The 'From' and 'To' dates apply to bid dates.")
    #span.add(hint)
    end_date_wdg = CalendarInputWdg("end_date_filter", label="To: ", css='med')
    end_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(end_date_wdg)
    div.add_advanced_filter(hint)
    end_date = end_date_wdg.get_value()
    if end_date:
        date = Date(db_date=end_date)
        processed_end_date = date.get_db_date()
        if end_date != processed_end_date:
            end_date_wdg.set_value(self.INVALID)

    # show sub task checkbox
    sub_task_cb = FilterCheckboxWdg('show_sub_tasks', label='show sub tasks', css='med')
    div.add_advanced_filter(sub_task_cb)
    div.add_advanced_filter(HtmlElement.br(2))

    task_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(task_filter)

    shot_filter = None
    if self.search_type == 'prod/shot':
        shot_filter = SObjectStatusFilterWdg()
        div.add_advanced_filter(shot_filter)

    # add refresh icon
    '''
    refresh = IconRefreshWdg(long=False)
    calendar_div.add(refresh)
    calendar_div.add(SpanWdg(' ', css='small'))
    '''

    # get all of the assets
    search = Search(self.search_type)
    if sobject_filter:
        sobject_filter.alter_search(search)
    if shot_filter:
        # only constrain when a strict subset of statuses is selected
        shot_statuses = shot_filter.get_statuses()
        shot_statuses_selected = shot_filter.get_values()
        if shot_statuses != shot_statuses_selected:
            search.add_filters("status", shot_filter.get_values())
    assets = search.get_sobjects()
    if not assets:
        # drawing the empty table prevents the loss of some prefs data
        table = TableWdg("sthpw/task", self.task_view)
        #widget.add(HtmlElement.h3("No assets found"))
        widget.add(table)
        return widget

    # this assumes looking at one project only
    project_search_type = assets[0].get_search_type()
    ids = SObject.get_values(assets, 'id')

    # get all of the tasks
    search = Search("sthpw/task")
    if processed_start_date and start_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_start_date >= '%s' or actual_start_date >='%s')" \
            % (processed_start_date, processed_start_date))
    if processed_end_date and end_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_end_date <= '%s' or actual_end_date <='%s')" \
            % (processed_end_date, processed_end_date))

    # filter out sub pipeline tasks
    if not sub_task_cb.is_checked():
        search.add_regex_filter('process', '/', op='NEQ')

    search.add_filter("search_type", project_search_type)
    search.add_filters("search_id", ids)
    # order by the search ids of the asset as they were defined in the
    # previous search
    search.add_enum_order_by("search_id", ids)

    if user != "":
        search.add_filter("assigned", user)
    if milestone != "":
        search.add_filter("milestone_code", milestone)

    process_filter.alter_search(search)
    task_search_filter.alter_search(search)

    if not self.show_all_task_approvals:
        #task_filter = TaskStatusFilterWdg(task_pipeline="task")
        #widget.add(task_filter)
        task_statuses = task_filter.get_processes()
        task_statuses_selected = task_filter.get_values()
        # one way to show tasks with obsolete statuses when the user
        # check all the task status checkboxes
        if task_statuses != task_statuses_selected:
            search.add_filters("status", task_filter.get_values())

    # filter for retired ...
    # NOTE: this must be above the search limit filter
    # because it uses a get count which commits the retired flag
    if retired_filter.get_value() == 'true':
        search.set_show_retired(True)

    # alter_search() will run set_search() implicitly
    search_limit.alter_search(search)

    # define the table
    table = TableWdg("sthpw/task", self.task_view)

    # get all of the tasks
    tasks = search.get_sobjects()
    sorted_tasks = self.process_tasks(tasks, search)
    widget.add(HtmlElement.br())
    table.set_sobjects(sorted_tasks)

    # make some adjustments to the calendar widget
    calendar_wdg = table.get_widget("schedule")
    for name, value in self.calendar_options.items():
        calendar_wdg.set_option(name, value)

    widget.add(table)
    return widget
def get_mail_users(my, column):
    '''Resolve the notification column (e.g. mail_to/mail_cc) to a set of
    recipients.

    The column value may be a newline-separated mix of expressions
    (starting with "@" or "{"), raw email addresses (containing "@"), or
    login group codes. Returns a set of login sobjects and/or plain
    email/login strings. When the column is empty, falls back to the
    group notification recipients.
    '''
    # mail groups
    recipients = set()
    expr = my.notification.get_value(column, no_exception=True)
    if expr:
        # elevate privileges while evaluating expressions
        sudo = Sudo()
        # Introduce an environment that can be reflected
        env = {'sobject': my.sobject}
        #if expr.startswith("@"):
        #    logins = Search.eval(expr, list=True, env_sobjects=env)
        #else:
        parts = expr.split("\n")
        # go through each login and evaluate each
        logins = []
        for part in parts:
            if part.startswith("@") or part.startswith("{"):
                results = Search.eval(part, list=True, env_sobjects=env)
                # clear the container after each expression eval
                ExpressionParser.clear_cache()
                # these can just be login names, get the actual Logins
                if results:
                    if isinstance(results[0], basestring):
                        login_sobjs = Search.eval("@SOBJECT(sthpw/login['login','in','%s'])" % '|'.join(results), list=True)
                        login_list = SObject.get_values(login_sobjs, 'login')
                        for result in results:
                            # the original result could be an email address already
                            if result not in login_list:
                                logins.append(result)
                        if login_sobjs:
                            logins.extend(login_sobjs)
                    else:
                        logins.extend(results)
            elif part.find("@") != -1:
                # this is just an email address
                logins.append(part)
            elif part:
                # this is a group
                group = LoginGroup.get_by_code(part)
                if group:
                    logins.extend(group.get_logins())
        # drop elevated privileges as soon as evaluation is done
        del sudo
    else:
        notification_id = my.notification.get_id()
        logins = GroupNotification.get_logins_by_id(notification_id)
    # de-duplicate via the set
    for login in logins:
        recipients.add(login)
    return recipients
def get_display(my):
    '''Build the check-in panel: process/context filters, check-in options,
    and a table of the session's instances ready for check-in.
    '''
    my.search_type = my.kwargs.get('search_type')
    my.texture_search_type = my.kwargs.get('texture_search_type')
    assert my.search_type
    app_name = WebContainer.get_web().get_selected_app()

    # add an outside box
    top = DivWdg(css='spt_view_panel')
    #div = DivWdg(css="maq_search_bar")
    div = DivWdg()
    div.add_color("background", "background2", -15)
    my.set_as_panel(top)
    top.add(div)
    div.add_style("margin: 5px")
    div.add_style("padding: 10px")
    div.add_style("font-style: bold")

    process_div = DivWdg()
    process_div.add_style("padding-left: 10px")
    div.add(process_div)
    process_div.add( my.get_process_wdg(my.search_type))
    process_div.add( my.get_context_filter_wdg() )
    process_div.add(HtmlElement.br(clear="all"))

    div.add( HtmlElement.br() )

    # collapsible "Check in Options" section
    checkin_options = DivWdg(css='spt_ui_options')
    checkin_options.add_style("padding: 10px")
    swap = SwapDisplayWdg()
    #swap.set_off()
    title = SpanWdg("Check in Options")
    SwapDisplayWdg.create_swap_title(title, swap, checkin_options, is_open=False)
    div.add(swap)
    div.add(title)

    checkin_options.add( my.get_file_type_wdg() )
    checkin_options.add( my.get_snapshot_type_wdg() )
    checkin_options.add(HtmlElement.br(1))
    checkin_options.add( my.get_export_method_wdg() )
    checkin_options.add( my.get_checkin_as_wdg() )
    #my.add( my.get_render_icon_wdg() )

    # For different export methods
    checkin_options.add( my.get_currency_wdg() )
    checkin_options.add( my.get_reference_option())
    checkin_options.add( my.get_auto_version_wdg())
    checkin_options.add( my.get_texture_option(app=app_name))
    checkin_options.add( my.get_handoff_wdg())

    # NOTE(review): this early return yields None rather than a widget;
    # presumably the framework tolerates it — confirm against callers
    if not my.context_select.get_value(for_display=True):
        my.add(DivWdg('A context must be selected.', css='warning'))
        return

    div.add(checkin_options)
    top.add( my.get_introspect_wdg() )
    top.add(HtmlElement.br(2))

    # create the interface
    table = Table()
    table.set_max_width()
    #table.set_class("table")
    table.add_color('background','background2')
    #table.add_style('line-height','3.0em')
    #table.add_row(css='smaller')
    tr = table.add_row(css='smaller')
    tr.add_style('height', '3.5em')
    table.add_header("&nbsp;")
    table.add_header("&nbsp;")
    th = table.add_header("Instance")
    th.add_style('text-align: left')
    table.add_header(my.get_checkin())
    table.add_header("Sandbox")
    tr.add_color('background','background2', -15)

    # get session and handle case where there is no session
    my.session = SessionContents.get()
    if my.session == None:
        instance_names = []
        asset_codes = []
        node_names = []
    else:
        instance_names = my.session.get_instance_names()
        asset_codes = my.session.get_asset_codes()
        node_names = my.session.get_node_names()

    # get all of the possible assets based on the asset codes
    search = Search(my.search_type)
    search.add_filters("code", asset_codes)
    assets = search.get_sobjects()
    assets_dict = SObject.get_dict(assets, ["code"])

    if my.session:
        my.add("Current Project: <b>%s</b>" % my.session.get_project_dir() )
    else:
        my.add("Current Project: Please press 'Introspect'")

    count = 0
    for i in range(0, len(node_names) ):
        node_name = node_names[i]
        # only tactic nodes and transform/objectSet nodes are listed
        if not my.session.is_tactic_node(node_name) and \
                not my.session.get_node_type(node_name) in ['transform','objectSet']:
            continue
        instance_name = instance_names[i]

        # backwards compatible: older sessions may lack asset codes
        try:
            asset_code = asset_codes[i]
        except IndexError, e:
            asset_code = instance_name

        # skip if this is a reference
        if my.list_references == False and \
                my.session.is_reference(node_name):
            continue

        table.add_row()

        # check that this asset exists
        asset = assets_dict.get(asset_code)
        if not asset:
            continue

        # list items if it is a set
        if asset.get_value('asset_type', no_exception=True) in ["set", "section"]:
            my.current_sobject = asset
            my.handle_set( table, instance_name, asset, instance_names)
            count +=1

        # if this asset is in the database, then allow it to checked in
        if asset:
            # already checked in as a set snapshot: skip
            if my.session.get_snapshot_code(instance_name, snapshot_type='set'):
                continue
            # hack remember this
            my.current_sobject = asset
            my.handle_instance(table, instance_name, asset, node_name)
        else:
            table.add_blank_cell()
            table.add_cell(instance_name)
        count += 1
    # NOTE(review): no `return top` is visible at the end of this chunk —
    # confirm against the original file
def update_process_table(my, search_type=None):
    '''Synchronize the config/process table with this pipeline's processes.

    Inserts missing config/process rows (copying attributes from the
    template pipeline when one applies), commits any template-driven
    pipeline updates, and deletes rows for processes no longer present.

    Args:
        search_type: optional search type of the pipeline's sobjects;
            when given and not 'sthpw/task', new processes default their
            subcontext_options to '(main)'.
    '''
    # determine which processes come from the template pipeline (none when
    # this pipeline *is* the template)
    template = my.get_template_pipeline()
    if template:
        if template.get_code() == my.get_code():
            template_processes = []
        else:
            template_processes = template.get_process_names()
    else:
        template_processes = []

    process_names = my.get_process_names()
    pipeline_code = my.get_code()

    # existing process rows for this pipeline
    search = Search("config/process")
    search.add_filter("pipeline_code", pipeline_code)
    process_sobjs = search.get_sobjects()
    existing_names = SObject.get_values(process_sobjs, 'process')

    pipeline_has_updates = False
    count = 1
    for process_name in process_names:
        exists = False
        for process_sobj in process_sobjs:
            # if it already exists, then leave it alone
            if process_sobj.get_value("process") == process_name:
                exists = True
                break

        if not exists:
            process_sobj = SearchType.create("config/process")
            # default to (main) for non-task status pipeline
            if search_type and search_type != 'sthpw/task':
                process_sobj.set_value('subcontext_options', '(main)')
            process_sobj.set_value("pipeline_code", pipeline_code)
            process_sobj.set_value("process", process_name)

            # copy information over from the template
            if process_name in template_processes:
                template_attrs = template.get_process_attrs(process_name)
                process = my.get_process(process_name)
                for name, value in template_attrs.items():
                    if name in ['xpos', 'ypos', 'name']:
                        continue
                    process.set_attribute(name, value)
                    pipeline_has_updates = True

                # use a separate Search so the outer `search` variable
                # is not clobbered
                template_search = Search("config/process")
                template_search.add_filter("process", process_name)
                # NEED ANOTHER FILTER for templates here
                template_search.add_filter("pipeline_code", "%/__TEMPLATE__", op="like")

                # copy certain values from the template
                # FIX: guard against a missing template process row, which
                # previously raised AttributeError on .get_data()
                template_process = template_search.get_sobject()
                if template_process:
                    for name, value in template_process.get_data().items():
                        if not value:
                            continue
                        if name in ['checkin_mode']:
                            process_sobj.set_value(name, value)

            attrs = my.get_process_attrs(process_name)
            color = attrs.get('color')
            if color:
                process_sobj.set_value("color", color)
            process_sobj.set_value("sort_order", count)
            process_sobj.commit()
        count += 1

    # persist template-driven changes to the pipeline xml
    if pipeline_has_updates:
        my.set_value("pipeline", my.get_pipeline_xml().to_string())
        my.commit()

    # delete obsolete
    obsolete = set(existing_names) - set(process_names)
    if obsolete:
        for obsolete_name in obsolete:
            for process_sobj in process_sobjs:
                # delete it
                if process_sobj.get_value("process") == obsolete_name:
                    process_sobj.delete()
                    break
def execute(my):
    '''Burn-down check: find Pending tasks that have sat past a time
    threshold since leaving Assignment, and (eventually) email about them.

    NOTE(review): the email path is currently disabled — see the
    unconditional `continue` in the loop and the commented-out
    BurnDownCmd invocation below.
    '''
    date = Date()
    cur_time = date.get_utc()
    print "Burn down"

    # thresholds in seconds: `first` before any mail, `next` between mails
    #first = 8 * 60 * 60
    first = 30
    next = 10

    # search for all of the tasks that are pending
    search = Search("sthpw/task")
    search.add_filter("status", "Pending")
    sobjects = search.get_sobjects()

    # get the time when this was set to pending
    search = Search("sthpw/status_log")
    search.add_filter("from_status", "Assignment")
    search.add_filter("to_status", "Pending")
    logs = search.get_sobjects()
    # keyed by the task's search key (search_type, search_id)
    logs_dict = SObject.get_dict(logs, ["search_type", "search_id"])

    # analyze tasks
    ready_sobjects = []
    for sobject in sobjects:
        search_key = sobject.get_search_key()
        # get the logs
        log = logs_dict.get(search_key)
        if not log:
            continue
        log_date = Date(db=log.get_value("timestamp"))
        log_time = log_date.get_utc()
        interval = cur_time - log_time

        # if we haven't passed the first marker, then just skip
        if interval < first:
            continue
        # put an upper limit where it doesn't make anymore sense (21 days)
        if interval > 21*24*60*60:
            continue

        # once we've reached the first marker, email next interval
        start = (interval - first) / next
        print "start: ", interval, first, start
        # NOTE(review): this unconditional continue makes everything below
        # in the loop unreachable — looks like a deliberate disable while
        # debugging; confirm before removing
        continue

        parent = sobject.get_parent()
        if not parent:
            print "WARNING: parent does not exist [%s]" % sobject.get_search_key()
            continue
        process = sobject.get_value("process")
        assigned = sobject.get_value("assigned")
        status = sobject.get_value("status")
        code = parent.get_code()
        print (code, assigned, process, status, interval/3600)
        ready_sobjects.append( sobject )

    # TODO: problem how to prevent emails from happening every iteration?
    # this is run every minute, so remember the last time an email has been
    # sent for a particular
    if not ready_sobjects:
        return

    from pyasm.command import Command
    class BurnDownCmd(Command):
        def get_title(my):
            return "Burn Down Command"
        def set_sobjects(my, sobjects):
            # NOTE(review): this references the enclosing loop variable
            # `sobject`, not the `sobjects` parameter — presumably a typo;
            # harmless while the command below stays commented out
            my.sobjects = [sobject]
        def execute(my):
            # call email trigger
            from email_trigger import EmailTrigger
            email_trigger = EmailTrigger()
            email_trigger.set_command(my)
            email_trigger.execute()

    # call email trigger
    #cmd = BurnDownCmd()
    #cmd.set_sobjects(ready_sobjects)
    #Command.execute_cmd(cmd)

    # remember the time of each email
    for sobject in ready_sobjects:
        search_key = sobject.get_search_key()
        my.notified[search_key] = cur_time
def preprocess(self):
    '''Resolve and cache the parent sobjects referenced by self.sobjects.

    Populates self.ref_sobj_dict (keyed by search key) so per-row display
    code can look up parents without extra queries. Idempotent via
    self.is_preprocessed.
    '''
    # protect against the case where there is a single sobject that
    # is an insert (often seen in "insert")
    if self.is_preprocessed == True:
        return
    skip = False
    if len(self.sobjects) == 1:
        if not self.sobjects[0].has_value("search_type"):
            skip = True

    if not skip:
        search_types = SObject.get_values(self.sobjects, 'search_type', unique=True)
        # prefer search_code; older schemas only have search_id, in which
        # case reading search_code raises and we fall back
        try:
            search_codes = SObject.get_values(self.sobjects, 'search_code', unique=True)
            search_ids = None
        except Exception as e:
            print "WARNING: ", e
            search_ids = SObject.get_values(self.sobjects, 'search_id', unique=True)
            search_codes = None
    else:
        search_types = []
        search_codes = []

    # if there is more than one search_type, then go get each parent
    # individually
    # NOTE: this is very slow!!!!
    ref_sobjs = []
    if len(search_types) > 1:
        ref_sobjs = []
        for tmp_sobj in self.sobjects:
            try:
                ref_sobj = tmp_sobj.get_parent()
                if ref_sobj:
                    ref_sobjs.append(ref_sobj)
                else:
                    warning = "Dangling reference: %s" % tmp_sobj.get_search_key()
                    Environment.add_warning(warning, warning)
            except SearchException as e:
                # skips unknown search_type/project
                print e.__str__()
                continue
    elif len(search_types) == 1:
        # single search_type: one batched query by code or id
        search_type = self.sobjects[0].get_value("search_type")
        try:
            if search_codes != None:
                ref_sobjs = Search.get_by_code(search_type, search_codes)
            else:
                ref_sobjs = Search.get_by_id(search_type, search_ids)
        except SearchException as e:
            # skips unknown search_type/project
            print e.__str__()
            pass

    # TODO: None defaults to search_key, should be empty
    self.ref_sobj_dict = SObject.get_dict(ref_sobjs, None)

    # when drawn as part of a TbodyWdg, we want to disable the calculation
    # of most things so that it will not try to display a prev row
    if self.get_option('disable') == 'true':
        self.ref_sobj_dict = None
        self.empty = True
    self.is_preprocessed = True
def preprocess(my): '''determine if this is for EditWdg or EDIT ROW of a table''' # get the number of task pipelines needed for EditWdg, which is one # for the EDIT ROW , there could be more than 1 my.task_mapping = None from tactic.ui.panel import EditWdg if hasattr(my, 'parent_wdg') and isinstance(my.get_parent_wdg(), EditWdg): task = my.get_current_sobject() task_pipe_code = task.get_value('pipeline_code') # if the current task has no pipeline, then search for # any task pipeline if not task_pipe_code: # just use the default task_pipe_code = 'task' pipeline = Pipeline.get_by_code(task_pipe_code) if not pipeline: pipeline = Pipeline.get_by_code('task') my.task_pipelines = [pipeline] else: # get all of the pipelines for tasks search = Search('sthpw/pipeline') search.add_regex_filter('search_type', 'sthpw/task') my.task_pipelines = search.get_sobjects() # get all of the pipelines for the current search_type search_type = my.state.get("search_type"); search = Search('sthpw/pipeline') if search_type: search.add_filter('search_type', search_type) my.sobject_pipelines = search.get_sobjects() # insert the default task pipeline if not overridden in the db default_task_found = False pipeline_codes = SObject.get_values(my.task_pipelines, 'code') if 'task' in pipeline_codes: default_task_found = True if not default_task_found: default_pipe = Pipeline.get_by_code('task') my.task_pipelines.append(default_pipe) my.task_mapping = {} # the following works for insert but on edit, it should read from pipeline_code attribute for pipeline in my.sobject_pipelines: processes = pipeline.get_process_names() for process in processes: attrs = pipeline.get_process_attrs(process) task_pipeline = attrs.get('task_pipeline') if task_pipeline: key = '%s|%s' %(pipeline.get_code(), process) my.task_mapping[key] = task_pipeline #my.task_mapping = "|".join(my.task_mapping) my.is_preprocess = True
def execute(my): error_list = [] from pyasm.biz import Project Project.clear_cache() sthpw_search = Search("sthpw/project") sthpw_search.add_filter('code','sthpw') sthpw_search.set_show_retired(True) sthpw_proj = sthpw_search.get_sobject() search = Search("sthpw/project") if my.project_code: search.add_filter("code", my.project_code) else: #search.add_enum_order_by("type", ['sthpw','prod','game','design','simple', 'unittest']) search.add_enum_order_by("code", ['sthpw']) projects = search.get_sobjects() project_codes = SObject.get_values(projects, 'code') # append sthpw project in case it's retired if 'sthpw' not in project_codes and sthpw_proj: if not my.project_code: projects.insert(0, sthpw_proj) sthpw_proj.reactivate() current_dir = os.getcwd() tmp_dir = Environment.get_tmp_dir() output_file = '%s/upgrade_output.txt' % tmp_dir if not os.path.exists(tmp_dir): os.makedirs(tmp_dir) elif os.path.exists(output_file): os.unlink(output_file) ofile = open(output_file, 'w') import datetime ofile.write('Upgrade Time: %s\n\n' %datetime.datetime.now()) # dynamically generate #sql = DbContainer.get(code) database_type = Sql.get_default_database_type() #if database_type in ['Sqlite', 'MySQL']: if database_type != "PostgreSQL": # general an upgrade import imp namespaces = ['default', 'simple', 'sthpw', 'config'] for namespace in namespaces: if database_type == 'Sqlite': from pyasm.search.upgrade.sqlite import convert_sqlite_upgrade file_path = convert_sqlite_upgrade(namespace) elif database_type == 'MySQL': from pyasm.search.upgrade.mysql import convert_mysql_upgrade file_path = convert_mysql_upgrade(namespace) elif database_type == 'SQLServer': from pyasm.search.upgrade.sqlserver import convert_sqlserver_upgrade file_path = convert_sqlserver_upgrade(namespace) elif database_type == 'Oracle': file_path = convert_oracle_upgrade(namespace) else: raise Exception("Database type not implemented here") (path, name) = os.path.split(file_path) (name, ext) = os.path.splitext(name) 
(file, filename, data) = imp.find_module(name, [path]) module = imp.load_module(name, file, filename, data) class_name = "%s%sUpgrade" % (database_type,namespace.capitalize()) exec("%s = module.%s" % (class_name, class_name) ) # load all the default modules from pyasm.search.upgrade.project import * for project in projects: code = project.get_code() if code == "sthpw": type = "sthpw" else: type = project.get_type() if not type: type = 'default' if not my.quiet: print project.get_code(), type print "-"*30 # if the project is admin, the just ignore for now if code == 'admin': continue if not project.database_exists(): ofile.write("*" * 80 + '\n') msg = "Project [%s] does not have a database\n"% project.get_code() ofile.write(msg) print msg ofile.write("*" * 80 + '\n\n') continue upgrade = None if database_type != 'PostgreSQL': upgrade_class = "%s%sUpgrade" % (database_type, type.capitalize()) conf_upgrade = eval("%sConfigUpgrade()" % database_type) else: upgrade_class = "%sUpgrade" % type.capitalize() conf_upgrade = eval("ConfigUpgrade()") upgrade = eval("%s()" % upgrade_class) # upgrade config (done for every project but sthpw) conf_upgrade.set_project(project.get_code()) conf_upgrade.set_to_version(my.to_version) conf_upgrade.set_forced(my.is_forced) conf_upgrade.set_quiet(my.quiet) conf_upgrade.set_confirmed(my.is_confirmed) conf_upgrade.execute() # append the errors for each upgrade key = '%s|%s' %(project.get_code(), conf_upgrade.__class__.__name__) error_list.append((conf_upgrade.__class__.__name__, project.get_code(), \ Container.get_seq(key))) # perform the upgrade to the other tables if upgrade: upgrade.set_project(project.get_code() ) upgrade.set_to_version(my.to_version) upgrade.set_forced(my.is_forced) upgrade.set_quiet(my.quiet) upgrade.set_confirmed(my.is_confirmed) #Command.execute_cmd(upgrade) # put each upgrade function in its own transaction # carried out in BaseUpgrade upgrade.execute() # append the errors for each upgrade key = '%s|%s' 
%(project.get_code(), upgrade.__class__.__name__) error_list.append((upgrade.__class__.__name__, project.get_code(), \ Container.get_seq(key))) from pyasm.search import DatabaseImpl project.set_value("last_db_update", DatabaseImpl.get().get_timestamp_now(), quoted=False) if project.has_value('last_version_update'): last_version = project.get_value('last_version_update') if my.to_version > last_version: project.set_value("last_version_update", my.to_version) else: # it should be getting the upgrade now, redo the search print "Please run upgrade_db.py again, the sthpw db has just been updated" return project.commit(triggers=False) # print the errors for each upgrade for cls_name, project_code, errors in error_list: if not my.quiet: print print "Errors for %s [%s]:" %(project_code, cls_name) ofile.write("Errors for %s [%s]:\n" %(project_code, cls_name)) if not my.quiet: print "*" * 80 ofile.write("*" * 80 + '\n') for func, error in errors: if not my.quiet: print '[%s]' % func print "-" * 70 print error ofile.write('[%s]\n' % func) ofile.write("-" * 70 + '\n') ofile.write('%s\n' %error) ofile.close() if my.quiet: print "Please refer to the upgrade_output.txt file for any upgrade messages." print # handle sthpw database separately. This ensures that the project entry # gets created if none exists. 
#print "sthpw" #print "-"*30 #upgrade = SthpwUpgrade() #upgrade.set_project("sthpw") #Command.execute_cmd(upgrade) # update the template zip files data_dir = Environment.get_data_dir(manual=False) dest_dir = '%s/templates' %data_dir if os.path.exists(dest_dir): install_dir = Environment.get_install_dir() src_code_template_dir = '%s/src/install/start/templates' %install_dir if os.path.exists(src_code_template_dir): zip_files = os.listdir(src_code_template_dir) io_errors = False for zip_file in zip_files: if not zip_file.endswith(".zip"): continue try: src_file = '%s/%s' %(src_code_template_dir, zip_file) dest_file = '%s/%s' %(dest_dir, zip_file) shutil.copyfile(src_file, dest_file) except IOError, e: print e io_errors = True if not io_errors: print "Default project template files have been updated." else: print "There was a problem copying the default template files to <TACTIC_DATA_DIR>/templates."
def get_connected_sobjects(cls, sobjects, direction="dst", order_by=None, context='', filters=None): '''get the sobjects that are connect to this sobject.''' unique_stype = False single_sobject = False src_search_types = [] src_search_ids = [] if not sobjects: return [], [] if isinstance(sobjects, list): search_types = [x.get_search_type() for x in sobjects] search_ids = [x.get_id() for x in sobjects] if len(Common.get_unique_list(search_types)) == 1: unique_stype = True else: search_types = [sobjects.get_search_type()] search_ids = [sobjects.get_id()] unique_stype = True single_sobject = True if direction == "dst": prefixA = "dst" prefixB = "src" else: prefixA = "src" prefixB = "dst" connections = [] if unique_stype: search = Search(SObjectConnection) search.add_filter("%s_search_type" % prefixA, search_types[0] ) search.add_filters("%s_search_id" % prefixA, search_ids ) if context: search.add_filter("context", context) if order_by: search.add_order_by(order_by) key = search.get_statement() cache = Container.get("SObjectConnection:cache") if cache == None: cache = {} Container.put("SObjectConnection:cache", cache) ret_val = cache.get(key) if ret_val != None: return ret_val connections = search.get_sobjects() """ new_search = Search(src_search_types[0]) select = new_search.get_select() from_table = new_search.get_table() to_table = 'connection' from_col = 'id' to_col = 'src_search_id' select.add_join(from_table, to_table, from_col, to_col, join='INNER', database2='sthpw') """ else: raise TacticException('Only unique stypes are supported for the passed in sobjects') src_sobjects = [] src_stype = None src_stypes = SObject.get_values(connections, "%s_search_type" % prefixB, unique=True) src_ids = SObject.get_values(connections, "%s_search_id" % prefixB, unique=True) if len(src_stypes) == 1: src_stype = src_stypes[0] if src_stype: single_src_ids = len(src_ids) == 1 if not filters and single_src_ids: src = Search.get_by_id(src_stype, src_ids[0]) src_sobjects.append(src) 
else: new_search = Search(src_stype) if single_src_ids: new_search.add_filter('id', src_ids[0]) else: new_search.add_filters('id', src_ids) if filters: new_search.add_op_filters(filters) src_sobjects = new_search.get_sobjects() else: for connection in connections: src_search_type = connection.get_value("%s_search_type" % prefixB) src_search_id = connection.get_value("%s_search_id" % prefixB) # TODO: this could be made faster because often, these will be # of the same stype if not filters: src = Search.get_by_id(src_search_type, src_search_id) else: src_search = Search(src_search_type) src_search.add_filter("id", src_search_id) src_search.add_op_filters(filters) src = src_search.get_sobject() #if not src.is_retired(): # don't check for retired here. check it at the caller for # css manipulation if src: src_sobjects.append(src) else: print "WARNING: connection sobject does not exist .. deleting" connection.delete() cache[key] = connections, src_sobjects return connections, src_sobjects
def execute(my):
    """Upgrade the database schema of every project (or only the project
    named by my.project_code) to my.to_version.

    Writes a log to <tmp_dir>/upgrade_output.txt, collects per-upgrade
    errors and prints them at the end (suppressed by my.quiet), then
    refreshes the bundled project template zip files.

    Reads: my.project_code, my.to_version, my.is_forced, my.quiet,
           my.is_confirmed

    NOTE(review): this is a near-duplicate of the other execute() upgrade
    method in this file — consider consolidating.
    """
    error_list = []

    from pyasm.biz import Project
    Project.clear_cache()

    # fetch the sthpw project explicitly: it may be retired, in which case
    # the normal project search below would not return it
    sthpw_search = Search("sthpw/project")
    sthpw_search.add_filter('code', 'sthpw')
    sthpw_search.set_show_retired(True)
    sthpw_proj = sthpw_search.get_sobject()

    search = Search("sthpw/project")
    if my.project_code:
        search.add_filter("code", my.project_code)
    else:
        # sthpw must be upgraded before the other projects
        #search.add_enum_order_by("type", ['sthpw','prod','game','design','simple', 'unittest'])
        search.add_enum_order_by("code", ['sthpw'])
    projects = search.get_sobjects()
    project_codes = SObject.get_values(projects, 'code')

    # append sthpw project in case it's retired
    if 'sthpw' not in project_codes and sthpw_proj:
        if not my.project_code:
            projects.insert(0, sthpw_proj)
            sthpw_proj.reactivate()

    # NOTE(review): current_dir is never used
    current_dir = os.getcwd()
    tmp_dir = Environment.get_tmp_dir()
    output_file = '%s/upgrade_output.txt' % tmp_dir
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
    elif os.path.exists(output_file):
        # start each run with a fresh log
        os.unlink(output_file)
    ofile = open(output_file, 'w')

    import datetime
    ofile.write('Upgrade Time: %s\n\n' % datetime.datetime.now())

    # dynamically generate
    #sql = DbContainer.get(code)
    database_type = Sql.get_default_database_type()
    #if database_type in ['Sqlite', 'MySQL']:
    if database_type != "PostgreSQL":
        # non-PostgreSQL backends: generate the upgrade modules on the fly
        # and bind each <DbType><Namespace>Upgrade class into this scope
        # general an upgrade
        import imp
        namespaces = ['default', 'simple', 'sthpw', 'config']
        for namespace in namespaces:
            if database_type == 'Sqlite':
                from pyasm.search.upgrade.sqlite import convert_sqlite_upgrade
                file_path = convert_sqlite_upgrade(namespace)
            elif database_type == 'MySQL':
                from pyasm.search.upgrade.mysql import convert_mysql_upgrade
                file_path = convert_mysql_upgrade(namespace)
            elif database_type == 'SQLServer':
                from pyasm.search.upgrade.sqlserver import convert_sqlserver_upgrade
                file_path = convert_sqlserver_upgrade(namespace)
            elif database_type == 'Oracle':
                file_path = convert_oracle_upgrade(namespace)
            else:
                raise Exception("Database type not implemented here")

            (path, name) = os.path.split(file_path)
            (name, ext) = os.path.splitext(name)
            # NOTE(review): imp.find_module returns an open file handle that
            # is never closed here — small leak, one per namespace
            (file, filename, data) = imp.find_module(name, [path])
            module = imp.load_module(name, file, filename, data)
            class_name = "%s%sUpgrade" % (database_type, namespace.capitalize())
            # binds the upgrade class into this function's local scope
            exec("%s = module.%s" % (class_name, class_name))

    # load all the default modules
    from pyasm.search.upgrade.project import *

    for project in projects:
        code = project.get_code()
        # `type` (shadows the builtin) selects the upgrade class below
        if code == "sthpw":
            type = "sthpw"
        else:
            type = project.get_type()
        if not type:
            type = 'default'

        if not my.quiet:
            print project.get_code(), type
            print "-" * 30

        # if the project is admin, the just ignore for now
        if code == 'admin':
            continue

        if not project.database_exists():
            ofile.write("*" * 80 + '\n')
            msg = "Project [%s] does not have a database\n" % project.get_code()
            ofile.write(msg)
            print msg
            ofile.write("*" * 80 + '\n\n')
            continue

        upgrade = None
        if database_type != 'PostgreSQL':
            upgrade_class = "%s%sUpgrade" % (database_type, type.capitalize())
            conf_upgrade = eval("%sConfigUpgrade()" % database_type)
        else:
            upgrade_class = "%sUpgrade" % type.capitalize()
            conf_upgrade = eval("ConfigUpgrade()")
        upgrade = eval("%s()" % upgrade_class)

        # upgrade config (done for every project but sthpw)
        conf_upgrade.set_project(project.get_code())
        conf_upgrade.set_to_version(my.to_version)
        conf_upgrade.set_forced(my.is_forced)
        conf_upgrade.set_quiet(my.quiet)
        conf_upgrade.set_confirmed(my.is_confirmed)
        conf_upgrade.execute()

        # append the errors for each upgrade
        key = '%s|%s' % (project.get_code(), conf_upgrade.__class__.__name__)
        error_list.append((conf_upgrade.__class__.__name__, project.get_code(), \
                Container.get_seq(key)))

        # perform the upgrade to the other tables
        if upgrade:
            upgrade.set_project(project.get_code())
            upgrade.set_to_version(my.to_version)
            upgrade.set_forced(my.is_forced)
            upgrade.set_quiet(my.quiet)
            upgrade.set_confirmed(my.is_confirmed)
            #Command.execute_cmd(upgrade)
            # put each upgrade function in its own transaction
            # carried out in BaseUpgrade
            upgrade.execute()

            # append the errors for each upgrade
            key = '%s|%s' % (project.get_code(), upgrade.__class__.__name__)
            error_list.append((upgrade.__class__.__name__, project.get_code(), \
                    Container.get_seq(key)))

        from pyasm.search import DatabaseImpl
        project.set_value("last_db_update", DatabaseImpl.get().get_timestamp_now(), quoted=False)
        if project.has_value('last_version_update'):
            last_version = project.get_value('last_version_update')
            if my.to_version > last_version:
                project.set_value("last_version_update", my.to_version)
        else:
            # it should be getting the upgrade now, redo the search
            print "Please run upgrade_db.py again, the sthpw db has just been updated"
            return

        project.commit(triggers=False)

    # print the errors for each upgrade
    for cls_name, project_code, errors in error_list:
        if not my.quiet:
            print
            print "Errors for %s [%s]:" % (project_code, cls_name)
        ofile.write("Errors for %s [%s]:\n" % (project_code, cls_name))
        if not my.quiet:
            print "*" * 80
        ofile.write("*" * 80 + '\n')
        for func, error in errors:
            if not my.quiet:
                print '[%s]' % func
                print "-" * 70
                print error
            ofile.write('[%s]\n' % func)
            ofile.write("-" * 70 + '\n')
            ofile.write('%s\n' % error)
    ofile.close()

    if my.quiet:
        print "Please refer to the file [%s] for any upgrade messages." % output_file
        print

    # handle sthpw database separately. This ensures that the project entry
    # gets created if none exists.
    #print "sthpw"
    #print "-"*30
    #upgrade = SthpwUpgrade()
    #upgrade.set_project("sthpw")
    #Command.execute_cmd(upgrade)

    # update the template zip files shipped with the installation
    data_dir = Environment.get_data_dir(manual=False)
    dest_dir = '%s/templates' % data_dir
    if os.path.exists(dest_dir):
        install_dir = Environment.get_install_dir()
        src_code_template_dir = '%s/src/install/start/templates' % install_dir
        if os.path.exists(src_code_template_dir):
            zip_files = os.listdir(src_code_template_dir)
            io_errors = False
            for zip_file in zip_files:
                if not zip_file.endswith(".zip"):
                    continue
                try:
                    src_file = '%s/%s' % (src_code_template_dir, zip_file)
                    dest_file = '%s/%s' % (dest_dir, zip_file)
                    shutil.copyfile(src_file, dest_file)
                except IOError, e:
                    print e
                    io_errors = True
            if not io_errors:
                print "Default project template files have been updated."
            else:
                print "There was a problem copying the default template files to <TACTIC_DATA_DIR>/templates."
def preprocess(self): # protect against the case where there is a single sobject that # is an insert (often seen in "insert") if self.is_preprocessed == True: return skip = False if len(self.sobjects) == 1: if not self.sobjects[0].has_value("search_type"): skip = True if not skip: search_types = SObject.get_values(self.sobjects, 'search_type', unique=True) try: search_codes = SObject.get_values(self.sobjects, 'search_code', unique=True) search_ids = None except Exception as e: print "WARNING: ", e search_ids = SObject.get_values(self.sobjects, 'search_id', unique=True) search_codes = None else: search_types = [] search_codes = [] # if there is more than one search_type, then go get each parent # individually # NOTE: this is very slow!!!! ref_sobjs = [] if len(search_types) > 1: ref_sobjs = [] for tmp_sobj in self.sobjects: try: ref_sobj = tmp_sobj.get_parent() if ref_sobj: ref_sobjs.append(ref_sobj) else: warning = "Dangling reference: %s" % tmp_sobj.get_search_key() Environment.add_warning(warning, warning) except SearchException as e: # skips unknown search_type/project print e.__str__() continue elif len(search_types) == 1: search_type = self.sobjects[0].get_value("search_type") try: if search_codes != None: ref_sobjs = Search.get_by_code(search_type, search_codes) else: ref_sobjs = Search.get_by_id(search_type, search_ids) except SearchException as e: # skips unknown search_type/project print e.__str__() pass # TODO: None defaults to search_key, should be empty self.ref_sobj_dict = SObject.get_dict(ref_sobjs, None) # when drawn as part of a TbodyWdg, we want to disable the calculation # of most things so that it will not try to display a prev row if self.get_option('disable') == 'true': self.ref_sobj_dict = None self.empty = True self.is_preprocessed = True
def get_display(self):
    """Build and return the task-table widget: a Filterbox with milestone,
    process, user, task-property, retired, date-range, sub-task and status
    filters, followed by a TableWdg of the matching sthpw/task rows.

    Reads: self.search_type (falls back to the 'search_type' option),
    self.sobject_filter, self.process_filter_name, self.task_view,
    self.show_all_task_approvals, self.calendar_options, self.INVALID.
    """
    web = WebContainer.get_web()
    widget = Widget()
    if not self.search_type:
        self.search_type = self.options.get("search_type")
    assert self.search_type
    sobject_filter = self.sobject_filter
    web_state = WebState.get()
    web_state.add_state("ref_search_type", self.search_type)

    div = FilterboxWdg()
    widget.add(div)

    # add the sobject filter
    if self.sobject_filter:
        div.add(self.sobject_filter)

    # add a milestone filter
    milestone_filter = FilterSelectWdg("milestone_filter", label="Milestone: ")
    milestones = Search("sthpw/milestone").get_sobjects()
    milestone_filter.set_sobjects_for_options(milestones, "code", "code")
    milestone_filter.add_empty_option(label='-- Any Milestones --')
    milestone_filter.set_submit_onchange(False)
    milestone = milestone_filter.get_value()
    div.add_advanced_filter(milestone_filter)

    # add a process filter
    process_filter = ProcessFilterSelectWdg(name=self.process_filter_name, label='Process: ')
    process_filter.set_search_type(self.search_type)
    process_filter.set_submit_onchange(False)
    div.add_advanced_filter(process_filter)

    user_filter = None
    user = Environment.get_user_name()
    # it has a special colunn 'assigned'
    if not UserFilterWdg.has_restriction():
        user_filter = UserFilterWdg()
        user_filter.set_search_column('assigned')
        user = user_filter.get_value()
        div.add_advanced_filter(user_filter)

    # add a task properties search
    search_columns = ['status', 'description']
    task_search_filter = SearchFilterWdg(name='task_prop_search', \
            columns=search_columns, label='Task Search: ')
    div.add_advanced_filter(task_search_filter)

    # add a retired filter
    retired_filter = RetiredFilterWdg()
    div.add_advanced_filter(retired_filter)

    # set a limit to only see set amount of sobjects at a time
    search_limit = SearchLimitWdg()
    search_limit.set_limit(50)
    search_limit.set_style(SearchLimitWdg.LESS_DETAIL)
    div.add_bottom(search_limit)

    div.add_advanced_filter(HtmlElement.br(2))

    start_date_wdg = CalendarInputWdg("start_date_filter", label="From: ", css='med')
    start_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(start_date_wdg)
    start_date = start_date_wdg.get_value()

    # these dates are actually used for search filtering
    processed_start_date = None
    processed_end_date = None
    if start_date:
        date = Date(db_date=start_date)
        # this guarantees a valid date( today ) is invalid input is detected
        processed_start_date = date.get_db_date()
        if start_date != processed_start_date:
            # flag unparseable input back to the widget
            start_date_wdg.set_value(self.INVALID)

    # add hints
    hint = HintWdg("The 'From' and 'To' dates apply to bid dates.")
    #span.add(hint)

    end_date_wdg = CalendarInputWdg("end_date_filter", label="To: ", css='med')
    end_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(end_date_wdg)
    div.add_advanced_filter(hint)
    end_date = end_date_wdg.get_value()
    if end_date:
        date = Date(db_date=end_date)
        processed_end_date = date.get_db_date()
        if end_date != processed_end_date:
            end_date_wdg.set_value(self.INVALID)

    # show sub task checkbox
    sub_task_cb = FilterCheckboxWdg('show_sub_tasks', label='show sub tasks', css='med')
    div.add_advanced_filter(sub_task_cb)

    div.add_advanced_filter(HtmlElement.br(2))

    task_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(task_filter)

    shot_filter = None
    if self.search_type == 'prod/shot':
        shot_filter = SObjectStatusFilterWdg()
        div.add_advanced_filter(shot_filter)

    # add refresh icon
    '''
    refresh = IconRefreshWdg(long=False)
    calendar_div.add(refresh)
    calendar_div.add(SpanWdg(' ', css='small'))
    '''

    # get all of the assets
    search = Search(self.search_type)
    if sobject_filter:
        sobject_filter.alter_search(search)
    if shot_filter:
        # only filter when the user deselected some statuses
        shot_statuses = shot_filter.get_statuses()
        shot_statuses_selected = shot_filter.get_values()
        if shot_statuses != shot_statuses_selected:
            search.add_filters("status", shot_filter.get_values() )
    assets = search.get_sobjects()
    if not assets:
        # drawing the empty table prevents the loss of some prefs data
        table = TableWdg("sthpw/task", self.task_view)
        #widget.add(HtmlElement.h3("No assets found"))
        widget.add(table)
        return widget

    # this assumes looking at one project only
    project_search_type = assets[0].get_search_type()
    ids = SObject.get_values(assets, 'id')

    # get all of the tasks
    search = Search("sthpw/task")
    # apply date ranges only when the input parsed cleanly
    if processed_start_date and start_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_start_date >= '%s' or actual_start_date >='%s')" \
                % (processed_start_date, processed_start_date))
    if processed_end_date and end_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_end_date <= '%s' or actual_end_date <='%s')" \
                % (processed_end_date, processed_end_date))

    # filter out sub pipeline tasks
    if not sub_task_cb.is_checked():
        search.add_regex_filter('process', '/', op='NEQ')

    search.add_filter("search_type", project_search_type)
    search.add_filters("search_id", ids )
    # order by the search ids of the asset as the were defined in the
    # previous search
    search.add_enum_order_by("search_id", ids)

    if user != "":
        search.add_filter("assigned", user)
    if milestone != "":
        search.add_filter("milestone_code", milestone)
    process_filter.alter_search(search)
    task_search_filter.alter_search(search)

    if not self.show_all_task_approvals:
        #task_filter = TaskStatusFilterWdg(task_pipeline="task")
        #widget.add(task_filter)
        task_statuses = task_filter.get_processes()
        task_statuses_selected = task_filter.get_values()
        # one way to show tasks with obsolete statuses when the user
        # check all the task status checkboxes
        if task_statuses != task_statuses_selected:
            search.add_filters("status", task_filter.get_values() )

    # filter for retired ...
    # NOTE: this must be above the search limit filter
    # because it uses a get count which commits the retired flag
    if retired_filter.get_value() == 'true':
        search.set_show_retired(True)

    # alter_search() will run set_search() implicitly
    search_limit.alter_search(search)

    # define the table
    table = TableWdg("sthpw/task", self.task_view)

    # get all of the tasks
    tasks = search.get_sobjects()
    sorted_tasks = self.process_tasks(tasks, search)

    widget.add( HtmlElement.br() )
    table.set_sobjects(sorted_tasks)

    # make some adjustments to the calendar widget
    calendar_wdg = table.get_widget("schedule")
    for name, value in self.calendar_options.items():
        calendar_wdg.set_option(name, value)

    widget.add(table)
    return widget
def execute(my): date = Date() cur_time = date.get_utc() print "Burn down" #first = 8 * 60 * 60 first = 30 next = 10 # search for all of the tasks that are pending search = Search("sthpw/task") search.add_filter("status", "Pending") sobjects = search.get_sobjects() # get the time when this was set to pending search = Search("sthpw/status_log") search.add_filter("from_status", "Assignment") search.add_filter("to_status", "Pending") logs = search.get_sobjects() logs_dict = SObject.get_dict(logs, ["search_type", "search_id"]) # analyze tasks ready_sobjects = [] for sobject in sobjects: search_key = sobject.get_search_key() # get the logs log = logs_dict.get(search_key) if not log: continue log_date = Date(db=log.get_value("timestamp")) log_time = log_date.get_utc() interval = cur_time - log_time # if we haven't passed the first marker, then just skip if interval < first: continue # put an upper limit where it doesn't make anymore sense if interval > 21 * 24 * 60 * 60: continue # once we've reached the first marker, email next interval start = (interval - first) / next print "start: ", interval, first, start continue parent = sobject.get_parent() if not parent: print "WARNING: parent does not exist [%s]" % sobject.get_search_key( ) continue process = sobject.get_value("process") assigned = sobject.get_value("assigned") status = sobject.get_value("status") code = parent.get_code() print(code, assigned, process, status, interval / 3600) ready_sobjects.append(sobject) # TODO: problem how to prevent emails from happening every iteration? 
# this is run every minute, so remember the last time an email has been # sent for a particular if not ready_sobjects: return from pyasm.command import Command class BurnDownCmd(Command): def get_title(my): return "Burn Down Command" def set_sobjects(my, sobjects): my.sobjects = [sobject] def execute(my): # call email trigger from email_trigger import EmailTrigger email_trigger = EmailTrigger() email_trigger.set_command(my) email_trigger.execute() # call email trigger #cmd = BurnDownCmd() #cmd.set_sobjects(ready_sobjects) #Command.execute_cmd(cmd) # remember the time of each email for sobject in ready_sobjects: search_key = sobject.get_search_key() my.notified[search_key] = cur_time
# NOTE(review): fragment of a method body -- the enclosing `def` lies outside
# this view. Builds a "saved searches" selector (from config/widget_config
# rows whose view starts with 'saved_search:') plus a retrieve button.
div = DivWdg()
div.add("List of Saved Searches: ")
div.add(HtmlElement.br(2))
try:
    search = Search("config/widget_config")
    search.add_where("\"view\" like 'saved_search:%'")
    search.add_filter("search_type", my.search_type)
    configs = search.get_sobjects()
except SearchException, e:
    # best-effort: an unknown search_type just yields an empty list
    print "WARNING: ", e
    configs = []
except:
    # any other failure: drop the stored search data, then re-raise so the
    # error still surfaces
    my.clear_search_data(my.search_type)
    raise
views = SObject.get_values(configs, "view")
select = SelectWdg("saved_search")
select.set_id("saved_search")
select.add_empty_option("-- Select --")
#select.set_option("query", "config/widget_config|view|view")
select.set_option("values", views)
#select.set_option("query_filter", "\"view\" like 'saved_search:%'")
div.add(select)
retrieve_button = ButtonWdg("Retrieve Search")
# client-side handler performs the actual retrieval
behavior = {
    'type': 'click',
    'cbjs_action': 'spt.dg_table.retrieve_search_cbk(evt, bvr);'
}
retrieve_button.add_behavior( behavior )