def preprocess(my):
    """Bulk-fetch the tasks for every sobject in my.sobjects with a single
    query and index them into my.data, keyed by "<search_type>|<search_id>".

    Tasks are ordered by the pipeline's process order when the first sobject
    has a pipeline, otherwise alphabetically by process, then by id.
    """
    if my.sobjects:
        try:
            # one task query covering all sobjects; assumes they all share
            # the search_type of the first sobject
            search = Search(Task)
            search_ids = [x.get_id() for x in my.sobjects]
            search.add_filters("search_id", search_ids)
            search_type = my.sobjects[0].get_search_type()
            search.add_filter("search_type", search_type)
            # go thru children of main search
            search = my.alter_task_search(search, prefix='children')
            # go thru Local Search
            search = my.alter_task_search(search, prefix='main_body', prefix_namespace=my.__class__.__name__)
            # use the first sobject as the sample for pipeline ordering
            sobj = my.sobjects[0]
            pipeline = Pipeline.get_by_sobject(sobj)
            if pipeline:
                process_names = pipeline.get_process_names(True)
                search.add_enum_order_by("process", process_names)
            else:
                search.add_order_by("process")
            search.add_order_by("id")
            tasks = search.get_sobjects()
            # create a data structure: group tasks per parent sobject under
            # the "<search_type>|<search_id>" key
            for task in tasks:
                search_type = task.get_value("search_type")
                search_id = task.get_value("search_id")
                search_key = "%s|%s" % (search_type, search_id)
                sobject_tasks = my.data.get(search_key)
                if not sobject_tasks:
                    sobject_tasks = []
                    my.data[search_key] = sobject_tasks
                sobject_tasks.append(task)
        except:
            # on any failure, clear the persisted search criteria so a bad
            # search does not stick across requests, then re-raise
            from tactic.ui.app import SearchWdg
            # NOTE(review): get_search_type() is called as a bare function and
            # is not defined in this view -- confirm it resolves at runtime
            # (it may be intended to come from my.sobjects[0] or a parent wdg)
            parent_search_type = get_search_type()
            SearchWdg.clear_search_data(parent_search_type)
            raise
def preprocess(my):
    """Bulk-fetch the tasks for every sobject in my.sobjects with a single
    query and index them into my.data, keyed by "<search_type>|<search_id>".

    Tasks are ordered by the pipeline's process order when the first sobject
    has a pipeline, otherwise alphabetically by process, then by id.
    """
    if my.sobjects:
        try:
            # one task query covering all sobjects; assumes they all share
            # the search_type of the first sobject
            search = Search(Task)
            search_ids = [x.get_id() for x in my.sobjects]
            search.add_filters("search_id", search_ids)
            search_type = my.sobjects[0].get_search_type()
            search.add_filter("search_type", search_type)
            # go thru children of main search
            search = my.alter_task_search(search, prefix='children')
            # go thru Local Search
            search = my.alter_task_search(search, prefix='main_body', prefix_namespace=my.__class__.__name__)
            # use the first sobject as the sample for pipeline ordering
            sobj = my.sobjects[0]
            pipeline = Pipeline.get_by_sobject(sobj)
            if pipeline:
                process_names = pipeline.get_process_names(True)
                search.add_enum_order_by("process", process_names)
            else:
                search.add_order_by("process")
            search.add_order_by("id")
            tasks = search.get_sobjects()
            # create a data structure: group tasks per parent sobject under
            # the "<search_type>|<search_id>" key
            for task in tasks:
                search_type = task.get_value("search_type")
                search_id = task.get_value("search_id")
                search_key = "%s|%s" % (search_type, search_id)
                sobject_tasks = my.data.get(search_key)
                if not sobject_tasks:
                    sobject_tasks = []
                    my.data[search_key] = sobject_tasks
                sobject_tasks.append(task)
        except:
            # on any failure, clear the persisted search criteria so a bad
            # search does not stick across requests, then re-raise
            from tactic.ui.app import SearchWdg
            # NOTE(review): get_search_type() is called as a bare function and
            # is not defined in this view -- confirm it resolves at runtime
            # (it may be intended to come from my.sobjects[0] or a parent wdg)
            parent_search_type = get_search_type()
            SearchWdg.clear_search_data(parent_search_type)
            raise
def execute(my):
    """Export the sobjects selected by my.search_ids / my.search_keys (or all
    sobjects of my.search_type) to a CSV file at my.file_path, one row per
    sobject and one column per configured element of my.view.

    Values whose element carries the display option csv_force_string='true'
    are written through a '#FORCESTRING#' marker which a post-processing pass
    rewrites to the spreadsheet force-string prefix (=...) in place.
    """
    assert my.search_type
    assert my.view
    assert my.file_path

    # collect the sobjects to export
    search = Search(my.search_type)
    if my.search_ids:
        # preserve the explicit id order supplied by the caller
        search.add_enum_order_by("id", my.search_ids)
        search.add_filters("id", my.search_ids)
        sobjects = search.get_sobjects()
    elif my.search_keys:
        sobjects = Search.get_by_search_keys(my.search_keys, keep_order=True)
    else:
        sobjects = search.get_sobjects()

    from pyasm.widget import WidgetConfigView
    from pyasm.web import Widget
    config = WidgetConfigView.get_by_search_type(my.search_type, my.view)

    # copy the caller's column list: the previous code aliased
    # my.column_names, so the insert() below mutated the caller's list and
    # prepended a duplicate "id" on every subsequent run
    columns = []
    if my.column_names:
        columns = list(my.column_names)
    if my.include_id:
        columns.insert(0, "id")

    # create the csv file (titles first, then one row per sobject);
    # close it even if a widget raises mid-export
    org_file = open(my.file_path, 'w')
    try:
        csvwriter = csv.writer(org_file, quoting=csv.QUOTE_NONNUMERIC)
        # write the titles
        csvwriter.writerow(columns)

        elements = my.get_elements(config, columns)

        # let every element preprocess once against the full sobject list
        display_option_dict = {}
        for idx, element in enumerate(elements):
            element.set_sobjects(sobjects)
            element.preprocess()
            display_options = config.get_display_options(columns[idx])
            display_option_dict[element] = display_options

        for idx, sobject in enumerate(sobjects):
            values = []
            for element in elements:
                element.set_current_index(idx)
                value = element.get_text_value()
                if isinstance(value, Widget):
                    value = value.get_buffer_display()
                elif isinstance(value, basestring):
                    # normalize unicode to UTF-8 bytes for the Py2 csv module
                    if isinstance(value, unicode):
                        value = value.encode('UTF-8', 'ignore')
                else:
                    value = str(value)

                options = display_option_dict.get(element)
                if options.get('csv_force_string') == 'true' and value:
                    # marker is rewritten to '=' by the pass below
                    value = '#FORCESTRING#%s' % value
                values.append(value)
            # write the values as list
            csvwriter.writerow(values)
    finally:
        org_file.close()

    # rewrite pass: turn "#FORCESTRING#..." into ="..." so spreadsheet apps
    # treat the cell content as a literal string
    mod_file_path = '%s_mod' % my.file_path
    file2 = open(my.file_path, 'r')
    mod_file = open(mod_file_path, 'w')
    try:
        for line in file2:
            mod_line = re.sub(r'(\'|\"|)(#FORCESTRING#)', '=\\1', line)
            mod_file.write(mod_line)
    finally:
        file2.close()
        mod_file.close()
    # replace the original file with the rewritten one
    shutil.move(mod_file_path, my.file_path)
def get_by_sobjects(sobjects, process=None, order=True):
    """Return the task sobjects attached to the given parent sobjects.

    process: restrict to a single process (string) or a list of processes.
    order:   when True, order by the sample sobject's pipeline process
             order (falling back to alphabetical process), then by id.
    """
    if not sobjects:
        return []

    # quickly go through the sobjects to determine if their search types
    # are the same
    multi_stypes = False
    for sobject in sobjects:
        if sobject.get_search_type() != sobjects[0].get_search_type():
            multi_stypes = True
            break

    search = Search( Task.SEARCH_TYPE )
    if multi_stypes:
        # sort this into a dictionary and make multiple calls to
        # search.add_relationship_filters
        # use the first sobject as a sample
        sobjects_dict = {}
        for sobject in sobjects:
            st = sobject.get_search_type()
            sobj_list = sobjects_dict.get(st)
            if sobj_list == None:
                sobjects_dict[st] = [sobject]
            else:
                sobj_list.append(sobject)

        # OR together one relationship-filter group per search type
        search.add_op('begin')
        for key, sobj_list in sobjects_dict.items():
            search.add_op('begin')
            search.add_relationship_filters(sobj_list)
            search.add_op('and')
        search.add_op('or')
    else:
        from pyasm.biz import Schema
        schema = Schema.get()
        # FIXME: why doesn't the ops work here?
        filters = []
        search.add_relationship_filters(sobjects)
        """
        for sobject in sobjects:
            search_type = sobject.get_search_type()
            attrs = schema.get_relationship_attrs("sthpw/task", search_type)
            attrs = schema.resolve_relationship_attrs(attrs, "sthpw/task", search_type)
            search_code = sobject.get_value(attrs.get("to_col"))
            #search_code = sobject.get_value("code")
            #search.add_filter('search_type', search_type)
            #search.add_filter('search_id', search_id, quoted=False)
            #search.add_op("and")
            if attrs.get("from_col") == "search_code":
                filters.append("search_type = '%s' and search_code = '%s'" % (search_type, search_code))
            else:
                filters.append("search_type = '%s' and search_id = %s" % (search_type, search_code))
        search.add_where(" or ".join(filters))
        """

    search.add_order_by("search_type")
    search.add_order_by("search_code")
    search.add_order_by("search_id")

    # get the pipeline of the sobject
    # NOTE(review): 'sobject' is the leftover loop variable from the loops
    # above (i.e. the last element of 'sobjects'), so the pipeline ordering
    # is taken from that sample -- confirm this is the intended sample
    pipeline = Pipeline.get_by_sobject(sobject)
    if order:
        if pipeline:
            process_names = pipeline.get_process_names(True)
            search.add_enum_order_by("process", process_names)
        else:
            search.add_order_by("process")
        search.add_order_by("id")

    # optional process restriction: single name or list of names
    if process:
        if isinstance(process, basestring):
            search.add_filter("process", process)
        else:
            search.add_filters("process", process)

    tasks = search.get_sobjects()
    return tasks
elif answer == 'n': sys.exit(0) else: print "Only y or n is accepted. Exiting..." sys.exit(0) # check if some projects are already in newer version Batch(site=site) search = Search("sthpw/project") if project_code: search.add_filter("code", project_code) else: search.add_enum_order_by("type", ['sthpw','prod','flash','game','design','simple', 'unittest']) projects = search.get_sobjects() project_dict = {} for project in projects: last_version = project.get_value('last_version_update', no_exception=True) if last_version > version: project_dict[project.get_code()] = last_version if project_dict: data = [] for key, value in project_dict.items(): data.append(' %s --- %s' %(key, value)) if is_confirmed: answer = 'y' else: answer = raw_input("Several projects are already in newer versions:\n%s\n"\
def execute(my):
    """Export the sobjects selected by my.search_ids / my.search_keys (or all
    sobjects of my.search_type) to a CSV file at my.file_path, one row per
    sobject and one column per configured element of my.view.

    Values whose element carries the display option csv_force_string='true'
    are written through a '#FORCESTRING#' marker which a post-processing pass
    rewrites to the spreadsheet force-string prefix (=...) in place.
    """
    assert my.search_type
    assert my.view
    assert my.file_path

    # collect the sobjects to export
    search = Search(my.search_type)
    if my.search_ids:
        # preserve the explicit id order supplied by the caller
        search.add_enum_order_by("id", my.search_ids)
        search.add_filters("id", my.search_ids)
        sobjects = search.get_sobjects()
    elif my.search_keys:
        sobjects = Search.get_by_search_keys(my.search_keys, keep_order=True)
    else:
        sobjects = search.get_sobjects()

    from pyasm.widget import WidgetConfigView
    from pyasm.web import Widget
    config = WidgetConfigView.get_by_search_type(my.search_type, my.view)

    # copy the caller's column list: the previous code aliased
    # my.column_names, so the insert() below mutated the caller's list and
    # prepended a duplicate "id" on every subsequent run
    columns = []
    if my.column_names:
        columns = list(my.column_names)
    if my.include_id:
        columns.insert(0, "id")

    # create the csv file (titles first, then one row per sobject);
    # close it even if a widget raises mid-export
    org_file = open(my.file_path, 'w')
    try:
        csvwriter = csv.writer(org_file, quoting=csv.QUOTE_NONNUMERIC)
        # write the titles
        csvwriter.writerow(columns)

        elements = my.get_elements(config, columns)

        # let every element preprocess once against the full sobject list
        display_option_dict = {}
        for idx, element in enumerate(elements):
            element.set_sobjects(sobjects)
            element.preprocess()
            display_options = config.get_display_options(columns[idx])
            display_option_dict[element] = display_options

        for idx, sobject in enumerate(sobjects):
            values = []
            for element in elements:
                element.set_current_index(idx)
                value = element.get_text_value()
                if isinstance(value, Widget):
                    value = value.get_buffer_display()
                elif isinstance(value, basestring):
                    # normalize unicode to UTF-8 bytes for the Py2 csv module
                    if isinstance(value, unicode):
                        value = value.encode('UTF-8', 'ignore')
                else:
                    value = str(value)

                options = display_option_dict.get(element)
                if options.get('csv_force_string') == 'true' and value:
                    # marker is rewritten to '=' by the pass below
                    value = '#FORCESTRING#%s' % value
                values.append(value)
            # write the values as list
            csvwriter.writerow(values)
    finally:
        org_file.close()

    # rewrite pass: turn "#FORCESTRING#..." into ="..." so spreadsheet apps
    # treat the cell content as a literal string
    mod_file_path = '%s_mod' % my.file_path
    file2 = open(my.file_path, 'r')
    mod_file = open(mod_file_path, 'w')
    try:
        for line in file2:
            mod_line = re.sub(r'(\'|\"|)(#FORCESTRING#)', '=\\1', line)
            mod_file.write(mod_line)
    finally:
        file2.close()
        mod_file.close()
    # replace the original file with the rewritten one
    shutil.move(mod_file_path, my.file_path)
def get_display(self):
    """Build the task-manager view: a filter box (milestone, process, user,
    task-property search, retired, date range, sub-task and status filters)
    followed by a task table for the assets of self.search_type that match
    the filters.
    """
    web = WebContainer.get_web()
    widget = Widget()
    if not self.search_type:
        self.search_type = self.options.get("search_type")
    assert self.search_type
    sobject_filter = self.sobject_filter
    web_state = WebState.get()
    web_state.add_state("ref_search_type", self.search_type)

    div = FilterboxWdg()
    widget.add(div)

    # add the sobject filter
    if self.sobject_filter:
        div.add(self.sobject_filter)

    # add a milestone filter
    milestone_filter = FilterSelectWdg("milestone_filter", label="Milestone: ")
    milestones = Search("sthpw/milestone").get_sobjects()
    milestone_filter.set_sobjects_for_options(milestones, "code", "code")
    milestone_filter.add_empty_option(label='-- Any Milestones --')
    milestone_filter.set_submit_onchange(False)
    milestone = milestone_filter.get_value()
    div.add_advanced_filter(milestone_filter)

    # add a process filter
    process_filter = ProcessFilterSelectWdg(name=self.process_filter_name, label='Process: ')
    process_filter.set_search_type(self.search_type)
    process_filter.set_submit_onchange(False)
    div.add_advanced_filter(process_filter)

    user_filter = None
    user = Environment.get_user_name()
    # it has a special colunn 'assigned'
    if not UserFilterWdg.has_restriction():
        user_filter = UserFilterWdg()
        user_filter.set_search_column('assigned')
        user = user_filter.get_value()
        div.add_advanced_filter(user_filter)

    # add a task properties search
    search_columns = ['status', 'description']
    task_search_filter = SearchFilterWdg(name='task_prop_search', \
        columns=search_columns, label='Task Search: ')
    div.add_advanced_filter(task_search_filter)

    # add a retired filter
    retired_filter = RetiredFilterWdg()
    div.add_advanced_filter(retired_filter)

    # set a limit to only see set amount of sobjects at a time
    search_limit = SearchLimitWdg()
    search_limit.set_limit(50)
    search_limit.set_style(SearchLimitWdg.LESS_DETAIL)
    div.add_bottom(search_limit)

    div.add_advanced_filter(HtmlElement.br(2))

    start_date_wdg = CalendarInputWdg("start_date_filter", label="From: ", css='med')
    start_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(start_date_wdg)
    start_date = start_date_wdg.get_value()

    # these dates are actually used for search filtering
    processed_start_date = None
    processed_end_date = None
    if start_date:
        date = Date(db_date=start_date)
        # this guarantees a valid date( today ) is invalid input is detected
        processed_start_date = date.get_db_date()
        if start_date != processed_start_date:
            start_date_wdg.set_value(self.INVALID)

    # add hints
    hint = HintWdg("The 'From' and 'To' dates apply to bid dates.")
    #span.add(hint)

    end_date_wdg = CalendarInputWdg("end_date_filter", label="To: ", css='med')
    end_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(end_date_wdg)
    div.add_advanced_filter(hint)
    end_date = end_date_wdg.get_value()
    if end_date:
        date = Date(db_date=end_date)
        processed_end_date = date.get_db_date()
        if end_date != processed_end_date:
            end_date_wdg.set_value(self.INVALID)

    # show sub task checkbox
    sub_task_cb = FilterCheckboxWdg('show_sub_tasks', label='show sub tasks', css='med')
    div.add_advanced_filter(sub_task_cb)

    div.add_advanced_filter(HtmlElement.br(2))

    task_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(task_filter)

    shot_filter = None
    if self.search_type == 'prod/shot':
        shot_filter = SObjectStatusFilterWdg()
        div.add_advanced_filter(shot_filter)

    # add refresh icon
    '''
    refresh = IconRefreshWdg(long=False)
    calendar_div.add(refresh)
    calendar_div.add(SpanWdg(' ', css='small'))
    '''

    # get all of the assets
    search = Search(self.search_type)
    if sobject_filter:
        sobject_filter.alter_search(search)
    if shot_filter:
        # only filter when not every status checkbox is selected
        shot_statuses = shot_filter.get_statuses()
        shot_statuses_selected = shot_filter.get_values()
        if shot_statuses != shot_statuses_selected:
            search.add_filters("status", shot_filter.get_values())
    assets = search.get_sobjects()
    if not assets:
        # drawing the empty table prevents the loss of some prefs data
        table = TableWdg("sthpw/task", self.task_view)
        #widget.add(HtmlElement.h3("No assets found"))
        widget.add(table)
        return widget

    # this assumes looking at one project only
    project_search_type = assets[0].get_search_type()
    ids = SObject.get_values(assets, 'id')

    # get all of the tasks
    search = Search("sthpw/task")
    if processed_start_date and start_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_start_date >= '%s' or actual_start_date >='%s')" \
            % (processed_start_date, processed_start_date))
    if processed_end_date and end_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_end_date <= '%s' or actual_end_date <='%s')" \
            % (processed_end_date, processed_end_date))

    # filter out sub pipeline tasks
    if not sub_task_cb.is_checked():
        search.add_regex_filter('process', '/', op='NEQ')

    search.add_filter("search_type", project_search_type)
    search.add_filters("search_id", ids)
    # order by the search ids of the asset as the were defined in the
    # previous search
    search.add_enum_order_by("search_id", ids)

    if user != "":
        search.add_filter("assigned", user)
    if milestone != "":
        search.add_filter("milestone_code", milestone)
    process_filter.alter_search(search)
    task_search_filter.alter_search(search)
    if not self.show_all_task_approvals:
        #task_filter = TaskStatusFilterWdg(task_pipeline="task")
        #widget.add(task_filter)
        task_statuses = task_filter.get_processes()
        task_statuses_selected = task_filter.get_values()
        # one way to show tasks with obsolete statuses when the user
        # check all the task status checkboxes
        if task_statuses != task_statuses_selected:
            search.add_filters("status", task_filter.get_values())

    # filter for retired ...
    # NOTE: this must be above the search limit filter
    # because it uses a get count which commits the retired flag
    if retired_filter.get_value() == 'true':
        search.set_show_retired(True)

    # alter_search() will run set_search() implicitly
    search_limit.alter_search(search)

    # define the table
    table = TableWdg("sthpw/task", self.task_view)

    # get all of the tasks
    tasks = search.get_sobjects()
    sorted_tasks = self.process_tasks(tasks, search)

    widget.add(HtmlElement.br())
    table.set_sobjects(sorted_tasks)

    # make some adjustments to the calendar widget
    calendar_wdg = table.get_widget("schedule")
    for name, value in self.calendar_options.items():
        calendar_wdg.set_option(name, value)

    widget.add(table)
    return widget
def get_display(my):
    """Return a Media-RSS (PicLens/Cooliris) feed for the sobjects named by
    the 'search_type' and 'search_ids' web form values ('|'-separated ids).

    For each sobject the latest 'icon' (falling back to 'publish') snapshot
    supplies the thumbnail and content URLs.  Returns "" when the required
    form values are missing.
    """
    from xml.sax.saxutils import escape

    web = WebContainer.get_web()
    search_type = web.get_form_value("search_type")
    search_ids = web.get_form_value("search_ids")
    if not search_type or not search_ids:
        return ""

    from pyasm.search import Search
    id_list = search_ids.split("|")
    search = Search(search_type)
    search.add_filters('id', id_list)
    # keep the caller's id ordering
    search.add_enum_order_by('id', id_list)
    sobjects = search.get_sobjects()

    xml = '''<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss">
<channel>
<title></title>
<link></link>
<description></description>
'''

    from pyasm.biz import Snapshot
    for sobject in sobjects:
        snapshot = Snapshot.get_latest_by_sobject(sobject, "icon")
        if not snapshot:
            snapshot = Snapshot.get_latest_by_sobject(sobject, "publish")
        if not snapshot:
            continue

        # prefer a flash (.swf) file over the plain web file.
        # renamed from 'web' so it no longer shadows the request object above.
        web_name = snapshot.get_name_by_type(".swf")
        if not web_name:
            web_name = snapshot.get_name_by_type("web")
        icon = snapshot.get_name_by_type("icon")

        web_dir = snapshot.get_web_dir()
        web_path = "%s/%s" % (web_dir, web_name)
        icon_path = "%s/%s" % (web_dir, icon)
        title = "%s - %s" % (sobject.get_code(), sobject.get_name())

        # escape XML-special characters (&, <, >) so a code or name
        # containing them does not break the feed
        xml += '''
<item>
<title>%s</title>
<link>%s</link>
<media:thumbnail url="%s" />
<media:content url="%s" type="" />
</item>
''' % (escape(title), escape(web_path), escape(icon_path), escape(web_path))

    xml += '''
</channel>
</rss>
'''
    return xml
def get_display(self):
    """Build the task-manager view: a filter box (milestone, process, user,
    task-property search, retired, date range, sub-task and status filters)
    followed by a task table for the assets of self.search_type that match
    the filters.
    """
    web = WebContainer.get_web()
    widget = Widget()
    if not self.search_type:
        self.search_type = self.options.get("search_type")
    assert self.search_type
    sobject_filter = self.sobject_filter
    web_state = WebState.get()
    web_state.add_state("ref_search_type", self.search_type)

    div = FilterboxWdg()
    widget.add(div)

    # add the sobject filter
    if self.sobject_filter:
        div.add(self.sobject_filter)

    # add a milestone filter
    milestone_filter = FilterSelectWdg("milestone_filter", label="Milestone: ")
    milestones = Search("sthpw/milestone").get_sobjects()
    milestone_filter.set_sobjects_for_options(milestones, "code", "code")
    milestone_filter.add_empty_option(label='-- Any Milestones --')
    milestone_filter.set_submit_onchange(False)
    milestone = milestone_filter.get_value()
    div.add_advanced_filter(milestone_filter)

    # add a process filter
    process_filter = ProcessFilterSelectWdg(name=self.process_filter_name, label='Process: ')
    process_filter.set_search_type(self.search_type)
    process_filter.set_submit_onchange(False)
    div.add_advanced_filter(process_filter)

    user_filter = None
    user = Environment.get_user_name()
    # it has a special colunn 'assigned'
    if not UserFilterWdg.has_restriction():
        user_filter = UserFilterWdg()
        user_filter.set_search_column('assigned')
        user = user_filter.get_value()
        div.add_advanced_filter(user_filter)

    # add a task properties search
    search_columns = ['status', 'description']
    task_search_filter = SearchFilterWdg(name='task_prop_search', \
        columns=search_columns, label='Task Search: ')
    div.add_advanced_filter(task_search_filter)

    # add a retired filter
    retired_filter = RetiredFilterWdg()
    div.add_advanced_filter(retired_filter)

    # set a limit to only see set amount of sobjects at a time
    search_limit = SearchLimitWdg()
    search_limit.set_limit(50)
    search_limit.set_style(SearchLimitWdg.LESS_DETAIL)
    div.add_bottom(search_limit)

    div.add_advanced_filter(HtmlElement.br(2))

    start_date_wdg = CalendarInputWdg("start_date_filter", label="From: ", css='med')
    start_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(start_date_wdg)
    start_date = start_date_wdg.get_value()

    # these dates are actually used for search filtering
    processed_start_date = None
    processed_end_date = None
    if start_date:
        date = Date(db_date=start_date)
        # this guarantees a valid date( today ) is invalid input is detected
        processed_start_date = date.get_db_date()
        if start_date != processed_start_date:
            start_date_wdg.set_value(self.INVALID)

    # add hints
    hint = HintWdg("The 'From' and 'To' dates apply to bid dates.")
    #span.add(hint)

    end_date_wdg = CalendarInputWdg("end_date_filter", label="To: ", css='med')
    end_date_wdg.set_persist_on_submit()
    div.add_advanced_filter(end_date_wdg)
    div.add_advanced_filter(hint)
    end_date = end_date_wdg.get_value()
    if end_date:
        date = Date(db_date=end_date)
        processed_end_date = date.get_db_date()
        if end_date != processed_end_date:
            end_date_wdg.set_value(self.INVALID)

    # show sub task checkbox
    sub_task_cb = FilterCheckboxWdg('show_sub_tasks', label='show sub tasks', css='med')
    div.add_advanced_filter(sub_task_cb)

    div.add_advanced_filter(HtmlElement.br(2))

    task_filter = TaskStatusFilterWdg()
    div.add_advanced_filter(task_filter)

    shot_filter = None
    if self.search_type == 'prod/shot':
        shot_filter = SObjectStatusFilterWdg()
        div.add_advanced_filter(shot_filter)

    # add refresh icon
    '''
    refresh = IconRefreshWdg(long=False)
    calendar_div.add(refresh)
    calendar_div.add(SpanWdg(' ', css='small'))
    '''

    # get all of the assets
    search = Search(self.search_type)
    if sobject_filter:
        sobject_filter.alter_search(search)
    if shot_filter:
        # only filter when not every status checkbox is selected
        shot_statuses = shot_filter.get_statuses()
        shot_statuses_selected = shot_filter.get_values()
        if shot_statuses != shot_statuses_selected:
            search.add_filters("status", shot_filter.get_values() )
    assets = search.get_sobjects()
    if not assets:
        # drawing the empty table prevents the loss of some prefs data
        table = TableWdg("sthpw/task", self.task_view)
        #widget.add(HtmlElement.h3("No assets found"))
        widget.add(table)
        return widget

    # this assumes looking at one project only
    project_search_type = assets[0].get_search_type()
    ids = SObject.get_values(assets, 'id')

    # get all of the tasks
    search = Search("sthpw/task")
    if processed_start_date and start_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_start_date >= '%s' or actual_start_date >='%s')" \
            % (processed_start_date, processed_start_date))
    if processed_end_date and end_date_wdg.get_value(True) != self.INVALID:
        search.add_where("(bid_end_date <= '%s' or actual_end_date <='%s')" \
            % (processed_end_date, processed_end_date))

    # filter out sub pipeline tasks
    if not sub_task_cb.is_checked():
        search.add_regex_filter('process', '/', op='NEQ')

    search.add_filter("search_type", project_search_type)
    search.add_filters("search_id", ids )
    # order by the search ids of the asset as the were defined in the
    # previous search
    search.add_enum_order_by("search_id", ids)

    if user != "":
        search.add_filter("assigned", user)
    if milestone != "":
        search.add_filter("milestone_code", milestone)
    process_filter.alter_search(search)
    task_search_filter.alter_search(search)
    if not self.show_all_task_approvals:
        #task_filter = TaskStatusFilterWdg(task_pipeline="task")
        #widget.add(task_filter)
        task_statuses = task_filter.get_processes()
        task_statuses_selected = task_filter.get_values()
        # one way to show tasks with obsolete statuses when the user
        # check all the task status checkboxes
        if task_statuses != task_statuses_selected:
            search.add_filters("status", task_filter.get_values() )

    # filter for retired ...
    # NOTE: this must be above the search limit filter
    # because it uses a get count which commits the retired flag
    if retired_filter.get_value() == 'true':
        search.set_show_retired(True)

    # alter_search() will run set_search() implicitly
    search_limit.alter_search(search)

    # define the table
    table = TableWdg("sthpw/task", self.task_view)

    # get all of the tasks
    tasks = search.get_sobjects()
    sorted_tasks = self.process_tasks(tasks, search)

    widget.add( HtmlElement.br() )
    table.set_sobjects(sorted_tasks)

    # make some adjustments to the calendar widget
    calendar_wdg = table.get_widget("schedule")
    for name,value in self.calendar_options.items():
        calendar_wdg.set_option(name, value)

    widget.add(table)
    return widget
def execute(my):
    """Upgrade every project database (or just my.project_code) to
    my.to_version: run the generated Config upgrade plus the project-type
    upgrade per project, record per-upgrade errors to
    <tmp_dir>/upgrade_output.txt, and finally refresh the bundled project
    template zip files.
    """
    error_list = []

    from pyasm.biz import Project
    Project.clear_cache()

    # fetch the sthpw project even if retired, so it can be reactivated
    sthpw_search = Search("sthpw/project")
    sthpw_search.add_filter('code', 'sthpw')
    sthpw_search.set_show_retired(True)
    sthpw_proj = sthpw_search.get_sobject()

    search = Search("sthpw/project")
    if my.project_code:
        search.add_filter("code", my.project_code)
    else:
        #search.add_enum_order_by("type", ['sthpw','prod','game','design','simple', 'unittest'])
        # always upgrade the sthpw project first
        search.add_enum_order_by("code", ['sthpw'])
    projects = search.get_sobjects()
    project_codes = SObject.get_values(projects, 'code')

    # append sthpw project in case it's retired
    if 'sthpw' not in project_codes and sthpw_proj:
        if not my.project_code:
            projects.insert(0, sthpw_proj)
        sthpw_proj.reactivate()

    current_dir = os.getcwd()
    tmp_dir = Environment.get_tmp_dir()
    output_file = '%s/upgrade_output.txt' % tmp_dir
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
    elif os.path.exists(output_file):
        os.unlink(output_file)
    ofile = open(output_file, 'w')
    import datetime
    ofile.write('Upgrade Time: %s\n\n' % datetime.datetime.now())

    # dynamically generate
    #sql = DbContainer.get(code)
    database_type = Sql.get_default_database_type()
    #if database_type in ['Sqlite', 'MySQL']:
    if database_type != "PostgreSQL":
        # general an upgrade
        # non-PostgreSQL backends: convert the stock upgrade scripts per
        # namespace, import the generated module and bind its upgrade class
        # into this scope by name (consumed via eval() below)
        import imp
        namespaces = ['default', 'simple', 'sthpw', 'config']
        for namespace in namespaces:
            if database_type == 'Sqlite':
                from pyasm.search.upgrade.sqlite import convert_sqlite_upgrade
                file_path = convert_sqlite_upgrade(namespace)
            elif database_type == 'MySQL':
                from pyasm.search.upgrade.mysql import convert_mysql_upgrade
                file_path = convert_mysql_upgrade(namespace)
            elif database_type == 'SQLServer':
                from pyasm.search.upgrade.sqlserver import convert_sqlserver_upgrade
                file_path = convert_sqlserver_upgrade(namespace)
            elif database_type == 'Oracle':
                file_path = convert_oracle_upgrade(namespace)
            else:
                raise Exception("Database type not implemented here")

            (path, name) = os.path.split(file_path)
            (name, ext) = os.path.splitext(name)
            (file, filename, data) = imp.find_module(name, [path])
            module = imp.load_module(name, file, filename, data)
            class_name = "%s%sUpgrade" % (database_type, namespace.capitalize())
            exec("%s = module.%s" % (class_name, class_name))

    # load all the default modules
    from pyasm.search.upgrade.project import *

    for project in projects:
        code = project.get_code()
        if code == "sthpw":
            type = "sthpw"
        else:
            type = project.get_type()
        if not type:
            type = 'default'

        if not my.quiet:
            print project.get_code(), type
            print "-" * 30

        # if the project is admin, the just ignore for now
        if code == 'admin':
            continue

        if not project.database_exists():
            ofile.write("*" * 80 + '\n')
            msg = "Project [%s] does not have a database\n" % project.get_code()
            ofile.write(msg)
            print msg
            ofile.write("*" * 80 + '\n\n')
            continue

        # resolve the upgrade classes bound above (or imported from
        # pyasm.search.upgrade.project for PostgreSQL) by generated name
        upgrade = None
        if database_type != 'PostgreSQL':
            upgrade_class = "%s%sUpgrade" % (database_type, type.capitalize())
            conf_upgrade = eval("%sConfigUpgrade()" % database_type)
        else:
            upgrade_class = "%sUpgrade" % type.capitalize()
            conf_upgrade = eval("ConfigUpgrade()")
        upgrade = eval("%s()" % upgrade_class)

        # upgrade config (done for every project but sthpw)
        conf_upgrade.set_project(project.get_code())
        conf_upgrade.set_to_version(my.to_version)
        conf_upgrade.set_forced(my.is_forced)
        conf_upgrade.set_quiet(my.quiet)
        conf_upgrade.set_confirmed(my.is_confirmed)
        conf_upgrade.execute()

        # append the errors for each upgrade
        key = '%s|%s' % (project.get_code(), conf_upgrade.__class__.__name__)
        error_list.append((conf_upgrade.__class__.__name__, project.get_code(), \
            Container.get_seq(key)))

        # perform the upgrade to the other tables
        if upgrade:
            upgrade.set_project(project.get_code())
            upgrade.set_to_version(my.to_version)
            upgrade.set_forced(my.is_forced)
            upgrade.set_quiet(my.quiet)
            upgrade.set_confirmed(my.is_confirmed)
            #Command.execute_cmd(upgrade)
            # put each upgrade function in its own transaction
            # carried out in BaseUpgrade
            upgrade.execute()

            # append the errors for each upgrade
            key = '%s|%s' % (project.get_code(), upgrade.__class__.__name__)
            error_list.append((upgrade.__class__.__name__, project.get_code(), \
                Container.get_seq(key)))

        # stamp the project with the upgrade time/version
        from pyasm.search import DatabaseImpl
        project.set_value("last_db_update", DatabaseImpl.get().get_timestamp_now(), quoted=False)
        if project.has_value('last_version_update'):
            last_version = project.get_value('last_version_update')
            if my.to_version > last_version:
                project.set_value("last_version_update", my.to_version)
        else:
            # it should be getting the upgrade now, redo the search
            print "Please run upgrade_db.py again, the sthpw db has just been updated"
            return

        project.commit(triggers=False)

    # print the errors for each upgrade
    for cls_name, project_code, errors in error_list:
        if not my.quiet:
            print
            print "Errors for %s [%s]:" % (project_code, cls_name)
        ofile.write("Errors for %s [%s]:\n" % (project_code, cls_name))
        if not my.quiet:
            print "*" * 80
        ofile.write("*" * 80 + '\n')
        for func, error in errors:
            if not my.quiet:
                print '[%s]' % func
                print "-" * 70
                print error
            ofile.write('[%s]\n' % func)
            ofile.write("-" * 70 + '\n')
            ofile.write('%s\n' % error)
    ofile.close()

    if my.quiet:
        print "Please refer to the upgrade_output.txt file for any upgrade messages."
        print

    # handle sthpw database separately. This ensures that the project entry
    # gets created if none exists.
    #print "sthpw"
    #print "-"*30
    #upgrade = SthpwUpgrade()
    #upgrade.set_project("sthpw")
    #Command.execute_cmd(upgrade)

    # update the template zip files shipped with the install into the
    # site's data dir; IO failures are reported but do not abort
    data_dir = Environment.get_data_dir(manual=False)
    dest_dir = '%s/templates' % data_dir
    if os.path.exists(dest_dir):
        install_dir = Environment.get_install_dir()
        src_code_template_dir = '%s/src/install/start/templates' % install_dir
        if os.path.exists(src_code_template_dir):
            zip_files = os.listdir(src_code_template_dir)
            io_errors = False
            for zip_file in zip_files:
                if not zip_file.endswith(".zip"):
                    continue
                try:
                    src_file = '%s/%s' % (src_code_template_dir, zip_file)
                    dest_file = '%s/%s' % (dest_dir, zip_file)
                    shutil.copyfile(src_file, dest_file)
                except IOError, e:
                    print e
                    io_errors = True
            if not io_errors:
                print "Default project template files have been updated."
            else:
                print "There was a problem copying the default template files to <TACTIC_DATA_DIR>/templates."
def execute(my):
    """Upgrade the database schema of every project (or only the project
    given by my.project_code) to my.to_version.

    All upgrade errors are appended to <tmp_dir>/upgrade_output.txt; as a
    final step the bundled project template zip files are refreshed under
    <TACTIC_DATA_DIR>/templates.
    """
    # accumulates (upgrade class name, project code, error sequence) triples
    error_list = []

    from pyasm.biz import Project
    Project.clear_cache()

    # look up the sthpw project explicitly, including retired entries,
    # so it can be re-inserted below even if it was retired
    sthpw_search = Search("sthpw/project")
    sthpw_search.add_filter('code', 'sthpw')
    sthpw_search.set_show_retired(True)
    sthpw_proj = sthpw_search.get_sobject()

    search = Search("sthpw/project")
    if my.project_code:
        search.add_filter("code", my.project_code)
    else:
        #search.add_enum_order_by("type", ['sthpw','prod','game','design','simple', 'unittest'])
        # upgrade the sthpw project first; all others follow in search order
        search.add_enum_order_by("code", ['sthpw'])
    projects = search.get_sobjects()
    project_codes = SObject.get_values(projects, 'code')

    # append sthpw project in case it's retired
    if 'sthpw' not in project_codes and sthpw_proj:
        if not my.project_code:
            projects.insert(0, sthpw_proj)
            sthpw_proj.reactivate()

    current_dir = os.getcwd()  # NOTE(review): appears unused in this method
    tmp_dir = Environment.get_tmp_dir()
    output_file = '%s/upgrade_output.txt' % tmp_dir
    # start each run with a fresh log file
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
    elif os.path.exists(output_file):
        os.unlink(output_file)
    ofile = open(output_file, 'w')

    import datetime
    ofile.write('Upgrade Time: %s\n\n' % datetime.datetime.now())

    # dynamically generate
    #sql = DbContainer.get(code)
    database_type = Sql.get_default_database_type()
    #if database_type in ['Sqlite', 'MySQL']:
    if database_type != "PostgreSQL":
        # general an upgrade
        # Non-PostgreSQL backends: convert the canonical upgrade definitions
        # for each namespace, import the generated module, and bind the
        # generated class into this scope so the eval() calls below can
        # instantiate it by name.
        import imp
        namespaces = ['default', 'simple', 'sthpw', 'config']
        for namespace in namespaces:
            if database_type == 'Sqlite':
                from pyasm.search.upgrade.sqlite import convert_sqlite_upgrade
                file_path = convert_sqlite_upgrade(namespace)
            elif database_type == 'MySQL':
                from pyasm.search.upgrade.mysql import convert_mysql_upgrade
                file_path = convert_mysql_upgrade(namespace)
            elif database_type == 'SQLServer':
                from pyasm.search.upgrade.sqlserver import convert_sqlserver_upgrade
                file_path = convert_sqlserver_upgrade(namespace)
            elif database_type == 'Oracle':
                # NOTE(review): unlike the branches above, convert_oracle_upgrade
                # is not imported here -- presumably imported at module level;
                # verify.
                file_path = convert_oracle_upgrade(namespace)
            else:
                raise Exception("Database type not implemented here")
            (path, name) = os.path.split(file_path)
            (name, ext) = os.path.splitext(name)
            (file, filename, data) = imp.find_module(name, [path])
            module = imp.load_module(name, file, filename, data)
            class_name = "%s%sUpgrade" % (database_type, namespace.capitalize())
            # e.g. binds SqliteDefaultUpgrade = module.SqliteDefaultUpgrade
            exec("%s = module.%s" % (class_name, class_name))

    # load all the default modules
    from pyasm.search.upgrade.project import *

    for project in projects:
        code = project.get_code()
        # the sthpw project has its own dedicated upgrade type
        # (note: "type" shadows the builtin here)
        if code == "sthpw":
            type = "sthpw"
        else:
            type = project.get_type()
        if not type:
            type = 'default'

        if not my.quiet:
            print project.get_code(), type
            print "-" * 30

        # if the project is admin, the just ignore for now
        if code == 'admin':
            continue

        # log and skip projects whose database is missing
        if not project.database_exists():
            ofile.write("*" * 80 + '\n')
            msg = "Project [%s] does not have a database\n" % project.get_code()
            ofile.write(msg)
            print msg
            ofile.write("*" * 80 + '\n\n')
            continue

        upgrade = None
        # pick the upgrade classes by naming convention; the names were bound
        # above (non-PostgreSQL) or imported from pyasm.search.upgrade.project.
        # NOTE(review): eval() on constructed class names -- acceptable only
        # because database_type/type come from trusted configuration.
        if database_type != 'PostgreSQL':
            upgrade_class = "%s%sUpgrade" % (database_type, type.capitalize())
            conf_upgrade = eval("%sConfigUpgrade()" % database_type)
        else:
            upgrade_class = "%sUpgrade" % type.capitalize()
            conf_upgrade = eval("ConfigUpgrade()")
        upgrade = eval("%s()" % upgrade_class)

        # upgrade config (done for every project but sthpw)
        conf_upgrade.set_project(project.get_code())
        conf_upgrade.set_to_version(my.to_version)
        conf_upgrade.set_forced(my.is_forced)
        conf_upgrade.set_quiet(my.quiet)
        conf_upgrade.set_confirmed(my.is_confirmed)
        conf_upgrade.execute()

        # append the errors for each upgrade
        key = '%s|%s' % (project.get_code(), conf_upgrade.__class__.__name__)
        error_list.append((conf_upgrade.__class__.__name__, project.get_code(),
            Container.get_seq(key)))

        # perform the upgrade to the other tables
        if upgrade:
            upgrade.set_project(project.get_code())
            upgrade.set_to_version(my.to_version)
            upgrade.set_forced(my.is_forced)
            upgrade.set_quiet(my.quiet)
            upgrade.set_confirmed(my.is_confirmed)
            #Command.execute_cmd(upgrade)
            # put each upgrade function in its own transaction
            # carried out in BaseUpgrade
            upgrade.execute()

            # append the errors for each upgrade
            key = '%s|%s' % (project.get_code(), upgrade.__class__.__name__)
            error_list.append((upgrade.__class__.__name__, project.get_code(),
                Container.get_seq(key)))

        # stamp the project with the time and version of this upgrade
        from pyasm.search import DatabaseImpl
        project.set_value("last_db_update", DatabaseImpl.get().get_timestamp_now(), quoted=False)
        if project.has_value('last_version_update'):
            last_version = project.get_value('last_version_update')
            if my.to_version > last_version:
                project.set_value("last_version_update", my.to_version)
        else:
            # it should be getting the upgrade now, redo the search
            print "Please run upgrade_db.py again, the sthpw db has just been updated"
            return
        project.commit(triggers=False)

    # print the errors for each upgrade
    for cls_name, project_code, errors in error_list:
        if not my.quiet:
            print
            print "Errors for %s [%s]:" % (project_code, cls_name)
        ofile.write("Errors for %s [%s]:\n" % (project_code, cls_name))
        if not my.quiet:
            print "*" * 80
        ofile.write("*" * 80 + '\n')
        for func, error in errors:
            if not my.quiet:
                print '[%s]' % func
                print "-" * 70
                print error
            ofile.write('[%s]\n' % func)
            ofile.write("-" * 70 + '\n')
            ofile.write('%s\n' % error)

    ofile.close()

    # in quiet mode the details only went to the log file; point the user there
    if my.quiet:
        print "Please refer to the file [%s] for any upgrade messages." % output_file
        print

    # handle sthpw database separately. This ensures that the project entry
    # gets created if none exists.
    #print "sthpw"
    #print "-"*30
    #upgrade = SthpwUpgrade()
    #upgrade.set_project("sthpw")
    #Command.execute_cmd(upgrade)

    # update the template zip files
    data_dir = Environment.get_data_dir(manual=False)
    dest_dir = '%s/templates' % data_dir
    if os.path.exists(dest_dir):
        install_dir = Environment.get_install_dir()
        src_code_template_dir = '%s/src/install/start/templates' % install_dir
        if os.path.exists(src_code_template_dir):
            zip_files = os.listdir(src_code_template_dir)
            io_errors = False
            for zip_file in zip_files:
                if not zip_file.endswith(".zip"):
                    continue
                try:
                    src_file = '%s/%s' % (src_code_template_dir, zip_file)
                    dest_file = '%s/%s' % (dest_dir, zip_file)
                    shutil.copyfile(src_file, dest_file)
                except IOError, e:
                    # best-effort copy: report and continue with the rest
                    print e
                    io_errors = True
            if not io_errors:
                print "Default project template files have been updated."
            else:
                print "There was a problem copying the default template files to <TACTIC_DATA_DIR>/templates."
pass elif answer == 'n': sys.exit(0) else: print "Only y or n is accepted. Exiting..." sys.exit(0) # check if some projects are already in newer version Batch(site=site) search = Search("sthpw/project") if project_code: search.add_filter("code", project_code) else: search.add_enum_order_by( "type", ['sthpw', 'prod', 'flash', 'game', 'design', 'simple', 'unittest']) projects = search.get_sobjects() project_dict = {} for project in projects: last_version = project.get_value('last_version_update', no_exception=True) if last_version > version: project_dict[project.get_code()] = last_version if project_dict: data = [] for key, value in project_dict.items(): data.append(' %s --- %s' % (key, value)) if is_confirmed: answer = 'y'
def get_by_sobjects(sobjects, process=None, order=True):
    """Return all task sobjects whose parents are the given sobjects.

    @params
    sobjects - list of parent sobjects to find tasks for
    process - optional process name (string) or list of process names
              to filter the tasks by
    order - when True, order tasks by the parent pipeline's process order
            (falling back to alphabetical process), then by id

    @return
    list of task sobjects (empty list when no parents are given)
    """
    if not sobjects:
        return []

    # quickly determine whether the parents span more than one search type
    first_stype = sobjects[0].get_search_type()
    multi_stypes = any(x.get_search_type() != first_stype for x in sobjects)

    search = Search(Task.SEARCH_TYPE)

    if multi_stypes:
        # bucket the parents by search type and OR together one
        # relationship filter per bucket
        # (was "if sobj_list == None: ..." -- use the setdefault idiom)
        sobjects_dict = {}
        for sobject in sobjects:
            sobjects_dict.setdefault(sobject.get_search_type(), []).append(sobject)

        search.add_op('begin')
        for sobj_list in sobjects_dict.values():
            search.add_op('begin')
            search.add_relationship_filters(sobj_list)
            search.add_op('and')
        search.add_op('or')
    else:
        # single search type: one relationship filter covers all parents.
        # (removed dead code: an unused Schema.get() lookup, an unused
        # "filters" list, and a large dead triple-quoted block that rebuilt
        # the same filters by hand)
        search.add_relationship_filters(sobjects)

    search.add_order_by("search_type")
    search.add_order_by("search_code")
    search.add_order_by("search_id")

    # NOTE(review): the original relied on the leftover loop variable here,
    # which always ends up being the last parent; made that explicit.
    # Whether the *last* parent's pipeline is the intended choice should be
    # confirmed -- it only matters when parents have different pipelines.
    pipeline = Pipeline.get_by_sobject(sobjects[-1])
    if order:
        if pipeline:
            process_names = pipeline.get_process_names(True)
            search.add_enum_order_by("process", process_names)
        else:
            search.add_order_by("process")
        search.add_order_by("id")

    if process:
        if isinstance(process, basestring):
            search.add_filter("process", process)
        else:
            search.add_filters("process", process)

    return search.get_sobjects()
def get_display(my):
    """Build a Media RSS (PicLens-style) feed for the sobjects named in the
    request's "search_type" and pipe-separated "search_ids" form values.

    For each sobject the latest "icon" snapshot (falling back to "publish")
    supplies the thumbnail and content URLs.  Returns the feed as an XML
    string, or "" when the required form values are missing.
    """
    web = WebContainer.get_web()
    search_type = web.get_form_value("search_type")
    search_ids = web.get_form_value("search_ids")
    #print "PicLens why am I begin run???"
    if not search_type or not search_ids:
        return ""

    from pyasm.search import Search
    search = Search(search_type)
    search.add_filters('id', search_ids.split("|"))
    # preserve the caller-supplied id ordering
    search.add_enum_order_by('id', search_ids.split("|"))
    sobjects = search.get_sobjects()

    # bug fix: escape interpolated values so codes/names/paths containing
    # &, < or > cannot produce malformed XML
    from xml.sax.saxutils import escape

    xml = '''<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss">
<channel>
<title></title>
<link></link>
<description></description>
'''

    from pyasm.biz import Snapshot
    for sobject in sobjects:
        snapshot = Snapshot.get_latest_by_sobject(sobject, "icon")
        if not snapshot:
            snapshot = Snapshot.get_latest_by_sobject(sobject, "publish")
        if not snapshot:
            continue

        # renamed from "web": the original clobbered the WebContainer
        # handle bound above
        web_file = snapshot.get_name_by_type(".swf")
        if not web_file:
            web_file = snapshot.get_name_by_type("web")
        icon = snapshot.get_name_by_type("icon")
        web_dir = snapshot.get_web_dir()
        web_path = "%s/%s" % (web_dir, web_file)
        icon_path = "%s/%s" % (web_dir, icon)
        title = "%s - %s" % (sobject.get_code(), sobject.get_name())
        xml += '''
<item>
<title>%s</title>
<link>%s</link>
<media:thumbnail url="%s" />
<media:content url="%s" type="" />
</item>
''' % (escape(title), escape(web_path), escape(icon_path), escape(web_path))

    xml += '''
</channel>
</rss>
'''
    return xml