def wrapper_function(*args, **kwargs): conn = None # Find the connection in the function params: for arg in args: if isinstance(arg, grt.classes.db_mgmt_Connection): conn = arg break if not conn: grt.log_error('db.sybase', 'Cannot find a connection object to apply the allow-ddl-in-tran fix') return func cursor = get_connection(conn).cursor() try: current_db = cursor.execute('SELECT db_name()').fetchone()[0] # Will restore it later except Exception: current_db = 'master' cursor.execute('USE master') # This is required for the next query to work cursor.execute('sp_dboption tempdb,"ddl in tran", true') cursor.execute('CHECKPOINT tempdb') # Like FLUSH in mysql for options if current_db != 'master': cursor.execute('USE ?', current_db) del cursor # Needed to use just one connection to the DB (Sybase Developer Edition allows only one connection) res = func(*args, **kwargs) # Once the function is executed, restore False to 'ddl in tran': cursor = get_connection(conn).cursor() cursor.execute('USE master') # This is required for the next query to work cursor.execute('sp_dboption tempdb,"ddl in tran", false') cursor.execute('CHECKPOINT tempdb') # Like FLUSH in mysql for options if current_db != 'master': cursor.execute('USE ?', current_db) # Restore the originally active database return res
def execute_script(connection, script, log): connection = get_connection(connection) ranges = grt.modules.MysqlSqlFacade.getSqlStatementRanges(script) for start, length in ranges: if grt.query_status(): raise grt.UserInterrupt() statement = script[start:start + length] try: grt.send_info("Execute statement", statement) grt.log_debug3("DbMySQLFE", "Execute %s\n" % statement) connection.execute(statement) except db_utils.QueryError, exc: if log: entry = grt.classes.GrtLogEntry() entry.owner = log entry.name = str(exc) entry.entryType = 2 log.entries.append(entry) grt.send_warning("%s" % exc) grt.log_error("DbMySQLFE", "Exception executing '%s': %s\n" % (statement, exc)) return False except Exception, exc: if log: entry = grt.classes.GrtLogEntry() entry.owner = log entry.name = "Exception: " + str(exc) entry.entryType = 2 log.entries.append(entry) grt.send_warning("Exception caught: %s" % exc) grt.log_error("DbMySQLFE", "Exception executing '%s': %s\n" % (statement, exc)) return False
def execute_script(connection, script, log): connection = get_connection(connection) ranges = grt.modules.MysqlSqlFacade.getSqlStatementRanges(script) for start, length in ranges: if grt.query_status(): raise grt.UserInterrupt() statement = script[start:start+length] try: grt.send_info("Execute statement", statement) grt.log_debug3("DbMySQLFE", "Execute %s\n" % statement) connection.execute(statement) except db_utils.QueryError, exc: if log: entry = grt.classes.GrtLogEntry() entry.owner = log entry.name = str(exc) entry.entryType = 2 log.entries.append(entry) grt.send_warning("%s" % exc) grt.log_error("DbMySQLFE", "Exception executing '%s': %s\n" % (statement, exc)) return False except Exception, exc: if log: entry = grt.classes.GrtLogEntry() entry.owner = log entry.name = "Exception: " + str(exc) entry.entryType = 2 log.entries.append(entry) grt.send_warning("Exception caught: %s" % exc) grt.log_error("DbMySQLFE", "Exception executing '%s': %s\n" % (statement, exc)) return False
def go_next(self):
    # Validate the wizard page inputs, store the bulk-transfer options into
    # the migration plan state, rebuild the list of tables to copy, and then
    # advance the wizard (skipping a page when no copy/script was requested).
    i = self._worker_count.get_string_value()
    try:
        count = int(i)
        if count < 1:
            raise Exception("Bad value")
    except Exception:
        # Reject anything that is not a positive integer and stay on the page.
        mforms.Utilities.show_error("Invalid Value", "Worker thread count must be a number larger than 0.", "OK", "", "")
        return
    self.main.plan.state.dataBulkTransferParams["workerCount"] = count
    #if self.dump_to_file.get_active():
    #    self.main.plan.state.dataBulkTransferParams["GenerateDumpScript"] = self.dump_to_file_entry.get_string_value()
    #else:
    #    if "GenerateDumpScript" in self.main.plan.state.dataBulkTransferParams:
    #        del self.main.plan.state.dataBulkTransferParams["GenerateDumpScript"]
    if self.copy_script_checkbox.get_active():
        self.main.plan.state.dataBulkTransferParams["GenerateCopyScript"] = self.copy_script_entry.get_string_value()
    else:
        if self.main.plan.state.dataBulkTransferParams.has_key("GenerateCopyScript"):
            del self.main.plan.state.dataBulkTransferParams["GenerateCopyScript"]
    self.main.plan.state.dataBulkTransferParams["LiveDataCopy"] = 1 if self._copy_db.get_active() else 0
    self.main.plan.state.dataBulkTransferParams["DebugTableCopy"] = 1 if self._debug_copy.get_active() else 0
    self.main.plan.state.dataBulkTransferParams["TruncateTargetTables"] = 1 if self._truncate_db.get_active() else 0
    # Drop any per-table range/row-count parameters from a previous run
    # before repopulating them from the tree below.  (.keys() returns a
    # list here, so deleting while iterating is safe.)
    for key in self.main.plan.state.dataBulkTransferParams.keys():
        if key.endswith(":rangeKey"):
            del self.main.plan.state.dataBulkTransferParams[key]
        if key.endswith(":rangeStart"):
            del self.main.plan.state.dataBulkTransferParams[key]
        if key.endswith(":rangeEnd"):
            del self.main.plan.state.dataBulkTransferParams[key]
        if key.endswith(":rowCount"):
            del self.main.plan.state.dataBulkTransferParams[key]
    tables_to_copy = []
    for row in range(self._tree.count()):
        n = self._tree.node_at_row(row)
        table = self._tables_by_id[n.get_tag()]
        # Column 1 holds an optional per-table row-count limit.
        count = n.get_string(1)
        if not count:
            # No limit entered: copy the whole table.
            tables_to_copy.append(table)
        else:
            try:
                count = int(count)
                if count > 0:
                    # tables_to_copy.append(table)
                    self.main.plan.state.dataBulkTransferParams["%s:rowCount" % table.__id__] = count
            except:
                # NOTE(review): grt.log_error is called with a single argument
                # here, while other call sites pass (domain, message) -- verify.
                grt.log_error("Invalid value in Migration DataCopy tree: %s"%count)
    self.main.plan.state.dataBulkTransferParams["tableList"] = tables_to_copy
    if self._copy_db.get_active() or self.copy_script_checkbox.get_active():
        return WizardPage.go_next(self)
    else:
        # Nothing to execute here: skip ahead two pages.
        self.main.go_next_page(2)
        return
def submitBug(user, password, data): #Initializes the return value as no error ret_val = '' try: # Creates the object to open and manage the cookies cookieJar = cookielib.CookieJar() urlOpener = urllib2.build_opener( urllib2.HTTPCookieProcessor(cookieJar)) # Attempts to login ret_val = login(urlOpener, user, password) if ret_val == '': log_file = '' if 'log_file' in data.keys(): log_file = data['log_file'] del data['log_file'] # Submits the bug, returning any error message to the caller ret_val = submit_bug(urlOpener, data, log_file) # Finally logs out logout(urlOpener) except Exception, e: ret_val = 'error|Unknown failure submitting bug report, please proceed through http://bugs.mysql.com/report.php' log_error( "WB Bug Report", 'An error occurred while submitting the request, %s : %s\n' % (e.__class__.__name__, str(e)))
def test_connection(): #Initializes the return value as no error ret_val = 'error|Unable to connecto through the Bug System, please proceed through http://bugs.mysql.com/report.php' try: # Creates the object to open and manage the cookies cookieJar = cookielib.CookieJar() urlOpener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar)) # Attempts to login response = urlOpener.open('http://bugs.mysql.com/index.php') # Reads the server response data = response.read() # Creates the parser to confirm successful login parser = HTMLGetData() parser.quit_on_done = True parser.add_path_node("html") parser.add_path_node("body") parser.add_path_conditioned_node("div",[('id','nav')]) parser.add_path_node("ul") parser.add_path_conditioned_node("li",[('id','current')]) parser.feed(data) if len(parser.result)==1: if parser.result[0] == 'Bugs Home': ret_val = 'success|' except BaseException, e: log_error("WB Bug Report", 'An error occurred while testing conectivity, %s: %s\n' % (e.__class__.__name__, str(e)))
def submitBug(user, password, data): #Initializes the return value as no error ret_val = '' try: # Creates the object to open and manage the cookies cookieJar = cookielib.CookieJar() urlOpener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar)) # Attempts to login ret_val = login(urlOpener, user, password) if ret_val == '': log_file = '' if 'log_file' in data.keys(): log_file = data['log_file'] del data['log_file'] # Submits the bug, returning any error message to the caller ret_val = submit_bug(urlOpener, data, log_file) # Finally logs out logout(urlOpener) except Exception, e: ret_val = 'error|Unknown failure submitting bug report, please proceed through http://bugs.mysql.com/report.php' log_error("WB Bug Report", 'An error occurred while submitting the request, %s : %s\n' % (e.__class__.__name__, str(e)))
def _run(self): try: self.worker(self.add_message) except Exception, e: import traceback log_error("WorkerThreadHelper", "An exception occurred in the worker thread:\n%s\n" % traceback.format_exc()) self.add_message(e)
def attach_file(opener, bug_number, file): # default error response ret_val = 'Unknown error attaching log file to bug report' # The file names to be used local_file_name = "wb.log" zip_file_name = 'wb_log.zip' # Copies the file to the local folder normalized_path = file.replace("\\","/") try: shutil.copyfile(normalized_path, local_file_name) # Creates the zip file zip_file = zipfile.ZipFile(zip_file_name,'w') zip_file.write(local_file_name, os.path.basename(local_file_name), zipfile.ZIP_DEFLATED) zip_file.close() # Reads the file as binary data zip_data = open(zip_file_name,'rb').read() # Creates the list of fields to be encoded fields = [ ('MAX_FILE_SIZE','512000'), ('file_desc', 'Workbench Log File'), ('file_private' , '1'), ('file_add' , 'Add file')] # Creates the file to be encoded files = [('file',zip_file_name, zip_data)] # Encodes the request data content_type, body = encode_multipart_formdata(fields, files) # Creates a custom request for the file submition request = urllib2.Request('http://bugs.mysql.com/bug.php?id=' + bug_number + '&files=2') request.add_unredirected_header('Content-Type', content_type) request.add_unredirected_header('Content-Length', str(len(body))) # Performs the request response = opener.open(request, body) data = response.read(); parser = MySQLGetRequestResult() parser.feed(data) if parser.result_type == 'success': ret_val = '' else: ret_val = 'Error attaching the log file to the bug report' except urllib2.URLError, e: ret_val = 'Error attaching the log file to the bug report' log_error('WB Bug Report', 'Error attaching the log file: %s \n' % str(e))
def attach_file(opener, bug_number, file): # default error response ret_val = 'Unknown error attaching log file to bug report' # The file names to be used local_file_name = "wb.log" zip_file_name = 'wb_log.zip' # Copies the file to the local folder normalized_path = file.replace("\\", "/") try: shutil.copyfile(normalized_path, local_file_name) # Creates the zip file zip_file = zipfile.ZipFile(zip_file_name, 'w') zip_file.write(local_file_name, os.path.basename(local_file_name), zipfile.ZIP_DEFLATED) zip_file.close() # Reads the file as binary data zip_data = open(zip_file_name, 'rb').read() # Creates the list of fields to be encoded fields = [('MAX_FILE_SIZE', '512000'), ('file_desc', 'Workbench Log File'), ('file_private', '1'), ('file_add', 'Add file')] # Creates the file to be encoded files = [('file', zip_file_name, zip_data)] # Encodes the request data content_type, body = encode_multipart_formdata(fields, files) # Creates a custom request for the file submition request = urllib2.Request('http://bugs.mysql.com/bug.php?id=' + bug_number + '&files=2') request.add_unredirected_header('Content-Type', content_type) request.add_unredirected_header('Content-Length', str(len(body))) # Performs the request response = opener.open(request, body) data = response.read() parser = MySQLGetRequestResult() parser.feed(data) if parser.result_type == 'success': ret_val = '' else: ret_val = 'Error attaching the log file to the bug report' except urllib2.URLError, e: ret_val = 'Error attaching the log file to the bug report' log_error('WB Bug Report', 'Error attaching the log file: %s \n' % str(e))
def _the_observer(self, name, sender, args):
    """Dispatch a posted notification to every registered observer whose
    name/object filters match; a failing observer is logged and skipped so
    dispatch continues to the remaining observers."""
    for callback, wanted_name, wanted_obj in self.observers:
        # A None filter matches everything.
        if wanted_name is not None and wanted_name != name:
            continue
        if wanted_obj is not None and wanted_obj != sender:
            continue
        try:
            callback(name, sender, args)
        except:
            grt.log_error("PyNotificationCenter", "Error calling notification observer for %s" % name)
            import traceback
            traceback.print_exc()
def reformat(text): # add a select in front of the expression to fake a valid statement prefix = "SELECT " text = prefix+text try: output = grt.modules.SQLIDEUtils.reformatSQLStatement(text) except Exception, exc: grt.log_error("ExplainRenderer", "Exception reformatting snippet '%s': %s\n" % (text, exc)) output = text
def save_file_content_and_backup(self, path, content, backup_extension, as_admin = False, admin_password = None):
    # Save 'content' to 'path' on a remote Windows host over the active SSH
    # session: write to a temp file, optionally back up the existing file,
    # then copy the temp file over the destination and delete it.
    # Raises PermissionDeniedError if the target dir is not writable (when
    # not running as admin), RuntimeError on backup/copy failures, and a
    # plain Exception when no SSH session is available.
    # Check if dir, where config file will be stored is writable
    dirname, filename = splitpath(path)
    if not as_admin and not self.is_dir_writable(dirname.strip(" \r\t\n")):
        raise PermissionDeniedError("Cannot write to directory %s" % dirname)
    if self.ssh is not None:
        ## Get temp dir for using as tmpdir
        tmpdir, status = self.process_ops.get_cmd_output("echo %temp%")
        if type(tmpdir) is unicode:
            tmpdir = tmpdir.encode("utf8")
        if type(tmpdir) is str:
            tmpdir = tmpdir.strip(" \r\t\n")
            if tmpdir[1] == ":":
                # Looks like an absolute Windows path; drop the drive prefix.
                tmpdir = tmpdir[2:]
            else:
                log_debug(_this_file, '%s: Temp directory path "%s" is not in expected form. The expected form is something like "C:\\Windows\\Temp"\n' % (self.__class__.__name__, tmpdir) )
                tmpdir = None
            log_debug2(_this_file, '%s: Got temp dir: "%s"\n' % (self.__class__.__name__, tmpdir) )
        else:
            tmpdir = None
        if not tmpdir:
            # Fall back to writing the temp file next to the destination.
            tmpdir = dirname
        tmpfilename = tmpdir + r"\workbench-temp-file.ini"
        log_debug(_this_file, '%s: Remotely writing contents to temporary file "%s"\n' % (self.__class__.__name__, tmpfilename) )
        log_debug3(_this_file, '%s: %s\n' % (self.__class__.__name__, content) )
        self.ssh.set_contents(tmpfilename, content)
        if backup_extension:
            # Copy the current file aside before overwriting it.
            log_debug(_this_file, '%s: Backing up "%s"\n' % (self.__class__.__name__, path) )
            backup_cmd = "copy /y " + quote_path_win(path) + " " + quote_path_win(path+backup_extension)
            msg, code = self.process_ops.get_cmd_output(backup_cmd)
            if code != 0:
                print backup_cmd, "->", msg
                log_error(_this_file, '%s: Error backing up file: %s\n' % (self.__class__.__name__, backup_cmd+'->'+msg) )
                raise RuntimeError("Error backing up file: %s" % msg)
        copy_to_dest = "copy /y " + quote_path_win(tmpfilename) + " " + quote_path_win(path)
        delete_tmp = "del " + quote_path_win(tmpfilename)
        log_debug(_this_file, '%s: Copying file to final destination: "%s"\n' % (self.__class__.__name__, copy_to_dest) )
        msg, code = self.process_ops.get_cmd_output(copy_to_dest)
        if code != 0:
            print copy_to_dest, "->", msg
            log_error(_this_file, '%s: Error copying temporary file over destination file: %s\n%s to %s\n' % (self.__class__.__name__, msg, tmpfilename, path) )
            raise RuntimeError("Error copying temporary file over destination file: %s\n%s to %s" % (msg, tmpfilename, path))
        log_debug(_this_file, '%s: Deleting tmp file: "%s"\n' % (self.__class__.__name__, delete_tmp) )
        msg, code = self.process_ops.get_cmd_output(delete_tmp)
        if code != 0:
            # A leftover temp file is not fatal; just report it.
            print "Could not delete temporary file %s: %s" % (tmpfilename, msg)
            log_info(_this_file, '%s: Could not delete temporary file "%s": %s\n' % (self.__class__.__name__, tmpfilename, msg) )
    else:
        raise Exception("No SSH session active, cannot save file remotely")
def _the_observer(self, name, sender, args):
    """Forward the notification 'name' from 'sender' to every matching
    registered observer.  An exception in one observer is logged and does not
    prevent the remaining observers from running."""
    for entry in self.observers:
        observer, filter_name, filter_obj = entry
        name_matches = filter_name is None or filter_name == name
        sender_matches = filter_obj is None or filter_obj == sender
        if not (name_matches and sender_matches):
            continue
        try:
            observer(name, sender, args)
        except:
            grt.log_error("PyNotificationCenter", "Error calling notification observer for %s\n" % name)
            import traceback
            traceback.print_exc()
def display_cost(self, item):
    """Switch the explain view's displayed cost metric according to the
    caption of the selected menu item, then request a repaint."""
    caption = item.get_text()
    if not caption:
        return
    # The first word of the caption selects the metric to show.
    keyword = caption.lower().split()[0]
    metric_by_keyword = {
        "read": "read_eval_cost",
        "data": "data_read_per_join",
    }
    metric = metric_by_keyword.get(keyword)
    if metric is not None:
        self._context.show_cost_info_type(metric)
    else:
        grt.log_error("vexplain", "Unknown cost info type: %s\n" % keyword)
    self.drawbox.set_needs_repaint()
def tab_changed(self): if self.old_active_tab and hasattr(self.old_active_tab, "page_deactivated"): self.old_active_tab.page_deactivated() i = self.tabview.get_active_tab() panel = self.tabs[i] if panel is not None and hasattr(panel, "page_activated"): try: panel.page_activated() except Exception, e: import traceback log_error(_this_file, "Unhandled exception in Admin for %s: %s\n" % (panel, traceback.format_exc())) mforms.Utilities.show_error("Error", "An unhandled exception occurred (%s). Please refer to the log files for details." % e, "OK", "", "")
def init_extensions(self, server_profile, ctrl_be):
    # Discover and register WBA extension modules.
    #
    # Scans the directory containing this file for packages named 'wba_*'
    # with an __init__.py, imports each one and, when it exposes a
    # wba_register() entry point, calls it with the server profile, the
    # backend controller and this object.  Modules without the entry point,
    # or whose registration raises, are unloaded again.
    log_info("WBA", "Initializing extension modules for WBA...\n")
    init_count = 0
    # search in the same dir where the WBA code itself is located
    for location in [os.path.dirname(__file__)]:
        try:
            folders = [f for f in os.listdir(location) if f.startswith("wba_") and os.path.isdir(os.path.join(location, f))]
        except:
            # Unreadable location: nothing to scan there.
            continue
        # Make the location importable while loading its candidates.
        sys.path.append(location)
        for candidate in folders:
            if os.path.exists(os.path.join(location, candidate, "__init__.py")):
                mod = __import__(candidate)
                if hasattr(mod, "wba_register"):
                    log_info("WBA", "Registering WBA extension module %s/%s...\n" % (location, candidate))
                    try:
                        mod.wba_register(server_profile, ctrl_be, self)
                        init_count += 1
                        log_info("WBA", "WBA extension module %s/%s registered OK\n" % (location, candidate))
                    except:
                        import traceback
                        traceback.print_exc()
                        log_error("WBA", "Exception caught while loading WBA extension module %s/%s\n" % (location, candidate))
                        # Registration failed: drop the module again.
                        del sys.modules[mod.__name__]
                        del mod
                else:
                    # unload the module
                    del sys.modules[mod.__name__]
                    del mod
        sys.path.pop()
    log_info("WBA", "%i extension modules initialized\n" % init_count)
def save_file_content_and_backup(self, filename, content, backup_extension, as_admin = False, admin_password = None):
    # Save 'content' to 'filename' on local Windows, optionally creating a
    # backup copy of the existing file first.  The admin path stages the
    # content in a temp file and performs backup + overwrite in a single
    # elevated command so the UAC prompt is shown only once; the non-admin
    # path copies the temp file over directly.
    log_debug(_this_file, '%s: Saving file "%s" with backup (sudo="%s")\n' % (self.__class__.__name__, filename, str(as_admin)) )
    if as_admin:
        tmp_name = self.tempdir+"\\wbfilesavetmp"
        tmp = open(tmp_name, "w+b")
        try:
            log_debug(_this_file, '%s: Writing file contents to tmp file "%s"\n' % (self.__class__.__name__, tmp_name) )
            tmp.write(content)
            tmp.close()
            if backup_extension and os.path.exists(filename):
                #dprint_ex(1, "Creating backup of %s to %s" % (filename, filename+backup_extension))
                #self._copy_file(source = filename, dest = filename + backup_extension,
                #                as_admin = as_admin, admin_password = admin_password)
                # Create backup and copy over file to final destination in a single command
                # This is done because running copy twice, would bring up the UAC dialog twice
                script = "copy /Y %s %s && copy /Y %s %s" % (quote_path_win(filename), quote_path_win(filename + backup_extension), quote_path_win(tmp_name), quote_path_win(filename))
                log_debug(_this_file, '%s: Creating backup and commiting tmp file: "%s"\n' % (self.__class__.__name__, script) )
                output = []
                res = self.process_ops.exec_cmd(script,
                                                as_admin = True,
                                                admin_password = admin_password,
                                                output_handler = lambda line, l= output: l.append(line)
                                                )
                if res != 0:
                    output = "\n".join(output)
                    raise RuntimeError("Error while executing '%s'. Output = '%s'" % (script, output))
            else:
                log_debug(_this_file, '%s: Copying over tmp file to final filename using sudo: %s -> %s\n' % (self.__class__.__name__, tmp_name, filename) )
                self._copy_file(source = tmp_name, dest = filename, as_admin = as_admin, admin_password = admin_password)
            log_debug(_this_file, '%s: Delete tmp file "%s"\n' % (self.__class__.__name__, tmp_name) )
            # delete tmp file
            ## BIZARRE STUFF GOING ON HERE
            # commenting out the following line, will make something in committing config file change fail
            # even tho the copies happen before this line.. wtf
            # os.remove(tmp_name)
            log_debug(_this_file, '%s: Done.\n' % self.__class__.__name__)
        except Exception, exc:
            log_error(_this_file, '%s: Exception caught: %s\n' % (self.__class__.__name__, str(exc)) )
            if tmp:
                tmp.close()
            raise
def get_profiles_for(system):
    """Load the shipped mysql.profiles files and return the deserialized
    profile dicts whose 'sys.system' entry matches the given OS 'system'."""
    app = mforms.App.get()
    path = app.get_resource_path("mysql.profiles")
    if not path:
        # Fall back to resolving the resource root and appending the dir name.
        base = app.get_resource_path("")
        if not base:
            log_error(_this_file, "Could not find mysql.profiles dir\n")
            return []
        path = base + "/mysql.profiles"
    matching = []
    for entry in os.listdir(path):
        data = grt.unserialize(os.path.join(path, entry))
        if data.has_key("sys.system") and data["sys.system"] == system:
            matching.append(data)
    return matching
def thread_work(self): try: self.func() except grt.UserInterrupt: self.owner.send_info("%s cancelled" % self.label) mforms.Utilities.perform_from_main_thread(self.set_aborted, False) return except Exception, exc: print import traceback traceback.print_exc() grt.log_error("Wizard", "Thread '%s' failed: %s\n" % (self.label, exc)) self.owner.send_error("%s: %s" % (self.label, str(exc))) mforms.Utilities.perform_from_main_thread(lambda self=self,exc=exc:self.set_failed("Error: %s" % self.owner.format_exception_text(exc)), False) return
def local_run_cmd_windows(command, as_admin=False, admin_password=None, sudo_prefix=None, output_handler=None):
    # Run a shell command on local Windows.  With as_admin, the command is
    # launched elevated through ShellExecuteA("runas") -- no output can be
    # captured in that mode, only a 0/1 status is produced.  Without it, the
    # command runs through os.popen3 and stdout+stderr are collected.
    # NOTE(review): in the non-admin branch out_str/retcode are computed but
    # no return/output_handler call is visible here -- the end of this
    # function may be truncated in this view; confirm against the full file.
    # wrap cmd
    command = "cmd.exe /C " + command
    out_str = ""
    retcode = 1
    if as_admin:
        try:
            from ctypes import c_int, WINFUNCTYPE, windll
            from ctypes.wintypes import HWND, LPCSTR, UINT
            prototype = WINFUNCTYPE(c_int, HWND, LPCSTR, LPCSTR, LPCSTR, LPCSTR, UINT)
            scriptparts = command.partition(" ")
            cmdname = scriptparts[0]
            cmdparams = scriptparts[2]
            paramflags = (1, "hwnd", 0), (1, "operation", "runas"), (1, "file", cmdname), (1, "params", cmdparams), (1, "dir", None), (1, "showcmd", 0)
            SHellExecute = prototype(("ShellExecuteA", windll.shell32), paramflags)
            ret = SHellExecute()
            # > 32 is OK, < 32 is error code
            retcode = 1
            if ret > 32:
                retcode = 0
            else:
                if ret == 0:
                    log_error(_this_file, 'local_run_cmd_windows(): Out of memory executing "%s"\n' % command)
                else:
                    log_error(_this_file, 'local_run_cmd_windows(): Error %i executing "%s"\n' % (ret, command) )
            return retcode
        except:
            import traceback
            traceback.print_exc()
    else:
        try:
            stdin, stdout, stderr = os.popen3(command)
            out = stdout.read()
            if not out:
                out = ""
            err = stderr.read()
            if not err:
                err = ""
            out_str = out + err
            retcode = stdout.close() # this doesn't really work, it will return None
            stderr.close()
            stdin.close()
        except Exception, exc:
            import traceback
            traceback.print_exc()
            retcode = 1
            out_str = "Internal error: %s"%exc
def login(opener, user, password): ret_val = 'error|Unknown error accessing the bug system' try: # Encodes the needed information for the login params = urllib.urlencode({ 'email': user, 'password': password, 'dest': '' }) # Performs the actual login response = opener.open('https://dev.mysql.com/login/', params) # Reads the server response data = response.read() # Creates the parser to confirm successful login parser = HTMLGetData() parser.quit_on_done = True parser.add_path_node("html") parser.add_path_node("body") parser.add_path_conditioned_node("div", [('id', 'container')]) parser.add_path_conditioned_node("div", [('class', 'page_container')]) parser.add_path_conditioned_node("div", [('id', 'page')]) parser.add_path_conditioned_node("h1", [('class', 'page_header'), ('id', 'mainContent')]) # Performs the parsing parser.feed(data) # Initializing error, to be cleaned in case of success # A simple error is returned on any login attemp failure, just as the web page does ret_val = 'error|Error accessing the bug system, please verify your email and password' # Sets the return value if len(parser.result) == 1: if parser.result[0] == 'Login Successful': ret_val = '' except urllib2.URLError, e: ret_val = 'error|Error accessing the bug system, check your network settings' log_error('WB Bug Report', 'Error accessing the bug system: %s\n' % str(e))
def submit_bug(opener, data, log_file): # The default error in case of unknown failure ret_val = 'error|Unknown error while submitting the bug, please proceed through http://bugs.mysql.com/report.php' try: # Encodes the received information for the bug submition params = urllib.urlencode(data) # Submits the bug response = opener.open('http://bugs.mysql.com/report.php', params) # Reads the server response data = response.read() parser = MySQLGetRequestResult() parser.feed(data) # If we have a result... if parser.result_type != '': # Starts creating the result of the bug submition ret_val = parser.result_type + '|' # Creates the bug submition result new_line = "\n" result_data = new_line.join(parser.result) # When a log file is to be appended, tries uploading it # On error, add an entry into the bug submition result data if parser.result_type == 'success' and log_file != '': file_attach_error = attach_file(opener, parser.result[1][1:], log_file) if file_attach_error != '': ret_val = ret_val + 'no_log_submitted: ' + file_attach_error + new_line ret_val = ret_val + result_data except urllib2.URLError, e: # The default error in case of unknown failure ret_val = 'error|An error occurred while submitting the report, please proceed through http://bugs.mysql.com/report.php' log_error( "WB Bug Report", 'An error occurred while submitting the request: %s\n' % str(e))
def sym_expr(self, node):
    # Format an expression node.  If the default single-line rendering is
    # longer than opt_expr_length_per_line, rebuild it with each
    # operator/operand pair on its own indented line.  On any formatting
    # error the default rendering is returned unchanged.
    otext = text = self.default_handler(node)
    try:
        if len(text) > self.opt_expr_length_per_line:
            children = node_children(node)
            # First operand stays on the first line.
            text = node_value(children[0])
            i = 1
            # Walk the remaining children as (operator, operand) pairs.
            while i < len(children)-1:
                oper = node_value(children[i])
                value = node_value(children[i+1])
                text += "\n" + indent(oper) + " " + value
                i += 2
            # A trailing unpaired child (even child count) is appended as-is.
            if i < len(children):
                text += " "+node_value(children[-1])
    except:
        grt.log_error("SQLReformatter", "Error formatting expression: %s" % otext)
        import traceback
        traceback.print_exc()
    return text
def is_dir_writable(self, path): ret = False try: dirlist = os.listdir(path) filename = '~wba_write_test' cnt = 1 while True: if filename + str(cnt) not in dirlist: break cnt += 1 filename = os.path.join(path, filename + str(cnt)) fp = open(filename, 'w') fp.close() os.remove(filename) ret = True except (IOError, OSError), e: ret = False log_error(_this_file, '%s: code="%s"\n' % (self.__class__.__name__, str(e.errno)) )
def poll(self):
    # Sample the system load by running /usr/bin/uptime on the monitored
    # server and parsing the third-from-last whitespace token (one of the
    # load averages), then push the value into the attached widget/label.
    output = StringIO.StringIO()
    if self.ctrl_be.server_helper.execute_command("/usr/bin/uptime", output_handler=output.write) == 0:
        data = output.getvalue().strip(" \r\t\n,:.")
        load_value = data.split()[-3]
        # in some systems, the format is x.xx x.xx x.xx and in others, it's x.xx, x.xx, x.xx
        load_value = load_value.rstrip(",")
        try:
            # Normalize a decimal comma (locale-dependent output) to a dot.
            result = float(load_value.replace(',','.'))
        except (ValueError, TypeError):
            log_error(_this_file, "Shell source %s returned wrong value. Expected int or float but got %s\n" % (self.name, load_value))
            result = 0
        if self.widget is not None:
            # Apply the optional transformation callback before displaying.
            self.widget.set_value(self.calc_cb(result) if self.calc_cb else result)
        if self.label_cb is not None:
            # Label updates must happen on the UI thread.
            self.ctrl_be.uitask(self.label.set_text, self.label_cb(result))
    else:
        log_debug(_this_file, "CPU stat command returned error: %s\n" % output.getvalue())
def submit_bug(opener, data, log_file): # The default error in case of unknown failure ret_val = 'error|Unknown error while submitting the bug, please proceed through http://bugs.mysql.com/report.php' try: # Encodes the received information for the bug submition params = urllib.urlencode(data) # Submits the bug response = opener.open('http://bugs.mysql.com/report.php', params) # Reads the server response data = response.read() parser = MySQLGetRequestResult() parser.feed(data) # If we have a result... if parser.result_type != '': # Starts creating the result of the bug submition ret_val = parser.result_type + '|' # Creates the bug submition result new_line = "\n" result_data = new_line.join(parser.result) # When a log file is to be appended, tries uploading it # On error, add an entry into the bug submition result data if parser.result_type == 'success' and log_file != '': file_attach_error = attach_file(opener, parser.result[1][1:], log_file) if file_attach_error != '': ret_val = ret_val + 'no_log_submitted: ' + file_attach_error + new_line ret_val = ret_val + result_data except urllib2.URLError, e: # The default error in case of unknown failure ret_val = 'error|An error occurred while submitting the report, please proceed through http://bugs.mysql.com/report.php' log_error("WB Bug Report", 'An error occurred while submitting the request: %s\n' % str(e))
def login(opener, user, password): ret_val = 'error|Unknown error accessing the bug system' try: # Encodes the needed information for the login params = urllib.urlencode({'email': user, 'password': password, 'dest': ''}) # Performs the actual login response = opener.open('https://dev.mysql.com/login/', params) # Reads the server response data = response.read() # Creates the parser to confirm successful login parser = HTMLGetData() parser.quit_on_done = True parser.add_path_node("html") parser.add_path_node("body") parser.add_path_conditioned_node("div",[('id','container')]) parser.add_path_conditioned_node("div",[('class','page_container')]) parser.add_path_conditioned_node("div",[('id','page')]) parser.add_path_conditioned_node("h1",[('class','page_header'),('id','mainContent')]) # Performs the parsing parser.feed(data) # Initializing error, to be cleaned in case of success # A simple error is returned on any login attemp failure, just as the web page does ret_val = 'error|Error accessing the bug system, please verify your email and password' # Sets the return value if len(parser.result) == 1: if parser.result[0] == 'Login Successful': ret_val = '' except urllib2.URLError, e: ret_val = 'error|Error accessing the bug system, check your network settings' log_error('WB Bug Report', 'Error accessing the bug system: %s\n' % str(e))
def test_connection(): #Initializes the return value as no error ret_val = 'error|Unable to connecto through the Bug System, please proceed through http://bugs.mysql.com/report.php' try: # Creates the object to open and manage the cookies cookieJar = cookielib.CookieJar() urlOpener = urllib2.build_opener( urllib2.HTTPCookieProcessor(cookieJar)) # Attempts to login response = urlOpener.open('http://bugs.mysql.com/index.php') # Reads the server response data = response.read() # Creates the parser to confirm successful login parser = HTMLGetData() parser.quit_on_done = True parser.add_path_node("html") parser.add_path_node("body") parser.add_path_conditioned_node("div", [('id', 'nav')]) parser.add_path_node("ul") parser.add_path_conditioned_node("li", [('id', 'current')]) parser.feed(data) if len(parser.result) == 1: if parser.result[0] == 'Bugs Home': ret_val = 'success|' except BaseException, e: log_error( "WB Bug Report", 'An error occurred while testing conectivity, %s: %s\n' % (e.__class__.__name__, str(e)))
def save_file_content_and_backup(self, filename, content, backup_extension, as_admin = False, admin_password = None):
    # Save 'content' to 'filename' locally, optionally backing up the
    # existing file first.  Content is staged in a NamedTemporaryFile and
    # copied into place via _copy_file (with sudo when as_admin is set).
    # NOTE(review): only the as_admin branch is visible here -- a non-admin
    # call appears to do nothing; confirm against the full file.
    log_debug(_this_file, '%s: Saving file "%s" with backup (sudo="%s")\n' % (self.__class__.__name__, filename, as_admin) )
    if as_admin:
        # The delete argument is only available starting from py 2.6 (NamedTemporaryFile deletes files on close in all cases, unless you pass delete=False)
        tmp = tempfile.NamedTemporaryFile(dir=self.tempdir)
        tmp_name = tmp.name
        try:
            log_debug(_this_file, '%s: Writing file contents to tmp file "%s"\n' % (self.__class__.__name__, tmp_name) )
            tmp.write(content)
            # Flush so the copy below sees the full content.
            tmp.flush()
            if backup_extension and os.path.exists(filename):
                log_debug(_this_file, '%s: Creating backup of "%s" to "%s"\n' % (self.__class__.__name__, filename, filename+backup_extension))
                self._copy_file(source = filename, dest = filename + backup_extension, as_admin = as_admin, admin_password = admin_password)
            log_debug(_this_file, '%s: Copying over tmp file to final filename using sudo: %s -> %s\n' % (self.__class__.__name__, tmp_name, filename) )
            self._copy_file(source = tmp_name, dest = filename, as_admin = as_admin, admin_password = admin_password)
            log_debug(_this_file, '%s: Copying file done\n' % self.__class__.__name__)
            # Closing deletes the temp file.
            tmp.close()
        except Exception, exc:
            log_error(_this_file, '%s: Exception caught: %s\n' % (self.__class__.__name__, str(exc)) )
            if tmp:
                tmp.close()
            raise
def wrapper(self, error):
    """Log the error through grt under the owning class' name, then delegate
    to the wrapped method with the original arguments."""
    import grt
    class_name = self.__class__.__name__
    grt.log_error(class_name, "%s\n" % str(error))
    return method(self, error)
if self.ssh: if as_admin: command = wrap_for_sudo('cat %s' % filename, self.process_ops.sudo_prefix) out, ret = self.ssh.exec_cmd(command, as_admin, admin_password) if ret != 0: raise Exception('Error executing "%s" via SSH in remote server' % command) return out else: try: return self.ssh.get_contents(filename) except IOError, exc: if exc.errno == errno.EACCES: raise PermissionDeniedError("Permission denied attempting to read file %s" % filename) else: print "Attempt to read remote file with no ssh session" log_error(_this_file, '%s: Attempt to read remote file with no ssh session\n' % self.__class__.__name__) import traceback traceback.print_stack() raise Exception("Cannot read remote file without an SSH session") return None #----------------------------------------------------------------------------- def save_file_content(self, filename, content, as_admin = False, admin_password = None): self.save_file_content_and_backup(filename, content, None, as_admin, admin_password) #----------------------------------------------------------------------------- def save_file_content_and_backup(self, path, content, backup_extension, as_admin = False, admin_password = None): # Check if dir, where config file will be stored is writable dirname, filename = splitpath(path) if not as_admin and not self.is_dir_writable(dirname.strip(" \r\t\n")):
def log_error(msg):
    """Forward `msg` to grt.log_error, tagged with the caller's file, function and line."""
    # extract_stack entries are (filename, lineno, funcname, text); [-2] is our caller.
    caller_file, caller_line, caller_func = traceback.extract_stack(limit=2)[-2][:3]
    tag = "%s:%s:%s" % (os.path.basename(caller_file), caller_func, caller_line)
    grt.log_error(tag, msg)
if duplicate_connection_count > 0: message.append(' and ') message.append('%i duplicated instances' % duplicated_instance_count) message.append(', which were not restored.') mforms.Utilities.show_warning('Restore Connections', ''.join(message), 'OK', '', '') except zipfile.BadZipfile, error: mforms.Utilities.show_error('Restore Connections Error', 'The selected file is not a valid backup file ' 'or the file is corrupted.', 'OK', '', '') grt.log_error('restoreConnections', 'The selected file is not a valid backup file or the file is corrupted: %s\n' % error) except IOError, error: mforms.Utilities.show_error('Restore Connections Error', 'Cannot read from file. Please check this file ' 'permissions and try again.', 'OK', '', '') grt.log_error('restoreConnections', '%s\n' % str(error)) return 0 @ModuleInfo.export(grt.STRING, grt.classes.db_mgmt_Connection) def connectionStringFromConnection(conn): #<user>[:<password>]@<host>[:<port>][:<socket>] connstr = "" if conn.driver.name == "MysqlNative": connstr = "%s@%s:%s" % (conn.parameterValues["userName"], conn.parameterValues["hostName"], conn.parameterValues["port"]) elif conn.driver.name == "MysqlNativeSocket": connstr = "%s@%s::%s" % (conn.parameterValues["userName"], conn.parameterValues["hostName"], conn.parameterValues["socket"]) elif conn.driver.name == "MysqlNativeSSH": #XXX this is incomplete, need some way to encode the ssh params connstr = "%s@%s::%s" % (conn.parameterValues["userName"], conn.parameterValues["hostName"], conn.parameterValues["port"]) return connstr
def restoreConnections():
    """Restore stored connections and server instances from a user-selected ZIP backup.

    Prompts for a backup ZIP containing `connections.xml` and
    `server_instances.xml`, merges their contents into
    grt.root.wb.rdbmsMgmt, skipping entries whose __id__ already exists
    (counted as duplicates), and reports results via mforms dialogs.
    """
    def generate_unique_name(name, name_set):
        # Append ' (N)' with increasing N until the name is not in name_set.
        new_name = name
        idx = 1
        while True:
            if not new_name in name_set:
                return new_name
            new_name = name + ' (%d)' % idx
            idx += 1
    file_chooser = mforms.newFileChooser(mforms.Form.main_form(), mforms.OpenFile)
    file_chooser.set_title('Select a Connections Backup File')
    file_chooser.set_extensions('ZIP Files (*.zip)|*.zip', 'import')
    if file_chooser.run_modal():
        backup_path = file_chooser.get_path()
        try:
            backup_file = zipfile.ZipFile(backup_path, 'r')
            try:
                # Extract both XML payloads to temp files so grt.unserialize can read them from disk.
                instances_file = tempfile.NamedTemporaryFile(delete=False)
                instances_file.write(backup_file.read('server_instances.xml'))
                instances_file.close()
                connections_file = tempfile.NamedTemporaryFile(delete=False)
                connections_file.write(backup_file.read('connections.xml'))
                connections_file.close()
            except KeyError, error:
                # ZipFile.read raises KeyError when a member is missing from the archive.
                mforms.Utilities.show_error('Restore Connections Error',
                                            'The selected file is not a valid backup file '
                                            'or the file is corrupted: %s.' % error.message,
                                            'OK', '', '')
                grt.log_error('restoreConnections',
                              'The selected file is not a valid backup file '
                              'or the file is corrupted: %s.' % error.message)
                return
            connections = grt.unserialize(connections_file.name)
            if not isinstance(connections, grt.List):
                mforms.Utilities.show_error('Restore Connections Error',
                                            'The selected file is not a valid backup file '
                                            'or the file is corrupted.', 'OK', '', '')
                grt.log_error('restoreConnections', 'The selected archive does not have a valid connection backup file.\n')
                return
            inserted_connections = {}
            existent_connection_names = set(conn.name for conn in grt.root.wb.rdbmsMgmt.storedConns)
            existent_connection_ids = set(conn.__id__ for conn in grt.root.wb.rdbmsMgmt.storedConns)
            duplicate_connection_count = 0
            for candidate_connection in connections:
                # Connections already present (same __id__) are skipped and only counted.
                if candidate_connection.__id__ in existent_connection_ids:
                    duplicate_connection_count = duplicate_connection_count + 1
                    continue
                candidate_connection.name = generate_unique_name(candidate_connection.name, existent_connection_names)
                existent_connection_names.add(candidate_connection.name)
                candidate_connection.owner = grt.root.wb.rdbmsMgmt
                # Remember restored connections by id so instances can be re-linked below.
                inserted_connections[candidate_connection.__id__] = candidate_connection
                grt.root.wb.rdbmsMgmt.storedConns.append(candidate_connection)
            instances = grt.unserialize(instances_file.name)
            if not isinstance(instances, grt.List):
                mforms.Utilities.show_error('Restore Connections Error',
                                            'The selected file is not a valid backup file '
                                            'or the file is corrupted.', 'OK', '', '')
                grt.log_error('restoreConnections', 'Workbench restored %i valid connections but server configuration data could not be found or is not valid.\n' % len(connections))
                return
            existent_instance_names = set(instance.name for instance in grt.root.wb.rdbmsMgmt.storedInstances)
            previous_instances_conns = set()
            duplicated_instance_count = 0
            for candidate_instance in instances:
                if candidate_instance.connection.__id__ in previous_instances_conns:
                    duplicated_instance_count = duplicated_instance_count + 1
                    continue # Skip instances whose connections are associated to previously processed instances
                previous_instances_conns.add(candidate_instance.connection.__id__)
                candidate_instance.name = generate_unique_name(candidate_instance.name, existent_instance_names)
                existent_instance_names.add(candidate_instance.name)
                new_conn = inserted_connections.get(candidate_instance.connection.__id__, None)
                # Work on a copy so the unserialized object is not mutated in place.
                candidate_instance = candidate_instance.shallow_copy()
                candidate_instance.connection = new_conn
                grt.root.wb.rdbmsMgmt.storedInstances.append(candidate_instance)
            grt.modules.Workbench.refreshHomeConnections()
            grt.modules.Workbench.saveConnections()
            grt.modules.Workbench.saveInstances()
            # Tell the user about anything that was skipped as a duplicate.
            if duplicate_connection_count > 0 or duplicated_instance_count > 0:
                message = []
                message.append('Workbench detected ')
                if duplicate_connection_count > 0:
                    message.append('%i duplicated connections' % duplicate_connection_count)
                if duplicated_instance_count > 0:
                    if duplicate_connection_count > 0:
                        message.append(' and ')
                    message.append('%i duplicated instances' % duplicated_instance_count)
                message.append(', which were not restored.')
                mforms.Utilities.show_warning('Restore Connections', ''.join(message), 'OK', '', '')
        except zipfile.BadZipfile, error:
            mforms.Utilities.show_error('Restore Connections Error',
                                        'The selected file is not a valid backup file '
                                        'or the file is corrupted.', 'OK', '', '')
            grt.log_error('restoreConnections', 'The selected file is not a valid backup file or the file is corrupted: %s\n' % error)
def restoreConnections():
    """Restore stored connections and server instances from a user-selected ZIP backup.

    Prompts for a backup ZIP containing `connections.xml` and
    `server_instances.xml`, unserializes both, and appends shallow copies of
    the entries to grt.root.wb.rdbmsMgmt, renaming on name collisions and
    re-linking each instance to its restored connection.
    """
    def generate_unique_name(name, name_set):
        # Append ' (N)' with increasing N until the name is not in name_set.
        new_name = name
        idx = 1
        while True:
            if not new_name in name_set:
                return new_name
            new_name = name + ' (%d)' % idx
            idx += 1
    file_chooser = mforms.newFileChooser(mforms.Form.main_form(), mforms.OpenFile)
    file_chooser.set_title('Select a Connections Backup File')
    file_chooser.set_extensions('ZIP Files (*.zip)|*.zip', 'import')
    if file_chooser.run_modal():
        backup_path = file_chooser.get_path()
        try:
            backup_file = zipfile.ZipFile(backup_path, 'r')
            try:
                # Extract both XML payloads to temp files so grt.unserialize can read them from disk.
                instances_file = tempfile.NamedTemporaryFile(delete=False)
                instances_file.write(backup_file.read('server_instances.xml'))
                instances_file.close()
                connections_file = tempfile.NamedTemporaryFile(delete=False)
                connections_file.write(backup_file.read('connections.xml'))
                connections_file.close()
            except KeyError, error:
                # ZipFile.read raises KeyError when a member is missing from the archive.
                mforms.Utilities.show_error(
                    'Restore Connections Error',
                    'The selected file is not a valid backup file '
                    'or the file is corrupted: %s.' % error.message, 'OK', '', '')
                grt.log_error(
                    'restoreConnections',
                    'The selected file is not a valid backup file '
                    'or the file is corrupted: %s.' % error.message)
                return
            instances = grt.unserialize(instances_file.name)
            connections = grt.unserialize(connections_file.name)
            if not isinstance(instances, grt.List) or not isinstance(connections, grt.List):
                mforms.Utilities.show_error(
                    'Restore Connections Error',
                    'The selected file is not a valid backup file '
                    'or the file is corrupted.', 'OK', '', '')
                grt.log_error(
                    'restoreConnections',
                    'The selected file is not a valid backup file or the file is corrupted.')
                return
            inserted_connections = {}
            existent_connection_names = set(conn.name for conn in grt.root.wb.rdbmsMgmt.storedConns)
            for candidate_connection in connections:
                new_name = generate_unique_name(candidate_connection.name, existent_connection_names)
                if new_name != candidate_connection.name:
                    candidate_connection.name = new_name
                existent_connection_names.add(candidate_connection.name)
                # Keep the original id before copying so instances can be re-linked below.
                old_id = candidate_connection.__id__
                candidate_connection = candidate_connection.shallow_copy()
                inserted_connections[old_id] = candidate_connection
                grt.root.wb.rdbmsMgmt.storedConns.append(candidate_connection)
            existent_instance_names = set(instance.name for instance in grt.root.wb.rdbmsMgmt.storedInstances)
            previous_instances_conns = set()
            for candidate_instance in instances:
                if candidate_instance.connection.__id__ in previous_instances_conns:
                    continue # Skip instances whose connections are associated to previously processed instances
                previous_instances_conns.add(candidate_instance.connection.__id__)
                new_name = generate_unique_name(candidate_instance.name, existent_instance_names)
                if new_name != candidate_instance.name:
                    candidate_instance.name = new_name
                existent_instance_names.add(candidate_instance.name)
                # Point the copied instance at the freshly restored connection (None if not restored).
                new_conn = inserted_connections.get(candidate_instance.connection.__id__, None)
                candidate_instance = candidate_instance.shallow_copy()
                candidate_instance.connection = new_conn
                grt.root.wb.rdbmsMgmt.storedInstances.append(candidate_instance)
            grt.modules.Workbench.refreshHomeConnections()
            grt.modules.Workbench.saveConnections()
            grt.modules.Workbench.saveInstances()
        except zipfile.BadZipfile, error:
            mforms.Utilities.show_error(
                'Restore Connections Error',
                'The selected file is not a valid backup file '
                'or the file is corrupted.', 'OK', '', '')
            grt.log_error(
                'restoreConnections',
                'The selected file is not a valid backup file or the file is corrupted: %s' % error)
def go_next(self):
    """Validate the data-copy options page and store them into the migration plan state.

    Reads the worker count and the script/copy option widgets, writes the
    corresponding keys into self.main.plan.state.dataBulkTransferParams,
    collects the per-table row limits from the tree, and advances the wizard
    (skipping a page when no live copy or script generation was chosen).
    """
    i = self._worker_count.get_string_value()
    try:
        count = int(i)
        if count < 1:
            raise Exception("Bad value")
    except Exception:
        # Reject anything that is not a positive integer.
        mforms.Utilities.show_error(
            "Invalid Value",
            "Worker thread count must be a number larger than 0.", "OK", "", "")
        return
    self.main.plan.state.dataBulkTransferParams["workerCount"] = count
    #if self.dump_to_file.get_active():
    #    self.main.plan.state.dataBulkTransferParams["GenerateDumpScript"] = self.dump_to_file_entry.get_string_value()
    #else:
    #    if "GenerateDumpScript" in self.main.plan.state.dataBulkTransferParams:
    #        del self.main.plan.state.dataBulkTransferParams["GenerateDumpScript"]
    # Copy-script option: set the output path or remove a stale entry.
    if self.copy_script_radiobutton.get_active():
        self.main.plan.state.dataBulkTransferParams["GenerateCopyScript"] = self.copy_script_entry.get_string_value()
    else:
        if self.main.plan.state.dataBulkTransferParams.has_key("GenerateCopyScript"):
            del self.main.plan.state.dataBulkTransferParams["GenerateCopyScript"]
    # Bulk-copy-script option: same pattern as above.
    if self.bulk_copy_script_radiobutton.get_active():
        self.main.plan.state.dataBulkTransferParams["GenerateBulkCopyScript"] = self.bulk_copy_script_entry.get_string_value()
    else:
        if self.main.plan.state.dataBulkTransferParams.has_key("GenerateBulkCopyScript"):
            del self.main.plan.state.dataBulkTransferParams["GenerateBulkCopyScript"]
    # Boolean options are stored as 0/1 integers.
    self.main.plan.state.dataBulkTransferParams["LiveDataCopy"] = 1 if self._copy_db.get_active() else 0
    self.main.plan.state.dataBulkTransferParams["DebugTableCopy"] = 1 if self._debug_copy.get_active() else 0
    self.main.plan.state.dataBulkTransferParams["DriverSendsDataAsUTF8"] = 1 if self._driver_sends_utf8.get_active() else 0
    self.main.plan.state.dataBulkTransferParams["TruncateTargetTables"] = 1 if self._truncate_db.get_active() else 0
    # Drop all stale per-table range/row-count keys before recollecting them.
    # (In Python 2, .keys() returns a list, so deleting while iterating is safe.)
    for key in self.main.plan.state.dataBulkTransferParams.keys():
        if key.endswith(":rangeKey"):
            del self.main.plan.state.dataBulkTransferParams[key]
        if key.endswith(":rangeStart"):
            del self.main.plan.state.dataBulkTransferParams[key]
        if key.endswith(":rangeEnd"):
            del self.main.plan.state.dataBulkTransferParams[key]
        if key.endswith(":rowCount"):
            del self.main.plan.state.dataBulkTransferParams[key]
    tables_to_copy = []
    for row in range(self._tree.count()):
        n = self._tree.node_at_row(row)
        table = self._tables_by_id[n.get_tag()]
        count = n.get_string(1)
        if not count:
            # No explicit limit: copy the whole table.
            tables_to_copy.append(table)
        else:
            try:
                count = int(count)
                if count > 0:
                    # NOTE(review): tables with an explicit positive row count are only
                    # given a "<id>:rowCount" entry and are NOT added to tables_to_copy
                    # here — confirm this is the intended behavior.
                    # tables_to_copy.append(table)
                    self.main.plan.state.dataBulkTransferParams["%s:rowCount" % table.__id__] = count
            except:
                grt.log_error("Invalid value in Migration DataCopy tree: %s" % count)
    self.main.plan.state.dataBulkTransferParams["tableList"] = tables_to_copy
    # Only show the next wizard page when there is actually something to run;
    # otherwise skip ahead by two pages.
    if self._copy_db.get_active() or self.copy_script_radiobutton.get_active() or self.bulk_copy_script_radiobutton.get_active():
        return WizardPage.go_next(self)
    else:
        self.main.go_next_page(2)
        return
parser = MySQLGetRequestResult() parser.feed(data) if parser.result_type == 'success': ret_val = '' else: ret_val = 'Error attaching the log file to the bug report' except urllib2.URLError, e: ret_val = 'Error attaching the log file to the bug report' log_error('WB Bug Report', 'Error attaching the log file: %s \n' % str(e)) except IOError, e: ret_val = 'Error getting the log file' log_error('WB Bug Report', 'Error getting the log file: %s\n' % str(e)) return ret_val def encode_multipart_formdata(fields, files): LIMIT = '----------wb_file_limit' CRLF = '\r\n' L = [] for (key, value) in fields: L.append('--' + LIMIT) L.append('Content-Disposition: form-data; name="%s"' % key) L.append('') L.append(value) for (key, filename, value) in files: L.append('--' + LIMIT)
self.send("OK") elif cmd == "WAIT": # wait for the SSH connection to be established error = self.wait_connection(args) if not error: self.send("OK") else: self.send("ERROR "+error) elif cmd == "MESSAGE": msg = self.get_message(args) if msg: self.send(msg) else: self.send("NONE") else: log_error(_this_file, "Invalid request %s\n" % request) self.send("ERROR", "Invalid request") """ if "--single" in sys.argv: target = sys.argv[2] if "-pw" in sys.argv: password = sys.argv[sys.argv.index("-pw")+1] else: password = None if "-i" in sys.argv: keyfile = sys.argv[sys.argv.index("-i")+1] else: keyfile = None server = sys.argv[-1]
def show_table(self, schema, table):
    """Populate the index list (and fetch column data) for `schema`.`table`.

    Runs SHOW INDEX to group the result rows by index name into the
    index_list tree and self.index_info, then issues SHOW COLUMNS.
    DB errors are logged and treated as an empty result.

    NOTE(review): column_icon is computed but not used in the visible code,
    and the SHOW COLUMNS result set is fetched but not processed here — the
    method may continue beyond this view; confirm against the full file.
    """
    self._schema = schema
    self._table = table
    self._engine = None
    self.index_list.clear()
    self.index_info = []
    self.column_list.clear()
    column_icon = mforms.App.get().get_resource_path("db.Column.16x16.png")
    index_icon = mforms.App.get().get_resource_path("db.Index.16x16.png")
    if table:
        try:
            rset = self.editor.executeManagementQuery("SHOW INDEX FROM `%s`.`%s`" % (schema, table), 0)
        except grt.DBError, e:
            log_error("Cannot execute SHOW INDEX FROM `%s`.`%s`: %s" % (schema, table, e))
            rset = None
        # index_rs_columns = all SHOW INDEX fields except the per-column ones,
        # which go into column_rs_columns (seq, column name, collation, sub_part).
        index_rs_columns = range(13)
        column_rs_columns = [3, 4, 5, 7]
        for i in column_rs_columns:
            index_rs_columns.remove(i)
        column_to_index = {}
        if rset:
            ok = rset.goToFirstRow()
            curname = None
            columns = []
            while ok:
                name = rset.stringFieldValue(2)  # field 2 is the index name
                if name != curname:
                    # A new index starts: flush the accumulated node for the previous one.
                    if columns:
                        node = self.index_list.add_node()
                        node.set_icon_path(0, index_icon)
                        node.set_string(0, curname)
                        node.set_string(1, itype)
                        node.set_string(2, "YES" if non_unique != "1" else "NO")
                        node.set_string(3, ", ".join([c[1] for c in columns]))
                        columns = []
                    # Deliberate aliasing: the (freshly rebound) columns list is stored
                    # in index_info and keeps growing as this index's rows are read.
                    self.index_info.append(([rset.stringFieldValue(i) for i in index_rs_columns], columns))
                    curname = name
                    itype = rset.stringFieldValue(10)      # Index_type
                    non_unique = rset.stringFieldValue(1)  # Non_unique ("1" means not unique)
                cname = rset.stringFieldValue(4)  # Column_name
                if cname not in column_to_index:
                    column_to_index[cname] = [name]
                else:
                    column_to_index[cname].append(name)
                columns.append([rset.stringFieldValue(i) for i in column_rs_columns])
                ok = rset.nextRow()
            # Flush the node for the last index in the result set.
            if columns:
                node = self.index_list.add_node()
                node.set_icon_path(0, index_icon)
                node.set_string(0, curname)
                node.set_string(1, itype)
                node.set_string(2, "YES" if non_unique != "1" else "NO")
                node.set_string(3, ", ".join([c[1] for c in columns]))
        try:
            rset = self.editor.executeManagementQuery("SHOW COLUMNS FROM `%s`.`%s`" % (schema, table), 0)
        except grt.DBError, e:
            log_error("Cannot execute SHOW COLUMNS FROM `%s`.`%s`: %s" % (schema, table, e))
            rset = None
def process_until_done(self):
    """Pump messages from the copy-helper result queue until the helper finishes.

    Blocks on self._result_queue, translating BEGIN/END/ERROR/PROGRESS/LOG/
    DONE/INTERRUPTED messages into owner callbacks and a per-table progress
    map.  Returns a dict mapping target table name to (finished, row_count).
    Sets self.interrupted and self._resume as side effects.
    """
    total_row_count = 0
    for table in self._working_set.values():
        total_row_count += table["row_count"]
    progress_row_count = {}
    self.interrupted = False
    active_job_names = set()
    self._resume = False
    done = False
    while True:
        if done: # flush pending messages
            try:
                # Report resume status (if the owner supports it) before draining.
                _update_resume_status = getattr(self._owner, "_update_resume_status", None)
                if callable(_update_resume_status):
                    _update_resume_status(self._resume)
                msgtype, message = self._result_queue.get_nowait()
            except Queue.Empty:
                break
        else:
            # Block until the helper produces the next message.
            msgtype, message = self._result_queue.get()
        if msgtype == "BEGIN":
            # Message format is "<table>:<text>".
            target_table = message.split(":")[0]
            active_job_names.add(target_table)
            self._owner.send_info(message)
        elif msgtype == "END":
            target_table = message.split(":")[0]
            if target_table in active_job_names:
                active_job_names.remove(target_table)
            self._owner.send_info(message)
            # Mark the table finished, keeping its last row count.
            progress_row_count[target_table] = (True, progress_row_count.get(target_table, (False, 0))[1])
        elif msgtype == "ERROR":
            target_table = message.split(":")[0]
            if target_table in active_job_names:
                active_job_names.remove(target_table)
            self._owner.send_error(message)
            self._owner.add_log_entry(2, target_table, message)
            grt.log_error("Migration", "%s\n"%message)
            self._resume = True
        elif msgtype == "PROGRESS":
            # Message format is "<table>:<current>:<total>".
            target_table, current, total = message.split(":")
            progress_row_count[target_table] = (False, int(current))
            self._owner.send_progress(float(sum([x[1] for x in progress_row_count.values()])) / total_row_count, "Copying %s" % ", ".join(active_job_names))
        elif msgtype == "LOG":
            self._owner.send_info(message)
        elif msgtype == "DONE":
            # Helper exited; a non-empty message carries its error text.
            done = True
            if message:
                self._resume = True
                self._owner.send_error("Copy helper exited with an error: %s" % message)
            else:
                self._owner.send_info("Copy helper has finished")
        elif msgtype == "INTERRUPTED":
            done = True
            self.interrupted = True
            self._resume = True
            self._owner.send_info("Copy helper was aborted by user")
        else:
            # Unknown message types are surfaced verbatim.
            self._owner.send_info(msgtype + ": " + message)
    return progress_row_count
def restoreConnections():
    """Restore stored connections and server instances from a user-selected ZIP backup.

    Prompts for a backup ZIP containing `connections.xml` and
    `server_instances.xml`, unserializes both, and appends shallow copies of
    the entries to grt.root.wb.rdbmsMgmt, renaming on name collisions and
    re-linking each instance to its restored connection.
    """
    def generate_unique_name(name, name_set):
        # Append ' (N)' with increasing N until the name is not in name_set.
        new_name = name
        idx = 1
        while True:
            if not new_name in name_set:
                return new_name
            new_name = name + ' (%d)' % idx
            idx += 1
    file_chooser = mforms.newFileChooser(mforms.Form.main_form(), mforms.OpenFile)
    file_chooser.set_title('Select a Connections Backup File')
    file_chooser.set_extensions('ZIP Files (*.zip)|*.zip', 'import')
    if file_chooser.run_modal():
        backup_path = file_chooser.get_path()
        try:
            backup_file = zipfile.ZipFile(backup_path, 'r')
            try:
                # Extract both XML payloads to temp files so grt.unserialize can read them from disk.
                instances_file = tempfile.NamedTemporaryFile(delete=False)
                instances_file.write(backup_file.read('server_instances.xml'))
                instances_file.close()
                connections_file = tempfile.NamedTemporaryFile(delete=False)
                connections_file.write(backup_file.read('connections.xml'))
                connections_file.close()
            except KeyError, error:
                # ZipFile.read raises KeyError when a member is missing from the archive.
                mforms.Utilities.show_error('Restore Connections Error',
                                            'The selected file is not a valid backup file '
                                            'or the file is corrupted: %s.' % error.message,
                                            'OK', '', '')
                grt.log_error('restoreConnections',
                              'The selected file is not a valid backup file '
                              'or the file is corrupted: %s.' % error.message)
                return
            instances = grt.unserialize(instances_file.name)
            connections = grt.unserialize(connections_file.name)
            if not isinstance(instances, grt.List) or not isinstance(connections, grt.List):
                mforms.Utilities.show_error('Restore Connections Error',
                                            'The selected file is not a valid backup file '
                                            'or the file is corrupted.', 'OK', '', '')
                grt.log_error('restoreConnections',
                              'The selected file is not a valid backup file or the file is corrupted.')
                return
            inserted_connections = {}
            existent_connection_names = set(conn.name for conn in grt.root.wb.rdbmsMgmt.storedConns)
            for candidate_connection in connections:
                new_name = generate_unique_name(candidate_connection.name, existent_connection_names)
                if new_name != candidate_connection.name:
                    candidate_connection.name = new_name
                existent_connection_names.add(candidate_connection.name)
                # Keep the original id before copying so instances can be re-linked below.
                old_id = candidate_connection.__id__
                candidate_connection = candidate_connection.shallow_copy()
                inserted_connections[old_id] = candidate_connection
                grt.root.wb.rdbmsMgmt.storedConns.append(candidate_connection)
            existent_instance_names = set(instance.name for instance in grt.root.wb.rdbmsMgmt.storedInstances)
            previous_instances_conns = set()
            for candidate_instance in instances:
                if candidate_instance.connection.__id__ in previous_instances_conns:
                    continue # Skip instances whose connections are associated to previously processed instances
                previous_instances_conns.add(candidate_instance.connection.__id__)
                new_name = generate_unique_name(candidate_instance.name, existent_instance_names)
                if new_name != candidate_instance.name:
                    candidate_instance.name = new_name
                existent_instance_names.add(candidate_instance.name)
                # Point the copied instance at the freshly restored connection (None if not restored).
                new_conn = inserted_connections.get(candidate_instance.connection.__id__, None)
                candidate_instance = candidate_instance.shallow_copy()
                candidate_instance.connection = new_conn
                grt.root.wb.rdbmsMgmt.storedInstances.append(candidate_instance)
            grt.modules.Workbench.refreshHomeConnections()
            grt.modules.Workbench.saveConnections()
            grt.modules.Workbench.saveInstances()
        except zipfile.BadZipfile, error:
            mforms.Utilities.show_error('Restore Connections Error',
                                        'The selected file is not a valid backup file '
                                        'or the file is corrupted.', 'OK', '', '')
            grt.log_error('restoreConnections',
                          'The selected file is not a valid backup file or the file is corrupted: %s' % error)
def process_until_done(self):
    """Pump messages from the copy-helper result queue until the helper finishes.

    Blocks on self._result_queue, translating BEGIN/END/ERROR/PROGRESS/LOG/
    DONE/INTERRUPTED messages into owner callbacks and a per-table progress
    map.  Returns a dict mapping target table name to (finished, row_count).
    Sets self.interrupted and self._resume as side effects.
    """
    total_row_count = 0
    for table in self._working_set.values():
        total_row_count += table["row_count"]
    progress_row_count = {}
    self.interrupted = False
    active_job_names = set()
    self._resume = False
    done = False
    while True:
        if done: # flush pending messages
            try:
                # Report resume status (if the owner supports it) before draining.
                _update_resume_status = getattr(self._owner, "_update_resume_status", None)
                if callable(_update_resume_status):
                    _update_resume_status(self._resume)
                msgtype, message = self._result_queue.get_nowait()
            except Queue.Empty:
                break
        else:
            # Block until the helper produces the next message.
            msgtype, message = self._result_queue.get()
        if msgtype == "BEGIN":
            # Message format is "<table>:<text>".
            target_table = message.split(":")[0]
            active_job_names.add(target_table)
            self._owner.send_info(message)
        elif msgtype == "END":
            target_table = message.split(":")[0]
            if target_table in active_job_names:
                active_job_names.remove(target_table)
            self._owner.send_info(message)
            # Mark the table finished, keeping its last row count.
            progress_row_count[target_table] = (True, progress_row_count.get(target_table, (False, 0))[1])
        elif msgtype == "ERROR":
            target_table = message.split(":")[0]
            if target_table in active_job_names:
                active_job_names.remove(target_table)
            self._owner.send_error(message)
            self._owner.add_log_entry(2, target_table, message)
            grt.log_error("Migration", "%s\n" % message)
            self._resume = True
        elif msgtype == "PROGRESS":
            # Message format is "<table>:<current>:<total>".
            target_table, current, total = message.split(":")
            progress_row_count[target_table] = (False, int(current))
            self._owner.send_progress(float(sum([x[1] for x in progress_row_count.values()])) / total_row_count, "Copying %s" % ", ".join(active_job_names))
        elif msgtype == "LOG":
            self._owner.send_info(message)
        elif msgtype == "DONE":
            # Helper exited; a non-empty message carries its error text.
            done = True
            if message:
                self._resume = True
                self._owner.send_error("Copy helper exited with an error: %s" % message)
            else:
                self._owner.send_info("Copy helper has finished")
        elif msgtype == "INTERRUPTED":
            done = True
            self.interrupted = True
            self._resume = True
            self._owner.send_info("Copy helper was aborted by user")
        else:
            # Unknown message types are surfaced verbatim.
            self._owner.send_info(msgtype + ": " + message)
    return progress_row_count
def notify_exception_error(self, msg_type, msg_txt, msg_obj = None):
    """Forward the notification to the observer, then log the active exception traceback."""
    self.notify(msg_type, msg_txt)
    current_traceback = traceback.format_exc()
    log_error(_this_file, current_traceback)
grt.modules.Workbench.saveInstances() except zipfile.BadZipfile, error: mforms.Utilities.show_error( 'Restore Connections Error', 'The selected file is not a valid backup file ' 'or the file is corrupted.', 'OK', '', '') grt.log_error( 'restoreConnections', 'The selected file is not a valid backup file or the file is corrupted: %s' % error) except IOError, error: mforms.Utilities.show_error( 'Restore Connections Error', 'Cannot read from file. Please check this file ' 'permissions and try again.', 'OK', '', '') grt.log_error('restoreConnections', str(error)) return 0 @ModuleInfo.plugin("wb.tools.copyConnectionString", caption="Copy Connection String to Clipboard", input=[wbinputs.selectedConnection()], pluginMenu="Home/Connections") @ModuleInfo.export(grt.INT, grt.classes.db_mgmt_Connection) def copyConnectionString(conn): #<user>[:<password>]@<host>[:<port>][:<socket>] connstr = "" if conn.driver.name == "MysqlNative": connstr = "%s:@%s:%s:" % (conn.parameterValues["userName"], conn.parameterValues["hostName"], conn.parameterValues["port"])