def do_run(self): global tunnel_serial tunnel_serial += 1 mforms.Utilities.set_thread_name("SSHTunnel%i" % tunnel_serial) sys.stdout.write('Thread started\n' ) # sys.stdout.write is thread safe while print isn't log_debug2("SSH Tunel %i thread started\n" % tunnel_serial) # Create a socket and pick a random port number for it: self._listen_sock = socket.socket() while True: local_port = random.randint(1024, 65535) try: self._listen_sock.bind(('127.0.0.1', local_port)) self._listen_sock.listen(2) with self.lock: self.local_port = local_port break except socket.error, exc: sys.stdout.write('Socket error: %s for port %d\n' % (exc, local_port)) err, msg = exc.args if err == 22: continue # retry self.notify_exception_error( 'ERROR', "Error initializing server end of tunnel", sys.exc_info()) raise exc finally:
def go_next(self):
    """Run the SSL certificate generation task and emit a my.cnf sample.

    Stores the generated certificate/key paths on self, writes a
    my.cnf.sample file with matching [client]/[mysqld] SSL options, then
    advances the wizard.  Stays on the current page if generation fails.
    """
    log_debug2("Setting up in path %s\n" % self.main.results_path)

    task = SSLWizard_GenerationTask(self.main, self.main.results_path)
    task.run()

    # Bail out (and stay on this page) if the generation task failed.
    if task.result == False:
        return

    self.ca_cert = task.ca_cert
    self.server_cert = task.server_cert
    self.server_key = task.server_key
    self.client_cert = task.client_cert
    self.client_key = task.client_key

    # Use a context manager so the handle is closed even if write() raises;
    # the original open()/close() pair leaked the handle on error.
    with open(os.path.join(self.main.results_path, "my.cnf.sample"), "w+") as f:
        f.write("""# Copy this to your my.cnf file. Please change <directory> to the corresponding directory where the files were copied.

[client]
ssl-ca=%(ca_cert)s
ssl-cert=%(client_cert)s
ssl-key=%(client_key)s

[mysqld]
ssl-ca=%(ca_cert)s
ssl-cert=%(server_cert)s
ssl-key=%(server_key)s
""" % {"ca_cert" : os.path.join("<directory>", os.path.basename(self.ca_cert)),
       "server_cert" : os.path.join("<directory>", os.path.basename(self.server_cert)),
       "server_key" : os.path.join("<directory>", os.path.basename(self.server_key)),
       "client_cert" : os.path.join("<directory>", os.path.basename(self.client_cert)),
       "client_key" : os.path.join("<directory>", os.path.basename(self.client_key))
       })

    log_debug2("SSL Wizard generation task result: %s\n" % str(task.result))
    self.main.go_next_page()
def do_run(self): global tunnel_serial tunnel_serial += 1 mforms.Utilities.set_thread_name("SSHTunnel%i"%tunnel_serial) sys.stdout.write('Thread started\n') # sys.stdout.write is thread safe while print isn't log_debug2("SSH Tunel %i thread started\n" % tunnel_serial) # Create a socket and pick a random port number for it: self._listen_sock = socket.socket() while True: local_port = random.randint(1024, 65535) try: self._listen_sock.bind(('127.0.0.1', local_port)) self._listen_sock.listen(2) with self.lock: self.local_port = local_port break except socket.error, exc: sys.stdout.write('Socket error: %s for port %d\n' % (exc, local_port) ) err, msg = exc.args if err == 22: continue # retry self.notify_exception_error('ERROR',"Error initializing server end of tunnel", sys.exc_info()) raise exc finally:
def start_export(self):
    """Export the current query's result set to self._filepath as CSV.

    For local execution the result set is streamed row by row through
    csv.writer using the user-selected separators/quoting; otherwise the
    query is simply handed to the server.  Returns True on success,
    False if the user cancelled via self._thread_event.
    """
    if self._user_query:
        query = self._user_query
    else:
        query = self.get_query()
    if self._local:
        rset = self._editor.executeManagementQuery(query, 1)
        if rset:
            if self._user_query: #We need to get columns info
                self.read_user_query_columns(rset)
            self._max_rows = rset.rowCount
            self.update_progress(0.0, "Begin Export")
            with open(self._filepath, 'wb') as csvfile:
                # Quoting mode depends on whether an enclosing string was chosen.
                output = csv.writer(csvfile, delimiter = self.options['filedseparator']['value'],
                                    lineterminator = self.options['lineseparator']['value'],
                                    quotechar = self.options['encolsestring']['value'],
                                    quoting = csv.QUOTE_NONNUMERIC if self.options['encolsestring']['value'] else csv.QUOTE_NONE)
                # Header row with the (utf-8 encoded) column names.
                output.writerow([value['name'].encode('utf-8') for value in self._columns])
                ok = rset.goToFirstRow()
                # Because there's no realiable way to use offset only, we'll do this here.
                offset = 0
                if self._offset and not self._limit:
                    offset = self._offset
                i = 0
                while ok:
                    # Cooperative cancellation check once per row.
                    if self._thread_event and self._thread_event.is_set():
                        log_debug2("Worker thread was stopped by user")
                        self.update_progress(round(self._current_row / self._max_rows, 2), "Data export stopped by user request")
                        return False
                    i += 1
                    # Skip rows manually until the requested offset is reached.
                    if offset > 0 and i <= offset:
                        ok = rset.nextRow()
                        continue
                    self.item_count = self.item_count + 1
                    self._current_row = float(rset.currentRow + 1)
                    self.update_progress(round(self._current_row / self._max_rows, 2), "Data export")
                    row = []
                    # Pick the typed accessor matching each column's kind.
                    for col in self._columns:
                        if col['is_number'] or col['is_bignumber']:
                            row.append(rset.intFieldValueByName(col['name']))
                        elif col['is_float']:
                            row.append(rset.floatFieldValueByName(col['name']))
                        elif col['is_geometry']:
                            row.append(rset.geoStringFieldValueByName(col['name']))
                        else:
                            row.append(rset.stringFieldValueByName(col['name']))
                    output.writerow(row)
                    csvfile.flush()
                    ok = rset.nextRow()
            self.update_progress(1.0, "Export finished")
    else:
        # Non-local target: let the server run the statement itself.
        self._editor.executeManagementCommand(query, 1)
    return True
def start_export(self):
    """Export the query's result set to self._filepath as a JSON array.

    Each row becomes one JSON object; values are serialized per column
    type (numbers via json.dumps, geometry as raw GeoJSON, JSON columns
    verbatim).  All output is explicitly utf-8 encoded before being
    written to the binary file.  Returns True on success, False if the
    user cancelled.
    """
    if self._user_query:
        query = self._user_query
    else:
        query = self.get_query()
    rset = self._editor.executeManagementQuery(query, 1)
    if rset:
        if self._user_query: #We need to get columns info
            self.read_user_query_columns(rset)
        with open(self._filepath, 'wb') as jsonfile:
            jsonfile.write('['.encode('utf-8'))
            ok = rset.goToFirstRow()
            self._max_rows = rset.rowCount
            # Because there's no realiable way to use offset only, we'll do this here.
            offset = 0
            if self._offset and not self._limit:
                offset = self._offset
            i = 0
            while ok:
                # NOTE(review): cancelling here leaves the file without the
                # closing ']' — the partial file is not valid JSON; confirm
                # callers treat a False return as "discard output".
                if self._thread_event and self._thread_event.is_set():
                    log_debug2("Worker thread was stopped by user")
                    return False
                i += 1
                # Skip rows manually until the requested offset is reached.
                if offset > 0 and i <= offset:
                    ok = rset.nextRow()
                    continue
                self.item_count = self.item_count + 1
                self._current_row = rset.currentRow + 1
                row = []
                for col in self._columns:
                    if col['is_number'] or col['is_bignumber']:
                        row.append("\"%s\":%s" % (col['name'], json.dumps(rset.intFieldValueByName(col['name']))))
                    elif col['is_float']:
                        row.append("\"%s\":%s" % (col['name'], json.dumps(rset.floatFieldValueByName(col['name']))))
                    elif col['is_geometry']:
                        # GeoJSON is already JSON — embed it verbatim.
                        row.append("\"%s\":%s" % (col['name'], rset.geoJsonFieldValueByName(col['name'])))
                    else:
                        if col['type'] == "json":
                            # JSON column content is embedded as-is, not re-quoted.
                            row.append("\"%s\":%s" % (col['name'], to_unicode(rset.stringFieldValueByName(col['name']))))
                        else:
                            row.append("\"%s\":%s" % (col['name'], json.dumps(to_unicode(rset.stringFieldValueByName(col['name'])))))
                ok = rset.nextRow()
                # Append a comma separator unless this was the last row.
                line = "{%s}%s" % (', '.join(row), ",\n " if ok else "")
                jsonfile.write(line.encode('utf-8'))
                jsonfile.flush()
            jsonfile.write(']'.encode('utf-8'))
    return True
def start_export(self):
    """Export the query's result set to self._filepath as a JSON array.

    Each row becomes one JSON object; values are serialized per column
    type.  Returns True on success, False if the user cancelled.

    Fix: row text may contain unicode field values; writing it unencoded
    to a binary-mode file made Python 2 attempt an implicit ASCII encode,
    raising UnicodeEncodeError for any non-ASCII data.  All writes now
    encode to utf-8 explicitly.
    """
    if self._user_query:
        query = self._user_query
    else:
        query = self.get_query()
    rset = self._editor.executeManagementQuery(query, 1)
    if rset:
        if self._user_query: #We need to get columns info
            self.read_user_query_columns(rset)
        with open(self._filepath, 'wb') as jsonfile:
            jsonfile.write('['.encode('utf-8'))
            ok = rset.goToFirstRow()
            self._max_rows = rset.rowCount
            # Because there's no realiable way to use offset only, we'll do this here.
            offset = 0
            if self._offset and not self._limit:
                offset = self._offset
            i = 0
            while ok:
                if self._thread_event and self._thread_event.is_set():
                    log_debug2("Worker thread was stopped by user")
                    return False
                i += 1
                # Skip rows manually until the requested offset is reached.
                if offset > 0 and i <= offset:
                    ok = rset.nextRow()
                    continue
                self.item_count = self.item_count + 1
                self._current_row = rset.currentRow + 1
                row = []
                for col in self._columns:
                    if col['is_number'] or col['is_bignumber']:
                        row.append("\"%s\":%s" % (col['name'], json.dumps(rset.intFieldValueByName(col['name']))))
                    elif col['is_float']:
                        row.append("\"%s\":%s" % (col['name'], json.dumps(rset.floatFieldValueByName(col['name']))))
                    elif col['is_geometry']:
                        # GeoJSON is already JSON — embed it verbatim.
                        row.append("\"%s\":%s" % (col['name'], rset.geoJsonFieldValueByName(col['name'])))
                    else:
                        if col['type'] == "json":
                            # JSON column content is embedded as-is, not re-quoted.
                            row.append("\"%s\":%s" % (col['name'], rset.stringFieldValueByName(col['name'])))
                        else:
                            row.append("\"%s\":%s" % (col['name'], json.dumps(rset.stringFieldValueByName(col['name']))))
                ok = rset.nextRow()
                # Append a comma separator unless this was the last row;
                # encode explicitly (see docstring).
                line = "{%s}%s" % (', '.join(row), ",\n " if ok else "")
                jsonfile.write(line.encode('utf-8'))
                jsonfile.flush()
            jsonfile.write(']'.encode('utf-8'))
    return True
def start_import(self): if not self._last_analyze: return False if self._new_table: if not self.prepare_new_table(): return False if self._truncate_table: self.update_progress(0.0, "Truncate table") self._editor.executeManagementCommand("TRUNCATE TABLE %s" % self._table_w_prefix, 1) result = True with open(self._filepath, 'rb') as jsonfile: data = json.load(jsonfile) dest_col_order = list(set([i['dest_col'] for i in self._mapping if i['active']])) query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (self._table_w_prefix, ",".join(dest_col_order), ",".join(["?" for i in dest_col_order])) col_order = dict([(i['dest_col'], i['name']) for i in self._mapping if i['active']]) col_type = dict([(i['name'], i['type']) for i in self._mapping if i['active']]) self._editor.executeManagementCommand(query, 1) try: self._max_rows = len(data) for row in data: if self._thread_event and self._thread_event.is_set(): log_debug2("Worker thread was stopped by user") self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1) return False self._current_row = self._current_row + 1 self.item_count = self.item_count + 1 for i, col in enumerate(col_order): if col_order[col] not in row: log_error("Can't find col: %s in row: %s" % (col_order[col], row)) result = False break val = row[col_order[col]] if col_type[col] == 'double': val = row[col_order[col]].replace(self._decimal_separator, ',') elif col_type[col] == 'datetime': val = datetime.datetime.strptime(row[col_order[col]], self._date_format).strftime("%Y-%m-%d %H:%M:%S") self._editor.executeManagementCommand("""SET @a%d = "%s" """ % (i, val), 0) else: try: self._editor.executeManagementCommand("EXECUTE stmt USING %s" % ", ".join(['@a%d' % i for i, col in enumerate(col_order)]), 0) except Exception, e: log_error("Row import failed with error: %s" % e) except Exception, e: log_error("Import failed: %s" % e) self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
def generate(self, path, config_file):
    """Generate a CA plus server and client certificates signed by it.

    Uses the openssl command line tool (must be on PATH) and the config
    files referenced by self.config_file.  Returns a 6-tuple
    (ok, ca_cert, server_cert, server_key, client_cert, client_key);
    on any failure ok is False and the remaining items are None.

    Fix: the two generate_certificate failure paths previously returned a
    4-tuple (False, server_key, server_req, server_cert), which crashed
    any caller unpacking the documented 6-tuple; all error paths now
    return a consistent 6-tuple.  The CA key file handle is also closed
    via a context manager instead of being leaked.
    """
    days = 3600
    tool = "openssl"
    ca_key = os.path.join(path, "ca-key.pem")
    ca_cert = os.path.join(path, "ca-cert.pem")

    # Check if the tool exists
    log_debug2("Checking tool availability(%s)\n" % tool)
    if not self.run_command([tool, "version"]):
        self.display_error("Checking requirements", "The SSL tool (%s) is not available. Please verify if it's installed and the installation directory is in the PATH environment variable" % tool)
        return False, None, None, None, None, None

    # Check if path exists
    if not os.path.exists(self.path):
        self.display_error("Checking requirements", "The specified directory does not exist.")
        return False, None, None, None, None, None

    log_debug2("Creating CA certificate...\n")
    # run_command writes the generated key to the open file handle.
    with open(ca_key, "w") as f:
        if not self.run_command([tool, "genrsa", "2048"], f):
            self.display_error("Creating CA certificate...", "Could not generate RSA certificate")
            return False, None, None, None, None, None

    log_debug2("Creating CA key...\n")
    req_cmd = [tool, "req", "-new", "-x509", "-nodes", "-days", str(days), "-key", ca_key, "-out", ca_cert, "-config", self.config_file["CA"]]
    if not self.run_command(req_cmd):
        self.display_error("Creating CA certificate...", "Could not generate keys")
        return False, None, None, None, None, None

    log_debug2("Create server certificate and self-sign\n")
    result, server_key, server_req, server_cert = self.generate_certificate(tool, path, "server", ca_cert, ca_key, self.config_file["Server"])
    if not result:
        self.display_error("Create server certificate and self-sign", "Could not generate keys")
        return False, None, None, None, None, None

    log_debug2("Create client certificates and self-sign\n")
    result, client_key, client_req, client_cert = self.generate_certificate(tool, path, "client", ca_cert, ca_key, self.config_file["Client"])
    if not result:
        self.display_error("Create client certificates and self-sign", "Could not generate keys")
        return False, None, None, None, None, None

    return True, ca_cert, server_cert, server_key, client_cert, client_key
def testInstanceSettingByName(what, connection, server_instance): global test_ssh_connection log_debug("Test %s in %s\n" % (what, connection.name)) profile = ServerProfile(connection, server_instance) if what == "connect_to_host": if test_ssh_connection: test_ssh_connection = None log_info("Instance test: Connecting to %s\n" % profile.ssh_hostname) try: test_ssh_connection = wb_admin_control.WbAdminControl(profile, None, connect_sql=False, test_only = True) test_ssh_connection.init() grt.send_info("connected.") except Exception, exc: log_error("Exception: %s" % exc.message) import traceback log_debug2("Backtrace was: " % traceback.format_stack()) return "ERROR "+str(exc) except:
def testInstanceSettingByName(what, connection, server_instance): global test_ssh_connection log_debug("Test %s in %s\n" % (what, connection.name)) profile = ServerProfile(connection, server_instance) if what == "connect_to_host": if test_ssh_connection: test_ssh_connection = None log_info("Instance test: Connecting to %s\n" % profile.ssh_hostname) try: test_ssh_connection = wb_admin_control.WbAdminControl(profile, None, connect_sql=False, test_only=True) test_ssh_connection.init() grt.send_info("connected.") except Exception, exc: log_error("Exception: %s\n" % exc.message) import traceback log_debug2("Backtrace was: ", traceback.format_stack()) return "ERROR "+str(exc) except:
def go_next(self):
    """Run the SSL certificate generation task and emit a my.cnf sample.

    Stores the generated certificate/key paths on self, writes a
    my.cnf.sample file (paths normalized to forward slashes for my.cnf)
    and advances the wizard.  Stays on the current page if generation
    fails.
    """
    log_debug2("Setting up in path %s\n" % self.main.results_path)

    task = SSLWizard_GenerationTask(self.main, self.main.results_path)
    task.run()

    # Bail out (and stay on this page) if the generation task failed.
    if task.result == False:
        return

    self.ca_cert = task.ca_cert
    self.server_cert = task.server_cert
    self.server_key = task.server_key
    self.client_cert = task.client_cert
    self.client_key = task.client_key

    # Use a context manager so the handle is closed even if write() raises;
    # the original open()/close() pair leaked the handle on error.
    with open(os.path.join(self.main.results_path, "my.cnf.sample"), "w+") as f:
        f.write("""# Copy this to your my.cnf file. Please change <directory> to the corresponding
# directory where the files were copied.

[client]
ssl-ca=%(ca_cert)s
ssl-cert=%(client_cert)s
ssl-key=%(client_key)s

[mysqld]
ssl-ca=%(ca_cert)s
ssl-cert=%(server_cert)s
ssl-key=%(server_key)s
""" % {"ca_cert" : os.path.join("<directory>", os.path.basename(self.ca_cert)).replace('\\', '/'),
       "server_cert" : os.path.join("<directory>", os.path.basename(self.server_cert)).replace('\\', '/'),
       "server_key" : os.path.join("<directory>", os.path.basename(self.server_key)).replace('\\', '/'),
       "client_cert" : os.path.join("<directory>", os.path.basename(self.client_cert)).replace('\\', '/'),
       "client_key" : os.path.join("<directory>", os.path.basename(self.client_key)).replace('\\', '/')
       })

    log_debug2("SSL Wizard generation task result: %s\n" % str(task.result))
    self.main.go_next_page()
def start_import(self):
    """Import rows from the CSV file at self._filepath via a prepared INSERT.

    Reads the file through UniReader with the detected dialect/encoding,
    binds each row's values to @a0..@aN session variables and EXECUTEs a
    prepared INSERT per row.  Progress is tracked by file offset.  This
    variant also maps the literal word NULL to SQL NULL when the
    'nullwordaskeyword' option is enabled.  Returns False on
    cancellation; import errors are logged per row.
    """
    if not self._last_analyze:
        return False
    if self._new_table:
        if not self.prepare_new_table():
            return False
    if self._truncate_table:
        self.update_progress(0.0, "Truncate table")
        self._editor.executeManagementCommand("TRUNCATE TABLE %s" % self._table_w_prefix, 1)
    result = True
    with open(self._filepath, 'rb') as csvfile:
        self.update_progress(0.0, "Prepare Import")
        dest_col_order = list(set([i['dest_col'] for i in self._mapping if i['active']]))
        # One prepared statement reused for every row.
        query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (self._table_w_prefix, ",".join(["`%s`" % col for col in dest_col_order]), ",".join(["?" for i in dest_col_order]))
        # dest column -> source column index, and dest column -> type.
        col_order = dict([(i['dest_col'], i['col_no']) for i in self._mapping if i['active']])
        col_type = dict([(i['dest_col'], i['type']) for i in self._mapping if i['active']])
        # 5.7.5 renamed GeomFromText to ST_GeomFromText.
        is_server_5_7 = self._targetVersion.is_supported_mysql_version_at_least(Version.fromstr("5.7.5"))
        self._editor.executeManagementCommand(query, 1)
        try:
            is_header = self.has_header
            reader = UniReader(csvfile, self.dialect, encoding=self._encoding)
            # Progress is measured in bytes read, not rows.
            self._max_rows = os.path.getsize(self._filepath)
            self.update_progress(0.0, "Begin Import")
            for row in reader:
                if self._thread_event and self._thread_event.is_set():
                    self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
                    log_debug2("Worker thread was stopped by user")
                    self.update_progress(round(self._current_row / self._max_rows, 2), "Import stopped by user request")
                    return False
                self._current_row = float(csvfile.tell())
                if is_header:
                    is_header = False
                    continue
                for i, col in enumerate(col_order):
                    if col_order[col] >= len(row):
                        log_error("Can't find col: %s in row: %s" % (col_order[col], row))
                        result = False
                        break
                    val = row[col_order[col]]
                    col_name = col_order[col]
                    if col_type[col] == "geometry":
                        if is_server_5_7:
                            val = """ST_GeomFromText("%s")""" % row[col_name]
                        else:
                            val = """GeomFromText("%s")""" % row[col_name]
                        # Geometry is bound as a raw SQL expression, unquoted.
                        self._editor.executeManagementCommand("""SET @a%d = %s """ % (i, val), 0)
                    else:
                        if col_type[col] == 'double':
                            # Normalize the locale decimal separator to '.'.
                            val = row[col_name].replace(self._decimal_separator, '.')
                        elif col_type[col] == 'datetime':
                            val = datetime.datetime.strptime(row[col_name], self._date_format).strftime("%Y-%m-%d %H:%M:%S")
                        # Escape backslashes and quotes for the SQL literal.
                        if hasattr(val, "replace"):
                            val = val.replace("\\", "\\\\").replace("'", "\\'")
                        if self.options['nullwordaskeyword']['value'] == "y" and val.upper() == "NULL":
                            self._editor.executeManagementCommand("""SET @a%d = NULL """ % (i), 0)
                        else:
                            self._editor.executeManagementCommand("""SET @a%d = '%s' """ % (i, val), 0)
                else:
                    # for/else: only EXECUTE when every column was bound.
                    try:
                        self._editor.executeManagementCommand("EXECUTE stmt USING %s" % ", ".join(['@a%d' % i for i, col in enumerate(col_order)]), 0)
                        self.item_count = self.item_count + 1
                        self.update_progress(round(self._current_row / self._max_rows, 2), "Data import")
                    except Exception, e:
                        log_error("Row import failed with error: %s" % e)
                        self.update_progress(round(self._current_row / self._max_rows, 2), "Row import failed with error: %s" % e)
                        result = False
            self.update_progress(1.0, "Import finished")
        except Exception, e:
            import traceback
            log_debug3("Import failed traceback: %s" % traceback.format_exc())
            log_error("Import failed: %s" % e)
def start_import(self): if not self._last_analyze: return False if self._new_table: if not self.prepare_new_table(): return False if self._truncate_table: self.update_progress(0.0, "Truncate table") self._editor.executeManagementCommand("TRUNCATE TABLE %s" % self._table_w_prefix, 1) result = True with open(self._filepath, 'rb') as jsonfile: data = json.load(jsonfile) dest_col_order = list(set([i['dest_col'] for i in self._mapping if i['active']])) query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (self._table_w_prefix, ",".join(["`%s`" % col for col in dest_col_order]), ",".join(["?" for i in dest_col_order])) col_order = dict([(i['dest_col'], i['name']) for i in self._mapping if i['active']]) col_type = dict([(i['name'], i['type']) for i in self._mapping if i['active']]) self._editor.executeManagementCommand(query, 1) try: self._max_rows = len(data) for row in data: if self._thread_event and self._thread_event.is_set(): log_debug2("Worker thread was stopped by user") self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1) return False self._current_row = self._current_row + 1 for i, col in enumerate(col_order): if col_order[col] not in row: log_error("Can't find col: %s in row: %s" % (col_order[col], row)) result = False break val = row[col_order[col]] col_name = col_order[col] if col_type[col] == "geometry": val = """ ST_GeomFromGeoJSON('%s')""" % json.dumps(val).replace("\\", "\\\\").replace("'", "\\'") self._editor.executeManagementCommand("""SET @a%d = %s """ % (i, val), 0) else: if col_type[col_name] != "json" and hasattr(val, "replace"): val = val.replace("\\", "\\\\").replace("'", "\\'") if col_type[col_name] == 'double': val = val(str).replace(self._decimal_separator, '.') elif col_type[col_name] == 'datetime': val = datetime.datetime.strptime(val, self._date_format).strftime("%Y-%m-%d %H:%M:%S") elif col_type[col_name] == "json": val = json.dumps(val).replace("\\", "\\\\").replace("'", "\\'") if col_type[col_name] == "int": 
self._editor.executeManagementCommand("""SET @a%d = %d """ % (i, int(val)), 0) else: self._editor.executeManagementCommand("""SET @a%d = '%s' """ % (i, val), 0) else: try: self._editor.executeManagementCommand("EXECUTE stmt USING %s" % ", ".join(['@a%d' % i for i, col in enumerate(col_order)]), 0) self.item_count = self.item_count + 1 except Exception, e: log_error("Row import failed with error: %s" % e) except Exception, e: import traceback log_debug3("Import failed traceback: %s" % traceback.format_exc()) log_error("Import failed: %s" % e) self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
def start_import(self):
    """Import rows from the JSON file at self._filepath via a prepared INSERT.

    Loads the whole file, prepares one parameterized INSERT for the
    active mapped columns, binds each row's values to @a0..@aN session
    variables (typed per column: geometry via ST_GeomFromGeoJSON, json
    re-serialized, int bound numerically) and EXECUTEs per row.
    Returns False on cancellation, otherwise the accumulated result flag.

    Fix: `val = val(str)` called the value as a function, raising
    TypeError for every double column; the intended conversion is
    `str(val)`.
    """
    if not self._last_analyze:
        return False
    if self._new_table:
        if not self.prepare_new_table():
            return False
    if self._truncate_table:
        self.update_progress(0.0, "Truncate table")
        self._editor.executeManagementCommand("TRUNCATE TABLE %s" % self._table_w_prefix, 1)
    result = True
    with open(self._filepath, 'rb') as jsonfile:
        data = json.load(jsonfile)
        dest_col_order = [i['dest_col'] for i in self._mapping if i['active']]
        query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (self._table_w_prefix, ",".join(["`%s`" % col for col in dest_col_order]), ",".join(["?" for i in dest_col_order]))
        col_order = dict([(i['dest_col'], i['name']) for i in self._mapping if i['active']])
        col_type = dict([(i['name'], i['type']) for i in self._mapping if i['active']])
        self._editor.executeManagementCommand(query, 1)
        try:
            self._max_rows = len(data)
            for row in data:
                if self._thread_event and self._thread_event.is_set():
                    log_debug2("Worker thread was stopped by user")
                    self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
                    return False
                self._current_row = self._current_row + 1
                for i, col in enumerate(col_order):
                    if col_order[col] not in row:
                        log_error("Can't find col: %s in row: %s" % (col_order[col], row))
                        result = False
                        break
                    val = row[col_order[col]]
                    col_name = col_order[col]
                    if col_type[col] == "geometry":
                        # Bound as a raw SQL expression with escaped GeoJSON.
                        val = """ ST_GeomFromGeoJSON('%s')""" % json.dumps(val).replace("\\", "\\\\").replace("'", "\\'")
                        self._editor.executeManagementCommand("""SET @a%d = %s """ % (i, val), 0)
                    else:
                        # Escape backslashes/quotes for the SQL literal (json
                        # values are escaped after serialization below).
                        if col_type[col_name] != "json" and hasattr(val, "replace"):
                            val = val.replace("\\", "\\\\").replace("'", "\\'")
                        if col_type[col_name] == 'double':
                            # Fixed: was val(str), a call on the value itself.
                            val = str(val).replace(self._decimal_separator, '.')
                        elif col_type[col_name] == 'datetime':
                            val = datetime.datetime.strptime(val, self._date_format).strftime("%Y-%m-%d %H:%M:%S")
                        elif col_type[col_name] == "json":
                            val = json.dumps(val).replace("\\", "\\\\").replace("'", "\\'")
                        if col_type[col_name] == "int":
                            self._editor.executeManagementCommand("""SET @a%d = %d """ % (i, int(val)), 0)
                        else:
                            self._editor.executeManagementCommand("""SET @a%d = '%s' """ % (i, val), 0)
                else:
                    # for/else: only EXECUTE when every column was bound.
                    try:
                        self._editor.executeManagementCommand("EXECUTE stmt USING %s" % ", ".join(['@a%d' % i for i, col in enumerate(col_order)]), 0)
                        self.item_count = self.item_count + 1
                    except Exception as e:
                        log_error("Row import failed with error: %s" % e)
        except Exception as e:
            import traceback
            log_debug3("Import failed traceback: %s" % traceback.format_exc())
            log_error("Import failed: %s" % e)
        self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
    return result
def start_import(self):
    """Import rows from the CSV file at self._filepath via a prepared INSERT.

    Reads the file through UniReader with the detected dialect/encoding,
    binds each row's values to @a0..@aN session variables and EXECUTEs a
    prepared INSERT per row.  Progress is tracked by file offset.
    Returns False on cancellation; import errors are logged per row.
    """
    if not self._last_analyze:
        return False
    if self._new_table:
        if not self.prepare_new_table():
            return False
    if self._truncate_table:
        self.update_progress(0.0, "Truncate table")
        self._editor.executeManagementCommand("TRUNCATE TABLE %s" % self._table_w_prefix, 1)
    result = True
    with open(self._filepath, 'rb') as csvfile:
        self.update_progress(0.0, "Prepare Import")
        dest_col_order = list(set([i['dest_col'] for i in self._mapping if i['active']]))
        # One prepared statement reused for every row.
        query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (self._table_w_prefix, ",".join(["`%s`" % col for col in dest_col_order]), ",".join(["?" for i in dest_col_order]))
        # dest column -> source column index, and dest column -> type.
        col_order = dict([(i['dest_col'], i['col_no']) for i in self._mapping if i['active']])
        col_type = dict([(i['dest_col'], i['type']) for i in self._mapping if i['active']])
        # 5.7.5 renamed GeomFromText to ST_GeomFromText.
        is_server_5_7 = self._targetVersion.is_supported_mysql_version_at_least(Version.fromstr("5.7.5"))
        self._editor.executeManagementCommand(query, 1)
        try:
            is_header = self.has_header
            reader = UniReader(csvfile, self.dialect, encoding=self._encoding)
            # Progress is measured in bytes read, not rows.
            self._max_rows = os.path.getsize(self._filepath)
            self.update_progress(0.0, "Begin Import")
            for row in reader:
                if self._thread_event and self._thread_event.is_set():
                    self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
                    log_debug2("Worker thread was stopped by user")
                    self.update_progress(round(self._current_row / self._max_rows, 2), "Import stopped by user request")
                    return False
                self._current_row = float(csvfile.tell())
                if is_header:
                    is_header = False
                    continue
                for i, col in enumerate(col_order):
                    if col_order[col] >= len(row):
                        log_error("Can't find col: %s in row: %s" % (col_order[col], row))
                        result = False
                        break
                    val = row[col_order[col]]
                    col_name = col_order[col]
                    if col_type[col] == "geometry":
                        if is_server_5_7:
                            val = """ST_GeomFromText("%s")""" % row[col_name]
                        else:
                            val = """GeomFromText("%s")""" % row[col_name]
                        # Geometry is bound as a raw SQL expression, unquoted.
                        self._editor.executeManagementCommand("""SET @a%d = %s """ % (i, val), 0)
                    else:
                        if col_type[col] == 'double':
                            # Normalize the locale decimal separator to '.'.
                            val = row[col_name].replace(self._decimal_separator, '.')
                        elif col_type[col] == 'datetime':
                            val = datetime.datetime.strptime(row[col_name], self._date_format).strftime("%Y-%m-%d %H:%M:%S")
                        # Escape backslashes and quotes for the SQL literal.
                        if hasattr(val, "replace"):
                            val = val.replace("\\", "\\\\").replace("'", "\\'")
                        self._editor.executeManagementCommand("""SET @a%d = '%s' """ % (i, val), 0)
                else:
                    # for/else: only EXECUTE when every column was bound.
                    try:
                        self._editor.executeManagementCommand("EXECUTE stmt USING %s" % ", ".join(['@a%d' % i for i, col in enumerate(col_order)]), 0)
                        self.item_count = self.item_count + 1
                        self.update_progress(round(self._current_row / self._max_rows, 2), "Data import")
                    except Exception, e:
                        log_error("Row import failed with error: %s" % e)
                        self.update_progress(round(self._current_row / self._max_rows, 2), "Row import failed with error: %s" % e)
                        result = False
            self.update_progress(1.0, "Import finished")
        except Exception, e:
            import traceback
            log_debug3("Import failed traceback: %s" % traceback.format_exc())
            log_error("Import failed: %s" % e)
grt.send_info("connected.") except Exception, exc: log_error("Exception: %s" % exc.message) import traceback log_debug2("Backtrace was: " % traceback.format_stack()) return "ERROR "+str(exc) except: return "ERROR" try: test_ssh_connection.acquire_admin_access() except Exception, exc: log_error("Exception: %s" % exc.message) import traceback log_debug2("Backtrace was: " % traceback.format_stack()) return "ERROR "+str(exc) os_info = test_ssh_connection.detect_operating_system_version() if os_info: os_type, os_name, os_variant, os_version = os_info log_info("Instance test: detected remote OS: %s (%s), %s, %s\n" % (os_info)) # check if the admin access error was because of wrong OS set if os_type != profile.target_os: return "ERROR Wrong Remote OS configured for connection. Set to %s, but was detected as %s" % (profile.target_os, os_type) else: log_warning("Instance test: could not determine OS version information\n") return "ERROR Could not determine remote OS details"
def notify(self, msg_type, msg_object):
    """Log a tunnel event and enqueue it for the consumer of self.q."""
    event = (msg_type, msg_object)
    log_debug2("tunnel_%i: %s %s\n" % (self.local_port, event[0], event[1]))
    self.q.put(event)
def __init__(self, ctrl_be, server_profile, running, cpu_widget):
    """Set up remote (SSH-based) Windows status monitoring.

    If the profile uses SSH, uploads a VBScript helper to the remote
    user's profile directory line by line via `cmd /C echo` (no sftp is
    assumed available) and starts a background thread running it through
    cscript to stream status values back.
    """
    self.ctrl_be = ctrl_be
    self.ssh = None
    self.cpu = 0
    self.mtx = threading.Lock()
    self.running = running
    self.cpu_widget = cpu_widget
    self.settings = server_profile
    self.remote_admin_enabled = self.settings.uses_ssh
    # Nothing to do for local servers.
    if not self.remote_admin_enabled:
        return
    self.ctrl_be.add_me_for_event("shutdown", self)
    # upload script. Get local name, open ftp session and upload to the
    # directory where mysql.ini is.
    self.script = None
    self.ssh = ctrl_be.open_ssh_session_for_monitoring()
    # %APPDATA% is n/a for LocalService, which is a user sshd can be run
    # under, so resolve %USERPROFILE% instead.
    (dirpath, code) = self.ssh.exec_cmd("cmd /C echo %USERPROFILE%")
    dirpath = dirpath.strip(" \r\t\n")
    # An unexpanded "%USERPROFILE%" echo means the variable didn't resolve.
    if code == 0 and dirpath is not None and dirpath != "%USERPROFILE%":
        script_path = App.get().get_resource_path("mysql_system_status_rmt.vbs")
        filename = "\"" + dirpath + "\\mysql_system_status_rmt.vbs\""
        log_debug('Script local path is "%s". Will be uploaded to "%s"\n' % (script_path, filename))
        if script_path is not None and script_path != "":
            #print "Uploading file to ", filename
            try:
                f = open(script_path)
                # Truncate/create the remote file first.
                self.ssh.exec_cmd("cmd /C echo. > " + filename)
                # Batch echo commands with && up to ~maxsize chars per
                # remote invocation to limit round trips.
                maxsize = 1800
                cmd = ""
                for line in f:
                    line = line.strip("\r\n")
                    tline = line.strip(" \t")
                    if len(tline) > 0:
                        # Skip VBScript comment lines (leading ').
                        if tline[0] != "'":
                            if len(cmd) > maxsize:
                                self.ssh.exec_cmd("cmd /C " + cmd.strip(" &"))
                                self.ssh.exec_cmd("cmd /C echo " + line + " >> " + filename)
                                cmd = ""
                            else:
                                cmd += "echo " + line + " >> " + filename
                                cmd += " && "
                # Flush the remaining batched echo commands.
                if len(cmd) > 0:
                    self.ssh.exec_cmd("cmd /C " + cmd.strip(" &"))
                    cmd = ""
                self.script = "cscript //NoLogo " + filename + " /DoStdIn"
                #run ssh in a thread
                log_debug2('About to run "%s"\n' % self.script)
                self.chan = None
                self.out = ""
                self.read_thread = threading.Thread(target=self.ssh.exec_cmd, args=(self.script, Users.CURRENT, None, self.reader, 1, self.save_channel))
                self.read_thread.setDaemon(True)
                self.read_thread.start()
            except IOError, e:
                # Can't read the local script: tear down the session.
                self.ssh.close()
                self.ssh = None
                raise e
def __init__(self, mon_be):
    """Create a status data source that reads its values over SQL."""
    DataSource.__init__(self, "sql_source", mon_be, None)
    self.mon_be = mon_be
    # Forward and reverse source lookup tables, populated later.
    self.sources, self.rev_sources = {}, {}
    log_debug2('DBStatusDataSource created.\n')
def __init__(self, ctrl_be, server_profile, running, cpu_widget):
    """Set up remote (SSH-based) Windows status monitoring.

    If the profile uses SSH, uploads a VBScript helper to the remote
    user's profile directory line by line via `cmd /C echo` and starts a
    background thread running it through cscript to stream status values.
    """
    self.ctrl_be = ctrl_be
    self.ssh = None
    self.cpu = 0
    self.mtx = threading.Lock()
    self.running = running
    self.cpu_widget = cpu_widget
    self.settings = server_profile
    self.remote_admin_enabled = self.settings.uses_ssh
    # Nothing to do for local servers.
    if not self.remote_admin_enabled:
        return
    self.ctrl_be.add_me_for_event("shutdown", self)
    # upload script. Get local name, open ftp session and upload to the
    # directory where mysql.ini is.
    self.script = None
    self.ssh = ctrl_be.open_ssh_session_for_monitoring()
    # %APPDATA% is n/a for LocalService, which is a user sshd can be run
    # under, so resolve %USERPROFILE% instead.
    (dirpath, code) = self.ssh.exec_cmd("cmd /C echo %USERPROFILE%")
    dirpath = dirpath.strip(" \r\t\n")
    # An unexpanded "%USERPROFILE%" echo means the variable didn't resolve.
    if code == 0 and dirpath is not None and dirpath != "%USERPROFILE%":
        script_path = App.get().get_resource_path("mysql_system_status_rmt.vbs")
        filename = "\"" + dirpath + "\\mysql_system_status_rmt.vbs\""
        log_debug('Script local path is "%s". Will be uploaded to "%s"\n' % (script_path, filename))
        if script_path is not None and script_path != "":
            #print "Uploading file to ", filename
            try:
                f = open(script_path)
                # Truncate/create the remote file first.
                self.ssh.exec_cmd("cmd /C echo. > " + filename)
                # Batch echo commands with && up to ~maxsize chars per
                # remote invocation to limit round trips.
                maxsize = 1800
                cmd = ""
                for line in f:
                    line = line.strip("\r\n")
                    tline = line.strip(" \t")
                    if len(tline) > 0:
                        # Skip VBScript comment lines (leading ').
                        if tline[0] != "'":
                            if len(cmd) > maxsize:
                                self.ssh.exec_cmd("cmd /C " + cmd.strip(" &"))
                                self.ssh.exec_cmd("cmd /C echo " + line + " >> " + filename)
                                cmd = ""
                            else:
                                cmd += "echo " + line + " >> " + filename
                                cmd += " && "
                # Flush the remaining batched echo commands.
                if len(cmd) > 0:
                    self.ssh.exec_cmd("cmd /C " + cmd.strip(" &"))
                    cmd = ""
                self.script = "cscript //NoLogo " + filename + " /DoStdIn"
                #run ssh in a thread
                log_debug2('About to run "%s"\n' % self.script)
                self.chan = None
                self.out = ""
                self.read_thread = threading.Thread(target=self.ssh.exec_cmd, args=(self.script, Users.CURRENT, None, self.reader, 1, self.save_channel))
                self.read_thread.setDaemon(True)
                self.read_thread.start()
            except IOError, e:
                # Can't read the local script: tear down the session.
                self.ssh.close()
                self.ssh = None
                raise e
def do_run(self):
    """Tunnel thread body.

    Opens a local listening socket on a random port, connects to the SSH
    server, then forwards data between accepted local clients and their
    SSH channels until shutdown or an inactivity timeout.
    """
    global tunnel_serial
    tunnel_serial += 1
    mforms.Utilities.set_thread_name("SSHTunnel%i"%tunnel_serial)

    sys.stdout.write('Thread started\n') # sys.stdout.write is thread safe while print isn't
    log_debug2("SSH Tunel %i thread started\n" % tunnel_serial)
    # Create a socket and pick a random port number for it:
    self._listen_sock = socket.socket()
    while True:
        local_port = random.randint(1024, 65535)
        try:
            self._listen_sock.bind(('127.0.0.1', local_port))
            self._listen_sock.listen(2)
            with self.lock:
                self.local_port = local_port
            break
        except socket.error as exc:
            sys.stdout.write('Socket error: %s for port %d\n' % (exc, local_port) )
            err, msg = exc.args
            if err == 22:  # 22 == EINVAL on most platforms: port unusable, pick another
                continue # retry
            self.notify_exception_error('ERROR',"Error initializing server end of tunnel", sys.exc_info())
            raise exc
        finally:
            # Runs on both break and retry: mark the tunnel as connecting and
            # wake any thread blocked waiting for the local port number.
            with self.lock:
                self.connecting = True
            self.port_is_set.set()

    if self._keyfile:
        self.notify('INFO', 'Connecting to SSH server at %s:%s using key %s...' % (self._server[0], self._server[1], self._keyfile) )
    else:
        self.notify('INFO', 'Connecting to SSH server at %s:%s...' % (self._server[0], self._server[1]) )
    connected = self._connect_ssh()
    if not connected:
        # Connection failed: close the listener; setting _shutdown makes the
        # forwarding loop below a no-op so cleanup code still runs.
        self._listen_sock.close()
        self._shutdown = True
    with self.lock:
        self.connecting = False
    if connected:
        self.notify('INFO', 'Connection opened')
    # Password was only needed for the connection attempt; drop the reference.
    del self._password

    last_activity = time.time()
    while not self._shutdown:
        try:
            # select() over the listener plus both ends of every forwarding pair.
            socks = [self._listen_sock]
            for sock, chan in self._connections:
                socks.append(sock)
                socks.append(chan)
            r, w, x = select.select(socks, [], [], TUNNEL_TIMEOUT)
        except Exception as e:
            if not self._shutdown:
                self.notify_exception_error('ERROR', 'Error while forwarding data: %r' % e, sys.exc_info())
            break

        # No events, no clients (only the listener in socks) and idle past the
        # timeout -> shut the tunnel down.
        if not r and len(socks) <= 1 and time.time() - last_activity > TUNNEL_TIMEOUT:
            self.notify('INFO', 'Closing tunnel to %s:%s for inactivity...' % (self._server[0], self._server[1]) )
            break
        last_activity = time.time()
        if self._listen_sock in r:
            self.notify('INFO', 'New client connection')
            self.accept_client()

        # Pump data both ways; an empty recv() means that side closed.
        closed = []
        for sock, chan in self._connections:
            if sock in r:
                data = sock.recv(1024)
                if not data:
                    closed.append((sock, chan))
                else:
                    chan.send(data)
            if chan in r:
                data = chan.recv(1024)
                if not data:
                    closed.append((sock, chan))
                else:
                    sock.send(data)
        for item in set(closed): # set() will remove duplicates from closed list
            sock, chan = item
            try:
                sock.close()
            except:
                pass
            try:
                chan.close()
            except:
                pass
            self.notify('INFO', 'Client for %s disconnected' % local_port)
            self._connections.remove(item)
        # Last client just disconnected and we are already past the idle
        # timeout -> close the tunnel instead of lingering.
        if closed and not self._connections and time.time() - last_activity > TUNNEL_TIMEOUT:
            self.notify('INFO', 'Closing tunnel to %s:%s for inactivity...' % (self._server[0], self._server[1]) )
            break

    # Time to shutdown:
    for sock, chan in self._connections:
        try:
            sock.close()
        except:
            pass
        try:
            chan.close()
        except:
            pass

    self._listen_sock.close()
    self._client.close()
    log_debug("Leaving tunnel thread %s\n" % self.local_port)
def testInstanceSettingByName(what, connection, server_instance):
    """Run one named validation test against a server instance profile.

    `what` selects the test (e.g. "connect_to_host", "check_config_path",
    "check_admin_commands/local"); remote variants go through the module
    level `test_ssh_connection`, "/local" variants check the local
    filesystem.  Returns "OK" on success or a string starting with "ERROR".

    Fixes vs. original: both backtrace log calls used broken formatting —
    `"Backtrace was: " % traceback.format_stack()` raises TypeError ('%' on
    a string with no conversion specifier), and the two-argument call form
    did not interpolate the stack at all; the config file opened for the
    local section check was never closed.
    """
    global test_ssh_connection
    log_debug("Test %s in %s\n" % (what, connection.name))

    profile = ServerProfile(connection, server_instance)
    if what == "connect_to_host":
        if test_ssh_connection:
            test_ssh_connection = None

        log_info("Instance test: Connecting to %s\n" % profile.ssh_hostname)
        try:
            test_ssh_connection = wb_admin_control.WbAdminControl(
                profile, None, connect_sql=False, test_only=True)
            test_ssh_connection.init()
            grt.send_info("connected.")
        except Exception as exc:
            log_error("Exception: %s\n" % str(exc))
            import traceback
            # was: log_debug2("Backtrace was: ", traceback.format_stack()) -- the
            # stack was passed as an extra argument and never logged
            log_debug2("Backtrace was: %s\n" % traceback.format_stack())
            return "ERROR " + str(exc)
        except:
            return "ERROR"

        try:
            test_ssh_connection.acquire_admin_access()
        except Exception as exc:
            log_error("Exception: %s\n" % str(exc))
            import traceback
            # was: "Backtrace was: " % traceback.format_stack() -- raised
            # TypeError instead of logging
            log_debug2("Backtrace was: %s\n" % traceback.format_stack())
            return "ERROR " + str(exc)

        os_info = test_ssh_connection.detect_operating_system_version()
        if os_info:
            os_type, os_name, os_variant, os_version = os_info
            log_info("Instance test: detected remote OS: %s (%s), %s, %s\n" % (os_info))

            # check if the admin access error was because of wrong OS set
            if os_type != profile.target_os:
                return "ERROR Wrong Remote OS configured for connection. Set to %s, but was detected as %s" % (
                    profile.target_os, os_type)
        else:
            log_warning("Instance test: could not determine OS version information\n")
            return "ERROR Could not determine remote OS details"

        return "OK"
    elif what == "disconnect":
        if test_ssh_connection:
            test_ssh_connection = None
        return "OK"
    elif what == "check_privileges":
        return "ERROR"
    elif what in ("find_config_file", "check_config_path", "check_config_section"):
        config_file = profile.config_file_path
        print("Check if %s exists in remote host" % config_file)
        try:
            if not test_ssh_connection.ssh.fileExists(config_file):
                return "ERROR File %s doesn't exist" % config_file
            else:
                print("File was found in expected location")
        except IOError:
            return 'ERROR Could not verify the existence of the file %s' % config_file

        if what == "check_config_path":
            return "OK"

        section = profile.config_file_section
        cfg_file_content = ""
        print("Check if %s section exists in %s" % (section, config_file))
        try:
            #local_file = test_ssh_connection.fetch_file(config_file)
            cfg_file_content = test_ssh_connection.server_helper.get_file_content(
                path=config_file)
        except Exception as exc:
            import traceback
            traceback.print_exc()
            return "ERROR " + str(exc)

        if ("[" + section + "]") in cfg_file_content:
            return "OK"
        return "ERROR Couldn't find section %s in the remote config file %s" % (
            section, config_file)
    elif what in ("find_config_file/local", "check_config_path/local", "check_config_section/local"):
        config_file = profile.config_file_path
        config_file = wb_admin_control.WbAdminControl(
            profile, None, connect_sql=False).expand_path_variables(config_file)
        print("Check if %s can be accessed" % config_file)
        if os.path.exists(config_file):
            print("File was found at the expected location")
        else:
            return "ERROR File %s doesn't exist" % config_file

        if what == "check_config_path/local":
            return "OK"

        section = profile.config_file_section
        print("Check if section for instance %s exists in %s" % (section, config_file))
        # `with` so the handle is closed (the original leaked the open file).
        with open(config_file, "r") as cfg_file:
            if check_if_config_file_has_section(cfg_file, section):
                print("[%s] section found in configuration file" % section)
                return "OK"
        return "ERROR Couldn't find section [%s] in the config file %s" % (
            section, config_file)
    elif what == "find_error_files":
        return "ERROR"
    elif what == "check_admin_commands":
        path = profile.start_server_cmd
        cmd_start = None
        if path.startswith("/"):
            cmd_start = path.split()[0]
            if not test_ssh_connection.ssh.fileExists(cmd_start):
                return "ERROR %s is invalid" % path

        path = profile.stop_server_cmd
        if path.startswith("/"):
            cmd = path.split()[0]
            if cmd != cmd_start and not test_ssh_connection.ssh.fileExists(cmd):
                return "ERROR %s is invalid" % path

        return "OK"
    elif what == "check_admin_commands/local":
        path = profile.start_server_cmd
        cmd_start = None
        if path.startswith("/"):
            cmd_start = path.split()[0]
            if not os.path.exists(cmd_start):
                return "ERROR %s is invalid" % path

        path = profile.stop_server_cmd
        if path.startswith("/"):
            cmd = path.split()[0]
            if cmd != cmd_start and not os.path.exists(cmd):
                return "ERROR %s is invalid" % path

        return "OK"
    return "ERROR bad command"
# NOTE(review): orphaned fragment — this looks like a stray Python-2-era
# duplicate of the interior of testInstanceSettingByName (note the
# `except Exception, exc` syntax and `exc.message`); it is not a complete
# definition and cannot parse on its own. Verify against version control
# and remove — TODO confirm.
            grt.send_info("connected.")
        except Exception, exc:
            log_error("Exception: %s" % exc.message)
            import traceback
            log_debug2("Backtrace was: " % traceback.format_stack())
            return "ERROR " + str(exc)
        except:
            return "ERROR"

        try:
            test_ssh_connection.acquire_admin_access()
        except Exception, exc:
            log_error("Exception: %s" % exc.message)
            import traceback
            log_debug2("Backtrace was: " % traceback.format_stack())
            return "ERROR " + str(exc)

        os_info = test_ssh_connection.detect_operating_system_version()
        if os_info:
            os_type, os_name, os_variant, os_version = os_info
            log_info("Instance test: detected remote OS: %s (%s), %s, %s\n" % (os_info))

            # check if the admin access error was because of wrong OS set
            if os_type != profile.target_os:
                return "ERROR Wrong Remote OS configured for connection. Set to %s, but was detected as %s" % (
                    profile.target_os, os_type)
        else:
            log_warning("Instance test: could not determine OS version information\n")
def __init__(self, ctrl_be, server_profile, running, cpu_widget):
    """Set up remote (SSH-based) system monitoring for a Windows server.

    Uploads mysql_system_status_rmt.vbs to the remote user's profile
    directory line-by-line via `cmd /C echo` and starts a daemon thread
    running the script through cscript, feeding its output to self.reader.

    Fixes vs. original: `self.ssh = sshConnection` referenced an undefined
    bare name (NameError) — it must be the editor's connection object that
    was just tested/connected two lines above; and the local script file is
    now closed via `with` (it was leaked).
    """
    self.ctrl_be = ctrl_be
    self.ssh = None
    self.cpu = 0
    self.mtx = threading.Lock()
    self.running = running
    self.cpu_widget = cpu_widget
    self.settings = server_profile
    self.remote_admin_enabled = self.settings.uses_ssh

    if not self.remote_admin_enabled:
        return

    self.ctrl_be.add_me_for_event("shutdown", self)

    #upload script. Get local name, open ftp session and upload to the directory
    # where mysql.ini is.
    self.script = None

    if self.ctrl_be.server_profile.uses_ssh:
        if self.ctrl_be.editor.sshConnection.isConnected() == 0:
            if self.ctrl_be.editor.sshConnection.connect() != 0:
                raise OperationCancelledError("Could not connect to SSH server")
        # was: self.ssh = sshConnection -- undefined name; use the connection
        # object that was just verified/connected above
        self.ssh = self.ctrl_be.editor.sshConnection

    if self.ssh is not None:
        # %APPDATA% is n/a for LocalService
        # which is a user sshd can be run
        dirpath = handle_ssh_command_output(self.ssh.executeCommand("cmd /C echo %USERPROFILE%"))
        dirpath = dirpath.strip(" \r\t\n")

        # If the variable did not expand, there is no usable home directory.
        if dirpath is not None and dirpath != "%USERPROFILE%":
            script_path = App.get().get_resource_path("mysql_system_status_rmt.vbs")
            filename = "\"" + dirpath + "\\mysql_system_status_rmt.vbs\""
            log_debug('Script local path is "%s". Will be uploaded to "%s"\n' % (script_path, filename))
            if script_path is not None and script_path != "":
                #print "Uploading file to ", filename
                try:
                    # `with` guarantees the local file is closed even if an
                    # executeCommand call raises (the original leaked it).
                    with open(script_path) as f:
                        # Truncate/create the remote file first.
                        handle_ssh_command_output(self.ssh.executeCommand("cmd /C echo. > " + filename))
                        maxsize = 1800  # keep remote command lines under cmd.exe limits
                        cmd = ""
                        for line in f:
                            line = line.strip("\r\n")
                            tline = line.strip(" \t")
                            if len(tline) > 0:
                                if tline[0] != "'":  # skip VBScript comment lines
                                    if len(cmd) > maxsize:
                                        # Flush batched echos, then append the
                                        # current line on its own.
                                        handle_ssh_command_output(self.ssh.executeCommand("cmd /C " + cmd.strip(" &")))
                                        handle_ssh_command_output(self.ssh.executeCommand("cmd /C echo " + line + " >> " + filename))
                                        cmd = ""
                                    else:
                                        cmd += "echo " + line + " >> " + filename
                                        cmd += " && "
                        if len(cmd) > 0:
                            handle_ssh_command_output(self.ssh.executeCommand("cmd /C " + cmd.strip(" &")))
                            cmd = ""
                    self.script = "cscript //NoLogo " + filename + " /DoStdIn"
                    #run ssh in a thread
                    log_debug2('About to run "%s"\n' % self.script)
                    self.chan = None
                    self.out = ""
                    self.read_thread = threading.Thread(
                        target=self.ssh.executeCommand,
                        args=(self.script, Users.CURRENT, None, self.reader, 1, self.save_channel))
                    self.read_thread.setDaemon(True)
                    self.read_thread.start()
                except IOError as e:
                    self.ssh.disconnect()
                    self.ssh = None
                    raise e
        else:
            print("Can't find a place to upload script dirpath='%s'"%dirpath)
def __init__(self, dbconn):
    """Hold a DB connection plus a mutex so query execution can be serialized."""
    self.mtx = threading.Lock()
    self.dbconn = dbconn
    log_debug2("Constructing SQL query runner, dbconn (" + repr(dbconn) + ')\n')