Example #1
def execute_script(connection, script, log):
    connection = get_connection(connection)

    ranges = grt.modules.MysqlSqlFacade.getSqlStatementRanges(script)
    for start, length in ranges:
        if grt.query_status():
            raise grt.UserInterrupt()
        statement = script[start:start + length]
        try:
            grt.send_info("Execute statement", statement)
            grt.log_debug3("DbMySQLFE", "Execute %s\n" % statement)
            connection.execute(statement)
        except db_utils.QueryError as exc:
            if log:
                entry = grt.classes.GrtLogEntry()
                entry.owner = log
                entry.name = str(exc)
                entry.entryType = 2
                log.entries.append(entry)
            grt.send_warning("%s" % exc)
            grt.log_error("DbMySQLFE",
                          "Exception executing '%s': %s\n" % (statement, exc))
            return False
        except Exception as exc:
            if log:
                entry = grt.classes.GrtLogEntry()
                entry.owner = log
                entry.name = "Exception: " + str(exc)
                entry.entryType = 2
                log.entries.append(entry)
            grt.send_warning("Exception caught: %s" % exc)
            grt.log_error("DbMySQLFE",
                          "Exception executing '%s': %s\n" % (statement, exc))
            return False

    return True
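A minimal usage sketch for execute_script (an illustration, not part of the module): conn_info stands for whatever connection object get_connection() expects and migration_log for a GRT log object, or None to skip per-statement log entries; the function reports failure by returning False at the first failing statement.

# Hypothetical caller; conn_info and migration_log are placeholder names.
ddl = "CREATE TABLE demo_t (id INT PRIMARY KEY);\nCREATE INDEX demo_i ON demo_t (id);"
if execute_script(conn_info, ddl, migration_log):
    grt.send_info("Script executed", "all statements succeeded")
else:
    grt.send_warning("Script execution aborted at the first failing statement")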
Example #2
def execute_query(connection_object, query, *args, **kwargs):
    """Retrieves a connection and executes the given query returning a cursor to iterate over results.

    The remaining positional and keyword arguments are passed with the query to the execute function
    """
    grt.log_debug3("db.sybase", "execute %s %s %s\n" % (query, args, kwargs))
    return get_connection(connection_object).cursor().execute(query, *args, **kwargs)
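A usage sketch for execute_query. This is an assumption-laden illustration: conn_info is a placeholder for a valid connection object, and the row shape and iteration behaviour depend on the underlying DB-API driver (which typically returns the cursor itself from execute).

# Hypothetical caller; conn_info and the queried table are placeholders.
cursor = execute_query(conn_info, "SELECT name, id FROM sysobjects WHERE type = 'U'")
for name, obj_id in cursor:
    grt.send_info("Found table", "%s (id=%s)" % (name, obj_id))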
Example #4
    def __setattr__(self, name, value):
        # Only track attributes that already exist, skip the tracker's own
        # bookkeeping fields, and only act when the value actually changes
        if name in self.__dict__ and \
           name != '_ChangeTracker__changed' and \
           name != '_ChangeTracker__ignoring' and \
           not self.__ignoring and \
           self.__dict__[name] != value:

            log_message = "Changed %s from %s to %s at %s\n" % (name, self.__dict__[name], value, self)

            # If the value was already changed and the new value
            # reverts the change then it removes the attribute from
            # the changed map
            if name in self.__dict__["_ChangeTracker__changed"]:
                if self.__dict__["_ChangeTracker__changed"][name] == value:
                    del self.__dict__["_ChangeTracker__changed"][name]
                    log_message = "Reverted change on %s to %s at %s\n" % (name, value, self)

            # If this is the first change to the attribute, registers the
            # original value on the changed map
            else:
                self.__dict__["_ChangeTracker__changed"][name] = self.__dict__[name]
            
            # Logs the change
            log_debug3(_this_file, log_message)

        # Updates the value
        self.__dict__[name] = value
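The bookkeeping this __setattr__ relies on is not shown above; a minimal sketch of what the rest of the class presumably sets up (an assumption, not the actual ChangeTracker implementation):

class ChangeTracker(object):
    def __init__(self):
        # Write directly to __dict__ so the tracker's own fields never go through __setattr__
        self.__dict__['_ChangeTracker__changed'] = {}      # attribute name -> original value
        self.__dict__['_ChangeTracker__ignoring'] = False  # True temporarily disables tracking

    def has_changed(self, name):
        # Hypothetical helper, for illustration only
        return name in self.__dict__['_ChangeTracker__changed']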
Example #5
    def poll(self):
        log_debug3(
            '%s:%s.poll()' % (_this_file, self.__class__.__name__),
            'SqlDataSource poll. poll_control = %i\n' % self.poll_control)
        if self.poll_control == self.polling and self.dbconn:
            result = None
            try:
                result = self.dbconn.executeQuery(self.sql)  # ctrl_be from DataSource
            except QueryError as e:
                result = None
                if e.is_connection_error():
                    self.dbconn.disconnect()
                    if e.is_error_recoverable():
                        self.dbconn = self.ctrl_be.get_new_sql_connection(False)

            if result is not None:
                while result.nextRow():
                    name = result.stringByName("Variable_name")
                    if name in self.rev_sources:
                        rev_src = self.rev_sources[name]
                        value = float(result.stringByName("Value"))
                        # rev_src contains list of sources and index of current variable in the sources
                        for (src, i) in rev_src:
                            src.set_var(i, value)
                            res = src.calculate()
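A sketch (an assumption, not the actual DataSource code) of how the rev_sources mapping consumed above could be built: each server variable name maps to the calculated sources that use it, paired with the variable's slot index inside that source.

# sources, src.variable_names, src.set_var() and src.calculate() are illustrative names.
rev_sources = {}
for src in sources:
    for i, var_name in enumerate(src.variable_names):
        rev_sources.setdefault(var_name, []).append((src, i))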
Example #7
    def server_stopped_event(self):
        log_debug3('%s:%s.server_stopped_event()' % (_this_file, self.__class__.__name__), 'Enter\n')
        self.running[0] = False
        self.poll_thread = None
        log_debug3('%s:%s.server_stopped_event()' % (_this_file, self.__class__.__name__), 'Leave\n')
    def save_file_content_and_backup(self, path, content, backup_extension, as_admin = False, admin_password = None):
        # Check if dir, where config file will be stored is writable
        dirname, filename = splitpath(path)

        if not as_admin and not self.is_dir_writable(dirname.strip(" \r\t\n")):
            raise PermissionDeniedError("Cannot write to directory %s" % dirname)

        if self.ssh is not None:
            ## Get temp dir to use as tmpdir
            tmpdir, status = self.process_ops.get_cmd_output("echo %temp%")
            if type(tmpdir) is unicode:
                tmpdir = tmpdir.encode("utf8")
            if type(tmpdir) is str:
                tmpdir = tmpdir.strip(" \r\t\n")
                if tmpdir[1] == ":":
                    tmpdir = tmpdir[2:]
                else:
                    log_debug(_this_file, '%s: Temp directory path "%s" is not in expected form. The expected form is something like "C:\\Windows\\Temp"\n' % (self.__class__.__name__, tmpdir) )
                    tmpdir = None
                log_debug2(_this_file, '%s: Got temp dir: "%s"\n' % (self.__class__.__name__, tmpdir) )
            else:
                tmpdir = None
            
            if not tmpdir:
                tmpdir = dirname

            tmpfilename = tmpdir + r"\workbench-temp-file.ini"

            log_debug(_this_file, '%s: Remotely writing contents to temporary file "%s"\n' % (self.__class__.__name__, tmpfilename) )
            log_debug3(_this_file, '%s: %s\n' % (self.__class__.__name__, content) )
            self.ssh.set_contents(tmpfilename, content)

            if backup_extension:
                log_debug(_this_file, '%s: Backing up "%s"\n' % (self.__class__.__name__, path) )
                backup_cmd = "copy /y " + quote_path_win(path) + " " + quote_path_win(path+backup_extension)
                msg, code = self.process_ops.get_cmd_output(backup_cmd)
                if code != 0:
                    print backup_cmd, "->", msg
                    log_error(_this_file, '%s: Error backing up file: %s\n' % (self.__class__.__name__, backup_cmd+'->'+msg) )
                    raise RuntimeError("Error backing up file: %s" % msg)

            copy_to_dest = "copy /y " + quote_path_win(tmpfilename) + " " + quote_path_win(path)
            delete_tmp = "del " + quote_path_win(tmpfilename)
            log_debug(_this_file, '%s: Copying file to final destination: "%s"\n' % (self.__class__.__name__, copy_to_dest) )
            msg, code = self.process_ops.get_cmd_output(copy_to_dest)
            if code != 0:
                print copy_to_dest, "->", msg
                log_error(_this_file, '%s: Error copying temporary file over destination file: %s\n%s to %s\n' % (self.__class__.__name__, msg, tmpfilename, path) )
                raise RuntimeError("Error copying temporary file over destination file: %s\n%s to %s" % (msg, tmpfilename, path))
            log_debug(_this_file, '%s: Deleting tmp file: "%s"\n' % (self.__class__.__name__, delete_tmp) )
            msg, code = self.process_ops.get_cmd_output(delete_tmp)
            if code != 0:
                print "Could not delete temporary file %s: %s" % (tmpfilename, msg)
                log_info(_this_file, '%s: Could not delete temporary file "%s": %s\n' % (self.__class__.__name__, tmpfilename, msg) )
        else:
            raise Exception("No SSH session active, cannot save file remotely")
    def server_started_event(self):
        log_debug3('%s:%s.server_started_event()' % (_this_file, self.__class__.__name__), 'Enter\n')

        # This is needed to ensure an existing polling thread is finished
        # before creating the new one
        if self.poll_thread:
            self.running[0] = False
            self.poll_thread.join()
            self.poll_thread = None

        self.running[0] = True
        self.poll_thread = threading.Thread(target = self.poll_sources)
        self.poll_thread.start()
        log_debug3('%s:%s.server_started_event()' % (_this_file, self.__class__.__name__), 'Leave\n')
    def __init__(self, owner, args, result_queue):
        Thread.__init__(self)
        self._owner = owner
        self.result_queue = result_queue
        self._process_args = args
        grt.log_debug3("Migration", "Spawning copy worker task: %s" % args)
        self._owner.send_info(" ".join(args))
        if sys.platform == "win32":
            # shell=True causes the created window to be hidden by default; this prevents a popup
            # from being shown over the migration wizard
            self.process = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                            universal_newlines=True, shell=True)
        else:
            self.process = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                            universal_newlines=True, close_fds=True)
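A hypothetical companion run() method (the real worker's run() is not shown above), sketching how the spawned process could be drained and its result handed back through result_queue:

    def run(self):
        # Sketch only: stream the tool's combined stdout/stderr, echo it to the owner,
        # then publish (returncode, output lines) on the result queue.
        out = []
        for line in iter(self.process.stdout.readline, ""):
            out.append(line.rstrip("\n"))
            self._owner.send_info(line.rstrip("\n"))
        self.process.wait()
        self.result_queue.put((self.process.returncode, out))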
Example #12
    def parse_mem(self, text):
        text = text.strip(" \r\t\n")
        value = None
        try:
            value = float(text)
        except ValueError:
            value = None

        if value is not None:
            self.mtx.acquire()
            try:
                self.mem = value
            finally:
                self.mtx.release()
        log_debug3('%s:%s.parse_mem()' % (_this_file, self.__class__.__name__),
                   'Got mem stat value from remote script - "%r"\n' % value)
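The acquire/try/finally pattern above is equivalent to using the lock as a context manager; a small self-contained sketch (a stand-in class, not the actual data-source code) showing the same guarded update:

import threading

class MemStatStub(object):
    def __init__(self):
        self.mtx = threading.Lock()
        self.mem = None

    def store(self, value):
        if value is not None:
            with self.mtx:  # same effect as acquire()/try/finally release()
                self.mem = value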
  def exec_cmd(self, cmd, as_admin = 0, admin_password = None, output_handler = None, read_size = 128, get_channel_cb = None):
    output   = None
    retcode  = None

    self.mtx.acquire()
    log_debug3(_this_file, '%s:exec_cmd(cmd="%s", sudo=%s)\n' % (self.__class__.__name__, cmd, str(as_admin)) )
    try:
      (output, retcode) = WbAdminSSH.exec_cmd(self, cmd, 
                                      as_admin=as_admin, 
                                      admin_password=admin_password, 
                                      output_handler=output_handler,
                                      read_size = read_size,
                                      get_channel_cb = get_channel_cb)
      log_debug3(_this_file, '%s:exec_cmd(): Done cmd="%s"\n' % (self.__class__.__name__, cmd) )
    finally:
      self.mtx.release()
    
    return (output, retcode)
    def save_file_content_and_backup(self, path, content, backup_extension, as_admin = False, admin_password = None):
        # Check if dir, where config file will be stored is writable
        dirname, filename = splitpath(path)

        if not as_admin and not self.is_dir_writable(dirname.strip(" \r\t\n")):
            raise PermissionDeniedError("Cannot write to directory %s" % dirname)

        if self.ssh is not None:
            ## Get home dir to use as tmpdir
            homedir, status = self.process_ops.get_cmd_output("echo ~")
            if type(homedir) is unicode:
                homedir = homedir.encode("utf8")
            if type(homedir) is str:
                homedir = homedir.strip(" \r\t\n")
            else:
                homedir = None
            log_debug2(_this_file, '%s: Got home dir: "%s"\n' % (self.__class__.__name__, homedir) )

            if not homedir:
                raise Exception("Unable to get path for remote home directory")

            tmpfilename = homedir + "/.wba.temp"

            log_debug(_this_file, '%s: Remotely writing contents to temporary file "%s"\n' % (self.__class__.__name__, tmpfilename) )
            log_debug3(_this_file, '%s: %s\n' % (self.__class__.__name__, content) )
            self.ssh.set_contents(tmpfilename, content)

            if backup_extension:
                log_debug(_this_file, '%s: Backing up %s\n' % (self.__class__.__name__, path) )
                backup_cmd = "/bin/cp " + quote_path(path) + " " + quote_path(path+backup_extension)
                self.process_ops.exec_cmd(backup_cmd, as_admin, admin_password)

            copy_to_dest = "/bin/cp " + quote_path(tmpfilename) + " " + quote_path(path)
            delete_tmp = "/bin/rm " + quote_path(tmpfilename)
            log_debug(_this_file, '%s: Copying file to final destination: "%s"\n' % (self.__class__.__name__, copy_to_dest) )
            self.process_ops.exec_cmd(copy_to_dest, as_admin, admin_password)
            log_debug(_this_file, '%s: Deleting tmp file: "%s"\n' % (self.__class__.__name__, delete_tmp) )
            self.process_ops.exec_cmd(delete_tmp)
        else:
            raise Exception("No SSH session active, cannot save file remotely")
    def poll(self):
        log_debug3('%s:%s.poll()' % (_this_file, self.__class__.__name__), 'DBStatsDataSource poll. poll_control = %i\n' % self.poll_control)
        if self.poll_control == self.polling and self.dbconn:
            result = None
            try:
                result = self.dbconn.executeQuery(self.sql)  # ctrl_be from DataSource
            except QueryError as e:
                result = None
                if e.is_connection_error():
                    self.dbconn.disconnect()
                    if e.is_error_recoverable():
                        self.dbconn = self.ctrl_be.get_new_sql_connection()

            if result is not None:
                while result.nextRow():
                    name = result.stringByName("Variable_name")
                    if name in self.rev_sources:
                        rev_src = self.rev_sources[name]
                        value = float(result.stringByName("Value"))
                        # rev_src contains list of sources and index of current variable in the sources
                        for (src, i) in rev_src:
                            src.set_var(i, value)
                            res = src.calculate()
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog
    
        mysql_simpleTypes = dict( (datatype.name.upper(), datatype) for datatype in targetCatalog.simpleDatatypes )
        
        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            target_column.flags.extend(source_column.userType.flags)

        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            grt.log_debug3("Migration", "Migrating source column '%s' - type: %s, length: %s\n" % (source_column.name, source_datatype,source_column.length))
            # string data types:
            target_datatype = ''
            if source_datatype in ['VARCHAR', 'NVARCHAR']:
                if source_column.length == -1:  # VARCHAR(MAX) or NVARCHAR(MAX)
                    target_datatype = 'LONGTEXT'  #TODO: Give the user the choice for this target datatype
                elif 0 < source_column.length < 256:
                    target_datatype = 'VARCHAR'
                else:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    target_datatype = 'TEXT' if targetCatalog.version.majorNumber < 5 else 'VARCHAR'
            elif source_datatype in ['TEXT', 'NTEXT']:
                target_datatype = 'LONGTEXT'
            elif source_datatype in ['CHAR', 'NCHAR']:  # MSSQL CHAR's (also VARCHAR's) max length is 8000 non Unicode characters
                if 0 < source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'TEXT' 
            # integer data types:
            elif source_datatype in ['BIGINT', 'INT', 'SMALLINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            elif source_datatype == 'TINYINT':
                target_datatype = source_datatype
                target_column.precision = -1
                if 'UNSIGNED' not in target_column.flags:
                    target_column.flags.append('UNSIGNED')  # In MSSQL TINYINT is unsigned
            elif source_datatype == 'UNIQUEIDENTIFIER':
                target_datatype = 'VARCHAR'
                target_column.length = 64
                if 'UNIQUE' not in target_column.flags:
                    target_column.flags.append('UNIQUE') # uniqueid must be UNIQUE... bug #43098
                state.addMigrationLogEntry(0, source_column, target_column,
                        "Source column type %s was migrated to %s(%s)" % (source_datatype, target_datatype, target_column.length))
            elif source_datatype == 'SYSNAME':  # the relevant info is in http://msdn.microsoft.com/en-us/library/ms191240(v=sql.105).aspx
                target_datatype = 'VARCHAR'
                target_column.length = 160
                state.addMigrationLogEntry(0, source_column, target_column,
                        "Source column type %s was migrated to %s(%s)" % (source_datatype, target_datatype, target_column.length))
            # floating point datatypes:
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                if source_column.scale == 0:
                    target_datatype = 'BIGINT'
                    if source_column.precision < 5:
                        target_datatype = 'SMALLINT'
                    elif source_column.precision < 7:
                        target_datatype = 'MEDIUMINT'
                    elif source_column.precision < 10:
                        target_datatype = 'INT'
                    target_column.precision = -1
                else:
                    target_datatype = 'DECIMAL'
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
            elif source_datatype == 'FLOAT':
                target_datatype = 'FLOAT'  # precision <= 24 fits in a MySQL FLOAT
                if source_column.precision > 24:
                    target_datatype = 'DOUBLE'
                    target_column.precision = -1
            elif source_datatype in ['MONEY', 'SMALLMONEY']:
                target_datatype = 'DECIMAL'
                target_column.precision = source_column.simpleType.numericPrecision
                target_column.scale = source_column.simpleType.numericScale
            # binary datatypes:
            elif source_datatype == 'IMAGE':
                target_datatype = 'LONGBLOB'
            elif source_datatype == 'VARBINARY' and source_column.length == -1:  # VARBINARY(MAX):
                target_datatype = 'LONGBLOB'
            # datetime datatypes:
            elif source_datatype in ['DATETIME', 'SMALLDATETIME', 'DATETIME2', 'DATETIMEOFFSET']:
                target_datatype = 'DATETIME'
                target_column.precision = -1
                target_version = Version.fromgrt(targetCatalog.version)
                if target_version.is_supported_mysql_version_at_least(5,6,4) and source_datatype != 'SMALLDATETIME':
                    target_column.precision = source_column.precision if source_column.precision < 7 else 6
            # timestamp datatypes
            # In MS SQL Server a nonnullable timestamp column is semantically equivalent to a binary(8) column, 
            # and a nullable timestamp column is semantically equivalent to a varbinary(8) column.
            elif source_datatype in ['TIMESTAMP', 'ROWVERSION']:
                target_datatype = 'BINARY' if source_column.isNotNull else 'VARBINARY'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
                target_column.precision = -1
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
                target_column.precision = -1
                target_version = Version.fromgrt(targetCatalog.version)
                if target_version.is_supported_mysql_version_at_least(5,6,4):
                    target_column.precision = source_column.precision if source_column.precision < 7 else 6
            elif source_datatype == 'BIT':
                target_datatype = 'TINYINT'
                target_column.length = 1
                state.addMigrationLogEntry(0, source_column, target_column,
                      "Source column type BIT was migrated to TINYINT(1)")
            elif source_datatype == 'XML':
                target_datatype = 'TEXT'
                state.addMigrationLogEntry(0, source_column, target_column,
                      "Source column type XML was migrated to TEXT")
            elif source_datatype in ['GEOMETRY', 'GEOGRAPHY']:
                target_datatype = 'GEOMETRY'
            elif source_datatype == 'HIERARCHYID':
                target_datatype = 'VARCHAR'
                target_column.length = 255
                state.addMigrationLogEntry(1, source_column, target_column,
                        "Source column type HIERARCHYID was migrated to VARCHAR(255)")
            elif source_datatype == 'SQL_VARIANT':
                target_datatype = 'TEXT'
                state.addMigrationLogEntry(1, source_column, target_column,
                        "Source column type %s was migrated to %s(%s)" % (source_datatype, target_datatype, target_column.length))
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if target_datatype in mysql_simpleTypes:
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning("Migration", "MSSQL migrateTableColumnsToMySQL", "Can't find datatype %s for type %s\n" % (target_datatype, source_datatype))
                state.addMigrationLogEntry(2, source_column, target_column, 
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"' % (target_column.name, source_column.owner.name, source_datatype) )
                return False

            return True
        else:
            state.addMigrationLogEntry(2, source_column, target_column, 
                    'Could not migrate type of column "%s" in "%s" (%s)' % (target_column.name, source_column.owner.name, source_column.formattedRawType) )
            return False

        return True
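The DECIMAL/NUMERIC branch above encodes a small precision-to-integer-type rule; restated as a standalone sketch for clarity (illustration only):

def integer_type_for_precision(precision):
    # scale == 0 means the value is integral, so pick the smallest MySQL integer
    # type whose decimal-digit capacity covers the source precision
    if precision < 5:
        return 'SMALLINT'    # up to 4 digits (max 32767)
    elif precision < 7:
        return 'MEDIUMINT'   # up to 6 digits (max 8388607)
    elif precision < 10:
        return 'INT'         # up to 9 digits (max 2147483647)
    return 'BIGINT'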
Example #17
def log_debug3(msg):
    # Use the caller's "file:function:line" (taken from the stack) as the log context string
    tb = traceback.extract_stack(limit=2)
    grt.log_debug3("%s:%s:%s" % (os.path.basename(tb[-2][0]), tb[-2][2], tb[-2][1]), msg)
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict((datatype.name.upper(), datatype)
                                 for datatype in targetCatalog.simpleDatatypes)

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            target_column.flags.extend(source_column.userType.flags)

        if source_type:
            target_version = Version.fromgrt(targetCatalog.version)
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            grt.log_debug3(
                "Migration",
                "Migrating source column '%s' - type: %s, length: %s\n" %
                (source_column.name, source_datatype, source_column.length))
            # string data types:
            target_datatype = ''
            # NCHAR and NVARCHAR in Microsoft SQL Server are always encoded as UCS-2 (UTF-16)
            if source_datatype in [
                    'NCHAR', 'NVARCHAR'
            ] and target_version.is_supported_mysql_version_at_least(5, 5, 0):
                target_column.characterSetName = 'utf8mb4'
            if source_datatype in ['VARCHAR', 'NVARCHAR']:
                if source_column.length == -1:  # VARCHAR(MAX) or NVARCHAR(MAX)
                    target_datatype = 'LONGTEXT'  #TODO: Give the user the choice for this target datatype
                elif 0 < source_column.length < 256:
                    target_datatype = 'VARCHAR'
                else:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    target_datatype = 'TEXT' if targetCatalog.version.majorNumber < 5 else 'VARCHAR'
            elif source_datatype in ['TEXT', 'NTEXT']:
                target_datatype = 'LONGTEXT'
            elif source_datatype in [
                    'CHAR', 'NCHAR'
            ]:  # MSSQL CHAR's (also VARCHAR's) max length is 8000 non Unicode characters
                if 0 < source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'TEXT'
            # integer data types:
            elif source_datatype in ['BIGINT', 'INT', 'SMALLINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            elif source_datatype == 'TINYINT':
                target_datatype = source_datatype
                target_column.precision = -1
                if 'UNSIGNED' not in target_column.flags:
                    target_column.flags.append(
                        'UNSIGNED')  # In MSSQL TINYINT is unsigned
            elif source_datatype == 'UNIQUEIDENTIFIER':
                target_datatype = 'VARCHAR'
                target_column.length = 64
                if 'UNIQUE' not in target_column.flags:
                    target_column.flags.append(
                        'UNIQUE')  # uniqueid must be UNIQUE... bug #43098
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            elif source_datatype == 'SYSNAME':  # the relevant info is in http://msdn.microsoft.com/en-us/library/ms191240(v=sql.105).aspx
                target_datatype = 'VARCHAR'
                target_column.length = 160
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            # floating point datatypes:
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                if source_column.scale == 0:
                    target_datatype = 'BIGINT'
                    if source_column.precision < 5:
                        target_datatype = 'SMALLINT'
                    elif source_column.precision < 7:
                        target_datatype = 'MEDIUMINT'
                    elif source_column.precision < 10:
                        target_datatype = 'INT'
                    target_column.precision = -1
                else:
                    target_datatype = 'DECIMAL'
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
            elif source_datatype == 'FLOAT':
                target_datatype = 'FLOAT'  # precision <= 24 fits in a MySQL FLOAT
                if source_column.precision > 24:
                    target_datatype = 'DOUBLE'
                    target_column.precision = -1
            elif source_datatype in ['MONEY', 'SMALLMONEY']:
                target_datatype = 'DECIMAL'
                target_column.precision = source_column.simpleType.numericPrecision
                target_column.scale = source_column.simpleType.numericScale
            # binary datatypes:
            elif source_datatype == 'IMAGE':
                target_datatype = 'LONGBLOB'
            elif source_datatype == 'VARBINARY' and source_column.length == -1:  # VARBINARY(MAX):
                target_datatype = 'LONGBLOB'
            # datetime datatypes:
            elif source_datatype in [
                    'DATETIME', 'SMALLDATETIME', 'DATETIME2', 'DATETIMEOFFSET'
            ]:
                target_datatype = 'DATETIME'
                target_column.length = -1
                if target_version.is_supported_mysql_version_at_least(
                        5, 6, 4) and source_datatype != 'SMALLDATETIME':
                    target_column.length = source_column.precision if source_column.precision < 7 else 6
            # timestamp datatypes
            # In MS SQL Server a nonnullable timestamp column is semantically equivalent to a binary(8) column,
            # and a nullable timestamp column is semantically equivalent to a varbinary(8) column.
            elif source_datatype in ['TIMESTAMP', 'ROWVERSION']:
                target_datatype = 'BINARY' if source_column.isNotNull else 'VARBINARY'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
                target_column.precision = -1
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
                target_column.precision = -1
                if target_version.is_supported_mysql_version_at_least(5, 6, 4):
                    target_column.precision = source_column.precision if source_column.precision < 7 else 6
            elif source_datatype == 'BIT':
                target_datatype = 'TINYINT'
                target_column.length = 1
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type BIT was migrated to TINYINT(1)")
            elif source_datatype == 'XML':
                target_datatype = 'TEXT'
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type XML was migrated to TEXT")
            elif source_datatype in ['GEOMETRY', 'GEOGRAPHY']:
                target_datatype = 'GEOMETRY'
            elif source_datatype == 'HIERARCHYID':
                target_datatype = 'VARCHAR'
                target_column.length = 255
                state.addMigrationLogEntry(
                    1, source_column, target_column,
                    "Source column type HIERARCHYID was migrated to VARCHAR(255)"
                )
            elif source_datatype == 'SQL_VARIANT':
                target_datatype = 'TEXT'
                state.addMigrationLogEntry(
                    1, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if target_datatype in mysql_simpleTypes:
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning(
                    "Migration", "MSSQL migrateTableColumnsToMySQL",
                    "Can't find datatype %s for type %s\n" %
                    (target_datatype, source_datatype))
                state.addMigrationLogEntry(
                    2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"'
                    % (target_column.name, source_column.owner.name,
                       source_datatype))
                return False

            return True
        else:
            state.addMigrationLogEntry(
                2, source_column, target_column,
                'Could not migrate type of column "%s" in "%s" (%s)' %
                (target_column.name, source_column.owner.name,
                 source_column.formattedRawType))
            return False

        return True
def local_run_cmd_linux(command, as_admin=False, admin_password=None, sudo_prefix=default_sudo_prefix, output_handler=None):
    # pexpect used only in linux
    import pexpect

    # wrap cmd
    if as_admin:
        command = wrap_for_sudo(command, sudo_prefix)

    script = command.strip(" ")
    if script is None or len(script) == 0:
        return None

    script_to_log = script

    temp_file = tempfile.NamedTemporaryFile()

    script = script + " > " + temp_file.name + " 2>&1; echo CMDRESULT$? >> " + temp_file.name

    result = None

    if "'" in script:
      log_debug2(_this_file, "local_run_cmd_linux(): ' found in script:\n%s\n" %  script )
      raise Exception("WBA: Internal error, unexpected character in script to be executed")

    if not as_admin:
      result = pexpect.run("/bin/bash -c '" + script + "'", withexitstatus=True)
    else:
      child = pexpect.spawn("/bin/bash -c '" + script + "'") # script should already have sudo prefix
      try:
          child.expect('assword', timeout=10)
          if admin_password is not None:
              child.write(admin_password + '\n')
          else:
              child.write("\n");
      except pexpect.TIMEOUT:
          #though we are not able to get the expected output, the password is fed anyway
          if admin_password is not None:
            child.write(admin_password + '\n')
          else:
            child.write("\n")
      except pexpect.EOF:
          #Nothing we can do, client is terminatd for some reason, try to read anything available
          log_debug2(_this_file,"local_run_cmd_linux(): Pipe from sudo is closed. script =\n%s\n" % script )

      text = ""

      if child.isalive():
          should_quit_read_loop = False
          while not should_quit_read_loop and child.isalive():
              try:
                  current_text = child.read_nonblocking(256, 30)
                  if current_text.find('EnterPasswordHere') >= 0:
                    try:
                      child.close()
                    except:
                      pass
                    temp_file.close()
                    raise InvalidPasswordError("Incorrect password for sudo")
                  else:
                    text += current_text
              except pexpect.TIMEOUT:
                  pass
              except pexpect.EOF:
                  should_quit_read_loop = True
      else:
          #Try to read
          text = child.read()

      child.close();

    text = temp_file.read()
    temp_file.close()

    idx = text.rfind("CMDRESULT")
    if idx != -1:
        retcode = int(text[idx + 9:].strip(" \r\t\n"))
        if output_handler:
            output_handler(text[0:idx])
        result = retcode

    log_debug3(_this_file, 'local_run_cmd_linux(): script="%s", ret="%s", text="%s"' % (script_to_log, str(result), text[:16].replace('\n', '')) )
    return result
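A hypothetical caller of local_run_cmd_linux, collecting the command output through output_handler and checking the exit status (the command and names are illustrative):

collected = []
status = local_run_cmd_linux("ls -l /etc/mysql", as_admin=False,
                             output_handler=collected.append)
if status == 0:
    log_debug3(_this_file, "listing:\n%s\n" % "".join(collected))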
    def server_stopped_event(self):
        log_debug3('%s:%s.server_stopped_event()' % (_this_file, self.__class__.__name__), 'Enter\n')
        self.running[0] = False
        self.poll_thread = None
        log_debug3('%s:%s.server_stopped_event()' % (_this_file, self.__class__.__name__), 'Leave\n')