def generate_report(self):
        self._schema_list = self.main._schema_selection_page.schema_selector.get_selected()
        self._report.set_value(
            "%s transferred.\n"
            % ("1 schema" if len(self._schema_list) == 1 else str(len(self._schema_list)) + " schemas")
        )
        for schema in self._schema_list:
            self._report.append_text(
                str("\n" + 30 * "=" + "\n" + ("Schema: " + schema).center(30) + "\n" + 30 * "=" + "\n")
            )
            try:
                idx = [log.logObject.owner.name for log in grt.root.wb.migration.dataTransferLog].index(schema)
            except ValueError:
                grt.log_warning("Wizard", 'Data transfer log entries for schema "%s" not found' % schema)
                continue
            schema_object = grt.root.wb.migration.dataTransferLog[idx].logObject.owner
            self._report.append_text(
                "\tTables: %d\n\tViews: %d\n\tRoutines: %d\n"
                % (len(schema_object.tables), len(schema_object.views), len(schema_object.routines))
            )

            self._report.append_text("\nData copy report:\n")
            for log in itertools.islice(grt.root.wb.migration.dataTransferLog, idx, None):
                if log.logObject.owner.__id__ != schema_object.__id__:
                    break
                self._report.append_text("\n".join("\t" + entry.name for entry in log.entries) + "\n")

            self._report.append_text("\n")
Example #2
    def __init__(self, server_instance_settings):
        AppView.__init__(self, False, "admin", True)

        server_profile = ServerProfile(server_instance_settings, False)

        self.ctrl_be = wb_admin_control.WbAdminControl(server_profile, connect_sql=True)

        self.ctrl_be.init()

        version = self.ctrl_be.get_server_version()
        if type(version) is tuple:
            valid_versions = ((4, 0), (4, 1), (5, 0), (5, 1), (5, 2), (5, 4), (5, 5), (5, 6), (6, 0))
            if version[:2] not in valid_versions:
                print version, "UNSUPPORTED"
                log_warning(
                    _this_file, "%s: Server version %s is NOT supported\n" % (self.__class__.__name__, str(version))
                )
            else:
                log_info(_this_file, "%s: Server version %s is supported\n" % (self.__class__.__name__, str(version)))

        self.on_close(wb_admin_utils.weakcb(self, "handle_on_close"))

        # Create sections and add them to the admin page.
        self.configuration = wb_admin_main.WbAdminMainView(server_profile, self.ctrl_be, self.monitor)
        self.add(self.configuration, True, True)
Example #3
    def __init__(self, server_instance_settings):
        AppView.__init__(self, False, "admin", True)

        server_profile = ServerProfile(server_instance_settings, False)

        self.ctrl_be = wb_admin_control.WbAdminControl(server_profile,
                                                       connect_sql=True)

        self.ctrl_be.init()

        version = self.ctrl_be.get_server_version()
        if type(version) is tuple:
            valid_versions = ((4, 0), (4, 1), (5, 0), (5, 1), (5, 2), (5, 4),
                              (5, 5), (5, 6), (6, 0))
            if version[:2] not in valid_versions:
                print version, "UNSUPPORTED"
                log_warning(
                    _this_file, "%s: Server version %s is NOT supported\n" %
                    (self.__class__.__name__, str(version)))
            else:
                log_info(
                    _this_file, "%s: Server version %s is supported\n" %
                    (self.__class__.__name__, str(version)))

        self.on_close(wb_admin_utils.weakcb(self, "handle_on_close"))

        # Create sections and add them to the admin page.
        self.configuration = wb_admin_main.WbAdminMainView(
            server_profile, self.ctrl_be, self.monitor)
        self.add(self.configuration, True, True)
Example #4
    def refresh(self):
        self._object_dict = {}
        self._error_tables = []
        self._tree.clear()
        for log in self.main.plan.state.creationLog:
            node = self._tree.add_node()
            obj = log.logObject
            icon = "GrtObject.16x16.png"
            for c in [
                    grt.classes.db_Schema, grt.classes.db_Table,
                    grt.classes.db_View, grt.classes.db_Routine,
                    grt.classes.db_Trigger
            ]:
                if isinstance(obj, c):
                    icon = c.__grtclassname__ + ".16x16.png"
                    break
            if not obj:
                grt.log_warning(
                    "Migration",
                    "Object creation log '%s' referenced no object" % log.name)
                continue
            full_name = obj.name
            o = obj.owner
            while o:
                full_name = o.name + "." + full_name
                if isinstance(o, grt.classes.db_Schema):
                    break
                o = o.owner
            node.set_string(0, full_name)
            node.set_icon_path(0, icon)
            node.set_tag(log.__id__)
            self._object_dict[log.__id__] = log
            text = []
            worst = None
            for entry in log.entries:
                if worst is None:
                    worst = entry.entryType
                else:
                    worst = max(worst, entry.entryType)
                if entry.entryType == 1:
                    text.append("WARNING: %s" % entry.name)
                elif entry.entryType == 2:
                    text.insert(0, "ERROR: %s" % entry.name)
                    if isinstance(obj, grt.classes.db_Table):
                        self._error_tables.append(obj)
                else:
                    text.append("Script executed successfully")

            if worst == 0:
                node.set_icon_path(1, "task_checked.png")
            elif worst == 1:
                node.set_icon_path(1, "task_warning.png")
            elif worst == 2:
                node.set_icon_path(1, "task_error.png")
            node.set_string(1, ",\n".join(text))
Example #5
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog
    
        mysql_simpleTypes = dict( (datatype.name.upper(), datatype) for datatype in targetCatalog.simpleDatatypes )

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType
            target_column.flags.extend(source_column.userType.flags)

        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name
            target_datatype = source_datatype
            
            # check the type mapping table
            typemap = self.findDatatypeMapping(state, source_column, source_datatype)
            if typemap:
                if not mysql_simpleTypes.has_key(typemap.targetDatatypeName.upper()):
                    grt.log_warning("migrateTableColumnsToMySQL", "Can't find mapped datatype %s for type %s\n" % (typemap.targetDatatypeName, source_datatype))
                    state.addMigrationLogEntry(2, source_column, target_column, 
                        'Unknown mapped datatype "%s" for source type "%s" (check type mapping table)' % (typemap.targetDatatypeName, source_datatype) )
                    return False

                target_column.simpleType = mysql_simpleTypes[typemap.targetDatatypeName.upper()]
                if typemap.length > -2:
                    target_column.length = typemap.length
                if typemap.scale > -2:
                    target_column.scale = typemap.scale
                if typemap.precision > -2:
                    target_column.precision = typemap.precision
                if typemap.isUnsigned > 0:
                    if "UNSIGNED" not in target_column.flags:
                        target_column.flags.append("UNSIGNED")

            # try a direct mapping to mysql types
            elif mysql_simpleTypes.has_key(target_datatype.upper()):
                target_column.simpleType = mysql_simpleTypes[target_datatype.upper()]
            else:
                grt.log_warning("migrateTableColumnsToMySQL", "Can't find datatype %s for type %s\n" % (target_datatype, source_datatype))
                state.addMigrationLogEntry(2, source_column, target_column, 
                    'Unknown datatype "%s"' % (source_datatype) )
                return False

            return True
        else:
            state.addMigrationLogEntry(2, source_column, target_column, 
                    'Could not migrate type of column "%s" in "%s" (%s)' % (target_column.name, source_column.owner.name, source_column.formattedRawType) )
            return False

        return True
Example #6
    def _check_server_version(self):
        version = self.ctrl_be.get_server_version()
        if type(version) is tuple:
            valid_versions = ((4, 0), (4, 1), (5, 0), (5, 1), (5, 2), (5, 4), (5, 5), (5, 6), (5, 7))
            if version[:2] not in valid_versions:
                log_warning(_this_file, "%s: Server version %s is NOT supported\n" % (self.__class__.__name__, str(version)))
                Utilities.show_error("Unsupported Server Version", "The version of the server you're trying to connect to is %i.%i, which is not supported by Workbench." % version[:2],
                                     "Close", "Ignore", "")
                return False
            else:
                log_info(_this_file, "%s: Server version %s is supported\n" % (self.__class__.__name__, str(version)))
                return True
        return None
Example #7
def perform_fabric_operation(conn, name, callback=None, callback_params=None):
    """
    Current Fabric operations are done using the next cycle:
    - Open Connection
    - Execute Specific Operation
    - Close Connection

    This method allows performing these operations using this life cycle, removing from the 
    specific operation the burden of common operations such as password retrieval and exception 
    handling.

    The specific operation should be done on a function received as callback.

    To pass data from the caller to the actual Fabric operation method use the
    callback_params, this method will also include the connection_id on such params.
    """
    error = ""

    # Retrieves the Fabric node connection data
    host = conn.parameterValues["hostName"]
    port = conn.parameterValues["port"]
    user = conn.parameterValues["userName"]

    try:
        # Retrieves the required password
        accepted, password = mforms.Utilities.find_or_ask_for_password(
            "Fabric Node Connection", '%s@%s' % (host, port), user, False)
        if accepted:

            # Opens a connection to the Fabric instance
            conn_id = grt.modules.WbFabricInterface.openConnection(
                conn, password)

            if conn_id > 0:
                # Executes the callback function which will interact with Fabric using the
                # created connection.
                if callback:
                    if callback_params:
                        callback_params['conn_id'] = conn_id
                    else:
                        callback_params = {'conn_id': conn_id}

                    error = callback(callback_params)

                # Finally closes the connection
                grt.modules.WbFabricInterface.closeConnection(conn_id)
    except OperationCancelledError, e:
        error = "Operation Cancelled"
        log_warning("WBFabric Module", "User cancelled %s\n" % name)
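
A minimal usage sketch of the life cycle described in the docstring above; the callback name, the Fabric command string and the empty-string-on-success convention are illustrative assumptions, while the 'conn_id' injection and the error-string return path come from the code itself.

def _lookup_groups(params):
    # 'conn_id' is injected by perform_fabric_operation before the callback runs
    conn_id = params['conn_id']
    # _execute_fabric_command is the helper shown further below; the command is illustrative
    _execute_fabric_command(conn_id, "group lookup_groups")
    return ""  # an empty string is treated as "no error" by the caller

# 'conn' is assumed to be the Fabric node connection (a db_mgmt_Connection with
# hostName, port and userName in its parameterValues, as read above)
error = perform_fabric_operation(conn, "group lookup", callback=_lookup_groups)
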
Example #8
def perform_fabric_operation(conn, name, callback = None, callback_params = None):
    """
    Current fabric operations are done using the next cycle:
    - Open Connection
    - Execute Specific Operation
    - Close Connection

    This method allows performing these operations using this life cycle, removing from the 
    specific operation the burden of common operations such as password retrieval and exception 
    handling.

    The specific operation should be done on a function received as callback.

    To pass data from the caller to the actual fabric operation method use the
    callback_params, this method will also include the connection_id on such params.
    """
    error = ""

    # Retrieves the fabric node connection data
    host = conn.parameterValues["hostName"]
    port = conn.parameterValues["port"]
    user = conn.parameterValues["userName"]

    try:
        # Retrieves the required password
        accepted, password = mforms.Utilities.find_or_ask_for_password("Fabric Node Connection", '%s@%s' % (host, port), user, False)
        if accepted:

            # Opens a connection to the fabric instance
            conn_id = grt.modules.WbFabricInterface.openConnection(conn, password)

            if conn_id > 0:
                # Executes the callback function which will interact with fabric using the
                # created connection.
                if callback:
                    if callback_params:
                        callback_params['conn_id'] = conn_id
                    else:
                        callback_params = {'conn_id':conn_id}

                    error = callback(callback_params)

                # Finally closes the connection
                grt.modules.WbFabricInterface.closeConnection(conn_id)
    except OperationCancelledError, e:
        error = "Operation Cancelled"
        log_warning("WBFabric Module", "User cancelled %s\n" % name)
Example #9
    def refresh(self):
        self._object_dict = {}
        self._error_tables = []
        self._tree.clear()
        for log in self.main.plan.state.creationLog:
            node = self._tree.add_node()
            obj = log.logObject
            icon = "GrtObject.16x16.png"
            for c in [grt.classes.db_Schema, grt.classes.db_Table, grt.classes.db_View, grt.classes.db_Routine, grt.classes.db_Trigger]:
                if isinstance(obj, c):
                    icon = c.__grtclassname__+".16x16.png"
                    break
            if not obj:
                grt.log_warning("Migration", "Object creation log '%s' referenced no object" % log.name)
                continue
            full_name = obj.name
            o = obj.owner
            while o:
                full_name = o.name + "." + full_name
                if isinstance(o, grt.classes.db_Schema):
                    break
                o = o.owner
            node.set_string(0, full_name)
            node.set_icon_path(0, icon)
            node.set_tag(log.__id__)
            self._object_dict[log.__id__] = log
            text = []
            worst = None
            for entry in log.entries:
                worst = max(worst, entry.entryType)
                if entry.entryType == 1:
                    text.append("WARNING: %s" % entry.name)
                elif entry.entryType == 2:
                    text.insert(0, "ERROR: %s" % entry.name)
                    if isinstance(obj, grt.classes.db_Table):
                        self._error_tables.append(obj)
                else:
                    text.append("Script executed successfully")

            if worst == 0:
                node.set_icon_path(1, "task_checked.png")
            elif worst == 1:
                node.set_icon_path(1, "task_warning.png")
            elif worst == 2:
                node.set_icon_path(1, "task_error.png")
            node.set_string(1, ",\n".join(text))
Example #10
    def strip_ast(self, node):
        # remove useless nodes from the tree
        symbol, value, children = node

        if len(children) == 1 and node_symbol(children[0]) in STRIP_TOKENS:
            if node_value(children[0]):
                grt.log_warning("Reformatter", "Node unexpectedly has a value: %s\n" % repr(children[0]))
                return node
            children = node_children(children[0])

        new_children = []
        # traverse the AST depth-first and build up the formatted expression bottom up
        for node in children:
            processed_node = self.strip_ast(node)
            if processed_node:
                new_children.append(processed_node)

        return symbol, value, new_children
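
For orientation, each AST node handled above is a (symbol, value, children) tuple. Assuming node_symbol/node_value/node_children are the plain tuple accessors and that the wrapper token below is listed in STRIP_TOKENS (both assumptions; the real definitions live in the reformatter module), stripping collapses a single-child wrapper node like this:

# Hypothetical input: a wrapper node with no value of its own and a single child
tree = ('select_item', None, [('expr_wrapper', None, [('literal', '42', [])])])

# With 'expr_wrapper' in STRIP_TOKENS, self.strip_ast(tree) would return the same
# node with the wrapper collapsed away:
#   ('select_item', None, [('literal', '42', [])])
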
Example #11
    def validate(self):
        # check if target and source catalogs are the same in the host
        plan = self.main.plan
        if plan.migrationSource.connection.hostIdentifier == plan.migrationTarget.connection.hostIdentifier:
            for schema in plan.migrationSource.catalog.schemata:
                for tschema in plan.migrationTarget.catalog.schemata:
                    if tschema.oldName == schema.name:
                        if tschema.name == schema.name:
                            mforms.Utilities.show_error("Validation Error",
                                "The source and target are in the same server and a schema being migrated has identical names.\nPlease rename the target schema to allow the migration to continue.",
                                "OK", "", "")
                            idx = self._filter.index_of_item_with_title('All Objects')
                            if idx == -1:
                                grt.log_warning('Migration', 'Could not get the index of the "All Objects" view')
                            else:
                                self._filter.set_selected(idx)
                                self._filter.call_changed_callback()
                            return False
        return True
Example #12
    def validate(self):
        # check if target and source catalogs are the same in the host
        plan = self.main.plan
        if plan.migrationSource.connection.hostIdentifier == plan.migrationTarget.connection.hostIdentifier:
            for schema in plan.migrationSource.catalog.schemata:
                for tschema in plan.migrationTarget.catalog.schemata:
                    if tschema.oldName == schema.name:
                        if tschema.name == schema.name:
                            mforms.Utilities.show_error("Validation Error",
                                "The source and target are in the same server and a schema being migrated has identical names.\nPlease rename the target schema to allow the migration to continue.",
                                "OK", "", "")
                            idx = self._filter.index_of_item_with_title('All Objects')
                            if idx == -1:
                                grt.log_warning('Migration', 'Could not get the index of the "All Objects" view')
                            else:
                                self._filter.set_selected(idx)
                                self._filter.call_changed_callback()
                            return False
        return True
Example #13
    def strip_ast(self, node):
        # remove useless nodes from the tree
        symbol, value, children = node

        if len(children) == 1 and node_symbol(children[0]) in STRIP_TOKENS:
            if node_value(children[0]):
                grt.log_warning(
                    "Reformatter",
                    "Node unexpectedly has a value: %s\n" % repr(children[0]))
                return node
            children = node_children(children[0])

        new_children = []
        # traverse the AST depth-first and build up the formatted expression bottom up
        for node in children:
            processed_node = self.strip_ast(node)
            if processed_node:
                new_children.append(processed_node)

        return symbol, value, new_children
Example #14
    def generate_report(self):
        self._schema_list = self.main._schema_selection_page.schema_selector.get_selected(
        )
        self._report.set_value('%s transferred.\n' %
                               ('1 schema' if len(self._schema_list) == 1 else
                                str(len(self._schema_list)) + ' schemas'))
        for schema in self._schema_list:
            self._report.append_text(
                str('\n' + 30 * '=' + '\n' + ('Schema: ' + schema).center(30) +
                    '\n' + 30 * '=' + '\n'))
            try:
                idx = [
                    log.logObject.owner.name
                    for log in grt.root.wb.migration.dataTransferLog
                ].index(schema)
            except ValueError:
                grt.log_warning(
                    'Wizard',
                    'Data transfer log entries for schema "%s" not found' %
                    schema)
                continue
            schema_object = grt.root.wb.migration.dataTransferLog[
                idx].logObject.owner
            self._report.append_text(
                '\tTables: %d\n\tViews: %d\n\tRoutines: %d\n' %
                (len(schema_object.tables), len(
                    schema_object.views), len(schema_object.routines)))

            self._report.append_text('\nData copy report:\n')
            for log in itertools.islice(grt.root.wb.migration.dataTransferLog,
                                        idx, None):
                if log.logObject.owner.__id__ != schema_object.__id__:
                    break
                self._report.append_text('\n'.join('\t' + entry.name
                                                   for entry in log.entries) +
                                         '\n')

            self._report.append_text('\n')
Example #15
    def fetch_windows_shell_info(self):
        # get some info from the remote shell
        result, code = self.get_cmd_output("chcp.com")
        if code == 0:
            result = result.strip(" .\r\n").split()
            if len(result) > 0:
                self.cmd_output_encoding = "cp" + result[-1]
        else:
            print "WARNING: Unable to determine codepage from shell: %s" % result
            log_warning(_this_file, '%s.fetch_windows_shell_info(): WARNING: Unable to determine codepage from shell: "%s"\n' % (self.__class__.__name__, str(result)) )

        result, code = self.get_cmd_output("echo %PROCESSOR_ARCHITECTURE%")
        if result:
            result = result.strip()
        ProgramFilesVar = None
        x86var = None
        if result != "x86":  # we are on x64 win in x64 mode
            x86var = WIN_PROGRAM_FILES_X86_VAR
            ProgramFilesVar = WIN_PROGRAM_FILES_VAR
        else:
            result, code = self.get_cmd_output("echo %PROCESSOR_ARCHITEW6432%")
            if result:
                result = result.strip()
            if result == "%PROCESSOR_ARCHITEW6432%":  # we are on win 32
                x86var = WIN_PROGRAM_FILES_VAR
                ProgramFilesVar = WIN_PROGRAM_FILES_VAR
            else:  # 32bit app on x64 win
                x86var = WIN_PROGRAM_FILES_VAR
                ProgramFilesVar = WIN_PROGRAM_FILES_X64_VAR

        result, code = self.get_cmd_output("echo "+ ProgramFilesVar)
        if code == 0:
            self.target_shell_variables["%ProgramFiles%"] = result.strip("\r\n")
            if ProgramFilesVar != "%ProgramFiles%":
                self.target_shell_variables[ProgramFilesVar] = result.strip("\r\n")
        else:
            print "WARNING: Unable to fetch ProgramFiles value in Windows machine: %s"%result
            log_warning(_this_file, '%s.fetch_windows_shell_info(): WARNING: Unable to fetch ProgramFiles value in Windows machine: "%s"\n' % (self.__class__.__name__, str(result)) )

        # this one only exists in 64bit windows
        result, code = self.get_cmd_output("echo "+ x86var)
        if code == 0:
            self.target_shell_variables["%ProgramFiles(x86)%"] = result.strip("\r\n")
        else:
            print "WARNING: Unable to fetch ProgramFiles(x86) value in local Windows machine: %s"%result
            log_warning(_this_file, '%s.fetch_windows_shell_info(): WARNING: Unable to fetch ProgramFiles(x86) value in local Windows machine: "%s"\n' % (self.__class__.__name__, str(result)) )
        
        log_debug(_this_file, '%s.fetch_windows_shell_info(): Encoding: "%s", Shell Variables: "%s"\n' % (self.__class__.__name__, self.cmd_output_encoding, str(self.target_shell_variables)))
Example #16
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog
    
        mysql_simpleTypes = dict( (datatype.name.upper(), datatype) for datatype in targetCatalog.simpleDatatypes )
        
        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            target_column.flags.extend(source_column.userType.flags)

        # SQL expression to use for converting the column data to the target type
        # eg.: CAST(? as NVARCHAR(max))
        type_cast_expression = None
        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            # string data types:
            target_datatype = ''
            if source_datatype in ['VARCHAR', 'NVARCHAR', 'UNIVARCHAR', 'TEXT', 'NTEXT', 'UNITEXT']:
                if 0 <= source_column.length < 256:
                    target_datatype = 'VARCHAR'
                elif 0 <= source_column.length < 65536:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    if targetCatalog.version.majorNumber < 5:
                        target_datatype = 'MEDIUMTEXT'
                    else:
                        target_datatype = 'VARCHAR'
                else:
                    target_datatype = 'LONGTEXT'
            elif source_datatype in ['CHAR', 'NCHAR', 'UNICHAR']:
                if source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'LONGTEXT'
            # integer data types:
            elif source_datatype in ['BIGINT', 'INT', 'SMALLINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            elif source_datatype == 'TINYINT':
                target_datatype = source_datatype
                target_column.precision = -1
                if 'UNSIGNED' not in target_column.flags:
                    target_column.flags.append('UNSIGNED')  # In Sybase TINYINT is unsigned
            elif source_datatype == 'SYSNAME':
                target_datatype = 'VARCHAR'
                type_cast_expression = "CONVERT(VARCHAR(30), ?)"
                target_column.length = 30
                state.addMigrationLogEntry(0, source_column, target_column,
                        "Source column type %s was migrated to %s(%s)" % (source_datatype, target_datatype, target_column.length))
            # NOTE: this branch repeats the 'SYSNAME' condition above, so it is unreachable as written
            elif source_datatype == 'SYSNAME':
                target_datatype = 'VARCHAR'
                type_cast_expression = "CONVERT(VARCHAR(255), ?)"
                target_column.length = 255
                state.addMigrationLogEntry(0, source_column, target_column,
                        "Source column type %s was migrated to %s(%s)" % (source_datatype, target_datatype, target_column.length))
            # floating point datatypes:
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                target_datatype = 'DECIMAL'
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
                target_column.precision = -1
            elif source_datatype == 'FLOAT':
                target_datatype = 'FLOAT' if source_column.length == 4 else 'DOUBLE'
                target_column.precision = -1
            elif source_datatype in ['MONEY', 'SMALLMONEY']:
                target_datatype = 'DECIMAL'
                target_column.precision = source_column.simpleType.numericPrecision
                target_column.scale = source_column.simpleType.numericScale
            # binary datatypes:
            elif source_datatype in ['IMAGE', 'BINARY', 'VARBINARY']:
                if 0 <= source_column.length < 256:
                    if source_datatype == 'IMAGE':
                        target_datatype = 'TINYBLOB'
                    else:
                        target_datatype = source_datatype
                elif 0 <= source_column.length < 65536:
                    target_datatype = 'MEDIUMBLOB'
                else:
                    target_datatype = 'LONGBLOB'
            # datetime datatypes:
            elif source_datatype in ['DATETIME', 'SMALLDATETIME']:
                target_datatype = 'DATETIME'
            # timestamp datatypes
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
            elif source_datatype == 'BIT':
                target_datatype = 'TINYINT'
                target_column.length = 1
                state.addMigrationLogEntry(0, source_column, target_column,
                      "Source column type BIT was migrated to TINYINT(1)")
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if mysql_simpleTypes.has_key(target_datatype):
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning("Sybase migrateTableColumnsToMySQL", "Can't find datatype %s for type %s\n" % (target_datatype, source_datatype))
                state.addMigrationLogEntry(2, source_column, target_column, 
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"' % (target_column.name, source_column.owner.name, source_datatype) )
                return False
            
            if type_cast_expression:
                target_column.owner.customData["columnTypeCastExpression:%s" % target_column.name] = "%s as ?" % type_cast_expression

            return True
        else:
            state.addMigrationLogEntry(2, source_column, target_column, 
                    'Could not migrate type of column "%s" in "%s" (%s)' % (target_column.name, source_column.owner.name, source_column.formattedRawType) )
            return False

        return True
Example #17
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict((datatype.name.upper(), datatype)
                                 for datatype in targetCatalog.simpleDatatypes)

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            target_column.flags.extend(source_column.userType.flags)

        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            # string data types:
            target_datatype = ''
            if source_datatype in ['VARCHAR', 'NVARCHAR']:
                if 0 <= source_column.length < 256:
                    target_datatype = 'VARCHAR'
                elif 0 <= source_column.length < 65536:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    if targetCatalog.version.majorNumber < 5:
                        target_datatype = 'MEDIUMTEXT'
                    else:
                        target_datatype = 'VARCHAR'
                else:
                    target_datatype = 'LONGTEXT'
            elif source_datatype in ['CHAR', 'NCHAR']:
                if source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'LONGTEXT'
            # integer data types:
            elif source_datatype in ['BIGINT', 'INT', 'SMALLINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            # floating point datatypes:
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                target_datatype = 'DECIMAL'
            elif source_datatype in ['REAL', 'FLOAT']:
                target_datatype = 'FLOAT'
            elif source_datatype == 'DOUBLE PRECISION':
                target_datatype = 'DOUBLE'
            # binary datatypes:
            elif source_datatype == 'BLOB':
                if 0 <= source_column.length < 2**8:
                    target_datatype = 'TINYBLOB'
                elif 0 <= source_column.length < 2**16:
                    target_datatype = 'BLOB'
                elif 0 <= source_column.length < 2**24:
                    target_datatype = 'MEDIUMBLOB'
                else:
                    target_datatype = 'LONGBLOB'
            elif source_datatype == 'CLOB':
                if 0 <= source_column.length < 2**8:
                    target_datatype = 'TINYTEXT'
                elif 0 <= source_column.length < 2**16:
                    target_datatype = 'TEXT'
                elif 0 <= source_column.length < 2**24:
                    target_datatype = 'MEDIUMTEXT'
                else:
                    target_datatype = 'LONGTEXT'
            # datetime datatypes:
            elif source_datatype == 'TIMESTAMP':
                target_datatype = 'TIMESTAMP'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
            elif source_datatype in ['BIT', 'BIT VARYING']:
                target_datatype = 'BIT'
            elif source_datatype == 'BOOLEAN':
                target_datatype = 'TINYINT'
                target_column.length = 1
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type BOOLEAN was migrated to TINYINT(1)")
            elif source_datatype == 'XML':
                target_datatype = 'TEXT'
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type XML was migrated to TEXT")
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if target_datatype in mysql_simpleTypes:
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning(
                    "SQL-92 migrateTableColumnsToMySQL",
                    "Can't find datatype %s for type %s\n" %
                    (target_datatype, source_datatype))
                state.addMigrationLogEntry(
                    2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"'
                    % (target_column.name, source_column.owner.name,
                       source_datatype))
                return False

            return True
        else:
            state.addMigrationLogEntry(
                2, source_column, target_column,
                'Could not migrate type of column "%s" in "%s" (%s)' %
                (target_column.name, source_column.owner.name,
                 source_column.formattedRawType))
            return False

        return True
Example #18
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict((datatype.name.upper(), datatype)
                                 for datatype in targetCatalog.simpleDatatypes)

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            if not source_type and source_column.userType.sqlDefinition.startswith(
                    'enum('):
                target_column.simpleType = mysql_simpleTypes['ENUM']
                target_column.datatypeExplicitParams = source_column.userType.sqlDefinition[
                    4:]
                return True

            target_column.flags.extend(source_column.userType.flags)

        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            # string data types:
            target_datatype = ''
            if source_datatype == 'VARCHAR':
                if 0 <= source_column.length < 256:
                    target_datatype = 'VARCHAR'
                elif 0 <= source_column.length < 65536:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    if targetCatalog.version.majorNumber < 5:
                        target_datatype = 'MEDIUMTEXT'
                    else:
                        target_datatype = 'VARCHAR'
                else:
                    target_datatype = 'LONGTEXT'
            elif source_datatype == 'CHAR':
                if source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'LONGTEXT'
            # integer data types:
            elif source_datatype in ['SMALLINT', 'INT', 'BIGINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            elif source_datatype == 'SMALLSERIAL':
                target_datatype = 'SMALLINT'
                target_column.autoIncrement = 1
            elif source_datatype == 'SERIAL':
                target_datatype = 'INTEGER'
                target_column.autoIncrement = 1
            elif source_datatype == 'BIGSERIAL':
                target_datatype = 'BIGINT'
                target_column.autoIncrement = 1
            # numeric
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                target_datatype = 'DECIMAL'
            elif source_datatype == 'MONEY':
                target_datatype = 'DECIMAL'
                target_column.precision = 19
                target_column.scale = 2
            # floating point datatypes:
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
            elif source_datatype == 'DOUBLE PRECISION':
                target_datatype = 'DOUBLE'
            # binary datatypes:
            elif source_datatype == 'BYTEA':
                target_datatype = 'LONGBLOB'
            elif source_datatype == 'TEXT':
                target_datatype = 'LONGTEXT'
            # datetime datatypes:
            elif source_datatype == 'TIMESTAMP':
                target_datatype = 'DATETIME'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
            elif source_datatype == 'INTERVAL':
                target_datatype = 'TIME'
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type INTERVAL was migrated to TIME")
            elif source_datatype in ['BIT', 'BIT VARYING']:
                target_datatype = 'BIT'
            elif source_datatype == 'BOOLEAN':
                target_datatype = 'TINYINT'
                target_column.length = 1
            # network address types
            elif source_datatype == 'CIDR':
                target_datatype = 'VARCHAR'
                target_column.length = 43
            elif source_datatype == 'INET':
                target_datatype = 'VARCHAR'
                target_column.length = 43
            elif source_datatype == 'MACADDR':
                target_datatype = 'VARCHAR'
                target_column.length = 17
            # others
            elif source_datatype == 'UUID':
                target_datatype = 'VARCHAR'
                target_column.length = 36
            elif source_datatype in [
                    'XML', 'JSON', 'TSVECTOR', 'TSQUERY', 'ARRAY'
            ]:
                target_datatype = 'LONGTEXT'
            elif source_datatype in [
                    'POINT', 'LINE', 'LSEG', 'BOX', 'PATH', 'POLYGON',
                    'CIRCLE', 'TXID_SNAPSHOT'
            ]:
                target_datatype = 'VARCHAR'
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if mysql_simpleTypes.has_key(target_datatype):
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning(
                    "PostgreSQL migrateTableColumnsToMySQL",
                    "Can't find datatype %s for type %s\n" %
                    (target_datatype, source_datatype))
                state.addMigrationLogEntry(
                    2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"'
                    % (target_column.name, source_column.owner.name,
                       source_datatype))
                return False

            return True
        else:
            state.addMigrationLogEntry(
                2, source_column, target_column,
                'Could not migrate type of column "%s" in "%s" (%s)' %
                (target_column.name, source_column.owner.name,
                 source_column.formattedRawType))
            return False

        return True
Example #19
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict((datatype.name.upper(), datatype)
                                 for datatype in targetCatalog.simpleDatatypes)

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            target_column.flags.extend(source_column.userType.flags)

        if source_type:
            target_version = Version.fromgrt(targetCatalog.version)
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            grt.log_debug3(
                "Migration",
                "Migrating source column '%s' - type: %s, length: %s\n" %
                (source_column.name, source_datatype, source_column.length))
            # string data types:
            target_datatype = ''
            #NCHAR and NVARCHAR in Microsoft SQL Server is always encoded as UCS-2 (UTF-16)
            if source_datatype in [
                    'NCHAR', 'NVARCHAR'
            ] and target_version.is_supported_mysql_version_at_least(5, 5, 0):
                target_column.characterSetName = 'utf8mb4'
            if source_datatype in ['VARCHAR', 'NVARCHAR']:
                if source_column.length == -1:  # VARCHAR(MAX) or NVARCHAR(MAX)
                    target_datatype = 'LONGTEXT'  #TODO: Give the user the choice for this target datatype
                elif 0 < source_column.length < 256:
                    target_datatype = 'VARCHAR'
                else:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    target_datatype = 'TEXT' if targetCatalog.version.majorNumber < 5 else 'VARCHAR'
            elif source_datatype in ['TEXT', 'NTEXT']:
                target_datatype = 'LONGTEXT'
            elif source_datatype in [
                    'CHAR', 'NCHAR'
            ]:  # MSSQL CHAR's (also VARCHAR's) max length is 8000 non Unicode characters
                if 0 < source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'TEXT'
            # integer data types:
            elif source_datatype in ['BIGINT', 'INT', 'SMALLINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            elif source_datatype == 'TINYINT':
                target_datatype = source_datatype
                target_column.precision = -1
                if 'UNSIGNED' not in target_column.flags:
                    target_column.flags.append(
                        'UNSIGNED')  # In MSSQL TINYINT is unsigned
            elif source_datatype == 'UNIQUEIDENTIFIER':
                target_datatype = 'VARCHAR'
                target_column.length = 64
                if 'UNIQUE' not in target_column.flags:
                    target_column.flags.append(
                        'UNIQUE')  # uniqueid must be UNIQUE... bug #43098
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            elif source_datatype == 'SYSNAME':  # the relevant info is in http://msdn.microsoft.com/en-us/library/ms191240(v=sql.105).aspx
                target_datatype = 'VARCHAR'
                target_column.length = 160
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            # floating point datatypes:
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                if source_column.scale == 0:
                    target_datatype = 'BIGINT'
                    if source_column.precision < 5:
                        target_datatype = 'SMALLINT'
                    elif source_column.precision < 7:
                        target_datatype = 'MEDIUMINT'
                    elif source_column.precision < 10:
                        target_datatype = 'INT'
                    target_column.precision = -1
                else:
                    target_datatype = 'DECIMAL'
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
            elif source_datatype == 'FLOAT':
                if source_column.precision > 24:
                    target_datatype = 'DOUBLE'
                    target_column.precision = -1
                else:
                    target_datatype = 'FLOAT'  # FLOAT(p) with p <= 24 is single precision
            elif source_datatype in ['MONEY', 'SMALLMONEY']:
                target_datatype = 'DECIMAL'
                target_column.precision = source_column.simpleType.numericPrecision
                target_column.scale = source_column.simpleType.numericScale
            # binary datatypes:
            elif source_datatype == 'IMAGE':
                target_datatype = 'LONGBLOB'
            elif source_datatype == 'VARBINARY' and source_column.length == -1:  # VARBINARY(MAX):
                target_datatype = 'LONGBLOB'
            # datetime datatypes:
            elif source_datatype in [
                    'DATETIME', 'SMALLDATETIME', 'DATETIME2', 'DATETIMEOFFSET'
            ]:
                target_datatype = 'DATETIME'
                target_column.length = -1
                if target_version.is_supported_mysql_version_at_least(
                        5, 6, 4) and source_datatype != 'SMALLDATETIME':
                    target_column.length = source_column.precision if source_column.precision < 7 else 6
            # timestamp datatypes
            # In MS SQL Server a nonnullable timestamp column is semantically equivalent to a binary(8) column,
            # and a nullable timestamp column is semantically equivalent to a varbinary(8) column.
            elif source_datatype in ['TIMESTAMP', 'ROWVERSION']:
                target_datatype = 'BINARY' if source_column.isNotNull else 'VARBINARY'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
                target_column.precision = -1
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
                target_column.precision = -1
                if target_version.is_supported_mysql_version_at_least(5, 6, 4):
                    target_column.precision = source_column.precision if source_column.precision < 7 else 6
            elif source_datatype == 'BIT':
                target_datatype = 'TINYINT'
                target_column.length = 1
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type BIT was migrated to TINYINT(1)")
            elif source_datatype == 'XML':
                target_datatype = 'TEXT'
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type XML was migrated to TEXT")
            elif source_datatype in ['GEOMETRY', 'GEOGRAPHY']:
                target_datatype = 'GEOMETRY'
            elif source_datatype == 'HIERARCHYID':
                target_datatype = 'VARCHAR'
                target_column.length = 255
                state.addMigrationLogEntry(
                    1, source_column, target_column,
                    "Source column type HIERARCHYID was migrated to VARCHAR(255)"
                )
            elif source_datatype == 'SQL_VARIANT':
                target_datatype = 'TEXT'
                state.addMigrationLogEntry(
                    1, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if mysql_simpleTypes.has_key(target_datatype):
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning(
                    "MSSQL migrateTableColumnsToMySQL",
                    "Can't find datatype %s for type %s\n" %
                    (target_datatype, source_datatype))
                state.addMigrationLogEntry(
                    2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"'
                    % (target_column.name, source_column.owner.name,
                       source_datatype))
                return False

            return True
        else:
            state.addMigrationLogEntry(
                2, source_column, target_column,
                'Could not migrate type of column "%s" in "%s" (%s)' %
                (target_column.name, source_column.owner.name,
                 source_column.formattedRawType))
            return False

        return True
Example #20
def connectionFromString(connstr):
    valid = False
    
    def get_driver(name):
        for d in grt.root.wb.rdbmsMgmt.rdbms[0].drivers:
            if d.name == name:
                return d
        return None
    
    # parse as a one of our connection strings
    g = re.match("(.*?)(?::(.*))?@(.*?)(?::([0-9]+|)(?::(.+|))?)?$", connstr)
    if g:
        user, password, host, port, socket = g.groups()
        valid = True
    else:
        user, password, host, port, socket = None, None, None, None, None
        # check if this is a mysql cmdline client command
        tokens = shlex.split(connstr.strip())
        if tokens:
            print tokens
            if tokens[0].endswith("mysql") or tokens[0].endswith("mysql.exe"):
                i = 1
                valid = True
                while i < len(tokens):
                    if tokens[i] == "-u":
                        i += 1
                        user = tokens[i]
                    elif tokens[i].startswith("-u"):
                        user = tokens[i][2:]
                    elif tokens[i] == "-h":
                        i += 1
                        host = tokens[i]
                    elif tokens[i].startswith("-h"):
                        host = tokens[i][2:]
                    elif tokens[i] == "-p":
                        i += 1
                        password = tokens[i]
                    elif tokens[i].startswith("-p"):
                        password = tokens[i][2:] # noqa
                    elif tokens[i] == "-P":
                        i += 1
                        port = tokens[i]
                    elif tokens[i].startswith("-P"):
                        port = tokens[i][2:]
                    elif tokens[i] == "-S":
                        i += 1
                        socket = tokens[i]
                    elif tokens[i].startswith("-S"):
                        socket = tokens[i][2:]
                    i += 1
    if valid:
        if port:
            try:
                port = int(port)
            except ValueError:
                log_warning("wb_utils", "Error parsing connstring, port value '%s' should be a number\n" % port)
                port = None
        if not port:
            port = 3306
        conn = grt.classes.db_mgmt_Connection()
        conn.owner = grt.root.wb.rdbmsMgmt
        conn.name = connstr
        if socket:
            conn.driver = get_driver("MysqlNativeSocket")
        else:
            conn.driver = get_driver("MysqlNative")

        if user:
            conn.parameterValues["userName"] = user
        if host:
            conn.parameterValues["hostName"] = host
        if port:
            conn.parameterValues["port"] = port
        if socket:
            conn.parameterValues["socket"] = socket

        hostIdentifier = conn.driver.hostIdentifierTemplate
        for key, value in conn.parameterValues.items():
            hostIdentifier = hostIdentifier.replace("%"+key+"%", str(value))
        conn.hostIdentifier = hostIdentifier

        return conn

    return None
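
A few illustrative inputs accepted by the parser above; the user names, hosts, ports and socket path are made up, but the accepted shapes follow directly from the regex and the mysql command-line handling in the code (running this assumes the Workbench scripting environment, since the function looks up drivers in grt.root).

for s in ("root@localhost",
          "root:secret@db.example.com:3307",
          "root@localhost::/var/run/mysqld/mysqld.sock",
          "mysql -uroot -h db.example.com -P 3307"):
    conn = connectionFromString(s)
    if conn:
        print conn.parameterValues["hostName"], conn.parameterValues["port"]
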
Example #21
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict((datatype.name.upper(), datatype)
                                 for datatype in targetCatalog.simpleDatatypes)

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            if not source_type and source_column.userType.sqlDefinition.startswith(
                    'enum('):
                target_column.simpleType = mysql_simpleTypes['ENUM']
                target_column.datatypeExplicitParams = source_column.userType.sqlDefinition[
                    4:]
                return True

            target_column.flags.extend(source_column.userType.flags)

        # SQL expression to use for converting the column data to the target type
        # eg.: CAST(? as NVARCHAR(max))
        type_cast_expression = None
        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            # string data types:
            target_datatype = ''
            if source_datatype in ['VARCHAR', 'NVARCHAR']:
                if 0 <= source_column.length < 256:
                    target_datatype = 'VARCHAR'
                elif 0 <= source_column.length < 65536:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    if targetCatalog.version.majorNumber < 5:
                        target_datatype = 'MEDIUMTEXT'
                    else:
                        target_datatype = 'VARCHAR'
                else:
                    target_datatype = 'LONGTEXT'
            elif source_datatype in ['CHAR', 'NCHAR']:
                if source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'LONGTEXT'
            # integer data types:
            elif source_datatype == 'BIT':
                target_datatype = 'TINYINT'
            elif source_datatype == 'INTEGER':
                target_datatype = 'INT'
            elif source_datatype in ['SMALLINT', 'INT', 'BIGINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            # numeric
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                target_datatype = 'DECIMAL'
            # floating point datatypes:
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
            elif source_datatype == 'DOUBLE PRECISION':
                target_datatype = 'DOUBLE'
            # binary datatypes:
            elif source_datatype in ['CLOB', 'TEXT']:
                target_datatype = 'LONGTEXT'
            elif source_datatype == 'BLOB':
                target_datatype = 'LONGBLOB'
            # datetime datatypes:
            elif source_datatype == 'TIMESTAMP':
                target_datatype = 'DATETIME'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
            elif source_datatype == 'DATETIMEOFFSET':
                target_datatype = 'TIME'
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type DATETIMEOFFSET was migrated to TIME")
            # others
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if target_datatype in mysql_simpleTypes:
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning(
                    "SQLite migrateTableColumnsToMySQL",
                    "Can't find datatype %s for type %s\n" %
                    (target_datatype, source_datatype))
                state.addMigrationLogEntry(
                    2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"'
                    % (target_column.name, source_column.owner.name,
                       source_datatype))
                return False

            if type_cast_expression:
                target_column.owner.customData[
                    "columnTypeCastExpression:%s" %
                    target_column.name] = "%s as ?" % type_cast_expression

            return True
        else:
            state.addMigrationLogEntry(
                2, source_column, target_column,
                'Could not migrate type of column "%s" in "%s" (%s)' %
                (target_column.name, source_column.owner.name,
                 source_column.formattedRawType))
            return False

        return True
Example #22
                if callback:
                    if callback_params:
                        callback_params['conn_id'] = conn_id
                    else:
                        callback_params = {'conn_id': conn_id}

                    error = callback(callback_params)

                # Finally, close the connection
                grt.modules.WbFabricInterface.closeConnection(conn_id)
    except OperationCancelledError, e:
        error = "Operation Cancelled"
        log_warning("WBFabric Module", "User cancelled %s\n" % name)
    except Exception, e:
        error = str(e)
        log_warning("WBFabric Module",
                    "Error %s : %s\n" % (conn.name, traceback.format_exc()))

    return error


def _execute_fabric_command(conn_id, fabric_command):
    """
    This function will be used to actually execute a valid Fabric command
    and process the result.

    The data resulting from Fabric operations is returned in JSON format.

    Fabric commands return two recordsets, delivered as two lists in the
    returned JSON data:
    - The first element is a status record; it is processed here and, if there
      were errors, an exception is raised.
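
    A minimal sketch of how such a two-list reply could be unpacked (an
    illustration only: the list layout and the 'message' field used for the
    error check are assumptions, not the actual WbFabricInterface contract):

        import json

        def _parse_fabric_reply(raw_json):
            # reply is expected to be a JSON array: [status_records, payload_rows]
            reply = json.loads(raw_json)
            status_records, payload_rows = reply[0], reply[1]
            # Hypothetical error check on the status recordset
            if status_records and status_records[0].get('message'):
                raise Exception('Fabric error: %s' % status_records[0]['message'])
            return payload_rows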
Example #23
            test_ssh_connection.acquire_admin_access()
        except Exception, exc:
            import traceback
            traceback.print_exc()
            return "ERROR "+str(exc)

        os_info = test_ssh_connection.detect_operating_system_version()
        if os_info:
            os_type, os_name, os_variant, os_version = os_info
            log_info(_this_file, "Instance test: detected remote OS: %s (%s), %s, %s\n" % (os_info))

            # check if the admin access error was because of wrong OS set
            if os_type != profile.target_os:
                return "ERROR Wrong Remote OS configured for connection. Set to %s, but was detected as %s" % (profile.target_os, os_type)
        else:
            log_warning(_this_file, "Instance test: could not determine OS version information\n")

            return "ERROR Could not determine remote OS details"

        return "OK"

    elif what == "disconnect":
        if test_ssh_connection:
            test_ssh_connection = None
        return "OK"

    elif what == "check_privileges":
        return "ERROR"

    elif what in ("find_config_file", "check_config_path", "check_config_section"):
        config_file = profile.config_file_path
Example #24
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict((datatype.name.upper(), datatype)
                                 for datatype in targetCatalog.simpleDatatypes)

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            target_column.flags.extend(source_column.userType.flags)

        # SQL expression to use for converting the column data to the target type
        # eg.: CAST(? as NVARCHAR(max))
        type_cast_expression = None
        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            # string data types:
            target_datatype = ''
            if source_datatype in ['VARCHAR', 'NVARCHAR']:
                if source_datatype == 'VARCHAR':
                    type_cast_expression = "CAST(? as NVARCHAR(%d))" % target_column.length
                if source_column.length == -1:  # VARCHAR(MAX) or NVARCHAR(MAX)
                    target_datatype = 'LONGTEXT'  # TODO: give the user the choice for this target datatype
                elif 0 < source_column.length < 256:
                    target_datatype = 'VARCHAR'
                else:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    target_datatype = 'TEXT' if targetCatalog.version.majorNumber < 5 else 'VARCHAR'
            elif source_datatype in ['TEXT', 'NTEXT']:
                if source_datatype == 'TEXT':
                    type_cast_expression = "CAST(? as NTEXT)"
                target_datatype = 'LONGTEXT'
            elif source_datatype in [
                    'CHAR', 'NCHAR'
            ]:  # MSSQL CHAR's (also VARCHAR's) max length is 8000 non Unicode characters
                if source_datatype == 'CHAR':
                    type_cast_expression = "CAST(? as NCHAR(%d))" % target_column.length
                if 0 < source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'TEXT'
            # integer data types:
            elif source_datatype in ['BIGINT', 'INT', 'SMALLINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            elif source_datatype == 'TINYINT':
                target_datatype = source_datatype
                target_column.precision = -1
                if 'UNSIGNED' not in target_column.flags:
                    target_column.flags.append(
                        'UNSIGNED')  # In MSSQL TINYINT is unsigned
            elif source_datatype == 'UNIQUEIDENTIFIER':
                target_datatype = 'VARCHAR'
                type_cast_expression = "CAST(? as VARCHAR(64))"
                target_column.length = 64
                if 'UNIQUE' not in target_column.flags:
                    target_column.flags.append(
                        'UNIQUE')  # uniqueid must be UNIQUE... bug #43098
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            elif source_datatype == 'SYSNAME':  # the relevant info is in http://msdn.microsoft.com/en-us/library/ms191240(v=sql.105).aspx
                target_datatype = 'VARCHAR'
                type_cast_expression = "CAST(? as VARCHAR(160))"
                target_column.length = 160
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            # exact numeric datatypes:
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                target_datatype = 'DECIMAL'
            # floating point datatypes:
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
            elif source_datatype == 'FLOAT':
                if source_column.precision > 24:
                    target_datatype = 'DOUBLE'
                    target_column.precision = -1
                else:
                    # FLOAT with precision <= 24 fits a MySQL FLOAT; without this
                    # fallback target_datatype would stay empty and the lookup below fails
                    target_datatype = 'FLOAT'
            elif source_datatype in ['MONEY', 'SMALLMONEY']:
                target_datatype = 'DECIMAL'
                target_column.precision = source_column.simpleType.numericPrecision
                target_column.scale = source_column.simpleType.numericScale
            # binary datatypes:
            elif source_datatype in ['IMAGE', 'BINARY', 'VARBINARY']:
                if source_column.length == -1:  # VARBINARY(MAX)
                    target_datatype = 'LONGBLOB'  # TODO: give the user the choice for this target datatype
                elif 0 <= source_column.length < 256:
                    if source_datatype == 'IMAGE':
                        target_datatype = 'TINYBLOB'
                    else:
                        target_datatype = source_datatype
                elif 0 <= source_column.length < 65536:
                    target_datatype = 'MEDIUMBLOB'
                else:
                    target_datatype = 'LONGBLOB'
            # datetime datatypes:
            elif source_datatype in [
                    'DATETIME', 'SMALLDATETIME', 'DATETIME2', 'DATETIMEOFFSET'
            ]:
                target_datatype = 'DATETIME'
                target_column.precision = -1
            # timestamp datatypes
            elif source_datatype in ['TIMESTAMP', 'ROWVERSION']:
                target_datatype = 'TIMESTAMP'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
                target_column.precision = -1
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
                target_column.precision = -1
            elif source_datatype == 'BIT':
                target_datatype = 'TINYINT'
                target_column.length = 1
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type BIT was migrated to TINYINT(1)")
            elif source_datatype == 'XML':
                target_datatype = 'TEXT'
                type_cast_expression = "CAST(? as NVARCHAR(max))"
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type XML was migrated to TEXT")
            elif source_datatype in ['GEOMETRY', 'GEOGRAPHY']:
                target_datatype = 'GEOMETRY'
                type_cast_expression = '?.STAsText()'
            elif source_datatype == 'HIERARCHYID':
                target_datatype = 'VARCHAR'
                type_cast_expression = "CAST(? as VARCHAR(max))"
                target_column.length = 255
                state.addMigrationLogEntry(
                    1, source_column, target_column,
                    "Source column type HIERARCHYID was migrated to VARCHAR(255)"
                )
            elif source_datatype == 'SQL_VARIANT':
                target_datatype = 'TEXT'
                type_cast_expression = "CAST(? as NVARCHAR(max))"
                state.addMigrationLogEntry(
                    1, source_column, target_column,
                    "Source column type %s was migrated to %s(%s)" %
                    (source_datatype, target_datatype, target_column.length))
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if target_datatype in mysql_simpleTypes:
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning(
                    "MSSQL migrateTableColumnsToMySQL",
                    "Can't find datatype %s for type %s\n" %
                    (target_datatype, source_datatype))
                state.addMigrationLogEntry(
                    2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"'
                    % (target_column.name, source_column.owner.name,
                       source_datatype))
                return False

            if type_cast_expression:
                target_column.owner.customData[
                    "columnTypeCastExpression:%s" %
                    target_column.name] = "%s as ?" % type_cast_expression

            return True
        else:
            state.addMigrationLogEntry(
                2, source_column, target_column,
                'Could not migrate type of column "%s" in "%s" (%s)' %
                (target_column.name, source_column.owner.name,
                 source_column.formattedRawType))
            return False

        return True
Example #25
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict((datatype.name.upper(), datatype)
                                 for datatype in targetCatalog.simpleDatatypes)

        source_type = source_column.simpleType
        if not source_type and source_column.userType:
            # evaluate user type
            source_type = source_column.userType.actualType

            if not source_type and source_column.userType.sqlDefinition.startswith(
                    'enum('):
                target_column.simpleType = mysql_simpleTypes['ENUM']
                target_column.datatypeExplicitParams = source_column.userType.sqlDefinition[
                    4:]
                return True

            target_column.flags.extend(source_column.userType.flags)

        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            # string data types:
            target_datatype = ''
            if source_datatype == 'VARCHAR':
                if 0 <= source_column.length < 256:
                    target_datatype = 'VARCHAR'
                elif 0 <= source_column.length < 65536:  # MySQL versions > 5.0 can hold up to 65535 chars in a VARCHAR column
                    if targetCatalog.version.majorNumber < 5:
                        target_datatype = 'MEDIUMTEXT'
                    else:
                        target_datatype = 'VARCHAR'
                else:
                    target_datatype = 'LONGTEXT'
            elif source_datatype == 'CHAR':
                if source_column.length < 256:
                    target_datatype = 'CHAR'
                else:
                    target_datatype = 'LONGTEXT'
            # integer data types:
            elif source_datatype == 'BIT':
                target_datatype = 'TINYINT'
            elif source_datatype == 'INTEGER':
                target_datatype = 'INT'
            elif source_datatype in ['SMALLINT', 'INT', 'BIGINT']:
                target_datatype = source_datatype
                target_column.precision = -1
            # numeric
            elif source_datatype in ['DECIMAL', 'NUMERIC']:
                target_datatype = 'DECIMAL'
            elif source_datatype == 'SMALLMONEY':
                target_datatype = 'DECIMAL'
                target_column.precision = 10
                target_column.scale = 4
            elif source_datatype == 'MONEY':
                target_datatype = 'DECIMAL'
                target_column.precision = 19
                target_column.scale = 4
            # floating point datatypes:
            elif source_datatype == 'REAL':
                target_datatype = 'FLOAT'
            elif source_datatype == 'DOUBLE PRECISION':
                target_datatype = 'DOUBLE'
            # bit string, binary and large object datatypes:
            elif source_datatype == 'VARBIT':
                if 1 <= source_column.length <= 64:
                    target_datatype = 'BIT'
                else:
                    target_datatype = 'LONGBLOB'
            elif source_datatype == 'LONG VARBIT':
                target_datatype = 'LONGBLOB'
            elif source_datatype in ['XML', 'TEXT', 'NTEXT']:
                target_datatype = 'LONGTEXT'
            elif source_datatype == 'BINARY':
                if 1 <= source_column.length <= 8:
                    target_datatype = 'TINYBLOB'
                elif 8 < source_column.length <= 16:
                    target_datatype = 'BLOB'
                elif 16 < source_column.length <= 24:
                    target_datatype = 'MEDIUMBLOB'
                else:
                    target_datatype = 'LONGBLOB'
            elif source_datatype == 'LONG BINARY':
                target_datatype = 'LONGBLOB'
            # datetime datatypes:
            elif source_datatype == 'TIMESTAMP':
                target_datatype = 'DATETIME'
            elif source_datatype == 'DATE':
                target_datatype = 'DATE'
            elif source_datatype == 'TIME':
                target_datatype = 'TIME'
            elif source_datatype == 'DATETIMEOFFSET':
                target_datatype = 'TIME'
                state.addMigrationLogEntry(
                    0, source_column, target_column,
                    "Source column type DATETIMEOFFSET was migrated to TIME")
            # others
            elif source_datatype == 'UNIQUEIDENTIFIERSTR':
                target_datatype = 'VARCHAR'
                target_column.length = 36
            elif source_datatype.startswith('ST_'):  # Spatial datatypes
                target_datatype = 'VARCHAR'
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if target_datatype in mysql_simpleTypes:
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning(
                    "SQLAnywhere migrateTableColumnsToMySQL",
                    "Can't find datatype %s for type %s\n" %
                    (target_datatype, source_datatype))
                state.addMigrationLogEntry(
                    2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"'
                    % (target_column.name, source_column.owner.name,
                       source_datatype))
                return False

            return True
        else:
            state.addMigrationLogEntry(
                2, source_column, target_column,
                'Could not migrate type of column "%s" in "%s" (%s)' %
                (target_column.name, source_column.owner.name,
                 source_column.formattedRawType))
            return False

        return True
Example #26
    def migrateDatatypeForColumn(self, state, source_column, target_column):
        targetCatalog = state.targetCatalog

        mysql_simpleTypes = dict( (datatype.name.upper(), datatype) for datatype in targetCatalog.simpleDatatypes )

        source_type = source_column.simpleType

        if source_type:
            # Decide which mysql datatype corresponds to the column datatype:
            source_datatype = source_type.name.upper()
            target_datatype = ''

            if source_datatype == "VARCHAR":
                target_datatype = "VARCHAR"
                target_column.length = source_column.length
            elif source_datatype == "INTEGER" or source_datatype == "INT":
                target_datatype = "INT"
            elif source_datatype == "SMALLINT":
                target_datatype = "SMALLINT"
            elif source_datatype == "COUNTER":
                target_datatype = "INT"
                # check if this column is part of an index and that there are no other auto_inc columns
                flag = False
                for col in target_column.owner.columns:
                    if col == target_column:
                        flag = True  # this is the 1st auto-inc column
                        break
                    elif col.autoIncrement:
                        break
                if flag:
                    is_indexed = False
                    # now check if this is part of some index
                    for idx in target_column.owner.indices:
                        for c in idx.columns:
                            if c.referencedColumn == target_column:
                                is_indexed = True
                                break
                    if is_indexed:
                        target_column.autoIncrement = 1
                        target_column.isNotNull = 1
            elif source_datatype == "BIT":
                target_datatype = "TINYINT"
            elif source_datatype == "BYTE":
                target_datatype = "TINYINT"
                target_column.flags.append("UNSIGNED")
            elif source_datatype == "SINGLE":
                target_datatype = "FLOAT"
            elif source_datatype == "REAL" or source_datatype == "DOUBLE":
                target_datatype = "DOUBLE"
            elif source_datatype == "CURRENCY" or source_datatype == "DECIMAL":
                target_datatype = "DECIMAL"
            elif source_datatype == "BINARY":
                target_datatype = "BLOB"
            elif source_datatype == "LONGBINARY":
                target_datatype = "LONGBLOB"
            elif source_datatype == "LONGCHAR":
                target_datatype = "LONGTEXT"
            elif source_datatype == "DATETIME":
                target_datatype = "DATETIME"
                target_column.precision = -1
            else:
                # just fall back to same type name and hope for the best
                target_datatype = source_datatype

            if target_datatype in mysql_simpleTypes:
                target_column.simpleType = mysql_simpleTypes[target_datatype]
            else:
                grt.log_warning("MSAccess migrateTableColumnsToMySQL", "Can't find datatype %s for type %s\n" % (target_datatype, source_datatype))
                state.addMigrationLogEntry(2, source_column, target_column,
                    'Could not migrate column "%s" in "%s": Unknown datatype "%s"' % (target_column.name, source_column.owner.name, source_datatype) )
                return False

            return True
        else:
            state.addMigrationLogEntry(2, source_column, target_column,
                    'Could not migrate type of column "%s" in "%s" (%s)' % (target_column.name, source_column.owner.name, source_column.formattedRawType) )
            return False

        return True
Example #27
    def missing_host_key(self, client, hostname, key):
        import binascii
        log_warning(_this_file, 'WARNING: Unknown %s host key for %s: %s\n' % (key.get_name(), hostname, binascii.hexlify(key.get_fingerprint())))