    def migrateTableColumnToMySQL(self, state, source_column, targetTable):
        target_column = GenericMigration.migrateTableColumnToMySQL(self, state, source_column, targetTable)
        # MySQL specific
        for attr in ["autoIncrement", "expression", "generated", "generatedStorage"]:
            setattr(target_column, attr, getattr(source_column, attr))

        return target_column
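# Illustrative sketch, not part of the Workbench module above: the getattr/setattr
# loop simply mirrors a fixed list of MySQL-specific attributes from the source
# column onto the target column. The SimpleNamespace objects below are stand-ins
# for the GRT column objects, shown only to isolate the copy pattern.
from types import SimpleNamespace

_src = SimpleNamespace(autoIncrement=1, expression="", generated=0, generatedStorage="")
_dst = SimpleNamespace()
for _attr in ["autoIncrement", "expression", "generated", "generatedStorage"]:
    setattr(_dst, _attr, getattr(_src, _attr))
assert _dst.autoIncrement == 1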
    def migrateUpdateForChanges(self, state, target_catalog):
        """
        Create datatype cast expression for target column based on source datatype.
        """
        for targetSchema in target_catalog.schemata:
            for targetTable in targetSchema.tables:
                for target_column in targetTable.columns:
                    # SQL expression to use for converting the column data to the target type
                    # eg.: CAST(? as NVARCHAR(max))
                    type_cast_expression = None
                    source_datatype = None
                    source_column = state.lookupSourceObject(target_column)
                    if source_column:
                        source_datatype = GenericMigration.getColumnDataType(
                            self, source_column)
                    if source_column and source_datatype:
                        if source_datatype == 'XML':
                            type_cast_expression = "CAST(? as NVARCHAR(max))"

                        if type_cast_expression:
                            target_column.owner.customData[
                                "columnTypeCastExpression:%s" % target_column.
                                name] = "%s as ?" % type_cast_expression

        return target_catalog
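# Hedged illustration of the bookkeeping above: for a hypothetical XML source
# column named "payload", the customData entry would be keyed
# "columnTypeCastExpression:payload" with the value "CAST(? as NVARCHAR(max)) as ?".
# Assuming the data-copy stage later substitutes the quoted column name for each
# "?", the resulting select item would read roughly:
#   CAST([payload] as NVARCHAR(max)) as [payload]
_example_value = "%s as ?" % "CAST(? as NVARCHAR(max))"
assert _example_value == "CAST(? as NVARCHAR(max)) as ?"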
Example #3
    def migrateTableColumnToMySQL(self, state, source_column, targetTable):
        target_column = GenericMigration.migrateTableColumnToMySQL(self, state, source_column, targetTable)
        # MySQL specific
        for attr in ["autoIncrement"]:
            setattr(target_column, attr, getattr(source_column, attr))

        return target_column
    def migrateUpdateForChanges(self, state, target_catalog):
        """
        Create datatype cast expression for target column based on source datatype.
        """
        for targetSchema in target_catalog.schemata:
            for targetTable in targetSchema.tables:
                for target_column in targetTable.columns:
                    # SQL expression to use for converting the column data to the target type
                    # eg.: CAST(? as NVARCHAR(max))
                    type_cast_expression = None
                    source_datatype = None
                    source_column = state.lookupSourceObject(target_column)
                    if source_column:
                        source_datatype = GenericMigration.getColumnDataType(
                            self, source_column)
                    if source_column and source_datatype:
                        if source_datatype == 'SYSNAME':
                            type_cast_expression = "CONVERT(VARCHAR(30), ?)"
                        elif source_datatype == 'LONGSYSNAME':
                            type_cast_expression = "CONVERT(VARCHAR(255), ?)"
                        elif source_datatype in ['DECIMAL', 'NUMERIC'
                                                 ] and source_column.identity:
                            if source_column.precision < 5:
                                type_cast_expression = "CONVERT(SMALLINT, ?)"
                            elif source_column.precision < 10:
                                type_cast_expression = "CONVERT(INT, ?)"
                            else:
                                type_cast_expression = "CONVERT(BIGINT, ?)"

                        if type_cast_expression:
                            target_column.owner.customData[
                                "columnTypeCastExpression:%s" % target_column.
                                name] = "%s as ?" % type_cast_expression

        return target_catalog
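# Minimal standalone sketch (illustrative only) of the precision-to-integer-type
# rule applied above to identity DECIMAL/NUMERIC columns: precision below 5 fits
# SMALLINT, below 10 fits INT, and anything larger falls back to BIGINT.
def _identity_cast_for_precision(precision):
    if precision < 5:
        return "CONVERT(SMALLINT, ?)"
    elif precision < 10:
        return "CONVERT(INT, ?)"
    return "CONVERT(BIGINT, ?)"

assert _identity_cast_for_precision(4) == "CONVERT(SMALLINT, ?)"
assert _identity_cast_for_precision(12) == "CONVERT(BIGINT, ?)"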
    def migrateTableColumnToMySQL(self, state, source_column, targetTable):
        target_column = GenericMigration.migrateTableColumnToMySQL(self, state, source_column, targetTable)
        if target_column:
            # Autoincrement for integer datatypes:
            if source_column.simpleType:
                source_datatype = source_column.simpleType.name
                if source_datatype in ['INT', 'TINYINT', 'SMALLINT', 'BIGINT']:
                    target_column.autoIncrement = source_column.identity

            # TODO set charset/collation
            #target_column.characterSetName = 
            
        return target_column
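# For orientation only (not generated by this method): a source column such as
# "id INT IDENTITY(1,1)" would, with autoIncrement copied from the identity flag
# as above, typically end up in the MySQL DDL as something like
#   `id` INT NOT NULL AUTO_INCREMENT
# though the exact output depends on the rest of the column definition.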
Example #6
    def migrateTableColumnToMySQL(self, state, source_column, targetTable):
        target_column = GenericMigration.migrateTableColumnToMySQL(self, state, source_column, targetTable)
        if target_column:
            # Autoincrement for integer datatypes:
            if source_column.simpleType:
                source_datatype = source_column.simpleType.name
                if source_datatype in ['INT', 'TINYINT', 'SMALLINT', 'BIGINT']:
                    target_column.autoIncrement = source_column.identity

            # TODO set charset/collation
            #target_column.characterSetName = 
            
        return target_column
    def migrateUpdateForChanges(self, state, target_catalog):
        """
        Create datatype cast expression for target column based on source datatype.
        """
        for targetSchema in target_catalog.schemata:
            for targetTable in targetSchema.tables:
                for target_column in targetTable.columns:
                    # SQL expression to use for converting the column data to the target type
                    # eg.: CAST(? as NVARCHAR(max))
                    type_cast_expression = None
                    source_datatype = None
                    source_column = state.lookupSourceObject(target_column)
                    if source_column:
                        source_datatype = GenericMigration.getColumnDataType(self, source_column)
                    if source_column and source_datatype:
                        target_datatype = target_column.simpleType.name.upper()
                        if source_datatype in ['VARCHAR', 'NVARCHAR']:
                            if source_datatype == 'VARCHAR':
                                if target_column.length > 4000 or target_column.length == -1: # NVARCHAR is limited to 4000 - if you need more, you must use MAX instead BUG #18167872
                                    type_cast_expression = "CAST(? as NVARCHAR(MAX))"         # If the source column is VARCHAR(MAX), the column length is 0 - so it must be cast to NVARCHAR(MAX) BUG #18105486
                                else:
                                    type_cast_expression = "CAST(? as NVARCHAR(%d))" % target_column.length
                        elif source_datatype in ['TEXT', 'NTEXT']:
                            if source_datatype == 'TEXT':
                                type_cast_expression = "CAST(? as NTEXT)"
                        elif source_datatype in ['CHAR', 'NCHAR']:  # MSSQL CHAR (and VARCHAR) max length is 8000 non-Unicode characters
                            if source_datatype == 'CHAR':
                                type_cast_expression = "CAST(? as NCHAR(%d))" % target_column.length
                        elif source_datatype == 'UNIQUEIDENTIFIER':
                            type_cast_expression = "CAST(? as VARCHAR(64))"
                        elif source_datatype == 'SYSNAME':  # the relevant info is in http://msdn.microsoft.com/en-us/library/ms191240(v=sql.105).aspx
                            type_cast_expression = "CAST(? as VARCHAR(128))"
                        # floating point datatypes:
                        elif source_datatype in ['DECIMAL', 'NUMERIC']:
                            if source_column.scale == 0:
                                type_cast_expression = "CAST(? as %s)" % ('INT' if target_datatype == 'MEDIUMINT' else target_datatype)
                        elif source_datatype == 'XML':
                            type_cast_expression = "CAST(? as NVARCHAR(max))"
                        elif source_datatype in ['GEOMETRY', 'GEOGRAPHY']:
                            type_cast_expression = '?.STAsText()'
                        elif source_datatype == 'HIERARCHYID':
                            type_cast_expression = "CAST(? as VARCHAR(max))"
                        elif source_datatype == 'SQL_VARIANT':
                            type_cast_expression = "CAST(? as NVARCHAR(max))"
                        elif source_datatype in ['BINARY', 'VARBINARY', 'TIMESTAMP', 'ROWVERSION']:
                            type_cast_expression = 'CONVERT(VARBINARY(MAX), ?, 0)'

                        if type_cast_expression:
                            target_column.owner.customData["columnTypeCastExpression:%s" % target_column.name] = "%s as ?" % type_cast_expression

        return target_catalog
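# Refactoring sketch, not taken from the Workbench module: the length-independent
# branches above could be expressed as a lookup table, leaving only the
# VARCHAR/NVARCHAR, CHAR/NCHAR and DECIMAL/NUMERIC cases to the conditional
# logic that needs target_column.length or source_column.scale.
_FIXED_CASTS = {
    'TEXT':             "CAST(? as NTEXT)",
    'UNIQUEIDENTIFIER': "CAST(? as VARCHAR(64))",
    'SYSNAME':          "CAST(? as VARCHAR(128))",
    'XML':              "CAST(? as NVARCHAR(max))",
    'GEOMETRY':         "?.STAsText()",
    'GEOGRAPHY':        "?.STAsText()",
    'HIERARCHYID':      "CAST(? as VARCHAR(max))",
    'SQL_VARIANT':      "CAST(? as NVARCHAR(max))",
    'BINARY':           "CONVERT(VARBINARY(MAX), ?, 0)",
    'VARBINARY':        "CONVERT(VARBINARY(MAX), ?, 0)",
    'TIMESTAMP':        "CONVERT(VARBINARY(MAX), ?, 0)",
    'ROWVERSION':       "CONVERT(VARBINARY(MAX), ?, 0)",
}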
    def migrateTablePrimaryKeyToMySQL(self, state, sourceTable, targetTable):

        res = GenericMigration.migrateTablePrimaryKeyToMySQL(self, state, sourceTable, targetTable)

        if targetTable.primaryKey:
            for icolumn in targetTable.primaryKey.columns:
                if not icolumn.referencedColumn.isNotNull:
                    icolumn.referencedColumn.isNotNull = 1
                    icolumn.referencedColumn.defaultValueIsNull = 0
                    icolumn.referencedColumn.defaultValue = ''
                    # force primary keys to be NOT NULL
                    state.addMigrationLogEntry(1, sourceTable, targetTable,
                                              'Source table has a PRIMARY KEY allowing NULL values, which is not supported by MySQL. Column was changed to NOT NULL.')

        return res
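# Background for the adjustment above: MySQL requires every column that is part
# of a PRIMARY KEY to be NOT NULL (an explicitly nullable key column is rejected
# with an error), so nullable key columns from the source schema are rewritten to
# NOT NULL with an empty default, and the change is recorded in the migration log.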
    def migrateTablePrimaryKeyToMySQL(self, state, sourceTable, targetTable):

        res = GenericMigration.migrateTablePrimaryKeyToMySQL(
            self, state, sourceTable, targetTable)

        if targetTable.primaryKey:
            for icolumn in targetTable.primaryKey.columns:
                if not icolumn.referencedColumn.isNotNull:
                    icolumn.referencedColumn.isNotNull = 1
                    icolumn.referencedColumn.defaultValueIsNull = 0
                    icolumn.referencedColumn.defaultValue = ''
                    # force primary keys to be NOT NULL
                    state.addMigrationLogEntry(
                        1, sourceTable, targetTable,
                        'Source table has a PRIMARY KEY allowing NULL values, which is not supported by MySQL. Column was changed to NOT NULL.'
                    )

        return res
    def migrateTableToMySQL(self, state, sourceTable, target_schema):
        targetTable = GenericMigration.migrateTableToMySQL(
            self, state, sourceTable, target_schema)

        # MySQL attributes
        for attr in [
                "tableEngine", "nextAutoInc", "password", "delayKeyWrite",
                "defaultCharacterSetName", "defaultCollationName",
                "mergeUnion", "mergeInsert", "tableDataDir", "tableIndexDir",
                "packKeys", "raidType", "raidChunks", "raidChunkSize",
                "checksum", "rowFormat", "keyBlockSize", "avgRowLength",
                "minRows", "maxRows", "partitionType", "partitionExpression",
                "partitionCount", "subpartitionType", "subpartitionExpression",
                "subpartitionCount"
        ]:
            setattr(targetTable, attr, getattr(sourceTable, attr))

        if True:

            def copy_partitions(owner, part_list):
                l = []
                for src in part_list:
                    dst = grt.classes.db_mysql_PartitionDefinition()
                    for attr in [
                            "name", "value", "comment", "dataDirectory",
                            "indexDirectory", "maxRows", "minRows"
                    ]:
                        setattr(dst, attr, getattr(src, attr))
                    dst.owner = owner
                    dst.subpartitionDefinitions.extend(
                        copy_partitions(dst, src.subpartitionDefinitions))
                    l.append(dst)
                return l

            # partition defs
            targetTable.partitionDefinitions.extend(
                copy_partitions(targetTable, sourceTable.partitionDefinitions))

        return targetTable
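# Note on copy_partitions above: it clones each partition definition attribute by
# attribute, re-points the clone's owner at the copied parent so the GRT object
# tree stays consistent, and recurses into subpartitionDefinitions. Since MySQL
# allows only one level of subpartitioning, that recursion is at most one deep
# in practice.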
    def migrateUpdateForChanges(self, state, target_catalog):
        """
        Create datatype cast expression for target column based on source datatype.
        """   
        for targetSchema in target_catalog.schemata:
            for targetTable in targetSchema.tables:
                for target_column in targetTable.columns:                    
                    # SQL expression to use for converting the column data to the target type
                    # eg.: CAST(? as NVARCHAR(max))
                    type_cast_expression = None
                    source_datatype = None
                    source_column = state.lookupSourceObject(target_column)
                    if source_column:
                        source_datatype = GenericMigration.getColumnDataType(self, source_column)
                    if source_column and source_datatype:
                        if source_datatype == 'XML':
                            type_cast_expression = "CAST(? as NVARCHAR(max))"

                        if type_cast_expression:
                            target_column.owner.customData["columnTypeCastExpression:%s" % target_column.name] = "%s as ?" % type_cast_expression

        return target_catalog
    def migrateTableToMySQL(self, state, sourceTable, target_schema):
        targetTable = GenericMigration.migrateTableToMySQL(self, state, sourceTable, target_schema)

        # MySQL attributes
        for attr in ["tableEngine", "nextAutoInc", "password", "delayKeyWrite", "defaultCharacterSetName", "defaultCollationName", "mergeUnion", "mergeInsert",
                      "tableDataDir", "tableIndexDir", "packKeys", "raidType", "raidChunks", "raidChunkSize", "checksum", "rowFormat", "keyBlockSize", "avgRowLength", "minRows", "maxRows",
                      "partitionType", "partitionExpression", "partitionCount", "subpartitionType", "subpartitionExpression", "subpartitionCount"]:
            setattr(targetTable, attr, getattr(sourceTable, attr))

        if True:
            def copy_partitions(owner, part_list):
                l = []
                for src in part_list:
                    dst = grt.classes.db_mysql_PartitionDefinition()
                    for attr in ["name", "value", "comment", "dataDirectory", "indexDirectory", "maxRows", "minRows"]:
                        setattr(dst, attr, getattr(src, attr))
                    dst.owner = owner
                    dst.subpartitionDefinitions.extend(copy_partitions(dst, src.subpartitionDefinitions))
                    l.append(dst)
                return l
            # partition defs
            targetTable.partitionDefinitions.extend(copy_partitions(targetTable, sourceTable.partitionDefinitions))

        return targetTable
    def migrateTriggerToMySQL(self, state, source_trigger, target_table):
        target_trigger = GenericMigration.migrateTriggerToMySQL(
            self, state, source_trigger, target_table)
        target_trigger.commentedOut = 0
        return target_trigger
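# Assumption, stated explicitly: commentedOut appears to flag whether the object's
# SQL is emitted in the generated script as an active statement (0) or left
# commented out for manual review (1); clearing it here keeps the migrated
# trigger enabled in the output.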
    def migrateTableForeignKeyToMySQL(self, state, source_fk, targetTable):
        target_fk = GenericMigration.migrateTableForeignKeyToMySQL(self, state, source_fk, targetTable)
        ### TODO: migrate constraints
        return target_fk
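# Context for the TODO above: MySQL/InnoDB accepts the referential actions
# RESTRICT, CASCADE, SET NULL and NO ACTION, so migrating source-specific
# constraint options would mean mapping anything outside that set (or at least
# logging it). A tiny hypothetical helper, for illustration only:
def _unsupported_fk_actions(*rules):
    supported = {"RESTRICT", "CASCADE", "SET NULL", "NO ACTION"}
    return [r for r in rules if r and r.upper() not in supported]

assert _unsupported_fk_actions("CASCADE", "SET DEFAULT") == ["SET DEFAULT"]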
    def migrateUpdateForChanges(self, state, target_catalog):
        """
        Create datatype cast expression for target column based on source datatype.
        """
        for targetSchema in target_catalog.schemata:
            for targetTable in targetSchema.tables:
                for target_column in targetTable.columns:
                    # SQL expression to use for converting the column data to the target type
                    # eg.: CAST(? as NVARCHAR(max))
                    type_cast_expression = None
                    source_datatype = None
                    source_column = state.lookupSourceObject(target_column)
                    if source_column:
                        source_datatype = GenericMigration.getColumnDataType(
                            self, source_column)
                    if source_column and source_datatype:
                        target_datatype = target_column.simpleType.name.upper()
                        if source_datatype in ['VARCHAR', 'NVARCHAR']:
                            if source_datatype == 'VARCHAR':
                                if target_column.length > 4000 or target_column.length == -1:  # NVARCHAR is limited to 4000 - if you need more, you must use MAX instead BUG #18167872
                                    type_cast_expression = "CAST(? as NVARCHAR(MAX))"  # If the source column is VARCHAR(MAX), the column length is 0 - so it must be cast to NVARCHAR(MAX) BUG #18105486
                                else:
                                    type_cast_expression = "CAST(? as NVARCHAR(%d))" % target_column.length
                        elif source_datatype in ['TEXT', 'NTEXT']:
                            if source_datatype == 'TEXT':
                                type_cast_expression = "CAST(? as NTEXT)"
                        elif source_datatype in [
                                'CHAR', 'NCHAR'
                        ]:  # MSSQL CHAR (and VARCHAR) max length is 8000 non-Unicode characters
                            if source_datatype == 'CHAR':
                                type_cast_expression = "CAST(? as NCHAR(%d))" % target_column.length
                        elif source_datatype == 'UNIQUEIDENTIFIER':
                            type_cast_expression = "CAST(? as VARCHAR(64))"
                        elif source_datatype == 'SYSNAME':  # the relevant info is in http://msdn.microsoft.com/en-us/library/ms191240(v=sql.105).aspx
                            type_cast_expression = "CAST(? as VARCHAR(128))"
                        # floating point datatypes:
                        elif source_datatype in ['DECIMAL', 'NUMERIC']:
                            if source_column.scale == 0:
                                type_cast_expression = "CAST(? as %s)" % (
                                    'INT' if target_datatype == 'MEDIUMINT'
                                    else target_datatype)
                        elif source_datatype == 'XML':
                            type_cast_expression = "CAST(? as NVARCHAR(max))"
                        elif source_datatype in ['GEOMETRY', 'GEOGRAPHY']:
                            type_cast_expression = '?.STAsText()'
                        elif source_datatype == 'HIERARCHYID':
                            type_cast_expression = "CAST(? as VARCHAR(max))"
                        elif source_datatype == 'SQL_VARIANT':
                            type_cast_expression = "CAST(? as NVARCHAR(max))"
                        elif source_datatype in [
                                'BINARY', 'VARBINARY', 'TIMESTAMP',
                                'ROWVERSION'
                        ]:
                            type_cast_expression = 'CONVERT(VARBINARY(MAX), ?, 0)'

                        if type_cast_expression:
                            target_column.owner.customData[
                                "columnTypeCastExpression:%s" % target_column.
                                name] = "%s as ?" % type_cast_expression

        return target_catalog
    def migrateRoutineToMySQL(self, state, source_routine, target_schema):
        target_routine = GenericMigration.migrateRoutineToMySQL(
            self, state, source_routine, target_schema)
        target_routine.commentedOut = 0
        return target_routine
    def migrateRoutineToMySQL(self, state, source_routine, target_schema):
        target_routine = GenericMigration.migrateRoutineToMySQL(self, state, source_routine, target_schema)
        target_routine.commentedOut = 0
        return target_routine
    def migrateViewToMySQL(self, state, source_view, target_schema):
        target_view = GenericMigration.migrateViewToMySQL(self, state, source_view, target_schema)
        target_view.commentedOut = 0
        return target_view
    def migrateTriggerToMySQL(self, state, source_trigger, target_table):
        target_trigger = GenericMigration.migrateTriggerToMySQL(self, state, source_trigger, target_table)
        target_trigger.commentedOut = 0
        return target_trigger
Example #20
    def migrateTableForeignKeyToMySQL(self, state, source_fk, targetTable):
        target_fk = GenericMigration.migrateTableForeignKeyToMySQL(self, state, source_fk, targetTable)
        ### TODO: migrate constraints
        return target_fk
    def migrateViewToMySQL(self, state, source_view, target_schema):
        target_view = GenericMigration.migrateViewToMySQL(
            self, state, source_view, target_schema)
        target_view.commentedOut = 0
        return target_view