def main(source, target, options=()):
    """Export the Workbench model at *source* to a SQL script at *target*.

    *options* are merged over the default ``{'OmitSchemas': True}`` and
    forwarded to the DbMySQLFE generators.  Returns -1 when the model
    file is missing or fails to load, otherwise None.
    """
    config = {'OmitSchemas': True}
    config.update(options)
    options = config
    log.debug('Using config %s', options)

    log.info('Loading %s', source)
    if not path.isfile(source):
        log.fatal('Model does not exist')
        return -1
    Workbench.openModel(source)
    if grt.root.wb.doc is None:
        log.fatal('Model failed to load')
        return -1

    schemas = grt.root.wb.doc.physicalModels[0].catalog
    log.info('Building initial %s', target)
    DbMySQLFE.generateSQLCreateStatements(schemas, schemas.version, options)
    DbMySQLFE.createScriptForCatalogObjects(target, schemas, options)

    # For some reason 'OmitSchemas' still drops/creates the schema
    # and 'GenerateSchemaDrops' doesn't seem to change anything
    # and I can't figure out how to not output to file
    log.info('Patching %s', target)
    with open(target, 'r') as orig_script:
        script_src = re.sub(r'(create|drop) schema .*?;\s*', '',
                            orig_script.read(),
                            flags=re.DOTALL | re.IGNORECASE)

    log.info('Writing %s', target)
    with open(target, 'w') as new_script:
        new_script.write(script_src)
def changeStorageEngines(cat):
    """Prompt for a storage engine and assign it to every table in *cat*.

    Returns 0 on success, 1 when the prompt is cancelled or empty,
    2 when the typed name is not a known engine.
    """
    new_engine = Workbench.input(
        'Type the new storage engine name for all tables in your model:')
    if not new_engine:
        return 1

    # Candidate engines come from a comma-separated WB option; entries
    # may carry a "label:" prefix in front of the engine name.
    raw = grt.root.wb.options.options['@db.mysql.Table:tableEngine/Items']
    candidates = [item.strip(' \t') for item in raw.split(',')]

    canonical = None
    for candidate in candidates:
        if candidate.find(':') != -1:
            candidate = candidate[candidate.find(':') + 1:]
        if candidate.lower() == new_engine.lower():
            canonical = candidate
    if canonical is None:
        Workbench.confirm('Change Storage Engines',
                          'Invalid storage engine name: ' + new_engine)
        return 2

    # Apply the canonical casing taken from the options list.
    for schema in cat.schemata:
        for tbl in schema.tables:
            tbl.tableEngine = canonical
    return 0
def changeStorageEngines(cat):
    """Ask (via mforms) for a storage engine and apply it to all tables.

    Returns 0 on success, 1 when the dialog is cancelled, 2 for an
    unknown engine name.
    """
    ret, new_engine = mforms.Utilities.request_input(
        "Change the Storage Engine of All Tables",
        "Type the new storage engine name:", "")
    if not ret:
        return 1

    # Known engines live in a comma-separated WB option; entries may be
    # "label:EngineName".
    raw = grt.root.wb.options.options['@db.mysql.Table:tableEngine/Items']
    known = [entry.strip(' \t') for entry in raw.split(',')]

    canonical = None
    for entry in known:
        if entry.find(':') != -1:
            entry = entry[entry.find(':') + 1:]
        if entry.lower() == new_engine.lower():
            canonical = entry
    if canonical is None:
        Workbench.confirm('Change Storage Engines',
                          'Invalid storage engine name: ' + new_engine)
        return 2

    # Use the canonical casing from the options list for the assignment.
    for schema in cat.schemata:
        for tbl in schema.tables:
            tbl.tableEngine = canonical
    return 0
def copyTableListToClipboard(cat):
    """Copy a comma-separated list of `schema`.`table` names for every
    table in *cat* to the clipboard.

    Fix: the original concatenated each schema's joined list without any
    separator between schemas, so the last table of one schema and the
    first table of the next were fused together.
    """
    qualified = ['`' + schema.name + '`.`' + tbl.name + '`'
                 for schema in cat.schemata
                 for tbl in schema.tables]
    Workbench.copyToClipboard(', '.join(qualified))
    return 0
def copyTableListToClipboard(cat):
    """Copy a comma-separated list of `schema`.`table` names for every
    table in *cat* to the clipboard.

    Fix: the original reassigned ``insert`` on every schema iteration
    instead of accumulating, so only the last schema's tables ended up
    on the clipboard.
    """
    qualified = ['`' + schema.name + '`.`' + tbl.name + '`'
                 for schema in cat.schemata
                 for tbl in schema.tables]
    Workbench.copyToClipboard(', '.join(qualified))
    return 0
def copySQLToClipboard(obj):
    """Copy the CREATE SQL for *obj* to the clipboard.

    Routine groups are expanded routine-by-routine (workaround until the
    diff SQL generator handles routine groups).
    """
    make_sql = grt.modules.DbMySQL.makeCreateScriptForObject
    if isinstance(obj, grt.classes.db_RoutineGroup):
        pieces = []
        for routine in obj.routines:
            pieces.append(make_sql(routine))
            pieces.append(';\n\n')
        sql = ''.join(pieces)
    else:
        sql = make_sql(obj)
    Workbench.copyToClipboard(sql)
    return 0
def copySQLToClipboard(obj):
    """Place the CREATE statement(s) for *obj* on the clipboard."""
    # workaround until diff sql generator handles routine groups
    if isinstance(obj, grt.classes.db_RoutineGroup):
        chunks = [grt.modules.DbMySQL.makeCreateScriptForObject(r) + ';\n\n'
                  for r in obj.routines]
    else:
        chunks = [grt.modules.DbMySQL.makeCreateScriptForObject(obj)]
    Workbench.copyToClipboard(''.join(chunks))
    return 0
def prefixTables(cat):
    """Prepend a user-supplied prefix to the name of every table in *cat*.

    Returns 0 on success, 1 when no prefix was entered.
    """
    prefix = Workbench.input('Please specify the prefix')
    if not prefix:
        return 1
    for schema in cat.schemata:
        for table in schema.tables:
            table.name = prefix + table.name
    return 0
def exportSQLite(cat):
    """Export every schema in catalog *cat* as a SQLite-compatible SQL
    script and hand it to ExportSQLiteWizard.

    Validates identifier uniqueness first; returns 0 on success, 1 when
    validation fails or the export raises ExportSQLiteError.
    """

    def validate_for_sqlite_export(cat):
        """Check uniqueness of schema, table, column and index names.

        Returns True when the model can be exported, False when a name
        conflict was found (or the user chose to stop searching).
        """
        have_errors = False
        idt = {}
        for i, schema in enumerate(cat.schemata):
            if schema.name in idt:
                have_errors = True
                if Workbench.confirm(
                        'Name conflict',
                        'Schemas %d and %d have the same name "%s".'
                        ' Please rename one of them.\n'
                        'Search for more such errors?' % (
                            idt[schema.name], i, schema.name)) == 0:
                    return False
            else:
                idt[schema.name] = i

        # Do not continue looking for errors on schema name error
        if have_errors:
            return False

        for schema in cat.schemata:
            idt = {}
            for i, tbl in enumerate(schema.tables):
                if tbl.name == '':
                    have_errors = True
                    if Workbench.confirm(
                            'Name conflict',
                            'Table %d in schema "%s". has no name.'
                            ' Please rename.\n'
                            'Search for more such errors?' % (
                                i, schema.name)) == 0:
                        return False
                if tbl.name in idt:
                    have_errors = True
                    if Workbench.confirm(
                            'Name conflict',
                            'Tables %d and %d in schema "%s"'
                            ' have the same name "%s".'
                            ' Please rename one of them.\n'
                            'Search for more such errors?' % (
                                idt[tbl.name], i, schema.name,
                                tbl.name)) == 0:
                        return False
                else:
                    idt[tbl.name] = i

        if have_errors:
            return False

        for schema in cat.schemata:
            for tbl in schema.tables:
                idt = {}
                for i, column in enumerate(tbl.columns):
                    if column.name == '':
                        have_errors = True
                        if Workbench.confirm(
                                'Name conflict',
                                'Column %d in table "%s"."%s". has no name.'
                                ' Please rename.\n'
                                'Search for more such errors?' % (
                                    i, schema.name, tbl.name)) == 0:
                            return False
                    if column.name in idt:
                        have_errors = True
                        if Workbench.confirm(
                                'Name conflict',
                                'Columns %d and %d in table "%s"."%s"'
                                ' have the same name "%s".'
                                ' Please rename one of them.\n'
                                'Search for more such errors?' % (
                                    idt[column.name], i, schema.name,
                                    tbl.name, column.name)) == 0:
                            return False
                    else:
                        idt[column.name] = i

                # Now check indices (except primary/unique)
                idt = {}
                for i, index in enumerate(tbl.indices):
                    if index.indexType == 'INDEX':
                        if index.name == '':
                            have_errors = True
                            if Workbench.confirm(
                                    'Name conflict',
                                    'Index %d in table "%s"."%s". has no name.'
                                    ' Please rename.\n'
                                    'Search for more such errors?' % (
                                        i, schema.name, tbl.name)) == 0:
                                return False
                        if index.name in idt:
                            have_errors = True
                            # BUG FIX: report the duplicated index name;
                            # the original formatted the stale 'column'
                            # loop variable here.
                            if Workbench.confirm(
                                    'Name conflict',
                                    'Indices %d and %d in table "%s"."%s"'
                                    ' have the same name "%s".'
                                    ' Please rename one of them.\n'
                                    'Search for more such errors?' % (
                                        idt[index.name], i, schema.name,
                                        tbl.name, index.name)) == 0:
                                return False
                        else:
                            idt[index.name] = i

        if have_errors:
            return False

        return True

    def is_deferred(fkey):
        # Hack: if comment starts with "Defer..." we make it a deferred FK
        # could use member 'deferability' (WB has it), but there is no GUI
        # for it
        return fkey.comment.lstrip().lower()[0:5] == 'defer'

    def export_table(out, db_name, schema, tbl):
        """Write CREATE TABLE, CREATE INDEX and INSERT statements for *tbl*."""
        if len(tbl.columns) == 0:
            return

        out.write('CREATE TABLE %s%s(\n%s' % (
            db_name, dq(tbl.name), schema_comment_format(tbl.comment)))

        primary_key = [i for i in tbl.indices if i.isPrimary == 1]
        primary_key = primary_key[0] if len(primary_key) > 0 else None

        pk_column = None
        if primary_key and len(primary_key.columns) == 1:
            pk_column = primary_key.columns[0].referencedColumn

        col_comment = ''
        for i, column in enumerate(tbl.columns):
            check, sqlite_type = '', None
            if column.simpleType:
                sqlite_type = column.simpleType.name
            else:
                sqlite_type = column.userType.name
            length = column.length
            # For INTEGER PRIMARY KEY column to become an alias for the rowid
            # the type needs to be "INTEGER" not "INT"
            # we fix it for other columns as well
            if 'INT' in sqlite_type or sqlite_type == 'LONG':
                sqlite_type = 'INTEGER'
                length = -1
                # Check flags for "unsigned"
                if 'UNSIGNED' in column.flags:
                    check = dq(column.name) + '>=0'
            # We even implement ENUM (because we can)
            if sqlite_type == 'ENUM':
                sqlite_type = 'TEXT'
                if column.datatypeExplicitParams:
                    check = (dq(column.name) + ' IN' +
                             column.datatypeExplicitParams)
            if i > 0:
                out.write(',' + comment_format(col_comment) + '\n')
            out.write('  ' + dq(column.name))
            # Type is optional in SQLite
            if sqlite_type != '':
                out.write(' ' + sqlite_type)
            # For [VAR]CHAR and such types specify length even though this is
            # not used in SQLite
            if length > 0:
                out.write('(%d)' % length)
            # Must specify single-column PKs as column-constraints for
            # AI/rowid behaviour
            if column == pk_column:
                out.write(' PRIMARY KEY')
                if primary_key.columns[0].descend == 1:
                    out.write(' DESC')
                # Only PK columns can be AI in SQLite
                if column.autoIncrement == 1:
                    out.write(' AUTOINCREMENT')
            # Check for NotNull
            if column.isNotNull == 1:
                out.write(' NOT NULL')
            if check != '':
                out.write(' CHECK(' + check + ')')
            if column.defaultValue != '':
                out.write(' DEFAULT ' + column.defaultValue)
            col_comment = column.comment

        # For multicolumn PKs
        if primary_key and not pk_column:
            out.write(',%s\n  PRIMARY KEY(%s)' % (
                comment_format(col_comment),
                print_index_columns(primary_key)))
            col_comment = ''

        # Put non-primary, UNIQUE Keys in CREATE TABLE as well (because we
        # can)
        for index in tbl.indices:
            if index != primary_key and index.indexType == 'UNIQUE':
                out.write(',%s\n' % comment_format(col_comment))
                col_comment = ''
                if index.name != '':
                    out.write('  CONSTRAINT %s\n  ' % dq(index.name))
                out.write('  UNIQUE(%s)' % print_index_columns(index))

        for fkey in tbl.foreignKeys:
            out.write(',%s\n' % comment_format(col_comment))
            col_comment = ''
            if fkey.name != '':
                out.write('  CONSTRAINT %s\n  ' % dq(fkey.name))
            out.write('  FOREIGN KEY(%s)\n' % print_fk_columns(fkey.columns))
            out.write('    REFERENCES %s(%s)' % (
                dq(fkey.referencedTable.name),
                print_fk_columns(fkey.referencedColumns)))
            if fkey.deleteRule in ['RESTRICT', 'CASCADE', 'SET NULL']:
                out.write('\n    ON DELETE ' + fkey.deleteRule)
            if fkey.updateRule in ['RESTRICT', 'CASCADE', 'SET NULL']:
                out.write('\n    ON UPDATE ' + fkey.updateRule)
            if is_deferred(fkey):
                out.write(' DEFERRABLE INITIALLY DEFERRED')

        out.write(comment_format(col_comment) + '\n);\n')

        # CREATE INDEX statements for all non-primary, non-unique,
        # non-foreign indexes
        for i, index in enumerate(tbl.indices):
            if index.indexType == 'INDEX':
                index_name = tbl.name + '.' + index.name
                if index.name == '':
                    # BUG FIX: 'i' is an int; the original concatenated
                    # str + int which raises TypeError.
                    index_name = tbl.name + '.index' + str(i)
                out.write('CREATE INDEX %s%s ON %s (%s);\n' % (
                    db_name,
                    dq(index_name),
                    dq(tbl.name),
                    print_index_columns(index)))

        # Write the INSERTS (currently always)
        for insert in tbl.inserts().splitlines():
            columns_values = ''
            insert_start = 'insert into `%s`.`%s` (' % (schema.name, tbl.name)
            if insert[0:len(insert_start)].lower() == insert_start.lower():
                columns_values = insert[len(insert_start):]
            else:
                raise ExportSQLiteError(
                    'Error', 'Unrecognized command in insert')
            last_column = 0
            for i, column in enumerate(tbl.columns):
                column_name = '`' + column.name + '`'
                if columns_values[0:len(column_name)] == column_name:
                    columns_values = columns_values[len(column_name):]
                    if columns_values[0:1] == ')':
                        columns_values = columns_values[1:]
                        last_column = i
                        break
                    else:
                        if columns_values[0:2] == ', ':
                            columns_values = columns_values[2:]
                        else:
                            raise ExportSQLiteError(
                                'Error',
                                'Unrecognized character in column list')
                else:
                    raise ExportSQLiteError(
                        'Error', 'Unrecognized column in inserts')
            out.write('INSERT INTO %s(' % dq(tbl.name))
            for i in range(last_column + 1):
                if i > 0:
                    out.write(',')
                out.write(dq(tbl.columns[i].name))
            if columns_values[0:9].lower() != ' values (':
                raise ExportSQLiteError(
                    'Error', 'Unrecognized SQL in insert')
            columns_values = columns_values[9:]
            out.write(') VALUES(')
            out.write(columns_values.replace("\\'", "''"))
            out.write('\n')

    def order_tables(out, db_name, schema, unordered, respect_deferredness):
        """Export every table whose FK targets are already exported,
        removing it from *unordered*.  Leaves circularly-referencing
        tables behind for the caller."""
        have_ordered = False
        while not have_ordered:
            if len(unordered) == 0:
                have_ordered = True
            made_progress = False
            # Iterate a snapshot; entries are deleted during the loop.
            for tbl in list(unordered.values()):
                has_forward_reference = False
                for fkey in tbl.foreignKeys:
                    if (fkey.referencedTable.name in unordered
                            and fkey.referencedTable.name != tbl.name
                            and not (respect_deferredness
                                     and is_deferred(fkey))):
                        has_forward_reference = True
                        break
                if not has_forward_reference:
                    export_table(out, db_name, schema, tbl)
                    del unordered[tbl.name]
                    have_ordered = True
                    made_progress = True
            if not (made_progress or have_ordered):
                # BUG FIX: a pass with no progress (circular FK refs only)
                # used to spin forever; bail out and let export_schema
                # dump the remainder.
                break

    def export_schema(out, schema, is_main_schema):
        """Write one schema: optional ATTACH, then all tables inside a
        transaction."""
        if len(schema.tables) == 0:
            return
        out.write('\n-- Schema: %s\n' % schema.name)
        out.write(schema_comment_format(schema.comment))
        db_name = ''
        if not is_main_schema:
            db_name = dq(schema.name) + '.'
            out.write('ATTACH "%s" AS %s;\n' % (
                safe_file_name(schema.name + '.sdb'),
                dq(schema.name)))
        out.write('BEGIN;\n')
        # Find a valid table order for inserts from FK constraints
        unordered = {t.name: t for t in schema.tables}
        # Try treating deferred keys like non-deferred keys first for
        # ordering
        order_tables(out, db_name, schema, unordered, False)
        # Now try harder (leave out deferred keys from determining an order)
        order_tables(out, db_name, schema, unordered, True)
        # Loop through all remaining tables, if any. Have circular FK refs.
        # How to handle?
        for tbl in unordered.values():
            export_table(out, db_name, schema, tbl)
        out.write('COMMIT;\n')

    def print_index_columns(index):
        """Render an index's columns as a quoted, comma-separated list."""
        s = ''
        for i, column in enumerate(index.columns):
            if i > 0:
                s += ','
            s += dq(column.referencedColumn.name)
            if column.descend == 1:
                s += ' DESC'
        return s

    def print_fk_columns(columns):
        """Render FK columns as a quoted, comma-separated list."""
        s = ''
        for i, column in enumerate(columns):
            if i > 0:
                s += ','
            s += dq(column.name)
        return s

    def dq(ident):
        """Double quote identifer, replacing " by "" """
        return '"' + re.sub(r'"', '""', ident) + '"'

    def safe_file_name(ident):
        """Create safe filename from identifer"""

        def repl(match):
            # BUG FIX: re.sub passes a Match object and expects a string
            # back; percent-encode each byte of the matched text (the
            # original passed the Match to bytearray and returned a list).
            return ''.join('%%%02x' % b
                           for b in bytearray(match.group(0), 'ascii'))

        return re.sub(r'[/\:*?"<>|%]', repl, ident)

    def info_format(header, body):
        """Format a info field as SQL comment"""
        body = body.strip()
        if body == '':
            return ''
        elif '\n' in body:
            # Multiline comment
            return '-- %s:\n-- %s\n' % (
                header, re.sub(r'\n', '\n-- ', body))
        else:
            # Single line
            return '-- %-14s %s\n' % (header + ':', body)

    def schema_comment_format(body):
        """Format a schema or table comment as SQL comment
        table comments to be stored in SQLite schema
        """
        body = body.strip()
        if body == '':
            return ''
        else:
            # Multiline comment
            return '-- %s\n' % re.sub(r'\n', '\n-- ', body)

    def comment_format(body):
        body = body.strip()
        if body == '':
            return ''
        elif '\n' in body:
            # Multiline comment
            return '\n-- %s' % re.sub(r'\n', '\n-- ', body)
        else:
            # Single line
            return '-- %s' % body

    if not validate_for_sqlite_export(cat):
        return 1

    out = StringIO.StringIO()
    out.write(info_format(
        'Creator',
        'MySQL Workbench %d.%d.%d/ExportSQLite Plugin %s\n' % (
            grt.root.wb.info.version.majorNumber,
            grt.root.wb.info.version.minorNumber,
            grt.root.wb.info.version.releaseNumber,
            ModuleInfo.version)))
    out.write(info_format('Author', grt.root.wb.doc.info.author))
    out.write(info_format('Caption', grt.root.wb.doc.info.caption))
    out.write(info_format('Project', grt.root.wb.doc.info.project))
    out.write(info_format('Changed', grt.root.wb.doc.info.dateChanged))
    out.write(info_format('Created', grt.root.wb.doc.info.dateCreated))
    out.write(info_format('Description', grt.root.wb.doc.info.description))
    out.write('PRAGMA foreign_keys = OFF;\n')

    # Loop over all catalogs in schema, find main schema main schema is first
    # nonempty schema or nonempty schema named "main"
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            export_schema(out, schema[0], schema[1])
    except ExportSQLiteError as e:
        Workbench.confirm(e.typ, e.message)
        return 1

    sql_text = out.getvalue()
    out.close()

    wizard = ExportSQLiteWizard(sql_text)
    wizard.run()
    return 0
def copyColumnNamesToClipboard(table):
    """Put a comma-separated list of *table*'s column names on the
    clipboard."""
    names = (column.name for column in table.columns)
    Workbench.copyToClipboard(', '.join(names))
    return 0
def generate_laravel5_migration(cat):
    """Build Laravel 5 migration sources for every table in catalog *cat*,
    ordered by FK dependency levels, and show them in the wizard.

    Uses the module-level ``migrations``/``migration_tables`` accumulators
    and the various ``*Template`` strings.  Returns 0 on success, 1 when
    a GenerateLaravel5MigrationError is raised.
    """

    def create_tree(table_schema):
        """Return FK-dependency levels: a list of sets of table names,
        least-dependent first (simple topological layering)."""
        tree = {}
        for tbl in sorted(table_schema.tables, key=lambda table: table.name):
            table_references = []
            for key in tbl.foreignKeys:
                # BUG FIX: test hasattr() BEFORE dereferencing
                # key.referencedColumns (the original checked it last,
                # after already accessing the attribute).
                if (key.name != '' and hasattr(key, 'referencedColumns')
                        and tbl.name != key.referencedColumns[0].owner.name):
                    table_references.append(
                        key.referencedColumns[0].owner.name)
            tree[tbl.name] = table_references

        d = dict((k, set(tree[k])) for k in tree)
        r = []
        while d:
            # values not in keys (items without dep)
            t = set(i for v in d.values() for i in v) - set(d.keys())
            # and keys without value (items without dep)
            t.update(k for k, v in d.items() if not v)
            # BUG FIX: circular references leave t empty and used to loop
            # forever; raise the module error so the caller reports it.
            if not t:
                raise GenerateLaravel5MigrationError(
                    'Error', 'Circular foreign key references detected')
            # can be done right away
            r.append(t)
            # and cleaned up
            d = dict(((k, v - t) for k, v in d.items() if v))
        return r

    def export_schema(table_schema, tree):
        """Emit migration text for each table, one dependency level at a
        time, into the global ``migrations`` dict (keyed by sequence
        number)."""
        if len(table_schema.tables) == 0:
            return
        foreign_keys = {}
        global migration_tables
        global migrations
        tables = sorted(table_schema.tables, key=lambda table: table.name)
        ti = 0
        for reference_tables in tree:
            for reference in reference_tables:
                for tbl in tables:
                    if tbl.name != reference:
                        continue
                    table_name = tbl.name
                    table_engine = tbl.tableEngine
                    components = table_name.split('_')
                    migration_tables.append(table_name)
                    migrations[ti] = []
                    migrations[ti].append(
                        migrationTemplate.format(
                            tableNameCamelCase=("".join(
                                x.title() for x in components[0:])),
                            tableName=table_name))
                    migrations[ti].append(
                        "            $table->engine = "
                        "'{tableEngine}';\n".format(
                            tableEngine=table_engine))

                    created_at = created_at_nullable \
                        = updated_at \
                        = updated_at_nullable \
                        = deleted_at \
                        = timestamps \
                        = timestamps_nullable = False
                    for col in tbl.columns:
                        if col.name == 'created_at':
                            created_at = True
                            if col.isNotNull != 1:
                                created_at_nullable = True
                        elif col.name == 'updated_at':
                            updated_at = True
                            if col.isNotNull != 1:
                                updated_at_nullable = True
                    # NOTE: a duplicated, unreachable 'elif' with the same
                    # condition and body was removed here.
                    if created_at is True and updated_at is True \
                            and created_at_nullable is True:
                        if updated_at_nullable is True:
                            timestamps_nullable = True
                    elif created_at is True and updated_at is True:
                        timestamps = True

                    primary_key = [
                        col for col in tbl.indices if col.isPrimary == 1
                    ]
                    primary_key = primary_key[0] if len(
                        primary_key) > 0 else None
                    if hasattr(primary_key, 'columns'):
                        primary_col = primary_key.columns[0].referencedColumn
                    else:
                        primary_col = None

                    default_time_values = [
                        'CURRENT_TIMESTAMP',
                        'NULL ON UPDATE CURRENT_TIMESTAMP',
                        'CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP'
                    ]

                    for col in tbl.columns:
                        # Handled collectively via timestamps()/softDeletes()
                        if (col.name == 'created_at'
                                or col.name == 'updated_at') and (
                                    timestamps is True
                                    or timestamps_nullable is True):
                            continue
                        if col.name == 'deleted_at':
                            deleted_at = True
                            continue
                        if col.simpleType:
                            col_type = col.simpleType.name
                        else:
                            col_type = col.userType.name
                        if col == primary_col:
                            if col_type == "BIGINT":
                                col_type = "BIG_INCREMENTS"
                            elif col_type == "MEDIUMINT":
                                col_type = "MEDIUM_INCREMENTS"
                            elif col_type == "CHAR" and col.length == 36:
                                col_type = "UUID"
                            else:
                                col_type = "INCREMENTS"
                        if (col_type == 'BIGINT' or col_type == 'INT'
                                or col_type == 'TINYINT'
                                or col_type == 'MEDIUMINT'
                                or col_type == 'SMALLINT') \
                                and 'UNSIGNED' in col.flags:
                            col_type = "u" + col_type
                        col_data = '\''
                        # Continue if type is not in dictionary
                        if col_type not in typesDict:
                            continue
                        if typesDict[col_type] == 'char':
                            if col.length > -1:
                                col_data = '\', %s' % (str(col.length))
                        elif typesDict[col_type] == 'decimal':
                            if col.precision > -1 and col.scale > -1:
                                col_data = '\', %s, %s' % (
                                    str(col.precision), str(col.scale))
                        elif typesDict[col_type] == 'double':
                            if col.precision > -1 and col.length > -1:
                                col_data = '\', %s, %s' % (
                                    str(col.length), str(col.precision))
                        elif typesDict[col_type] == 'enum':
                            col_data = '\', [%s]' % (
                                col.datatypeExplicitParams[1:-1])
                        elif typesDict[col_type] == 'string':
                            if col.length > -1 and col.length < 255:
                                col_data = '\', %s' % (str(col.length))
                            else:
                                col_data = '\''

                        if (col.name == 'remember_token'
                                and typesDict[col_type] == 'string'
                                and col.length == 100):
                            # BUG FIX: col.length is an int; the original
                            # compared str(col.length) == 100, which is
                            # never true, so rememberToken() never fired.
                            migrations[ti].append(
                                '            $table->rememberToken();\n')
                        elif typesDict[col_type]:
                            migrations[ti].append(
                                '            $table->%s(\'%s%s)' %
                                (typesDict[col_type], col.name, col_data))
                            if typesDict[col_type] == 'integer' \
                                    and 'UNSIGNED' in col.flags:
                                migrations[ti].append('->unsigned()')
                            if col.isNotNull != 1:
                                migrations[ti].append('->nullable()')
                            if col.defaultValue != '' \
                                    and col.defaultValueIsNull != 0:
                                migrations[ti].append('->default(null)')
                            elif col.defaultValue != '':
                                default_value = col.defaultValue.replace(
                                    "'", "")
                                if default_value in default_time_values:
                                    migrations[ti].append(
                                        "->default(DB::raw('{}'))".format(
                                            default_value))
                                else:
                                    migrations[ti].append(
                                        "->default('{}')".format(
                                            default_value))
                            if col.comment != '':
                                migrations[ti].append(
                                    "->comment('{comment}')".format(
                                        comment=col.comment))
                            migrations[ti].append(';\n')
                        if col.name == 'id' \
                                and typesDict[col_type] == 'uuid':
                            migrations[ti].append(
                                '            $table->primary(\'id\');\n')

                    # Generate indexes
                    indexes = {"primary": [], "unique": [], "index": []}
                    for col in tbl.indices:
                        for index in col.columns:
                            index_type = index.owner.indexType.lower()
                            key = index.referencedColumn.name
                            if (col != primary_col
                                    and index_type != "primary"
                                    ) and index_type != "index":
                                indexes[index_type].append(key)
                    for index_type in indexes:
                        if len(indexes[index_type]) != 0:
                            index_key_template = indexKeyTemplate.format(
                                indexType=index_type,
                                keys=", ".join([
                                    '"{}"'.format(value)
                                    for value in indexes[index_type]
                                ]),
                                tableName=table_name)
                            migrations[ti].append(index_key_template)

                    if deleted_at is True:
                        migrations[ti].append(
                            '            $table->softDeletes();\n')
                    if timestamps is True:
                        migrations[ti].append(
                            '            $table->timestamps();\n')
                    elif timestamps_nullable is True:
                        migrations[ti].append(
                            '            $table->nullableTimestamps();\n')

                    first_foreign_created = False
                    for key in tbl.foreignKeys:
                        if key.name != '' and hasattr(key.index, 'name'):
                            index_name = key.index.name
                            foreign_key = key.columns[0].name
                            if index_name == 'PRIMARY':
                                index_name = tbl.name + "_" + \
                                    key.columns[0].name
                            if key.referencedColumns[0].owner.name \
                                    in migration_tables:
                                if not first_foreign_created:
                                    migrations[ti].append('\n')
                                    first_foreign_created = True
                                migrations[ti].append(
                                    foreignKeyTemplate.format(
                                        foreignKey=foreign_key,
                                        foreignKeyName=index_name,
                                        tableKeyName=key.referencedColumns[
                                            0].name,
                                        foreignTableName=key.
                                        referencedColumns[0].owner.name,
                                        onDeleteAction=key.deleteRule.lower(),
                                        onUpdateAction=key.updateRule.lower()))
                            else:
                                # FK points at a table not yet migrated;
                                # defer to a follow-up Schema::table block.
                                if key.referencedColumns[0].owner.name \
                                        not in foreign_keys:
                                    foreign_keys[key.referencedColumns[
                                        0].owner.name] = []
                                foreign_keys[key.referencedColumns[
                                    0].owner.name].append({
                                        'table':
                                        key.columns[0].owner.name,
                                        'key': foreign_key,
                                        'name': index_name,
                                        'referenced_table':
                                        key.referencedColumns[0].owner.name,
                                        'referenced_name':
                                        key.referencedColumns[0].name,
                                        'update_rule': key.updateRule,
                                        'delete_rule': key.deleteRule
                                    })

                    migrations[ti].append("        });\n")

                    for key, val in foreign_keys.iteritems():
                        if key == tbl.name:
                            keyed_tables = []
                            schema_table = 0
                            for item in val:
                                if item['table'] not in keyed_tables:
                                    keyed_tables.append(item['table'])
                                    foreign_table_name = item['table']
                                    if schema_table == 0:
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(
                                                tableName=item['table']))
                                        schema_table = 1
                                    elif foreign_table_name != item['table']:
                                        migrations[ti].append("        });\n")
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(
                                                tableName=item['table']))
                                migrations[ti].append(
                                    foreignKeyTemplate.format(
                                        foreignKey=item['key'],
                                        foreignKeyName=item['name'],
                                        tableKeyName=item['referenced_name'],
                                        foreignTableName=item[
                                            'referenced_table'],
                                        onDeleteAction=item[
                                            'delete_rule'].lower(),
                                        onUpdateAction=item[
                                            'update_rule'].lower()))
                            if schema_table == 1:
                                migrations[ti].append("        });\n")
                                migrations[ti].append('\n')

                    migrations[ti].append('    }\n')
                    ##########
                    # Reverse
                    ##########
                    migrations[ti].append(migrationDownTemplate)
                    migrations[ti].append(
                        migrationEndingTemplate.format(tableName=table_name))
                    ti += 1
        return migrations

    out = cStringIO.StringIO()
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            table_tree = create_tree(schema[0])
            migrations = export_schema(schema[0], table_tree)
    except GenerateLaravel5MigrationError as e:
        Workbench.confirm(e.typ, e.message)
        return 1

    now = datetime.datetime.now()
    for name in sorted(migrations):
        save_format = (
            '{year}_{month}_{day}_{number}_create_{tableName}_table.php'
        ).format(
            year=now.strftime('%Y'),
            month=now.strftime('%m'),
            day=now.strftime('%d'),
            number="".zfill(6),
            tableName=migration_tables[name])
        out.write('Table name: {0} Migration File: {1}\n\n'.format(
            migration_tables[name], save_format))
        out.write(''.join(migrations[name]))
        # (cleanup: the original called '\n\n\n'.format(name), a no-op)
        out.write('\n\n\n')

    sql_text = out.getvalue()
    out.close()

    wizard = GenerateLaravel5MigrationWizard(sql_text)
    wizard.run()
    return 0
def generateLaravel5Migration(cat):
    """Generate a Laravel 5 migration class per table in catalog *cat*
    and show the combined text in the wizard.

    Returns 0 on success, 1 when a GenerateLaravel5MigrationError is
    raised during export.
    """

    def export_schema(out, schema, is_main_schema):
        """Append migration text for each table of *schema* into the
        global ``migrations`` dict (keyed by table name)."""
        if len(schema.tables) == 0:
            return
        foreign_keys = {}
        migration_tables = []
        global migrations
        for tbl in schema.tables:
            migration_tables.append(tbl.name)
            migrations[tbl.name] = []
            migrations[tbl.name].append('<?php\n')
            migrations[tbl.name].append('\n')
            migrations[tbl.name].append(
                'use Illuminate\Database\Schema\Blueprint;\n')
            migrations[tbl.name].append(
                'use Illuminate\Database\Migrations\Migration;\n')
            migrations[tbl.name].append('\n')
            components = tbl.name.split('_')
            migrations[tbl.name].append(
                'class Create%sTable extends Migration\n' % (
                    "".join(x.title() for x in components[0:])))
            migrations[tbl.name].append('{\n')
            migrations[tbl.name].append('    /**\n')
            migrations[tbl.name].append('     * Run the migrations.\n')
            migrations[tbl.name].append('     *\n')
            migrations[tbl.name].append('     * @return void\n')
            migrations[tbl.name].append('     */\n')
            migrations[tbl.name].append('    public function up()\n')
            migrations[tbl.name].append('    {\n')
            migrations[tbl.name].append(
                '        Schema::create(\'%s\', '
                'function (Blueprint $table) {\n' % (tbl.name))

            # PERF FIX: the primary key is per-table, not per-column; the
            # original recomputed it inside the column loop.
            primary_key = [i for i in tbl.indices if i.isPrimary == 1]
            primary_key = primary_key[0] if len(primary_key) > 0 else None
            pk_column = None
            if primary_key and len(primary_key.columns) == 1:
                pk_column = primary_key.columns[0].referencedColumn

            for col in tbl.columns:
                if col.simpleType:
                    col_type = col.simpleType.name
                else:
                    col_type = col.userType.name
                if col == pk_column:
                    if col_type == 'BIGINT':
                        col_type = 'BIGINCREMENTS'
                    else:
                        col_type = 'INCREMENTS'
                col_data = '\''
                # ROBUSTNESS FIX: skip types missing from typesDict instead
                # of raising KeyError (matches generate_laravel5_migration).
                if col_type not in typesDict:
                    continue
                if typesDict[col_type] == 'char':
                    if col.length > -1:
                        col_data = '\', %s' % (str(col.length))
                elif typesDict[col_type] == 'decimal':
                    if col.precision > -1 and col.scale > -1:
                        col_data = '\', %s, %s' % (
                            str(col.precision), str(col.scale))
                elif typesDict[col_type] == 'double':
                    if col.precision > -1 and col.length > -1:
                        col_data = '\', %s, %s' % (
                            str(col.length), str(col.precision))
                elif typesDict[col_type] == 'enum':
                    col_data = '\', [%s]' % (
                        col.datatypeExplicitParams[1:-1])
                elif typesDict[col_type] == 'string':
                    if col.length > -1:
                        col_data = '\', %s' % (str(col.length))

                if (typesDict[col_type]):
                    migrations[tbl.name].append(
                        '            $table->%s(\'%s%s)' % (
                            typesDict[col_type], col.name, col_data))
                    if typesDict[col_type] == 'integer' \
                            and 'UNSIGNED' in col.flags:
                        migrations[tbl.name].append('->unsigned()')
                    if col.isNotNull != 1:
                        migrations[tbl.name].append('->nullable()')
                    if col.defaultValue != '' \
                            and col.defaultValueIsNull != 0:
                        migrations[tbl.name].append('->default(NULL)')
                    elif col.defaultValue != '':
                        migrations[tbl.name].append(
                            '->default(%s)' % (col.defaultValue))
                    if col.comment != '':
                        migrations[tbl.name].append(
                            '->comment(\'%s\')' % (col.comment))
                    migrations[tbl.name].append(";")
                    migrations[tbl.name].append('\n')

            first_foreign_created = 0
            for fkey in tbl.foreignKeys:
                if fkey.name != '':
                    if fkey.referencedColumns[0].owner.name \
                            in migration_tables:
                        if first_foreign_created == 0:
                            migrations[tbl.name].append('\n')
                            first_foreign_created = 1
                        migrations[tbl.name].append(
                            '            $table->foreign(\'%s\')'
                            '->references(\'%s\')->on(\'%s\')'
                            '->onDelete(\'%s\')->onUpdate(\'%s\');'
                            % (fkey.columns[0].name,
                               fkey.referencedColumns[0].name,
                               fkey.referencedColumns[0].owner.name,
                               fkey.deleteRule.lower(),
                               fkey.updateRule.lower()))
                        migrations[tbl.name].append('\n')
                    else:
                        # FK target not migrated yet; emit it later in a
                        # Schema::table block of the target's migration.
                        if fkey.referencedColumns[0].owner.name \
                                not in foreign_keys:
                            foreign_keys[
                                fkey.referencedColumns[0].owner.name] = []
                        foreign_keys[
                            fkey.referencedColumns[0].owner.name].append({
                                'table': fkey.columns[0].owner.name,
                                'name': fkey.columns[0].name,
                                'referenced_table':
                                fkey.referencedColumns[0].owner.name,
                                'referenced_name':
                                fkey.referencedColumns[0].name,
                                'update_rule': fkey.updateRule,
                                'delete_rule': fkey.deleteRule
                            })

            migrations[tbl.name].append("        });\n")

            for fkey, fval in foreign_keys.iteritems():
                if fkey == tbl.name:
                    keyed_tables = []
                    schema_table = 0
                    for item in fval:
                        if item['table'] not in keyed_tables:
                            keyed_tables.append(item['table'])
                            if schema_table == 0:
                                migrations[tbl.name].append('\n')
                                migrations[tbl.name].append(
                                    '        Schema::table(\'%s\', '
                                    'function (Blueprint $table) {\n'
                                    % (item['table']))
                                schema_table = 1
                        migrations[tbl.name].append(
                            '            $table->foreign(\'%s\')'
                            '->references(\'%s\')->on(\'%s\')'
                            '->onDelete(\'%s\')->onUpdate(\'%s\');\n'
                            % (item['name'], item['referenced_name'],
                               item['referenced_table'],
                               item['delete_rule'].lower(),
                               item['update_rule'].lower()))
                    if schema_table == 1:
                        migrations[tbl.name].append("        });\n")
                        migrations[tbl.name].append('\n')

            migrations[tbl.name].append('    }\n')
            migrations[tbl.name].append('\n')
            migrations[tbl.name].append('    /**\n')
            migrations[tbl.name].append('     * Reverse the migrations.\n')
            migrations[tbl.name].append('     *\n')
            migrations[tbl.name].append('     * @return void\n')
            migrations[tbl.name].append('     */\n')
            migrations[tbl.name].append('    public function down()\n')
            migrations[tbl.name].append('    {\n')
            first_foreign_created = 0
            for fkey in tbl.foreignKeys:
                if fkey.name != '':
                    if fkey.referencedColumns[0].owner.name \
                            in migration_tables:
                        if first_foreign_created == 0:
                            migrations[tbl.name].append(
                                '        Schema::table(\'%s\', '
                                'function (Blueprint $table) {\n'
                                % (tbl.name))
                            first_foreign_created = 1
                        migrations[tbl.name].append(
                            '            $table->dropForeign([\'%s\']);\n'
                            % (fkey.columns[0].name))
            if first_foreign_created == 1:
                migrations[tbl.name].append("        });\n")
                migrations[tbl.name].append('\n')
            for fkey, fval in foreign_keys.iteritems():
                if fkey == tbl.name:
                    keyed_tables = []
                    schema_table = 0
                    for item in fval:
                        if item['table'] not in keyed_tables:
                            keyed_tables.append(item['table'])
                            if schema_table == 0:
                                migrations[tbl.name].append(
                                    '        Schema::table(\'%s\', '
                                    'function (Blueprint $table) {\n'
                                    % (item['table']))
                                schema_table = 1
                        # BUG FIX: the generated PHP was missing the
                        # closing ']' in dropForeign(['...']);.
                        migrations[tbl.name].append(
                            '            $table->dropForeign([\'%s\']);\n'
                            % (item['name']))
                    if schema_table == 1:
                        migrations[tbl.name].append("        });\n")
                        migrations[tbl.name].append('\n')
            migrations[tbl.name].append(
                '        Schema::drop(\'%s\');\n' % (tbl.name))
            migrations[tbl.name].append('    }\n')
            migrations[tbl.name].append('}')
        return migrations

    out = cStringIO.StringIO()
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            migrations = export_schema(out, schema[0], schema[1])
    except GenerateLaravel5MigrationError as e:
        Workbench.confirm(e.typ, e.message)
        return 1

    for mkey in sorted(migrations):
        out.write(''.join(migrations[mkey]))
        out.write('\n\n\n')

    sql_text = out.getvalue()
    out.close()

    wizard = GenerateLaravel5MigrationWizard(sql_text)
    wizard.run()
    return 0
def exportSQLite(cat):
    """Export the given catalog as a SQLite-compatible SQL script.

    Validates name uniqueness first, then writes CREATE TABLE / CREATE
    INDEX / INSERT statements for every schema into an in-memory buffer
    and hands the resulting SQL text to the ExportSQLiteWizard.

    Returns 0 on success, 1 when validation fails or an export error
    was reported to the user.
    """

    def validate_for_sqlite_export(cat):
        """Check uniqueness of schema, table, column and index names.

        Returns True when everything is unique (export may proceed),
        False when errors were found or the user chose to abort.
        """
        have_errors = False
        idt = {}
        for i, schema in enumerate(cat.schemata):
            if schema.name in idt:
                have_errors = True
                if Workbench.confirm(
                        'Name conflict',
                        'Schemas %d and %d have the same name "%s".'
                        ' Please rename one of them.\n'
                        'Search for more such errors?' % (
                            idt[schema.name], i, schema.name)) == 0:
                    return False
            else:
                idt[schema.name] = i
        # Do not continue looking for errors on schema name error
        if have_errors:
            return False
        for schema in cat.schemata:
            idt = {}
            for i, tbl in enumerate(schema.tables):
                if tbl.name == '':
                    have_errors = True
                    if Workbench.confirm(
                            'Name conflict',
                            'Table %d in schema "%s". has no name.'
                            ' Please rename.\n'
                            'Search for more such errors?' % (
                                i, schema.name)) == 0:
                        return False
                if tbl.name in idt:
                    have_errors = True
                    if Workbench.confirm(
                            'Name conflict',
                            'Tables %d and %d in schema "%s"'
                            ' have the same name "%s".'
                            ' Please rename one of them.\n'
                            'Search for more such errors?' % (
                                idt[tbl.name], i, schema.name,
                                tbl.name)) == 0:
                        return False
                else:
                    idt[tbl.name] = i
        if have_errors:
            return False
        for schema in cat.schemata:
            for tbl in schema.tables:
                idt = {}
                for i, column in enumerate(tbl.columns):
                    if column.name == '':
                        have_errors = True
                        if Workbench.confirm(
                                'Name conflict',
                                'Column %d in table "%s"."%s". has no name.'
                                ' Please rename.\n'
                                'Search for more such errors?' % (
                                    i, schema.name, tbl.name)) == 0:
                            return False
                    if column.name in idt:
                        have_errors = True
                        if Workbench.confirm(
                                'Name conflict',
                                'Columns %d and %d in table "%s"."%s"'
                                ' have the same name "%s".'
                                ' Please rename one of them.\n'
                                'Search for more such errors?' % (
                                    idt[column.name], i, schema.name,
                                    tbl.name, column.name)) == 0:
                            return False
                    else:
                        idt[column.name] = i
                # Now check indices (except primary/unique)
                idt = {}
                for i, index in enumerate(tbl.indices):
                    if index.indexType == 'INDEX':
                        if index.name == '':
                            have_errors = True
                            if Workbench.confirm(
                                    'Name conflict',
                                    'Index %d in table "%s"."%s". has no name.'
                                    ' Please rename.\n'
                                    'Search for more such errors?' % (
                                        i, schema.name, tbl.name)) == 0:
                                return False
                        if index.name in idt:
                            have_errors = True
                            # BUG FIX: used to report column.name (a stale
                            # loop variable) instead of the duplicate
                            # index name.
                            if Workbench.confirm(
                                    'Name conflict',
                                    'Indices %d and %d in table "%s"."%s"'
                                    ' have the same name "%s".'
                                    ' Please rename one of them.\n'
                                    'Search for more such errors?' % (
                                        idt[index.name], i, schema.name,
                                        tbl.name, index.name)) == 0:
                                return False
                        else:
                            idt[index.name] = i
        if have_errors:
            return False
        return True

    def is_deferred(fkey):
        # Hack: if comment starts with "Defer..." we make it a deferred FK
        # could use member 'deferability' (WB has it), but there is no GUI
        # for it
        return fkey.comment.lstrip().lower()[0:5] == 'defer'

    def export_table(out, db_name, schema, tbl):
        """Write CREATE TABLE, CREATE INDEX and INSERT statements for tbl."""
        if len(tbl.columns) == 0:
            return
        out.write('CREATE TABLE %s%s(\n%s' % (
            db_name, dq(tbl.name), schema_comment_format(tbl.comment)))
        primary_key = [i for i in tbl.indices if i.isPrimary == 1]
        primary_key = primary_key[0] if len(primary_key) > 0 else None
        pk_column = None
        if primary_key and len(primary_key.columns) == 1:
            pk_column = primary_key.columns[0].referencedColumn
        col_comment = ''
        for i, column in enumerate(tbl.columns):
            check, sqlite_type, flags = '', None, None
            if column.simpleType:
                sqlite_type = column.simpleType.name
                flags = column.simpleType.flags
            else:
                sqlite_type = column.userType.name
                flags = column.flags
            length = column.length
            # For INTEGER PRIMARY KEY column to become an alias for the
            # rowid the type needs to be "INTEGER" not "INT"
            # we fix it for other columns as well
            if 'INT' in sqlite_type or sqlite_type == 'LONG':
                sqlite_type = 'INTEGER'
                length = -1
                # Check flags for "unsigned".  NOTE(review): this reads
                # column.flags, not the local `flags` computed above —
                # kept as-is to preserve behavior.
                if 'UNSIGNED' in column.flags:
                    check = dq(column.name) + '>=0'
            # We even implement ENUM (because we can)
            if sqlite_type == 'ENUM':
                sqlite_type = 'TEXT'
                if column.datatypeExplicitParams:
                    check = (dq(column.name) + ' IN' +
                             column.datatypeExplicitParams)
            if i > 0:
                out.write(',' + comment_format(col_comment) + '\n')
            out.write(' ' + dq(column.name))
            # Type is optional in SQLite
            if sqlite_type != '':
                out.write(' ' + sqlite_type)
            # For [VAR]CHAR and such types specify length even though this
            # is not used in SQLite
            if length > 0:
                out.write('(%d)' % length)
            # Must specify single-column PKs as column-constraints for
            # AI/rowid behaviour
            if column == pk_column:
                out.write(' PRIMARY KEY')
                if primary_key.columns[0].descend == 1:
                    out.write(' DESC')
                # Only PK columns can be AI in SQLite
                if column.autoIncrement == 1:
                    out.write(' AUTOINCREMENT')
            if column.isNotNull == 1:
                out.write(' NOT NULL')
            if check != '':
                out.write(' CHECK(' + check + ')')
            if column.defaultValue != '':
                out.write(' DEFAULT ' + column.defaultValue)
            col_comment = column.comment
        # For multicolumn PKs
        if primary_key and not pk_column:
            out.write(',%s\n PRIMARY KEY(%s)' % (
                comment_format(col_comment),
                print_index_columns(primary_key)))
            col_comment = ''
        # Put non-primary, UNIQUE Keys in CREATE TABLE as well
        # (because we can)
        for index in tbl.indices:
            if index != primary_key and index.indexType == 'UNIQUE':
                out.write(',%s\n' % comment_format(col_comment))
                col_comment = ''
                if index.name != '':
                    out.write(' CONSTRAINT %s\n ' % dq(index.name))
                out.write(' UNIQUE(%s)' % print_index_columns(index))
        for fkey in tbl.foreignKeys:
            out.write(',%s\n' % comment_format(col_comment))
            col_comment = ''
            if fkey.name != '':
                out.write(' CONSTRAINT %s\n ' % dq(fkey.name))
            out.write(' FOREIGN KEY(%s)\n' % print_fk_columns(fkey.columns))
            out.write(' REFERENCES %s(%s)' % (
                dq(fkey.referencedTable.name),
                print_fk_columns(fkey.referencedColumns)))
            if fkey.deleteRule in ['RESTRICT', 'CASCADE', 'SET NULL']:
                out.write('\n ON DELETE ' + fkey.deleteRule)
            if fkey.updateRule in ['RESTRICT', 'CASCADE', 'SET NULL']:
                out.write('\n ON UPDATE ' + fkey.updateRule)
            if is_deferred(fkey):
                out.write(' DEFERRABLE INITIALLY DEFERRED')
        out.write(comment_format(col_comment) + '\n);\n')
        # CREATE INDEX statements for all non-primary, non-unique,
        # non-foreign indexes
        for i, index in enumerate(tbl.indices):
            if index.indexType == 'INDEX':
                index_name = tbl.name + '.' + index.name
                if index.name == '':
                    # BUG FIX: `+ i` concatenated str and int (TypeError
                    # for unnamed indexes); the position must be stringified.
                    index_name = tbl.name + '.index' + str(i)
                out.write('CREATE INDEX %s%s ON %s (%s);\n' % (
                    db_name, dq(index_name), dq(tbl.name),
                    print_index_columns(index)))
        # Write the INSERTS (currently always)
        for insert in tbl.inserts().splitlines():
            columns_values = ''
            insert_start = 'insert into `%s`.`%s` (' % (schema.name,
                                                        tbl.name)
            if insert[0:len(insert_start)].lower() == insert_start.lower():
                columns_values = insert[len(insert_start):]
            else:
                raise ExportSQLiteError(
                    'Error', 'Unrecognized command in insert')
            # Consume the backquoted column list, remembering how many
            # columns the INSERT actually names.
            last_column = 0
            for i, column in enumerate(tbl.columns):
                column_name = '`' + column.name + '`'
                if columns_values[0:len(column_name)] == column_name:
                    columns_values = columns_values[len(column_name):]
                    if columns_values[0:1] == ')':
                        columns_values = columns_values[1:]
                        last_column = i
                        break
                    else:
                        if columns_values[0:2] == ', ':
                            columns_values = columns_values[2:]
                        else:
                            raise ExportSQLiteError(
                                'Error',
                                'Unrecognized character in column list')
                else:
                    raise ExportSQLiteError(
                        'Error', 'Unrecognized column in inserts')
            out.write('INSERT OR REPLACE INTO %s(' % dq(tbl.name))
            for i in range(last_column + 1):
                if i > 0:
                    out.write(',')
                out.write(dq(tbl.columns[i].name))
            if columns_values[0:9].lower() != ' values (':
                raise ExportSQLiteError(
                    'Error', 'Unrecognized SQL in insert')
            columns_values = columns_values[9:]
            out.write(') VALUES(')
            # MySQL escapes quotes with \'; SQLite doubles them.
            out.write(columns_values.replace("\\'", "''"))
            out.write('\n')

    def order_tables(out, db_name, schema, unordered, respect_deferredness):
        """Emit every table whose FK targets are already written; repeat
        until no progress is made.  Mutates `unordered` in place."""
        have_ordered = False
        while not have_ordered:
            if len(unordered) == 0:
                have_ordered = True
            # BUG FIX: iterate a snapshot — the loop deletes from
            # `unordered`, which breaks iteration over a live view.
            for tbl in list(unordered.values()):
                has_forward_reference = False
                for fkey in tbl.foreignKeys:
                    if (fkey.referencedTable.name in unordered and
                            fkey.referencedTable.name != tbl.name and
                            not (respect_deferredness and
                                 is_deferred(fkey))):
                        has_forward_reference = True
                        break
                if not has_forward_reference:
                    export_table(out, db_name, schema, tbl)
                    del unordered[tbl.name]
                    have_ordered = True

    def export_schema(out, schema, is_main_schema):
        if len(schema.tables) == 0:
            return
        out.write('\n-- Schema: %s\n' % schema.name)
        out.write(schema_comment_format(schema.comment))
        db_name = ''
        if not is_main_schema:
            # Non-main schemas become separate ATTACHed database files.
            db_name = dq(schema.name) + '.'
            out.write('ATTACH "%s" AS %s;\n' % (
                safe_file_name(schema.name + '.sdb'), dq(schema.name)))
        out.write('BEGIN;\n')
        # Find a valid table order for inserts from FK constraints
        unordered = {t.name: t for t in schema.tables}
        # Try treating deferred keys like non-deferred keys first for
        # ordering
        order_tables(out, db_name, schema, unordered, False)
        # Now try harder (leave out deferred keys from determining an order)
        order_tables(out, db_name, schema, unordered, True)
        # Loop through all remaining tables, if any. Have circular FK refs.
        # How to handle?
        for tbl in list(unordered.values()):
            export_table(out, db_name, schema, tbl)
        out.write('COMMIT;\n')

    def print_index_columns(index):
        s = ''
        for i, column in enumerate(index.columns):
            if i > 0:
                s += ','
            s += dq(column.referencedColumn.name)
            if column.descend == 1:
                s += ' DESC'
        return s

    def print_fk_columns(columns):
        s = ''
        for i, column in enumerate(columns):
            if i > 0:
                s += ','
            s += dq(column.name)
        return s

    def dq(ident):
        """Double quote identifer, replacing " by "" """
        return '"' + re.sub(r'"', '""', ident) + '"'

    def safe_file_name(ident):
        """Create safe filename from identifer"""
        def repl(match):
            # BUG FIX: re.sub passes a Match object and expects a string
            # back; the old code called bytearray() on the match and
            # returned a list.  Percent-encode each byte of the matched
            # character instead.
            return ''.join('%%%02x' % b
                           for b in bytearray(match.group(0), 'ascii'))
        return re.sub(r'[/\:*?"<>|%]', repl, ident)

    def info_format(header, body):
        """Format a info field as SQL comment"""
        body = body.strip()
        if body == '':
            return ''
        elif '\n' in body:
            # Multiline comment
            return '-- %s:\n-- %s\n' % (
                header, re.sub(r'\n', '\n-- ', body))
        else:
            # Single line
            return '-- %-14s %s\n' % (header + ':', body)

    def schema_comment_format(body):
        """Format a schema or table comment as SQL comment
        table comments to be stored in SQLite schema
        """
        body = body.strip()
        if body == '':
            return ''
        else:
            # Multiline comment
            return '-- %s\n' % re.sub(r'\n', '\n-- ', body)

    def comment_format(body):
        body = body.strip()
        if body == '':
            return ''
        elif '\n' in body:
            # Multiline comment
            return '\n-- %s' % re.sub(r'\n', '\n-- ', body)
        else:
            # Single line
            return '-- %s' % body

    if not validate_for_sqlite_export(cat):
        return 1
    out = StringIO.StringIO()
    out.write(info_format(
        'Creator',
        'MySQL Workbench %d.%d.%d/ExportSQLite Plugin %s\n' % (
            grt.root.wb.info.version.majorNumber,
            grt.root.wb.info.version.minorNumber,
            grt.root.wb.info.version.releaseNumber,
            ModuleInfo.version)))
    out.write(info_format('Author', grt.root.wb.doc.info.author))
    out.write(info_format('Caption', grt.root.wb.doc.info.caption))
    out.write(info_format('Project', grt.root.wb.doc.info.project))
    out.write(info_format('Changed', grt.root.wb.doc.info.dateChanged))
    out.write(info_format('Created', grt.root.wb.doc.info.dateCreated))
    out.write(info_format('Description', grt.root.wb.doc.info.description))
    out.write('PRAGMA foreign_keys = OFF;\n')
    # Schemas named "main" go into the main database file; everything
    # else is ATTACHed under its own name by export_schema().
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            export_schema(out, schema[0], schema[1])
    except ExportSQLiteError as e:
        Workbench.confirm(e.typ, e.message)
        return 1
    sql_text = out.getvalue()
    out.close()
    wizard = ExportSQLiteWizard(sql_text)
    wizard.run()
    return 0
def validate_for_sqlite_export(cat):
    """Check uniqueness of schema, table, column and index names.

    Walks the whole catalog; on every missing or duplicate name the user
    is asked (via Workbench.confirm) whether to keep searching.

    Returns True when all names are unique (export may proceed), False
    when errors were found or the user chose to stop searching.
    """
    have_errors = False

    # Pass 1: schema names must be unique within the catalog.
    idt = {}
    for i, schema in enumerate(cat.schemata):
        if schema.name in idt:
            have_errors = True
            if Workbench.confirm(
                    'Name conflict',
                    'Schemas %d and %d have the same name "%s".'
                    ' Please rename one of them.\n'
                    'Search for more such errors?' % (
                        idt[schema.name], i, schema.name)) == 0:
                return False
        else:
            idt[schema.name] = i
    # Do not continue looking for errors on schema name error
    if have_errors:
        return False

    # Pass 2: table names must be present and unique within each schema.
    for schema in cat.schemata:
        idt = {}
        for i, tbl in enumerate(schema.tables):
            if tbl.name == '':
                have_errors = True
                if Workbench.confirm(
                        'Name conflict',
                        'Table %d in schema "%s". has no name.'
                        ' Please rename.\n'
                        'Search for more such errors?' % (
                            i, schema.name)) == 0:
                    return False
            if tbl.name in idt:
                have_errors = True
                if Workbench.confirm(
                        'Name conflict',
                        'Tables %d and %d in schema "%s"'
                        ' have the same name "%s".'
                        ' Please rename one of them.\n'
                        'Search for more such errors?' % (
                            idt[tbl.name], i, schema.name, tbl.name)) == 0:
                    return False
            else:
                idt[tbl.name] = i
    if have_errors:
        return False

    # Pass 3: per table, column names and plain INDEX names must be
    # present and unique.
    for schema in cat.schemata:
        for tbl in schema.tables:
            idt = {}
            for i, column in enumerate(tbl.columns):
                if column.name == '':
                    have_errors = True
                    if Workbench.confirm(
                            'Name conflict',
                            'Column %d in table "%s"."%s". has no name.'
                            ' Please rename.\n'
                            'Search for more such errors?' % (
                                i, schema.name, tbl.name)) == 0:
                        return False
                if column.name in idt:
                    have_errors = True
                    if Workbench.confirm(
                            'Name conflict',
                            'Columns %d and %d in table "%s"."%s"'
                            ' have the same name "%s".'
                            ' Please rename one of them.\n'
                            'Search for more such errors?' % (
                                idt[column.name], i, schema.name,
                                tbl.name, column.name)) == 0:
                        return False
                else:
                    idt[column.name] = i
            # Now check indices (except primary/unique)
            idt = {}
            for i, index in enumerate(tbl.indices):
                if index.indexType == 'INDEX':
                    if index.name == '':
                        have_errors = True
                        if Workbench.confirm(
                                'Name conflict',
                                'Index %d in table "%s"."%s". has no name.'
                                ' Please rename.\n'
                                'Search for more such errors?' % (
                                    i, schema.name, tbl.name)) == 0:
                            return False
                    if index.name in idt:
                        have_errors = True
                        # BUG FIX: the message used to interpolate
                        # column.name — a stale loop variable (unbound if
                        # the table has no columns) — instead of the
                        # conflicting index name.
                        if Workbench.confirm(
                                'Name conflict',
                                'Indices %d and %d in table "%s"."%s"'
                                ' have the same name "%s".'
                                ' Please rename one of them.\n'
                                'Search for more such errors?' % (
                                    idt[index.name], i, schema.name,
                                    tbl.name, index.name)) == 0:
                            return False
                    else:
                        idt[index.name] = i
    if have_errors:
        return False
    return True
def generateLaravel5Migration(cat):
    """Generate Laravel 5 migration classes for every table in the catalog
    and present the combined text in the migration wizard.

    Tables are emitted in dependency order (FK targets first); foreign
    keys pointing at not-yet-created tables are collected and emitted in
    a trailing Schema::table() block.

    Returns 0 on success, 1 when generation failed.
    """

    def create_tree(out, schema, is_main_schema):
        """Topologically sort tables by FK references.

        Returns a list of sets; each set contains table names whose
        dependencies are satisfied by earlier sets.
        """
        table_tree = {}
        for tbl in sorted(schema.tables, key=lambda table: table.name):
            table_references = []
            for fkey in tbl.foreignKeys:
                if fkey.name != '':
                    table_references.append(
                        fkey.referencedColumns[0].owner.name)
            table_tree[tbl.name] = table_references
        d = dict((k, set(table_tree[k])) for k in table_tree)
        r = []
        while d:
            # values not in keys (items without dep)
            t = set(i for v in d.values() for i in v) - set(d.keys())
            # and keys without value (items without dep)
            t.update(k for k, v in d.items() if not v)
            # can be done right away
            r.append(t)
            # and cleaned up
            d = dict(((k, v - t) for k, v in d.items() if v))
        return r

    def export_schema(out, schema, is_main_schema, table_tree):
        if len(schema.tables) == 0:
            return
        foreign_keys = {}
        global migration_tables
        global migrations
        tables = sorted(schema.tables, key=lambda table: table.name)
        ti = 0
        for reference_tables in table_tree:
            for reference in reference_tables:
                for tbl in tables:
                    if tbl.name != reference:
                        continue
                    table_name = tbl.name
                    components = table_name.split('_')
                    migration_tables.append(table_name)
                    migrations[ti] = []
                    migrations[ti].append(
                        migrationBegginingTemplate.format(
                            tableNameCamelCase=("".join(
                                x.title() for x in components[0:])),
                            tableName=table_name))
                    # Detect Laravel's created_at/updated_at pair so we can
                    # emit timestamps()/nullableTimestamps() instead of raw
                    # columns.
                    created_at = created_at_nullable \
                        = updated_at \
                        = updated_at_nullable \
                        = deleted_at \
                        = timestamps \
                        = timestamps_nullable = False
                    for col in tbl.columns:
                        if col.name == 'created_at':
                            created_at = True
                            if col.isNotNull != 1:
                                created_at_nullable = True
                        elif col.name == 'updated_at':
                            updated_at = True
                            if col.isNotNull != 1:
                                updated_at_nullable = True
                    if created_at is True and updated_at is True \
                            and created_at_nullable is True \
                            and updated_at_nullable is True:
                        timestamps_nullable = True
                    elif created_at is True and updated_at is True:
                        timestamps = True
                    pk_column = None
                    for col in tbl.columns:
                        if (col.name == 'created_at' or
                                col.name == 'updated_at') and (
                                    timestamps is True or
                                    timestamps_nullable is True):
                            continue
                        if col.name == 'deleted_at':
                            deleted_at = True
                            continue
                        if col.simpleType:
                            col_type = col.simpleType.name
                        else:
                            col_type = col.userType.name
                        primary_key = [
                            i for i in tbl.indices if i.isPrimary == 1
                        ]
                        primary_key = primary_key[0] if len(
                            primary_key) > 0 else None
                        if primary_key and len(primary_key.columns) == 1:
                            pk_column = \
                                primary_key.columns[0].referencedColumn
                        if col == pk_column:
                            if col_type == 'BIGINT':
                                col_type = 'BIGINCREMENTS'
                            else:
                                col_type = 'INCREMENTS'
                        # Extra arguments for the column builder call.
                        col_data = '\''
                        if typesDict[col_type] == 'char':
                            if col.length > -1:
                                col_data = '\', %s' % (str(col.length))
                        elif typesDict[col_type] == 'decimal':
                            if col.precision > -1 and col.scale > -1:
                                col_data = '\', %s, %s' % (
                                    str(col.precision), str(col.scale))
                        elif typesDict[col_type] == 'double':
                            if col.precision > -1 and col.length > -1:
                                col_data = '\', %s, %s' % (
                                    str(col.length), str(col.precision))
                        elif typesDict[col_type] == 'enum':
                            col_data = '\', [%s]' % (
                                col.datatypeExplicitParams[1:-1])
                        elif typesDict[col_type] == 'string':
                            if col.length > -1:
                                col_data = '\', %s' % (str(col.length))
                        # BUG FIX: was `str(col.length) == 100`, comparing a
                        # string to an int — always False, so
                        # rememberToken() was never emitted.
                        if col.name == 'remember_token' and typesDict[
                                col_type] == 'string' and col.length == 100:
                            migrations[ti].append(
                                ' $table->rememberToken();\n')
                        elif typesDict[col_type]:
                            migrations[ti].append(
                                ' $table->%s(\'%s%s)' %
                                (typesDict[col_type], col.name, col_data))
                            if typesDict[col_type] == 'integer' \
                                    and 'UNSIGNED' in col.flags:
                                migrations[ti].append('->unsigned()')
                            if col.isNotNull != 1:
                                migrations[ti].append('->nullable()')
                            if col.defaultValue != '' \
                                    and col.defaultValueIsNull != 0:
                                migrations[ti].append('->default(NULL)')
                            elif col.defaultValue != '':
                                migrations[ti].append(
                                    '->default(%s)' % col.defaultValue)
                            if col.comment != '':
                                migrations[ti].append(
                                    '->comment(\'%s\')' % col.comment)
                            migrations[ti].append(';\n')
                    if tbl.indices:
                        migrations[ti].append(" # Indexes\n")
                        for column in tbl.indices:
                            for index in column.columns:
                                index_type = index.owner.indexType.lower()
                                key = index.referencedColumn.name
                                # Do not add index for increments
                                if not column.isPrimary:
                                    index_key_template = \
                                        " $table->{indexType}('{key}');\n".format(
                                            indexType=index_type, key=key)
                                    migrations[ti].append(
                                        index_key_template)
                    if deleted_at is True:
                        migrations[ti].append(' $table->softDeletes();\n')
                    if timestamps is True:
                        migrations[ti].append(' $table->timestamps();\n')
                    elif timestamps_nullable is True:
                        migrations[ti].append(
                            ' $table->nullableTimestamps();\n')
                    first_foreign_created = False
                    for fkey in tbl.foreignKeys:
                        if fkey.name != '':
                            #mforms.Utilities.set_clipboard_text(fkey.name)
                            #index_name = fkey.index.name
                            index_name = fkey.index
                            foreign_key = fkey.columns[0].name
                            if index_name == 'PRIMARY':
                                index_name = tbl.name + "_" + \
                                    fkey.columns[0].name
                            if fkey.referencedColumns[
                                    0].owner.name in migration_tables:
                                if not first_foreign_created:
                                    migrations[ti].append('\n')
                                    first_foreign_created = True
                                migrations[ti].append(
                                    foreignKeyTemplate.format(
                                        foreignKey=foreign_key,
                                        tableKeyName=fkey.
                                        referencedColumns[0].name,
                                        foreignTableName=fkey.
                                        referencedColumns[0].owner.name,
                                        onDeleteAction=fkey.deleteRule.
                                        lower(),
                                        onUpdateAction=fkey.updateRule.
                                        lower()))
                            else:
                                # Referenced table not created yet —
                                # remember the FK so it can be added after
                                # that table's migration.
                                if fkey.referencedColumns[
                                        0].owner.name not in foreign_keys:
                                    foreign_keys[fkey.referencedColumns[
                                        0].owner.name] = []
                                foreign_keys[fkey.referencedColumns[
                                    0].owner.name].append({
                                        'table':
                                        fkey.columns[0].owner.name,
                                        'key': foreign_key,
                                        'referenced_table':
                                        fkey.referencedColumns[0].owner.
                                        name,
                                        'referenced_name':
                                        fkey.referencedColumns[0].name,
                                        'update_rule': fkey.updateRule,
                                        'delete_rule': fkey.deleteRule
                                    })
                    migrations[ti].append(" });\n")
                    # Emit deferred FKs targeting this table.
                    # (Was .iteritems(); .items() works on Python 2 and 3.)
                    for fkey, fval in foreign_keys.items():
                        if fkey == tbl.name:
                            keyed_tables = []
                            schema_table = 0
                            for item in fval:
                                if item['table'] not in keyed_tables:
                                    keyed_tables.append(item['table'])
                                    if schema_table == 0:
                                        foreign_table_name = item['table']
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(
                                                tableName=item['table']))
                                        schema_table = 1
                                    elif foreign_table_name != \
                                            item['table']:
                                        foreign_table_name = item['table']
                                        migrations[ti].append(" });\n")
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(
                                                tableName=item['table']))
                                migrations[ti].append(
                                    foreignKeyTemplate.format(
                                        foreignKey=item['key'],
                                        tableKeyName=item[
                                            'referenced_name'],
                                        foreignTableName=item[
                                            'referenced_table'],
                                        onDeleteAction=item[
                                            'delete_rule'].lower(),
                                        onUpdateAction=item[
                                            'update_rule'].lower()))
                            if schema_table == 1:
                                migrations[ti].append(" });\n")
                                migrations[ti].append('\n')
                    migrations[ti].append(' }\n')
                    ##########
                    # Reverse
                    ##########
                    migrations[ti].append(migrationDownTemplate)
                    migrations[ti].append(
                        migrationEndingTemplate.format(
                            tableName=table_name))
                    ti += 1
        return migrations

    out = cStringIO.StringIO()
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            table_tree = create_tree(out, schema[0], schema[1])
            migrations = export_schema(out, schema[0], schema[1],
                                       table_tree)
    except GenerateLaravel5MigrationError as e:
        Workbench.confirm(e.typ, e.message)
        return 1
    for name in sorted(migrations):
        out.write('Table name: {0}\n\n\n'.format(migration_tables[name]))
        out.write(''.join(migrations[name]))
        out.write('\n\n\n'.format(name))
    sql_text = out.getvalue()
    out.close()
    wizard = GenerateLaravel5MigrationWizard(sql_text)
    wizard.run()
    return 0
def export_table_clipboard(self):
    """Build a Balsamiq-mockup-style JSON form for self.catalog (a single
    table object) and copy it to the clipboard.

    Each data column produces a label plus an input control chosen from
    self.convert_migration_type(); foreign-key columns become combo
    boxes.  A data grid with two rows of sample data, a sticky note with
    the column comments, Save/Back buttons and a title window complete
    the mockup.  Bookkeeping columns (id, created_at, updated_at,
    deleted_at) are skipped.
    """
    table = self.catalog

    # Map FK column name -> referenced table / primary key.
    # NOTE(review): for composite FKs only the last column survives —
    # the original author admitted multi-column keys are not supported.
    dict_fk = {}
    for keys in table.foreignKeys:
        maiu_column = ""
        for column in keys.columns:
            dict_fk[column.name] = {}
            maiu_column = column.name
        for column in keys.referencedColumns:
            dict_fk[maiu_column]['primary_key'] = column.name
            dict_fk[maiu_column]['table'] = column.owner.name

    instrucao_coluna = '{"mockup":\n'
    instrucao_coluna += ' {"controls":\n'
    instrucao_coluna += ' {"control":[ \n'

    # Layout constants (pixel coordinates inside the mockup).
    posicao_x = 108          # x of the label column
    posicao_y = 124          # current row; advanced per rendered field
    tamanho_label_x = 56     # label width
    posicao_x_inputs = 251   # x of the input column

    colunas_grid = ""
    dados_dump = ""
    dados_dump_2 = ""
    comments = ""
    for column in table.columns:
        converted_field = self.convert_migration_type(column)
        if column.name != 'created_at' and column.name != 'updated_at' \
                and column.name != 'deleted_at' and column.name != 'id':
            # Field label.
            instrucao_coluna += '{"ID":"' + str(self.generate_id()) \
                + '","measuredH":"21","measuredW":"' \
                + str(tamanho_label_x) \
                + '","properties":{"text":"' \
                + self.humanize(column.name) \
                + ': "},"typeID":"Label","x":"' + str(posicao_x) \
                + '","y":"' + str(posicao_y) + '","zOrder":"1"}, \n'
            colunas_grid += self.humanize(column.name) + " ^v,"
            if column.comment is not None and column.comment != '':
                comments += column.name + ": " + column.comment + "\\n"
            if dict_fk.get(column.name) is None:
                if converted_field == "TextInput":
                    instrucao_coluna += '{"ID":"' \
                        + str(self.generate_id()) \
                        + '","measuredH":"27","measuredW":"200","properties":{"text":""},"typeID":"TextInput","w":"283", "x":"' \
                        + str(posicao_x_inputs) + '","y":"' \
                        + str(posicao_y - 3) + '","zOrder":"1"},\n'
                    dados_dump += "Dado qualquer ,"
                    dados_dump_2 += "Dado 2 ,"
                elif converted_field == "ComboBox":
                    instrucao_coluna += '{"ID":"' \
                        + str(self.generate_id()) \
                        + '","measuredH":"27","measuredW":"200","properties":{"text":""},"typeID":"ComboBox","w":"283", "x":"' \
                        + str(posicao_x_inputs) + '","y":"' \
                        + str(posicao_y - 3) + '","zOrder":"1"},\n'
                    dados_dump += "Dado ,"
                    dados_dump_2 += "Dado Combo ,"
                elif converted_field == "DateChooser":
                    instrucao_coluna += '{"ID":"' \
                        + str(self.generate_id()) \
                        + '","measuredH":"27","measuredW":"200","properties":{"text":" / / "},"typeID":"DateChooser","w":"148", "x":"' \
                        + str(posicao_x_inputs) + '","y":"' \
                        + str(posicao_y - 3) + '","zOrder":"1"},\n'
                    dados_dump += "25/08/2020 ,"
                    dados_dump_2 += "11/04/2010 ,"
                elif converted_field == "NumericStepper":
                    instrucao_coluna += '{"ID":"' \
                        + str(self.generate_id()) \
                        + '","measuredH":"27","measuredW":"200","properties":{"text":""},"typeID":"NumericStepper","w":"148", "x":"' \
                        + str(posicao_x_inputs) + '","y":"' \
                        + str(posicao_y - 3) + '","zOrder":"1"},\n'
                    dados_dump += "42 ,"
                    dados_dump_2 += "3.1418 ,"
                elif converted_field == "CheckBox":
                    instrucao_coluna += '{"ID":"' \
                        + str(self.generate_id()) \
                        + '","measuredH":"27","measuredW":"200","properties":{"text":""},"typeID":"CheckBox", "x":"' \
                        + str(posicao_x_inputs) + '","y":"' \
                        + str(posicao_y - 3) + '","zOrder":"1"},\n'
                    dados_dump += "Sim ,"
                    dados_dump_2 += "Nao ,"
                elif converted_field == "TextArea":
                    # Text areas get a separator rule plus a taller box,
                    # so the row advances by an extra 100px.
                    instrucao_coluna += '{"ID":"' \
                        + str(self.generate_id()) \
                        + '","measuredH":"10","measuredW":"100","typeID":"HRule","w":"283","x":"251","y":"' \
                        + str(posicao_y + 4) + '","zOrder":"1"},'
                    instrucao_coluna += '{"ID":"' \
                        + str(self.generate_id()) \
                        + '","measuredH":"27","measuredW":"200","properties":{"text":""},"typeID":"TextArea", "w":"416","h":"100" ,"x":"117","y":"' \
                        + str(posicao_y + 21) + '","zOrder":"1"},\n'
                    posicao_y += 100
                    dados_dump += "Texto Longo ,"
                    dados_dump_2 += "Texto Curto ,"
            else:
                # Foreign-key column: render as a combo box.
                # NOTE(review): nesting reconstructed from mangled
                # source — confirm FK columns (not unknown field types)
                # take this branch.
                instrucao_coluna += '{"ID":"' \
                    + str(self.generate_id()) \
                    + '","measuredH":"27","measuredW":"200","properties":{"text":""},"typeID":"ComboBox","w":"283", "x":"' \
                    + str(posicao_x_inputs) + '","y":"' \
                    + str(posicao_y - 3) + '","zOrder":"1"},\n'
                dados_dump += "Dado ,"
                dados_dump_2 += "Dado ,"
            posicao_y += 30

    # Sticky note with the accumulated column comments.
    instrucao_coluna += '{"ID":"' + str(self.generate_id()) \
        + '","h":"' + str(posicao_y - 88) \
        + '","measuredH":"123","measuredW":"109","properties":{"align":"left","size":"10","text":"' \
        + comments \
        + '"},"typeID":"StickyNote","w":"450","x":"567","y":"188","zOrder":"0"},\n'
    # Data grid: header row plus the two rows of sample values.
    instrucao_coluna += '{"ID":"' + str(self.generate_id()) \
        + '","measuredH":"21","measuredW":"328","properties":{"text":"' \
        + colunas_grid + "\\n" + dados_dump + "\\n" + dados_dump_2 \
        + '"},"typeID":"DataGrid","x":"583","y":"90","zOrder":"0"},\n'
    instrucao_coluna += '{"ID":"' + str(self.generate_id()) \
        + '","measuredH":"27","measuredW":"62","properties":{"text":"Salvar"},"typeID":"Button","x":"416","y":"' \
        + str(posicao_y + 42) + '","zOrder":"1"},\n'
    instrucao_coluna += '{"ID":"' + str(self.generate_id()) \
        + '","measuredH":"27","measuredW":"59","properties":{"text":"Voltar"},"typeID":"Button","x":"147","y":"' \
        + str(posicao_y + 42) + '","zOrder":"1"},\n'
    instrucao_coluna += '{"ID":"' + str(self.generate_id()) \
        + '","measuredH":"214","measuredW":"450","properties":{"text":"' \
        + self.humanize(table.name) + '"},"typeID":"TitleWindow","h":"' \
        + str(posicao_y + 10) + '","x":"100","y":"90","zOrder":"0"}\n'
    instrucao_coluna += ']},"measuredH":"453","measuredW":"1199","mockupH":"400","mockupW":"914","version":"1.0"}}\n'
    Workbench.copyToClipboard(instrucao_coluna)
def validate_for_sqlite_export(cat):
    """Check uniqueness of schema, table, column and index names.

    On every missing or duplicate name the user is asked (via
    Workbench.confirm) whether to keep searching for further errors.

    Returns True when all names are unique (export may proceed), False
    when errors were found or the user chose to stop searching.
    """
    have_errors = False

    # Schema names must be unique within the catalog.
    idt = {}
    for i, schema in enumerate(cat.schemata):
        if schema.name in idt:
            have_errors = True
            if Workbench.confirm('Name conflict',
                                 'Schemas %d and %d have the same name "%s".'
                                 ' Please rename one of them.\n'
                                 'Search for more such errors?' % (
                                     idt[schema.name], i,
                                     schema.name)) == 0:
                return False
        else:
            idt[schema.name] = i
    # Do not continue looking for errors on schema name error
    if have_errors:
        return False

    # Table names must be present and unique within each schema.
    for schema in cat.schemata:
        idt = {}
        for i, tbl in enumerate(schema.tables):
            if tbl.name == '':
                have_errors = True
                if Workbench.confirm('Name conflict',
                                     'Table %d in schema "%s". has no name.'
                                     ' Please rename.\n'
                                     'Search for more such errors?' % (
                                         i, schema.name)) == 0:
                    return False
            if tbl.name in idt:
                have_errors = True
                if Workbench.confirm('Name conflict',
                                     'Tables %d and %d in schema "%s"'
                                     ' have the same name "%s".'
                                     ' Please rename one of them.\n'
                                     'Search for more such errors?' % (
                                         idt[tbl.name], i, schema.name,
                                         tbl.name)) == 0:
                    return False
            else:
                idt[tbl.name] = i
    if have_errors:
        return False

    # Per table: column names, then plain-INDEX names, must be present
    # and unique.
    for schema in cat.schemata:
        for tbl in schema.tables:
            idt = {}
            for i, column in enumerate(tbl.columns):
                if column.name == '':
                    have_errors = True
                    if Workbench.confirm('Name conflict',
                                         'Column %d in table "%s"."%s". has no name.'
                                         ' Please rename.\n'
                                         'Search for more such errors?' % (
                                             i, schema.name,
                                             tbl.name)) == 0:
                        return False
                if column.name in idt:
                    have_errors = True
                    if Workbench.confirm('Name conflict',
                                         'Columns %d and %d in table "%s"."%s"'
                                         ' have the same name "%s".'
                                         ' Please rename one of them.\n'
                                         'Search for more such errors?' % (
                                             idt[column.name], i,
                                             schema.name, tbl.name,
                                             column.name)) == 0:
                        return False
                else:
                    idt[column.name] = i
            # Now check indices (except primary/unique)
            idt = {}
            for i, index in enumerate(tbl.indices):
                if index.indexType == 'INDEX':
                    if index.name == '':
                        have_errors = True
                        if Workbench.confirm('Name conflict',
                                             'Index %d in table "%s"."%s". has no name.'
                                             ' Please rename.\n'
                                             'Search for more such errors?' % (
                                                 i, schema.name,
                                                 tbl.name)) == 0:
                            return False
                    if index.name in idt:
                        have_errors = True
                        # BUG FIX: the message interpolated column.name —
                        # a stale loop variable (unbound when the table
                        # has no columns) — instead of the duplicate
                        # index name.
                        if Workbench.confirm('Name conflict',
                                             'Indices %d and %d in table "%s"."%s"'
                                             ' have the same name "%s".'
                                             ' Please rename one of them.\n'
                                             'Search for more such errors?' % (
                                                 idt[index.name], i,
                                                 schema.name, tbl.name,
                                                 index.name)) == 0:
                            return False
                    else:
                        idt[index.name] = i
    if have_errors:
        return False
    return True
def generate_knex_migration(cat):
    """Generate Knex.js migration source for every table in the catalog.

    Tables are emitted in dependency order (referenced tables first) via a
    topological sort of the foreign-key graph; foreign keys that reference
    tables not yet created are deferred into a separate schema-alter block.
    The assembled text is handed to GenerateKnexMigrationWizard for display.

    NOTE(review): relies on module-level state defined elsewhere in this
    file: `migration_tables`, `migrations`, `typesDict` and the various
    *Template strings — TODO confirm their shapes against the module header.

    :param cat: the Workbench catalog (grt) object to export.
    :return: 0 on success, 1 when a GenerateKnexMigrationError is raised.
    """

    def create_tree(table_schema):
        # Build {table_name: [referenced table names]} from the FK graph,
        # then topologically sort it: the returned list holds "layers" of
        # table names, each layer depending only on earlier layers.
        tree = {}
        for tbl in sorted(table_schema.tables, key=lambda table: table.name):
            table_references = []
            for key in tbl.foreignKeys:
                # Self-references are ignored; only named FKs with at least
                # one referenced column count as a dependency.
                if key.name != '' and hasattr(
                        key, 'referencedColumns') and len(
                        key.referencedColumns
                ) > 0 and tbl.name != key.referencedColumns[0].owner.name:
                    table_references.append(
                        key.referencedColumns[0].owner.name)
            tree[tbl.name] = table_references
        d = dict((k, set(tree[k])) for k in tree)
        r = []
        while d:
            # values not in keys (items without dep)
            t = set(i for v in d.values() for i in v) - set(d.keys())
            # and keys without value (items without dep)
            t.update(k for k, v in d.items() if not v)
            # can be done right away
            r.append(t)
            # and cleaned up
            d = dict(((k, v - t) for k, v in d.items() if v))
        return r

    def addslashes(s):
        # Escape backslash, single quote and NUL for safe embedding in a
        # single-quoted JS string literal.
        l = [
            "\\",
            "'",
            "\0",
        ]
        for i in l:
            if i in s:
                s = s.replace(i, '\\' + i)
        return s

    def export_schema(table_schema, tree):
        # Append one migration (a list of source fragments) per table to the
        # module-level `migrations` dict, keyed by emission index `ti`.
        if len(table_schema.tables) == 0:
            return
        foreign_keys = {}  # FKs deferred because their target table is not emitted yet
        global migration_tables
        global migrations
        tables = sorted(table_schema.tables, key=lambda table: table.name)
        ti = 0
        # Walk the topological layers so referenced tables come first.
        for reference_tables in tree:
            for reference in reference_tables:
                for tbl in tables:
                    if tbl.name != reference:
                        continue
                    table_name = tbl.name
                    table_engine = tbl.tableEngine
                    components = table_name.split('_')
                    migration_tables.append(table_name)
                    migrations[ti] = []
                    migrations[ti].append(
                        migrationTemplate.format(tableNameCamelCase=("".join(
                            x.title() for x in components[0:])),
                            tableName=table_name))
                    # migrations[ti].append("    table.engine ('{tableEngine}')\n".format(
                    #     tableEngine=table_engine
                    # ))
                    # Detect created_at/updated_at pairs so they can be
                    # collapsed into a single table.timestamps() call.
                    created_at = created_at_nullable \
                        = updated_at \
                        = updated_at_nullable \
                        = deleted_at \
                        = timestamps \
                        = timestamps_nullable = False
                    for col in tbl.columns:
                        if col.name == 'created_at':
                            created_at = True
                            if col.isNotNull != 1:
                                created_at_nullable = True
                        elif col.name == 'updated_at':
                            updated_at = True
                            if col.isNotNull != 1:
                                updated_at_nullable = True
                    # NOTE(review): when created_at is nullable but
                    # updated_at is not, neither flag is set and the two
                    # columns are emitted individually — TODO confirm this
                    # asymmetry vs the Laravel exporter is intentional.
                    if created_at is True and updated_at is True and created_at_nullable is True:
                        if updated_at_nullable is True:
                            timestamps_nullable = True
                    elif created_at is True and updated_at is True:
                        timestamps = True
                    elif created_at is True and updated_at is True:
                        timestamps = True
                    # Resolve the primary-key column (if single) so it can be
                    # rendered as an auto-increment/uuid column below.
                    primary_key = [
                        col for col in tbl.indices if col.isPrimary == 1
                    ]
                    primary_key = primary_key[0] if len(
                        primary_key) > 0 else None
                    if hasattr(primary_key, 'columns'):
                        primary_col = primary_key.columns[0].referencedColumn
                    else:
                        primary_col = None
                    default_time_values = [
                        'CURRENT_TIMESTAMP',
                        'NULL ON UPDATE CURRENT_TIMESTAMP',
                        'CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP'
                    ]
                    for col in tbl.columns:
                        # Name is important attribute so it has to be set
                        # in order to make this work
                        # https://github.com/beckenrode/mysql-workbench-export-laravel-5-migrations/issues/18#issuecomment-272152778
                        try:
                            if (col.name == 'created_at'
                                    or col.name == 'updated_at') and (
                                        timestamps is True
                                        or timestamps_nullable is True):
                                continue
                            if col.name == 'deleted_at':
                                deleted_at = True
                                continue
                            if col.simpleType:
                                col_type = col.simpleType.name
                            else:
                                col_type = col.userType.name
                            # Map the PK column to knex's increments/uuid
                            # helpers instead of a plain column type.
                            if col == primary_col:
                                if col_type == "BIGINT":
                                    col_type = "BIG_INCREMENTS"
                                elif col_type == "MEDIUMINT":
                                    col_type = "MEDIUM_INCREMENTS"
                                elif col_type == "CHAR" and col.length == 36:
                                    col_type = "UUID"
                                else:
                                    col_type = "INCREMENTS"
                            if (col_type == 'BIGINT' or col_type == 'INT'
                                    or col_type == 'TINYINT'
                                    or col_type == 'MEDIUMINT'
                                    or col_type == 'SMALLINT'
                                ) and 'UNSIGNED' in col.flags:
                                col_type = "u" + col_type
                            # col_data closes the quoted column name and
                            # optionally carries extra builder arguments.
                            col_data = '\''
                            # Continue if type is not in dictionary
                            if col_type not in typesDict:
                                continue
                            if typesDict[col_type] == 'char':
                                if col.length > -1:
                                    col_data = '\', %s' % (str(col.length))
                            elif typesDict[col_type] == 'decimal':
                                if col.precision > -1 and col.scale > -1:
                                    col_data = '\', %s, %s' % (str(
                                        col.precision), str(col.scale))
                            elif col_type == 'DOUBLE':
                                if col.precision > -1 and col.length > -1:
                                    col_data = '\', %s, %s' % (str(
                                        col.length), str(col.precision))
                            elif typesDict[col_type] == 'enum':
                                col_data = '\', [%s]' % (
                                    col.datatypeExplicitParams[1:-1])
                            elif typesDict[col_type] == 'string':
                                if col.length > -1 and col.length < 255:
                                    col_data = '\', %s' % (str(col.length))
                                else:
                                    col_data = '\''
                            if col.name == 'remember_token' and typesDict[
                                    col_type] == 'string' and str(
                                        col.length) == '100':
                                migrations[ti].append(
                                    '    table.rememberToken()\n')
                            elif typesDict[col_type]:
                                migrations[ti].append(
                                    '    table.%s(\'%s%s)' %
                                    (typesDict[col_type], col.name, col_data))
                            # Chain unsigned to the table for all column type with unsigned
                            if col_type == 'uTINYINT' or col_type == 'uSMALLINT' or col_type == 'uMEDIUMINT' or col_type == 'uINT' or col_type == 'uBIGINT' or col_type == 'uINT1' or col_type == 'uINT2' or col_type == 'uINT3' or col_type == 'uINT4' or col_type == 'uINT8' or col_type == 'uINTEGER':
                                migrations[ti].append('.unsigned()')
                            # Chain collate for column types string and text
                            # if typesDict[col_type] == 'string' or typesDict[col_type] == 'text' or typesDict[col_type] == 'enum':
                            #     migrations[ti].append('.collate(\'utf8_unicode_ci\')')
                            if col.isNotNull != 1:
                                migrations[ti].append('.nullable()')
                            if col.isNotNull != 0:
                                migrations[ti].append('.notNullable()')
                            if col.defaultValue != '' and col.defaultValueIsNull != 0:
                                migrations[ti].append('.defaultTo(null)')
                            elif col.defaultValue != '':
                                default_value = col.defaultValue.replace(
                                    "'", "")
                                if default_value in default_time_values:
                                    migrations[ti].append(
                                        ".defaultTo(knex.fn.now())")
                                else:
                                    migrations[ti].append(
                                        ".defaultTo('{}')".format(
                                            default_value))
                            if col == primary_col:
                                migrations[ti].append('.primary()')
                            if col.comment != '':
                                migrations[ti].append(
                                    ".comment('{comment}')".format(
                                        comment=addslashes(col.comment)))
                            migrations[ti].append('\n')
                        except AttributeError:
                            # Malformed column objects are skipped silently
                            # (see the issue link above).
                            pass
                    # Generate indexes
                    indexes = {"primary": {}, "unique": {}, "index": {}}
                    for index in tbl.indices:
                        index_type = index.indexType.lower()
                        if (index_type == "primary"):
                            continue
                        index_name = index.name
                        indexes[index_type][index_name] = []
                        index_columns = []
                        for column in index.columns:
                            indexes[index_type][index_name].append(
                                column.referencedColumn.name)
                    indexes_created = False
                    for index_type in indexes:
                        for index_name in indexes[index_type]:
                            if len(indexes[index_type][index_name]) != 0:
                                indexes_created = True
                                index_key_template = indexKeyTemplate.format(
                                    indexType=index_type,
                                    indexColumns=", ".join([
                                        "'{}'".format(column_name)
                                        for column_name in indexes[index_type]
                                        [index_name]
                                    ]),
                                    indexName=index_name)
                                migrations[ti].append(index_key_template)
                    if indexes_created:
                        migrations[ti].append('\n')
                    # NOTE(review): both branches emit the same text — knex
                    # has no separate nullable-timestamps helper here.
                    if timestamps is True:
                        migrations[ti].append('    table.timestamps()\n')
                    elif timestamps_nullable is True:
                        migrations[ti].append('    table.timestamps()\n')
                    first_foreign_created = False
                    for key in tbl.foreignKeys:
                        if key.name != '' and hasattr(key.index, 'name'):
                            index_name = key.index.name
                            foreign_key = key.columns[0].name
                            if index_name == 'PRIMARY':
                                index_name = tbl.name + "_" + key.columns[
                                    0].name
                            if key.referencedColumns[
                                    0].owner.name in migration_tables:
                                # Target table already emitted: inline the FK.
                                if not first_foreign_created:
                                    migrations[ti].append('\n')
                                    first_foreign_created = True
                                delete_rule = key.deleteRule
                                if delete_rule == "":
                                    delete_rule = "RESTRICT"
                                update_rule = key.updateRule
                                if update_rule == "":
                                    update_rule = "RESTRICT"
                                migrations[ti].append(
                                    foreignKeyTemplate.format(
                                        foreignKey=foreign_key,
                                        foreignKeyName=index_name,
                                        tableKeyName=key.referencedColumns[0].
                                        name,
                                        foreignTableName=key.
                                        referencedColumns[0].owner.name,
                                        onDeleteAction=delete_rule,
                                        onUpdateAction=update_rule))
                            else:
                                # Target not emitted yet: defer, keyed by the
                                # referenced table's name.
                                if key.referencedColumns[
                                        0].owner.name not in foreign_keys:
                                    foreign_keys[key.referencedColumns[0].
                                                 owner.name] = []
                                foreign_keys[key.referencedColumns[
                                    0].owner.name].append({
                                        'table':
                                        key.columns[0].owner.name,
                                        'key':
                                        foreign_key,
                                        'name':
                                        index_name,
                                        'referenced_table':
                                        key.referencedColumns[0].owner.name,
                                        'referenced_name':
                                        key.referencedColumns[0].name,
                                        'update_rule':
                                        key.updateRule,
                                        'delete_rule':
                                        key.deleteRule
                                    })
                    # Flush FKs other tables deferred onto this table into a
                    # separate schema-alter block.
                    for key, val in foreign_keys.iteritems():
                        if key == tbl.name:
                            keyed_tables = []
                            schema_table = 0
                            for item in val:
                                if item['table'] not in keyed_tables:
                                    keyed_tables.append(item['table'])
                                    foreign_table_name = item['table']
                                    if schema_table == 0:
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(
                                                tableName=item['table']))
                                        schema_table = 1
                                    elif foreign_table_name != item['table']:
                                        migrations[ti].append("  })\n")
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(
                                                tableName=item['table']))
                                    migrations[ti].append(
                                        foreignKeyTemplate.format(
                                            foreignKey=item['key'],
                                            foreignKeyName=item['name'],
                                            tableKeyName=item[
                                                'referenced_name'],
                                            foreignTableName=item[
                                                'referenced_table'],
                                            onDeleteAction=item['delete_rule'],
                                            onUpdateAction=item['update_rule'])
                                    )
                            migrations[ti].append('  })\n')
                            migrations[ti].append('}\n')
                    ##########
                    # Reverse
                    ##########
                    migrations[ti].append(migrationDownTemplate)
                    migrations[ti].append(
                        migrationEndingTemplate.format(
                            tableName=table_name,
                            tableNameCamelCase=("".join(
                                x.title() for x in components[0:]))))
                    ti += 1
        return migrations

    out = cStringIO.StringIO()
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            table_tree = create_tree(schema[0])
            migrations = export_schema(schema[0], table_tree)
    except GenerateKnexMigrationError as e:
        Workbench.confirm(e.typ, e.message)
        return 1
    # One header + body per migration, annotated with the file name the user
    # is expected to save it under.
    unixTimestamp = time.strftime('%Y%m%d%H%M%S')
    for name in sorted(migrations):
        save_format = '{timestamp}_add_{tableName}_table.js'.format(
            timestamp=unixTimestamp, tableName=migration_tables[name])
        out.write('Table name: {0} Migration File: {1}\n\n'.format(
            migration_tables[name], save_format))
        out.write(''.join(migrations[name]))
        out.write('\n\n\n'.format(name))
    sql_text = out.getvalue()
    out.close()
    wizard = GenerateKnexMigrationWizard(sql_text)
    wizard.run()
    return 0
def generateLaravel5Migration(cat):
    """Generate Laravel 5 migration classes for every table in the catalog.

    One migration (a list of PHP source fragments) is built per table in the
    module-level `migrations` dict; foreign keys whose target table has not
    been emitted yet are deferred into a Schema::table() block on the target
    table's migration. The assembled text is shown via
    GenerateLaravel5MigrationWizard.

    Fixes over the previous revision:
    * `str(col.length) == 100` compared a string to an int and could never
      be true, so `$table->rememberToken()` was never emitted; it now
      compares against '100' (matching the knex exporter).
    * `export_schema` returned None for an empty schema, making the later
      `sorted(migrations)` crash; it now returns the accumulator.
    * The primary-key lookup was recomputed on every column iteration; it is
      hoisted out of the loop (same result, computed once per table).
    * Removed the unused local `col_flags`.

    :param cat: the Workbench catalog (grt) object to export.
    :return: 0 on success, 1 when GenerateLaravel5MigrationError is raised.
    """

    def export_schema(out, schema, is_main_schema):
        # Accumulate fragments into the module-level `migrations` dict,
        # keyed by table name. `is_main_schema` is kept for interface
        # compatibility with callers.
        global migrations
        foreign_keys = {}     # FKs deferred until their target table exists
        migration_tables = []  # tables emitted so far, in order
        if len(schema.tables) == 0:
            return migrations
        for tbl in schema.tables:
            migration_tables.append(tbl.name)
            migrations[tbl.name] = []
            migrations[tbl.name].append('<?php\n')
            migrations[tbl.name].append('\n')
            migrations[tbl.name].append('use Illuminate\Database\Schema\Blueprint;\n')
            migrations[tbl.name].append('use Illuminate\Database\Migrations\Migration;\n')
            migrations[tbl.name].append('\n')
            components = tbl.name.split('_')
            migrations[tbl.name].append('class Create%sTable extends Migration\n' % ("".join(x.title() for x in components[0:])))
            migrations[tbl.name].append('{\n')
            migrations[tbl.name].append('    /**\n')
            migrations[tbl.name].append('     * Run the migrations.\n')
            migrations[tbl.name].append('     *\n')
            migrations[tbl.name].append('     * @return void\n')
            migrations[tbl.name].append('     */\n')
            migrations[tbl.name].append('    public function up()\n')
            migrations[tbl.name].append('    {\n')
            migrations[tbl.name].append('        Schema::create(\'%s\', function (Blueprint $table) {\n' % (tbl.name))

            # Detect created_at/updated_at so the pair can be collapsed into
            # $table->timestamps() / nullableTimestamps().
            created_at = created_at_nullable = updated_at = updated_at_nullable = deleted_at = timestamps = timestamps_nullable = False
            for col in tbl.columns:
                if col.name == 'created_at':
                    created_at = True
                    if col.isNotNull != 1:
                        created_at_nullable = True
                elif col.name == 'updated_at':
                    updated_at = True
                    if col.isNotNull != 1:
                        updated_at_nullable = True
            if created_at is True and updated_at is True and created_at_nullable is True and updated_at_nullable is True:
                timestamps_nullable = True
            elif created_at is True and updated_at is True:
                timestamps = True

            # Resolve the single-column primary key once per table (it was
            # previously recomputed for every column, with the same result).
            primary_key = [i for i in tbl.indices if i.isPrimary == 1]
            primary_key = primary_key[0] if len(primary_key) > 0 else None
            pk_column = None
            if primary_key and len(primary_key.columns) == 1:
                pk_column = primary_key.columns[0].referencedColumn

            for col in tbl.columns:
                if (col.name == 'created_at' or col.name == 'updated_at') and (timestamps is True or timestamps_nullable is True):
                    continue
                if col.name == 'deleted_at':
                    deleted_at = True
                    continue
                if col.simpleType:
                    col_type = col.simpleType.name
                else:
                    col_type = col.userType.name
                # The PK column becomes an auto-increment column.
                if col == pk_column:
                    if col_type == 'BIGINT':
                        col_type = 'BIGINCREMENTS'
                    else:
                        col_type = 'INCREMENTS'
                # col_data closes the quoted column name and optionally
                # carries extra builder arguments (length, precision, ...).
                col_data = '\''
                if typesDict[col_type] == 'char':
                    if col.length > -1:
                        col_data = '\', %s' % (str(col.length))
                elif typesDict[col_type] == 'decimal':
                    if col.precision > -1 and col.scale > -1:
                        col_data = '\', %s, %s' % (str(col.precision), str(col.scale))
                elif typesDict[col_type] == 'double':
                    if col.precision > -1 and col.length > -1:
                        col_data = '\', %s, %s' % (str(col.length), str(col.precision))
                elif typesDict[col_type] == 'enum':
                    col_data = '\', [%s]' % (col.datatypeExplicitParams[1:-1])
                elif typesDict[col_type] == 'string':
                    if col.length > -1:
                        col_data = '\', %s' % (str(col.length))
                # FIX: was `str(col.length) == 100` (str vs int, never true).
                if col.name == 'remember_token' and typesDict[col_type] == 'string' and str(col.length) == '100':
                    migrations[tbl.name].append('            $table->rememberToken();\n')
                elif typesDict[col_type]:
                    migrations[tbl.name].append('            $table->%s(\'%s%s)' % (typesDict[col_type], col.name, col_data))
                    if typesDict[col_type] == 'integer' and 'UNSIGNED' in col.flags:
                        migrations[tbl.name].append('->unsigned()')
                    if col.isNotNull != 1:
                        migrations[tbl.name].append('->nullable()')
                    if col.defaultValue != '' and col.defaultValueIsNull != 0:
                        migrations[tbl.name].append('->default(NULL)')
                    elif col.defaultValue != '':
                        migrations[tbl.name].append('->default(%s)' % (col.defaultValue))
                    if col.comment != '':
                        migrations[tbl.name].append('->comment(\'%s\')' % (col.comment))
                    migrations[tbl.name].append(";")
                    migrations[tbl.name].append('\n')

            if deleted_at is True:
                migrations[tbl.name].append('            $table->softDeletes();\n')
            if timestamps is True:
                migrations[tbl.name].append('            $table->timestamps();\n')
            elif timestamps_nullable is True:
                migrations[tbl.name].append('            $table->nullableTimestamps();\n')

            # Foreign keys: inline when the target table is already emitted,
            # otherwise defer onto the target table's migration.
            first_foreign_created = 0
            for fkey in tbl.foreignKeys:
                if fkey.name != '':
                    if fkey.referencedColumns[0].owner.name in migration_tables:
                        if first_foreign_created == 0:
                            migrations[tbl.name].append('\n')
                            first_foreign_created = 1
                        migrations[tbl.name].append('            $table->foreign(\'%s\')->references(\'%s\')->on(\'%s\')->onDelete(\'%s\')->onUpdate(\'%s\');' % (fkey.columns[0].name, fkey.referencedColumns[0].name, fkey.referencedColumns[0].owner.name, fkey.deleteRule.lower(), fkey.updateRule.lower()))
                        migrations[tbl.name].append('\n')
                    else:
                        if fkey.referencedColumns[0].owner.name not in foreign_keys:
                            foreign_keys[fkey.referencedColumns[0].owner.name] = []
                        foreign_keys[fkey.referencedColumns[0].owner.name].append({'table': fkey.columns[0].owner.name, 'name': fkey.columns[0].name, 'referenced_table': fkey.referencedColumns[0].owner.name, 'referenced_name': fkey.referencedColumns[0].name, 'update_rule': fkey.updateRule, 'delete_rule': fkey.deleteRule})
            migrations[tbl.name].append("        });\n")

            # Emit FKs that earlier tables deferred onto this table.
            for fkey, fval in foreign_keys.iteritems():
                if fkey == tbl.name:
                    keyed_tables = []
                    schema_table = 0
                    for item in fval:
                        if item['table'] not in keyed_tables:
                            keyed_tables.append(item['table'])
                            if schema_table == 0:
                                migrations[tbl.name].append('\n')
                                migrations[tbl.name].append('        Schema::table(\'%s\', function (Blueprint $table) {\n' % (item['table']))
                                schema_table = 1
                            migrations[tbl.name].append('            $table->foreign(\'%s\')->references(\'%s\')->on(\'%s\')->onDelete(\'%s\')->onUpdate(\'%s\');\n' % (item['name'], item['referenced_name'], item['referenced_table'], item['delete_rule'].lower(), item['update_rule'].lower()))
                    if schema_table == 1:
                        migrations[tbl.name].append("        });\n")
                        migrations[tbl.name].append('\n')

            migrations[tbl.name].append('    }\n')
            migrations[tbl.name].append('\n')
            migrations[tbl.name].append('    /**\n')
            migrations[tbl.name].append('     * Reverse the migrations.\n')
            migrations[tbl.name].append('     *\n')
            migrations[tbl.name].append('     * @return void\n')
            migrations[tbl.name].append('     */\n')
            migrations[tbl.name].append('    public function down()\n')
            migrations[tbl.name].append('    {\n')

            # down(): drop this table's own foreign keys first...
            first_foreign_created = 0
            for fkey in tbl.foreignKeys:
                if fkey.name != '':
                    if fkey.referencedColumns[0].owner.name in migration_tables:
                        if first_foreign_created == 0:
                            migrations[tbl.name].append('        Schema::table(\'%s\', function (Blueprint $table) {\n' % (tbl.name))
                            first_foreign_created = 1
                        migrations[tbl.name].append('            $table->dropForeign([\'%s\']);\n' % (fkey.columns[0].name))
            if first_foreign_created == 1:
                migrations[tbl.name].append("        });\n")
                migrations[tbl.name].append('\n')

            # ...then the deferred ones added in up().
            for fkey, fval in foreign_keys.iteritems():
                if fkey == tbl.name:
                    keyed_tables = []
                    schema_table = 0
                    for item in fval:
                        if item['table'] not in keyed_tables:
                            keyed_tables.append(item['table'])
                            if schema_table == 0:
                                migrations[tbl.name].append('        Schema::table(\'%s\', function (Blueprint $table) {\n' % (item['table']))
                                schema_table = 1
                            migrations[tbl.name].append('            $table->dropForeign([\'%s\']);\n' % (item['name']))
                    if schema_table == 1:
                        migrations[tbl.name].append("        });\n")
                        migrations[tbl.name].append('\n')

            migrations[tbl.name].append('        Schema::drop(\'%s\');\n' % (tbl.name))
            migrations[tbl.name].append('    }\n')
            migrations[tbl.name].append('}')
        return migrations

    out = cStringIO.StringIO()
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            migrations = export_schema(out, schema[0], schema[1])
    except GenerateLaravel5MigrationError as e:
        Workbench.confirm(e.typ, e.message)
        return 1

    for mkey in sorted(migrations):
        out.write(''.join(migrations[mkey]))
        out.write('\n\n\n')
    sql_text = out.getvalue()
    out.close()
    wizard = GenerateLaravel5MigrationWizard(sql_text)
    wizard.run()
    return 0