def cli(  # noqa: ignore=C0330 # pylint: disable=C0330,R0913
    sqlite_file,
    mysql_user,
    mysql_password,
    mysql_database,
    mysql_host,
    mysql_port,
    chunk,
    log_file,
    vacuum,
    use_buffered_cursors,
):
    """Transfer MySQL to SQLite using the provided CLI options.

    Builds a :class:`MySQLtoSQLite` converter from the CLI arguments and runs
    the full transfer. Exits with status 1 on user interrupt or any error.
    """
    try:
        converter = MySQLtoSQLite(
            sqlite_file=sqlite_file,
            mysql_user=mysql_user,
            mysql_password=mysql_password,
            mysql_database=mysql_database,
            mysql_host=mysql_host,
            mysql_port=mysql_port,
            chunk=chunk,
            vacuum=vacuum,
            buffered=use_buffered_cursors,
            log_file=log_file,
        )
        converter.transfer()
    except KeyboardInterrupt:
        # Ctrl-C: leave a clean message instead of a traceback.
        print("\nProcess interrupted. Exiting...")
        sys.exit(1)
    except Exception as err:  # pylint: disable=W0703
        # Broad catch is deliberate at the CLI boundary: report and exit non-zero.
        print(err)
        sys.exit(1)
def test_create_table_unknown_mysql_connector_error(
    self, sqlite_database, mysql_database, mysql_credentials, mocker, caplog
):
    """_create_table must propagate an unknown mysql.connector.Error raised
    while executing the generated CREATE TABLE script."""
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
    )

    class FakeSQLiteCursor:
        # Stand-in for the SQLite cursor: any script execution fails with
        # a generic "unknown" MySQL connector error.
        def executescript(self, statement):
            raise mysql.connector.Error(
                msg="Error Code: 2000. Unknown MySQL error",
                errno=errorcode.CR_UNKNOWN_ERROR,
            )

    mysql_inspect = inspect(mysql_database.engine)
    mysql_tables = mysql_inspect.get_table_names()
    # Swap the real SQLite cursor for the failing fake.
    mocker.patch.object(proc, "_sqlite_cur", FakeSQLiteCursor())
    caplog.set_level(logging.DEBUG)
    with pytest.raises(mysql.connector.Error):
        proc._create_table(choice(mysql_tables))
def test_create_table_server_lost_connection_error(
    self, sqlite_database, mysql_database, mysql_credentials, mocker, caplog
):
    """_create_table must re-raise CR_SERVER_LOST even when the MySQL
    connection can be re-established (reconnect patched to succeed)."""
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
    )

    class FakeSQLiteCursor:
        # Always fails as if the MySQL server connection was lost mid-query.
        def executescript(self, *args, **kwargs):
            raise mysql.connector.Error(
                msg="Error Code: 2013. Lost connection to MySQL server during query",
                errno=errorcode.CR_SERVER_LOST,
            )

    class FakeSQLiteConnector:
        # Minimal connection stub: commit always "succeeds".
        def commit(self, *args, **kwargs):
            return True

    mysql_inspect = inspect(mysql_database.engine)
    mysql_tables = mysql_inspect.get_table_names()
    mocker.patch.object(proc, "_sqlite_cur", FakeSQLiteCursor())
    # Reconnection succeeds, so the failure must come from the cursor itself.
    mocker.patch.object(proc._mysql, "reconnect", return_value=True)
    mocker.patch.object(proc, "_sqlite", FakeSQLiteConnector())
    caplog.set_level(logging.DEBUG)
    with pytest.raises(mysql.connector.Error):
        proc._create_table(choice(mysql_tables))
def test_translate_type_from_mysql_to_sqlite_invalid_column_type(self, mocker):
    """_translate_type_from_mysql_to_sqlite must raise ValueError when the
    column type fails validation."""
    # Patch validation *before* entering the raises block: originally the
    # patch call lived inside `pytest.raises(ValueError)`, so an unexpected
    # error from mocker.patch.object itself could have satisfied (or
    # confused) the assertion instead of the call under test.
    mocker.patch.object(MySQLtoSQLite, "_valid_column_type", return_value=False)
    with pytest.raises(ValueError) as excinfo:
        MySQLtoSQLite._translate_type_from_mysql_to_sqlite("text")
    assert "Invalid column_type!" in str(excinfo.value)
def test_data_type_collation_sequence_is_not_applied_on_non_textual_data_types(
    self,
):
    """No COLLATE clause may be emitted for non-textual column types,
    whatever collating sequence is requested.

    Fix: the original tuple listed "DATETIME" and "INTEGER" twice; the
    duplicates added no coverage and have been removed.
    """
    for column_type in (
        "BIGINT",
        "BINARY",
        "BIT",
        "BLOB",
        "BOOLEAN",
        "DATE",
        "DATETIME",
        "DECIMAL",
        "DOUBLE",
        "FLOAT",
        "INTEGER",
        "LONGBLOB",
        "MEDIUMBLOB",
        "MEDIUMINT",
        "NUMERIC",
        "REAL",
        "SMALLINT",
        "TIME",
        "TINYBLOB",
        "TINYINT",
        "VARBINARY",
        "YEAR",
    ):
        for collation in (
            CollatingSequences.BINARY,
            CollatingSequences.NOCASE,
            CollatingSequences.RTRIM,
        ):
            # Non-textual types must always yield an empty collation clause.
            assert (
                MySQLtoSQLite._data_type_collation_sequence(collation, column_type)
                == ""
            )
def test_log_to_file(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    caplog,
    tmpdir,
    faker,
    quiet,
):
    """Errors raised during construction must be written both to caplog and
    to the user-supplied log file, with a timestamped log-line format."""
    log_file = tmpdir.join("db.log")
    caplog.set_level(logging.DEBUG)
    # Random credentials guarantee an access-denied failure.
    with pytest.raises(mysql.connector.Error):
        MySQLtoSQLite(
            sqlite_file=sqlite_database,
            mysql_user=faker.first_name().lower(),
            mysql_password=faker.password(length=16),
            mysql_database=mysql_credentials.database,
            mysql_host=mysql_credentials.host,
            mysql_port=mysql_credentials.port,
            log_file=str(log_file),
            quiet=quiet,
        )
    assert any("Access denied for user" in message for message in caplog.messages)
    with log_file.open("r") as log_fh:
        log = log_fh.read()
    # The captured error message must also appear in the file log.
    if len(caplog.messages) > 1:
        assert caplog.messages[1] in log
    else:
        assert caplog.messages[0] in log
    # Log lines start with "YYYY-MM-DD HH:MM:SS LEVEL".
    assert (
        re.match(r"^\d{4,}-\d{2,}-\d{2,}\s+\d{2,}:\d{2,}:\d{2,}\s+\w+\s+", log)
        is not None
    )
def test_transfer_table_data_exceptions(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    mocker,
    caplog,
    exception,
    quiet,
):
    """_transfer_table_data must propagate errors raised while fetching rows
    from the MySQL cursor (parametrized via the ``exception`` fixture)."""
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
        quiet=quiet,
    )

    class FakeMySQLCursor:
        # Both fetch paths (chunked and unchunked) fail with the
        # parametrized exception.
        def fetchall(self):
            raise exception

        def fetchmany(self, size=1):
            raise exception

    mysql_inspect = inspect(mysql_database.engine)
    mysql_tables = mysql_inspect.get_table_names()
    table_name = choice(mysql_tables)
    columns = [column["name"] for column in mysql_inspect.get_columns(table_name)]
    # Build the INSERT statement the same way the converter does:
    # '"a", "b"' field list and '?, ?' placeholder list.
    sql = 'INSERT OR IGNORE INTO "{table}" ({fields}) VALUES ({placeholders})'.format(
        table=table_name,
        fields=('"{}", ' * len(columns)).rstrip(" ,").format(*columns),
        placeholders=("?, " * len(columns)).rstrip(" ,"),
    )
    mocker.patch.object(proc, "_mysql_cur", FakeMySQLCursor())
    with pytest.raises((mysql.connector.Error, sqlite3.Error)):
        proc._transfer_table_data(table_name, sql)
def test_data_type_collation_sequence_is_applied_on_textual_data_types(
    self,
    collation,
    resulting_column_collation,
    column_type,
):
    """Textual column types must receive the parametrized collation clause."""
    actual = MySQLtoSQLite._data_type_collation_sequence(collation, column_type)
    assert actual == resulting_column_collation
def test_non_existing_mysql_database_raises_exception(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    faker,
    mocker,
    caplog,
    exception,
    quiet,
):
    """Selecting a non-existent MySQL database must log a clear message and
    re-raise the connector's 'Unknown database' error."""
    class FakeMySQLConnection(MySQLConnection):
        # Connection double: setting .database raises the parametrized
        # exception, simulating a missing schema.
        @property
        def database(self):
            return self._database

        @database.setter
        def database(self, value):
            self._database = value
            # raise a fake exception
            raise exception

        def is_connected(self):
            return True

        def cursor(
            self,
            buffered=None,
            raw=None,
            prepared=None,
            cursor_class=None,
            dictionary=None,
            named_tuple=None,
        ):
            return True

    caplog.set_level(logging.DEBUG)
    mocker.patch.object(
        mysql.connector, "connect", return_value=FakeMySQLConnection()
    )
    with pytest.raises((mysql.connector.Error, Exception)) as excinfo:
        MySQLtoSQLite(
            sqlite_file=sqlite_database,
            mysql_user=mysql_credentials.user,
            mysql_password=mysql_credentials.password,
            mysql_database=mysql_credentials.database,
            mysql_host=mysql_credentials.host,
            mysql_port=mysql_credentials.port,
            quiet=quiet,
        )
    assert any(
        "MySQL Database does not exist!" in message for message in caplog.messages
    )
    assert "Unknown database" in str(excinfo.value)
def test_create_table_sqlite3_error(
    self, sqlite_database, mysql_database, mysql_credentials, mocker, caplog
):
    """_create_table must propagate sqlite3.Error raised by the SQLite cursor."""
    converter = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
    )

    class BrokenSQLiteCursor:
        # Any script execution fails with a generic SQLite error.
        def executescript(self, *args, **kwargs):
            raise sqlite3.Error("Unknown SQLite error")

    target_table = choice(inspect(mysql_database.engine).get_table_names())
    mocker.patch.object(converter, "_sqlite_cur", BrokenSQLiteCursor())
    caplog.set_level(logging.DEBUG)
    with pytest.raises(sqlite3.Error):
        converter._create_table(target_table)
def test_invalid_mysql_credentials_raises_access_denied_exception(
    self, sqlite_database, mysql_database, mysql_credentials, faker
):
    """Random (wrong) credentials must surface MySQL's access-denied error."""
    bogus_user = faker.first_name().lower()
    bogus_password = faker.password(length=16)
    with pytest.raises(mysql.connector.Error) as excinfo:
        MySQLtoSQLite(
            sqlite_file=sqlite_database,
            mysql_user=bogus_user,
            mysql_password=bogus_password,
            mysql_database=mysql_credentials.database,
            mysql_host=mysql_credentials.host,
            mysql_port=mysql_credentials.port,
        )
    assert "Access denied for user" in str(excinfo.value)
def test_bad_mysql_connection(self, sqlite_database, mysql_credentials, mocker):
    """A connector whose is_connected() returns False must raise ConnectionError."""
    FakeConnector = namedtuple("FakeConnector", ["is_connected"])
    dead_connection = FakeConnector(is_connected=lambda: False)
    mocker.patch.object(mysql.connector, "connect", return_value=dead_connection)
    with pytest.raises((ConnectionError, IOError)) as excinfo:
        MySQLtoSQLite(
            sqlite_file=sqlite_database,
            mysql_user=mysql_credentials.user,
            mysql_password=mysql_credentials.password,
            mysql_database=mysql_credentials.database,
            mysql_host=mysql_credentials.host,
            mysql_port=mysql_credentials.port,
            chunk=1000,
        )
    assert "Unable to connect to MySQL" in str(excinfo.value)
def test_missing_mysql_database_raises_exception(self, faker, quiet):
    """Constructing the converter without a MySQL database name must fail fast."""
    random_user = faker.first_name().lower()
    with pytest.raises(ValueError) as excinfo:
        MySQLtoSQLite(mysql_user=random_user, quiet=quiet)
    assert "Please provide a MySQL database" in str(excinfo.value)
def test_missing_mysql_user_raises_exception(self, mysql_credentials, quiet):
    """Constructing the converter without a MySQL user must fail fast."""
    with pytest.raises(ValueError) as excinfo:
        MySQLtoSQLite(mysql_database=mysql_credentials.database, quiet=quiet)
    message = str(excinfo.value)
    assert "Please provide a MySQL user" in message
def test_translate_type_from_mysql_to_sqlite_all_valid_columns(self):
    """Exhaustively check the MySQL→SQLite type translation table, including
    sized (CHAR(2), VARCHAR(123), …) and UNSIGNED variants.

    Branch order matters: specific type names are matched before the generic
    " UNSIGNED" suffix and the identity fallback.
    """
    for column_type in mysql_column_types + (
        "BIGINT UNSIGNED",
        "INTEGER UNSIGNED",
        "INT",
        "INT UNSIGNED",
        "SMALLINT UNSIGNED",
        "TINYINT UNSIGNED",
        "MEDIUMINT UNSIGNED",
        "CHAR(2)",
        "NCHAR(7)",
        "NVARCHAR(17)",
        "VARCHAR(123)",
    ):
        # mysql_column_types presumably enumerates module attributes; skip
        # the non-type entries it contains.
        if column_type in {"dialect", "insert", "Insert"}:
            continue
        elif column_type == "INT":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "INTEGER"
            )
        elif column_type == "DECIMAL":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "DECIMAL"
            )
        elif column_type == "YEAR":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "YEAR"
            )
        elif column_type == "TIME":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "TIME"
            )
        elif column_type == "TIMESTAMP":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "DATETIME"
            )
        elif column_type in {
            "BINARY",
            "BIT",
            "LONGBLOB",
            "MEDIUMBLOB",
            "TINYBLOB",
            "VARBINARY",
        }:
            # All binary-ish types collapse to SQLite BLOB.
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "BLOB"
            )
        elif column_type == "CHAR":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "CHARACTER"
            )
        elif column_type == "CHAR(2)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "CHARACTER(2)"
            )
        elif column_type == "NCHAR(7)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "NCHAR(7)"
            )
        elif column_type == "NVARCHAR(17)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "NVARCHAR(17)"
            )
        elif column_type == "VARCHAR(123)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "VARCHAR(123)"
            )
        elif column_type in {
            "ENUM",
            "LONGTEXT",
            "MEDIUMTEXT",
            "SET",
            "TINYTEXT",
        }:
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "TEXT"
            )
        elif column_type == "JSON":
            # Without the JSON1 extension JSON degrades to TEXT; with it,
            # the JSON type is preserved.
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "TEXT"
            )
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(
                    column_type, sqlite_json1_extension_enabled=True
                )
                == "JSON"
            )
        elif column_type.endswith(" UNSIGNED"):
            if column_type.startswith("INT "):
                # "INT UNSIGNED" → INTEGER (matches the bare INT mapping).
                assert (
                    MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                    == "INTEGER"
                )
            else:
                # Other UNSIGNED variants simply drop the suffix.
                assert MySQLtoSQLite._translate_type_from_mysql_to_sqlite(
                    column_type
                ) == column_type.replace(" UNSIGNED", "")
        else:
            # Everything else translates to itself.
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == column_type
            )
def test_transfer_transfers_all_tables_from_mysql_to_sqlite(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    helpers,
    capsys,
    caplog,
    chunk,
    vacuum,
    buffered,
):
    """End-to-end: transfer every table, then verify table names, columns,
    indices, foreign keys and row data match between MySQL and SQLite."""
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
        chunk=chunk,
        vacuum=vacuum,
        buffered=buffered,
    )
    caplog.set_level(logging.DEBUG)
    proc.transfer()
    # Every fixture table must have been announced, and the run must end
    # cleanly with only INFO-level records.
    assert all(
        message in [record.message for record in caplog.records]
        for message in {
            "Transferring table article_authors",
            "Transferring table article_images",
            "Transferring table article_tags",
            "Transferring table articles",
            "Transferring table authors",
            "Transferring table images",
            "Transferring table tags",
            "Transferring table misc",
            "Done!",
        }
    )
    assert all(record.levelname == "INFO" for record in caplog.records)
    assert not any(record.levelname == "ERROR" for record in caplog.records)
    out, err = capsys.readouterr()
    assert "Done!" in out.splitlines()[-1]
    # NOTE(review): str.format ignores unused keyword arguments, so the
    # json_serializer/json_deserializer kwargs below have no effect on the
    # URL — presumably they were meant for create_engine itself; confirm.
    sqlite_engine = create_engine(
        "sqlite:///{database}".format(
            database=sqlite_database,
            json_serializer=json.dumps,
            json_deserializer=json.loads,
        )
    )
    sqlite_cnx = sqlite_engine.connect()
    sqlite_inspect = inspect(sqlite_engine)
    sqlite_tables = sqlite_inspect.get_table_names()
    mysql_engine = create_engine(
        "mysql+mysqldb://{user}:{password}@{host}:{port}/{database}".format(
            user=mysql_credentials.user,
            password=mysql_credentials.password,
            host=mysql_credentials.host,
            port=mysql_credentials.port,
            database=mysql_credentials.database,
        )
    )
    mysql_cnx = mysql_engine.connect()
    mysql_inspect = inspect(mysql_engine)
    mysql_tables = mysql_inspect.get_table_names()

    """ Test if both databases have the same table names """
    assert sqlite_tables == mysql_tables

    """ Test if all the tables have the same column names """
    for table_name in sqlite_tables:
        assert [
            column["name"] for column in sqlite_inspect.get_columns(table_name)
        ] == [column["name"] for column in mysql_inspect.get_columns(table_name)]

    """ Test if all the tables have the same indices """
    index_keys = ("name", "column_names", "unique")
    mysql_indices = tuple(
        {key: index[key] for key in index_keys}
        for index in (
            chain.from_iterable(
                mysql_inspect.get_indexes(table_name) for table_name in mysql_tables
            )
        )
    )
    for table_name in sqlite_tables:
        for sqlite_index in sqlite_inspect.get_indexes(table_name):
            # SQLite reports uniqueness as 0/1; normalize before comparing.
            sqlite_index["unique"] = bool(sqlite_index["unique"])
            assert sqlite_index in mysql_indices

    """ Test if all the tables have the same foreign keys """
    for table_name in mysql_tables:
        # Pull FK metadata straight from information_schema so it can be
        # compared against SQLite's PRAGMA foreign_key_list output.
        mysql_fk_stmt = text(
            """
            SELECT k.COLUMN_NAME AS `from`,
                   k.REFERENCED_TABLE_NAME AS `table`,
                   k.REFERENCED_COLUMN_NAME AS `to`,
                   c.UPDATE_RULE AS `on_update`,
                   c.DELETE_RULE AS `on_delete`
            FROM information_schema.TABLE_CONSTRAINTS AS i
            LEFT JOIN information_schema.KEY_COLUMN_USAGE AS k ON i.CONSTRAINT_NAME = k.CONSTRAINT_NAME
            LEFT JOIN information_schema.REFERENTIAL_CONSTRAINTS c ON c.CONSTRAINT_NAME = i.CONSTRAINT_NAME
            WHERE i.TABLE_SCHEMA = :table_schema
            AND i.TABLE_NAME = :table_name
            AND i.CONSTRAINT_TYPE = :constraint_type
            """
        ).bindparams(
            table_schema=mysql_credentials.database,
            table_name=table_name,
            constraint_type="FOREIGN KEY",
        )
        mysql_fk_result = mysql_cnx.execute(mysql_fk_stmt)
        mysql_foreign_keys = [dict(row) for row in mysql_fk_result]
        sqlite_fk_stmt = 'PRAGMA foreign_key_list("{table}")'.format(
            table=table_name
        )
        sqlite_fk_result = sqlite_cnx.execute(sqlite_fk_stmt)
        if sqlite_fk_result.returns_rows:
            for row in sqlite_fk_result:
                fk = dict(row)
                assert {
                    "table": fk["table"],
                    "from": fk["from"],
                    "to": fk["to"],
                    "on_update": fk["on_update"],
                    "on_delete": fk["on_delete"],
                } in mysql_foreign_keys

    """ Check if all the data was transferred correctly """
    sqlite_results = []
    mysql_results = []
    meta = MetaData(bind=None)
    for table_name in sqlite_tables:
        sqlite_table = Table(
            table_name, meta, autoload=True, autoload_with=sqlite_engine
        )
        sqlite_stmt = select([sqlite_table])
        sqlite_result = sqlite_cnx.execute(sqlite_stmt).fetchall()
        sqlite_result.sort()
        # Normalize Decimal to float so both sides compare equal.
        sqlite_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in sqlite_result
        ]
        sqlite_results.append(sqlite_result)
    for table_name in mysql_tables:
        mysql_table = Table(
            table_name, meta, autoload=True, autoload_with=mysql_engine
        )
        mysql_stmt = select([mysql_table])
        mysql_result = mysql_cnx.execute(mysql_stmt).fetchall()
        mysql_result.sort()
        mysql_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in mysql_result
        ]
        mysql_results.append(mysql_result)
    assert sqlite_results == mysql_results
def test_transfer_specific_tables_transfers_only_specified_tables_from_mysql_to_sqlite(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    helpers,
    caplog,
    chunk,
    vacuum,
    buffered,
    prefix_indices,
):
    """Transfer only a random subset of tables and verify that exactly those
    tables — with matching columns, indices and data — end up in SQLite."""
    mysql_engine = create_engine(
        "mysql+mysqldb://{user}:{password}@{host}:{port}/{database}".format(
            user=mysql_credentials.user,
            password=mysql_credentials.password,
            host=mysql_credentials.host,
            port=mysql_credentials.port,
            database=mysql_credentials.database,
        )
    )
    mysql_cnx = mysql_engine.connect()
    mysql_inspect = inspect(mysql_engine)
    mysql_tables = mysql_inspect.get_table_names()
    # Pick a random non-empty, non-full subset of tables (Py2/Py3 range).
    if six.PY2:
        table_number = choice(xrange(1, len(mysql_tables)))
    else:
        table_number = choice(range(1, len(mysql_tables)))
    random_mysql_tables = sample(mysql_tables, table_number)
    random_mysql_tables.sort()
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_tables=random_mysql_tables,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
        prefix_indices=prefix_indices,
    )
    caplog.set_level(logging.DEBUG)
    proc.transfer()
    # Only the selected tables (plus the final "Done!") should be logged.
    assert all(
        message in [record.message for record in caplog.records]
        for message in set(
            ["Transferring table {}".format(table) for table in random_mysql_tables]
            + ["Done!"]
        )
    )
    assert all(record.levelname == "INFO" for record in caplog.records)
    assert not any(record.levelname == "ERROR" for record in caplog.records)
    # NOTE(review): str.format ignores unused kwargs, so json_serializer /
    # json_deserializer below do not affect the URL — likely intended for
    # create_engine; confirm.
    sqlite_engine = create_engine(
        "sqlite:///{database}".format(
            database=sqlite_database,
            json_serializer=json.dumps,
            json_deserializer=json.loads,
        )
    )
    sqlite_cnx = sqlite_engine.connect()
    sqlite_inspect = inspect(sqlite_engine)
    sqlite_tables = sqlite_inspect.get_table_names()

    """ Test if both databases have the same table names """
    assert sqlite_tables == random_mysql_tables

    """ Test if all the tables have the same column names """
    for table_name in sqlite_tables:
        assert [
            column["name"] for column in sqlite_inspect.get_columns(table_name)
        ] == [column["name"] for column in mysql_inspect.get_columns(table_name)]

    """ Test if all the tables have the same indices """
    index_keys = {"name", "column_names", "unique"}
    mysql_indices = []
    for table_name in random_mysql_tables:
        for index in mysql_inspect.get_indexes(table_name):
            mysql_index = {}
            for key in index_keys:
                if key == "name" and prefix_indices:
                    # With prefix_indices the converter renames indices to
                    # "<table>_<index>"; mirror that for the comparison.
                    mysql_index[key] = "{table}_{name}".format(
                        table=table_name, name=index[key]
                    )
                else:
                    mysql_index[key] = index[key]
            mysql_indices.append(mysql_index)
    for table_name in sqlite_tables:
        for sqlite_index in sqlite_inspect.get_indexes(table_name):
            # SQLite reports uniqueness as 0/1; normalize before comparing.
            sqlite_index["unique"] = bool(sqlite_index["unique"])
            assert sqlite_index in mysql_indices

    """ Check if all the data was transferred correctly """
    sqlite_results = []
    mysql_results = []
    meta = MetaData(bind=None)
    for table_name in sqlite_tables:
        sqlite_table = Table(
            table_name, meta, autoload=True, autoload_with=sqlite_engine
        )
        sqlite_stmt = select([sqlite_table])
        sqlite_result = sqlite_cnx.execute(sqlite_stmt).fetchall()
        sqlite_result.sort()
        # Normalize Decimal to float so both sides compare equal.
        sqlite_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in sqlite_result
        ]
        sqlite_results.append(sqlite_result)
    for table_name in random_mysql_tables:
        mysql_table = Table(
            table_name, meta, autoload=True, autoload_with=mysql_engine
        )
        mysql_stmt = select([mysql_table])
        mysql_result = mysql_cnx.execute(mysql_stmt).fetchall()
        mysql_result.sort()
        mysql_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in mysql_result
        ]
        mysql_results.append(mysql_result)
    assert sqlite_results == mysql_results
def test_translate_type_from_mysql_to_sqlite_all_valid_columns(self):
    """Check the MySQL→SQLite type translation table for all base types plus
    a few sized variants (no UNSIGNED / JSON1-extension cases here).

    NOTE(review): a test with this exact name also appears earlier in this
    file; if both live in the same class, the later definition silently
    shadows the earlier one — confirm they belong to different test classes,
    otherwise one of them never runs.
    """
    for column_type in mysql_column_types + (
        "CHAR(2)",
        "NCHAR(7)",
        "NVARCHAR(17)",
        "VARCHAR(123)",
    ):
        # mysql_column_types presumably enumerates module attributes; skip
        # the non-type entries it contains.
        if column_type in {"dialect", "insert", "Insert"}:
            continue
        elif column_type == "INT":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "INTEGER"
            )
        elif column_type == "DECIMAL":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "DECIMAL"
            )
        elif column_type == "YEAR":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "YEAR"
            )
        elif column_type == "TIME":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "TIME"
            )
        elif column_type == "TIMESTAMP":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "DATETIME"
            )
        elif column_type in {
            "BINARY",
            "BIT",
            "LONGBLOB",
            "MEDIUMBLOB",
            "TINYBLOB",
            "VARBINARY",
        }:
            # All binary-ish types collapse to SQLite BLOB.
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "BLOB"
            )
        elif column_type == "CHAR":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "CHARACTER"
            )
        elif column_type == "CHAR(2)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "CHARACTER(2)"
            )
        elif column_type == "NCHAR(7)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "NCHAR(7)"
            )
        elif column_type == "NVARCHAR(17)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "NVARCHAR(17)"
            )
        elif column_type == "VARCHAR(123)":
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "VARCHAR(123)"
            )
        elif column_type in {
            "ENUM",
            "JSON",
            "LONGTEXT",
            "MEDIUMTEXT",
            "SET",
            "TINYTEXT",
        }:
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == "TEXT"
            )
        else:
            # Everything else translates to itself.
            assert (
                MySQLtoSQLite._translate_type_from_mysql_to_sqlite(column_type)
                == column_type
            )
def test_translate_default_numbers_from_mysql_to_sqlite(
    self, column_default, sqlite_default_translation, column_type
):
    """Numeric MySQL defaults must translate to the expected SQLite default."""
    translated = MySQLtoSQLite._translate_default_from_mysql_to_sqlite(
        column_default, column_type
    )
    assert translated == sqlite_default_translation
def test_transfer_limited_rows_from_mysql_to_sqlite(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    helpers,
    caplog,
    chunk,
    vacuum,
    buffered,
    prefix_indices,
):
    """Transfer with limit_rows set and verify schema parity plus that each
    SQLite table holds exactly the first ``limit_rows`` rows of its MySQL
    counterpart."""
    # Pick a small random row limit (Py2/Py3 range).
    if six.PY2:
        limit_rows = choice(xrange(1, 10))
    else:
        limit_rows = choice(range(1, 10))
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        limit_rows=limit_rows,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
        prefix_indices=prefix_indices,
    )
    caplog.set_level(logging.DEBUG)
    proc.transfer()
    # Every fixture table must have been announced, and the run must end
    # cleanly with only INFO-level records.
    assert all(
        message in [record.message for record in caplog.records]
        for message in {
            "Transferring table article_authors",
            "Transferring table article_images",
            "Transferring table article_tags",
            "Transferring table articles",
            "Transferring table authors",
            "Transferring table images",
            "Transferring table tags",
            "Transferring table misc",
            "Done!",
        }
    )
    assert all(record.levelname == "INFO" for record in caplog.records)
    assert not any(record.levelname == "ERROR" for record in caplog.records)
    # NOTE(review): str.format ignores unused kwargs, so json_serializer /
    # json_deserializer below do not affect the URL — likely intended for
    # create_engine; confirm.
    sqlite_engine = create_engine(
        "sqlite:///{database}".format(
            database=sqlite_database,
            json_serializer=json.dumps,
            json_deserializer=json.loads,
        )
    )
    sqlite_cnx = sqlite_engine.connect()
    sqlite_inspect = inspect(sqlite_engine)
    sqlite_tables = sqlite_inspect.get_table_names()
    mysql_engine = create_engine(
        "mysql+mysqldb://{user}:{password}@{host}:{port}/{database}".format(
            user=mysql_credentials.user,
            password=mysql_credentials.password,
            host=mysql_credentials.host,
            port=mysql_credentials.port,
            database=mysql_credentials.database,
        )
    )
    mysql_cnx = mysql_engine.connect()
    mysql_inspect = inspect(mysql_engine)
    mysql_tables = mysql_inspect.get_table_names()
    # A direct connector connection is only needed to read the server version.
    mysql_connector_connection = mysql.connector.connect(
        user=mysql_credentials.user,
        password=mysql_credentials.password,
        host=mysql_credentials.host,
        port=mysql_credentials.port,
        database=mysql_credentials.database,
    )
    server_version = mysql_connector_connection.get_server_version()

    """ Test if both databases have the same table names """
    assert sqlite_tables == mysql_tables

    """ Test if all the tables have the same column names """
    for table_name in sqlite_tables:
        assert [
            column["name"] for column in sqlite_inspect.get_columns(table_name)
        ] == [column["name"] for column in mysql_inspect.get_columns(table_name)]

    """ Test if all the tables have the same indices """
    index_keys = {"name", "column_names", "unique"}
    mysql_indices = []
    for table_name in mysql_tables:
        for index in mysql_inspect.get_indexes(table_name):
            mysql_index = {}
            for key in index_keys:
                if key == "name" and prefix_indices:
                    # With prefix_indices the converter renames indices to
                    # "<table>_<index>"; mirror that for the comparison.
                    mysql_index[key] = "{table}_{name}".format(
                        table=table_name, name=index[key]
                    )
                else:
                    mysql_index[key] = index[key]
            mysql_indices.append(mysql_index)
    for table_name in sqlite_tables:
        for sqlite_index in sqlite_inspect.get_indexes(table_name):
            # SQLite reports uniqueness as 0/1; normalize before comparing.
            sqlite_index["unique"] = bool(sqlite_index["unique"])
            assert sqlite_index in mysql_indices

    """ Test if all the tables have the same foreign keys """
    for table_name in mysql_tables:
        mysql_fk_stmt = text(
            """
            SELECT k.COLUMN_NAME AS `from`,
                   k.REFERENCED_TABLE_NAME AS `table`,
                   k.REFERENCED_COLUMN_NAME AS `to`,
                   c.UPDATE_RULE AS `on_update`,
                   c.DELETE_RULE AS `on_delete`
            FROM information_schema.TABLE_CONSTRAINTS AS i
            {JOIN} information_schema.KEY_COLUMN_USAGE AS k ON i.CONSTRAINT_NAME = k.CONSTRAINT_NAME
            {JOIN} information_schema.REFERENTIAL_CONSTRAINTS c ON c.CONSTRAINT_NAME = i.CONSTRAINT_NAME
            WHERE i.TABLE_SCHEMA = :table_schema
            AND i.TABLE_NAME = :table_name
            AND i.CONSTRAINT_TYPE = :constraint_type
            """.format(
                # MySQL 8.0.19 still works with "LEFT JOIN" everything above requires "JOIN"
                # NOTE(review): this check ignores the minor version
                # (server_version[1]), so e.g. 8.1.0 would still take the
                # LEFT JOIN branch — confirm that is intended.
                JOIN="JOIN"
                if (server_version[0] == 8 and server_version[2] > 19)
                else "LEFT JOIN"
            )
        ).bindparams(
            table_schema=mysql_credentials.database,
            table_name=table_name,
            constraint_type="FOREIGN KEY",
        )
        mysql_fk_result = mysql_cnx.execute(mysql_fk_stmt)
        mysql_foreign_keys = [dict(row) for row in mysql_fk_result]
        sqlite_fk_stmt = 'PRAGMA foreign_key_list("{table}")'.format(
            table=table_name
        )
        sqlite_fk_result = sqlite_cnx.execute(sqlite_fk_stmt)
        if sqlite_fk_result.returns_rows:
            for row in sqlite_fk_result:
                fk = dict(row)
                assert {
                    "table": fk["table"],
                    "from": fk["from"],
                    "to": fk["to"],
                    "on_update": fk["on_update"],
                    "on_delete": fk["on_delete"],
                } in mysql_foreign_keys

    """ Check if all the data was transferred correctly """
    sqlite_results = []
    mysql_results = []
    meta = MetaData(bind=None)
    for table_name in sqlite_tables:
        sqlite_table = Table(
            table_name, meta, autoload=True, autoload_with=sqlite_engine
        )
        sqlite_stmt = select([sqlite_table])
        sqlite_result = sqlite_cnx.execute(sqlite_stmt).fetchall()
        sqlite_result.sort()
        # Normalize Decimal to float so both sides compare equal.
        sqlite_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in sqlite_result
        ]
        sqlite_results.append(sqlite_result)
    for table_name in mysql_tables:
        mysql_table = Table(
            table_name, meta, autoload=True, autoload_with=mysql_engine
        )
        # Apply the same row limit the converter used.
        mysql_stmt = select([mysql_table]).limit(limit_rows)
        mysql_result = mysql_cnx.execute(mysql_stmt).fetchall()
        mysql_result.sort()
        mysql_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in mysql_result
        ]
        mysql_results.append(mysql_result)
    assert sqlite_results == mysql_results
def test_translate_default_booleans_from_mysql_to_sqlite(
    self, mocker, column_default, sqlite_default_translation, sqlite_version
):
    """BOOLEAN defaults must translate per the (mocked) SQLite runtime version."""
    # Pretend the interpreter links the parametrized SQLite version.
    mocker.patch.object(sqlite3, "sqlite_version", sqlite_version)
    translated = MySQLtoSQLite._translate_default_from_mysql_to_sqlite(
        column_default, "BOOLEAN"
    )
    assert translated == sqlite_default_translation