def cli(  # noqa: ignore=C0330 # pylint: disable=C0330,R0913
    sqlite_file,
    mysql_user,
    mysql_password,
    mysql_database,
    mysql_host,
    mysql_port,
    chunk,
    log_file,
    vacuum,
    use_buffered_cursors,
):
    """Transfer MySQL to SQLite using the provided CLI options."""
    # Collect the constructor options up front so the try block only wraps
    # the work that can actually fail (connecting and transferring).
    options = dict(
        sqlite_file=sqlite_file,
        mysql_user=mysql_user,
        mysql_password=mysql_password,
        mysql_database=mysql_database,
        mysql_host=mysql_host,
        mysql_port=mysql_port,
        chunk=chunk,
        vacuum=vacuum,
        buffered=use_buffered_cursors,
        log_file=log_file,
    )
    try:
        MySQLtoSQLite(**options).transfer()
    except KeyboardInterrupt:
        # Ctrl-C: report cleanly instead of dumping a traceback.
        print("\nProcess interrupted. Exiting...")
        sys.exit(1)
    except Exception as err:  # pylint: disable=W0703
        # Top-level CLI boundary: surface the error message and fail.
        print(err)
        sys.exit(1)
def test_transfer_limited_rows_from_mysql_to_sqlite(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    helpers,
    caplog,
    chunk,
    vacuum,
    buffered,
    prefix_indices,
):
    """Transfer a random, limited number of rows per table and verify the result.

    End-to-end check: runs MySQLtoSQLite with ``limit_rows`` set to a random
    value in [1, 9], then compares the produced SQLite database against the
    source MySQL database (table names, column names, indices, foreign keys,
    and row data limited to ``limit_rows`` per table).

    NOTE(review): ``helpers``, ``chunk``, ``vacuum`` and ``buffered`` are not
    referenced in the body — presumably parameterized fixtures kept for the
    test matrix; confirm against conftest.
    """
    # Random per-table row cap; xrange avoids materializing a list on Py2.
    if six.PY2:
        limit_rows = choice(xrange(1, 10))
    else:
        limit_rows = choice(range(1, 10))
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        limit_rows=limit_rows,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
        prefix_indices=prefix_indices,
    )
    # Capture everything the transfer logs so the per-table messages below
    # can be asserted against.
    caplog.set_level(logging.DEBUG)
    proc.transfer()
    # Every table of the fixture schema must have been announced, and the
    # run must end with "Done!".
    assert all(
        message in [record.message for record in caplog.records]
        for message in {
            "Transferring table article_authors",
            "Transferring table article_images",
            "Transferring table article_tags",
            "Transferring table articles",
            "Transferring table authors",
            "Transferring table images",
            "Transferring table tags",
            "Transferring table misc",
            "Done!",
        }
    )
    assert all(record.levelname == "INFO" for record in caplog.records)
    assert not any(record.levelname == "ERROR" for record in caplog.records)
    # NOTE(review): json_serializer/json_deserializer are passed to
    # str.format here, which has no such placeholders, so they are silently
    # ignored — they were presumably meant as create_engine() kwargs; confirm.
    sqlite_engine = create_engine(
        "sqlite:///{database}".format(
            database=sqlite_database,
            json_serializer=json.dumps,
            json_deserializer=json.loads,
        )
    )
    sqlite_cnx = sqlite_engine.connect()
    sqlite_inspect = inspect(sqlite_engine)
    sqlite_tables = sqlite_inspect.get_table_names()
    mysql_engine = create_engine(
        "mysql+mysqldb://{user}:{password}@{host}:{port}/{database}".format(
            user=mysql_credentials.user,
            password=mysql_credentials.password,
            host=mysql_credentials.host,
            port=mysql_credentials.port,
            database=mysql_credentials.database,
        )
    )
    mysql_cnx = mysql_engine.connect()
    mysql_inspect = inspect(mysql_engine)
    mysql_tables = mysql_inspect.get_table_names()
    # A separate mysql.connector connection is opened only to read the
    # server version, which drives the JOIN-flavor choice in the FK query.
    mysql_connector_connection = mysql.connector.connect(
        user=mysql_credentials.user,
        password=mysql_credentials.password,
        host=mysql_credentials.host,
        port=mysql_credentials.port,
        database=mysql_credentials.database,
    )
    server_version = mysql_connector_connection.get_server_version()
    """ Test if both databases have the same table names """
    assert sqlite_tables == mysql_tables
    """ Test if all the tables have the same column names """
    for table_name in sqlite_tables:
        assert [
            column["name"] for column in sqlite_inspect.get_columns(table_name)
        ] == [column["name"] for column in mysql_inspect.get_columns(table_name)]
    """ Test if all the tables have the same indices """
    index_keys = {"name", "column_names", "unique"}
    mysql_indices = []
    for table_name in mysql_tables:
        for index in mysql_inspect.get_indexes(table_name):
            mysql_index = {}
            for key in index_keys:
                # When prefix_indices is on, the converter renames each index
                # to "<table>_<index>", so mirror that here before comparing.
                if key == "name" and prefix_indices:
                    mysql_index[key] = "{table}_{name}".format(
                        table=table_name, name=index[key]
                    )
                else:
                    mysql_index[key] = index[key]
            mysql_indices.append(mysql_index)
    for table_name in sqlite_tables:
        for sqlite_index in sqlite_inspect.get_indexes(table_name):
            # SQLite reports "unique" as 0/1; normalize to bool to match
            # what the MySQL inspector returns.
            sqlite_index["unique"] = bool(sqlite_index["unique"])
            assert sqlite_index in mysql_indices
    """ Test if all the tables have the same foreign keys """
    for table_name in mysql_tables:
        mysql_fk_stmt = text(
            """
            SELECT k.COLUMN_NAME AS `from`,
                   k.REFERENCED_TABLE_NAME AS `table`,
                   k.REFERENCED_COLUMN_NAME AS `to`,
                   c.UPDATE_RULE AS `on_update`,
                   c.DELETE_RULE AS `on_delete`
            FROM information_schema.TABLE_CONSTRAINTS AS i
            {JOIN} information_schema.KEY_COLUMN_USAGE AS k ON i.CONSTRAINT_NAME = k.CONSTRAINT_NAME
            {JOIN} information_schema.REFERENTIAL_CONSTRAINTS c ON c.CONSTRAINT_NAME = i.CONSTRAINT_NAME
            WHERE i.TABLE_SCHEMA = :table_schema
            AND i.TABLE_NAME = :table_name
            AND i.CONSTRAINT_TYPE = :constraint_type
            """.format(
                # MySQL 8.0.19 still works with "LEFT JOIN" everything above requires "JOIN"
                JOIN="JOIN"
                if (server_version[0] == 8 and server_version[2] > 19)
                else "LEFT JOIN"
            )
        ).bindparams(
            table_schema=mysql_credentials.database,
            table_name=table_name,
            constraint_type="FOREIGN KEY",
        )
        mysql_fk_result = mysql_cnx.execute(mysql_fk_stmt)
        mysql_foreign_keys = [dict(row) for row in mysql_fk_result]
        sqlite_fk_stmt = 'PRAGMA foreign_key_list("{table}")'.format(
            table=table_name
        )
        sqlite_fk_result = sqlite_cnx.execute(sqlite_fk_stmt)
        # PRAGMA returns no rows for tables without foreign keys.
        if sqlite_fk_result.returns_rows:
            for row in sqlite_fk_result:
                fk = dict(row)
                # Compare only the fields common to both representations.
                assert {
                    "table": fk["table"],
                    "from": fk["from"],
                    "to": fk["to"],
                    "on_update": fk["on_update"],
                    "on_delete": fk["on_delete"],
                } in mysql_foreign_keys
    """ Check if all the data was transferred correctly """
    sqlite_results = []
    mysql_results = []
    meta = MetaData(bind=None)
    for table_name in sqlite_tables:
        sqlite_table = Table(
            table_name, meta, autoload=True, autoload_with=sqlite_engine
        )
        sqlite_stmt = select([sqlite_table])
        sqlite_result = sqlite_cnx.execute(sqlite_stmt).fetchall()
        # Row order is not guaranteed across engines; sort before comparing.
        sqlite_result.sort()
        # MySQL returns DECIMAL columns as Decimal while SQLite stores
        # floats; normalize both sides to float for comparison.
        sqlite_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in sqlite_result
        ]
        sqlite_results.append(sqlite_result)
    for table_name in mysql_tables:
        mysql_table = Table(
            table_name, meta, autoload=True, autoload_with=mysql_engine
        )
        # Apply the same row cap used during the transfer so both result
        # sets cover the same rows.
        mysql_stmt = select([mysql_table]).limit(limit_rows)
        mysql_result = mysql_cnx.execute(mysql_stmt).fetchall()
        mysql_result.sort()
        mysql_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in mysql_result
        ]
        mysql_results.append(mysql_result)
    assert sqlite_results == mysql_results
def test_transfer_specific_tables_transfers_only_specified_tables_from_mysql_to_sqlite(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    helpers,
    caplog,
    chunk,
    vacuum,
    buffered,
    prefix_indices,
):
    """Transfer a random subset of tables and verify only those were copied.

    Picks a random non-empty, proper subset of the MySQL tables, runs the
    converter with ``mysql_tables`` restricted to that subset, and asserts
    the SQLite result contains exactly those tables with matching columns,
    indices and row data.

    NOTE(review): ``helpers``, ``chunk``, ``vacuum`` and ``buffered`` are not
    referenced in the body — presumably parameterized fixtures kept for the
    test matrix; confirm against conftest.
    """
    mysql_engine = create_engine(
        "mysql+mysqldb://{user}:{password}@{host}:{port}/{database}".format(
            user=mysql_credentials.user,
            password=mysql_credentials.password,
            host=mysql_credentials.host,
            port=mysql_credentials.port,
            database=mysql_credentials.database,
        )
    )
    mysql_cnx = mysql_engine.connect()
    mysql_inspect = inspect(mysql_engine)
    mysql_tables = mysql_inspect.get_table_names()
    # Choose how many tables to transfer: at least 1, fewer than all of
    # them, so the "only specified tables" property is actually exercised.
    if six.PY2:
        table_number = choice(xrange(1, len(mysql_tables)))
    else:
        table_number = choice(range(1, len(mysql_tables)))
    random_mysql_tables = sample(mysql_tables, table_number)
    # Sorted so it can be compared directly with the (sorted) SQLite
    # inspector output later.
    random_mysql_tables.sort()
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_tables=random_mysql_tables,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
        prefix_indices=prefix_indices,
    )
    caplog.set_level(logging.DEBUG)
    proc.transfer()
    # Exactly the chosen tables must be announced, and the run must finish
    # with "Done!".
    assert all(
        message in [record.message for record in caplog.records]
        for message in set(
            ["Transferring table {}".format(table) for table in random_mysql_tables]
            + ["Done!"]
        )
    )
    assert all(record.levelname == "INFO" for record in caplog.records)
    assert not any(record.levelname == "ERROR" for record in caplog.records)
    # NOTE(review): json_serializer/json_deserializer are passed to
    # str.format here, which has no such placeholders, so they are silently
    # ignored — they were presumably meant as create_engine() kwargs; confirm.
    sqlite_engine = create_engine(
        "sqlite:///{database}".format(
            database=sqlite_database,
            json_serializer=json.dumps,
            json_deserializer=json.loads,
        )
    )
    sqlite_cnx = sqlite_engine.connect()
    sqlite_inspect = inspect(sqlite_engine)
    sqlite_tables = sqlite_inspect.get_table_names()
    """ Test if both databases have the same table names """
    assert sqlite_tables == random_mysql_tables
    """ Test if all the tables have the same column names """
    for table_name in sqlite_tables:
        assert [
            column["name"] for column in sqlite_inspect.get_columns(table_name)
        ] == [column["name"] for column in mysql_inspect.get_columns(table_name)]
    """ Test if all the tables have the same indices """
    index_keys = {"name", "column_names", "unique"}
    mysql_indices = []
    for table_name in random_mysql_tables:
        for index in mysql_inspect.get_indexes(table_name):
            mysql_index = {}
            for key in index_keys:
                # When prefix_indices is on, the converter renames each index
                # to "<table>_<index>", so mirror that here before comparing.
                if key == "name" and prefix_indices:
                    mysql_index[key] = "{table}_{name}".format(
                        table=table_name, name=index[key]
                    )
                else:
                    mysql_index[key] = index[key]
            mysql_indices.append(mysql_index)
    for table_name in sqlite_tables:
        for sqlite_index in sqlite_inspect.get_indexes(table_name):
            # SQLite reports "unique" as 0/1; normalize to bool to match
            # what the MySQL inspector returns.
            sqlite_index["unique"] = bool(sqlite_index["unique"])
            assert sqlite_index in mysql_indices
    """ Check if all the data was transferred correctly """
    sqlite_results = []
    mysql_results = []
    meta = MetaData(bind=None)
    for table_name in sqlite_tables:
        sqlite_table = Table(
            table_name, meta, autoload=True, autoload_with=sqlite_engine
        )
        sqlite_stmt = select([sqlite_table])
        sqlite_result = sqlite_cnx.execute(sqlite_stmt).fetchall()
        # Row order is not guaranteed across engines; sort before comparing.
        sqlite_result.sort()
        # Normalize Decimal (MySQL DECIMAL columns) to float for comparison
        # with SQLite's stored values.
        sqlite_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in sqlite_result
        ]
        sqlite_results.append(sqlite_result)
    for table_name in random_mysql_tables:
        mysql_table = Table(
            table_name, meta, autoload=True, autoload_with=mysql_engine
        )
        mysql_stmt = select([mysql_table])
        mysql_result = mysql_cnx.execute(mysql_stmt).fetchall()
        mysql_result.sort()
        mysql_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in mysql_result
        ]
        mysql_results.append(mysql_result)
    assert sqlite_results == mysql_results
def test_transfer_transfers_all_tables_from_mysql_to_sqlite(
    self,
    sqlite_database,
    mysql_database,
    mysql_credentials,
    helpers,
    capsys,
    caplog,
    chunk,
    vacuum,
    buffered,
):
    """Transfer the full MySQL database and verify the SQLite copy matches.

    Runs the converter over every table (honoring the parameterized
    ``chunk``/``vacuum``/``buffered`` options), then compares table names,
    column names, indices, foreign keys and full row data between the two
    databases.

    NOTE(review): ``helpers`` is not referenced in the body — presumably a
    fixture kept for parity with the other tests; confirm against conftest.
    """
    proc = MySQLtoSQLite(
        sqlite_file=sqlite_database,
        mysql_user=mysql_credentials.user,
        mysql_password=mysql_credentials.password,
        mysql_database=mysql_credentials.database,
        mysql_host=mysql_credentials.host,
        mysql_port=mysql_credentials.port,
        chunk=chunk,
        vacuum=vacuum,
        buffered=buffered,
    )
    caplog.set_level(logging.DEBUG)
    proc.transfer()
    # Every table of the fixture schema must have been announced, and the
    # run must end with "Done!".
    assert all(
        message in [record.message for record in caplog.records]
        for message in {
            "Transferring table article_authors",
            "Transferring table article_images",
            "Transferring table article_tags",
            "Transferring table articles",
            "Transferring table authors",
            "Transferring table images",
            "Transferring table tags",
            "Transferring table misc",
            "Done!",
        }
    )
    assert all(record.levelname == "INFO" for record in caplog.records)
    assert not any(record.levelname == "ERROR" for record in caplog.records)
    # The converter also prints progress to stdout; the last line printed
    # must be the completion message.
    out, err = capsys.readouterr()
    assert "Done!" in out.splitlines()[-1]
    # NOTE(review): json_serializer/json_deserializer are passed to
    # str.format here, which has no such placeholders, so they are silently
    # ignored — they were presumably meant as create_engine() kwargs; confirm.
    sqlite_engine = create_engine(
        "sqlite:///{database}".format(
            database=sqlite_database,
            json_serializer=json.dumps,
            json_deserializer=json.loads,
        )
    )
    sqlite_cnx = sqlite_engine.connect()
    sqlite_inspect = inspect(sqlite_engine)
    sqlite_tables = sqlite_inspect.get_table_names()
    mysql_engine = create_engine(
        "mysql+mysqldb://{user}:{password}@{host}:{port}/{database}".format(
            user=mysql_credentials.user,
            password=mysql_credentials.password,
            host=mysql_credentials.host,
            port=mysql_credentials.port,
            database=mysql_credentials.database,
        )
    )
    mysql_cnx = mysql_engine.connect()
    mysql_inspect = inspect(mysql_engine)
    mysql_tables = mysql_inspect.get_table_names()
    """ Test if both databases have the same table names """
    assert sqlite_tables == mysql_tables
    """ Test if all the tables have the same column names """
    for table_name in sqlite_tables:
        assert [
            column["name"] for column in sqlite_inspect.get_columns(table_name)
        ] == [column["name"] for column in mysql_inspect.get_columns(table_name)]
    """ Test if all the tables have the same indices """
    index_keys = ("name", "column_names", "unique")
    # Flatten all MySQL indices into one collection, keeping only the keys
    # that both inspectors report, so SQLite indices can be membership-tested.
    mysql_indices = tuple(
        {key: index[key] for key in index_keys}
        for index in (
            chain.from_iterable(
                mysql_inspect.get_indexes(table_name) for table_name in mysql_tables
            )
        )
    )
    for table_name in sqlite_tables:
        for sqlite_index in sqlite_inspect.get_indexes(table_name):
            # SQLite reports "unique" as 0/1; normalize to bool to match
            # what the MySQL inspector returns.
            sqlite_index["unique"] = bool(sqlite_index["unique"])
            assert sqlite_index in mysql_indices
    """ Test if all the tables have the same foreign keys """
    for table_name in mysql_tables:
        mysql_fk_stmt = text(
            """
            SELECT k.COLUMN_NAME AS `from`,
                   k.REFERENCED_TABLE_NAME AS `table`,
                   k.REFERENCED_COLUMN_NAME AS `to`,
                   c.UPDATE_RULE AS `on_update`,
                   c.DELETE_RULE AS `on_delete`
            FROM information_schema.TABLE_CONSTRAINTS AS i
            LEFT JOIN information_schema.KEY_COLUMN_USAGE AS k ON i.CONSTRAINT_NAME = k.CONSTRAINT_NAME
            LEFT JOIN information_schema.REFERENTIAL_CONSTRAINTS c ON c.CONSTRAINT_NAME = i.CONSTRAINT_NAME
            WHERE i.TABLE_SCHEMA = :table_schema
            AND i.TABLE_NAME = :table_name
            AND i.CONSTRAINT_TYPE = :constraint_type
            """
        ).bindparams(
            table_schema=mysql_credentials.database,
            table_name=table_name,
            constraint_type="FOREIGN KEY",
        )
        mysql_fk_result = mysql_cnx.execute(mysql_fk_stmt)
        mysql_foreign_keys = [dict(row) for row in mysql_fk_result]
        sqlite_fk_stmt = 'PRAGMA foreign_key_list("{table}")'.format(
            table=table_name
        )
        sqlite_fk_result = sqlite_cnx.execute(sqlite_fk_stmt)
        # PRAGMA returns no rows for tables without foreign keys.
        if sqlite_fk_result.returns_rows:
            for row in sqlite_fk_result:
                fk = dict(row)
                # Compare only the fields common to both representations.
                assert {
                    "table": fk["table"],
                    "from": fk["from"],
                    "to": fk["to"],
                    "on_update": fk["on_update"],
                    "on_delete": fk["on_delete"],
                } in mysql_foreign_keys
    """ Check if all the data was transferred correctly """
    sqlite_results = []
    mysql_results = []
    meta = MetaData(bind=None)
    for table_name in sqlite_tables:
        sqlite_table = Table(
            table_name, meta, autoload=True, autoload_with=sqlite_engine
        )
        sqlite_stmt = select([sqlite_table])
        sqlite_result = sqlite_cnx.execute(sqlite_stmt).fetchall()
        # Row order is not guaranteed across engines; sort before comparing.
        sqlite_result.sort()
        # Normalize Decimal (MySQL DECIMAL columns) to float for comparison
        # with SQLite's stored values.
        sqlite_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in sqlite_result
        ]
        sqlite_results.append(sqlite_result)
    for table_name in mysql_tables:
        mysql_table = Table(
            table_name, meta, autoload=True, autoload_with=mysql_engine
        )
        mysql_stmt = select([mysql_table])
        mysql_result = mysql_cnx.execute(mysql_stmt).fetchall()
        mysql_result.sort()
        mysql_result = [
            [float(data) if isinstance(data, Decimal) else data for data in row]
            for row in mysql_result
        ]
        mysql_results.append(mysql_result)
    assert sqlite_results == mysql_results