def __init__(self, alias):
    """Open the control connection and one logical-replication connection
    per configured database for the Postgres source identified by *alias*.
    """
    super().__init__(alias)
    source_db = Settings.get_source_db(alias)
    # Shared connection parameters reused for every psycopg2 connection below.
    params = dict(
        host=source_db.get("host"),
        port=source_db.get("port"),
        user=source_db.get("user"),
        password=source_db.get("password"),
    )
    self.insert_interval = Settings.insert_interval()
    # Optional DML filters from config; fall back to empty lists when unset.
    self.skip_dmls = source_db.get("skip_dmls") or []
    self.skip_update_tables = source_db.get("skip_update_tables") or []
    self.skip_delete_tables = source_db.get("skip_delete_tables") or []
    # Control connection with dict-style rows; autocommit so statements such
    # as replication-slot management run outside an explicit transaction.
    self.conn = psycopg2.connect(**params, cursor_factory=DictCursor)
    self.conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    self.cursor = self.conn.cursor()
    # One logical-replication connection (and cursor) per configured database.
    # NOTE(review): assumes self._repl_conn is initialized by the base class —
    # confirm in super().__init__.
    for database in source_db.get("databases"):
        database_name = database.get("database")
        replication_conn = psycopg2.connect(
            **params, database=database_name, connection_factory=LogicalReplicationConnection
        )
        self._repl_conn[database_name] = {
            "cursor": replication_conn.cursor(),
        }
def __init__(self, alias: str):
    """Record the configured server_id for *alias* and derive the key under
    which the binlog position is persisted."""
    super().__init__()
    server_id = Settings.get_source_db(alias).get("server_id")
    self.server_id = server_id
    self.pos_key = f"{self.prefix}:binlog:{alias}:{server_id}"
def __init__(self, alias: str):
    """Cache the connection settings for *alias* and install handlers so
    SIGINT/SIGTERM are routed to self.signal_handler."""
    self.alias = alias
    cfg = Settings.get_source_db(alias)
    self.source_db = cfg
    # Mirror the commonly used connection fields as plain attributes.
    for field in ("host", "port", "user", "password"):
        setattr(self, field, cfg.get(field))
    # Graceful-shutdown hooks.
    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, self.signal_handler)
def __init__(self, alias):
    """Build a JSON-serializing Kafka producer for *alias* and make sure the
    per-alias topic exists."""
    super().__init__(alias)
    self.servers = Settings.get("kafka").get("servers")
    topic_prefix = Settings.get("kafka").get("topic_prefix")
    self.topic = f"{topic_prefix}.{alias}"
    self.databases = Settings.get_source_db(alias).get("databases")

    def _serialize_value(value):
        # Payloads are UTF-8 JSON encoded with the project's encoder.
        return json.dumps(value, cls=JsonEncoder).encode()

    def _serialize_key(key):
        return key.encode()

    self.producer = KafkaProducer(
        bootstrap_servers=self.servers,
        value_serializer=_serialize_value,
        key_serializer=_serialize_key,
    )
    self._init_topic()
def get_broker(alias: str) -> Broker:
    """Return the broker for *alias*, creating and caching it on first use.

    The config lookup is performed only on a cache miss, mirroring
    get_reader(); previously the settings were re-read on every call even
    when the broker was already cached.

    Raises:
        NotImplementedError: if the configured broker_type is not supported.
    """
    b = _brokers.get(alias)
    if not b:
        broker_type = Settings.get_source_db(alias).get("broker_type")
        if broker_type == BrokerType.redis:
            b = RedisBroker(alias)
        elif broker_type == BrokerType.kafka:
            b = KafkaBroker(alias)
        else:
            raise NotImplementedError(f"Unsupported broker_type {broker_type}")
        _brokers[alias] = b
    return b
def get_reader(alias: str) -> Reader:
    """Return the reader for *alias*, creating and caching it on first use."""
    reader = _readers.get(alias)
    if reader:
        return reader
    source_db = Settings.get_source_db(alias)
    if not source_db:
        raise Exception(f"Can't find alias {alias} in config.")
    db_type = source_db.get("db_type")
    # Imports are deferred so only the driver for the configured backend
    # is loaded.
    if db_type == SourceDatabase.mysql.value:
        from synch.reader.mysql import Mysql

        reader = Mysql(alias)
    elif db_type == SourceDatabase.postgres.value:
        from synch.reader.postgres import Postgres

        reader = Postgres(alias)
    else:
        raise NotImplementedError(f"Unsupported db_type {db_type}")
    _readers[alias] = reader
    return reader
def get_postgres_database():
    """Return the name of the first configured database for the Postgres alias."""
    databases = Settings.get_source_db(alias_postgres).get("databases")
    return databases[0].get("database")
def get_mysql_database():
    """Return the name of the first configured database for the MySQL alias."""
    databases = Settings.get_source_db(alias_mysql).get("databases")
    return databases[0].get("database")