Example #1
def database_sandbox_session(max_retries=3):
    db_connections = get_connection(
        config.env_config.topology_path,
        config.env_config.rbr_source_cluster,
        config.env_config.schema_tracker_cluster,
        config.env_config.rbr_state_cluster,
        is_avoid_internal_packages_set()
    )
    done_making_mysqld = False
    retries = 0
    while not done_making_mysqld:
        # mysqld takes time to launch, so we retry a few times before giving up
        try:
            _per_process_mysql_daemon = PerProcessMySQLDaemon()
            done_making_mysqld = True
        except RuntimeError:
            retries += 1
            if retries > max_retries:
                raise
    _session_prev_engine = db_connections.state_session.bind

    db_connections.state_session.bind = _per_process_mysql_daemon.engine
    db_connections.state_session.enforce_read_only = False
    yield db_connections.state_session
    db_connections.state_session.bind = _session_prev_engine
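A minimal sketch of consuming the generator above as a pytest fixture (an assumption: the original decorator is not shown in these snippets, and the fixture and test names here are hypothetical):

import pytest

@pytest.fixture
def sandbox_session():
    # Delegate to the generator above: forward the yielded session, then
    # exhaust the generator so the previous engine binding is restored.
    gen = database_sandbox_session()
    yield next(gen)
    next(gen, None)

def test_state_session_is_writable(sandbox_session):
    # The session is bound to the per-process mysqld engine with
    # enforce_read_only turned off, so plain statements execute.
    sandbox_session.execute("SELECT 1")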
 def __init__(self):
     super(BaseParseReplicationStream, self).__init__()
     self.db_connections = get_connection(
         config.env_config.topology_path,
         config.env_config.rbr_source_cluster,
         config.env_config.schema_tracker_cluster,
         config.env_config.rbr_state_cluster,
         is_avoid_internal_packages_set(),
         config.env_config.rbr_source_cluster_topology_name,
     )
     self.schema_wrapper = SchemaWrapper(
         db_connections=self.db_connections,
         schematizer_client=get_schematizer()
     )
     self.register_dry_run = config.env_config.register_dry_run
     self.publish_dry_run = config.env_config.publish_dry_run
     self._running = True
     self._profiler_running = False
     self._changelog_mode = config.env_config.changelog_mode
     if get_config().kafka_producer_buffer_size > config.env_config.recovery_queue_size:
         # Printing here, since this executes *before* logging is
         # configured.
         sys.stderr.write(
             "Shutting down because kafka_producer_buffer_size was greater "
             "than recovery_queue_size"
         )
         sys.exit(1)
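The final check is a startup invariant: presumably every message the producer may still be buffering must fit in the recovery queue, or messages could be lost on recovery. A sketch of the same check as a standalone helper (the helper name is hypothetical):

def validate_buffer_sizes(producer_buffer_size, recovery_queue_size):
    # Hypothetical extraction of the invariant enforced in __init__ above.
    if producer_buffer_size > recovery_queue_size:
        raise SystemExit(
            "Shutting down because kafka_producer_buffer_size was greater "
            "than recovery_queue_size"
        )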
 def mock_db_connections(self, topology_path, mock_source_cluster_name,
                         mock_tracker_cluster_name, mock_state_cluster_name,
                         yelp_conn_conf):
     yield get_connection(topology_path, mock_source_cluster_name,
                          mock_tracker_cluster_name,
                          mock_state_cluster_name,
                          is_avoid_internal_packages_set())
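A minimal sketch of driving this generator directly (an assumption: in the original test suite it is presumably registered as a pytest fixture on a test class; all argument values below are placeholders):

def test_mock_db_connections_yields_connections():
    gen = mock_db_connections(
        None,                 # stand-in for self; normally bound on the test class
        'topology.yaml',      # placeholder topology_path
        'mock_source',        # placeholder cluster names
        'mock_tracker',
        'mock_state',
        yelp_conn_conf=None,  # accepted but unused by the body above
    )
    assert next(gen) is not None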
Example #4
def get_mock_stats_counters():
    """In open source mode StatsCount is not supported because of absence of
    yelp_meteorite and hence StatsCount is passed in as None into BaseEventHandler`.
    In internal mode StatsCount be set to None if
    config.env_config.disable_meteorite is True, in all other cases
    StatsCount will be set to an object of
    data_pipeline.tools.meteorite_wrappers.StatsCounter

    Goal here is to test event handlers for all the above cases.
    None and object of StatsCounter cover all the cases described above.
    We start by setting counter to None and if data_pipeline.tools.meteorite_wrappers
    is importable we add StatsCounter to possible counters values.
    """
    counters = [None]
    try:
        # TODO(DATAPIPE-1509|abrar): Currently we have
        # force_avoid_internal_packages as a means of simulating the absence
        # of a Yelp-internal package. All references to
        # force_avoid_internal_packages have to be removed from
        # RH once we are completely ready for open source.
        if is_avoid_internal_packages_set():
            raise ImportError
        from data_pipeline.tools.meteorite_wrappers import StatsCounter
        counters.append(mock.Mock(autospec=StatsCounter))
    except ImportError:
        pass
    return counters
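A minimal sketch of a test parametrized over these counters (the test name is hypothetical):

import mock
import pytest

@pytest.mark.parametrize('counter', get_mock_stats_counters())
def test_event_handler_for_each_counter(counter):
    # Each case is either None (open source, or meteorite disabled) or a
    # StatsCounter mock; a real test would pass `counter` into the event
    # handler under test.
    assert counter is None or isinstance(counter, mock.Mock)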
Example #6
 def is_meteorite_supported(cls):
     try:
         # TODO(DATAPIPE-1509|abrar): Currently we have
         # force_avoid_internal_packages as a means of simulating the absence
         # of a Yelp-internal package. All references to
         # force_avoid_internal_packages have to be removed from
         # RH once we are completely ready for open source.
         if is_avoid_internal_packages_set():
             raise ImportError
         from data_pipeline.tools.meteorite_wrappers import StatTimer  # NOQA
         return True
     except ImportError:
         return False
 def is_pii_supported(cls):
     try:
         # TODO(DATAPIPE-1509|abrar): Currently we have
         # force_avoid_internal_packages as a means of simulating the absence
         # of a Yelp-internal package. All references to
         # force_avoid_internal_packages have to be removed from
         # RH once we are completely ready for open source.
         if is_avoid_internal_packages_set():
             raise ImportError
         from pii_generator.components.pii_identifier import PIIIdentifier  # NOQA
         return True
     except ImportError:
         return False
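A minimal sketch of gating tests on these probes (an assumption: both take cls, so they are presumably classmethods on a class not shown here; the stand-in class below is hypothetical):

import pytest

class Capabilities(object):
    # Hypothetical holder class for the two probes above.
    is_meteorite_supported = classmethod(is_meteorite_supported)
    is_pii_supported = classmethod(is_pii_supported)

@pytest.mark.skipif(
    not Capabilities.is_meteorite_supported(),
    reason='yelp_meteorite is unavailable in open source mode',
)
def test_meteorite_stats():
    pass  # body elided; the skipif marker is the point of this sketch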
Example #9
def database_sandbox_session():
    db_connections = get_connection(config.env_config.topology_path,
                                    config.env_config.rbr_source_cluster,
                                    config.env_config.schema_tracker_cluster,
                                    config.env_config.rbr_state_cluster,
                                    is_avoid_internal_packages_set())
    _per_process_mysql_daemon = launch_mysql_daemon()
    _session_prev_engine = db_connections.state_session.bind

    db_connections.state_session.bind = _per_process_mysql_daemon.engine
    db_connections.state_session.enforce_read_only = False
    yield db_connections.state_session
    db_connections.state_session.bind = _session_prev_engine
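This variant factors the retry loop of Example #1 into launch_mysql_daemon(). A sketch of that helper, reconstructed from the loop shown in Example #1 (an assumption; the real helper is not shown on this page):

def launch_mysql_daemon(max_retries=3):
    # Reconstructed from Example #1: mysqld takes time to come up, so retry
    # constructing the daemon a few times before re-raising.
    retries = 0
    while True:
        try:
            return PerProcessMySQLDaemon()
        except RuntimeError:
            retries += 1
            if retries > max_retries:
                raise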
Example #11
def get_base_model():
    try:
        if is_avoid_internal_packages_set():
            # TODO(DATAPIPE-1509|abrar): Currently we have
            # force_avoid_internal_packages as a means of simulating the absence
            # of a Yelp-internal package. All references to
            # force_avoid_internal_packages have to be removed from
            # RH once we are completely ready for open source.
            raise ImportError
        from yelp_conn.session import declarative_base
        return declarative_base()
    except ImportError:
        from sqlalchemy.ext.declarative import declarative_base
        return declarative_base()
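A minimal usage sketch: whichever declarative_base is importable, the returned class behaves as a normal SQLAlchemy declarative base (the model below is illustrative):

from sqlalchemy import Column, Integer

Base = get_base_model()

class ExampleState(Base):
    # Illustrative model; real models would define their own tables.
    __tablename__ = 'example_state'
    id = Column(Integer, primary_key=True)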