@pytest.fixture  # decorator assumed; the snippet shows only the fixture body
def LocalConsumer(redis_client):
    # Patch FirebaseInstance's session/Firestore accessors with local test doubles
    # so the consumer never talks to a real Firebase project.
    with patch.object(artifacts.FirebaseInstance, 'get_session', new=get_local_session):
        with patch.object(artifacts.FirebaseInstance, 'get_cloud_firestore', new=get_local_cfs):
            _consumer = consumer.FirebaseConsumer(
                config.get_consumer_config(), None, redis_instance=redis_client)
            yield _consumer
            # Teardown: stop the consumer once the dependent tests finish.
            _consumer.stop()
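
This is the standard pytest yield-fixture pattern: everything before the yield is setup, everything after it is teardown. A minimal usage sketch, assuming the fixture above is registered in a conftest.py; the test name and assertion are illustrative, not part of the original suite.

def test_firebase_consumer_boots(LocalConsumer):
    # The patches are active and the consumer is wired to the test Redis here.
    assert LocalConsumer is not None
    # When the test returns, execution resumes after `yield` and stop() runs.
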
@pytest.fixture(autouse=True)  # decorator assumed; the comment below implies autouse=True
def check_ckan_readyness(request, *args):
    # @mark annotation does not work with autouse=True
    if 'integration' not in request.config.invocation_params.args:
        LOG.debug('NOT checking for CKAN')
        return
    LOG.debug('Waiting for CKAN')
    CC = config.get_consumer_config()
    url = CC.get('url')
    # Poll CKAN for up to 60 seconds (120 attempts, 0.5s apart) before giving up.
    for _ in range(120):
        try:
            res = requests.get(f'http://{url}')
            res.raise_for_status()
            return
        except Exception:
            sleep(.5)
    raise TimeoutError('Could not connect to ckan for integration test')
Example #3
@pytest.fixture(autouse=True)  # decorator assumed; the comment below implies autouse=True
def check_local_kibana_readyness(request, *args):
    # @mark annotation does not work with autouse=True.
    if 'integration' not in request.config.invocation_params.args:
        LOG.debug('NOT checking for Kibana')
        return
    LOG.debug('Waiting for LocalKibana')
    CC = config.get_consumer_config()
    url = CC.get('kibana_url')
    user = CC.get('elasticsearch_user')
    password = CC.get('elasticsearch_password')
    # Poll Kibana with basic auth for up to 60 seconds before giving up.
    for _ in range(120):
        try:
            res = requests.get(f'{url}', auth=(user, password))
            res.raise_for_status()
            return
        except Exception:
            sleep(.5)
    raise TimeoutError('Could not connect to kibana for integration test')
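
Both readiness checks implement the same poll-until-healthy loop against different endpoints. A hypothetical refactor sketch of a shared helper; the name wait_for_http and its parameters are illustrative and not part of the original code.

import requests
from time import sleep


def wait_for_http(url, auth=None, retries=120, delay=0.5):
    # Retry a GET until the service answers with a 2xx or attempts run out.
    for _ in range(retries):
        try:
            requests.get(url, auth=auth).raise_for_status()
            return
        except Exception:
            sleep(delay)
    raise TimeoutError(f'Could not connect to {url} for integration test')
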
Example #4
@pytest.fixture  # decorator assumed; only the fixture body appears in the snippet
def ElasticsearchConsumer(RedisInstance):
    # Build an ElasticsearchConsumer against the test Redis and stop it on teardown.
    settings = config.get_consumer_config()
    c = consumer.ElasticsearchConsumer(settings, None, RedisInstance)
    yield c
    c.stop()

import firebase_admin
from google.cloud import firestore  # import assumed for the firestore.Client annotation below

from aet.job import BaseJob, JobStatus
from aet.kafka import KafkaConsumer, FilterConfig, MaskConfig
from aet.logger import callback_logger, get_logger
from aet.resource import BaseResource, lock
from werkzeug.local import LocalProxy

# Aether python lib
# from aether.python.avro.schema import Node

from app.config import get_consumer_config, get_kafka_config
from app.fixtures import schemas

from app import helpers

LOG = get_logger('artifacts')
CONSUMER_CONFIG = get_consumer_config()
KAFKA_CONFIG = get_kafka_config()


class FirebaseInstance(BaseResource):
    schema = schemas.FB_INSTANCE
    jobs_path = '$.firebase'
    name = 'firebase'
    public_actions = BaseResource.public_actions + [
        'test_connection'
    ]

    app: firebase_admin.App = None    # initialized firebase app handle
    cfs: firestore.Client = None      # Cloud Firestore client
    rtdb: helpers.RTDB = None         # Realtime Database helper
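
jobs_path is presumably the JSONPath the aet job machinery uses to find, inside a submitted job definition, the id of the firebase resource the job depends on. A minimal sketch of that lookup under that assumption; the job document is hypothetical and jsonpath_ng is used purely for illustration.

from jsonpath_ng import parse  # illustration only

job_definition = {                   # hypothetical job document
    'id': 'default',
    'firebase': 'fb-instance-01',    # located via FirebaseInstance.jobs_path = '$.firebase'
}

matches = [m.value for m in parse('$.firebase').find(job_definition)]
print(matches)  # ['fb-instance-01']
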
@pytest.fixture  # decorator assumed
def CKANConsumer(birdisle_server, Birdisle):
    # Build a CKANConsumer backed by the in-process birdisle Redis; stop it on teardown.
    settings = config.get_consumer_config()
    c = consumer.CKANConsumer(settings, None, Birdisle)
    yield c
    c.stop()

@pytest.fixture  # decorator assumed
def Birdisle(birdisle_server):
    # Disable redis-py health-check pings on birdisle's local socket connection,
    # then connect a client and enable keyspace-event notifications ('KEA' = all key events).
    birdisle.redis.LocalSocketConnection.health_check_interval = 0
    password = config.get_consumer_config().get('REDIS_PASSWORD')
    r = birdisle.redis.StrictRedis(server=birdisle_server, password=password)
    r.config_set('notify-keyspace-events', 'KEA')
    return r

@pytest.fixture  # decorator assumed
def birdisle_server():
    # Start an in-process Redis (birdisle) protected by the configured password;
    # close it when the fixture is torn down.
    password = config.get_consumer_config().get('REDIS_PASSWORD')
    server = birdisle.Server(f'requirepass {password}')
    yield server
    server.close()
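
birdisle embeds a real Redis server inside the test process, so these fixtures need no external redis daemon. A minimal standalone sketch of the same pattern, independent of the suite above (no password, purely illustrative):

import birdisle
import birdisle.redis

server = birdisle.Server()                          # in-process Redis, no daemon required
client = birdisle.redis.StrictRedis(server=server)
client.set('probe', 'ok')
assert client.get('probe') == b'ok'
server.close()
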
Example #9
@pytest.fixture  # decorator assumed
def StreamConsumer(RedisInstance):
    # Build a StreamConsumer against the test Redis and stop it on teardown.
    settings = config.get_consumer_config()
    c = consumer.StreamConsumer(settings, redis_instance=RedisInstance)
    yield c
    c.stop()