# Test-fixture module constants: wires up consumer/Kafka config and the
# per-run ES tenant used by the integration tests.
from uuid import uuid4  # was missing: TS below calls uuid4() -> NameError without it

from aet.logger import get_logger
from aet.jsonpath import CachedParser
from aet.resource import ResourceDefinition

from aether.python.avro import generation
from aether.python.avro.schema import Node

from app import config
from app.fixtures import examples
from app.processor import ESItemProcessor
from app.artifacts import Subscription, ESJob, LocalESInstance
from app import consumer

CONSUMER_CONFIG = config.consumer_config
KAFKA_CONFIG = config.get_kafka_config()

LOG = get_logger('FIXTURE')

# Some of the fixtures are non-compliant so we don't QA this file.
# flake8: noqa

URL = 'http://localhost:9013'

# pick a random tenant for each run so we don't need to wipe ES.
TS = str(uuid4()).replace('-', '')[:8]
TENANT = f'TEN{TS}'
TEST_TOPIC = 'es_test_topic'

# instances of samples pushed to Kafka
GENERATED_SAMPLES = {}
from aet.kafka import KafkaConsumer, FilterConfig, MaskConfig from aet.logger import callback_logger, get_logger from aet.resource import BaseResource, lock from werkzeug.local import LocalProxy # Aether python lib # from aether.python.avro.schema import Node from app.config import get_consumer_config, get_kafka_config from app.fixtures import schemas from app import helpers LOG = get_logger('artifacts') CONSUMER_CONFIG = get_consumer_config() KAFKA_CONFIG = get_kafka_config() class FirebaseInstance(BaseResource): schema = schemas.FB_INSTANCE jobs_path = '$.firebase' name = 'firebase' public_actions = BaseResource.public_actions + [ 'test_connection' ] app: firebase_admin.App = None cfs: firestore.Client = None rtdb: helpers.RTDB = None def __init__(self, tenant, definition, app=None):