def __init__(self):
    """Build the shared Redis connection pool from application configuration."""
    # Inline the config lookups straight into the pool constructor;
    # decode_responses=True makes the client return str instead of bytes.
    self.pool = redis.ConnectionPool(
        host=get_config('REDIS_HOST'),
        port=get_config('REDIS_PORT'),
        password=get_config('REDIS_PASSWORD'),
        decode_responses=True,
    )
def start_discord():
    """Configure and launch the Discord bot; blocks until the bot shuts down."""
    token = config.get_config()["discord_token"]
    bot = Bot(command_prefix="+")
    # The twitter cog carries all command/event handlers for this bot.
    bot.load_extension("app.discordbot.cogs.twitter")
    bot.run(token)
async def on_ready(self):
    """discord.py ready hook: open the Redis connection, then start polling."""
    logger.info("starting redis connection")
    cfg = config.get_config()
    # Redis address comes from application config (presumably a redis:// URI
    # — confirm against the config source).
    self.redis_addr = cfg["redis_uri"]
    self.redis = await aioredis.create_redis_pool(self.redis_addr, encoding='utf-8')
    # NOTE(review): aioredis.create_redis_pool raises on connection failure
    # rather than returning None, so this guard looks dead — confirm before
    # relying on it for error reporting.
    if self.redis is None: logger.error("redis not started")
    # Begin the background task that consumes the Redis stream.
    self.poll_redis.start()
def test_get_config_in_env_var_x_dynaconf_priority(self):
    """An OS environment variable must take priority over a dynaconf setting.

    The dynaconf value is registered under the SAME key that is looked up;
    the original version stored it under 'KEY_DYNACONF' while querying
    'CONFIG_KEY', so the priority rule was never actually exercised.
    """
    key = 'CONFIG_KEY'
    conf_value_dynaconf = 'VALUE_DYNACONF'
    config.settings = LazySettings(CONFIG_KEY=conf_value_dynaconf)
    conf_value_env_var = 'VALUE_ENV_VAR'
    os.environ[key] = conf_value_env_var
    # Remove the env var after the test so it cannot leak into other cases.
    self.addCleanup(os.environ.pop, key, None)
    conf = config.get_config(key)
    self.assertEqual(conf, conf_value_env_var)
def format(self, record):
    """Render *record* as a Graylog/GELF-style JSON string."""
    gelf = {
        'timestamp': time.time(),
        '_app_version': get_config('APPLICATION_VERSION'),
        '_product': 'namespace',
        '_application': get_config('APPLICATION_NAME'),
        '_environment': get_config('ENV', 'dev'),
        '_log_type': 'application',
        'host': gethostname(),
        'level': self.__get_log_level(record.levelno),
        'Severity': record.levelname,
        'message': record.msg,
    }
    extra = record.args
    if isinstance(extra, dict):
        # Custom fields follow the GELF convention of a leading underscore.
        gelf.update({f'_{key}': str(value) for key, value in extra.items()})
    return dumps(gelf)
async def on_ready(self):
    """Ready hook: record first-ready time and dump guild/channel inventory."""
    # `uptime` is only set once, even if on_ready fires again after reconnects.
    if not hasattr(self, 'uptime'):
        self.uptime = datetime.datetime.utcnow()
    logger.info(self.guilds)
    for current_guild in self.guilds:
        for text_channel in current_guild.text_channels:
            logger.info(text_channel)
    logger.info("starting redis connection")
    settings = config.get_config()
    self.redis_addr = settings["redis_uri"]
    logger.debug(self.redis_addr)
    #await self._start_redis()
    logger.info(f'Ready: {self.user} (ID: {self.user.id})')
def start_twitter():
    """Start the Twitter stream handler on the asyncio loop; blocks until interrupted.

    Cleanup in ``finally`` only runs when the handler was actually created:
    previously a failure while reading configuration left ``twhandler``
    unbound, so the ``finally`` block raised NameError and masked the
    original error.
    """
    loop = asyncio.get_event_loop()
    twhandler = None
    try:
        cfg = config.get_config()
        twhandler = TwitterHandler(
            cfg['twitter_api_key'],
            cfg['twitter_api_secret'],
            cfg['twitter_access_token'],
            cfg['twitter_access_secret'],
            cfg['redis_uri'],
        )
        asyncio.ensure_future(twhandler.run())
        loop.run_forever()
    except KeyboardInterrupt:
        pass
    finally:
        if twhandler is not None:
            twhandler.close_stream()
            # close_redis() is a coroutine; run it on a fresh loop since the
            # main loop has already been stopped at this point.
            asyncio.run(twhandler.close_redis())
    return
def publish(redis, chan, msg):
    """Append *msg* to the Redis stream *chan*; returns the xadd awaitable."""
    stream_msg = {'msg': msg}
    logger.info(stream_msg)
    return redis.xadd(chan, stream_msg)


async def test(server_addr, channel_name):
    """Manual publisher: emit a follow then an unfollow event with delays."""
    redis = await aioredis.create_redis_pool(server_addr, encoding='utf-8')
    await asyncio.sleep(10)
    stream_msg = {
        'type': 'follow',
        'username': '******'
    }
    logger.info(stream_msg)
    await redis.xadd(channel_name, stream_msg)
    await asyncio.sleep(60)
    stream_msg = {
        'type': 'unfollow',
        'username': '******'
    }
    logger.info(stream_msg)
    await redis.xadd(channel_name, stream_msg)
    # Close the pool before waiting: wait_closed() only returns after close()
    # has been called, so the original (without close()) could hang forever.
    redis.close()
    await redis.wait_closed()


if __name__ == "__main__":
    logger.info("starting publisher")
    cfg = config.get_config()
    # Fixed accidental duplicated assignment (`addr = addr = ...`).
    addr = cfg['redis_uri']
    chan = "follow"
    asyncio.run(test(addr, chan))
def test_get_config_in_env_var(self):
    """get_config must read values from the OS environment."""
    conf_declared = 'VALUE_ENV_VAR'
    key = 'KEY_ENV_VAR'
    os.environ[key] = conf_declared
    # Remove the variable after the test so it cannot leak into other cases
    # (previously it stayed set for the rest of the process).
    self.addCleanup(os.environ.pop, key, None)
    conf = config.get_config(key)
    self.assertEqual(conf, conf_declared)
def test_get_config_in_dynaconf(self):
    """Values declared via dynaconf settings are returned by get_config."""
    result = config.get_config('KEY_DYNACONF')
    self.assertEqual(result, 'VALUE')
def test_get_config_not_found(self):
    """A missing key with no default yields None."""
    result = config.get_config('NOT_FOUND_KEY')
    self.assertIsNone(result)
def test_get_config_not_found_default_value(self):
    """When the key is absent, the supplied default is returned instead."""
    result = config.get_config('NOT_FOUND_KEY', 'DEFAULT')
    self.assertEqual(result, 'DEFAULT')
from contextlib import contextmanager
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from app.utils.config import get_config

engine = create_engine(get_config('SQLALCHEMY_DATABASE_URI'), echo=True)
Session = sessionmaker(bind=engine)


@contextmanager
def create_context(context=None):
    '''
    Create a new context to use the database.

    If a 'context' arg is given, a sub-transaction is created instead —
    useful for nested operations sharing one transaction.  Fixes relative
    to the original:
    - the session is created/borrowed BEFORE the try block, so a failure in
      Session() no longer triggers NameError in except/finally;
    - a borrowed (nested) session is never closed here — closing it would
      break the caller's outer transaction;
    - a bare `raise` preserves the original traceback (`raise e` reset it).
    '''
    owns_session = context is None
    session = Session() if owns_session else context['session']
    try:
        if not owns_session:
            session.begin(subtransactions=True)
        yield {'session': session}
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        # Only close sessions this context created.
        if owns_session:
            session.close()
from app.utils.config import get_config

# Gunicorn server configuration (loaded with `gunicorn -c <this file>`).
# `bind`, `workers` and `threads` are names Gunicorn reads directly,
# so they must keep exactly these identifiers.
bind = "0.0.0.0:5000"
workers = get_config('GUNICORN_WORKERS')
threads = get_config('GUNICORN_THREADS')
from sentry_sdk import init as sentry_init
from app import app
from app.utils.logger import get_logger
from app.utils.config import get_config

SENTRY_DSN = get_config('SENTRY_DSN')
log = get_logger(__name__)


def _main():
    """Entry point: optional Sentry setup, then the Flask server."""
    log.info('Starting InventoryService')
    if SENTRY_DSN:
        sentry_init(SENTRY_DSN)
    app.run(host='0.0.0.0', port=5000, debug=get_config('DEBUG', default=False))


if __name__ == '__main__':
    _main()
from sentry_sdk import init as sentry_init
from app import app
from app.utils.logger import get_logger
from app.utils.config import get_config

SENTRY_DSN = get_config('SENTRY_DSN')
log = get_logger(__name__)


def _main():
    """Entry point: optional Sentry setup, then the Flask server."""
    log.info('Starting Worker')
    if SENTRY_DSN:
        sentry_init(SENTRY_DSN)
    app.run(host='0.0.0.0', port=5000, debug=get_config('DEBUG', default=False))


if __name__ == '__main__':
    _main()
from celery import Celery # from kombu import Connection, Exchange, Queue from sentry_sdk import init as sentry_init import time import logging from app.utils.config import get_config from actions.order import create_order, update_redis_quantity, send_telegram from actions.email import send_mail from actions.common import test celery = Celery('tasks', broker=get_config('CELERY_BROKER_URL')) SENTRY_DSN = get_config('SENTRY_DSN') if SENTRY_DSN: sentry_init(SENTRY_DSN) # celery.conf.update( # CELERY_DEFAULT_QUEUE = 'celery_queue', # CELERY_DEFAULT_EXCHANGE = 'celery', # CELERY_DEFAULT_EXCHANGE_TYPE = 'direct', # CELERY_RESULT_BACKEND = 'rpc://', # CELERY_RESULT_PERSISTENT = True, # CELERY_QUEUES = ( # Queue('celery', routing_key="celery"), # Queue('case_creation', routing_key='create.#') # ), # ) # celery.conf.task_routes = { # 'case.tasks.create_case': { # 'queue': 'case_creation', # 'routing_key': 'create.1'
import requests
import json
from uuid import uuid1
from unittest import TestCase
from app.utils.config import get_config

# Base URL of the product API under test; override host via TEST_DOMAIN.
DOMAIN = get_config('TEST_DOMAIN', 'http://localhost:5000')
URL = f'{DOMAIN}/api/v1/product'
HEADERS = {'content-type': 'application/json'}
# Template payload; `code` is regenerated per request to stay unique.
PRODUCT = {
    'name': 'Product Integration Tests 1',
    'code': str(uuid1())[:8],
    'price': 99.99,
}


class TestProductIntegration(TestCase):
    """Integration tests that hit a running product service over HTTP."""

    def __create_product(self, product=None):
        # POST a product; defaults to a copy of PRODUCT with a fresh code
        # so repeated runs do not collide on a unique-code constraint.
        prod = product
        if product is None:
            prod = PRODUCT.copy()
            prod.update({'code': str(uuid1())[:8]})
        return requests.post(URL, data=json.dumps(prod), headers=HEADERS)

    def test_find_products(self):
        # Listing endpoint should always answer 200.
        resp = requests.get(URL)
        self.assertEqual(resp.status_code, 200)

    def test_find_product_by_id(self):
        # NOTE(review): this test appears truncated in this chunk;
        # its assertions presumably follow beyond this view.
        resp = self.__create_product()
        resp_body = resp.json()
import logging
import time
from json import dumps
from socket import gethostname
from sys import stdout
from app.utils.config import get_config

# Log level comes from configuration, defaulting to INFO.
LOG_LEVEL = logging.getLevelName(get_config('LOG_LEVEL', 'INFO'))


class GraylogFormatter(logging.Formatter):
    """Formats log records as Graylog/GELF-style JSON documents."""

    def __init__(self):
        logging.Formatter.__init__(self)

    def format(self, record):
        # Build the base GELF document; `_`-prefixed keys are custom fields.
        log = {
            'timestamp': time.time(),
            '_app_version': get_config('APPLICATION_VERSION'),
            '_product': 'namespace',
            '_application': get_config('APPLICATION_NAME'),
            '_environment': get_config('ENV', 'dev'),
            '_log_type': 'application',
            'host': gethostname(),
            'level': self.__get_log_level(record.levelno),
            'Severity': record.levelname,
            'message': record.msg,
        }
        # NOTE(review): method continues past this chunk boundary.
        extra = record.args