import json

from config.log import LoggingConfiguration
from config.settings import settings
from db.cc_mysql_connector import CCMySqlConnector

ABC_GS_FLAG_TABLE = settings.get('ABC_GS_FLAG_TABLE')


class GsFlagQueryExecutor(object):
    """Reads the most recent gs_flag value for a fixture from MySQL."""

    @staticmethod
    def get_flag(fixture_id=None):
        """Return the latest gs_flag for *fixture_id*, or None.

        Returns None when fixture_id is falsy or when the table has no
        row for the fixture. (The previous version also logged a
        meaningless "mysql query: None result: None" line in the
        no-fixture case; now nothing is queried or logged.)
        """
        if not fixture_id:
            return None

        logger = LoggingConfiguration.get_default_logger()
        # NOTE(review): fixture_id is interpolated directly into SQL.
        # This is safe only if callers always pass a numeric id — prefer
        # a parameterized query if the connector supports one.
        query = (
            "select gs_flag from {} where fixture_id={} "
            "order by timestamp desc limit 1"
        ).format(ABC_GS_FLAG_TABLE, fixture_id)

        res = CCMySqlConnector().execute(query)
        flag = None
        row = res.first()
        if row:
            flag = row[0]
        logger.info("mysql query: {} result: {}".format(query, flag))
        return flag
def __init__(self):
    """Load MongoDB connection parameters from application settings.

    Only reads configuration; no connection is opened here.
    """
    cfg = settings.get
    self.host = cfg('MONGO_HOST')
    self.port = cfg('MONGO_PORT')
    self.user = cfg('MONGO_USER')
    self.password = cfg('MONGO_PASS')
async def db(self, database='workflow'):
    """Return a handle to *database* on the async Mongo client.

    When an empty/None database name is passed explicitly, falls back
    to the MONGO_DB setting.
    """
    name = database or settings.get('MONGO_DB')
    client = await self.client()
    return client[name]
import pika

from config.settings import settings

AMQP_HOST = settings.get("AMQP_HOST")
AMQP_VHOST = settings.get("AMQP_VHOST")
RABBITMQ_PORT = settings.get("RABBITMQ_PORT")
AMQP_USER = settings.get("AMQP_USER")
AMQP_PASS = settings.get("AMQP_PASS")


def get_rmq_connection():
    """Open and return a blocking RabbitMQ connection.

    Host, port, vhost, and credentials all come from application
    settings; the caller is responsible for closing the connection.
    """
    connection_params = pika.ConnectionParameters(
        host=AMQP_HOST,
        port=RABBITMQ_PORT,
        virtual_host=AMQP_VHOST,
        credentials=pika.PlainCredentials(AMQP_USER, AMQP_PASS),
    )
    return pika.BlockingConnection(connection_params)
def __init__(self):
    """Load Elasticsearch endpoint and credentials from settings."""
    cfg = settings.get
    self.elastic_search_url = cfg('ELASTIC_SEARCH_URL')
    self.elastic_search_user = cfg('ELASTIC_SEARCH_USER')
    self.elastic_search_pass = cfg('ELASTIC_SEARCH_PASS')
from __future__ import absolute_import, unicode_literals

from celery import Celery

from config.settings import settings

# Broker URL is taken verbatim from settings (e.g. an amqp:// URL).
broker = settings.get('MQ_BROKER')

# BUG FIX: the original template 'redis://*****:*****@%s:%s/%s' had
# three %s placeholders but four arguments, raising
# "TypeError: not all arguments converted during string formatting"
# at import time. The redis result-backend URL format is
# redis://:password@host:port/db.
backend = 'redis://:%s@%s:%s/%s' % (
    settings.get('REDIS_PASS'),
    settings.get('REDIS_HOST'),
    settings.get('REDIS_PORT'),
    settings.get('REDIS_DB'),
)

app = Celery('Workers', broker=broker, backend=backend,
             include=['workers.tasks'])

app.conf.update(
    result_expires=3600,  # drop stored task results after one hour
    enable_utc=True,
)

if __name__ == '__main__':
    app.start()
def __init__(self, db=None):
    """Store Redis connection settings.

    db: Redis database number; when None, falls back to the REDIS_DB
    setting, then to 9.
    """
    self.host = settings.get('REDIS_HOST')
    # BUG FIX: the old `db or settings.get('REDIS_DB') or 9` discarded
    # an explicit db=0, which is a valid Redis database number. Only
    # fall through to the settings/default chain when db is None.
    self.db = db if db is not None else (settings.get('REDIS_DB') or 9)
from config.settings import settings
from db.mysql_connector import MySqlConnector

MYSQL_HOST = settings.get("MYSQL_HOST")
CC_MYSQL_PORT = settings.get("CC_MYSQL_PORT")
MYSQL_USER = settings.get("MYSQL_USER")
MYSQL_PASS = settings.get("MYSQL_PASS")
CC_MYSQL_DATABASE = settings.get("CC_MYSQL_DATABASE")
CC_MYSQL_DIALECT = settings.get("CC_MYSQL_DIALECT")


class CCMySqlConnector(object):
    """Thin wrapper around MySqlConnector bound to the CC database."""

    engine = None

    def __init__(self):
        # One engine per instance, built from the CC-specific settings.
        self.engine = MySqlConnector.get_engine(
            MYSQL_HOST,
            CC_MYSQL_PORT,
            CC_MYSQL_DATABASE,
            MYSQL_USER,
            MYSQL_PASS,
            CC_MYSQL_DIALECT,
        )

    def execute(self, query):
        """Run *query* through the shared connector and return its result."""
        return MySqlConnector.execute_request(self.engine, query)
def __init__(self, url):
    """Join the configured API base URL (WX_WORK_API_BASE_URL) with *url*."""
    self.host = settings.get('WX_WORK_API_BASE_URL')
    self.url = "{}{}".format(self.host, url)
import requests

from config.log import LoggingConfiguration
from config.settings import settings

INSP_API_HOST = settings.get("INSP_API_HOST")
INSP_API_AUTH_TOKEN = settings.get("INSP_API_AUTH_TOKEN")
INSP_API_AUTH_USER = settings.get("INSP_API_AUTH_USER")


class InspectionRecordFetcher(object):
    """Fetches inspection records for an appointment from the inspection API."""

    @staticmethod
    def fetch(appointment_id):
        """Return the list of inspection details for *appointment_id*.

        Returns an empty list on any non-200 response.
        """
        return InspectionRecordFetcher._fetch(appointment_id)

    @staticmethod
    def _fetch(appointment_id):
        logger = LoggingConfiguration.get_default_logger()
        # BUG FIX: the original template ended with a stray space after
        # {user}, which was sent as part of the query string.
        inspection_api_uri = (
            "v2/inspection?action=Search&role=storemanager&client=GS-C2C"
            "&appointmentId={appointment_id}&user={user}"
        )
        url = INSP_API_HOST + inspection_api_uri.format(
            appointment_id=appointment_id, user=INSP_API_AUTH_USER)
        headers = {"Authorization": "Bearer {}".format(INSP_API_AUTH_TOKEN)}
        response = requests.get(url, headers=headers)
        logger.debug("inspection api response: {}".format(response.text))
        if response.status_code == 200:
            return response.json().get('detail', [])
        logger.error("INSPECTION API FAIL: {}".format(response.text))
        return list()
import json
import traceback

from config.log import LoggingConfiguration
from config.settings import settings
from rabbitmq.rmq_consumer import RmqConsumer
from services.product_transformation_result import set_gs_flag

GS_FLAG_RMQ_QUEUE = settings.get("GS_FLAG_RMQ_QUEUE")
GS_FLAG_RMQ_ROUTING_KEY = settings.get("GS_FLAG_RMQ_ROUTING_KEY")
GS_FLAG_RMQ_EXCHANGE = settings.get("GS_FLAG_RMQ_EXCHANGE")


class GsFlagRmqConsumer(RmqConsumer):
    """Consumes gs-flag queue messages and applies them via set_gs_flag."""

    @staticmethod
    def callback(ch, method, properties, body):
        """Handle one message: parse the JSON body and set the gs flag.

        Returns set_gs_flag's result, or False when processing raises —
        failures are logged rather than crashing the consumer.
        """
        LoggingConfiguration.update_request_id()
        logger = LoggingConfiguration.get_default_logger()
        logger.info("rabbit mq consumer message received: {}".format(body))
        payload = json.loads(body)
        try:
            return set_gs_flag(payload.get('fixtureId'), payload.get('price'))
        except Exception as e:
            logger.error("MESSAGE PROCESSING FAILED: {}".format(str(e)))
            traceback.print_exc()
            return False

    def get_queue_name(self):
        """Return the queue this consumer listens on."""
        return GS_FLAG_RMQ_QUEUE
def __init__(self):
    """Read database connection settings into instance attributes."""
    cfg = settings.get
    self.db_user = cfg('DB_USER')
    self.db_pass = cfg('DB_PASS')
    self.db_name = cfg('DB_NAME')
    self.db_host = cfg('DB_HOST')
    self.db_port = cfg('DB_PORT')