def init_log_from_config(cfg_file, formatter='flume', delay=False):
    """Initialise logging from an ini-style config file and return an adapter.

    :param cfg_file: path to the config file; must contain every
        (section, option) pair listed in ``required_ops``.
    :param formatter: handler formatter name; only 'standard' overrides the
        formatter already present in the settings template.
    :param delay: delay opening log files until first write; may also be
        forced on via the DELAY_LOG environment variable.
    :return: a LoggerAdapter carrying ``{"host": MACHINE_NAME}`` as extra data.
    :raises Exception: when a required config option is missing.
    :raises ValueError: when the configured log level is unknown.
    """
    parser = ConfUtils.load_parser(cfg_file)
    for sec, op in required_ops:
        if not parser.has_option(sec, op):
            raise Exception("Log load config file failed")

    log_dir = os.path.abspath(parser.get(BASE, LOG_DIR))
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    log_file = parser.get(BASE, LOG_FILE)
    # add default hostname suffix for log_file
    log_file += '.' + MACHINE_NAME
    log_level = parser.get(BASE, LOG_LEVEL)

    # environment variables may force delayed file opening / disable rotation
    delay = delay or get_env_bool('DELAY_LOG', default=False)
    rotate = get_env_bool("ROTATE_LOG", default=True)
    if not rotate:
        settings = get_no_rotate_log_settings(delay)
    else:
        settings = get_basic_log_settings(delay)

    # point each level-specific handler at its own file under log_dir
    for level_name in ('info', 'debug', 'warn', 'error'):
        handler = settings['handlers']['%s_handler' % level_name]
        handler['filename'] = os.path.join(log_dir, '%s.%s' % (log_file, level_name))
        if formatter == 'standard':
            handler['formatter'] = formatter

    settings['loggers'][log_file] = copy.deepcopy(settings['loggers']['demo'])
    logging.config.dictConfig(settings)

    extra_dict = {"host": MACHINE_NAME}
    logger = logging.getLogger(log_file)
    logger.propagate = False

    # BUG FIX: the original did `raise ("unknown log level:[%s]" % log_level)`,
    # which raises a plain string -> TypeError ("exceptions must derive from
    # BaseException") in Python 3 instead of a meaningful error.
    level_map = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'ERROR': logging.ERROR,
    }
    if log_level not in level_map:
        raise ValueError("unknown log level:[%s]" % log_level)
    logger.setLevel(level_map[log_level])

    return LoggerAdapter(logger, extra_dict)
def connect_from_conf(conf_file_path): """ :return: :rtype: _RabbitMQWorkQueueWrapper """ _rabbitmq_conf = ConfUtils.load_parser(conf_file_path) _params = { 'host': "localhost", "port": 5672, "user": "", "password": "", "queue": "", "is_delay": "False", "durable": "True", # 默认队列是永久保存的 "delivery_mode": 2, # 默认消息是要持久化的 "virtual_host": "/" } for k, v in _rabbitmq_conf.items('default'): _params[k] = v _params['port'] = int(_params['port']) _params['delivery_mode'] = int(_params['delivery_mode']) _params['durable'] = eval(_params['durable']) _params['is_delay'] = eval(_params['is_delay']) if _params.get('user'): user_pwd = pika.PlainCredentials(_params.get('user'), _params.get('password')) connection_parameters = pika.ConnectionParameters( host=_params.get('host'), virtual_host=_params['virtual_host'], port=_params['port'], credentials=user_pwd, heartbeat=0) else: connection_parameters = pika.ConnectionParameters( host=_params.get('host'), virtual_host=_params['virtual_host'], port=_params['port'], heartbeat=0) return _RabbitMQWorkQueueWrapper(connection_parameters, **_params)
# Author:yhl # Version: # Last modified: 2019-12-12 18:59 import copy import json import multiprocessing import random import threading import traceback import pika from pika.exceptions import AMQPError from utils4py import ConfUtils _rabbitmq_conf = ConfUtils.load_parser("data_source/rabbitmq.conf") _client_pool = dict() # key:配置名称,value:自己封装的rabbitmq client _reuse_mutex = threading.RLock() def connect_pool(section, settings_reuse_pool=True): """ :param section: :rtype: SqlShell """ if settings_reuse_pool: with _reuse_mutex: if section not in _client_pool: client_obj = _ConnectParams().init_with_section( section).connect()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import json
import threading

from pymysql.cursors import DictCursor

from utils4py import ConfUtils
from utils4py.data.pymysql_pool import Pool, SqlShell

_mysql_conf = ConfUtils.load_parser("data_source/mysql.conf")
_conn_pool = dict()  # connection-pool cache, keyed by config section name
_conn_mutex = threading.RLock()  # guards _conn_pool


def connect_pool(section):
    """Return a SqlShell bound to the (lazily created) pool for *section*.

    :param section: config section naming the target mysql instance
    :rtype: SqlShell
    """
    with _conn_mutex:
        pool = _conn_pool.get(section)
        if pool is None:
            connect_params = _ConnectParams().init_with_section(section).get_connect_params()
            pool = Pool(**connect_params)
            _conn_pool[section] = pool
    return SqlShell(pool)
# -*- coding: utf-8 -*- import copy import json import threading import time import traceback import redis import redis.client from utils4py import ConfUtils, TextUtils from utils4py.flask_ext.errors import SimpleError from utils4py.uni_identifier import get_uuid _redis_conf = ConfUtils.load_parser("data_source/redis.conf") _conn_pool = dict() _reuse_mutex = threading.RLock() def connect(section, settings_reuse_pool=True): if settings_reuse_pool: with _reuse_mutex: conn = _conn_pool.get(section, None) if not conn: conn = _ConnectParams().init_with_section(section).connect() if conn: _conn_pool[section] = conn return conn else:
#!/usr/bin/env python # -*- coding: utf-8 -*- import threading from pymongo import MongoClient from pymongo.database import Database from six.moves.urllib.parse import quote_plus from utils4py import ConfUtils settings_reuse_pool = True _mongo_conf = ConfUtils.load_parser("data_source/mongo.conf") _conn_pool = dict() _reuse_mutex = threading.RLock() def connect(section): """ :param section: :rtype: Database """ if settings_reuse_pool: with _reuse_mutex: if section not in _conn_pool: db_obj = _ConnectParams().init_with_section(section).connect() if db_obj: _conn_pool[section] = db_obj return _conn_pool[section]