Beispiel #1
0
    def init_app(self, app: Flask):
        """Register two nb_log loggers on the Flask app.

        * ``app.flask_record_logger`` -- normal request log.
        * ``app.flask_error_logger``  -- error log, optionally pushing to DingTalk.

        File names come from ``FLASK_RECORD_LOG_FILE_NAME`` /
        ``FLASK_ERROR_LOG_FILE_NAME`` in ``app.config``; when absent, a default
        derived from the project directory name (``sys.path[1]``) is used and a
        configuration hint is logged.  The previous implementation duplicated the
        whole logger-setup in both branches; this version computes the file name
        first and sets the logger up once.
        """
        # Project directory name, used to build the default log file names.
        project_name = Path(sys.path[1]).as_posix().split('/')[-1]

        record_name_from_config = app.config.get('FLASK_RECORD_LOG_FILE_NAME', None)
        record_file = record_name_from_config or project_name + '_flask_record.log'
        app.flask_record_logger = LogManager(
            record_file.split('.')[0]).get_logger_and_add_handlers(
                log_filename=record_file)
        self.logger.info(
            f'flask的正常请求记录将记录在  /pythonlogs/{record_file} 文件中 ')
        if not record_name_from_config:
            self.logger.warning(
                '也可以手动配置flask的正常请求记录日志文件名字,请指定 FLASK_RECORD_LOG_FILE_NAME')

        error_name_from_config = app.config.get('FLASK_ERROR_LOG_FILE_NAME', None)
        error_file = error_name_from_config or project_name + '_flask_error.log'
        ding_talk_token = app.config.get('FLASK_ERROR_DING_TALK_TOKEN', None)
        # When a DingTalk token is configured the keyword is appended to the
        # logger name so nb_log builds a distinct DingTalk-enabled logger.
        # Bug fix: use .get with a default so a configured token without a
        # DING_TALK_KEYWORD no longer raises KeyError.
        logger_error_name = error_file.split('.')[0] + (
            app.config.get('DING_TALK_KEYWORD', '') if ding_talk_token else '')
        # logger_dingtalk_debug.debug(logger_error_name)
        app.flask_error_logger = LogManager(
            logger_error_name).get_logger_and_add_handlers(
                log_filename=error_file,
                ding_talk_token=ding_talk_token)
        self.logger.info(
            f'flask错误日志将记录在  /pythonlogs/{error_file} 文件中')
        if not error_name_from_config:
            self.logger.warning(
                '也可以手动配置flask的错误记录日志文件名字,请指定 FLASK_ERROR_LOG_FILE_NAME')

        app.before_first_request_funcs.append(self.__before_first_request)
Beispiel #2
0
def log(text):
    """Write *text* as an INFO record into a per-minute log file under LOG_PATH."""
    minute_stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
    file_logger = LogManager('INFO').get_logger_and_add_handlers(
        is_add_stream_handler=True,
        do_not_use_color_handler=True,
        log_filename=LOG_PATH + minute_stamp + '.log')
    file_logger.info(text)
Beispiel #3
0
 def custom_init(self):
     """Build this instance's logger, tagging its name with the kombu broker scheme."""
     # Scheme part of the kombu URL, e.g. 'redis' from 'redis://host:6379/0'.
     self._kombu_broker_url_prefix = frame_config.KOMBU_URL.split(":")[0]
     log_name = (f'{self._logger_prefix}{self.__class__.__name__}'
                 f'--{self._kombu_broker_url_prefix}--{self._queue_name}')
     file_name = f'{log_name}.log' if self._is_add_file_handler else None
     self.logger = LogManager(log_name).get_logger_and_add_handlers(
         self._log_level_int,
         log_filename=file_name,
         formatter_template=frame_config.NB_LOG_FORMATER_INDEX_FOR_CONSUMER_AND_PUBLISHER,
     )
 def __init__(self, queue_name, log_level_int=10, logger_prefix='', is_add_file_handler=True,
              clear_queue_within_init=False, is_add_publish_time=True, consuming_function: callable = None):
     """Publisher base initialiser.

     :param queue_name: name of the message queue to publish into.
     :param log_level_int: log level for this publisher's logger.
     :param logger_prefix: optional prefix distinguishing several publishers' logs.
     :param is_add_file_handler: also write this publisher's log to a file.
     :param clear_queue_within_init: purge the queue during construction.
     :param is_add_publish_time: whether to stamp messages with a publish time;
            slated for removal -- the time will always be added.
     :param consuming_function: the consuming function; used only to validate
            publish arguments against its signature.  If omitted, no validation
            is done.  E.g. for ``add(x, y)``, publishing ``{"x":1,"z":3}`` is
            invalid because the function takes no ``z`` parameter.
     """
     self._queue_name = queue_name
     if logger_prefix != '':
         logger_prefix += '--'  # separator between prefix and class/queue name
     logger_name = f'{logger_prefix}{self.__class__.__name__}--{queue_name}'
     self.logger = LogManager(logger_name).get_logger_and_add_handlers(log_level_int,
                                                                       log_filename=f'{logger_name}.log' if is_add_file_handler else None)  #
     # Validates publish kwargs against the consuming function's signature.
     self.publish_params_checker = PublishParamsChecker(consuming_function) if consuming_function else None
     # self.rabbit_client = RabbitMqFactory(is_use_rabbitpy=is_use_rabbitpy).get_rabbit_cleint()
     # self.channel = self.rabbit_client.creat_a_channel()
     # self.queue = self.channel.queue_declare(queue=queue_name, durable=True)
     self._lock_for_count = Lock()  # guards the per-minute publish counter
     self._current_time = None
     self.count_per_minute = None
     self._init_count()
     self.custom_init()  # subclass hook for broker-specific setup
     self.logger.info(f'{self.__class__} 被实例化了')
     self.publish_msg_num_total = 0
     self._is_add_publish_time = is_add_publish_time
     self.__init_time = time.time()
     atexit.register(self.__at_exit)  # report publish statistics at interpreter exit
     if clear_queue_within_init:
         self.clear()
Beispiel #5
0
class TimerContextManager(object):
    """Context manager that times the code inside a ``with`` block and can log it."""
    log = LogManager('TimerContext').get_logger_and_add_handlers()

    def __init__(self, is_print_log=True):
        # Whether to emit a debug record when the block finishes.
        self._is_print_log = is_print_log
        # Filled in by __enter__ / __exit__.
        self.time_start = None
        self.t_spend = None
        self._file_name = None
        self._line = None

    def __enter__(self):
        caller = sys._getframe(1)
        self._line = caller.f_lineno              # line that opened the with-block
        self._file_name = caller.f_code.co_filename  # file that opened it
        self.time_start = time.time()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.t_spend = time.time() - self.time_start
        if self._is_print_log:
            self.log.debug(
                f'对下面代码片段进行计时:  \n执行"{self._file_name}:{self._line}" 用时 {round(self.t_spend, 2)} 秒'
            )
Beispiel #6
0
    def get_logger(cls):
        """Return the class-wide nb_log logger, creating it lazily on first use.

        Bug fix: the original had ``return cls.logger`` inside the ``if`` block,
        so only the very first call returned the logger and every subsequent
        call returned None.  The return now happens unconditionally.
        """
        if cls.logger is None:
            cls.logger = LogManager('simple').get_logger_and_add_handlers(
                log_path='../logger',
                log_filename='test.log',
                formatter_template=5,
                log_file_size=10)
        return cls.logger
 def custom_init(self):
     """Create this consumer's logger (named scheme + queue) and patch kombu's redis transport."""
     # Scheme part of the kombu URL, e.g. 'redis' from 'redis://host:6379/0'.
     self._middware_name = frame_config.KOMBU_URL.split(":")[0]
     log_name = (f'{self._logger_prefix}{self.__class__.__name__}'
                 f'--{self._middware_name}--{self._queue_name}')
     self.logger = LogManager(log_name).get_logger_and_add_handlers(
         self._log_level,
         log_filename=f'{log_name}.log' if self._create_logger_file else None,
         formatter_template=frame_config.NB_LOG_FORMATER_INDEX_FOR_CONSUMER_AND_PUBLISHER,
     )
     patch_kombu_redis()
class FunctionResultCacher:
    """Per-process, in-memory cache of function results with a time-to-live."""
    logger = LogManager('FunctionResultChche').get_logger_and_add_handlers()
    # Maps (function, args-key) -> (result, timestamp-when-cached), e.g.
    # {
    #     (f1, (1, 2, 3, 4)): (10, 1532066199.739),
    #     (f2, (5, 6, 7, 8)): (26, 1532066211.645),
    # }
    func_result_dict = {}

    @classmethod
    def cached_function_result_for_a_time(cls, cache_time: float):
        """Decorator caching a function's result for ``cache_time`` seconds.

        Do not apply it to functions returning huge strings or other
        memory-heavy structures -- results are held in process memory.

        :param cache_time: seconds a cached result stays valid.
        :type cache_time: float
        """
        def _cached_function_result_for_a_time(fun):
            @wraps(fun)
            def __cached_function_result_for_a_time(*args, **kwargs):
                # Crude safety valve.  NOTE: sys.getsizeof is shallow (it does
                # not follow references), so this threshold is rarely reached;
                # it only guards against a pathologically large dict object.
                if sys.getsizeof(cls.func_result_dict) > 100 * 1000 * 1000:
                    cls.func_result_dict.clear()

                key = cls._make_arguments_to_key(args, kwargs)
                # Bug fix: the original repeated this exact check in a nested
                # if/else (dead duplication, apparently left over from a removed
                # double-checked-locking attempt); one check suffices.
                cached = cls.func_result_dict.get((fun, key))
                if cached is not None and time.time() - cached[1] < cache_time:
                    return cached[0]
                cls.logger.debug('函数 [{}] 此次不能使用缓存'.format(fun.__name__))
                result = fun(*args, **kwargs)
                cls.func_result_dict[(fun, key)] = (result, time.time())
                return result

            return __cached_function_result_for_a_time

        return _cached_function_result_for_a_time

    @staticmethod
    def _make_arguments_to_key(args, kwds):
        """Flatten positional and (sorted) keyword arguments into one tuple key."""
        key = args
        if kwds:
            sorted_items = sorted(kwds.items())
            for item in sorted_items:
                key += item
        return key  # tuples can be concatenated with +
Beispiel #9
0
 def __init__(
     self,
     logger_name='ExceptionContextManager',
     verbose=100,
     donot_raise__exception=True,
 ):
     """
     :param logger_name: name of the nb_log logger used to report exceptions.
     :param verbose: traceback depth (the ``limit`` passed to traceback), a positive int.
     :param donot_raise__exception: True swallows the exception, False re-raises it.
     """
     self._donot_raise__exception = donot_raise__exception
     self._verbose = verbose
     self.logger = LogManager(logger_name).get_logger_and_add_handlers()
 def __init__(self, function_of_get_new_https_proxies_list_from_website, func_args=tuple(), func_kwargs: dict = None,
              platform_name='xx平台', redis_key=PROXY_KEY_IN_REDIS_DEFAULT,
              time_sleep_for_get_new_proxies=60,
              ):
     """
     :param function_of_get_new_https_proxies_list_from_website: strategy callable
            that fetches a fresh https-proxy list from one site.
     :param func_args: positional arguments forwarded to that callable.
     :param func_kwargs: keyword arguments forwarded to that callable (None means {}).
     :param platform_name: display name of the proxy site, used in logger and file names.
     :param redis_key: redis key the proxies are stored under (a zset).
     :param time_sleep_for_get_new_proxies: seconds between pulls from this site.
     """
     self.platform_name = platform_name
     self.function_of_get_new_https_proxies_list_from_website = function_of_get_new_https_proxies_list_from_website
     self._func_args = func_args
     self._func_kwargs = func_kwargs or {}
     self._redis_key = redis_key
     self._time_sleep_for_get_new_proxies = time_sleep_for_get_new_proxies
     self.logger = LogManager(f'ProxyCollector-{platform_name}').get_logger_and_add_handlers(
         log_filename=f'ProxyCollector-{platform_name}.log', formatter_template=7)
def timer(func):
    """Decorator that logs the wall-clock run time of *func* (timer decorator,
    only for measuring a function's run time).

    Bug fix: the logger used to be created only for the first decorated function
    (guarded by ``hasattr(timer, 'log')``), so every later decorated function
    logged under the first one's name and into the first one's file.  Each
    function now gets its own logger; ``timer.log`` is still set once for
    backward compatibility with code that referenced it.
    """
    log = LogManager(
        f'timer_{func.__name__}').get_logger_and_add_handlers(
            log_filename=f'timer_{func.__name__}.log')
    if not hasattr(timer, 'log'):
        timer.log = log  # backward-compatible attribute

    @wraps(func)
    def _timer(*args, **kwargs):
        t1 = time.time()
        result = func(*args, **kwargs)
        t2 = time.time()
        t_spend = round(t2 - t1, 2)
        log.debug('执行[ {} ]方法用时 {} 秒'.format(func.__name__, t_spend))
        return result

    return _timer
Beispiel #12
0
def where_is_it_called(func):
    """Decorator that logs, on every call, which file and line invoked the
    wrapped function, the arguments passed, the elapsed time and a (truncated)
    representation of the result.
    """
    if not hasattr(where_is_it_called, 'log'):
        where_is_it_called.log = LogManager(
            'where_is_it_called').get_logger_and_add_handlers()

    # noinspection PyProtectedMember
    @wraps(func)
    def _where_is_it_called(*args, **kwargs):
        # Name of the function being called.
        # func_name = sys._getframe().f_code.co_name
        func_name = func.__name__
        # Name of the function that called this one.
        which_fun_call_this = sys._getframe(1).f_code.co_name  # NOQA

        # Line number of the call site.
        line = sys._getframe().f_back.f_lineno

        # File that contains the call site.
        file_name = sys._getframe(1).f_code.co_filename

        # noinspection PyPep8
        where_is_it_called.log.debug(
            f'文件[{func.__code__.co_filename}]的第[{func.__code__.co_firstlineno}]行即模块 [{func.__module__}] 中的方法 [{func_name}] 正在被文件 [{file_name}] 中的'
            f'方法 [{which_fun_call_this}] 中的第 [{line}] 行处调用,传入的参数为[{args},{kwargs}]'
        )
        try:
            t0 = time.time()
            result = func(*args, **kwargs)
            result_raw = result
            t_spend = round(time.time() - t0, 2)
            # dicts are dumped to JSON so the log line is one readable string.
            if isinstance(result, dict):
                result = json.dumps(result)
            # Truncate very long results so the log stays readable.
            if len(str(result)) > 200:
                result = str(result)[0:200] + '  。。。。。。  '
            where_is_it_called.log.debug(
                '执行函数[{}]消耗的时间是{}秒,返回的结果是 --> '.format(func_name, t_spend) +
                str(result))
            return result_raw
        except Exception as e:
            where_is_it_called.log.debug('执行函数{},发生错误'.format(func_name))
            where_is_it_called.log.exception(e)
            raise e

    return _where_is_it_called
def keep_circulating(time_sleep=0.001,
                     exit_if_function_run_sucsess=False,
                     is_display_detail_exception=True,
                     block=True,
                     daemon=False):
    """Decorator that keeps running the wrapped function in an endless loop.

    :param time_sleep: pause between iterations (seconds).
    :param exit_if_function_run_sucsess: leave the loop (returning the result)
           once the function returns without raising.
    :param is_display_detail_exception: log the full traceback rather than str(e).
    :param block: run the loop in the calling thread; when False, start a new
           thread and return immediately (the wrapper then returns None).
    :param daemon: daemon flag for that thread, so the loop can end with the process.
    """
    # Bug fix: the guard used to test ``keep_circulating_log`` while the
    # attribute actually assigned is ``log``, so the guard never fired and the
    # logger was rebuilt on every decoration.  Test the attribute really set.
    if not hasattr(keep_circulating, 'log'):
        keep_circulating.log = LogManager(
            'keep_circulating').get_logger_and_add_handlers()

    def _keep_circulating(func):
        @wraps(func)
        def __keep_circulating(*args, **kwargs):

            # noinspection PyBroadException
            def ___keep_circulating():
                while 1:
                    try:
                        result = func(*args, **kwargs)
                        if exit_if_function_run_sucsess:
                            return result
                    except Exception as e:
                        # Bug fix: parenthesise the conditional.  Previously the
                        # ternary applied to the WHOLE concatenation, so with
                        # is_display_detail_exception=False the message was just
                        # str(e) and lost the function name.
                        msg = func.__name__ + '   运行出错\n ' + (
                            traceback.format_exc(limit=10)
                            if is_display_detail_exception else str(e))
                        keep_circulating.log.error(msg)
                    finally:
                        time.sleep(time_sleep)

            if block:
                return ___keep_circulating()
            else:
                threading.Thread(target=___keep_circulating,
                                 daemon=daemon).start()

        return __keep_circulating

    return _keep_circulating
Beispiel #14
0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from abc import ABCMeta, abstractmethod
from nb_log import LogManager
import re
import datetime
from entity.fesql_result import FesqlResult
import util.fesql_util as fesql_util
import check.fesql_assert as fesql_assert

log = LogManager('fesql-auto-test').get_logger_and_add_handlers()


class Checker(metaclass=ABCMeta):
    """Abstract interface for result checkers; subclasses implement check()."""
    @abstractmethod
    def check(self):
        """Run the check; implementations assert/raise on mismatch."""
        pass


class BaseChecker(Checker):
    """Checker holding a case definition and the FesqlResult actually produced."""
    def __init__(self, fesqlCase, fesqlResult: FesqlResult):
        # Expected-case spec and the actual SQL execution result to compare.
        self.fesqlCase = fesqlCase
        self.fesqlResult = fesqlResult


class RowsChecker(BaseChecker):
 # NOTE(review): this body looks pasted from an unrelated API-test class -- a
 # requests/config setUp method does not fit a RowsChecker; verify against the
 # original source before relying on it.
 def setUp(self) -> None:
     # Fresh HTTP session, target hosts from config, and a dedicated logger.
     self.session = requests.session()
     self.HOSTS = config.HOSTS
     self.logger = LogManager('ApiCase').get_logger_and_add_handlers()
# -*- coding: utf-8 -*-
# @Author  : ydf
# @Time    : 2019/8/8 0008 13:32
import json

# noinspection PyPackageRequirements
from kafka import KafkaConsumer as OfficialKafkaConsumer, KafkaProducer

from function_scheduling_distributed_framework.consumers.base_consumer import AbstractConsumer
from function_scheduling_distributed_framework import frame_config
from nb_log import LogManager

LogManager('kafka').get_logger_and_add_handlers(20)


class KafkaConsumer(AbstractConsumer):
    """
    kafla作为中间件实现的。
    """
    BROKER_KIND = 8

    def _shedual_task(self):
        self._producer = KafkaProducer(bootstrap_servers=frame_config.KAFKA_BOOTSTRAP_SERVERS)
        consumer = OfficialKafkaConsumer(self._queue_name, bootstrap_servers=frame_config.KAFKA_BOOTSTRAP_SERVERS,
                                         group_id=f'frame_group-{self._queue_name}', enable_auto_commit=True)
        # REMIND 由于是很高数量的并发消费,线程很多,分区很少,这里设置成自动确认消费了,否则多线程提交同一个分区的偏移量导致超前错乱,就没有意义了。
        # REMIND 要保证很高的可靠性和一致性,请用rabbitmq。
        # REMIND 好处是并发高。topic像翻书一样,随时可以设置偏移量重新消费。多个分组消费同一个主题,每个分组对相同主题的偏移量互不干扰。

        for message in consumer:
            # 注意: message ,value都是原始的字节数据,需要decode
Beispiel #17
0
    def __init__(
            self,
            queue_name,
            *,
            consuming_function: Callable = None,
            function_timeout=0,
            threads_num=50,
            concurrent_num=50,
            specify_threadpool=None,
            concurrent_mode=1,
            max_retry_times=3,
            log_level=10,
            is_print_detail_exception=True,
            msg_schedule_time_intercal=0.0,
            qps: float = 0,
            msg_expire_senconds=0,
            is_using_distributed_frequency_control=False,
            is_send_consumer_hearbeat_to_redis=False,
            logger_prefix='',
            create_logger_file=True,
            do_task_filtering=False,
            task_filtering_expire_seconds=0,
            is_consuming_function_use_multi_params=True,
            is_do_not_run_by_specify_time_effect=False,
            do_not_run_by_specify_time=('10:00:00', '22:00:00'),
            schedule_tasks_on_main_thread=False,
            function_result_status_persistance_conf=FunctionResultStatusPersistanceConfig(
                False, False, 7 * 24 * 3600),
            is_using_rpc_mode=False):
        """
        :param queue_name: name of the queue to consume from.
        :param consuming_function: function that processes each message.
        :param function_timeout: seconds after which a running function is killed; 0 = unlimited.
        :param threads_num: thread / coroutine concurrency.
        :param concurrent_num: concurrency; overrides threads_num (threads_num will be
               deprecated -- the name is inaccurate since the mode is not always threads).
        :param specify_threadpool: use a given thread/coroutine pool (may be shared by
               several consumers); when not None, threads_num is ignored.
        :param concurrent_mode: concurrency mode -- 1 threads, 2 gevent, 3 eventlet.
        :param max_retry_times: max retries for a failed message.
        :param log_level: log level for this consumer's logger.
        :param is_print_detail_exception: log full tracebacks for task errors.
        :param msg_schedule_time_intercal: interval between message dispatches (rate limiting).
        :param qps: target executions per second; overrides msg_schedule_time_intercal
               (which will be deprecated).
        :param is_using_distributed_frequency_control: spread the qps limit across ALL
               consumers of this queue (uses redis counting); otherwise the limit is per
               instance.  E.g. two qps=10 consumers of the same queue run 20/s without
               distributed control, 10/s in total with it.
        :param is_send_consumer_hearbeat_to_redis: send consumer heartbeats to redis;
               some features need to know the set of live consumers (not every broker
               is a real MQ).
        :param logger_prefix: log prefix distinguishing different consumers.
        :param create_logger_file: whether to create a file log.
        :param do_task_filtering: filter (skip) tasks based on function arguments.
        :param task_filtering_expire_seconds: expiry of the task filter; 0 filters
               forever.  E.g. with 1800s, a "1 + 2" task published more than 30 minutes
               ago runs again, one published within 30 minutes is skipped -- typically
               used as a result cache (e.g. price lookups).
        :param is_consuming_function_use_multi_params: whether the function takes
               traditional multiple parameters rather than a single body dict.
        :param is_do_not_run_by_specify_time_effect: enable the "do not run" time window.
        :param do_not_run_by_specify_time: the (start, end) window in which not to run.
        :param schedule_tasks_on_main_thread: schedule on the main thread -- then two
               consumers cannot be started from the same main thread at once.
        :param function_result_status_persistance_conf: whether to persist arguments,
               results and status to mongodb, for tracing / statistics / web display
               (requires mongo).
        :param is_using_rpc_mode: rpc mode -- the publisher can get the consumer's
               result via callback, at some performance cost; ``async_result.result``
               blocks the calling thread.
        """
        self.init_params = copy.copy(locals())
        self.init_params.pop('self')
        self.init_params['broker_kind'] = self.__class__.BROKER_KIND

        ConsumersManager.consumers_queue__info_map[
            queue_name] = current_queue__info_dict = copy.copy(
                self.init_params)
        current_queue__info_dict['consuming_function'] = str(
            consuming_function)  # consuming_function.__name__
        current_queue__info_dict[
            'function_result_status_persistance_conf'] = function_result_status_persistance_conf.to_dict(
            )
        current_queue__info_dict['class_name'] = self.__class__.__name__
        concurrent_name = ConsumersManager.get_concurrent_name_by_concurrent_mode(
            concurrent_mode)
        current_queue__info_dict['concurrent_mode_name'] = concurrent_name

        # Record where the consumer was instantiated so the startup line is a
        # clickable location that jumps to the USER's code.
        # When the factory is used, the interesting frame is one level further
        # out than with direct instantiation, hence the consumer_factory.py hop.
        line = sys._getframe(0).f_back.f_lineno
        # File name of the caller's module.
        file_name = sys._getframe(1).f_code.co_filename
        if 'consumer_factory.py' in file_name:
            line = sys._getframe(1).f_back.f_lineno
            file_name = sys._getframe(2).f_code.co_filename
        current_queue__info_dict[
            'where_to_instantiate'] = f'{file_name}:{line}'

        self._queue_name = queue_name
        self.queue_name = queue_name  # public alias, so external access raises no warning
        self.consuming_function = consuming_function
        self._function_timeout = function_timeout
        self._threads_num = concurrent_num if threads_num == 50 else threads_num  # concurrent_num wins; threads_num will be deprecated
        self._specify_threadpool = specify_threadpool
        self._threadpool = None  # separate pool/thread for message-count and heartbeat checks
        self._concurrent_mode = concurrent_mode
        self._max_retry_times = max_retry_times
        self._is_print_detail_exception = is_print_detail_exception
        self._qps = qps
        if qps != 0:
            msg_schedule_time_intercal = 1.0 / qps  # qps overrides the schedule interval; the interval param will be deprecated
        self._msg_schedule_time_intercal = msg_schedule_time_intercal if msg_schedule_time_intercal > 0.001 else 0.001
        self._is_using_distributed_frequency_control = is_using_distributed_frequency_control
        self._is_send_consumer_hearbeat_to_redis = is_send_consumer_hearbeat_to_redis or is_using_distributed_frequency_control
        self._msg_expire_senconds = msg_expire_senconds

        if self._concurrent_mode not in (1, 2, 3):
            raise ValueError('设置的并发模式不正确')
        self._concurrent_mode_dispatcher = ConcurrentModeDispatcher(self)

        self._logger_prefix = logger_prefix
        self._log_level = log_level
        if logger_prefix != '':
            logger_prefix += '--'

        # logger_name = f'{logger_prefix}{self.__class__.__name__}--{concurrent_name}--{queue_name}--{self.consuming_function.__name__}'
        logger_name = f'{logger_prefix}{self.__class__.__name__}--{queue_name}'
        # nb_print(logger_name)
        self.logger = LogManager(logger_name).get_logger_and_add_handlers(
            log_level,
            log_filename=f'{logger_name}.log' if create_logger_file else None,
            formatter_template=frame_config.
            NB_LOG_FORMATER_INDEX_FOR_CONSUMER_AND_PUBLISHER,
        )
        # self.logger.info(f'{self.__class__} 在 {current_queue__info_dict["where_to_instantiate"]}  被实例化')
        sys.stdout.write(
            f'{time.strftime("%H:%M:%S")} "{current_queue__info_dict["where_to_instantiate"]}"  \033[0;30;44m此行 '
            f'实例化队列名 {current_queue__info_dict["queue_name"]} 的消费者, 类型为 {self.__class__}\033[0m\n'
        )

        self._do_task_filtering = do_task_filtering
        self._redis_filter_key_name = f'filter_zset:{queue_name}' if task_filtering_expire_seconds else f'filter_set:{queue_name}'
        filter_class = RedisFilter if task_filtering_expire_seconds == 0 else RedisImpermanencyFilter
        self._redis_filter = filter_class(self._redis_filter_key_name,
                                          task_filtering_expire_seconds)

        self._is_consuming_function_use_multi_params = is_consuming_function_use_multi_params

        self._execute_task_times_every_minute = 0  # tasks executed in the current minute
        self._lock_for_count_execute_task_times_every_minute = Lock()
        self._current_time_for_execute_task_times_every_minute = time.time()

        self._msg_num_in_broker = 0
        self._last_timestamp_when_has_task_in_queue = 0
        self._last_timestamp_print_msg_num = 0

        self._is_do_not_run_by_specify_time_effect = is_do_not_run_by_specify_time_effect
        self._do_not_run_by_specify_time = do_not_run_by_specify_time  # time window in which the consumer stays idle
        self._schedule_tasks_on_main_thread = schedule_tasks_on_main_thread

        self._result_persistence_helper = ResultPersistenceHelper(
            function_result_status_persistance_conf, queue_name)

        self._is_using_rpc_mode = is_using_rpc_mode

        self.stop_flag = False

        # State used by the qps / rate-limiting logic.
        self._last_submit_task_timestamp = 0
        self._last_start_count_qps_timestamp = time.time()
        self._has_execute_times_in_recent_second = 0

        self._publisher_of_same_queue = None

        self.consumer_identification = f'{socket.gethostname()}_{time_util.DatetimeConverter().datetime_str.replace(":","-")}_{os.getpid()}_{id(self)}'

        self.custom_init()
import os
import pytest
import allure
import shutil
from common.testdatatransferutils import TestDataTransferUtils
from common.requestsutils import RequestUtils
from nb_log import LogManager

logger = LogManager().get_logger_and_add_handlers()
# Load every case definition and derive pytest.mark.parametrize inputs from it.
all_cases_info_list = TestDataTransferUtils().get_testdata_by_list()
params_key = (',').join(all_cases_info_list[0].keys())
# print(params_key)   #'case_name,case_info'
# Bug fix: the original loop appended each case TWICE, so every test case ran
# twice.  Build the list with a single append per case.
params_value = [tuple(case.values()) for case in all_cases_info_list]
# print(params_value)   #[('case01',[step1,step2]),('case02',[step1,step2]).....]


class TestCase:
    """Data-driven API test: one parametrized run per entry in params_value."""

    @pytest.mark.parametrize(params_key, params_value)
    def test_case(self, case_name, case_info):
        # Announce which case starts (the case number lives in the first step dict).
        case_no = case_info[0].get('测试用例编号')
        logger.info('测试用例:%s开始执行' % (case_no))
        result = RequestUtils().test_steps(case_info)
        assert result.get('check_result'), result.get('message')


if __name__ == '__main__':
    report_path = os.path.join(os.path.dirname(__file__),
Beispiel #19
0
import pytest
import time

from utils.data_analysis import DataAnalysis
from utils.request_utils import request
from nb_log import LogManager

time_ = time.strftime("%Y-%m-%d", time.localtime())
log_name = '%s.log' % time_
logger = LogManager('sfmdm_api_auto').get_logger_and_add_handlers(
    is_add_stream_handler=False, log_filename=log_name)

test_data = DataAnalysis("登陆模块").convert_testcase_data_list()
# print(test_data)
# print(test_data[0]['case_id'])
print(test_data)
# print([test_data['case_id'],test_data['case_step']])


class TestLogin:
    """Login-module API tests driven by spreadsheet test data."""

    @classmethod
    def setup_class(cls):
        # Mark the start of the login module in the log file.
        logger.info("$登陆模块$")

    @pytest.mark.parametrize('test_info', test_data)
    def test_login_case(self, test_info):
        case_title = test_info["case_step"][0]['测试用例名称']
        logger.info("$%s_%s$" % (test_info["case_id"], case_title))
        result = request.request_by_step(test_info["case_step"])
        assert result['check_result'], result['message']
Beispiel #20
0
# -*- coding: utf-8 -*-
# @Time : 2021/6/18 15:15
# @Author : 杜云慧
# @Site :
# @File : api_test.py
# @Software: PyCharm

import warnings
import unittest
import paramunittest
from nb_log import LogManager
from API_TEST_FEAME.common.testdata_utils import TestdataUtils
from API_TEST_FEAME.common.requests_utils import RequestsUtils

case_infos = TestdataUtils().def_testcase_data_list()
logger = LogManager(__file__).get_logger_and_add_handlers()


@paramunittest.parametrized(*case_infos)
class APITest(paramunittest.ParametrizedTestCase):
    # paramunittest instantiates one TestCase per entry in case_infos
    # (loaded at module level from TestdataUtils).
    def setUp(self) -> None:
        # requests' keep-alive sockets make ResourceWarning noisy; silence it.
        warnings.simplefilter('ignore', ResourceWarning)
        logger.info('测试初始化操作')

    def setParameters(self, case_id, case_info):
        # paramunittest hook: receives one (case_id, case_info) tuple per instance.
        logger.info('加载测试数据')
        self.case_id = case_id
        self.case_info = case_info

    def test_api_common_function(self):
        '''Test description placeholder -- body not present in this snippet.'''

def check_gevent_monkey_patch(raise_exc=True):
    """Verify that gevent's monkey patch has been applied.

    Returns 1 when the patch is present.  When it is missing and *raise_exc*
    is true, a warning is emitted and an Exception raised; with *raise_exc*
    false the function just returns None.
    """
    if monkey.is_module_patched('socket'):  # 'socket' serves as the sentinel module
        return 1
    if raise_exc:
        warnings.warn(
            f'检测到 你还没有打gevent包的猴子补丁,请在所运行的起始脚本第一行写上  【import gevent.monkey;gevent.monkey.patch_all()】  这句话。'
        )
        raise Exception(
            f'检测到 你还没有打gevent包的猴子补丁,请在所运行的起始脚本第一行写上  【import gevent.monkey;gevent.monkey.patch_all()】  这句话。'
        )


logger_gevent_timeout_deco = LogManager(
    'gevent_timeout_deco').get_logger_and_add_handlers()


def gevent_timeout_deco(timeout_t):
    def _gevent_timeout_deco(f):
        def __gevent_timeout_deceo(*args, **kwargs):
            timeout = gevent.Timeout(timeout_t, )
            timeout.start()
            result = None
            try:
                result = f(*args, **kwargs)
            except gevent.Timeout as t:
                logger_gevent_timeout_deco.error(f'函数 {f} 运行超过了 {timeout_t} 秒')
                if t is not timeout:
                    nb_print(t)
                    # raise  # not my timeout
Beispiel #22
0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sqlalchemy as db
from nb_log import LogManager

log = LogManager('fedb-sdk-test').get_logger_and_add_handlers()


class FedbClient:
    """Thin helper that opens a SQLAlchemy connection to an OpenMLDB/FEDB cluster."""

    def __init__(self, zkCluster, zkRootPath, dbName='test_fedb'):
        # Zookeeper address, zk root path and target database name.
        self.zkCluster = zkCluster
        self.zkRootPath = zkRootPath
        self.dbName = dbName

    def getConnect(self):
        """Build an engine for the configured cluster and return a live connection."""
        url = 'openmldb://@/{}?zk={}&zkPath={}'.format(
            self.dbName, self.zkCluster, self.zkRootPath)
        engine = db.create_engine(url)
        connect = engine.connect()
        return connect
            # nb_print(work_item)
            if work_item is not None:
                self._executorx._change_threads_free_count(-1)
                work_item.run()
                del work_item
                self._executorx._change_threads_free_count(1)
                continue
            if _shutdown or self._executorx._shutdown:
                self._executorx.work_queue.put(None)
                break


process_name_set = set()
logger_show_current_threads_num = LogManager(
    'show_current_threads_num').get_logger_and_add_handlers(
        formatter_template=5,
        log_filename='show_current_threads_num.log',
        do_not_use_color_handler=False)


def show_current_threads_num(sleep_time=600,
                             process_name='',
                             block=False,
                             daemon=True):
    process_name = sys.argv[0] if process_name == '' else process_name

    def _show_current_threads_num():
        while True:
            # logger_show_current_threads_num.info(f'{process_name} 进程 的 并发数量是 -->  {threading.active_count()}')
            # nb_print(f'  {process_name} {os.getpid()} 进程 的 线程数量是 -->  {threading.active_count()}')
            logger_show_current_threads_num.info(
Beispiel #24
0
import time
import os
import requests
import importlib
import sys
import time
import os.path
from pdfminer.pdfparser import PDFParser, PDFDocument
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import PDFPageAggregator
from pdfminer.layout import LTTextBoxHorizontal, LAParams
from pdfminer.pdfinterp import PDFTextExtractionNotAllowed
from nb_log import LogManager
from nb_log_config import LOG_PATH

# Module logger: console output plus 'api.log' written under LOG_PATH.
logger = LogManager("api").get_logger_and_add_handlers(
    is_add_stream_handler=True, log_filename="api.log", log_path=LOG_PATH)

# NOTE(review): reload(sys) is a Python 2 idiom (paired with
# sys.setdefaultencoding); it has no useful effect on Python 3.
importlib.reload(sys)
# Timestamp captured at import time — presumably for elapsed-time measurement.
time1 = time.time()
from case.VC_project.VCtest_login import VC_Login


def Ycontract_Listquire(s,
                        contractNo=None,
                        supplierCode=None,
                        businessStatus=None,
                        createTimeStart=None,
                        createTimeEnd=None,
                        executeStart=None,
                        executeEnd=None,
                        contractTemplateNo=None,
Beispiel #25
0
# -*- coding: utf-8 -*-
# @Author  : ydf
# @Time    : 2019/8/8 0008 13:27
import json
from threading import Lock

from pikav0.exceptions import AMQPError

from function_scheduling_distributed_framework.consumers.base_consumer import AbstractConsumer
from nb_log import LogManager
from function_scheduling_distributed_framework.utils.rabbitmq_factory import RabbitMqFactory

# Configure the 'pikav0' logger; the positional 20 is presumably the log
# level (logging.INFO) — confirm against nb_log's signature.
LogManager('pikav0').get_logger_and_add_handlers(20)


class RabbitmqConsumer(AbstractConsumer):
    """
    使用pika包实现的。
    """
    BROKER_KIND = 0

    # noinspection PyAttributeOutsideInit
    def custom_init(self):
        # Lock presumably guarding the shared pika channel, since pika's
        # blocking connection is not thread-safe — confirm against the
        # concurrent-consume path in AbstractConsumer.
        self._lock_for_pika = Lock()

    def _shedual_task(self):
        channel = RabbitMqFactory(
            is_use_rabbitpy=0).get_rabbit_cleint().creat_a_channel()
        channel.queue_declare(queue=self._queue_name, durable=True)
        channel.basic_qos(prefetch_count=self._concurrent_num)
Beispiel #26
0
# -*- coding: utf-8 -*-
# @Time    : 2020/12/29 16:44
# @Author  : tingting.yang
# @FileName: check.py
import requests
import json
import re
from nb_log import LogManager

# Framework logger: writes to 'Api_Test_Framework.log' and additionally
# sends log records by e-mail (is_add_mail_handler=True).
logger = LogManager('Api_Test_Framework').get_logger_and_add_handlers(
    is_add_mail_handler=True, log_filename='Api_Test_Framework.log')


class CheckUtils:
    def __init__(self, response_data):
        self.response_data = response_data
        self.check_rules = {
            "none": self.none_check,
            "json_key": self.body_key_check,
            "json_key_value": self.body_key_value_check,
            "body_regexp": self.regexp_check,
            "header_key": self.header_key_check,
            "header_key_value": self.header_key_value_check,
            'message': '',
            "response_code": self.response_code_check
        }
        self.pass_result = {
            'code': 0,
            'response_code': self.response_data.status_code,
            'response_reason': self.response_data.reason,
            'response_headers': self.response_data.headers,
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import allure
import pytest

from nb_log import LogManager

from common.standalone_test import StandaloneTest
from executor import fedb_executor
from util.test_util import getCases

# Module-wide logger for the python-sdk standalone window tests.
log = LogManager('python-sdk-test').get_logger_and_add_handlers()

class TestStandaloneWindow(StandaloneTest):

    # All of these cases pass.
    @pytest.mark.parametrize("testCase", getCases(["/function/window/"]))
    @allure.feature("window")
    @allure.story("batch")
    def test_window1(self, testCase):
        """Run every window-function case loaded from /function/window/.

        Each parametrized case is built against self.connect by
        fedb_executor and executed; any mismatch fails the test.
        """
        print(testCase)
        fedb_executor.build(self.connect, testCase).run()

    # 13没pass属于正常情况 剩下都pass
    @pytest.mark.parametrize("testCase", getCases(["/function/cluster/"]))
    @allure.feature("window")
    @allure.story("batch")
Beispiel #28
0
 def setUp(self) -> None:
     """Prepare per-test state: HTTP session, service config, and logger."""
     # Fresh requests session so every test starts with clean cookies/state.
     self.session = requests.session()
     self.HOSTS = config.HOSTS
     self.APPID = config.appid
     self.SECRET = config.secret
     # Positional 10 is presumably the log level (logging.DEBUG) — confirm
     # against nb_log's get_logger_and_add_handlers signature.
     self.logger = LogManager('case_log').get_logger_and_add_handlers(10)
#!/usr/bin/env python
# encoding: utf-8
# @author: liusir
# @file: requests_utils.py
# @time: 2020/11/18 8:07 下午

import json
import jsonpath
import requests
import re
from utils.config_utils import local_config
from utils.check_utils import CheckUtils
from requests.exceptions import RequestException, ProxyError, ConnectionError
from nb_log import LogManager

# Project logger: console output plus a log file named by the local config.
logger = LogManager('P3P4_API_TEST').get_logger_and_add_handlers(
    is_add_stream_handler=True, log_filename=local_config.LOG_NAME)


class RequestsUtils:
    def __init__(self):
        """Initialise the HTTP helper with the configured base host."""
        # Base host from the environment config; request paths are appended to it.
        self.hosts = local_config.HOSTS
        # One shared session so cookies persist across consecutive requests.
        self.session = requests.session()
        # Values captured from earlier responses, used for ${var} substitution
        # in later request parameters.
        self.tmp_variables = {}

    def __get(self, requests_info):
        try:
            url = self.hosts + requests_info['请求地址']
            variable_list = re.findall('\\${\w+}', requests_info['请求参数(get)'])
            for variable in variable_list:
                requests_info['请求参数(get)'] = requests_info[
                    '请求参数(get)'].replace(
Beispiel #30
0
from nb_log import LogManager

# Minimal nb_log demo: a named logger with a console stream handler
# plus a 'ha.log' file handler.
_lalala_manager = LogManager('LALALA')
logger = _lalala_manager.get_logger_and_add_handlers(
    log_filename='ha.log', is_add_stream_handler=True)
logger.info('蓝色')