# Inner wrapper of a timing decorator: `func` and `key` are closure variables
# supplied by the enclosing decorator, which is not shown in this excerpt.
def metric_wrapper(*args, **kwargs):
    start_time = time.perf_counter()
    value = func(*args, **kwargs)
    run_time = time.perf_counter() - start_time  # seconds
    logger = get_logger()
    # report the elapsed time in whole milliseconds under `key`
    logger.metric(key, int(round(run_time * 1000)))
    return value
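# A minimal sketch (an assumption, not shown in the original snippet) of how a
# wrapper like the one above is typically produced and used: a decorator factory
# that closes over `key` and `func`. The name `log_metric` is hypothetical.
import time
from functools import wraps

from log.logger import get_logger


def log_metric(key):
    def decorator(func):
        @wraps(func)
        def metric_wrapper(*args, **kwargs):
            start_time = time.perf_counter()
            value = func(*args, **kwargs)
            logger = get_logger()
            logger.metric(key, int(round((time.perf_counter() - start_time) * 1000)))
            return value
        return metric_wrapper
    return decorator


# usage: report the run time of a function under the metric key 'classify_ms'
@log_metric('classify_ms')
def classify(batch):
    ...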
# -*- coding: utf-8 -*-
# @Time    : 2019/1/20 14:46
# @Author  : lemon_hehe
# @Email   : [email protected]
# @File    : test_login.py
# @Software: PyCharm Community Edition
import time

from log.logger import get_logger
from selenium.webdriver import Chrome
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait  # explicit wait

# Many page classes share the same behaviour, so it lives in a base class (inheritance).
# logging
logger = get_logger()


class BasePage:
    # the class that every page object shares

    def __init__(self, driver: Chrome):
        # every page needs the browser, so the driver is shared here
        self.driver = driver

    # accept complexity in the definition so that the call sites stay simple
    def get_visible_element(self, locator, eqc=20) -> WebElement:
        try:
            return WebDriverWait(self.driver, eqc).until(
                EC.visibility_of_element_located(locator))
        except Exception as e:
            # the original comment hints at save_screenshot here; assumed handling:
            # capture a screenshot for debugging, log the failure and re-raise
            self.driver.save_screenshot(f'locate_failed_{time.strftime("%Y%m%d%H%M%S")}.png')
            logger.exception(f'element not visible for locator {locator}: {e}')
            raise
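# A minimal sketch (assumed, not part of the original file) of how a concrete
# page object would build on BasePage. The LoginPage class, its locators and
# the login() flow are hypothetical.
from selenium.webdriver.common.by import By


class LoginPage(BasePage):
    USERNAME = (By.ID, 'username')
    PASSWORD = (By.ID, 'password')
    SUBMIT = (By.ID, 'submit')

    def login(self, username: str, password: str):
        self.get_visible_element(self.USERNAME).send_keys(username)
        self.get_visible_element(self.PASSWORD).send_keys(password)
        self.get_visible_element(self.SUBMIT).click()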
# Assumed imports for this excerpt (the top of the module is not shown):
from os import environ
from pathlib import Path

from log import logger  # module exposing configure_logging_from_file / get_logger


# NOTE: the original snippet begins mid-way through this helper; the signature
# below and the source of `params` (assumed to be a dict of configuration
# values defined earlier in the module) are reconstructions.
def get_variable_or_die(name, default=None):
    """
    :param name: name of the variable to look up
    :param default: fallback returned when `name` is absent
    :return:
    """
    return params[name] if name in params else default


# -----------------------------------------------------------------------------
# CONFIGS
# -----------------------------------------------------------------------------
SERVICE_NAME = environ.get("SERVICE_NAME", "outlier_classifier")
LOG_LEVEL = get_variable_or_die("LOG_LEVEL", "debug").lower()
ROOT_DIR = Path(__file__).parents[1]

logger.configure_logging_from_file(ROOT_DIR.joinpath("log/logger.json"))
service_logger = logger.get_logger("outlier_classifier",
                                   level=LOG_LEVEL,
                                   capture_warnings=True)

SM_REGION = get_variable_or_die("SM_REGION", "eu-central-1")

# -----------------------------------------------------------------------------
# DATADOG
# -----------------------------------------------------------------------------
NEXIOT_ENV = get_variable_or_die("NEXIOT_ENV", "preview")
DD_PHASE = get_variable_or_die("DD_PHASE", "prd")
DD_STAGE = get_variable_or_die("DD_STAGE", "preview")
DD_OWNER = get_variable_or_die("DD_OWNER", "ds")
DD_API_KEY = get_variable_or_die("DD_API_KEY", "")
DATADOG_AGENT_HOST = environ.get("DATADOG_AGENT_HOST", None)
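# A minimal sketch (an assumption; the original excerpt only defines the
# variables) of how the Datadog settings above might be consumed with the
# official `datadog` package. The metric name and tag keys are illustrative.
from datadog import initialize, statsd

initialize(api_key=DD_API_KEY, statsd_host=DATADOG_AGENT_HOST)

# emit a counter tagged with the deployment metadata defined above
statsd.increment(
    f"{SERVICE_NAME}.started",
    tags=[f"env:{NEXIOT_ENV}", f"phase:{DD_PHASE}",
          f"stage:{DD_STAGE}", f"owner:{DD_OWNER}"],
)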
import time
from multiprocessing import Process

from log.logger import get_logger
from service.cookies_store import CookiesStore
from service.pdd_cookies_valid import PddCookieValidator
from service.tb_cookies_valid import TbCookieValidator

logger = get_logger('cookies_schedule')


class CookiePoolSchedule:

    @staticmethod
    def store_cookie(period=30):
        while True:
            logger.info('cookies store start.')
            try:
                CookiesStore().cookies_store('pdd')
                CookiesStore().cookies_store('tb')
            except Exception as e:
                logger.exception(f'store cookies e:{e}')
            # sleep outside the try so a failed run still waits before retrying
            time.sleep(period)

    @staticmethod
    def valid_cookie(period=60):
        while True:
            logger.info('cookies valid start.')
            try:
                PddCookieValidator('pdd').do_valid()
                TbCookieValidator('tb').do_valid()
                logger.info('cookies valid finished.')
            except Exception as e:
                # assumed handling, mirroring store_cookie above
                logger.exception(f'valid cookies e:{e}')
            time.sleep(period)
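# A minimal sketch (assumed; not shown in the original excerpt) of how the two
# loops above could be launched, which would explain the `Process` import. The
# `run` entry point below is hypothetical.
def run():
    store = Process(target=CookiePoolSchedule.store_cookie, args=(30,))
    valid = Process(target=CookiePoolSchedule.valid_cookie, args=(60,))
    store.start()
    valid.start()
    store.join()
    valid.join()


if __name__ == '__main__':
    run()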
# -*- coding: utf-8 -*-
import requests
import time

from log.logger import get_logger
from service.base_cookies_valid import CookieValid

logger = get_logger('tb_cookies_valid')


class TbCookieValidator(CookieValid):

    def __init__(self, website: str):
        self.website = website
        CookieValid.__init__(self, website)

    @staticmethod
    def map_to_cookie(cookie_dict: dict):
        cookie_str = ''
        for k, v in cookie_dict.items():
            cookie_str += f'{k}={v}; '
        length = len(cookie_str)
        if length > 0:
            cookie_str = cookie_str[0: length - 2]
        return cookie_str

    def update_consul(self, key: str, value: str):
        config_dict = self.cookie_config.config_dict
        config_dict.update({key: value})
        self.cookie_config.consul_config_set(
            'config/spider/cookies,tb,prod/data', config_dict)
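# map_to_cookie above builds "k1=v1; k2=v2" and trims the trailing separator.
# A behaviourally equivalent, more idiomatic one-liner (a suggestion, not part
# of the original file) would be:
def dict_to_cookie_header(cookie_dict: dict) -> str:
    return '; '.join(f'{k}={v}' for k, v in cookie_dict.items())


# e.g. dict_to_cookie_header({'a': 1, 'b': 2}) -> 'a=1; b=2'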
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = '李元'

import datetime
import time

from log.logger import get_logger
from model import user
from utils import mongo_utils
from utils import util

logger = get_logger(__file__)


class UserService(mongo_utils.MongoUtil):

    # def __init__(self):
    #     self.collection = mongo_utils.MongoUtil.db['user']

    def register(self, userId, password):
        '''
        Register a new user.
        :param userId: user ID
        :param password: password
        :return: True or False & reason
        '''
        self.collection = self.db['user']
        # NOTE: cursor.count() was removed in PyMongo 4; count_documents is the
        # modern equivalent (see the sketch after this file).
        cnt = self.collection.find({'userId': userId}).count()
        if cnt == 0:
            user_model = user.User()
            token_model = user.Token()
            token_model.token = util.produce_token()
            user_model.phone = userId
            user_model.userId = userId
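# A minimal sketch (an assumption, not part of the original file) of the same
# existence check written against PyMongo 4+, where cursor.count() no longer
# exists:
def user_exists(collection, userId) -> bool:
    # count_documents takes the same filter document as find()
    return collection.count_documents({'userId': userId}) > 0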
import requests

from log.logger import get_logger
from service.base_cookies_valid import CookieValid

logger = get_logger('pdd_cookies_valid')


class PddCookieValidator(CookieValid):

    def __init__(self, website: str):
        self.website = website
        CookieValid.__init__(self, website)

    def update_consul(self, key: str):
        config_dict = self.cookie_config.config_dict
        config_dict.update({key: False})
        self.cookie_config.consul_config_set(
            'config/spider/cookies,pdd,dev/data', config_dict)

    def validator(self, account_cookies: dict):
        valid_url = self.valid_url
        try:
            account = account_cookies['phone']
            cookies = account_cookies['cookie']
            cookie_data = CookieValid.cookie_to_map(cookies)
            response = requests.get(valid_url, cookies=cookie_data,
                                    timeout=10, allow_redirects=False)
            # rule for deciding whether the cookie has expired
            if response.status_code == 200:
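# CookieValid.cookie_to_map (used above) lives in the base class, which is not
# shown here. A minimal sketch of what such a helper typically does, i.e. the
# inverse of TbCookieValidator.map_to_cookie (an assumption about its
# behaviour, not the actual implementation):
def cookie_to_map(cookie_str: str) -> dict:
    # 'a=1; b=2' -> {'a': '1', 'b': '2'}
    return dict(
        pair.split('=', 1)
        for pair in cookie_str.split('; ')
        if '=' in pair
    )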
# Assumed imports for this excerpt (the module header is not shown):
from http import HTTPStatus

from log.logger import get_logger


def health_check():
    """ Controller for health check status """
    logger = get_logger()
    logger.info("status", "Application is up and running")
    return "Application is up and running", HTTPStatus.OK
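# A minimal sketch (assumed; the original excerpt only shows the controller) of
# wiring health_check into a Flask app. The route path '/health' is illustrative.
from flask import Flask

app = Flask(__name__)
app.add_url_rule('/health', view_func=health_check)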
import time
from functools import wraps
from urllib.parse import urlparse

from flask import request

from log.logger import get_logger, get_influx_logger

log = get_logger('cookies_pool')
influx_logger = get_influx_logger('cookies_pool')


def log_access(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        st = time.time()
        result = func(*args, **kwargs)
        rt = round((time.time() - st) * 1000, 3)
        path = urlparse(request.url).path
        log.info({
            'uri': request.url,
            # 'param': ast.literal_eval(request.query_string.decode('utf-8')),
            'ms': rt,
            'response': result
        })
        # InfluxDB line protocol: measurement,tag field,field
        influx_logger.info('cookies_pool,urlpath={} value={},rt={}'.format(
            path, 1, rt))
        return result
    return wrapper
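# A minimal usage sketch (assumed; not part of the original file): applying the
# decorator to a Flask view so each request's latency is logged and pushed to
# Influx. The app and route below are hypothetical.
from flask import Flask

app = Flask(__name__)


@app.route('/cookies/<website>')
@log_access
def get_cookie(website):
    return {'website': website}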