def get_json(url):
    """Fetch *url* and decode its body as JSON.

    :param url: endpoint to GET.
    :return: the decoded JSON object, or ``''`` when the request or the
        decode fails (historical failure default kept for existing callers).
    """
    _json = ''
    try:
        response = requests.get(url, headers=headers, timeout=request_timeout)
        # response.json() is the idiomatic equivalent of
        # json.loads(response.text) and honors the declared charset.
        _json = response.json()
    except Exception as e:
        # Best-effort fetch: log and fall through to the '' default.
        logger.exception(e)
    return _json
def get_white_list(_url):
    """Fetch the whitelist API and return the list stored under ``'msg'``.

    :param _url: whitelist endpoint to GET; expected to return a JSON
        object shaped like ``{'msg': [...]}`` — TODO confirm against server.
    :return: the ``'msg'`` payload, or ``[]`` on any failure.
    """
    api_list = []
    try:
        response = requests.get(_url, headers=headers, timeout=request_timeout)
        payload = json.loads(response.text)
        # Bug fix: payload.get('msg') could yield None (missing or null
        # key) and leak it to callers expecting a list; coerce to [].
        api_list = payload.get('msg') or []
    except Exception as e:
        logger.exception(e)
    return api_list
def add_white_list(_url, ip):
    """Register the local IP on the remote whitelist.

    :param _url: whitelist-registration endpoint; the IP is appended to it.
    :param ip: local IP address to register.
    :return: None — the server's reply is only logged.
    """
    endpoint = _url + ip
    try:
        reply = requests.get(endpoint, headers=headers, timeout=request_timeout)
        logger.info(reply.text)
    except Exception as err:
        # Registration is best-effort: log and continue.
        logger.exception(err)
def get_localhost_ip(_url):
    """Discover the local public IP by scraping a Baidu search result page.

    :param _url: search URL whose result page contains '本机IP: x.x.x.x'.
    :return: the IP string, or ``None`` when it is absent or the request fails.
    """
    try:
        response = requests.get(_url, headers=headers, timeout=request_timeout)
        html = response.text
        # Bug fix: the old class [0-9|.] also matched a literal '|' because
        # '|' is not an alternation inside a character class; [0-9.] keeps
        # only digits and dots.
        pat = re.compile('本机IP: ([0-9.]+)')
        result = pat.findall(html)
        if result:
            return result[0]
    except Exception as e:
        logger.exception(e)
    # Implicit None on miss/error, matching the original contract.
def get_page_source(url, use_proxy=False, proxies=None):
    """Download *url* and return its body text.

    :param url: page to fetch.
    :param use_proxy: when True, route the request through *proxies*.
    :param proxies: requests-style proxy mapping, e.g. ``{'https': ...}``.
    :return: the page text, or ``''`` when the request fails.
    """
    page_source = ''
    # Shadows any module-level `headers` on purpose: each fetch gets a
    # fresh random User-Agent.
    headers = {'User-Agent': get_random_user_agent()}
    try:
        # proxies=None is ignored by requests, so a single call replaces
        # the original's duplicated per-branch requests.get.
        response = requests.get(
            url,
            headers=headers,
            timeout=request_timeout,
            proxies=proxies if use_proxy else None,
        )
        page_source = response.text
        return page_source
    except Exception as e:
        logger.exception(e)
        if proxies:
            # Surface the (likely dead) proxy so the pool can drop it.
            message = 'proxy {} is no longer in force'.format(
                proxies.get('https'))
            logger.info(message)
    return page_source
def wrapped(*args, **kwargs):
    # Closure body of a logging decorator: `func` and `msg` are free
    # variables supplied by the enclosing scope (not visible in this chunk).
    # Runs the wrapped callable and swallows any exception, logging `msg`
    # with the traceback instead of re-raising.
    # NOTE(review): the wrapped call's return value is discarded, so the
    # decorated function always yields None — confirm callers don't need it.
    try:
        func(*args, **kwargs)
    except Exception as e:
        # Logs `msg` rather than `e` — presumably a context string chosen
        # by the decorator; logger.exception still attaches the traceback.
        logger.exception(msg)