def testLogHandler():
    """
    Test function for LogHandler in util/log_handler.
    :return:
    """
    log = LogHandler('test')
    log.error('this is a log from test')

    log.resetName(name='test1')
    log.warning('this is a log from test1')

    log.resetName(name='test2')
    log.info('this is a log from test2')
Example #2
def run_schedule():
    start_init_proxy()
    start_proxy_check()

    schedule_log = LogHandler('schedule_log')
    schedule = BlockingScheduler(logger=schedule_log)

    schedule.add_job(start_init_proxy,
                     'interval',
                     minutes=GETTER_CYCLE,
                     id="start_init_proxy",
                     name="抓取代理初始化验证")
    schedule.add_job(start_proxy_check,
                     'interval',
                     minutes=TESTER_CYCLE,
                     id="start_proxy_check",
                     name="代理可用性定时复核")

    schedule.start()
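
# run_schedule() assumes that BlockingScheduler is imported from APScheduler and
# that GETTER_CYCLE / TESTER_CYCLE are defined in the surrounding module; a
# minimal entry point would look like:
#
#     from apscheduler.schedulers.blocking import BlockingScheduler
#
#     if __name__ == '__main__':
#         run_schedule()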
Example #3
import os
import random
import sys

# path of the current file
pwd = os.getcwd()
project_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
sys.path.append(project_path)
print(project_path)
from proxy.proxy_valid import ValidIp
from api.rest_api import RestApi
from util.util_function import CheckDir, DownloadFile, WriteInfo

from util.log_handler import LogHandler

from util.config import GetConfig

# log = LogHandler('read_csv')
log = LogHandler('new_0')

api = RestApi()

configs = GetConfig()

# proxies = ValidIp(True,'http://www.jiayuan.com')

proxies = ValidIp(True, 'http://www.jiayuan.com')

print(proxies)

url_address = 'http://www.jiayuan.com/'

# path of the current file
Example #4
# -*- coding: utf-8 -*-
'''
-----------------------------------
    FileName:     check_proxy
    Description:  validate proxy format
    Author:       瓦都尅
    Date:         2019/10/30
-----------------------------------
'''
import re

from proxy.get_free_proxyip import GetFreeProxy
from util.log_handler import LogHandler

log = LogHandler('check_proxy', file=False)


def verifyProxyFormat(proxy):
    """
    检查代理格式
    """
    verify_regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}"
    _proxy = re.findall(verify_regex, proxy)
    return len(_proxy) == 1 and _proxy[0] == proxy
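
# A brief usage sketch of verifyProxyFormat (the sample values below are
# illustrative only, not taken from the project):
#
#     verifyProxyFormat('118.24.52.95:8080')    # True  -> bare ip:port
#     verifyProxyFormat('http://1.2.3.4:8080')  # False -> scheme prefix rejected
#     verifyProxyFormat('1.2.3.4')              # False -> port missing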


class CheckProxy(object):
    @staticmethod
    def checkAllGetProxyFunc():
        """
        Check how every proxy-fetching function in get_free_proxyip runs
Example #5
import os
import random
import sys

# path of the current file
pwd = os.getcwd()
project_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
sys.path.append(project_path)
print(project_path)
from proxy.proxy import ValidIp
from api.rest_api import RestApi
from util.util_function import CheckDir, DownloadFile, WriteInfo

from util.log_handler import LogHandler

from util.config import GetConfig

# log = LogHandler('read_csv')
log = LogHandler('search_user_photos')

api = RestApi()

configs = GetConfig()

# proxies = ValidIp("local",'http://www.jiayuan.com')

proxies = ValidIp("local", 'http://www.jiayuan.com')

print(proxies)

url_address = 'http://www.jiayuan.com/'

# path of the current file
Example #6
import os
import sys

# path of the current file
pwd = os.getcwd()
project_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
sys.path.append(project_path)

from util.config import GetConfig

configs = GetConfig()

from proxy.proxy_valid import ValidIp
from api.rest_api import RestApi
from util.util_function import CheckDir, DownloadFile, WriteInfo

from util.log_handler import LogHandler

log = LogHandler('read_csv')

api = RestApi()

# proxies = ValidIp('1','http://www.jiayuan.com')
proxies = ValidIp(True, 'http://www.jiayuan.com')

# path of the current file

# csv_path = project_path+'\logs\csv\\'
csv_path = project_path + '/logs/csv/'

# output directory
out_dir = './download'

#!/usr/bin/env python
# -*- coding: utf-8 -*-

import requests
import time, os, sys
from lxml import etree
from contextlib import closing

from util.log_handler import LogHandler
from util.web_request import WebRequest

sys.path.append('..')

log = LogHandler('photo')

# # path of the current file
# pwd = os.getcwd()
# # parent path of the current file
# father_path=os.path.abspath(os.path.dirname(pwd)+os.path.sep+".")
# # the directory two levels above the current file
# grader_father=os.path.abspath(os.path.dirname(pwd)+os.path.sep+"..")


# noinspection PyPep8Naming
def robustCrawl(func):
    def decorate(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # minimal handler: log the error so the decorated crawl keeps running
            log.error('robustCrawl caught: %s', e)
Example #8
def __init__(self):
    self.client = db.DBclient()
    self.log = LogHandler('proxy_manager')
Example #9
# coding=utf-8

import json, random, sys
import requests

sys.path.append('..')

from util.config import GetConfig
from util.log_handler import LogHandler

configs = GetConfig()

log = LogHandler('proxy')

# local IP source
proxy_local_host = configs.proxy_local

# online IP source: https://github.com/jhao104/proxy_pool
proxy_online_host = configs.proxy_online


#
# 1. Only one method needs to be called; both the local and the online source work
#
#
# fetch an IP from the local proxy source
def GetLocalIp():

	r = requests.get(proxy_local_host)
	ip_ports = json.loads(r.text)
	num = random.randint(0,10)
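
# A hedged sketch of how GetLocalIp() might finish; the shape of the JSON
# returned by the local pool endpoint is an assumption, not shown above:
#
#     ip_port = ip_ports[num % len(ip_ports)]   # pick one entry
#     return {'http': 'http://' + ip_port}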
# from tomorrow import threads
import json
import os
import random
import sys

# path of the current file
pwd = os.getcwd()
project_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
sys.path.append(project_path)
print(project_path)
from api.rest_api import RestApi
from util.util_function import CheckDir, DownloadFile, WriteInfo

from util.log_handler import LogHandler

from util.config import GetConfig

# log = LogHandler('read_csv')
log = LogHandler('test_uid')

api = RestApi()


def get_uid(data):
    try:
        r = api.get_uid(data)
        return (json.loads(r)["data"])
    except Exception as e:
        log.error("api request fail: %s", e)


while True:
    data = {'need': 20000, 'remark': "im test"}
from util.log_handler import LogHandler

from util.config import GetConfig

configs = GetConfig()

host = 'ws://' + str(configs.host_ip) + ':' + str(configs.host_port) + "/cable"

try:
    import thread
except ImportError:
    import _thread as thread
import json
import time

logger = LogHandler('web_socket')
logger.info('this is a log from web_socket')


def on_message(ws, message):
    data = json.loads(message)
    print(data['type'])

    if data['type'] == 'ping':
        print(data['type'])

    else:

        logger.info(data)
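
# A minimal sketch of connecting on_message to the `host` endpoint defined
# above; using the websocket-client package here is an assumption, the snippet
# only defines the message handler.
import websocket  # assumed dependency: the websocket-client package


def run_socket():
    # open the websocket endpoint and hand incoming frames to on_message
    ws = websocket.WebSocketApp(host, on_message=on_message)
    ws.run_forever()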

Example #12
def __init__(self):
    self._pm = ProxyManager()
    self.log = LogHandler('proxy_refresher')
Example #13
def __init__(self):
    self._pm = ProxyManager()
    self.queue = Queue()
    self.proxy_list = None
    self.proxy_dict = dict()
    self.log = LogHandler('proxy_validater')
def __init__(self, queue, item_dict):
    self._pm = ProxyManager()
    super().__init__()
    self.log = LogHandler('validate_proxy', file=False)  # multiple threads writing to one log file causes problems
    self.queue = queue
    self.item_dict = item_dict
Example #15
# print(project_path)

from login import GetUserCookie
from proxy.proxy_valid import ValidIp
from api.rest_api import RestApi
from util.util_function import CheckDir, DownloadFile, WriteInfo
from util.log_handler import LogHandler
from util.config import GetConfig

# change: name of the current crawler
app = "uid5"

# change: number of uids requested per call
req_nums = 200

log = LogHandler(app)

# initialization
api = RestApi()
configs = GetConfig()

url_address = 'http://www.jiayuan.com/'

# path of the current file
csv_path = project_path + '/logs/csv/'

# output directory
out_dir = './download.new'

# cookie = GetUserCookie()
Example #16
def __init__(self):
    ProxyManager.__init__(self)
    self.log = LogHandler('fetch_proxy')
Example #17
def __init__(self, queue, thread_name):
    ProxyManager.__init__(self)
    Thread.__init__(self, name=thread_name)
    self.queue = queue
    self.log = LogHandler('init_proxy_check')
Example #18
# coding=utf-8

import requests
import json, sys
import random
from proxy.proxy import NewProxyIp

sys.path.append('..')

from util.log_handler import LogHandler

log = LogHandler('proxy')
loger = LogHandler('proxy_ok')

#
# 2. After obtaining a proxy, check whether it can access the target site
#

# fetch an IP: NewProxyIp() defaults to the online source, NewProxyIp("1") uses the local one
def GenNewIp(local):
	proxy = NewProxyIp(local)
	return proxy

# check whether the proxy IP can access the site
# ValidIp('1','http://www.jiayuan.com')
def ValidIp(local=True, valid_host='http://httpbin.org/ip'):
	# call the IP-fetching method
	proxy = GenNewIp(local)
	# print(proxy)

	retry_count = 20
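
# A hedged sketch of the retry loop that typically follows; the request and
# return logic below is an assumption, only `proxy`, `valid_host`, `local` and
# `retry_count` come from the snippet above:
#
#     while retry_count > 0:
#         try:
#             r = requests.get(valid_host,
#                              proxies={'http': 'http://' + proxy},
#                              timeout=5)
#             if r.status_code == 200:
#                 return proxy
#         except requests.RequestException:
#             proxy = GenNewIp(local)   # fetch a fresh proxy and try again
#         retry_count -= 1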