Example #2
from models.crawler import Proxy_proxy
from lib.timeutil import get_now_timestamp, str_to_timestamp
def save_proxylist():
    '''Save the fetched proxy info list to the database.'''
    # getproxylist() and handle_proxy_list() come from elsewhere in the project and are not shown here
    proxylist = handle_proxy_list(getproxylist())

    # get the timestamp of the most recently saved proxy
    query = Proxy_proxy.select(Proxy_proxy.ftime).order_by(Proxy_proxy.ftime.desc()).limit(1)
    try:
        lasttime = str_to_timestamp(query[0].ftime)
    except Exception:
        # empty table or unparsable ftime: treat every fetched proxy as new
        lasttime = 0

    # walk the fetched list in reverse and insert only entries newer than lasttime
    for pa in proxylist[::-1]:
        if len(pa) > 0 and str_to_timestamp(pa[-1], yearlen=2, hassec=False) > lasttime:
            proxy = Proxy_proxy()
            proxy.fip = pa[0]
            # proxy.fhost = pa[1]
            proxy.fport = pa[1]
            proxy.fprotocal = pa[4]
            proxy.ftime = pa[-1]
            proxy.fcreatetime = get_now_timestamp()
            proxy.save()
            # proxy.close()

    # flush redis
    


# proxylist = handle_proxy_list(getproxylist())
# print len(proxylist)
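
The Proxy_proxy model and the lib.timeutil helpers are imported from the project and are not shown on this page. The chained select()/order_by()/limit() calls look like a peewee-style ORM, so the sketch below is a hypothetical reconstruction under that assumption: the field names are taken from the example above, while the base class, database binding, field types, and the helper bodies are illustrative guesses rather than the project's actual code.

# --- Hypothetical sketch, not part of the original source -------------------
from peewee import Model, CharField, IntegerField, SqliteDatabase
import time

db = SqliteDatabase('crawler.db')  # assumed database binding

class Proxy_proxy(Model):
    fip = CharField()             # proxy IP address
    fport = CharField()           # proxy port
    fprotocal = CharField()       # protocol, e.g. 'HTTP' (field name kept as in the source)
    ftime = CharField()           # last-checked time string from the upstream proxy list
    fcreatetime = IntegerField()  # Unix timestamp of when the row was saved

    class Meta:
        database = db

def get_now_timestamp():
    '''Return the current time as an integer Unix timestamp.'''
    return int(time.time())

def str_to_timestamp(timestr, yearlen=4, hassec=True):
    '''Parse a time string into a Unix timestamp; yearlen and hassec are guesses
    about the format, e.g. "17-03-05 12:30" when yearlen=2 and hassec=False.'''
    fmt = '%Y-%m-%d %H:%M' if yearlen == 4 else '%y-%m-%d %H:%M'
    if hassec:
        fmt += ':%S'
    return int(time.mktime(time.strptime(timestr, fmt)))

Under a model like this, save_proxylist() only appends rows whose parsed ftime is newer than the newest ftime already stored, so repeated runs do not re-insert proxies that were saved earlier.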