Example #1
File: main.py  Project: Crazy-Cabbage/wet
def feeds2all():
    from rss import get_rss_entries
    
    lasttimes = read_rss_lasttimes()
    if lasttimes is None:
        lasttimes = {}
        
    for format_, url in conf.feeds:
        lasttime = lasttimes.get(url, None)
        if lasttime is None:
            log("first time fetching %s, skip", url)
            lasttimes[url] = gmtime()
            save_rss_lasttimes(lasttimes)
            continue
        
        statuses = get_rss_entries(url, lasttime)
        maxtime = lasttime
        
        for status, publishtime in statuses:
            status = format_ % status
            
            log("[publishing] %s : %s",
                strftime("%Y-%m-%d %H:%M:%S", publishtime),
                status,
            )

            if pub2all(status) and maxtime < publishtime:
                maxtime = publishtime
                sleep(10)
            
        lasttimes[url] = maxtime
        save_rss_lasttimes(lasttimes)
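
Example #1 persists the per-feed fetch times through read_rss_lasttimes() and save_rss_lasttimes(), which are not part of the snippet. A minimal sketch of what they could look like, assuming a pickle file next to main.py (the file name and the pickle format are assumptions, not taken from the project):

import os
import pickle

LASTTIMES_FILE = 'rss_lasttimes.pickle'  # assumed location and name

def read_rss_lasttimes():
    # Return the saved {feed url: struct_time} mapping, or None on the first run.
    if not os.path.exists(LASTTIMES_FILE):
        return None
    with open(LASTTIMES_FILE, 'rb') as f:
        return pickle.load(f)

def save_rss_lasttimes(lasttimes):
    # Persist the mapping so the next run only publishes newer entries.
    with open(LASTTIMES_FILE, 'wb') as f:
        pickle.dump(lasttimes, f)
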
Example #2
File: main.py  Project: yishenggudou/wet
def feeds2all():
    from rss import get_rss_entries
    
    lasttimes = loadfrom('rss_lasttimes')
    if lasttimes is None:
        lasttimes = {}
        
    for format_, url in conf.feeds:
        lasttime = lasttimes.get(url, None)
        if lasttime is None:
            lasttimes[url] = gmtime()
            dumpto('rss_lasttimes', lasttimes)
            continue
        
        statuses = get_rss_entries(url, lasttime)
        maxtime = lasttime
        
        for status, publishtime in statuses:
            status = format_ % status
            print publishtime, status

            if pub2all(status):
                if maxtime < publishtime:
                    maxtime = publishtime
                sleep(10)
            
        lasttimes[url] = maxtime
        dumpto('rss_lasttimes', lasttimes)
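
Example #2 swaps the dedicated helpers for generic loadfrom()/dumpto() calls, but both variants unpack two-tuples from conf.feeds. A hypothetical conf module consistent with that loop (the format strings and URLs are placeholders):

# conf.py (hypothetical): each feed is (format string, feed URL).
# get_rss_entries() yields one value per entry, which is substituted
# into the format string with the % operator before publishing.
feeds = [
    ('blog: %s', 'https://example.com/atom.xml'),
    ('news: %s', 'https://example.org/rss'),
]
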
Example #3
File: main.py  Project: ifduyue/wet
def feeds2all():
    from rss import get_rss_entries

    lasttimes = read_rss_lasttimes()
    if lasttimes is None:
        lasttimes = {}

    for feed in conf.feeds:
        format_, url = feed[0:2]
        nhead = feed[2] if feed[2:] else 0
        lasttime = lasttimes.get(url, None)
        if lasttime is None:
            log("first time fetching %s, skip", url)
            lasttimes[url] = gmtime()
            lasttimes['links_' + url] = []
            save_rss_lasttimes(lasttimes)
            continue

        statuses = get_rss_entries(url, lasttime, nhead=nhead)
        maxtime = lasttime

        for entry, publishtime in statuses:
            status = format_ % entry

            if not can_pub(status):
                log(
                    "[skipping] %s can not be published because of include and exlucde conf",
                    status)
                continue

            if entry['link'] in lasttimes['links_' + url]:
                log(
                    "[skipping] %s can not be published because it has already bean published",
                    status)
                continue

            log(
                "[publishing] %s : %s",
                strftime("%Y-%m-%d %H:%M:%S", publishtime)
                if publishtime is not None else 'None',
                status,
            )

            if pub2all(status, entry):
                if publishtime is not None and maxtime < publishtime:
                    maxtime = publishtime

                lasttimes['links_' + url].append(entry['link'])

                sleep(10)

        lasttimes[url] = maxtime if maxtime != lasttime else gmtime()
        lasttimes['links_' + url] = lasttimes['links_' + url][-100:]

        save_rss_lasttimes(lasttimes)
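
get_rss_entries() comes from a local rss module that is not shown. A minimal sketch of the interface this variant appears to expect, built on feedparser; using feedparser at all, and reading nhead as "skip the first nhead entries", are assumptions:

import feedparser

def get_rss_entries(url, lasttime, nhead=0):
    # Yield (entry, publishtime) pairs for entries newer than lasttime.
    # publishtime is a time.struct_time, or None when the feed omits it.
    feed = feedparser.parse(url)
    for entry in feed.entries[nhead:]:
        publishtime = entry.get('published_parsed') or entry.get('updated_parsed')
        if publishtime is not None and publishtime <= lasttime:
            continue  # already handled on a previous run
        yield entry, publishtime

feedparser entries are dict-like, so they work with %(title)s-style format strings and with the entry['link'] lookups used above.
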
Example #4
File: main.py  Project: forestjiang/wet
def feeds2all():
    from rss import get_rss_entries
    
    lasttimes = read_rss_lasttimes()
    if lasttimes is None:
        lasttimes = {}
        
    for feed in conf.feeds:
        format_, url = feed[0:2]
        nhead = feed[2] if feed[2:] else 0
        lasttime = lasttimes.get(url, None)
        if lasttime is None:
            log("first time fetching %s, skip", url)
            lasttimes[url] = gmtime()
            lasttimes['links_' + url] = []
            save_rss_lasttimes(lasttimes)
            continue
        
        statuses = get_rss_entries(url, lasttime, nhead=nhead)
        maxtime = lasttime
        
        for entry, publishtime in statuses:
            status = format_ % entry

            if not can_pub(status):
                log("[skipping] %s can not be published because of include and exlucde conf", status)
                continue

            if entry['link'] in lasttimes['links_' + url]:
                log("[skipping] %s can not be published because it has already bean published",  status)
                continue

            log("[publishing] %s : %s",
                strftime("%Y-%m-%d %H:%M:%S", publishtime) if publishtime is not None else 'None',
                status,
            )

            if pub2all(status, entry):
                if publishtime is not None and maxtime < publishtime:
                    maxtime = publishtime
                
                lasttimes['links_' + url].append(entry['link'])

                sleep(10)
            
        lasttimes[url] = maxtime if maxtime != lasttime else gmtime()
        lasttimes['links_' + url] = lasttimes['links_' + url][-100:]

        save_rss_lasttimes(lasttimes)
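
Examples #3 and #4 also filter each status through can_pub() before publishing. A hypothetical implementation consistent with the "include and exclude conf" log message (conf.include and conf.exclude are assumed attribute names):

import conf  # the project's configuration module, as used throughout main.py

def can_pub(status):
    # Publishable when the status matches at least one include keyword
    # (if any are configured) and matches no exclude keyword.
    include = getattr(conf, 'include', [])
    exclude = getattr(conf, 'exclude', [])
    if include and not any(word in status for word in include):
        return False
    return not any(word in status for word in exclude)
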