def deal(self, content):
    """Build the index payload from an already-fetched index page.

    Parses `content` for the general index, then fetches every listed
    news post plus three fixed category pages (CatIds 66, 67, 68) in
    parallel.  Retries the whole fetch batch up to 4 times on
    ``HTTPError``, sleeping 100 ms between attempts.

    Returns a dict with:
      - "news": the parsed posts, each with its "link" rewritten to the
        canonical URL via ``convertUrl(..., strict=True)``;
      - "info": the first four entries of each category, flattened.

    Raises the last ``HTTPError`` when every retry fails.  (The original
    fell out of the loop with ``ret`` unbound and crashed with an
    ``UnboundLocalError`` instead.)

    NOTE(review): Tornado generator-style coroutine — yielded lists of
    futures are resolved in parallel by the gen runner; presumably the
    enclosing class applies ``@tornado.gen.coroutine`` — confirm.
    """

    def compact(groups):
        # Flatten one level: [[a, b], [c]] -> [a, b, c].
        flat = []
        for group in groups:
            flat += group
        return flat

    general = parser.ParseIndexGeneral(content)

    tries = 4
    while True:
        try:
            # Fetch all news posts and the three category pages concurrently.
            news = yield [get_data(i[1], parser.ParsePost) for i in general['news']]
            info = yield [get_data(makeUrl('category', CatId=i), parser.ParseCategory) for i in [66, 67, 68]]

            ret = {
                "news": [json.loads(i) for i in news],
                "info": compact([json.loads(i)[:4] for i in info]),
            }
            break
        except HTTPError:
            tries -= 1
            if tries <= 0:
                # Out of retries: propagate the failure instead of
                # falling through to an unbound `ret` below.
                raise
            print("503, Retry.....")
            yield tornado.gen.sleep(.1)

    # Rewrite each post's link to its canonical public URL.
    for n, i in enumerate(ret['news']):
        i["link"] = convertUrl(general["news"][n][1], strict=True)
    return ret
# Example 2 (scraper separator, originally "Esempio n. 2" / "0" — the
# block below duplicates the `deal` method above at method indentation)
    def deal(self, content):
        """Build the index payload from an already-fetched index page.

        Parses `content` for the general index, then fetches every listed
        news post plus three fixed category pages (CatIds 66, 67, 68) in
        parallel.  Retries the whole fetch batch up to 4 times on
        ``HTTPError``, sleeping 100 ms between attempts.

        Returns a dict with:
          - "news": the parsed posts, each with its "link" rewritten to
            the canonical URL via ``convertUrl(..., strict=True)``;
          - "info": the first four entries of each category, flattened.

        Raises the last ``HTTPError`` when every retry fails.  (The
        original fell out of the loop with ``ret`` unbound and crashed
        with an ``UnboundLocalError`` instead.)

        NOTE(review): Tornado generator-style coroutine — presumably
        decorated with ``@tornado.gen.coroutine`` at the class — confirm.
        """

        def compact(groups):
            # Flatten one level: [[a, b], [c]] -> [a, b, c].
            flat = []
            for group in groups:
                flat += group
            return flat

        general = parser.ParseIndexGeneral(content)

        tries = 4
        while True:
            try:
                # Fetch all news posts and the three category pages concurrently.
                news = yield [get_data(i[1], parser.ParsePost) for i in general['news']]
                info = yield [get_data(makeUrl('category', CatId=i), parser.ParseCategory) for i in [66, 67, 68]]

                ret = {
                    "news": [json.loads(i) for i in news],
                    "info": compact([json.loads(i)[:4] for i in info]),
                }
                break
            except HTTPError:
                tries -= 1
                if tries <= 0:
                    # Out of retries: propagate the failure instead of
                    # falling through to an unbound `ret` below.
                    raise
                print("503, Retry.....")
                yield tornado.gen.sleep(.1)

        # Rewrite each post's link to its canonical public URL.
        for n, i in enumerate(ret['news']):
            i["link"] = convertUrl(general["news"][n][1], strict=True)
        return ret
def get_data(url, handler):
    """Fetch `url`, parse it with `handler`, and return the JSON-encoded result.

    Unless ``options.DEBUG`` is set, a cached copy keyed by the converted
    URL is returned when present; otherwise the freshly parsed result is
    written back to the cache with the configured TTL before returning.

    Tornado generator-style coroutine — every awaited step is ``yield``ed.
    """
    cache_key = parser.convertUrl(url)

    if not options.DEBUG:
        hit = yield fetcher.get_data(cache_key)
        if hit:
            return hit

    page = yield fetcher.get_page(url)
    parsed = yield maybe_future(handler(page))
    encoded = json.dumps(parsed)
    yield fetcher.write_data(cache_key, encoded, options.CACHE_TIME)
    return encoded
# Example 4 (scraper separator, originally "Esempio n. 4" / "0" — the
# function below duplicates `get_data` above)
def get_data(url, handler):
    """Coroutine: return ``handler(page)`` for `url`, JSON-serialized, with caching.

    The cache lookup is skipped entirely in DEBUG mode; fresh results are
    stored under the normalized URL for ``options.CACHE_TIME`` seconds.
    """
    normalized = parser.convertUrl(url)
    if not options.DEBUG:
        stored = yield fetcher.get_data(normalized)
        if stored:
            return stored

    body = yield fetcher.get_page(url)
    data = yield maybe_future(handler(body))
    serialized = json.dumps(data)
    yield fetcher.write_data(normalized, serialized, options.CACHE_TIME)
    return serialized