Example no. 1
    def test_seedids_oper(self):
        # Insert two fake seed ids; both should be pending and retrievable.
        SeedidsOper.insert_seeds(FAKE_IDS)
        assert len(SeedidsOper.get_seed_ids()) == 2
        assert SeedidsOper.get_seed_by_id(FAKE_ID) is not None

        # Marking one id as crawled should leave a single pending seed.
        SeedidsOper.set_seed_crawled(FAKE_ID, 1)
        assert len(SeedidsOper.get_seed_ids()) == 1
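
The test above relies on two fixtures, FAKE_IDS and FAKE_ID, that the snippet does not show. A minimal sketch of plausible definitions, assuming insert_seeds accepts an iterable of user ids (the values here are hypothetical; the real test module defines its own):

# Hypothetical fixtures, not the repo's actual values.
FAKE_ID = 1111111111
FAKE_IDS = [FAKE_ID, 2222222222]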
Example no. 2
def crawl_follower_fans(uid):
    seed = SeedidsOper.get_seed_by_id(uid)
    if seed.other_crawled == 0:
        # Collect relation ids of both types (fans and followers).
        rs = get_fans_or_followers_ids(uid, 1, 1)
        rs.extend(get_fans_or_followers_ids(uid, 2, 1))
        datas = set(rs)
        # Ids that already exist are skipped by insert_seeds.
        if datas:
            SeedidsOper.insert_seeds(datas)
        SeedidsOper.set_seed_other_crawled(uid)
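
A caller might drive this function over every seed whose relations have not yet been expanded. A minimal sketch, assuming get_seed_ids() yields rows exposing a uid attribute (the wrapper function is hypothetical):

# Hypothetical driver loop; assumes the rows returned by get_seed_ids()
# expose a `uid` attribute, matching the snippet's use of seed attributes.
def expand_all_seeds():
    for seed in SeedidsOper.get_seed_ids():
        crawl_follower_fans(seed.uid)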
Example no. 3
def crawl_comment_by_page(mid, page_num, session):
    try:
        cur_url = BASE_URL.format(mid, page_num)
        html = get_page(cur_url, auth_level=1, is_ajax=True)
        comment_datas, seed_ids = comment.get_comment_list(html, mid)
    except SoftTimeLimitExceeded:
        crawler.error(
            "comment SoftTimeLimitExceeded mid={mid} page_num={page_num}".
            format(mid=mid, page_num=page_num))
        # Retry the same page with the session argument included, and
        # return so we don't fall through with comment_datas unbound.
        return crawl_comment_by_page(mid, page_num, session)
    CommentOper.add_all(comment_datas, session)
    SeedidsOper.insert_seeds(seed_ids, session)
    if page_num == 1:
        WbDataOper.set_weibo_comment_crawled(mid, session)
    return html, comment_datas
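
Because the function returns both the fetched html and the parsed comments, a caller can page through a post's comments until a page comes back empty. A minimal sketch; the empty-page stopping rule and the page cap are assumptions, not part of the snippet:

# Hypothetical pagination loop; stopping on an empty page is an assumption.
def crawl_all_comments(mid, session, max_pages=100):
    for page_num in range(1, max_pages + 1):
        html, comment_datas = crawl_comment_by_page(mid, page_num, session)
        if not comment_datas:
            break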