Example #1
0
def capture_main_data_and_analysis():
    """
    Start the spider, capture the main page and mood list, then run the
    full analysis pipeline and persist the user record.

    :return: None
    """
    analyzer = QQZoneAnalysis(use_redis=False, debug=True,
                              stop_time='2011-11-11', mood_num=20,
                              analysis_friend=False)
    analyzer.login_with_qr_code()
    analyzer.get_main_page_info()
    analyzer.get_mood_list()
    # Friend capture is optional; only fan out workers when enabled.
    if analyzer.analysis_friend:
        analyzer.thread_num = 20
        analyzer.get_friend_detail()
    do_analysis_for_all(analyzer)
    analyzer.user_info.save_user()
Example #2
0
def web_interface(username, nickname, stop_time, mood_num, cookie_text,
                  no_delete, password, pool_flag):
    """
    Web entry point: log in to QQZone, capture mood and friend data in
    parallel threads, then clean and analyse the results, reporting
    progress to the web front end through redis.

    :param username: QQ account to crawl
    :param nickname: display name of the account
    :param stop_time: earliest mood timestamp to capture
    :param mood_num: maximum number of moods to capture
    :param cookie_text: raw cookie string supplied by the web client
    :param no_delete: whether to keep intermediate data
    :param password: account password, cached in redis for this session
    :param pool_flag: redis connection-pool selector
    :return: None
    """
    sp = QQZoneAnalysis(use_redis=True,
                        debug=False,
                        username=username,
                        analysis_friend=True,
                        from_web=True,
                        nickname=nickname,
                        stop_time=stop_time,
                        mood_num=mood_num,
                        no_delete=no_delete,
                        cookie_text=cookie_text,
                        pool_flag=pool_flag)
    try:
        # Cache the user's password for this session.
        sp.re.hset(USER_MAP_KEY, username, password)
        sp.logging_info(username + "init success")
        sp.login_with_qr_code()
        sp.logging_info(username + "logging success")
        sp.re.rpush(WEB_SPIDER_INFO + username,
                    "用户" + str(sp.username) + "登陆成功")
    except BaseException as e:
        sp.format_error(e, "logging failed")
        sp.re.rpush(WEB_SPIDER_INFO + username, LOGIN_FAILED)
        # Drop the cached password on failure.
        sp.re.hdel(USER_MAP_KEY, username)
        # BUG FIX: previously fell through and kept crawling without a
        # valid session; abort instead (matches the other entry point).
        exit(1)
    sp.get_main_page_info()
    sp.logging_info("get main page success")
    try:
        # Capture moods and friend details concurrently.
        t1 = threading.Thread(target=sp.get_mood_list)
        t2 = threading.Thread(target=sp.get_friend_detail)
        # Non-daemon so the process waits for the capture to finish
        # (setDaemon() is deprecated; assign the attribute directly).
        t1.daemon = False
        t2.daemon = False
        t1.start()
        t2.start()
        # Wait for both capture threads to complete.
        t1.join()
        t2.join()
    except BaseException:
        sp.re.rpush(WEB_SPIDER_INFO + username, GET_MOOD_FAILED)
        exit(1)

    # Clean friend data.
    sp.clean_friend_data()
    # Fetch details of the first friend.
    sp.get_first_friend_info()
    # Clean mood data and find the top likers and commenters.
    sp.get_most_people()
    sp.calculate_send_time()
    sp.user_info.save_user()

    sp.draw_cmt_cloud(sp.mood_data_df)
    sp.draw_like_cloud(sp.mood_data_df)

    # Persist the mood dataframe.
    sp.export_mood_df()
    sp.re.set(MOOD_FINISH_KEY + str(username), 1)
    sp.calculate_history_like_agree()
    sp.re.set(CLEAN_DATA_KEY + username, 1)
    # Remove this user from the waiting queue. BUG FIX: redis-py's
    # lrem signature is lrem(name, count, value); count=0 removes all
    # occurrences. The count argument was missing here.
    sp.re.lrem(WAITING_USER_LIST, 0, username)
Example #3
0
def web_interface(username, nickname, stop_time, mood_num, cookie_text, no_delete, password, pool_flag):
    """
    Web entry point: log in to QQZone, capture mood and friend data in
    parallel threads, run the full analysis, and report progress and
    completion state to the web front end through redis.

    :param username: QQ account to crawl
    :param nickname: display name of the account
    :param stop_time: earliest mood timestamp to capture
    :param mood_num: maximum number of moods to capture
    :param cookie_text: raw cookie string supplied by the web client
    :param no_delete: whether to keep intermediate data
    :param password: account password, cached in redis for this session
    :param pool_flag: redis connection-pool selector
    :return: None
    """
    sp = QQZoneAnalysis(use_redis=True, debug=False, username=username, analysis_friend=True, from_web=True,
                        nickname=nickname, stop_time=stop_time, mood_num=mood_num, no_delete=no_delete, cookie_text=cookie_text, pool_flag=pool_flag)

    # Cache the password and mark the account as not-yet-logged-in.
    sp.re.hset(USER_MAP_KEY, username, password)
    sp.re.set(USER_LOGIN_STATE + username, 0)
    sp.logging_info(username + "init success")
    try:
        state = sp.login_with_qr_code()
        sp.remove_qr_code()
        # Abort this worker if the login did not succeed.
        if not state:
            sp.logging_info(username + "logging failed")
            sp.re.rpush(WEB_SPIDER_INFO + username, LOGIN_FAILED)
            exit(1)
        else:
            # Record the successful login state.
            sp.logging_info(username + "logging success")
            sp.re.rpush(WEB_SPIDER_INFO + username, "用户" + str(sp.username) + "登陆成功")
            sp.re.set(USER_LOGIN_STATE + username, 1)
    except BaseException as e:
        sp.format_error(e, "logging failed")
        sp.logging_info(username + "logging failed")
        sp.re.rpush(WEB_SPIDER_INFO + username, LOGIN_FAILED)
        exit(1)
    sp.get_main_page_info()
    sp.logging_info("get main page success")
    try:
        # Capture moods and friend details concurrently.
        t1 = threading.Thread(target=sp.get_mood_list)
        t2 = threading.Thread(target=sp.get_friend_detail)
        # Non-daemon so the process waits for the capture to finish
        # (setDaemon() is deprecated; assign the attribute directly).
        t1.daemon = False
        t2.daemon = False
        t1.start()
        t2.start()
        # Wait for both capture threads to complete.
        t1.join()
        t2.join()
    except BaseException:
        sp.re.rpush(WEB_SPIDER_INFO + username, GET_MOOD_FAILED)
        exit(1)
    sp.re.set(MOOD_FINISH_KEY + str(username), 1)
    sp.logging_info("finish to capture data")
    sp.logging_info("begin to analysis...")

    # Run the full analysis once capture is complete.
    do_analysis_for_all(sp)

    sp.user_info.save_user()
    sp.logging_info("finish to analysis")
    sp.re.set(CLEAN_DATA_KEY + username, 1)
    # BUG FIX: the previous get/int/set read-modify-write was not atomic
    # and could lose counts under concurrent workers; INCR is atomic and
    # treats a missing key as 0.
    sp.re.incr(FINISH_USER_NUM_KEY)
    # Remove this user from the waiting queue; note lrem's argument
    # order (name, count, value) differs from some redis-cli versions.
    sp.re.lrem(WAITING_USER_LIST, 0, username)
    sp.logging_info("finish to delete user from waiting list")
    sp.logging_info("Success!")