Example no. 1
def test_parse_follower_data():
    test_url = Setting.url.format('mu-yi-81-66', 20, 20)
    url_dealer = URLDealer(test_url, Setting.headers, Setting.timeout)
    data = url_dealer.get_response().text
    dm = DataParser(data, Queue(100))
    dm.parse_follower_data()
    queue = dm.followers_queue
    while not queue.empty():
        print(queue.get())
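All of these snippets lean on a Setting configuration object and a URLDealer request wrapper that are defined elsewhere in their projects. The sketch below shows one minimal way those two classes could be written so that the calls in the examples would run; every concrete value (URL templates, headers, timeout) is a placeholder, and get_response_text() is taken from Example 7. The follower examples construct URLDealer(url, headers, timeout) while the epidemic-page examples use URLDealer(base_url, timeout), so the two projects presumably define slightly different constructors; this sketch follows the three-argument form.

# Minimal sketch, assuming requests and BeautifulSoup underneath; not the
# projects' real code. All concrete values are placeholders.
import requests
from bs4 import BeautifulSoup


class Setting:
    url = 'https://example.com/members/{}/followers?offset={}&limit={}'  # placeholder template
    base_url = 'https://example.com/epidemic'                            # placeholder
    headers = {'User-Agent': 'Mozilla/5.0'}
    timeout = 10
    loop_time = 3600   # polling interval used in Example 3
    db = 'example_db'  # database name used in Example 4


class URLDealer:
    """Thin wrapper over requests, exposing only the methods the examples call."""

    def __init__(self, url, headers=None, timeout=10):
        self.url = url
        self.headers = headers
        self.timeout = timeout

    def get_response(self):
        # Plain GET; the real projects may add retries or error handling.
        return requests.get(self.url, headers=self.headers, timeout=self.timeout)

    def get_response_text(self):
        return self.get_response().text

    def get_soup(self):
        # Used by the epidemic-page examples (3, 4, 6, and 11).
        return BeautifulSoup(self.get_response().text, 'html.parser')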
Example no. 2
def spider(self):
    try:
        url = self.url_queue.get()
        url_dealer = URLDealer(url, Setting.headers, Setting.timeout)
        data = url_dealer.get_response().text
        data_parser = DataParser(data)
        followers_list = data_parser.parse_follower_data()
        saver = Saver(followers_list, self.username)
        saver.save_followers_info()
    except Exception as e:
        print(str(e))
Example no. 3
def main():
    while True:
        # Parse the base page
        base_info_url_dealer = URLDealer(Setting.base_url, Setting.timeout)
        base_info_soup = base_info_url_dealer.get_soup()
        plague_info = PlagueInfo(base_info_soup)

        mysql_saver = MySQLSaver(plague_info)
        mysql_saver.save_info_once()

        time.sleep(Setting.loop_time)
Example no. 4
def get_info_once(count, time_now):
    # Parse the base page
    base_info_url_dealer = URLDealer(Setting.base_url, Setting.timeout)
    base_info_soup = base_info_url_dealer.get_soup()
    plague_info = PlagueInfo(base_info_soup)

    # Get the nationwide info, per-province info, and the image URL
    detailed_info = plague_info.detailed_info()
    total_info = plague_info.total_info(detailed_info)
    # img_url = plague_info.img_url()

    # Save the nationwide and per-province info
    mySQL = MySQLSaver(Setting.db)
    save_total_info(mySQL, total_info)
    save_detailed_info(mySQL, detailed_info, count)
    mySQL.close()
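Note that Examples 3 and 4 use MySQLSaver in two different ways: Example 3 wraps a PlagueInfo object and calls save_info_once(), while Example 4 wraps a database name from Setting.db, passes the saver to save_total_info/save_detailed_info helpers, and closes it explicitly. These look like two revisions of the same saver rather than one consistent API, so the two snippets should not be combined as-is.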
Example no. 5
def follower_count(self):
    """
    Get the total follower count.
    :return: the total number of followers
    """
    url = Setting.url.format(self.name, 20, 20)
    data_str = URLDealer(url, Setting.headers, Setting.timeout).get_response().text
    data = json.loads(data_str)
    return data['paging']['totals']
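The data['paging']['totals'] lookup implies the follower endpoint returns JSON with at least a paging.totals field. The shape below is a guess inferred from that lookup and from parse_follower_data() needing a list of followers, not the real API payload.

# Hypothetical response shape; only the 'paging'/'totals' path is taken from the code above.
sample_response = {
    'paging': {'totals': 1024},   # total follower count read by follower_count()
    'data': [],                   # follower entries presumably live under a key like this
}
assert sample_response['paging']['totals'] == 1024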
Example no. 6
def test_total_info():
    soup = URLDealer(Setting.base_url, Setting.timeout).get_soup()
    plague_info = PlagueInfo(soup)
    detailed_info = plague_info.detailed_info()
    return plague_info.total_info(detailed_info)
Example no. 7
def test_china_info():
    text = URLDealer(Setting.base_url, Setting.timeout).get_response_text()
    plague_info = PlagueInfo(text)
    return plague_info.china_info()
Example no. 8
def test_get_response_str():
    response_text = URLDealer(Setting.base_url, Setting.timeout).get_response().text
    print(response_text)
Example no. 9
def test_get_response():
    test_url = Setting.url.format('mu-yi-81-66', 100, 20)
    url_dealer = URLDealer(test_url, Setting.headers, Setting.timeout)
    response = url_dealer.get_response()
    return type(response.text)
Example no. 10
def spider(self):
    url = self.url_queue.get()
    url_dealer = URLDealer(url, Setting.headers, Setting.timeout)
    data = url_dealer.get_response().text
    data_parser = DataParser(data, self.follower_queue)
    data_parser.parse_follower_data()
Example no. 11
def test_world_info():
    soup = URLDealer(Setting.base_url, Setting.timeout).get_soup()
    plague_info = PlagueInfo(soup)
    return plague_info.world_info()
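DataParser is also used in two ways: Example 2 takes the return value of parse_follower_data() as a list, while Examples 1 and 10 pass a queue and rely on the parser filling it. The sketch below is one way to satisfy both call sites; it assumes the response text is JSON and that the follower entries sit under a 'data' key, neither of which the snippets confirm.

# Sketch only: a DataParser compatible with both call styles seen above.
import json


class DataParser:
    def __init__(self, data, followers_queue=None):
        self.data = data                       # raw response text
        self.followers_queue = followers_queue

    def parse_follower_data(self):
        # Assumed JSON layout: follower dicts under a 'data' key.
        followers = json.loads(self.data).get('data', [])
        if self.followers_queue is not None:
            # Examples 1 and 10 consume followers from this queue.
            for follower in followers:
                self.followers_queue.put(follower)
        # Example 2 uses the returned list directly.
        return followers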