def get_phone_table():
    mysql = SQL.save_mysql()
    phone_id_list = list(mysql.select_all_table('jd'))
    # Iterate over a copy: removing entries from the list being iterated
    # would silently skip the element that follows each removal.
    for table in phone_id_list[:]:
        if table[0] in ignoring_table:
            phone_id_list.remove(table)
    phone_id_list.pop()  # drop the last table entry
    return phone_id_list
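# A minimal usage sketch, assuming this module also defines `ignoring_table`
# and imports the `SQL` helper (get_phone_table above relies on both):
#
#     for table in get_phone_table():
#         print(table[0])   # the first column of each row is the table name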
import time

import requests
from bs4 import BeautifulSoup

from spider.get_comment import SQL
from spider.get_comment import getprice

requests.packages.urllib3.disable_warnings()

headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'
                  ' AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36'
}

for i in range(128383, 140432, 2):
    sss = SQL.save_mysql()
    link = sss.read_link(i)
    try:
        print(i)
        for row in link:
            url1 = row[0]
            print(url1)
            # url1 is stored without a scheme (hence the "http:" prefix added below);
            # the sku id is the file name with its ".html" suffix removed. Note that
            # strip(".html") drops those characters from both ends, which only works
            # here because the sku id is purely numeric.
            phone_id = url1.split('/')[-1].strip(".html")
            # phone_id = re.findall(r"\d+", url1)
            print(phone_id)
            url = "http:" + url1
            print(url)
            price = getprice.jd_price(url)
            if price == KeyError:  # jd_price appears to signal a missing price with KeyError
                price = 0
            try:
# coding:utf8
import json

import requests

from spider.get_comment import SQL
from spider.get_comment.getprice import get_html

requests.packages.urllib3.disable_warnings()

for i in range(1, 21300, 2):
    sql = SQL.save_mysql()
    try:
        res = sql.read_phone_price(i)
        # print(res)
        # print("index: " + str(i))
        if int(res[0][1]) == 0:
            # The price column is still 0, so this row has not been filled in yet.
            print("index: " + str(i))
            try:
                # JD price API for a single sku id
                price_url = "https://p.3.cn/prices/mgets?skuIds=J_" + str(res[0][0])
                try:
                    content = get_html(price_url)
                except Exception:
                    # retry once if the first request fails
                    content = get_html(price_url)
                result = json.loads(content)
                record = result[0]
                price = int(float(record["p"]))
                print(price)
                sql.update_phone_price(i, price)
            except KeyError:
                print('KeyError')
    except Exception as e:
        # Assumption: the original handler for this outer try is not in the excerpt;
        # a broad catch keeps the loop running when a row is missing or malformed.
        print(e)
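# Hedged refactoring sketch of the parsing step above: the price endpoint returns
# a JSON array whose first element carries the price under the "p" key (exactly
# what the loop above assumes). `parse_price` is a hypothetical helper, and the
# payload in the trailing comment is illustrative, not a recorded response.
def parse_price(content):
    """Turn the raw JSON body of the price API into an integer price."""
    record = json.loads(content)[0]
    return int(float(record["p"]))

# e.g. parse_price('[{"id": "J_12345", "p": "1999.00"}]') returns 1999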
def get_comments(phones_id, i):
    mysql = SQL.save_mysql()  # get a handle to the database operations
    comment = mysql.read_comment_phone(phones_id, i)
    # Return the first stored comment text for this phone, if any.
    for ob in comment:
        return str(ob[0])
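# Hedged usage sketch: fetch the first stored comment for one phone. The sku id
# and index below are illustrative values, not real data; `SQL` is the project's
# own database helper imported in this module.
if __name__ == '__main__':
    print(get_comments('12345', 0))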