import datetime

from MySqlConn import Mysql


def insert_data():
    # Insert one row of (sample) posture statistics into the posture_result table.
    # Acquire a database connection
    mysql = Mysql()
    roll_max = 0.0999989
    roll_min = 0.050000374106515644
    roll_mean = 0.07501550874813788
    roll_count = 100
    pitch_max = 0.09999891074324113
    pitch_min = 0.050000374106515644
    pitch_mean = 0.07501550874813788
    pitch_count = 1000
    deviation_max = 0.09999891074324113
    deviation_min = 0.050000374106515644
    deviation_mean = 0.07501550874813788
    deviation_count = 100
    total_score = 87.76071883296575
    insert_sql = ("INSERT INTO posture_result(update_time,delta_roll_max,delta_roll_min,"
                  "delta_roll_mean,delta_roll_count,delta_pitch_max,delta_pitch_min,"
                  "delta_pitch_mean,delta_pitch_count,delta_deviation_max,delta_deviation_min,"
                  "delta_deviation_mean,delat_deviation_count,score) "
                  "VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
    update_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S%f")
    mysql.insertMany(
        insert_sql,
        [(update_time, roll_max, roll_min, roll_mean, roll_count,
          pitch_max, pitch_min, pitch_mean, pitch_count,
          deviation_max, deviation_min, deviation_mean, deviation_count,
          total_score)])
    # Release the connection
    mysql.dispose()
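# The Mysql helper imported from MySqlConn is not shown in these scripts. Below is
# a minimal sketch of the interface they rely on (insertMany + dispose), assuming
# pymysql and hypothetical connection settings; the real MySqlConn may well differ
# (for example, it may hand out connections from a pool).
import pymysql


class Mysql(object):
    def __init__(self):
        # Hypothetical credentials, for illustration only.
        self._conn = pymysql.connect(host="127.0.0.1", user="root",
                                     password="secret", db="test",
                                     charset="utf8")
        self._cursor = self._conn.cursor()

    def insertMany(self, sql, values):
        # Parameterized bulk insert: values is a list of tuples/lists, one per row.
        count = self._cursor.executemany(sql, values)
        self._conn.commit()
        return count

    def dispose(self):
        # Close the cursor and give the connection back.
        self._cursor.close()
        self._conn.close()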
import json
import urllib
from urllib import urlencode  # Python 2; on Python 3 use urllib.parse / urllib.request

from MySqlConn import Mysql

# Requires a module-level pic_id counter and a getJsonContent() helper (defined
# elsewhere in the original script) that extracts the picture URLs from the API result.


def request1(appkey, movieName, m="GET"):
    values = list()
    url = "http://op.juhe.cn/onebox/movie/video"
    params = {
        "key": appkey,     # application APPKEY (shown on the application detail page)
        "dtype": "json",   # response format, xml or json; defaults to json
        "q": movieName,    # movie/TV title to search for
    }
    params = urlencode(params)
    if m == "GET":
        f = urllib.urlopen("%s?%s" % (url, params))
    else:
        f = urllib.urlopen(url, params)
    content = f.read()
    res = json.loads(content)
    if res:
        error_code = res["error_code"]
        if error_code == 0:
            # Request succeeded
            mysql = Mysql()
            url_list = getJsonContent(res["result"])
            # print((res["result"])['cover'])
            global pic_id
            for i in range(len(url_list)):
                values.append([pic_id, url_list[i]])
                pic_id += 1
            mysql.insertMany(
                'INSERT IGNORE INTO picture(picture_id,picture_url) values(%s,%s)',
                values)
            mysql.dispose()
        else:
            print("%s:%s" % (res["error_code"], res["reason"]))
    else:
        print("request api error")
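# A minimal driver for request1(), assuming a valid Juhe app key is passed on the
# command line; the movie title here is just an example. pic_id must exist at
# module level before request1() runs, since the function updates it via `global`.
if __name__ == "__main__":
    import sys
    pic_id = 0  # global picture-id counter consumed by request1()
    request1(sys.argv[1], "Inception")  # appkey, movie title to search for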
# coding:utf-8
import requests
import re

from MySqlConn import Mysql

# Acquire a database connection
mysql = Mysql()

# Scrape the first two listing pages (50 threads per page) of the Tongji
# University (同济大学) Tieba board and store each thread URL.
for i in range(2):
    page = i * 50
    page_url = "http://tieba.baidu.com/f?kw=%E5%90%8C%E6%B5%8E%E5%A4%A7%E5%AD%A6&ie=utf-8&pn=" + str(page)
    r = requests.get(page_url)
    ret = re.findall(r'(<a href="/p/\d+)', r.text)
    print(len(ret))
    values = list()
    topic = '同济大学'
    for j in ret:
        baidu_id = 'http://tieba.baidu.com'
        baidu_id += j[9:]  # strip the leading '<a href="' to keep only /p/<id>
        values.append([baidu_id, topic])
    mysql.insertMany(
        'INSERT IGNORE INTO baidu_info(baidu_id,topic) values(%s,%s)',
        values)

# Release the connection
mysql.dispose()
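# Illustration of what the regex above captures, using a hypothetical anchor tag.
# Each match begins with the 9 characters '<a href="', so slicing with [9:]
# leaves the relative post path, which is then appended to the site root:
sample = '<a href="/p/5678901234" title="example thread">'
match = re.search(r'(<a href="/p/\d+)', sample)
print('http://tieba.baidu.com' + match.group(1)[9:])
# -> http://tieba.baidu.com/p/5678901234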