# Scrape the stock-plate table from `target_url` using an already-constructed
# selenium `driver`, build one row per <tr>, and bulk-insert into stock_plate.
# The driver is always released in `finally`; any failure is printed (best-effort
# batch job — original behavior preserved).
try:
    driver.get(target_url)
    html = driver.page_source
    # BUG FIX: original read "soup = soup = BeautifulSoup(...)" — duplicated
    # assignment removed (behavior unchanged).
    soup = BeautifulSoup(html, 'lxml')

    # The second <tbody> on the page holds the plate listing
    # (index 1 — presumably the first tbody is a header/other table; confirm
    # against the live page if the site layout changes).
    tbody = soup.find_all('tbody')[1]
    tr_list = tbody.find_all('tr')

    row_data = []
    for tr in tr_list:
        tmp_row = []
        td_list = tr.find_all("td")
        # plate_code: URL fragment after '#' in the first cell's link.
        a = td_list[0].a.attrs
        tmp_row.append(a['href'].split('#')[1])
        # first_stock_code: second-to-last path segment of the 8th cell's link.
        tmp_row.append(td_list[7].a.attrs['href'].split('/')[-2])
        # Remaining columns: raw cell text, in table order.
        for td in td_list:
            tmp_row.append(td.get_text())
        # NOTE: Python 2 idiom — encode name columns to utf-8 byte strings;
        # the subsequent .split(" ") on tmp_row[9] relies on Py2 str semantics.
        tmp_row[2] = tmp_row[2].encode('utf-8')
        tmp_row[9] = tmp_row[9].encode('utf-8')
        tmp_row[9] = tmp_row[9].split(" ")[0]
        tmp_row.append('test')
        tmp_row.append(datetime.datetime.now())
        row_data.append(tmp_row)

    print(row_data)

    # FIX: close the DB handle even when the insert raises; previously a
    # failing excutemany() leaked the connection (only driver.quit() ran).
    db = DB()
    try:
        print(db.excutemany(SQLMgr.insert_many(), row_data))
    finally:
        db.close()
except Exception as e:
    # Top-level boundary of a best-effort scraper job: report and fall through
    # to driver cleanup (original behavior).
    print(e)
finally:
    driver.quit()
@staticmethod
def insert_many():
    """Return the parameterized INSERT statement for stock_plate rows.

    15 ``%s`` placeholders, matching the 15-element rows assembled by the
    plate scraper (2 link-derived codes + 11 cell texts + remark + TIME).
    """
    sql = '''
    INSERT INTO stock_plate(plate_code,first_stock_code,
    plate_name,stock_cnt,price_avg,wave_cnt,wave_range,vol,amount,
    first_stock_name, first_stock_price, first_stock_wave_cnt,
    first_stock_wave_range,remark,TIME)
    VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
    '''
    return sql

@staticmethod
def get_all_kline_info(now):
    """Return a SELECT for all daily (date_type=0, ex_type=1) kline rows
    on trade date ``now``.

    WARNING: ``now`` is interpolated directly into the SQL string; this is
    only safe because callers pass an internally generated date string.
    Do NOT pass user-supplied input here (SQL-injection risk) — switching
    callers to parameterized execution would be the proper fix.
    """
    sql = '''
    select * from stock_kline where tr_date='%s' and date_type=0 and ex_type=1
    ''' % now
    return sql

@staticmethod
def insert_stock_xg_many():
    """Return the parameterized INSERT statement for stock_xg rows
    (7 ``%s`` placeholders)."""
    sql = '''
    insert into stock_xg(code, tr_date, date_type, ex_type, xg, remark, time)
    values(%s, %s, %s, %s, %s, %s, %s)
    '''
    return sql

if __name__ == "__main__":
    # Smoke test: insert one dummy stock_xg row.
    # FIX: parenthesized print() — identical output for a single argument on
    # both Python 2 and Python 3 (original used the Py2-only statement form).
    db = DB()
    print(db.excutemany(SQLMgr.insert_stock_xg_many(), [('11', 1, 1, 1, '123', '111', '2016-11-11'), ]))
    db.close()