def show_user(username):
    # NOTE: the table name comes straight from user input, so this is
    # injectable; validate/allow-list `username` before building the SQL.
    sql = "select * from " + username
    details = db.query_data(sql)
    return render_template('show_user_info_v2.html',
                           username=username,
                           datas=details)
def crawl_district_list():
    global city_code_list
    city_code_list = query_data(2)
    try:
        page_urls = generate_page_url()
        for page_url in page_urls:
            print(page_url)
            response = request_util(page_url, 'gbk')
            soup = BeautifulSoup(response, "lxml")
            info_list = soup.find('table', class_="countytable").find_all(
                "tr", class_="countytr")
            for item in info_list:
                # only rows whose first cell links to a detail page carry a district
                if item.contents[0].find('a', {'href': True}):
                    code = item.contents[0].a.get_text()
                    name = item.contents[1].a.get_text()
                    parent_code, parent_name = get_city_code(code)
                    level = 3  # district level in the region hierarchy
                    print(code, name, parent_code, parent_name)
                    insert_data(code, name, parent_code, parent_name, level)
    except Exception:
        print(traceback.format_exc())
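# `request_util` isn't defined in this section. A minimal sketch of the fetch
# helper the crawlers appear to assume, built on `requests` (the timeout and
# exact behavior are assumptions, not the original implementation):
import requests

def request_util(url, encoding='gbk', timeout=10):
    # Fetch the page and decode with the caller-supplied encoding
    # (stats.gov.cn serves these pages as gbk/gb2312).
    resp = requests.get(url, timeout=timeout)
    resp.raise_for_status()
    resp.encoding = encoding  # override requests' guessed encoding before .text
    return resp.text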
def user_login():
    verify_text = request.form.get('verify_text', '')
    captcha_text = session['verify']
    # captcha check is case-insensitive
    if verify_text.lower() != captcha_text.lower():
        flash(u'验证码错误')  # "captcha mismatch"
        return login()

    name = request.form.get('name')
    password = request.form.get('password')
    # NOTE: interpolating user input into SQL is injectable; prefer
    # parameter binding (see the sketch below).
    sql = f"""
    select * from user
    where ( account='{name}' or email='{name}' )
      and password='******'
    """
    res = db.query_data(sql)
    if not res:
        flash(u'账号或密码错误')  # "wrong account or password"
        return login()
    details = res[0]
    return redirect(url_for('user_info.show_user', username=details['account']))
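# The f-string SQL above is injectable. A hedged sketch of the same lookup
# with driver-side parameter binding, assuming a query_data(sql, args)
# variant that forwards args to cursor.execute (the two-argument signature
# is an assumption; see the pymysql sketch at the end of this section):
def lookup_user(db, name, password):
    sql = """
    select * from user
    where (account = %s or email = %s) and password = %s
    """
    # placeholders are filled in by the driver, so quotes inside `name`
    # cannot break out of the string literal
    return db.query_data(sql, (name, name, password))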
def crawl_community_list():
    global street_code_list
    street_code_list = query_data(4)
    try:
        page_urls = generate_page_url()
        for page_item in page_urls:
            page_url = page_item['page_url']
            print(page_url)
            # a few pages need a non-default encoding; those are fetched
            # from the special-case table instead
            if page_url in special_url_conn:
                for item in special_url:
                    response = request_util(item['page_url'], item['encoding'])
            else:
                response = request_util(page_url, 'gbk')
            soup = BeautifulSoup(response, "lxml")
            info_list = soup.find('table', class_="villagetable").find_all(
                "tr", class_="villagetr")
            for item in info_list:
                code = item.contents[0].get_text()
                name = item.contents[2].get_text()
                parent_code, parent_name = get_street_code(code)
                level = 5  # village/community level
                print(code, name, parent_code, parent_name)
                insert_data(code, name, parent_code, parent_name, level)
    except Exception:
        print(traceback.format_exc())
def crawl_street_list():
    global district_code_list
    district_code_list = query_data(3)
    try:
        page_urls = generate_page_url()
        for page_item in page_urls:
            page_url = page_item['page_url']
            print(page_url)
            if page_url in special_url_conn:
                for item in special_url:
                    response = request_util(item['page_url'], item['encoding'])
            else:
                response = request_util(page_url, 'gbk')
            soup = BeautifulSoup(response, "lxml")
            info_list = soup.find('table', class_="towntable").find_all(
                "tr", class_="towntr")
            for item in info_list:
                if item.contents[0].find('a', {'href': True}):
                    code = item.contents[0].a.get_text()
                    name = item.contents[1].a.get_text()
                    parent_code, parent_name = get_district_code(code)
                    level = 4  # town/street level
                    print(code, name, parent_code, parent_name)
                    insert_data(code, name, parent_code, parent_name, level)
    except Exception:
        print(traceback.format_exc())
def get_data2():
    sql = """
    select sepal_length, sepal_width
    from iris_data;
    """
    datas = db.query_data(sql)
    datas = [(data['sepal_length'], data['sepal_width']) for data in datas]
    return datas
def find_new_entities():
    entities = query_data("find_associations_to_populate")
    if not entities:
        print("No new entities to find!")
        return

    # map wiki page id -> our db id so results can be keyed back
    pages = dict()
    for e in entities:
        db_id = str(e.get('id'))
        wiki_id = str(e.get('wiki_id'))
        pages[wiki_id] = db_id

    new_entities = dict()
    # query the Wikipedia API a few page ids at a time
    for chunk in divide_chunks([*pages], 4):
        pages_info = fetch_wikipedia_pages_info(chunk)
        for wiki_id in pages_info.keys():
            new_entities[pages[str(wiki_id)]] = pages_info.get(wiki_id).get(
                'associations')
        time.sleep(0.020)  # small pause between batches
    return new_entities
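# divide_chunks isn't defined in this section; a minimal sketch of the
# batching helper the loop above assumes (name taken from the call site,
# implementation assumed):
def divide_chunks(items, n):
    # Yield successive n-sized slices of items.
    for i in range(0, len(items), n):
        yield items[i:i + n]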
def get_data():
    sql = """
    select date, pv, uv
    from pvuv;
    """
    datas = db.query_data(sql)
    xdatas = [data['date'].strftime('%Y-%m-%d') for data in datas]
    ydatas = [data['pv'] for data in datas]
    return xdatas, ydatas
def show_user(user_id):
    # NOTE: interpolating user_id directly is injectable; validate it is an
    # integer (or use a parameterized query) before running this.
    sql = f"""
    select * from user
    where id = {user_id};
    """
    datas = db.query_data(sql)
    user = datas[0]
    return render_template("show_user.html", user=user)
def get_bar() -> Bar:
    sql = """
    select sex, count(1) as cnt
    from user
    group by sex;
    """
    datas = db.query_data(sql)
    c = (
        Bar()
        .add_xaxis([data["sex"] for data in datas])
        .add_yaxis("数量", [data["cnt"] for data in datas])
        .set_global_opts(
            title_opts=opts.TitleOpts(title="Bar-基本示例", subtitle="我是副标题"))
    )
    return c
def weather():
    if request.method == "GET":
        sql = '''
        SELECT * FROM weather
        '''
        res = db.query_data(sql)
        return jsonify(month=[x['month'] for x in res],
                       evaporation=[x['evaporation'] for x in res],
                       precipitation=[x['precipitation'] for x in res])
def get_pie() -> Pie:
    sql = """
    select sex, count(1) as cnt
    from user
    group by sex;
    """
    datas = db.query_data(sql)
    c = (
        Pie()
        .add("", [(data["sex"], data["cnt"]) for data in datas])
        .set_global_opts(title_opts=opts.TitleOpts(title="Pie-基本示例"))
        .set_series_opts(label_opts=opts.LabelOpts(formatter="{b}: {c}"))
    )
    return c
def get_line() -> Line:
    sql = """
    select date, pv, uv
    from pvuv;
    """
    datas = db.query_data(sql)
    c = (
        Line()
        .add_xaxis([data["date"] for data in datas])
        .add_yaxis("pv", [data["pv"] for data in datas])
        .add_yaxis("uv", [data["uv"] for data in datas])
        .set_global_opts(title_opts=opts.TitleOpts(title="Line-基本示例"))
    )
    return c
def get_bar() -> Bar:
    sql = """
    select sex, count(1) as cnt
    from user
    group by sex
    """
    datas = db.query_data(sql)
    c = (
        Bar()
        .add_xaxis([data["sex"] for data in datas])
        .add_yaxis("数量", [data["cnt"] for data in datas])
        .set_series_opts(label_opts=opts.LabelOpts(is_show=False))
        .set_global_opts(title_opts=opts.TitleOpts(title="Bar-基本示例"))
    )
    return c
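# These builders all return a pyecharts chart object; rendering one to a
# standalone HTML page is a single call (the filename is illustrative):
get_bar().render("bar.html")
# In a Flask handler, c.render_embed() returns the chart HTML as a string instead.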
def generate_excel(data_dir, fname):
    fpath = os.path.join(data_dir, fname)
    workbook = xlwt.Workbook(encoding="utf-8")
    worksheet = workbook.add_sheet("pvuv")
    # header row ("日期" = date)
    for idx, name in enumerate(["日期", "pv", "uv"]):
        worksheet.write(0, idx, name)
    datas = db.query_data("select * from pvuv")
    for row, data in enumerate(datas):
        for col, kv in enumerate(data.items()):
            worksheet.write(row + 1, col, kv[1])
    workbook.save(fpath)
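# Usage is a single call; xlwt writes the legacy .xls format, so the
# directory and filename here are illustrative:
generate_excel("./data", "pvuv.xls")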
def show_current_market():
    global conn
    res_list = db.query_data(conn)  # `conn` is module-level state set up elsewhere
    res_list = spider.get_markets(res_list)
    # columns: code, name, cost price, current price, change %, shares held
    table = PrettyTable(["代码", "名称", "成本价", "现价", "涨跌幅", "持有份额"])
    for stock in res_list:
        table.add_row([
            stock["code"],
            stock["name"],
            stock["cost_price"],
            format.format_up_down_content(stock["up_down"], stock["current_price"]),
            format.format_up_down_content(stock["up_down"],
                                          "{:.2%}".format(stock["up_down_rate"])),
            stock["shares_held"],
        ])
    print(table)
def do_register_user():
    verify_text = request.form.get('verify_text', '')
    captcha_text = session['verify']
    if verify_text.lower() != captcha_text.lower():
        flash(u'验证码错误')  # "captcha mismatch"
        return register()

    account = request.form.get('account')
    password = request.form.get('password')
    email = request.form.get('email')

    # check the account is available
    sql = f"""
    select * from user where account='{account}'
    """
    res = db.query_data(sql)
    if res:
        flash(u'账号已存在')  # "account already exists"
        return register()

    # check the email is available
    sql = f"""
    select * from user where email='{email}'
    """
    res = db.query_data(sql)
    if res:
        flash(u'邮箱已存在')  # "email already exists"
        return register()

    # otherwise insert into the database
    # NOTE: f-string SQL is injectable; parameter binding is safer
    sql = f'''
    insert into user (account, password, email)
    values ('{account}', '{password}', '{email}')
    '''
    db.insert_or_update_data(sql)
    db.new_database(account)
    return render_template('login_v2.html')
def air_map():
    if request.method == "GET":
        sql = '''
        SELECT * FROM geoCoordMap
        '''
        res = db.query_data(sql)
        # collect the rows into a list of dicts so the result can be
        # serialized as JSON
        citys = []
        for r in res:
            city_map = {}
            city_map['name'] = r['name']
            city_map['map'] = r['map']
            city_map['value'] = r['value']
            citys.append(city_map)
        return jsonify(citys)
def crawl_city_list():
    global province_code_list
    province_code_list = query_data(1)
    response = request_util(city_url, 'gb2312')
    try:
        soup = BeautifulSoup(response, "lxml")
        info_list = soup.find('table', class_="provincetable").find_all(
            "tr", class_="provincetr")
        for item in info_list:
            for c_item in item.find_all("a"):
                # each link looks like "<code>.html"; the stem is the code
                url_city_code = c_item.attrs['href'].split(".")[0]
                d_city_url = common_url_prefix + url_city_code + ".html"
                print(d_city_url)
                crawl_city_detail(d_city_url)
    except Exception:
        print(traceback.format_exc())
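# The get_*_code parent lookups aren't shown. Division codes on stats.gov.cn
# are hierarchical prefixes (province 2 digits, city 4, district 6, street 9),
# so a plausible sketch resolves a district's parent city by its 4-digit
# prefix against the cached list (the 'code'/'name' field names are
# assumptions):
def get_city_code(code):
    prefix = str(code)[:4]
    for city in city_code_list:
        if str(city['code']).startswith(prefix):
            return city['code'], city['name']
    return None, None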
import db

# query
query_sql = "select * from user"
data = db.query_data(query_sql)
for i in data:
    print(i)

# insert
insert_sql = """
insert into user (name, sex, age, email)
values ('mayi', 'man', 20, '*****@*****.**')
"""
db.insert_or_update_data(insert_sql)

query_sql = "select * from user"
data = db.query_data(query_sql)
for i in data:
    print(i)

# update
update_sql = "update user set name='damayi' where id=3"
db.insert_or_update_data(update_sql)

query_sql = "select * from user"
data = db.query_data(query_sql)
for i in data:
    print(i)
def show_users():
    sql = """
    select id, name from user;
    """
    datas = db.query_data(sql)
    return render_template("show_users.html", datas=datas)
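# The shared db module isn't shown. A minimal sketch of query_data and
# insert_or_update_data on top of pymysql with a DictCursor, which returns
# rows as dicts the way the handlers above expect (the connection parameters
# and the optional args argument are assumptions):
import pymysql

conn = pymysql.connect(host="127.0.0.1", user="root", password="password",
                       db="test", charset="utf8mb4",
                       cursorclass=pymysql.cursors.DictCursor)

def query_data(sql, args=None):
    # Run a SELECT and return all rows as a list of dicts; args, if given,
    # is forwarded to the driver for parameter binding.
    with conn.cursor() as cursor:
        cursor.execute(sql, args)
        return cursor.fetchall()

def insert_or_update_data(sql, args=None):
    # Run an INSERT/UPDATE/DELETE and commit.
    with conn.cursor() as cursor:
        cursor.execute(sql, args)
    conn.commit()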