import datetime

import utils  # project helper module providing get_price, is_ticker_dead, printd


def get_trans(price_map, tickers, date, holding_period):
    trans = []
    for ticker in tickers:
        # buy leg: look up the price for `ticker` around `date`
        bdate, bprice = utils.get_price(price_map, ticker, date)
        if bdate is None or bprice is None:
            continue
        # sell leg: target date is the buy date plus the holding period
        tdate = (datetime.datetime.strptime(bdate, '%Y-%m-%d') + holding_period).strftime('%Y-%m-%d')
        sdate, sprice = utils.get_price(price_map, ticker, tdate)
        if sdate is None or sprice is None:
            if utils.is_ticker_dead(price_map, ticker):
                # ticker stopped trading before the sell date; record a zero sell price
                utils.printd('!! %s dead' % ticker)
                sdate, sprice = tdate, 0
            else:
                continue
        trans.append([ticker, bdate, bprice, sdate, sprice])
    return trans
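# --- Hypothetical usage sketch, not part of the original code. It assumes the
# --- `price_map` structure that utils.get_price expects; `load_price_map` is an
# --- illustrative placeholder name, and `holding_period` is a datetime.timedelta.
if __name__ == '__main__':
    price_map = utils.load_price_map('prices.csv')  # placeholder loader, name assumed
    trades = get_trans(price_map, ['AAPL', 'MSFT'], '2020-01-02',
                       holding_period=datetime.timedelta(days=30))
    for ticker, bdate, bprice, sdate, sprice in trades:
        # each entry is [ticker, buy_date, buy_price, sell_date, sell_price]
        print(ticker, bdate, bprice, sdate, sprice)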
def get_city_house(user_in_nub, city):
    # creation timestamp
    create_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
    province_dict = get_province_dict()
    city_code_dict = get_citycode_dict()
    city_name_dict = city_dict()
    # open the database connection
    db = pymysql.connect(host='172.20.206.28', port=3306, user='******', password='******',
                         db='autodata-roomprice', charset='utf8')
    cursor = db.cursor()
    # generate the listing URLs for this city
    result_sum = []
    for i in generate_allurl(user_in_nub, city):
        print(i)
        # property names and detail-page links
        contents, urls = get_allurl(i)
        # scrape the data for each property
        for content in contents:
            results = []
            re_get = content[0]
            # data source
            source = '安居客'
            # property name
            name = content[1]
            detail, longitude, latitude = open_url(re_get)
            city_name = list(city_name_dict.get(city))[0]
            province = list(province_dict.get(city_name, ''))[0]
            district = content[2]
            city_code = city_code_dict.get(city_name, '')
            province_code = city_code_dict.get(province, '')
            district_code = get_district_code(city_name, district, city_code_dict)
            for ele in detail:
                try:
                    house_type = ele[0]
                    area = str(ele[1]).replace('m', '')
                    total_price = int(ele[2]) * 10000
                    count = ele[3]
                    result = [name, longitude, latitude, province, city_name, district,
                              house_type, area, total_price, source, create_time, count]
                    results.append(result)
                except Exception:
                    pass
            print(results)
            # update_house_price_db(db, cursor, results, table='''house_price_yyh''')
            try:
                average, metre_average, area_average = get_price(results)
                result_sum.append([name, longitude, latitude, province, city_name, district,
                                   province_code, city_code, district_code, average,
                                   metre_average, area_average, create_time, source])
            except Exception:
                pass
    # update_community_db(db, cursor, result_sum, table='''community_yyh_tmp''')
    print(result_sum)
    # close the database connection
    db.close()
def post_payment():
    """ processes form input for url registration """
    content = request.form
    exists = db.session.query(User.id).filter_by(url=content["url"]).scalar() is not None
    if exists:
        row = User.query.filter(User.url == content["url"]).first()
        result = row2dict(row)
        return json.dumps({
            'status': 'Fail',
            'reason': 'This URL has already been registered',
            'data': result['address']
        }), 500
    else:
        pay_id = get_payment_id()
        try:
            user = User(address=content["address"],
                        payment_id=pay_id,
                        url=content["url"],
                        message=content["message"],
                        turtlehash=content["hash"],
                        price=get_price(content["url"]))
            db.session.add(user)
            db.session.commit()
        except Exception as e:
            app.logger.info(traceback.format_exc())
            return json.dumps({
                'status': 'Fail',
                'reason': 'Your transaction could not be processed'
            }), 500
        return json.dumps({
            'status': 'Success',
            'payment_id': pay_id,
            'price': get_price(content["url"])
        }), 200
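# Hypothetical client-side sketch (illustrative only): the route path is assumed,
# while the form fields match exactly what post_payment reads from request.form.
#
#     import requests
#     resp = requests.post('http://localhost:5000/payment', data={
#         'url': 'example.org', 'address': 'TRTL...', 'message': 'hi', 'hash': '...'
#     })
#     print(resp.status_code, resp.json())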
from datetime import datetime

import ut  # project helper module providing get_book and get_price


def add_dp():
    book = ut.get_book()
    asks = book[0]
    bids = book[1]
    # total volume on each side of the order book
    total_ask = 0
    for ask in asks:
        total_ask += ask[1]
    total_bid = 0
    for bid in bids:
        total_bid += bid[1]
    # print("asks: " + str(total_ask))
    # print("Bids: " + str(total_bid))
    row = [str(datetime.now())]
    row.append(ut.get_price()['last'])
    row.append(total_ask)
    row.append(total_bid)
    # file.write() expects a string, so serialise the row as a CSV line
    fd = open(OUT_FILE, 'a')
    fd.write(','.join(str(field) for field in row) + '\n')
    fd.close()
    return row
def coin_market_price(self, currency, disable_cache=False):
    """
    https://coinmarketcap.com/currencies/<currency>/#markets
    :param currency: name of the cryptocurrency, e.g. bitcoin
    :param disable_cache: disable the response cache; defaults to False
    :return:
    """
    endpoint = u'currencies/{}/#markets'.format(currency)
    response = self.client.raw_request(self.__CMC_BASE_URL, endpoint, None, disable_cache)
    soup = bs(response, u'html.parser')
    table_body = soup.find(u'table', {u'id': u'markets-table'}).find(u'tbody')
    rows = table_body.find_all(u'tr')
    items = []
    for row in rows:
        tds = row.find_all(u'td')
        item = {
            u'exchange': tds[1][u'data-sort'],
            u'pair': tds[2][u'data-sort'],
            u'volume': utils.get_volume(tds[3]),
            u'price': utils.get_price(tds[4]),
            u'percentage': tds[5][u'data-sort']
        }
        items.append(item)
    resp = {
        u'data': items,
        u"metadata": {
            u"num_prices": len(items),
            u"error": None
        }
    }
    return resp
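# Hypothetical call sketch (illustrative only): assumes the surrounding class has
# been instantiated as `api` with a working `client`.
#
#     markets = api.coin_market_price('bitcoin')
#     for m in markets['data'][:5]:
#         print(m['exchange'], m['pair'], m['price'])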
next(lines)  # skip the header row
# column layout:
# 0 equipment ID
# 1 date
# 2 failure reason and details
# 3 handling result
# 4 cost
# 5 person in charge
# 6 reference
# 7 count
for line in lines:
    equipment_pk = line[0]
    date = get_date(line[1])
    reason = line[2]
    result = line[3]
    cost = get_price(line[4])
    manager = get_user_by_name(line[5])
    reference = line[6]
    count = int(line[7])
    try:
        eq = Equipment.objects.get(pk=equipment_pk)
    except Exception:
        # print("Equipment ID ({}) does not exist; this row will not be migrated.".format(equipment_pk))
        continue
    eq_repair_history = EquipmentRepairHistory(equipment=eq, date=date, reason=reason, result=result, cost=cost,
# 11 person in charge
# 12 reference
# 13 count
for line in lines:
    equipment_pk = line[0]
    cpu = line[1]
    mem = line[2]
    hdd = line[3]
    nic = line[4]
    graphic = line[5]
    etc = line[6]
    text = line[7]
    change_date = get_date(line[8])
    change_reason = line[9]
    cost = get_price(line[10])
    change_user = get_user_by_name(line[11])
    reference = line[12]
    count = int(line[13])
    try:
        eq = Equipment.objects.get(pk=equipment_pk)
    except Exception:
        continue
    eq_spec = EquipmentSpec(equipment=eq, cpu=cpu, mem=mem, hdd=hdd, nic=nic, graphic=graphic,
import pymongo

client = pymongo.MongoClient(host='localhost', port=27017)
collection = client['jw3_price']['jw3_price']
# replies containing '收' are buy offers, replies containing '出' are sell offers
pattern_buy = r'(\d+).*?收'
pattern_sale = r'(\d+).*?出'
pattern = '<.*?>'  # strips HTML-tag residue from the reply text
sale_price_list = []
buy_price_list = []
reply_time_list = []
for post in collection.find():
    if '出' in post['reply_content']:
        try:
            text = sub_content(pattern, post['reply_content'])
            price = get_price(pattern_sale, text)
            sale_price_list.append({
                'price': price,
                'time': post['reply_time'].split(' ')[0]
            })
        except Exception as e:
            pass
    if '收' in post['reply_content']:
        try:
            text = sub_content(pattern, post['reply_content'])
            price = get_price(pattern_buy, text)
            buy_price_list.append({
                'price': price,
                'time': post['reply_time'].split(' ')[0]
            })
        except Exception as e:
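# Minimal sketch (an assumption, not the project's actual code) of the two helpers
# the loop above relies on: sub_content strips HTML-tag residue and get_price pulls
# the first number matched by the buy/sell pattern.
import re

def sub_content(pattern, content):
    # drop anything matching the tag pattern, e.g. '<br/>' leftovers
    return re.sub(pattern, '', content)

def get_price(pattern, text):
    # first captured digit group before '出'/'收'; raises AttributeError on no match,
    # which the caller's try/except swallows
    return int(re.search(pattern, text).group(1))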