def load_symbols():
    """JSON endpoint: search collections by name/symbol substring.

    Query-string parameters:
        exchange: optional exchange filter.
        query:    substring matched against ``name`` and ``symbol``.
        exclude:  drop results whose display text contains this substring.

    Returns:
        ``jsonify(result=[{'text': ..., 'value': ...}, ...])``.
    """
    import re
    exchange = request.args.get('exchange', '')
    query = request.args.get('query', '')
    exclude = request.args.get('exclude', '')
    # Escape the user-supplied text so it is matched as a literal
    # substring rather than interpreted as a raw regex (prevents
    # regex injection / ReDoS on this public search endpoint).
    pattern = re.escape(query)
    cond = {'$or': [{'name': {'$regex': pattern}},
                    {'symbol': {'$regex': pattern}}]}
    if exchange:
        cond['exchange'] = exchange
    result = [
        {'text': '{}({})'.format(c.symbol, c.name), 'value': c.symbol}
        for c in Collection.query(cond, {'name': 1, 'symbol': 1})
    ]
    if exclude:
        result = [r for r in result if exclude not in r['text']]
    return jsonify(result=result)
def sync_collections():
    """Upsert a local ``Collection`` record (with a ``trade_day`` count)
    for every collection known to the ybk models.

    ``trade_day`` is the number of stored daily quotes; when none exist
    it is estimated from the offering date instead.
    """
    from ybk.config import setup_config
    from ybk.models import Collection as SrcColl
    from ybk.models import Quote as SrcQuote
    setup_config()
    for src in SrcColl.query():
        print(src.exchange, src.symbol)
        day_count = SrcQuote.count({'exchange': src.exchange,
                                    'symbol': src.symbol,
                                    'type_': '1d'}) + 1
        if day_count == 1:
            # No daily K-line data recorded.  Without an offering date
            # there is nothing to estimate from -- skip the record.
            if not src.offers_at:
                continue
            # Quotes may simply be unavailable for this exchange;
            # approximate the trade-day count from the offering date.
            day_count = (datetime.utcnow() - src.offers_at).days - 1
        Collection({
            'exchange': src.exchange,
            'symbol': src.symbol,
            'name': src.name,
            'trade_day': day_count,
        }).upsert()
def parse():
    """Admin view: show announcement-parsing progress and, for a given
    announcement ``url``, its parsed collections formatted for display.

    Renders ``admin/parse.html`` with ``**locals()``, so every local
    name below is part of the template contract -- do not rename.
    """
    nav = 'parse'
    url = request.args.get('url')
    # Progress counters over offer/result announcements.
    num_parsed = Announcement.count({
        'parsed': True,
        'type_': {'$in': ['offer', 'result']}
    })
    num_total = Announcement.count({'type_': {'$in': ['offer', 'result']}})
    if url:
        announcement = Announcement.query_one({'url': url})
        colls = list(Collection.query({'from_url': url}))
        for coll in colls:
            # Pre-format fields for the template: dates as YYYYMMDD,
            # ratios as percentages, prices with a trailing ".0" stripped.
            if coll.offers_at:
                coll.offers_at2 = coll.offers_at.strftime('%Y%m%d')
            if coll.offer_cash_ratio:
                coll.offer_cash_ratio = '{:2.0f}%'.format(
                    coll.offer_cash_ratio * 100)
            if coll.offer_price:
                coll.offer_price = str(coll.offer_price)
                if coll.offer_price.endswith('.0'):
                    coll.offer_price = coll.offer_price[:-2]
    all_done = num_parsed == num_total
    return render_template('admin/parse.html', **locals())
def history_sysframe(exchange, url, force):
    """Fetch and store daily K-line history for every collection on
    ``exchange`` from a sysframe-style quote server.

    Args:
        exchange: exchange abbreviation used in queries and stored quotes.
        url:      base URL of the quote server.
        force:    when true, re-fetch even if history already exists.
    """
    for c in Collection.query({'exchange': exchange}):
        try:
            if not force and history_exists(c):
                continue
            # Each symbol's full daily history lives in one gzipped file.
            theurl = ('{}/hqApplet/data/day/00{}.day.zip').format(
                url, c.symbol)
            log.info('fetching exchange {} url {}'.format(exchange, theurl))
            r = session.get(theurl, timeout=(5, 10), verify=False)
            if r.status_code != 200:
                log.warning('{}_{}下载失败, 错误码: {}'
                            ''.format(exchange, c.symbol, r.status_code))
                continue
            content = gzip.decompress(r.content)
            # Parse it: a big-endian row count followed by fixed
            # 40-byte records.
            num_rows = struct.unpack('>i', content[0:4])[0]
            kline_days = []
            for i in range(num_rows):
                raw_row = content[4 + 40 * i: 4 + 40 * i + 40]
                row = struct.unpack('>i5f2ifi', raw_row)
                t = row[0]
                # Timestamp digits: YYMMDD[..], year offset 1997.
                # NOTE(review): digits 6-8/8-10 are mapped to minute and
                # second while ``hour`` is never set -- this looks like
                # hour may have been intended; confirm against the feed.
                date = datetime(year=int(str(t)[0:2]) + 1997,
                                month=int(str(t)[2:4]),
                                day=int(str(t)[4:6]),
                                minute=int(str(t)[6:8] or 0),
                                second=int(str(t)[8:10] or 0))
                # row[6] is always 0; meaning unknown.
                q = {
                    'exchange': exchange,
                    'symbol': c.symbol,
                    'quote_type': '1d',
                    'quote_at': date,
                    'open_': row[1],
                    'high': row[2],
                    'low': row[3],
                    'close': row[4],
                    'mean': row[5],
                    'volume': row[7],
                    'amount': row[8],
                    'quantity': row[9],
                }
                if kline_days:
                    # Previous row's close becomes this row's "last close".
                    q['lclose'] = kline_days[-1]['close']
                    save_quotes(q, c, first_quote=False)
                else:
                    save_quotes(q, c, first_quote=True)
                kline_days.append(q)
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; one failing symbol
            # must not abort the whole sync.
            log.exception('{}:{} 抓取失败'.format(c.exchange, c.symbol))
def history_sysframe(exchange, url, force):
    """Download and persist daily K-line data for all collections on
    ``exchange`` served by a sysframe-style quote host at ``url``.

    When ``force`` is false, symbols whose history already exists are
    skipped.  Failures are logged per symbol and do not stop the run.
    """
    for coll in Collection.query({'exchange': exchange}):
        try:
            if not force and history_exists(coll):
                continue
            # Per-symbol gzipped day file.
            file_url = ('{}/hqApplet/data/day/00{}.day.zip').format(
                url, coll.symbol)
            log.info('fetching exchange {} url {}'.format(exchange, file_url))
            resp = session.get(file_url, timeout=(5, 10), verify=False)
            if resp.status_code != 200:
                log.warning('{}_{}下载失败, 错误码: {}'
                            ''.format(exchange, coll.symbol,
                                      resp.status_code))
                continue
            payload = gzip.decompress(resp.content)
            # Layout: big-endian record count, then 40-byte rows.
            total = struct.unpack('>i', payload[0:4])[0]
            days = []
            for idx in range(total):
                offset = 4 + 40 * idx
                fields = struct.unpack('>i5f2ifi',
                                       payload[offset:offset + 40])
                stamp = str(fields[0])
                # Digits: YYMMDD[..] with year offset 1997; trailing
                # digits (when present) feed minute/second.
                quote_at = datetime(year=int(stamp[0:2]) + 1997,
                                    month=int(stamp[2:4]),
                                    day=int(stamp[4:6]),
                                    minute=int(stamp[6:8] or 0),
                                    second=int(stamp[8:10] or 0))
                # fields[6] is always 0; purpose unknown.
                quote = {
                    'exchange': exchange,
                    'symbol': coll.symbol,
                    'quote_type': '1d',
                    'quote_at': quote_at,
                    'open_': fields[1],
                    'high': fields[2],
                    'low': fields[3],
                    'close': fields[4],
                    'mean': fields[5],
                    'volume': fields[7],
                    'amount': fields[8],
                    'quantity': fields[9],
                }
                if days:
                    quote['lclose'] = days[-1]['close']
                    save_quotes(quote, coll, first_quote=False)
                else:
                    save_quotes(quote, coll, first_quote=True)
                days.append(quote)
        except:
            log.exception('{}:{} 抓取失败'.format(coll.exchange,
                                               coll.symbol))
def analysis():
    """Front-end view: per-exchange analysis page (invest-cash history,
    per-symbol increase history, and a cashout prediction window).

    Rendered with ``**locals()``, so the local names below are the
    template contract -- do not rename.
    """
    nav = 'analysis'
    exchange = request.args.get('exchange')
    exs = sorted(list(Exchange.query()), key=lambda x: x.abbr)
    exchanges = [e.abbr for e in exs]
    ratings = [ex.rating for ex in exs]
    if not exchange:
        exchange = exchanges[0]
    # Pick the requested exchange; ``ex`` keeps the loop's last value
    # if no abbr matches.
    ex = None
    for ex in exs:
        if ex.abbr == exchange:
            break
    # invest history
    ih_dates = []
    ih_values_self = []
    ih_values_all = []
    for h in ex.invest_cash_history:
        ih_dates.append(h['date'].strftime('%Y-%m-%d'))
        # Plotted in units of 1e8 (亿).
        ih_values_self.append(h['invest_cash'] / 1e8)
        ih_values_all.append(h['total_cash'] / 1e8)
    # increase history
    inc_days = []
    inc_series = []
    symbols = []
    for symbol, values in ex.increase_history.items():
        # Track the longest series to size the x-axis.
        if len(values) > len(inc_days):
            inc_days = list(range(1, len(values)))
        inc_series.append({
            'name': symbol,
            'type': 'line',
            'data': [v * 100 for v in values],  # as percentages
        })
        symbols.append(symbol)
    # predict
    conf = get_conf(ex.abbr)
    # "today" in UTC+8, truncated to midnight.
    today = datetime.utcnow() + timedelta(hours=8)
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    before = today - timedelta(days=conf['cashout'])
    cashout_at = today + timedelta(days=conf['cashout'])
    colls = list(
        Collection.query({
            'exchange': ex.abbr,
            'offers_at': {'$gte': before}
        }))
    # NOTE(review): attempts to expose ``zip`` to the template through
    # locals(); mutating locals() is not guaranteed to take effect in
    # CPython -- verify the template actually receives ``zip``.
    locals()['zip'] = zip
    return render_template('frontend/analysis.html', **locals())
def load_symbols():
    """JSON endpoint: search collections whose name or symbol matches
    the ``query`` parameter (applied as a Mongo ``$regex``).

    ``exchange`` optionally narrows the search; ``exclude`` filters out
    results whose display text contains it.
    """
    params = request.args
    exchange = params.get('exchange', '')
    query = params.get('query', '')
    exclude = params.get('exclude', '')
    cond = {
        '$or': [
            {'name': {'$regex': query}},
            {'symbol': {'$regex': query}},
        ]
    }
    if exchange:
        cond['exchange'] = exchange
    result = []
    for coll in Collection.query(cond, {'name': 1, 'symbol': 1}):
        display = '{}({})'.format(coll.symbol, coll.name)
        result.append({'text': display, 'value': coll.symbol})
    if exclude:
        result = [item for item in result if exclude not in item['text']]
    return jsonify(result=result)
def analysis():
    """Front-end view: per-exchange analysis page (invest-cash history,
    per-symbol increase history, and a cashout prediction window).

    Rendered with ``**locals()``; local names are the template
    contract -- do not rename.
    """
    nav = 'analysis'
    exchange = request.args.get('exchange')
    exs = sorted(list(Exchange.query()), key=lambda x: x.abbr)
    exchanges = [e.abbr for e in exs]
    ratings = [ex.rating for ex in exs]
    if not exchange:
        exchange = exchanges[0]
    # Pick the requested exchange; ``ex`` keeps the loop's last value
    # if no abbr matches.
    ex = None
    for ex in exs:
        if ex.abbr == exchange:
            break
    # invest history
    ih_dates = []
    ih_values_self = []
    ih_values_all = []
    for h in ex.invest_cash_history:
        ih_dates.append(h['date'].strftime('%Y-%m-%d'))
        # Plotted in units of 1e8 (亿).
        ih_values_self.append(h['invest_cash'] / 1e8)
        ih_values_all.append(h['total_cash'] / 1e8)
    # increase history
    inc_days = []
    inc_series = []
    symbols = []
    for symbol, values in ex.increase_history.items():
        # Track the longest series to size the x-axis.
        if len(values) > len(inc_days):
            inc_days = list(range(1, len(values)))
        inc_series.append({
            'name': symbol,
            'type': 'line',
            'data': [v * 100 for v in values],  # as percentages
        })
        symbols.append(symbol)
    # predict
    conf = get_conf(ex.abbr)
    # "today" in UTC+8, truncated to midnight.
    today = datetime.utcnow() + timedelta(hours=8)
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    before = today - timedelta(days=conf['cashout'])
    cashout_at = today + timedelta(days=conf['cashout'])
    colls = list(Collection.query({'exchange': ex.abbr,
                                   'offers_at': {'$gte': before}}))
    # NOTE(review): mutating locals() is not guaranteed to take effect
    # in CPython -- verify the template actually receives ``zip``.
    locals()['zip'] = zip
    return render_template('frontend/analysis.html', **locals())
def parse():
    """Admin view: show announcement-parsing progress and, for a given
    announcement ``url``, its parsed collections formatted for display.

    Renders ``admin/parse.html`` with ``**locals()``, so every local
    name below is part of the template contract -- do not rename.
    """
    nav = "parse"
    url = request.args.get("url")
    # Progress counters over offer/result announcements.
    num_parsed = Announcement.count(
        {"parsed": True, "type_": {"$in": ["offer", "result"]}})
    num_total = Announcement.count({"type_": {"$in": ["offer", "result"]}})
    if url:
        announcement = Announcement.query_one({"url": url})
        colls = list(Collection.query({"from_url": url}))
        for coll in colls:
            # Pre-format fields for the template: dates as YYYYMMDD,
            # ratios as percentages, prices with a trailing ".0" stripped.
            if coll.offers_at:
                coll.offers_at2 = coll.offers_at.strftime("%Y%m%d")
            if coll.offer_cash_ratio:
                coll.offer_cash_ratio = "{:2.0f}%".format(
                    coll.offer_cash_ratio * 100)
            if coll.offer_price:
                coll.offer_price = str(coll.offer_price)
                if coll.offer_price.endswith(".0"):
                    coll.offer_price = coll.offer_price[:-2]
    all_done = num_parsed == num_total
    return render_template("admin/parse.html", **locals())
def collection():
    """Front-end view: paginated collection listing (25 per page),
    newest offerings first, optionally filtered by exchange.

    Rendered with ``**locals()``; local names are the template
    contract -- do not rename.
    """
    nav = 'collection'
    search = request.args.get('search', '')
    exchange = request.args.get('exchange', '')
    # ``or 1`` guards against an empty-string page parameter.
    page = int(request.args.get('page', 1) or 1)
    limit = 25
    skip = limit * (page - 1)
    cond = {}
    if exchange:
        cond['exchange'] = exchange
    total = Collection.count(cond)
    pagination = Pagination(page, limit, total)
    collections = list(
        Collection.query(cond,
                         sort=[('offers_at', -1)],
                         skip=skip,
                         limit=limit))
    for c in collections:
        lp = Quote.latest_price(c.exchange, c.symbol)
        if lp and c.offer_price:
            # Overall gain relative to the offering price.
            c.total_increase = lp / c.offer_price - 1
    return render_template('frontend/collection.html', **locals())
def history_winner(exchange, url, force):
    """Fetch and store daily K-line history for every collection on
    ``exchange`` from a "winner"-protocol quote server.

    Args:
        exchange: exchange abbreviation used in queries and stored quotes.
        url:      server address of the form ``tcp://host:port``.
        force:    when true, re-fetch even if history already exists.
    """
    assert url.startswith('tcp://')
    host, port = url[6:].split(':')
    port = int(port)
    s = socket.socket()
    s.connect((host, port))

    def get_day_data(symbol):
        """Send the binary day-data request for ``symbol`` and return
        the raw response bytes."""
        bsymbol = symbol.encode('utf-8')
        bizdata = b''.join([
            b'\xfe\x8f\x00\x00',
            b'\x00' * 12,
            b'\x01\x00',
            b',\x00',
            b'\x02\x04',
            b'\x00',
            b'\x00',
            b'\x00\x00\x00\x00',
            b'\x013' + bsymbol,
            b'\x03\x00',
            b'\x00\x00',
            b'\x00\x00\x00\x00\x00\x00\x00\x00',
            b'Z\x00\x10\x00\x013' + bsymbol,
            b'\x00\x00\x00\x00',
            b'\x00',
            b'\x00',
        ])
        bdata = b''.join([
            b'\x95\x00\x00\x95',
            b'11=524\x00',
            b'13=8\x00',
            b'5=36862\x00',
            b'4=109\x00',
            b'1=66\x008=',
            bizdata,
            b'\x00',
            b'1=40\x0069=markid=60ba7308cab942ee961536a74ec7c5f9\x00\x00',
        ])
        s.sendall(bdata)
        batch = 8192
        result = []
        # Read until a short recv signals the end of the response.
        while True:
            result.append(s.recv(batch))
            if len(result[-1]) != batch:
                break
        return b''.join(result)

    def parse_day_data(data):
        """Decode one response and persist its K-line rows via
        ``save_quotes`` (reads the enclosing loop variable ``c``)."""
        size = struct.unpack('>i', b'\x00' + data[1:4])[0]
        assert size + 4 == len(data)
        # Raw bytes literal (rb) -- the plain b'\d' form is an invalid
        # escape sequence in Python 3.
        m = re.compile(rb'1=(\d+)\x008=').search(data)
        if m:
            bizsize = int(m.group(1))
            start = m.span()[1]
            try:
                # Payload is usually zlib-compressed; fall back to the
                # raw bytes when it is not.
                bbiz = zlib.decompress(data[start:start + bizsize])
            except Exception:
                bbiz = data[start:start + bizsize]
            assert bbiz[:4] == b'\xfe\x8f\x00\x00'
            num_packs = struct.unpack('<H', bbiz[16:18])[0]
            sizes = [
                struct.unpack('<H', bbiz[18 + 2 * i:20 + 2 * i])[0]
                for i in range(num_packs)
            ]
            for i in range(num_packs):
                # NOTE(review): ternary precedence makes this
                # ``(18 + 2*num_packs + 0) if i == 0 else sizes[i-1]``,
                # so for i > 0 the offset is just the previous pack's
                # size -- that looks suspicious; confirm the framing
                # before changing it.
                start = 18 + 2 * num_packs + 0 if i == 0 else sizes[i - 1]
                if bbiz[start:start + 2] == b'\x02\x04':
                    # K-line data pack.
                    symbol = bbiz[start + 10:start + 16].decode('utf-8')
                    count = struct.unpack(
                        '<I', bbiz[start + 16:start + 20])[0]
                    kline_days = []
                    # Distinct index ``j``: the original reused ``i``
                    # here, clobbering the outer pack index and
                    # corrupting ``sizes[i - 1]`` for later packs.
                    for j in range(count):
                        begin = start + 20 + j * 32
                        row = struct.unpack('<Iiiiiiii',
                                            bbiz[begin:begin + 32])
                        q = {
                            'exchange': exchange,
                            'symbol': symbol,
                            'quote_type': '1d',
                            'quote_at': datetime.strptime(str(row[0]),
                                                          "%Y%m%d"),
                            'open_': row[1] / 100,
                            'high': row[2] / 100,
                            'low': row[3] / 100,
                            'close': row[4] / 100,
                            'amount': row[5] / 1.,
                            'volume': row[6],
                        }
                        if kline_days:
                            # Previous close becomes "last close".
                            q['lclose'] = kline_days[-1]['close']
                            save_quotes(q, c, first_quote=False)
                        else:
                            save_quotes(q, c, first_quote=True)
                        kline_days.append(q)

    try:
        for c in Collection.query({'exchange': exchange}):
            try:
                if not force and history_exists(c):
                    continue
                if '$' not in c.symbol:
                    log.info('fetching {}_{} on {}'.format(
                        c.exchange, c.symbol, url))
                    data = get_day_data(c.symbol)
                    parse_day_data(data)
            except Exception:
                # Narrowed from a bare ``except:``; one failing symbol
                # must not abort the whole sync.
                log.exception('{}_{}获取失败'.format(exchange, c.symbol))
    finally:
        # The original leaked the socket; always close it.
        s.close()
def calendar():
    """Front-end view: an 11-day offering calendar, one row per
    exchange, each cell spanning an offer-to-cashout window.

    Rendered with ``**locals()``; local names are the template
    contract -- do not rename.
    """
    nav = 'calendar'
    starts_at = request.args.get('starts_at')
    ends_at = request.args.get('ends_at')
    if starts_at:
        starts_at = datetime.strptime(starts_at, '%Y%m%d')
    # "today" in UTC+8, truncated to midnight.
    today = datetime.utcnow() + timedelta(hours=8)
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    if not starts_at:
        starts_at = today - timedelta(days=3)
    # The window is always 11 days; a supplied ends_at is overridden.
    ends_at = starts_at + timedelta(days=10)
    # Table header: (weekday label, "month/day") pairs.
    heads = []
    d = starts_at
    while d <= ends_at:
        heads.append(
            ('周' + '一二三四五六日'[d.weekday()],
             '{}/{}'.format(d.month, d.day)))
        d += timedelta(days=1)
    # Table body.
    exs = []  # row order: exchanges in first-seen order
    rowdict = defaultdict(list)  # exchange -> list of day cells
    seen = set()  # (exchange, offers_at) pairs already handled
    ddict = {}    # exchange -> first day not yet covered by a cell
    for c in Collection.query(
            {'offers_at': {'$gte': starts_at, '$lte': ends_at}},
            sort=[('offers_at', 1)]):
        if (c.exchange, c.offers_at) in seen:
            continue
        seen.add((c.exchange, c.offers_at))
        if c.exchange not in exs:
            exs.append(c.exchange)
        d = ddict.get(c.exchange, starts_at)
        while d < c.cashout_at:
            if d >= c.offers_at and d < c.cashout_at:
                # All collections offered the same day on this exchange
                # share one cell.
                cs = list(
                    Collection.query({
                        'offers_at': c.offers_at,
                        'exchange': c.exchange
                    }))
                ndays = (c.cashout_at - c.offers_at).days
                # Clip the cell at the right edge of the window.
                if c.offers_at + timedelta(days=ndays) > ends_at:
                    ndays = (ends_at - c.offers_at).days + 1
                rowdict[c.exchange].append({
                    'colspan': ndays,
                    'exchange': c.exchange,
                    'count': len(cs),
                    'cs': cs,
                    'symbols': ','.join([c.symbol for c in cs])
                })
                ddict[c.exchange] = c.cashout_at
                break
            else:
                # Empty single-day cell before the offering starts.
                rowdict[c.exchange].append({'colspan': 1})
            d += timedelta(days=1)
    banks = {}
    details = {}
    for ex in ddict:
        # Pad each row with empty cells up to the 11-day width.
        d = ddict[ex]
        while d <= ends_at:
            spans = sum(x['colspan'] for x in rowdict[ex])
            if spans < 11:
                rowdict[ex].append({'colspan': 1})
            d += timedelta(days=1)
        c = get_conf(ex)
        banks[ex] = c['opening']['bank']
        # Per-symbol detail popover data.
        details[ex] = {}
        for cell in rowdict[ex]:
            if 'cs' in cell:
                for c in cell['cs']:
                    details[ex][c.symbol] = {
                        'name': c.name,
                        'price': c.offer_price,
                        'offer_cash': c.offer_cash or 0,
                        'expected_ratio': c.expected_result_cash_ratio or 0,
                        'expected_revenue': c.expected_annual_profit or 0,
                    }
    if not exs:
        exs = ['无申购']
    prev_starts_at = (starts_at - timedelta(days=10)).strftime('%Y%m%d')
    next_starts_at = (starts_at + timedelta(days=10)).strftime('%Y%m%d')
    thisdate = (datetime.utcnow() + timedelta(hours=8))
    thisdate = '{}/{}'.format(thisdate.month, thisdate.day)
    return render_template('frontend/calendar.html', **locals())
def history_winner(exchange, url, force):
    """Fetch and store daily K-line history for every collection on
    ``exchange`` from a "winner"-protocol quote server.

    Args:
        exchange: exchange abbreviation used in queries and stored quotes.
        url:      server address of the form ``tcp://host:port``.
        force:    when true, re-fetch even if history already exists.
    """
    assert url.startswith('tcp://')
    host, port = url[6:].split(':')
    port = int(port)
    # NOTE(review): this socket is never closed -- resource leak.
    s = socket.socket()
    s.connect((host, port))

    def get_day_data(symbol):
        # Build and send the binary day-data request for one symbol,
        # then read the full response.
        bsymbol = symbol.encode('utf-8')
        bizdata = b''.join([
            b'\xfe\x8f\x00\x00',
            b'\x00' * 12,
            b'\x01\x00',
            b',\x00',
            b'\x02\x04',
            b'\x00',
            b'\x00',
            b'\x00\x00\x00\x00',
            b'\x013' + bsymbol,
            b'\x03\x00',
            b'\x00\x00',
            b'\x00\x00\x00\x00\x00\x00\x00\x00',
            b'Z\x00\x10\x00\x013' + bsymbol,
            b'\x00\x00\x00\x00',
            b'\x00',
            b'\x00',
        ])
        bdata = b''.join([
            b'\x95\x00\x00\x95',
            b'11=524\x00',
            b'13=8\x00',
            b'5=36862\x00',
            b'4=109\x00',
            b'1=66\x008=',
            bizdata,
            b'\x00',
            b'1=40\x0069=markid=60ba7308cab942ee961536a74ec7c5f9\x00\x00',
        ])
        s.sendall(bdata)
        batch = 8192
        result = []
        # Read until a short recv signals the end of the response.
        while True:
            result.append(s.recv(batch))
            if len(result[-1]) != batch:
                break
        return b''.join(result)

    def parse_day_data(data):
        # Decode one response and persist its K-line rows via
        # ``save_quotes`` (reads the enclosing loop variable ``c``).
        size = struct.unpack('>i', b'\x00' + data[1:4])[0]
        assert size + 4 == len(data)
        m = re.compile(b'1=(\d+)\x008=').search(data)
        if m:
            bizsize = int(m.group(1))
            start = m.span()[1]
            try:
                # Payload is usually zlib-compressed; fall back to raw.
                bbiz = zlib.decompress(data[start: start + bizsize])
            except:
                bbiz = data[start: start + bizsize]
            assert bbiz[:4] == b'\xfe\x8f\x00\x00'
            num_packs = struct.unpack('<H', bbiz[16:18])[0]
            sizes = [struct.unpack('<H', bbiz[18 + 2 * i: 20 + 2 * i])[0]
                     for i in range(num_packs)]
            for i in range(num_packs):
                # NOTE(review): ternary precedence makes this
                # ``(18 + 2*num_packs + 0) if i == 0 else sizes[i-1]``;
                # for i > 0 the offset is just the previous pack's
                # size -- looks suspicious; confirm the framing.
                start = 18 + 2 * num_packs + 0 if i == 0 else sizes[i - 1]
                if bbiz[start:start + 2] == b'\x02\x04':
                    # K-line data pack.
                    symbol = bbiz[start + 10: start + 16].decode('utf-8')
                    count = struct.unpack(
                        '<I', bbiz[start + 16: start + 20])[0]
                    kline_days = []
                    # NOTE(review): this inner loop reuses ``i``,
                    # clobbering the outer pack index -- after the
                    # first K-line pack, ``sizes[i - 1]`` above reads
                    # the wrong entry.  Likely a bug.
                    for i in range(count):
                        begin = start + 20 + i * 32
                        row = struct.unpack('<Iiiiiiii',
                                            bbiz[begin: begin + 32])
                        q = {
                            'exchange': exchange,
                            'symbol': symbol,
                            'quote_type': '1d',
                            'quote_at': datetime.strptime(str(row[0]),
                                                          "%Y%m%d"),
                            'open_': row[1] / 100,
                            'high': row[2] / 100,
                            'low': row[3] / 100,
                            'close': row[4] / 100,
                            'amount': row[5] / 1.,
                            'volume': row[6],
                        }
                        if kline_days:
                            # Previous close becomes "last close".
                            q['lclose'] = kline_days[-1]['close']
                            save_quotes(q, c, first_quote=False)
                        else:
                            save_quotes(q, c, first_quote=True)
                        kline_days.append(q)

    for c in Collection.query({'exchange': exchange}):
        try:
            if not force and history_exists(c):
                continue
            if '$' not in c.symbol:
                log.info('feching {}_{} on {}'.format(c.exchange,
                                                      c.symbol, url))
                data = get_day_data(c.symbol)
                parse_day_data(data)
        except:
            log.exception('{}_{}获取失败'.format(exchange, c.symbol))
def calendar():
    """Front-end view: an 11-day offering calendar, one row per
    exchange, each cell spanning an offer-to-cashout window.

    Rendered with ``**locals()``; local names are the template
    contract -- do not rename.
    """
    nav = "calendar"
    starts_at = request.args.get("starts_at")
    ends_at = request.args.get("ends_at")
    if starts_at:
        starts_at = datetime.strptime(starts_at, "%Y%m%d")
    # "today" in UTC+8, truncated to midnight.
    today = datetime.utcnow() + timedelta(hours=8)
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    if not starts_at:
        starts_at = today - timedelta(days=3)
    # The window is always 11 days; a supplied ends_at is overridden.
    ends_at = starts_at + timedelta(days=10)
    # Table header: (weekday label, "month/day") pairs.
    heads = []
    d = starts_at
    while d <= ends_at:
        heads.append(("周" + "一二三四五六日"[d.weekday()],
                      "{}/{}".format(d.month, d.day)))
        d += timedelta(days=1)
    # Table body.
    exs = []  # row order: exchanges in first-seen order
    rowdict = defaultdict(list)  # exchange -> list of day cells
    seen = set()  # (exchange, offers_at) pairs already handled
    ddict = {}    # exchange -> first day not yet covered by a cell
    for c in Collection.query(
            {"offers_at": {"$gte": starts_at, "$lte": ends_at}},
            sort=[("offers_at", 1)]):
        if (c.exchange, c.offers_at) in seen:
            continue
        seen.add((c.exchange, c.offers_at))
        if c.exchange not in exs:
            exs.append(c.exchange)
        d = ddict.get(c.exchange, starts_at)
        while d < c.cashout_at:
            if d >= c.offers_at and d < c.cashout_at:
                # All collections offered the same day on this exchange
                # share one cell.
                cs = list(Collection.query({"offers_at": c.offers_at,
                                            "exchange": c.exchange}))
                ndays = (c.cashout_at - c.offers_at).days
                # Clip the cell at the right edge of the window.
                if c.offers_at + timedelta(days=ndays) > ends_at:
                    ndays = (ends_at - c.offers_at).days + 1
                rowdict[c.exchange].append(
                    {
                        "colspan": ndays,
                        "exchange": c.exchange,
                        "count": len(cs),
                        "cs": cs,
                        "symbols": ",".join([c.symbol for c in cs]),
                    }
                )
                ddict[c.exchange] = c.cashout_at
                break
            else:
                # Empty single-day cell before the offering starts.
                rowdict[c.exchange].append({"colspan": 1})
            d += timedelta(days=1)
    banks = {}
    details = {}
    for ex in ddict:
        # Pad each row with empty cells up to the 11-day width.
        d = ddict[ex]
        while d <= ends_at:
            spans = sum(x["colspan"] for x in rowdict[ex])
            if spans < 11:
                rowdict[ex].append({"colspan": 1})
            d += timedelta(days=1)
        c = get_conf(ex)
        banks[ex] = c["opening"]["bank"]
        # Per-symbol detail popover data.
        details[ex] = {}
        for cell in rowdict[ex]:
            if "cs" in cell:
                for c in cell["cs"]:
                    details[ex][c.symbol] = {
                        "name": c.name,
                        "price": c.offer_price,
                        "offer_cash": c.offer_cash or 0,
                        "expected_ratio": c.expected_result_cash_ratio or 0,
                        "expected_revenue": c.expected_annual_profit or 0,
                    }
    if not exs:
        exs = ["无申购"]
    prev_starts_at = (starts_at - timedelta(days=10)).strftime("%Y%m%d")
    next_starts_at = (starts_at + timedelta(days=10)).strftime("%Y%m%d")
    thisdate = datetime.utcnow() + timedelta(hours=8)
    thisdate = "{}/{}".format(thisdate.month, thisdate.day)
    return render_template("frontend/calendar.html", **locals())