def wrapper(*args, **kv):
    """Invoke the wrapped ``func``; on any failure log and return ``default``.

    Closure body of an exception-swallowing decorator: ``func`` and
    ``default`` are bound in the enclosing (not visible) scope.
    """
    try:
        return func(*args, **kv)
    except Exception:
        # Record which call failed and its keyword arguments, then the stack.
        logger.error('%s failed, kv: %s' % (func.__name__, str(kv)))
        logger.error(traceback.format_exc())
        return default
def collect_single_index_daily_from_ts(code, table_name='index_k_data_60m', conn=None):
    """Fetch recent 60-minute bars for one index from tushare and append to MySQL.

    Pulls bars from 5 days back, normalizes column names, derives
    ``pre_close`` and keeps the 4 newest rows before inserting.
    Errors are logged and swallowed (best-effort collection).
    """
    window_start = (datetime.now() - timedelta(5)).strftime(
        datetime_utils.DATE_FORMAT)
    try:
        bars = ts.bar(conn=conn, code=code, freq='60min', asset="INDEX",
                      start_date=window_start, retry_count=10)
        bars.rename(columns={'vol': 'volume'}, inplace=True)
        bars = bars.drop(columns=['amount'])
        bars['date'] = bars.index
        # shift(-1): assumes ts.bar returns rows newest-first, so the
        # "previous" close is the next row down — TODO confirm ordering.
        bars['pre_close'] = bars['close'].shift(-1)
        bars = bars.head(4)
        bars.to_sql(table_name, dataSource.mysql_quant_engine,
                    if_exists='append', index=False)
    except Exception as e:
        logger.error(e)
def collect_single(code, start, end, futu_quote_ctx):
    """Append weekly qfq K-line history for one stock (via futu) to ``k_data_weekly``."""
    table_name = 'k_data_weekly'
    try:
        # NOTE(review): the returned state code is not checked before insert.
        state, frame = futu_quote_ctx.get_history_kline(
            fill_market(code), ktype='K_WEEK', autype='qfq',
            start=start, end=end)
        frame.to_sql(table_name, dataSource.mysql_quant_engine,
                     if_exists='append', index=False)
    except Exception as e:
        logger.error(e)
def predict_k_data():
    """Run every trained classifier on the latest 60m features of each HS300
    stock and persist the predictions.

    Per-stock failures are logged and the loop continues.
    """
    hs300 = ts.get_hs300s()
    df_index = index_k_data_60m_dao.get_rel_price()
    for code in hs300['code'].values:
        try:
            logger.debug('begin predict, code:%s' % code)
            data, features = k_data_60m_dao.get_k_predict_data_with_features(
                code, df_index)
            # Keyword name for the log dao -> model instance; all models are
            # constructed first, then predicted in insertion order.
            models = {
                'logistic_regression': LogisticRegressionClassifier(),
                'support_vector_classifier': SupportVectorClassifier(),
                'random_forest_classifier': RandomForestClassifierModel(),
                'xgb_classifier': XGBoostClassier(),
                'sequantial_neural': SequantialNeuralClassifier(),
            }
            predictions = {name: model.predict(code, data)
                           for name, model in models.items()}
            k_data_60m_predict_log_dao.insert(code, **predictions)
            logger.debug('predict end, code:%s' % code)
        except Exception as e:
            logger.error("predict k data error, code:%s, error:%s"
                         % (code, repr(e)))
def training_k_data():
    """Train PCA plus all five classifiers on 60m features for each HS300
    stock, from 2015-01-01 through today.  Failures are logged per stock.
    """
    hs300 = ts.get_hs300s()
    for code in hs300['code'].values:
        try:
            logger.debug('begin training mode, code:%s' % code)
            data, features = k_data_60m_dao.get_k_data_with_features(
                code, '2015-01-01', datetime.now().strftime("%Y-%m-%d"))
            # Construct all models first, then train each in turn.
            models = (PCAModel(MODULE_NAME),
                      LogisticRegressionClassifier(),
                      SupportVectorClassifier(),
                      RandomForestClassifierModel(),
                      XGBoostClassier(),
                      SequantialNeuralClassifier())
            for model in models:
                model.training_model(code, data, features)
            logger.debug('training mode end, code:%s' % code)
        except Exception as e:
            logger.error("training k data error, code:%s, error:%s"
                         % (code, repr(e)))
def training_k_data(start, end):
    """Train PCA and the classical classifiers for the stock-pool slice
    ``codes[start:end]``.

    The sequential neural model is intentionally disabled here (it was
    commented out in the original code).
    """
    pool = stock_pool_dao.get_list()
    for code in pool['code'].values[start:end]:
        try:
            logger.debug('begin training mode, code:%s' % code)
            data, features = k_data_dao.get_k_data_with_features(
                code, '2015-01-01', datetime.now().strftime("%Y-%m-%d"))
            # Construct all models first, then train each in turn.
            models = (PCAModel('k_data'),
                      LogisticRegressionClassifier(),
                      SupportVectorClassifier(),
                      RandomForestClassifierModel(),
                      XGBoostClassier())
            for model in models:
                model.training_model(code, data, features)
            logger.debug('training mode end, code:%s' % code)
        except Exception as e:
            logger.error("training k data error, code:%s, error:%s"
                         % (code, repr(e)))
def handle(exception, req, res, error=None):
    """Falcon error handler: translate an exception into a JSON error body.

    AppError keeps its own status/code/title; HTTPNotFound and the
    SQLAlchemy NoResultFound both map to the resource-not-found error;
    anything else becomes ERR_UNKNOWN with the traceback as message
    (also logged).
    """
    if isinstance(exception, AppError):
        res.status = exception.status
        error = {'code': exception.code, 'message': exception.title}
        if exception.description:
            error['description'] = exception.description
        res.body = falcon.json.dumps({'error': error})
    elif isinstance(exception, (falcon.HTTPNotFound, NoResultFound)):
        # Both cases produced byte-identical responses; handled together.
        error = {
            'code': RESOURCE_NOT_FOUND_EXCEPTION['code'],
            'message': RESOURCE_NOT_FOUND_EXCEPTION['title'],
        }
        res.status = RESOURCE_NOT_FOUND_EXCEPTION['status']
        res.body = falcon.json.dumps({'error': error})
    else:
        error_msg = traceback.format_exc()
        error = {'code': ERR_UNKNOWN['code'], 'message': error_msg}
        res.status = ERR_UNKNOWN['status']
        res.body = falcon.json.dumps({'error': error})
        logger.error(error_msg)
def get_k_data(self, code, start_date, end_date):
    """Download daily OHLCV history for *code* from Yahoo Finance.

    Returns a DataFrame with columns date/open/high/low/close/volume/code/
    pre_close; rows without a previous close are dropped.  Logs and
    re-raises on failure.
    """
    try:
        period1 = self.string2ts(start_date)
        period2 = self.string2ts(end_date)
        cookie, crumb = self.get_cookie_crumb(code)
        url = ("https://query1.finance.yahoo.com/v7/finance/download/%s"
               "?period1=%s&period2=%s&interval=1d&events=history&crumb=%s"
               % (code, period1, period2, crumb))
        logger.debug(url)
        resp = requests.get(url, cookies=cookie)
        frame = pd.read_csv(io.StringIO(resp.content.decode('utf-8')))
        frame["code"] = code
        frame = frame.drop(columns=['Adj Close'])
        frame = frame.rename(columns={'Date': 'date', 'Open': 'open',
                                      'High': 'high', 'Low': 'low',
                                      'Close': 'close', 'Volume': 'volume'})
        # Previous close comes from the prior trading day's row.
        frame['pre_close'] = frame['close'].shift(1)
        return frame.dropna()
    except Exception as e:
        logger.error(repr(e))
        raise e
def collect_all():
    """Collect data for every stock in the industry list, pausing 1s between calls."""
    codes = stock_industry_dao.get_list()['code'].values
    for code in codes:
        try:
            collect_single(code)
            time.sleep(1)  # throttle to avoid hammering the data source
        except Exception as e:
            logger.error(repr(e))
def collect_single_test(self):
    """Smoke-test: collect technical features for every HS300 constituent
    over a fixed one-month window, logging per-stock failures.
    """
    hs300 = ts.get_hs300s()
    for code in hs300['code'].values:
        try:
            kdtfc.collect_single(code, start='2018-05-04', end='2018-06-05')
        except Exception as e:
            logger.error("collect technical features failed code:%s, exception:%s"
                         % (code, repr(e)))
def collect_full():
    """Collect technical features for every stock from 2015-01-01 through today."""
    today = datetime.now().strftime('%Y-%m-%d')
    for code in stock_industry_dao.get_list()['code'].values:
        try:
            collect_single(code=code, start='2015-01-01', end=today)
        except Exception as e:
            logger.error(
                "collect technical features failed code:%s, exception:%s"
                % (code, repr(e)))
def collect_stock_basic():
    """Rebuild the ``stock_basic`` table from east-money fundamentals.

    Full refresh: truncates first, then appends one DataFrame per stock,
    logging and skipping any stock that fails.
    """
    stock_basic_dao.truncate()  # wipe before re-collecting everything
    for stock in stock_dao.query_all():
        try:
            basics = east_money_api.get_stock_basic(stock.code)
            basics = basics.fillna(0)  # missing fundamentals stored as 0
            basics.to_sql('stock_basic', dataSource.mysql_quant_engine,
                          if_exists='append', index=False)
        except Exception as e:
            logger.error("code:%s, error:%s" % (stock.code, repr(e)))
def collect_single_index_daily_from_ts(code, table_name='index_k_data'):
    """Append the latest daily bar for one index (tushare) to MySQL."""
    try:
        bars = ts.get_k_data(code, index=True)
        bars['code'] = code
        # shift(1): assumes get_k_data returns rows oldest-first, so the
        # prior row holds the previous close — TODO confirm ordering.
        bars['pre_close'] = bars['close'].shift(1)
        bars = bars.tail(1)  # keep only the most recent bar
        bars.to_sql(table_name, dataSource.mysql_quant_engine,
                    if_exists='append', index=False)
    except Exception as e:
        logger.error("collect single index daily from ts:%s, exception:%s"
                     % (code, repr(e)))
def collect_single(code, table_name='k_data_60m', conn=None):
    """Append the full 60-minute bar history (since 2015) for one stock to MySQL."""
    try:
        bars = ts.bar(conn=conn, code=code, freq='60min',
                      start_date='2015-01-01', retry_count=10)
        bars.rename(columns={'vol': 'volume'}, inplace=True)
        bars = bars.drop(columns=['amount'])
        bars['date'] = bars.index
        # shift(-1): assumes ts.bar returns rows newest-first, so the
        # "previous" close is the next row down — TODO confirm ordering.
        bars['pre_close'] = bars['close'].shift(-1)
        bars = bars.dropna()
        bars.to_sql(table_name, dataSource.mysql_quant_engine,
                    if_exists='append', index=False)
    except Exception as e:
        logger.error("collect failed code:%s, exception:%s" % (code, repr(e)))
def collect_single_index_from_yahoo(code, start, end, table_name='index_k_data'):
    """Fetch daily bars for one index from Yahoo and append them to MySQL."""
    try:
        frame = yahoo_finance_api.get_k_data(code, start_date=start,
                                             end_date=end)
        frame.to_sql(table_name, dataSource.mysql_quant_engine,
                     if_exists='append', index=False)
    except Exception as e:
        logger.error("collect single index from yahoo:%s, exception:%s"
                     % (code, repr(e)))
def collect_full_daily():
    """Daily collection pass over all stocks; does nothing on market holidays."""
    today = datetime.now().strftime('%Y-%m-%d')
    # Skip holidays entirely — there is no new data to collect.
    if ts.is_holiday(today):
        return
    for code in stock_industry_dao.get_stock_code_list()['code'].values:
        try:
            collect_single_daily(code)
        except Exception as e:
            logger.error(
                "collect technical features failed code:%s, exception:%s"
                % (code, repr(e)))
def predict(self, code, data):
    """Predict with the persisted ridge-regression model for *code*.

    Returns the first prediction as int, or None when no model file exists
    (logged as an error).
    """
    model_path = self.get_model_path(code, self.module_name, self.model_name)
    if not os.path.exists(model_path):
        logger.error('model not found, code is %s:' % code)
        return
    # Mirror training-time preprocessing: scale, then saved-PCA projection.
    scaled = preprocessing.scale(data)
    pca = PCAModel(self.module_name).load(code)
    reduced = pca.transform(scaled)
    model = joblib.load(model_path)
    return int(model.predict(reduced)[0])
def collect_single_index_daliy_from_yahoo(code, table_name='index_k_data'):
    """Append the most recent daily bar for one index (Yahoo) to MySQL.

    Pulls a 30-day window ending tomorrow and keeps only the last row.
    NOTE(review): the name typo "daliy" is part of the public interface and
    is kept so existing callers keep working.
    """
    try:
        window_start = (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')
        window_end = (datetime.now() + timedelta(days=1)).strftime('%Y-%m-%d')
        frame = yahoo_finance_api.get_k_data(code, start_date=window_start,
                                             end_date=window_end)
        frame.tail(1).to_sql(table_name, dataSource.mysql_quant_engine,
                             if_exists='append', index=False)
    except Exception as e:
        logger.error("collect single index from yahoo:%s, exception:%s"
                     % (code, repr(e)))
def predict(self, code, data):
    """Predict with the persisted Keras sequential model for *code*.

    Returns the first output value as int, or None when no .h5 model file
    exists (logged as an error).
    """
    model_path = self.get_model_path(code, self.module_name,
                                     self.model_name, 'h5')
    if not os.path.exists(model_path):
        logger.error('model not found, code is %s:' % code)
        return
    # Mirror training-time preprocessing: scale, then saved-PCA projection.
    scaled = preprocessing.scale(data)
    pca = PCAModel(self.module_name).load(code)
    reduced = pca.transform(scaled)
    network = load_model(model_path)
    return int(network.predict(reduced)[0][0])
def collect_single_daily(code, futu_quote_ctx, start=None, end=None):
    """Append the most recent daily qfq K-line for one stock (futu) to ``k_data``.

    *start*/*end* default to the current date when omitted; only the last
    returned row is inserted.
    """
    table_name = 'k_data'
    if start is None:
        start = get_current_date()
    if end is None:
        end = get_current_date()
    try:
        # NOTE(review): the returned state code is not checked before insert.
        state, frame = futu_quote_ctx.get_history_kline(
            fill_market(code), ktype='K_DAY', autype='qfq',
            start=start, end=end)
        frame.tail(1).to_sql(table_name, dataSource.mysql_quant_engine,
                             if_exists='append', index=False)
    except Exception as e:
        logger.error(e)
def collect_quarter(code, start, end, year, quarter):
    """Join daily K-data with one quarterly performance report and persist.

    For each trading day in [start, end], writes a row to
    ``k_data_stock_performance`` carrying that quarter's fundamentals
    (eps, roe, etc.).  Per-row insert failures are logged and skipped.

    Fixes: the row dict previously shadowed the builtin ``dict``, and the
    quarter's scalar fields were re-extracted from the report DataFrame on
    every loop iteration although they are loop-invariant.
    """
    data = k_data_dao.get_k_data(code, start, end, cal_next_direction=False)
    data_report = stock_performance_dao.get_by_code(code, year, quarter)
    # Hoist the per-quarter scalars out of the per-day loop; if the report
    # is missing/empty this logs once and bails, preserving best-effort
    # behavior instead of raising per row.
    try:
        report_fields = {field: data_report[field].values[0]
                         for field in ('eps', 'eps_yoy', 'bvps', 'roe',
                                       'epcf', 'net_profits', 'profits_yoy')}
    except Exception as e:
        logger.error("code:%s, error:%s" % (code, repr(e)))
        return
    for index, row in data.iterrows():
        try:
            record = {'code': code, 'date': row['date']}
            record.update(report_fields)
            pd.DataFrame([record]).to_sql('k_data_stock_performance',
                                          dataSource.mysql_quant_engine,
                                          if_exists='append', index=False)
        except Exception as e:
            logger.error("code:%s, error:%s" % (code, repr(e)))
def cal_single_stock(code, k_data_list, w_data_list):
    """Decide whether *code* currently triggers a MACD-style buy signal.

    Combines weekly (``w_``) and daily (``k_``) MACD state — including a
    one-step-ahead diff/dea forecast from ``macd_predict`` — and returns
    True when any of four golden-cross conditions holds.  Returns False
    when either timeframe has no rows for the code, and on any error
    (logged with traceback).

    Bug fixed: the weekly "previous bar" values (w_pre_macd / w_pre_diff /
    w_pre_dea) previously read index ``[-1]`` — the same element as the
    current-bar values — which made the first weekly condition
    (``w_pre_diff < w_pre_dea and … w_diff > w_dea``) unsatisfiable.
    They now read ``[-2]``, consistent with ``w_pre_volume`` and the daily
    ``pre_diff``/``pre_dea``.
    """
    try:
        w_data = w_data_list.loc[w_data_list['code'] == fill_market(code)]
        w_data = w_data.join(cal_macd(w_data))
        # w_data = w_data.join(acc_kdj(w_data))
        k_data = k_data_list.loc[k_data_list['code'] == fill_market(code)]
        k_data = k_data.join(cal_macd(k_data))
        # No rows for this code on either timeframe: nothing to evaluate.
        if len(k_data['code'].values) == 0 or len(w_data['code'].values) == 0:
            return False
        k_data['ma145'] = cal_ma145(k_data)
        k_data['turnover7'] = cal_mavol7(k_data, column='turnover')

        # Weekly-frame state: previous bar at [-2], current bar at [-1].
        w_pre_volume = w_data['volume'].values[-2]
        w_volume = w_data['volume'].values[-1]
        w_pre_macd = w_data['macd'].values[-2]
        w_pre_diff = w_data['diff'].values[-2]
        w_pre_dea = w_data['dea'].values[-2]
        w_last2_diff = w_data['diff'].tail(80)
        w_last2_dea = w_data['dea'].tail(80)
        w_macd = w_data['macd'].values[-1]
        w_diff = w_data['diff'].values[-1]
        w_dea = w_data['dea'].values[-1]
        # w_k_value = w_data['k_value'].values[-1]
        # w_d_value = w_data['d_value'].values[-1]

        # Daily-frame state. k_close / k_ma145 / k_turnover7, the weekly
        # volumes and `macd` above feed only the disabled filters below.
        k_close = k_data['close'].values[-1]
        k_ma145 = k_data['ma145'].values[-1]
        k_turnover7 = k_data['turnover7'].values[-1]
        k_diff = k_data['diff'].values[-1]
        k_dea = k_data['dea'].values[-1]
        k_last3_diff = k_data['diff'].tail(500)
        k_last3_dea = k_data['dea'].tail(500)
        pre_diff = k_data['diff'].values[-2]
        pre_dea = k_data['dea'].values[-2]
        macd = w_data['macd'].values[-1]

        # Disabled pre-filters kept for reference:
        '''
        if k_close < k_ma145:
            logger.debug("code:%s, close price less than ma145" % code)
            return False
        if k_turnover7 < 75000000:
            logger.debug("code:%s, turnover less than 75000000" % code)
            return False
        if round(w_volume / w_pre_volume, 1) < 1.3:
            logger.debug("code:%s, volume less than pre_volume * 1.3" % code)
            return False
        '''

        # Forecast the next diff and dea on both frames via machine learning.
        k_next_diff = macd_predict(k_last3_diff, k_diff)[0]
        k_next_dea = macd_predict(k_last3_dea, k_dea)[0]
        w_next_diff = macd_predict(w_last2_diff, w_diff)[0]
        w_next_dea = macd_predict(w_last2_dea, w_dea)[0]

        # 1) Weekly golden cross just happened (diff crossed above dea).
        if w_pre_diff < w_pre_dea and w_macd > -0.35 and w_diff > w_dea:
            return True
        # 2) Weekly cross forecast for the next bar.
        if w_dea > w_diff > -0.35 and w_next_diff > w_next_dea:
            return True
        # 3) Daily golden cross just happened.
        if pre_diff < pre_dea and k_diff > -0.35 and k_diff > k_dea:
            return True
        # 4) Daily cross forecast for the next bar.
        if k_dea > k_diff > -0.35 and k_next_diff > k_next_dea:
            return True

        # Earlier experimental variants kept for reference:
        # if (w_pre_diff < w_pre_dea and w_macd > -0.35 and w_diff > w_dea)\
        #     or (w_pre_diff < w_pre_dea and w_macd > -0.35 and w_diff < w_dea
        #         and w_dea - w_diff < abs(w_dea * 0.2)):
        '''
        elif macd > -0.35 and abs(dea - diff) < 0.2 and abs(pre_dea - pre_diff) < 0.2:
            if (w_pre_diff < w_pre_dea and w_macd > -0.35 and w_diff > w_dea) \
                    or (w_pre_diff < w_pre_dea and w_macd > -0.35 and w_diff < w_dea
                        and w_dea - w_diff < abs(w_dea * 0.2)):
                return True
        # macd_point = k_data_dao.get_last_macd_cross_point(k_data, window_size=8)
        # daily MACD already golden-crossed within the last 3 days
        # if macd_point is not None:
        #     return True
        '''
        return False
    except Exception as e:
        logger.error("code:%s" % code)
        logger.error(traceback.format_exc())
        return False