def main(offset=0):
    """MA diffuse screen: compute MA5/MA10/MA20 of the daily close and their slopes,
    keep stocks where MA5 > MA10 > MA20, the shorter slopes are steeper than the
    longer ones, and the MA20 slope is positive, then export the result to a dated CSV."""
    # Module-level dependencies (pandas DataFrame, main_session, models, api,
    # sampling_count and the COL_* constants) are assumed to be imported elsewhere in this file.
    daily001 = main_session.query(models.DailyPro).filter(
        models.DailyPro.ts_code == '000001.SZ').order_by(
        models.DailyPro.trade_date.desc()).all()
    LAST_MARKET_DATE = daily001[offset].trade_date
    data_frame = DataFrame()
    for i, stock_basic in enumerate(main_session.query(models.StockBasicPro).all()):
        try:
            for key in models.StockBasicPro.keys:
                data_frame.loc[i, key] = getattr(stock_basic, key)
            daily = main_session.query(models.DailyPro).filter(
                models.DailyPro.ts_code == stock_basic.ts_code,
                models.DailyPro.trade_date <= LAST_MARKET_DATE).order_by(
                models.DailyPro.trade_date.desc()).limit(sampling_count).all()
            ma_5 = api.daily_close_ma(daily=daily, step=5)
            ma_10 = api.daily_close_ma(daily=daily, step=10)
            ma_20 = api.daily_close_ma(daily=daily, step=20)
            data_frame.loc[i, COL_MA_5] = ma_5[0]
            data_frame.loc[i, COL_MA_10] = ma_10[0]
            data_frame.loc[i, COL_MA_20] = ma_20[0]
            # Slope: percentage change of each MA against its previous value.
            data_frame.loc[i, COL_MA_5_SLOPE] = round((ma_5[0] / ma_5[1] - 1) * 100, 2)
            data_frame.loc[i, COL_MA_10_SLOPE] = round((ma_10[0] / ma_10[1] - 1) * 100, 2)
            data_frame.loc[i, COL_MA_20_SLOPE] = round((ma_20[0] / ma_20[1] - 1) * 100, 2)
            data_frame.loc[i, COL_LASTPRICE] = daily[0].close
            cons = main_session.query(models.ConceptPro).join(
                models.ConceptDetailPro,
                models.ConceptPro.code == models.ConceptDetailPro.code).filter(
                models.ConceptDetailPro.ts_code == stock_basic.ts_code).all()
            concept_value = ''
            for con in cons:
                concept_value = concept_value + '{c}, '.format(c=con.name)
            data_frame.loc[i, 'concept'] = concept_value
            daily_basic = main_session.query(models.DailyBasicPro).filter(
                models.DailyBasicPro.ts_code == stock_basic.ts_code).first()
            if daily_basic:
                data_frame.loc[i, 'circ_mv'] = '{}亿'.format(round(daily_basic.circ_mv / 10000, 2))
        except Exception as e:
            print('exception in index:{index} {code} {name}'.format(
                index=i, code=stock_basic.ts_code, name=stock_basic.name))
            continue
        print('##### {i} #####'.format(i=i))
    data_frame = data_frame[
        (data_frame[COL_MA_5] > data_frame[COL_MA_10])
        & (data_frame[COL_MA_10] > data_frame[COL_MA_20])
        & (data_frame[COL_MA_5_SLOPE] > data_frame[COL_MA_10_SLOPE])
        & (data_frame[COL_MA_10_SLOPE] > data_frame[COL_MA_20_SLOPE])
        & (data_frame[COL_MA_20_SLOPE] > 0)
    ]
    # data_frame = data_frame.sort_values(by=COL_MAXGAP, ascending=False).reset_index(drop=True)
    # data_frame = data_frame.iloc[:200]
    data_frame = data_frame.sort_values(by=COL_MA_5_SLOPE, ascending=False).reset_index(drop=True)
    data_frame = data_frame.loc[:, ['ts_code', 'name', 'industry', COL_LASTPRICE, 'concept', 'circ_mv']]
    file_name = '../../logs/{date}@MA_diffuse.csv'.format(date=LAST_MARKET_DATE)
    # print(fileName)
    with open(file_name, 'w', encoding='utf8') as file:
        data_frame.to_csv(file)
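# Hedged sketch: api.daily_close_ma is used by every screen in this section but its
# implementation is not shown here, so the version below is an assumption inferred
# from the call sites (bars ordered newest-first; result[0] is the latest MA and
# result[1] the previous one). The name daily_close_ma_sketch is hypothetical.
def daily_close_ma_sketch(daily, step):
    """Return `step`-bar moving averages of the close price, newest first."""
    closes = [bar.close for bar in daily]
    result = []
    for start in range(0, len(closes) - step + 1):
        window = closes[start:start + step]
        result.append(round(sum(window) / step, 2))
    return result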
def main(ma_a=10, ma_b=20):
    """MA dive screen: compute MA(ma_a) and MA(ma_b) of the daily close, keep stocks
    where the shorter MA still sits above the longer one, sort by the smallest gap
    first, and export the result to a dated CSV."""
    daily001 = main_session.query(models.DailyPro).filter(
        models.DailyPro.ts_code == '000001.SZ').order_by(
        models.DailyPro.trade_date.desc()).all()
    LAST_MARKET_DATE = daily001[0].trade_date
    data_frame = DataFrame()
    for i, stock_basic in enumerate(main_session.query(models.StockBasicPro).all()):
        try:
            for key in models.StockBasicPro.keys:
                data_frame.loc[i, key] = getattr(stock_basic, key)
            daily = main_session.query(models.DailyPro).filter(
                models.DailyPro.ts_code == stock_basic.ts_code,
                models.DailyPro.trade_date <= LAST_MARKET_DATE).order_by(
                models.DailyPro.trade_date.desc()).limit(sampling_count).all()
            data_frame.loc[i, COL_MA_A] = api.daily_close_ma(daily=daily, step=ma_a)[0]
            data_frame.loc[i, COL_MA_B] = api.daily_close_ma(daily=daily, step=ma_b)[0]
            # Gap: percentage by which MA(ma_a) sits above MA(ma_b).
            data_frame.loc[i, COL_MA_GAP] = round(
                (data_frame.loc[i, COL_MA_A] / data_frame.loc[i, COL_MA_B] - 1) * 100, 2)
            data_frame.loc[i, COL_LASTPRICE] = daily[0].close
            cons = main_session.query(models.ConceptPro).join(
                models.ConceptDetailPro,
                models.ConceptPro.code == models.ConceptDetailPro.code).filter(
                models.ConceptDetailPro.ts_code == stock_basic.ts_code).all()
            concept_value = ''
            for con in cons:
                concept_value = concept_value + '{c}, '.format(c=con.name)
            data_frame.loc[i, 'concept'] = concept_value
        except Exception as e:
            print('exception in index:{index} {code} {name}'.format(
                index=i, code=stock_basic.ts_code, name=stock_basic.name))
            continue
        print('##### {i} #####'.format(i=i))
    data_frame = data_frame[
        (data_frame[COL_MA_GAP] > 0)
    ]
    # data_frame = data_frame.sort_values(by=COL_MAXGAP, ascending=False).reset_index(drop=True)
    # data_frame = data_frame.iloc[:200]
    data_frame = data_frame.sort_values(by=COL_MA_GAP, ascending=True).reset_index(drop=True)
    file_name = '../../logs/{date}@Ma_dive_ma{ma_a}_ma{ma_b}.csv'.format(
        date=LAST_MARKET_DATE, ma_a=ma_a, ma_b=ma_b)
    # print(fileName)
    with open(file_name, 'w', encoding='utf8') as file:
        data_frame.to_csv(file)
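# Hedged usage sketch: this screen is parameterized by the two MA windows, so
# (assuming the module is run directly, which this section does not show) it could
# be driven like this to produce one CSV per MA pair. The __main__ guard and the
# pairs below are illustrative assumptions, not the project's actual entry point.
if __name__ == '__main__':
    for ma_a, ma_b in [(5, 10), (10, 20), (20, 60)]:
        main(ma_a=ma_a, ma_b=ma_b)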
def main(offset=0):
    """Tidal screen on weekly bars: keep stocks flagged by api.weekly_break whose
    weekly MA20 slope or slope change clears the thresholds, then export the basics
    plus the float-holder list to a dated CSV."""
    daily001 = main_session.query(models.DailyPro).filter(
        models.DailyPro.ts_code == '000001.SZ').order_by(
        models.DailyPro.trade_date.desc()).all()
    LAST_MARKET_DATE = daily001[offset].trade_date
    data_frame = DataFrame()
    for i, stock_basic in enumerate(main_session.query(models.StockBasicPro).all()):
        try:
            for key in models.StockBasicPro.keys:
                data_frame.loc[i, key] = getattr(stock_basic, key)
            weekly = main_session.query(models.WeeklyPro).filter(
                models.WeeklyPro.ts_code == stock_basic.ts_code,
                models.WeeklyPro.trade_date <= LAST_MARKET_DATE).order_by(
                models.WeeklyPro.trade_date.desc()).limit(sampling_count).all()
            # First and second differences of the weekly MA20: slope and slope change.
            ma_20 = api.daily_close_ma(daily=weekly, step=20)
            ma_20_diff_1 = api.differ(ma_20)
            ma_20_diff_2 = api.differ(ma_20_diff_1)
            data_frame.loc[i, COL_MA_20] = ma_20[0]
            data_frame.loc[i, COL_MA_20_SLOPE] = ma_20_diff_1[0]
            data_frame.loc[i, COL_MA_20_SLOPE_CHANGE] = ma_20_diff_2[0]
            data_frame.loc[i, COL_WEEKLY_BREAK] = api.weekly_break(weekly[:10])
            # daily = main_session.query(models.DailyPro).filter(
            #     models.DailyPro.ts_code == stock_basic.ts_code,
            #     models.DailyPro.trade_date <= LAST_MARKET_DATE).order_by(
            #     models.DailyPro.trade_date.desc()).limit(sampling_count).all()
            # data_frame.loc[i, COL_CRIMSON_RATE] = round(api.crimson_rate(daily[:15]), 2)
            # data_frame.loc[i, COL_PCT_CHG_STD] = round(api.pct_chg_std(daily[:15]), 2)
            holders = main_session.query(models.FloatHolderPro).filter(
                models.FloatHolderPro.ts_code == stock_basic.ts_code).all()
            h_list = []
            for item in holders:
                h_list.append(item.holder_name)
            data_frame.loc[i, COL_FLOAT_HOLDERS] = '\n'.join(h_list)
        except Exception as e:
            print('exception in index:{index} {code} {name}'.format(
                index=i, code=stock_basic.ts_code, name=stock_basic.name))
            continue
        print('##### tidal {i} #####'.format(i=i))
    data_frame = data_frame[
        (data_frame[COL_WEEKLY_BREAK] == True)
    ]
    data_frame = data_frame[
        (data_frame[COL_MA_20_SLOPE] > 3)
        | (data_frame[COL_MA_20_SLOPE_CHANGE] > 100)
        # | (data_frame[COL_CRIMSON_RATE] > 0.7)
    ]
    data_frame = data_frame.sort_values(by=COL_MA_20_SLOPE_CHANGE, ascending=False).reset_index(drop=True)
    data_frame = data_frame.loc[:, ['ts_code', 'name', 'industry', COL_MA_20, COL_MA_20_SLOPE,
                                    COL_MA_20_SLOPE_CHANGE, COL_FLOAT_HOLDERS]]
    file_name = '{logs_path}/{date}@Tidal.csv'.format(date=LAST_MARKET_DATE, logs_path=env.logs_path)
    with open(file_name, 'w', encoding='utf8') as file:
        data_frame.to_csv(file)
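# Hedged sketch: api.differ is not defined in this section. Given that the daily
# screens compute a slope as (ma[0] / ma[1] - 1) * 100, one plausible reading is a
# percentage first-difference over a newest-first series; applying it twice, as above,
# would then approximate the change of the slope. This is an assumption and the name
# differ_sketch is hypothetical, not the project's implementation.
def differ_sketch(series):
    """Percentage change between consecutive values of a newest-first series."""
    return [round((series[i] / series[i + 1] - 1) * 100, 2)
            for i in range(len(series) - 1)]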
# -*- coding: utf-8 -*-
def main(offset=0):
    """Analyze_N screen: skip ST and 688 (STAR Market) names, score each stock on
    recent limit-up counts, a history-break index and the MA20 slope, keep the ones
    passing all checks, export a dated CSV, then plot candle charts in batches of 100
    and print the surviving symbols."""
    daily001 = main_session.query(models.DailyPro).filter(
        models.DailyPro.ts_code == '000001.SZ').order_by(
        models.DailyPro.trade_date.desc()).all()
    LAST_MARKET_DATE = daily001[offset].trade_date
    data_frame = DataFrame()
    for i, stock_basic in enumerate(main_session.query(models.StockBasicPro).all()):
        try:
            # Skip ST stocks and STAR Market (688xxx) listings.
            if 'ST' in stock_basic.name or stock_basic.symbol.startswith('688'):
                continue
            for key in models.StockBasicPro.keys:
                data_frame.loc[i, key] = getattr(stock_basic, key)
            daily = main_session.query(models.DailyPro).filter(
                models.DailyPro.ts_code == stock_basic.ts_code,
                models.DailyPro.trade_date <= LAST_MARKET_DATE).order_by(
                models.DailyPro.trade_date.desc()).limit(sampling_count).all()
            data_frame.loc[i, COL_PRICE] = daily[0].close
            data_frame.loc[i, COL_LIMIT_COUNT_A] = local_limit_count(daily[2:], local_scale=10)
            data_frame.loc[i, COL_LIMIT_COUNT_B] = local_limit_count(daily[2:], local_scale=1)
            # data_frame.loc[i, COL_CONTINUOUS_LIMIT_COUNT] = continuous_limit(daily, local_scale=10)
            data_frame.loc[i, COL_HISTORY_BREAK_INDEX] = daily_break_index(daily[2:], local_scale=30)
            ma_20 = api.daily_close_ma(daily=daily[2:], step=20)
            data_frame.loc[i, COL_MA_20_SLOPE] = round((ma_20[0] / ma_20[1] - 1) * 100, 2)
            if data_frame.loc[i, COL_LIMIT_COUNT_A] == 1 and data_frame.loc[i, COL_LIMIT_COUNT_B] == 1:
                data_frame.loc[i, COL_PASS_1] = True
            else:
                data_frame.loc[i, COL_PASS_1] = False
            # PASS_2: the previous bar closed up, below the 10% limit, and above its open.
            if (daily[1].close > daily[1].pre_close) and (
                    daily[1].close < round(daily[1].pre_close * 1.1, 2)) and (
                    daily[1].close > daily[1].open):
                data_frame.loc[i, COL_PASS_2] = True
            else:
                data_frame.loc[i, COL_PASS_2] = False
            data_frame.loc[i, COL_LAST_CHG] = daily[0].pct_chg
            daily_basic = main_session.query(models.DailyBasic).filter(
                models.DailyBasic.ts_code == stock_basic.ts_code).one()
            data_frame.loc[i, COL_CIRC_MV] = daily_basic.circ_mv
            holders = main_session.query(models.FloatHolderPro).filter(
                models.FloatHolderPro.ts_code == stock_basic.ts_code).all()
            h_set = set()
            for item in holders:
                h_set.add(item.holder_name)
            data_frame.loc[i, COL_FLOAT_HOLDERS] = '\n'.join(h_set)
            data_frame.loc[i, COL_HOLDERS_COUNT] = len(h_set)
        except Exception as e:
            print('exception in index:{index} {code} {name}'.format(
                index=i, code=stock_basic.ts_code, name=stock_basic.name))
            continue
        print('##### analyze_n {i} #####'.format(i=i))
    data_frame = data_frame[(data_frame[COL_PASS_1] == True)
                            & (data_frame[COL_PASS_2] == True)
                            & (data_frame[COL_MA_20_SLOPE] > 0)
                            & (data_frame[COL_HISTORY_BREAK_INDEX] == 0)]
    data_frame = data_frame.sort_values(by=COL_LAST_CHG, ascending=False).reset_index(drop=True)
    # data_frame = data_frame.head(100)
    file_name = '{logs_path}/{date}@Analyze_N.csv'.format(date=LAST_MARKET_DATE, logs_path=env.logs_path)
    with open(file_name, 'w', encoding='utf8') as file:
        data_frame.to_csv(file)
    # Plot candle charts for the survivors in batches of 100.
    batch_size = 100
    sub = 0
    for i in range(0, len(data_frame), batch_size):
        sub_df = data_frame.iloc[i:i + batch_size, :]
        sub_df = sub_df.reset_index(drop=True)
        plot_candle_gather(data_frame=sub_df, last_date=LAST_MARKET_DATE, sub=sub, offset=i)
        sub += 1
    for i in range(0, len(data_frame)):
        print('\'{}\','.format(data_frame.loc[i, 'symbol']))
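# Hedged sketch: local_limit_count and daily_break_index are project helpers not shown
# in this section. From the call sites above (and the 10% check used for PASS_2),
# local_limit_count plausibly counts limit-up closes within the newest `local_scale`
# bars. This is an assumption; local_limit_count_sketch is a hypothetical name, not
# the project's implementation.
def local_limit_count_sketch(daily, local_scale=10):
    """Count bars whose close reached the 10% limit-up price among the newest `local_scale` bars."""
    count = 0
    for bar in daily[:local_scale]:
        if bar.close >= round(bar.pre_close * 1.1, 2):
            count += 1
    return count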
def main(offset=0):
    """Weekly diffuse screen: keep stocks whose weekly MA20 slope exceeds 2 and whose
    aggressive change accumulation over the last 5 daily bars is positive, sorted by
    that accumulation, and export the result to a dated CSV."""
    daily001 = main_session.query(models.DailyPro).filter(
        models.DailyPro.ts_code == '000001.SZ').order_by(
        models.DailyPro.trade_date.desc()).all()
    LAST_MARKET_DATE = daily001[offset].trade_date
    data_frame = DataFrame()
    for i, stock_basic in enumerate(main_session.query(models.StockBasicPro).all()):
        try:
            for key in models.StockBasicPro.keys:
                data_frame.loc[i, key] = getattr(stock_basic, key)
            weekly = main_session.query(models.WeeklyPro).filter(
                models.WeeklyPro.ts_code == stock_basic.ts_code,
                models.WeeklyPro.trade_date <= LAST_MARKET_DATE).order_by(
                models.WeeklyPro.trade_date.desc()).limit(sampling_count).all()
            ma_20 = api.daily_close_ma(daily=weekly, step=20)
            ma_20_diff_1 = api.differ(ma_20)
            data_frame.loc[i, COL_MA_20] = ma_20[0]
            data_frame.loc[i, COL_MA_20_SLOPE] = round(ma_20_diff_1[0], 2)
            daily = main_session.query(models.DailyPro).filter(
                models.DailyPro.ts_code == stock_basic.ts_code,
                models.DailyPro.trade_date <= LAST_MARKET_DATE).order_by(
                models.DailyPro.trade_date.desc()).limit(sampling_count).all()
            # data_frame.loc[i, COL_DAILY_VIBRATION] = round(api.avg_vibration_chg(daily[:20]), 2)
            data_frame.loc[i, COL_LASTPRICE] = daily[0].close
            data_frame.loc[i, COL_DAILY_AGGRESSIVE_ACCUMULATION] = round(
                api.aggressive_chg_accumulation(daily[:5]), 2)
            # data_frame.loc[i, COL_DAILY_NEGATIVE_ACCUMULATION] = round(api.negative_chg_accumulation(daily[:5]), 2)
            # if data_frame.loc[i, COL_DAILY_NEGATIVE_ACCUMULATION] < 0:
            #     data_frame.loc[i, COL_DAILY_AGGRESSIVE_RATE] = round(
            #         - data_frame.loc[i, COL_DAILY_AGGRESSIVE_ACCUMULATION]
            #         / data_frame.loc[i, COL_DAILY_NEGATIVE_ACCUMULATION], 2)
            # else:
            #     data_frame.loc[i, COL_DAILY_AGGRESSIVE_RATE] = round(
            #         data_frame.loc[i, COL_DAILY_AGGRESSIVE_ACCUMULATION], 2)
            holders = main_session.query(models.FloatHolderPro).filter(
                models.FloatHolderPro.ts_code == stock_basic.ts_code).all()
            h_list = []
            for item in holders:
                h_list.append(item.holder_name)
            data_frame.loc[i, COL_FLOAT_HOLDERS] = '\n'.join(h_list)
        except Exception as e:
            print('exception in index:{index} {code} {name}'.format(
                index=i, code=stock_basic.ts_code, name=stock_basic.name))
            continue
        print('##### weekly diffuse {i} #####'.format(i=i))
    data_frame = data_frame[(data_frame[COL_MA_20_SLOPE] > 2)
                            # & (data_frame[COL_DAILY_VIBRATION] > 5)
                            & (data_frame[COL_DAILY_AGGRESSIVE_ACCUMULATION] > 0)]
    # data_frame = data_frame.sort_values(by=COL_MAXGAP, ascending=False).reset_index(drop=True)
    # data_frame = data_frame.iloc[:200]
    data_frame = data_frame.sort_values(by=COL_DAILY_AGGRESSIVE_ACCUMULATION,
                                        ascending=False).reset_index(drop=True)
    data_frame = data_frame.loc[:, ['ts_code', 'name', 'industry', COL_LASTPRICE, COL_MA_20_SLOPE,
                                    COL_DAILY_AGGRESSIVE_ACCUMULATION, COL_FLOAT_HOLDERS]]
    file_name = '{logs_path}/{date}@Weekly_diffuse.csv'.format(date=LAST_MARKET_DATE, logs_path=env.logs_path)
    # print(fileName)
    with open(file_name, 'w', encoding='utf8') as file:
        data_frame.to_csv(file)
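# Hedged sketch: api.aggressive_chg_accumulation is not shown in this section. Read
# against its commented-out negative_chg_accumulation counterpart above, a plausible
# interpretation is the sum of positive daily pct_chg values over the window. This is
# an assumption; the helper name below is hypothetical, not the project's code.
def aggressive_chg_accumulation_sketch(daily):
    """Sum the positive pct_chg values over the given daily bars."""
    return sum(bar.pct_chg for bar in daily if bar.pct_chg > 0)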