def do_train():
    """Train prediction models for every listed stock over a set of horizons.

    For each stock whose history starts no later than the CSI-300 index
    ('399300'), trains one model per horizon in ``days`` (window fixed at
    max horizon + 1), skipping any (code, window, day) combination whose
    last training run is less than ``skip_days`` days old.
    """
    days = [1, 2, 3, 4, 5]
    skip_days = 10  # do not retrain a combination within 10 days of its last run
    window = days[-1] + 1
    start = '1990-01-01'
    end = QA_util_datetime_to_strdate(datetime.today())
    lst = QA.QA_fetch_stock_list_adv().code.values
    index = QA.QA_fetch_index_day_adv('399300', start=start, end=end)
    # enumerate replaces list(lst).index(code): the old form was O(n^2) and
    # reported the first occurrence's position for duplicate codes.
    for pos, code in enumerate(lst, start=1):
        stock = QA.QA_fetch_stock_day_adv(code, start=start, end=end)
        # Only train stocks whose history begins no later than the index's.
        if stock and stock.date[0].date() <= index.date[0].date():
            logging.info(
                '{0}/{1} - {2}'.format(pos, len(lst), code))
            for day in days:
                dt = get_last_train_date(code, window, day)
                # `is not None` instead of `!= None` (PEP 8 identity check).
                if dt is not None and dt + timedelta(
                        days=skip_days) > datetime.today():
                    logging.info(
                        'SKIP:{0},Window:{1}.LastTrain:{2}.距今小于 {3}'.format(
                            code, window, QA_util_datetime_to_strdate(dt),
                            skip_days
                        ))
                    continue
                do(code, window=window, days=day)
                logging.info('window={} Done.'.format(window))
            logging.info(
                '{0}/{1} - {2} - Done.'.format(pos, len(lst), code))
        else:
            logging.info("SKIP:" + code)
def QA_SU_save_report_calendar_day(client=DATABASE, ui_log=None, ui_progress=None):
    """Save the financial-report calendar for the trailing 12-to-3-month window.

    Fetches one calendar per quarter-end date and bulk-inserts the rows into
    the ``report_calendar`` collection. The unique (code, report_date) index
    rejects duplicates, so re-runs are idempotent per row; failed dates are
    collected in ``err`` and logged at the end.

    :param client: Mongo database handle (defaults to project ``DATABASE``)
    :param ui_log: optional UI log sink passed through to QA_util_log_info
    :param ui_progress: optional UI progress sink
    :return: None
    """
    END_DATE = QA_util_datetime_to_strdate(
        QA_util_add_months(QA_util_today_str(), -3))
    START_DATE = QA_util_datetime_to_strdate(
        QA_util_add_months(QA_util_today_str(), -12))
    # Column 1 of QA_util_getBetweenQuarter holds the quarter-end dates.
    date_list = list(
        pd.DataFrame.from_dict(QA_util_getBetweenQuarter(START_DATE,
                                                         END_DATE)).T.iloc[:, 1])
    report_calendar = client.report_calendar
    report_calendar.create_index([("code", pymongo.ASCENDING),
                                  ("report_date", pymongo.ASCENDING)],
                                 unique=True)
    err = []

    def __saving_work(report_date, report_calendar):
        # One best-effort insert per quarter date; failures are recorded, not raised.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving Report_Calendar==== {}'.format(
                    str(report_date)), ui_log)
            report_calendar.insert_many(QA_util_to_json_from_pandas(
                QA_fetch_get_financial_calendar(report_date)),
                ordered=False)
        except Exception as error0:
            print(error0)
            err.append(str(report_date))

    total = len(date_list)
    # enumerate avoids the O(n^2) repeated date_list.index(item) lookups.
    for pos, item in enumerate(date_list, start=1):
        QA_util_log_info('The {} of Total {}'.format(pos, total))
        # BUGFIX: ui_log was mistakenly passed as a second positional argument
        # to str.format (where it was silently ignored); it belongs only to
        # the QA_util_log_info call below.
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(
            str(float(pos / total * 100))[0:4] + '%')
        intProgressToLog = int(float(pos / total * 100))
        QA_util_log_info(strProgressToLog,
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgressToLog)
        __saving_work(item, report_calendar)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save report calendar ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def start_code(path, code, window, days):
    """Run a prediction for one stock and assemble its summary record.

    :param path: model/storage path handed to Predict
    :param code: stock code
    :param window: training window size
    :param days: forecast horizon in days
    :return: dict with price history endpoints, forecast percentages,
             forecast prices, accuracy and last-training-date fields
    """
    p = Predict(path, code, window, days)
    ds, y, feature_price = p.do_prediction()
    del p  # release the predictor before assembling the result
    d = {}
    d['code'] = code
    d['name'] = QA.QA_fetch_stock_list_adv().loc[code]['name']
    d['window'] = window
    d['days'] = days
    d['last_date'] = ds.index[-1].date()
    d['last_price'] = ds.iloc[-1]['close']
    d['first_date'] = ds.index[0].date()
    d['first_price'] = ds.iloc[0]['close']
    # BUGFIX: ds['close'][-1] relies on deprecated positional fallback for
    # label indexing and raises KeyError on non-integer indexes in modern
    # pandas; .iloc is the explicit positional accessor.
    d['last_change'] = ds['close'].iloc[-1] / ds['close'].iloc[-2] - 1
    d['precents'] = y
    d['feature_price'] = feature_price
    d['acc'] = get_train_acc(code, window, days)
    last_dt = get_last_train_date(code, window, days)
    d['last_train_date'] = QA_util_datetime_to_strdate(
        last_dt) if last_dt else ''
    logging.info('{0}_{1}_{2} Done.'.format(code, window, days))
    return d
def QA_SU_save_stock_shares_day(client=DATABASE, ui_log=None, ui_progress=None):
    """Save SINA share-structure change records for every listed stock.

    Bulk-inserts rows into the ``stock_shares`` collection; the unique
    compound index rejects duplicates so re-runs are idempotent per row.
    Failed codes are collected in ``err`` and logged at the end.

    :param client: Mongo database handle (defaults to project ``DATABASE``)
    :param ui_log: optional UI log sink passed through to QA_util_log_info
    :param ui_progress: optional UI progress sink
    :return: None
    """
    # NOTE: the original computed START_DATE/END_DATE here but never used
    # them; the unused locals were removed.
    code = list(QA_fetch_stock_list_adv()['code'])
    stock_shares = client.stock_shares
    stock_shares.create_index([("code", pymongo.ASCENDING),
                               ("begin_date", pymongo.ASCENDING),
                               ('total_shares', pymongo.DESCENDING),
                               ('reason', pymongo.DESCENDING),
                               ('send_date', pymongo.DESCENDING)],
                              unique=True)
    err = []

    def __saving_work(one_code, stock_shares):
        # Best-effort insert per code; failures are recorded, not raised.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving SSINA shares change==== {}'.format(
                    str(one_code)), ui_log)
            stock_shares.insert_many(QA_util_to_json_from_pandas(
                QA_fetch_get_stock_shares_sina(one_code)),
                ordered=False)
            gc.collect()
        except Exception as error0:
            print(error0)
            err.append(str(one_code))

    total = len(code)
    # enumerate avoids the O(n^2) repeated code.index(item) lookups.
    for pos, item in enumerate(code, start=1):
        QA_util_log_info('The {} of Total {}'.format(pos, total))
        # BUGFIX: ui_log was mistakenly passed as a second positional argument
        # to str.format (silently ignored); it belongs to QA_util_log_info.
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(
            str(float(pos / total * 100))[0:4] + '%')
        intProgressToLog = int(float(pos / total * 100))
        QA_util_log_info(strProgressToLog,
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgressToLog)
        __saving_work(item, stock_shares)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save SINA shares change ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_stock_divyield_his(client=DATABASE, ui_log=None,
                                  ui_progress=None):
    """Save historical dividend-yield data, one fetch per quarter-end date.

    Covers 2007-01-01 up to three months ago and bulk-inserts the rows into
    the ``stock_divyield`` collection; the unique compound index rejects
    duplicates so re-runs are idempotent per row. Failed dates are collected
    in ``err`` and logged at the end.

    :param client: Mongo database handle (defaults to project ``DATABASE``)
    :param ui_log: optional UI log sink passed through to QA_util_log_info
    :param ui_progress: optional UI progress sink
    :return: None
    """
    START_DATE = '2007-01-01'
    END_DATE = QA_util_datetime_to_strdate(
        QA_util_add_months(QA_util_today_str(), -3))
    # Column 1 of QA_util_getBetweenQuarter holds the quarter-end dates.
    date_list = list(
        pd.DataFrame.from_dict(QA_util_getBetweenQuarter(START_DATE,
                                                         END_DATE)).T.iloc[:, 1])
    stock_divyield = client.stock_divyield
    stock_divyield.create_index([("a_stockcode", pymongo.ASCENDING),
                                 ("report_date", pymongo.ASCENDING),
                                 ("reg_date", pymongo.ASCENDING)],
                                unique=True)
    err = []

    def __saving_work(report_date, stock_divyield):
        # Best-effort insert per quarter date; failures are recorded, not raised.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving stock_divyield==== {}'.format(
                    str(report_date)), ui_log)
            stock_divyield.insert_many(QA_util_to_json_from_pandas(
                QA_fetch_get_stock_divyield(report_date)),
                ordered=False)
        except Exception as error0:
            print("error : {code}".format(code=error0))
            err.append(str(report_date))

    total = len(date_list)
    # enumerate avoids the O(n^2) repeated date_list.index(item) lookups.
    for pos, item in enumerate(date_list, start=1):
        QA_util_log_info('The {} of Total {}'.format(pos, total))
        # BUGFIX: ui_log was mistakenly passed as a second positional argument
        # to str.format (silently ignored); it belongs to QA_util_log_info.
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(
            str(float(pos / total * 100))[0:4] + '%')
        intProgressToLog = int(float(pos / total * 100))
        QA_util_log_info(strProgressToLog,
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgressToLog)
        __saving_work(item, stock_divyield)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock divyield ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def write_web(template, path, result):
    """Render *result* through *template* and write the HTML page to *path*.

    Writes nothing when *result* is empty.

    :param template: template object exposing ``render(title=..., result=...)``
    :param path: destination file path for the rendered HTML
    :param result: sequence of result records; falsy means skip
    """
    # Truthiness check instead of len(result) > 0 (PEP 8).
    if not result:
        logging.info('WebPage Skip.')
        return
    # Render before opening the file so a render failure cannot leave a
    # truncated/empty page on disk.
    html = template.render(
        title=QA_util_datetime_to_strdate(datetime.datetime.today()),
        result=result)
    with open(path, 'w', encoding='utf-8') as f:
        f.write(html)
    logging.info('WebPage Saved at:' + path)
def save_train_record(code, window, days, values: dict = None):
    """Create or update the training record for (code, window, days).

    Finds the matching record in the per-code JSON record file (creating one
    if absent), merges *values* into it, stamps the current time, and rewrites
    the whole file.

    :param code: stock code whose record file is updated
    :param window: training window size stored on the record
    :param days: forecast horizon stored on the record
    :param values: optional extra key/value pairs merged into the record
    """
    # BUGFIX: the default was a shared mutable dict ({}); use the None
    # sentinel instead. Falsy values (None or {}) behave identically below.
    file = _get_train_record_filepath(code)
    records = _read_train_record(code)
    record = _find_train_record(records, window, days)
    if record is None:  # `is None`, not `== None` (PEP 8)
        record = {}
        records.append(record)
    record['window'] = window
    record['days'] = days
    record['time'] = QA_util_datetime_to_strdate(datetime.now())
    if values:
        record.update(values)  # dict.update replaces the manual copy loop
    with open(file, mode='wt', encoding='utf-8') as f:
        json.dump(records, f, indent=2, sort_keys=True)
def QA_fetch_financial_report_adv(code, start='all', type='report', end=None):
    """Advanced financial-report query wrapped in QA_DataStruct_Financial.

    Arguments:
        code {[type]} -- stock code(s) to query
        start {[type]} -- start date string, or 'all' for full history

    Keyword Arguments:
        type {str} -- 'report' or 'date' fetch mode (default: {'report'})
        end {[type]} -- end date string (default: {None}, meaning ``start``)

    NOTE(review): ``type`` shadows the builtin; kept for interface
    compatibility. Returns None when ``type`` is neither 'report' nor 'date'.
    """
    # A missing end collapses the range to a single day at `start`.
    end = start if end is None else end
    start = str(start)[0:10]
    end = str(end)[0:10]
    if start == 'all':
        start = '1990-01-01'
        end = str(datetime.date.today())
    # NOTE(review): after the assignment above, `end` can no longer be None
    # here, so this branch (quarter-list fetch) looks unreachable — every call
    # appears to take the date-range path below. Confirm intent before fixing.
    if end is None:
        end = str(datetime.date.today())
        # Column 1 of QA_util_getBetweenQuarter holds quarter-end dates;
        # the -3-month shift presumably excludes the not-yet-published quarter.
        date_list = list(
            pd.DataFrame.from_dict(
                QA_util_getBetweenQuarter(
                    start,
                    QA_util_datetime_to_strdate(QA_util_add_months(
                        end, -3)))).T.iloc[:, 1])
        if type == 'report':
            return QA_DataStruct_Financial(
                QA_fetch_financial_report(code, date_list))
        elif type == 'date':
            return QA_DataStruct_Financial(
                QA_fetch_financial_report(code, date_list, type='date'))
    else:
        # Fetch by every calendar day in [start, end] instead of by quarter.
        daterange = pd.date_range(start, end)
        timerange = [item.strftime('%Y-%m-%d') for item in list(daterange)]
        if type == 'report':
            return QA_DataStruct_Financial(
                QA_fetch_financial_report(code, timerange))
        elif type == 'date':
            return QA_DataStruct_Financial(
                QA_fetch_financial_report(code, timerange, type='date'))
def QA_SU_save_financial_report_day(client=DATABASE, ui_log=None,
                                    ui_progress=None):
    """Save SINA financial reports for stocks with a report due in the past week.

    Uses the financial-report calendar over the last 7 days to select codes,
    fetches each code's reports for the current and previous year, and
    bulk-inserts them into ``stock_financial_sina``. The unique
    (code, report_date) index rejects duplicates; failed codes are collected
    in ``err`` and logged at the end.

    :param client: Mongo database handle (defaults to project ``DATABASE``)
    :param ui_log: optional UI log sink passed through to QA_util_log_info
    :param ui_progress: optional UI progress sink
    :return: None
    """
    END_DATE = QA_util_today_str()
    START_DATE = QA_util_datetime_to_strdate(
        QA_util_add_days(QA_util_today_str(), -7))
    END_YEAR = QA_util_today_str()[0:4]
    START_YEAR = QA_util_datetime_to_strdate(
        QA_util_add_years(QA_util_today_str(), -1))[0:4]
    YEARS = [END_YEAR, START_YEAR]
    code = list(
        QA_fetch_stock_financial_calendar_adv(
            list(QA_fetch_stock_list_adv()['code']), START_DATE,
            END_DATE).data['code'])
    stock_financial_sina = client.stock_financial_sina
    stock_financial_sina.create_index([("code", pymongo.ASCENDING),
                                       ("report_date", pymongo.ASCENDING)],
                                      unique=True)
    err = []

    def __saving_work(one_code, stock_financial_sina):
        # Best-effort insert per code; failures are recorded, not raised.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving SINA financial_report==== {}'.format(
                    str(one_code)), ui_log)
            stock_financial_sina.insert_many(QA_util_to_json_from_pandas(
                QA_fetch_get_stock_report_sina(one_code, YEARS)),
                ordered=False)
            gc.collect()
        except Exception as error0:
            print(error0)
            err.append(str(one_code))

    total = len(code)
    # enumerate avoids the O(n^2) repeated code.index(item) lookups and gives
    # correct progress even when the calendar yields duplicate codes.
    for pos, item in enumerate(code, start=1):
        QA_util_log_info('The {} of Total {}'.format(pos, total))
        # BUGFIX: ui_log was mistakenly passed as a second positional argument
        # to str.format (silently ignored); it belongs to QA_util_log_info.
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(
            str(float(pos / total * 100))[0:4] + '%')
        intProgressToLog = int(float(pos / total * 100))
        QA_util_log_info(strProgressToLog,
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgressToLog)
        __saving_work(item, stock_financial_sina)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save SINA financial_report ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def default(obj):
    """JSON fallback serializer for documents containing dates and Decimals.

    Dates (and datetimes — datetime is a subclass of date) serialize as
    {"$dt": "YYYY-MM-DD"}; Decimals become plain floats; everything else is
    delegated to bson's json_util.default.

    :param obj: object json could not serialize natively
    :return: a JSON-serializable replacement
    :raises TypeError: from json_util.default for unsupported types
    """
    if isinstance(obj, datetime.date):
        return {"$dt": QA_util_datetime_to_strdate(obj)}
    if isinstance(obj, decimal.Decimal):
        # BUGFIX: json_util.default(float(obj)) raises TypeError because
        # bson's fallback rejects types json already handles (like float);
        # return the float directly instead.
        return float(obj)
    return json_util.default(obj)