def factor_analysis(f_name):
    """Run a residue analysis for one factor over the zz800 universe.

    The factor is orthogonalized (via nested cross-sectional residuals)
    against Alpha60 and against roe_q / ep_q, which are themselves
    residualized against Alpha60 first.

    :param f_name: name of the raw factor to analyse
    :return: tuple of (f_name, analysis result DataFrame)
    """
    from alphamind.api import SqlEngine, Universe, alpha_logger

    engine = SqlEngine()
    universe = Universe('custom', ['zz800'])

    # Orthogonalization chain: each base is residualized against the
    # previous ones so the final factor is neutral to all three.
    alpha_base = LAST('Alpha60')
    roe_res = CSRes('roe_q', alpha_base)
    ep_res = CSRes(CSRes('ep_q', alpha_base), roe_res)
    neutralized = CSRes(CSRes(CSRes(LAST(f_name), alpha_base), roe_res), ep_res)

    res = factor_residue_analysis('2010-01-01',
                                  '2018-01-26',
                                  f_name,
                                  neutralized,
                                  '10b',
                                  universe,
                                  engine)
    alpha_logger.info('{0} is done'.format(f_name))
    return f_name, res
def worker_func_negative(factor_name):
    """Run the shared factor_analysis routine on the negative side.

    Uses the zz500 universe against benchmark 905, neutralizing the
    factor against roe_q and ep_q.

    :param factor_name: name of the factor to analyse
    :return: whatever the imported factor_analysis returns
    """
    from alphamind.api import SqlEngine, Universe

    engine = SqlEngine()
    bm_code = 905
    universe = Universe('custom', ['zz500'])

    return factor_analysis(engine,
                           factor_name,
                           universe,
                           bm_code,
                           positive=False,
                           neutralize_factors=['roe_q', 'ep_q'])
# NOTE(review): this chunk previously began with a header-less duplicate of the
# factor_analysis body (a `return` outside any function — invalid at top level),
# an artifact of the mangled extraction overlapping the function defined above.
# The orphan fragment and the trailing commented-out loop have been removed.
if __name__ == '__main__':
    from dask.distributed import Client

    # Connect to the dask scheduler that fans the per-factor jobs out.
    client = Client('10.63.6.176:8786')

    engine = SqlEngine()

    # Keep only factors with >= 98% coverage on zz800.
    df = engine.fetch_factor_coverage()
    df = df[df.universe == 'zz800'].groupby('factor').mean()
    df = df[df.coverage >= 0.98]

    universe = Universe('custom', ['zz800'])
    factor_df = pd.DataFrame()

    # One factor_analysis task per surviving factor name.
    tasks = client.map(factor_analysis, df.index.tolist())
    res = client.gather(tasks)

    # Renamed loop variable (was `df`) so it no longer shadows the
    # coverage DataFrame above.
    for f_name, ret_df in res:
        factor_df[f_name] = ret_df['$top1 - bottom1$']
# Airflow DAG setup: daily (Mon-Fri, 01:00) uqer -> postgres data update.
start_date = dt.datetime(2018, 5, 4)
dag_name = 'update_uqer_data_postgres'
default_args = {
    'owner': 'wegamekinglc',
    'depends_on_past': True,  # each run requires the previous run to have succeeded
    'start_date': start_date
}
dag = DAG(dag_id=dag_name,
          default_args=default_args,
          schedule_interval='0 1 * * 1,2,3,4,5')

# Credentials come from the environment, not from the source.
_ = uqer.Client(token=os.environ['DATAYES_TOKEN'])
engine = sqlalchemy.create_engine(os.environ['DB_URI'])
alpha_engine = SqlEngine(os.environ['DB_URI'])


def process_date(ds):
    """Parse an Airflow date string ('%Y-%m-%d') into (ref_date, this_date).

    :param ds: execution date string as passed by Airflow
    :return: tuple of (compact 'YYYYMMDD' string, datetime object)
    """
    alpha_logger.info("Loading data at {0}".format(ds))
    this_date = dt.datetime.strptime(ds, '%Y-%m-%d')
    ref_date = this_date.strftime('%Y%m%d')
    return ref_date, this_date


def format_data(df, format='%Y%m%d'):
    """Convert df['trade_date'] to datetime in place using *format*."""
    df['trade_date'] = pd.to_datetime(df['trade_date'], format=format)


def check_holiday(this_date):
    # NOTE(review): this chunk appears truncated here — check_holiday very
    # likely continues (e.g. acting on `flag`) beyond the visible source.
    flag = isBizDay('china.sse', this_date)
from alphamind.api import (
    SqlEngine,  # fix: used below (engine = SqlEngine(...)) but was missing from this import
    Universe,
    map_freq,
    risk_styles,
    industry_styles,
    macro_styles,
    BoundaryType,
    create_box_bounds
)

"""
Back test parameter settings
"""

start_date = '2010-01-01'
end_date = '2018-02-28'
category = 'sw_adj'
level = 1
freq = '20b'
universe = Universe('custom', ['zz800'])
# SECURITY(review): database credentials are hard-coded in the DSN — prefer
# reading it from an environment variable (cf. DB_URI usage elsewhere).
data_source = 'postgres+psycopg2://postgres:[email protected]/alpha'
engine = SqlEngine(data_source)
horizon = map_freq(freq)  # holding horizon implied by the rebalance frequency

"""
Factor Model
"""

factor_name = 'SIZE'

"""
Constraints
"""

# Neutralize all style/industry exposures except the factor under test.
risk_names = list(set(risk_styles).difference({factor_name}))
industry_names = list(set(industry_styles).difference({factor_name}))
constraint_risk = risk_names + industry_names + macro_styles