def test_data_meta_persistence(self):
    """Round-trip a DataMeta through save()/load() and check every field survives."""
    meta_kwargs = dict(
        freq='5b',
        universe=Universe('zz800'),
        batch=4,
        neutralized_risk=['SIZE'],
        risk_model='long',
        pre_process=['standardize', 'winsorize_normal'],
        post_process=['standardize', 'winsorize_normal'],
        warm_start=2,
        data_source='postgresql://*****:*****@server/dummy',
    )
    original = DataMeta(**meta_kwargs)
    restored = DataMeta.load(original.save())
    # One assertion per persisted attribute — same checks as writing them out longhand.
    for attr in ('freq', 'universe', 'batch', 'neutralized_risk', 'risk_model',
                 'pre_process', 'post_process', 'warm_start', 'data_source'):
        self.assertEqual(getattr(original, attr), getattr(restored, attr))
def fetch_model(self,
                ref_date=None,
                model_type=None,
                model_version=None,
                is_primary=True,
                model_id=None) -> pd.DataFrame:
    """Fetch persisted models matching the given filters.

    Builds an AND-combined filter over the ``Models`` table from whichever of
    ``ref_date``, ``model_id``, ``model_type`` and ``model_version`` are truthy
    (``is_primary`` is always applied), then deserializes the stored
    ``model_desc`` and ``data_meta`` payloads into live objects.

    Returns a DataFrame with a ``model`` column (deserialized via
    ``load_model``) and the ``data_meta`` column replaced by ``DataMeta``
    instances; the raw ``model_desc`` column is dropped.
    """
    # Local import to avoid a circular dependency with the composer module.
    from alphamind.model.composer import DataMeta

    conditions = []
    if ref_date:
        conditions.append(Models.trade_date == ref_date)
    if model_id:
        conditions.append(Models.model_id == model_id)
    if model_type:
        conditions.append(Models.model_type == model_type)
    if model_version:
        conditions.append(Models.model_version == model_version)
    conditions.append(Models.is_primary == is_primary)

    query = select([Models]).where(and_(*conditions))
    model_df = pd.read_sql(query, self.engine)

    # Deserialize in bulk and assign whole columns. The previous per-row
    # `model_df.loc[i, ...]` pattern paired a positional enumerate counter
    # with label-based indexing (wrong unless the index is a RangeIndex) and
    # coerced objects through existing column dtypes; whole-column assignment
    # is index-safe and also correct when the query returns no rows.
    model_df['model'] = [load_model(desc) for desc in model_df.model_desc]
    model_df['data_meta'] = [DataMeta.load(desc) for desc in model_df.data_meta]
    del model_df['model_desc']
    return model_df
def test_composer_persistence(self):
    """Save a Composer and reload it, verifying the round trip preserves it."""
    meta = DataMeta(freq='5b',
                    universe=Universe('zz800'),
                    batch=4,
                    neutralized_risk=['SIZE'],
                    risk_model='long',
                    pre_process=['standardize', 'winsorize_normal'],
                    post_process=['standardize', 'winsorize_normal'],
                    warm_start=2,
                    data_source='postgresql://*****:*****@server/dummy')
    model = XGBClassifier(features={'f1': 'closePrice', 'f2': 'openPrice'})
    original = Composer(alpha_model=model, data_meta=meta)
    reloaded = Composer.load(original.save())
    self._assert_composer_equal(original, reloaded)