def _init_shared_objs(self):
    """Build the shared objects used by the runner (trades cache, features
    DB connection, arctic loader), honouring any overrides in self._kwargs.

    Raises ValueError when self._env is not one of prod/dev/mock.
    """
    if 'trades_cache' in self._kwargs:
        trades_cache = self._kwargs['trades_cache']
    else:
        trades_cache = TradesCache()

    env = self._env.lower()
    if env == 'prod':
        feature_conn = MongoPersister.init_from_config('features', auto_connect=True)
    elif env == 'dev':
        feature_conn = MongoPersister.init_from_config('features_dev', auto_connect=True)
    elif env == 'mock':
        # Route MongoClient through mongomock so no real database is touched.
        with patch('stratagemdataprocessing.dbutils.mongo.MongoClient', mongomock.MongoClient):
            feature_conn = MongoPersister.init_from_config('features', auto_connect=True)
    else:
        raise ValueError('Unknown Environment {}'.format(self._env))

    if 'arctic_storage' in self._kwargs:
        arctic_loader = self._kwargs['arctic_storage']
    else:
        config_name = 'arctic_crypto_dev' if env == 'dev' else 'arctic_crypto'
        arctic_db_conn = MongoPersister.init_from_config(config_name, auto_connect=True)
        arctic_loader = ArcticStorage(arctic_db_conn.client)

    self.SHARED_OBJECTS = {
        'trades_cache': trades_cache,
        'feature_conn': feature_conn,
        'arctic_loader': arctic_loader,
    }
def get_connection(sport):
    """Return an auto-connected MongoPersister for the given sport.

    The sport name is attached to the returned connection as `conn.sport`.
    Raises ValueError for any sport other than football/tennis/basketball.
    """
    config_by_sport = {
        'football': 'abc_football_v2',
        'tennis': 'abc_tennis_v2',
        'basketball': 'abc_basketball_v2',
    }
    if sport not in config_by_sport:
        raise ValueError("Bad sport")
    conn = MongoPersister.init_from_config(config_by_sport[sport], auto_connect=True)
    conn.sport = sport
    return conn
def _run_fun(self, x, x_nc, i, initial_=False):
    """Evaluate particle `i` of the PSO swarm with one backtest run.

    Builds the full parameter set (static params + the particle's dynamic
    values), runs the backtest, persists results and optimization metadata
    to the trading_dev database, and returns the particle's score.

    Parameters
    ----------
    x: indexable by (particle, param-index) — continuous parameter values
       (assumes a 2-D array-like; TODO confirm exact type)
    x_nc: per-particle list of non-continuous parameter values
    i: int, index of the particle being evaluated
    initial_: bool, forwarded to modify_parameters as `initial`

    Returns
    -------
    score: the particle's fitness as computed by self.get_score
    """
    params_to_use = self.static_params()
    dinamic_params = {}
    p = self.param_def
    i_nc = 0
    # Split parameters by position: indices listed in
    # _values_for_each_not_continuos_param come from x_nc, the rest from x.
    # NOTE(review): relies on p.iteritems() ordering being stable and
    # matching the column order of x — confirm p is an OrderedDict.
    for index, (k, v) in enumerate(p.iteritems()):
        if index not in self._values_for_each_not_continuos_param:
            dinamic_params[k] = x[i, index]
        else:
            dinamic_params[k] = x_nc[i][i_nc]
            # i_nc advances only when a non-continuous value is consumed.
            i_nc += 1
    params_to_use.update(dinamic_params)
    params_to_use = self.modify_parameters(params_to_use, initial=initial_)
    # Reflect any adjustments modify_parameters made back into the
    # dynamic-params dict so the persisted values match what was run.
    for k in dinamic_params:
        dinamic_params[k] = params_to_use[k]
    strategy_instance = self.strategy_instance(params_to_use)
    output = run_backtest(strategy_instance, self._start_time, self._end_time)
    conn = MongoPersister.init_from_config('trading_dev', auto_connect=True)
    self.save_on_backtest_results(conn, FootballSpreadChooseParamsPSO.strategy_name(), dinamic_params, output)
    score, metrics = self.get_score(output)
    optimization_params = {'particle': i, 'iteration': self._it}
    self.save_on_backtest_optimization(conn, dinamic_params, score, metrics, optimization_params)
    return score
def action_provider(sport, delay_ms=0, provider=None):
    """Build the historical action data provider for a sport.

    Parameters
    ----------
    sport: one of the Sports enum values
    delay_ms: artificial delay applied by providers that support it;
        BASKETBALL and CRYPTO reject any non-zero delay
    provider: optional Providers value, only honoured for TENNIS

    Raises ValueError for unknown sports/providers and NotImplementedError
    for unsupported delay_ms values.
    """
    if sport == Sports.TENNIS:
        if provider is None or provider == Providers.ENET:
            return HistoricalTennisActionDataProvider(
                delay_ms=delay_ms, strict=False)
        if provider == Providers.LSPORTS:
            return HistoricalLsportsTennisActionDataProvider(
                delay_ms=delay_ms, strict=False)
        raise ValueError("Bad provider {}".format(provider))

    if sport == Sports.FOOTBALL:
        mongo_odds = MongoPersister.init_from_config('odds', auto_connect=True).db
        return HistoricalCleanfeedFootballActionDataProvider(
            mongo_odds, delay_ms=delay_ms)

    if sport == Sports.BASKETBALL:
        if delay_ms > 0:
            raise NotImplementedError(
                "delay_ms not implemented for BASKETBALL")
        return HistoricalBasketballActionDataProvider()

    if sport == Sports.CRYPTO:
        if delay_ms > 0:
            raise NotImplementedError(
                "delay_ms not implemented for CRYPTO")
        return EmptyProvider()

    raise ValueError('Cannot create action provider for sport %s' % sport)
def get_arctic_data(lob_or_trades='trades'): mongo_client = MongoPersister.init_from_config('arctic_crypto', auto_connect=True) storage = ArcticStorage(mongo_client.client) if lob_or_trades == 'trades': subdir = os.path.join(local_cache, 'Trades') download_call_back = storage.load_trades data_ranges = storage.trades_range() elif lob_or_trades == 'lob': subdir = os.path.join(local_cache, 'LOB') download_call_back = storage.load_lob data_ranges = storage.lob_range() else: raise ValueError("You shouild be requesting one of LOB or trades") for symbol, (sd, ed) in data_ranges.iteritems(): symbol_dir = os.path.join(subdir, symbol) if not os.path.exists(symbol_dir): os.makedirs(symbol_dir) print "Reading data for {}".format(symbol) for dd in pd.date_range(sd, ed, freq='D'): data = download_call_back(symbol, sd, ed, as_df=True) if len(data) > 0: print "storing {}".format(dd.strftime('%Y%m%d')) data.to_csv(os.path.join(subdir, dd.strftime('%Y%m%d')))
def evaluate_spark(self, ind):
    """Evaluate one GA individual on a Spark worker.

    A fresh Mongo connection is opened here, inside the worker, because
    connections cannot be shipped from the driver.
    """
    worker_conn = MongoPersister.init_from_config('trading_dev', auto_connect=True)
    strategy_cls = PARAM_CLASS_MAP[self._strategy]
    ga = strategy_cls(self._opt_name, worker_conn, self._start_time,
                      self._end_time, self._static_params)
    return ga.evaluate_spark(ind)
def __init__(self, feature, mongo_connection=None, mongo_db='features', collection='timeseries'):
    """Deprecated timeseries feature storage; emits a DeprecationWarning.

    Use MongoCompactTimeseriesStorage instead.
    """
    warnings.warn('MongoTimeseriesFeatureStorage is deprecated,'
                  ' use MongoCompactTimeseriesStorage instead ',
                  DeprecationWarning)
    super(MongoTimeseriesFeatureStorage, self).__init__(feature)
    # Fall back to a fresh connection when none (or a falsy one) is given.
    conn = mongo_connection or MongoPersister.init_from_config(mongo_db, auto_connect=True)
    self._connection = conn
    self._collection = collection
def __init__(self, feature, mongo_connection=None, mongo_db='features',
             collection_stickers='timeseries_compact_s',
             collection_events='timeseries_compact_e', max_values=1000):
    """Compact Mongo-backed storage for TimeSeriesFeature values.

    Raises ValueError when `feature` is not a TimeSeriesFeature, since the
    compact layout only makes sense for timeseries data.
    """
    from sgmtradingcore.analytics.features.feature import TimeSeriesFeature
    if not isinstance(feature, TimeSeriesFeature):
        raise ValueError("This is optimized for timeseries only")
    super(MongoCompactTimeseriesStorage, self).__init__(feature)
    # Falsy/absent connection -> open one from config.
    self._connection = (mongo_connection or
                        MongoPersister.init_from_config(mongo_db, auto_connect=True))
    self._collection_s = collection_stickers
    self._collection_e = collection_events
    self._max_values = max_values
def _init_shared_objs(self):
    """Assemble the shared objects for this runner.

    Every entry can be overridden via self._kwargs; defaults are built
    lazily (only when no override is supplied) because several of them
    open real connections or eagerly load data.
    """
    kwargs = self._kwargs

    if 'cassandra_connection' in kwargs:
        cassandra_connection = kwargs['cassandra_connection']
    else:
        cassandra_connection = get_cassandra_connection()

    if 'fixture_cache' in kwargs:
        fixture_cache = kwargs['fixture_cache']
    else:
        fixture_cache = FixtureCache()

    if 'odds_cache' in kwargs:
        odds_cache = kwargs['odds_cache']
    else:
        odds_cache = HistoricalCassandraOddsCache(
            cassandra_connection=cassandra_connection,
            fixture_cache=fixture_cache, eager=True)

    if 'odds_cache_parse_false' in kwargs:
        odds_cache_parse_false = kwargs['odds_cache_parse_false']
    else:
        odds_cache_parse_false = HistoricalCassandraOddsCache(
            cassandra_connection=cassandra_connection,
            fixture_cache=fixture_cache, eager=True, parse=False)

    if 'mysql_client' in kwargs:
        mysql_client = kwargs['mysql_client']
    else:
        mysql_client = MySQLClient.init_from_config(config_name='mysql')

    if 'feature_conn' in kwargs:
        feature_conn = kwargs['feature_conn']
    else:
        feature_conn = MongoPersister.init_from_config('features', auto_connect=True)

    # Always opened fresh: no kwargs override existed for this one.
    abc_tennis_conn = MongoPersister.init_from_config('abc_tennis_v2', auto_connect=True)
    mysql_client.connect()

    self.SHARED_OBJECTS = {
        'cassandra_connection': cassandra_connection,
        'odds_cache': odds_cache,
        'odds_cache_parse_false': odds_cache_parse_false,
        'fixture_cache': fixture_cache,
        'mysql_client': mysql_client,
        'feature_conn': feature_conn,
        'abc_tennis_conn': abc_tennis_conn,
    }
def _get_existing_feature_id(cls, feature_name, params, feature_conn=None, runner=None):
    """Check the features database to see if an id for the params exists.

    The candidate documents matching `query` are re-converted into a query
    themselves; only a document whose round-tripped query is exactly equal
    to the original query counts as a match. This guards against partial
    matches where a stored document merely contains the queried params.

    Parameters
    ----------
    feature_name: str, name of the feature
    params: dict, parameters of the feature (sanitized before querying)
    feature_conn: optional open connection to the features database; when
        None, one is taken from `runner` or opened from config
    runner: used for sanitizing params and as a connection source

    Returns
    -------
    feature_id: str if found for these exact params, else None
    """
    close = False
    params = cls.sanitize_parameters(params, runner)
    if feature_conn is None:
        # Prefer the runner's connection; otherwise open a fresh one.
        # NOTE(review): close is set True even when the connection came
        # from runner.feature_conn() — this closes the runner's shared
        # connection too; confirm that is intended.
        feature_conn = runner.feature_conn(
        ) or MongoPersister.init_from_config('features', auto_connect=True)
        close = True
    query = cls.get_query_from_document(params, prefix='params', runner=runner)
    query['feature'] = feature_name
    existing_params = feature_conn.db['feature_ids'].find(query)
    # Lowest feature_id wins when several documents match.
    existing_params.sort('feature_id', 1)
    feature_id = None
    for p in existing_params:
        pp = {'params': p['params'], 'feature': p['feature']}
        # NOTE(review): module-level get_query_from_document here vs the
        # classmethod variant above — presumably equivalent for this
        # document shape (nested 'params' yields the same 'params.' keys);
        # confirm.
        new_params_check_query = get_query_from_document(pp)
        if new_params_check_query == query:
            feature_id = p['feature_id']
            break
    if close:
        feature_conn.close()
    return feature_id
def param_sets_from_configs(config_ids, mongo_client=None):
    """Fetch the variable parameter sets for backtest configuration ids.

    Parameters
    ----------
    config_ids: iterable of configuration id strings
    mongo_client: optional client; when None a trading_dev connection is
        opened here and closed before returning

    Returns
    -------
    dict mapping config id -> its 'variable_params' (ids with no matching
    document are omitted)
    """
    owns_connection = mongo_client is None
    if owns_connection:
        mongo_client = MongoPersister.init_from_config('trading_dev', auto_connect=True)
        mongo_client.connect()

    param_sets = {}
    for cfg_id in config_ids:
        doc = mongo_client['backtest_configurations'].find_one(
            {'_id': ObjectId(cfg_id)})
        if doc:
            param_sets[cfg_id] = doc['variable_params']

    if owns_connection:
        mongo_client.close()
    return param_sets
def main():
    """Compare runner-produced OHLC candles against candles rebuilt
    directly from arctic trades for a fixed set of crypto tickers."""
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(name)s - '
                               '%(levelname)s - %(message)s')
    tickers = ['BTCUSD.SPOT.BITS', 'BTCUSD.SPOT.BITF', 'BTCUSD.PERP.BMEX']
    frequency = '1d'
    request = [
        FeatureRequest(
            'OHLCTrades',
            {
                'frequency': frequency,
            },
            {},
        ),
    ]
    first_valid = get_first_valid_datetime()
    last_valid = get_last_valid_datetime()
    column_names = ['open', 'high', 'low', 'close']
    column_names_a = ['a_open', 'a_high', 'a_low', 'a_close']
    for ticker in tickers:
        # Fresh runner per ticker so no state leaks between iterations.
        # (The previous version also created an unused runner before the
        # loop; that dead construction has been removed.)
        runner = CryptoFeatureRunner()
        start_dt = first_valid[ticker]
        end_dt = last_valid[ticker]
        df = runner.get_merged_dataframes(request, start_dt, end_dt)

        mongo_client = MongoPersister.init_from_config('arctic_crypto', auto_connect=True)
        arctic = ArcticStorage(mongo_client.client)
        arctic_trades_df = arctic.load_trades(ticker, start_dt, end_dt)
        timestamps = arctic_trades_df.index.to_pydatetime().tolist()
        prices = arctic_trades_df['price'].tolist()
        arctic_candles = make_ohlc_candles(timestamps=timestamps, values=prices,
                                           period=timedelta(days=1))
        # Prefix arctic-derived columns so they don't collide with df's.
        arctic_candles.columns = [
            'a_' + str(col) for col in arctic_candles.columns
        ]
        # NOTE(review): merge without explicit keys joins on common columns;
        # after the 'a_' renaming there may be none left — confirm an
        # index-based join was not intended here.
        df1 = pd.merge(df, arctic_candles)
        compare_candles(ticker, df1, column_names, column_names_a)
def default_abc_connection(self):
    """Open and return an auto-connected MongoPersister for the 'abc' config."""
    connection = MongoPersister.init_from_config('abc', auto_connect=True)
    return connection
help='Strategy1,Strategy2', type=valid_string_list) parser.add_argument('--startdate', help='First date to close format YYYY-MM-DD ', required=True, type=valid_date) parser.add_argument('--enddate', help='Last date to close - format YYYY-MM-DD ', required=True, type=valid_date) args = parser.parse_args() trading_user_id = ObjectId(args.trading_user_id) strategies = args.strategies crypto_trading_conn = MongoPersister.init_from_config('crypto_trading', auto_connect=True).db # look for trades in orders from up to 7 days ago load_start_date = args.startdate - timedelta(days=7) load_end_date = datetime.combine(args.enddate, CLOSING_TIME) all_orders = list(crypto_trading_conn['orders'].find({ 'trading_user_id': ObjectId(trading_user_id), 'placed_time': { '$gte': load_start_date, '$lt': load_end_date }, })) orders_by_strategy = defaultdict(dict)
def connect_to_mongo(database):
    """Return an auto-connected MongoPersister for the named config entry."""
    return MongoPersister.init_from_config(database, auto_connect=True)