def __init__(self, log=logtoscreen("csvRollParametersData"), datapath=arg_not_supplied):
    """
    Roll parameters data read from a csv config file.

    :param log: logger instance
    :param datapath: package 'dot' format path to the directory holding the
        rolls config csv; defaults to ROLLS_DATAPATH
    """
    super().__init__(log=log)
    if datapath is arg_not_supplied:
        datapath = ROLLS_DATAPATH
    config_file = get_filename_for_package(datapath, ROLLS_CONFIG_FILE)
    # FIX: the already-resolved filename was being passed through
    # get_filename_for_package a second time; use the resolved path directly
    self._config_file = config_file
def pickle_cache(self, relativefilename, fullfilename=None):
    """
    Save everything in the cache to a pickle EXCEPT 'nopickle' items

    :param relativefilename: cache location filename in 'dot' format eg
        'systems.basesystem.py' is this file
    :type relativefilename: str

    :param fullfilename: full filename
    :type fullfilename: str

    :returns: None
    """
    # Resolve target path: an explicit full filename wins
    if fullfilename is None:
        resolved_filename = get_filename_for_package(relativefilename)
    else:
        resolved_filename = fullfilename

    # Everything with data, minus the items flagged as unpicklable
    excluded_items = self.get_nopickle_items()
    names_to_pickle = [
        name
        for name in self.get_items_with_data()
        if name not in excluded_items
    ]

    cache_subset = self.partial_cache(names_to_pickle)
    with open(resolved_filename, "wb") as fhandle:
        pickle.dump(cache_subset, fhandle)
def unpickle(self, relativefilename, fullfilename=None, clearcache=True):
    """
    Loads the saved cache

    Note that certain elements (accountCurve objects and optimisers) won't
    be pickled, and so won't be loaded. You will need to regenerate these.

    If clearcache is True then we clear the entire cache first. Otherwise
    we end up with a 'mix' - not advised so do at your peril

    :param relativefilename: cache location filename in 'dot' format eg
        'systems.basesystem.py' is this file
    :type relativefilename: str

    :param fullfilename: full filename, overrides relativefilename
    :type fullfilename: str

    :param clearcache: Clear the entire cache, or overwrite what we have?
    :type clearcache: bool

    :returns: None
    """
    resolved_filename = (
        get_filename_for_package(relativefilename)
        if fullfilename is None
        else fullfilename
    )

    # NOTE(review): pickle.load should only be used on trusted files
    with open(resolved_filename, "rb") as fhandle:
        loaded_cache = pickle.load(fhandle)

    if clearcache:
        self.clear()

    for itemname, item in loaded_cache.items():
        self[itemname] = item
def store_backtest_state(data, system, strategy_name="default_strategy", backtest_config_filename=arg_not_supplied):
    """
    Store a pickled backtest state and backtest config for a system

    :param data: data object, used to access the log
    :param system: a system object which has run
    :param strategy_name: str
    :param backtest_config_filename: the filename of the config used to run the backtest
    :return: success
    """
    if backtest_config_filename is arg_not_supplied:
        error_msg = "Have to provide a backtest config file name to store state"
        data.log.warn(error_msg)
        raise Exception(error_msg)

    filename_prefix = get_state_filename_prefix(strategy_name)

    # Pickled system state
    pickle_state(data, system, filename_prefix + "_backtest.pck")

    # Copy of the config used for this run, stored alongside the state
    config_source = get_filename_for_package(backtest_config_filename)
    copy_config_file(data, config_source, filename_prefix + "_config.yaml")

    return success
def _filename_given_instrument_code_and_contract_date(
    self, instrument_code, contract_date
):
    # Build a futuresContract to get its canonical `date` attribute for
    # the filename
    contract = futuresContract(instrument_code, contract_date)
    csv_name = "%s_%s.csv" % (instrument_code, contract.date)
    return get_filename_for_package(self._datapath, csv_name)
def _filename_given_instrument_strategy(
        self, instrument_strategy: instrumentStrategy):
    # one csv per (strategy, instrument) pair inside the datapath directory
    csv_name = "%s_%s.csv" % (
        instrument_strategy.strategy_name,
        instrument_strategy.instrument_code,
    )
    return get_filename_for_package(self._datapath, csv_name)
def get_data(path):
    """
    Read a csv from the given package 'dot' format path.

    returns: DataFrame or Series if 1 col
    """
    frame = pd_readcsv(get_filename_for_package(path))
    if len(frame.columns) == 1:
        # single column: hand back just that column as a Series
        only_column = frame.columns[0]
        return frame[only_column]
    return frame
def __init__(self, datapath=arg_not_supplied, log=logtoscreen("csvRollStateData")):
    """
    Roll state data read from roll_state.csv in the given directory.

    :param datapath: package 'dot' format directory; required
    :param log: logger instance
    :raises Exception: if no datapath is supplied
    """
    super().__init__(log=log)
    if datapath is arg_not_supplied:
        raise Exception("Datapath needs to be passed")
    config_file = get_filename_for_package(datapath, "roll_state.csv")
    self._config_file = config_file
    self.name = "Roll state data from %s" % config_file
def __init__(self, config_path=INSTRUMENT_CONFIG_PATH):
    """
    Instrument data read from a csv config file.

    :param config_path: package 'dot' format path; None falls back to
        INSTRUMENT_CONFIG_PATH
    """
    super().__init__()
    if config_path is None:
        config_path = INSTRUMENT_CONFIG_PATH
    config_file = get_filename_for_package(
        "%s.%s" % (config_path, CONFIG_FILE_NAME)
    )
    self._config_file = config_file
    self.name = "Instruments data from %s" % config_file
def get_private_config():
    """
    Load the private config yaml file as a dict.

    :return: dict; empty if the file is missing or unreadable
    """
    private_file = get_filename_for_package(PRIVATE_CONFIG_FILE)
    try:
        with open(private_file) as file_to_parse:
            # Explicit Loader: yaml.load without one is deprecated in
            # pyyaml>=5.1; FullLoader matches the other config readers here
            config_dict = yaml.load(file_to_parse, Loader=yaml.FullLoader)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt.
        # Missing/unparseable private config is fine eg when running in sim
        config_dict = {}

    return config_dict
def get_system_defaults():
    """
    Load the system defaults yaml file as a dict.

    >>> system_defaults['average_absolute_forecast']
    10.0
    """
    default_file = get_filename_for_package(DEFAULT_FILENAME)
    with open(default_file) as file_to_parse:
        # FIX: explicit Loader — yaml.load without one is deprecated in
        # pyyaml>=5.1, and other loaders in this codebase pass FullLoader
        default_dict = yaml.load(file_to_parse, Loader=yaml.FullLoader)

    return default_dict
def __init__(self, datapath=INSTRUMENT_CONFIG_PATH, log=logtoscreen("csvFuturesInstrumentData")):
    """
    Futures instrument data read from a csv config file.

    :param datapath: package 'dot' format directory; None falls back to
        INSTRUMENT_CONFIG_PATH
    :param log: logger instance
    """
    super().__init__()
    if datapath is None:
        datapath = INSTRUMENT_CONFIG_PATH
    self._config_file = get_filename_for_package(datapath, CONFIG_FILE_NAME)
    self.name = "Instruments data from %s" % self._config_file
    # BUG FIX: was `self.log = logtoscreen`, which stored the logger
    # *factory function* instead of the logger instance passed in
    self.log = log
def __init__(
    self,
    datapath=arg_not_supplied,
    log=logtoscreen("csvFuturesInstrumentData"),
):
    """
    Futures instrument data read from a csv config file.

    :param datapath: package 'dot' format directory; defaults to
        INSTRUMENT_CONFIG_PATH
    :param log: logger instance
    """
    super().__init__(log=log)
    resolved_datapath = (
        INSTRUMENT_CONFIG_PATH if datapath is arg_not_supplied else datapath
    )
    self._config_file = get_filename_for_package(
        resolved_datapath, CONFIG_FILE_NAME
    )
def get_system_defaults_dict(filename: str = arg_not_supplied) -> dict:
    """
    Load the system defaults yaml file as a dict.

    >>> system_defaults['average_absolute_forecast']
    10.0
    """
    resolved_name = DEFAULT_FILENAME if filename is arg_not_supplied else filename
    default_file = get_filename_for_package(resolved_name)

    with open(default_file) as file_to_parse:
        parsed_defaults = yaml.load(file_to_parse, Loader=yaml.FullLoader)

    return parsed_defaults
def generate_html(process_observatory: processMonitor):
    """
    Write a single-page html status report for the given process monitor.

    NOTE(review): `filename` is not defined in this function or its
    parameters — it must be resolved from module scope; confirm a
    module-level `filename` exists where this is defined.

    :param process_observatory: monitor providing the process table,
        log messages, and mongo db description
    """
    resolved_filename = get_filename_for_package(filename)
    trading_server_description = describe_trading_server_login_data()
    dbase_description = str(process_observatory.data.mongo_db)
    with open(resolved_filename, "w") as file:
        # Timestamp first so a reader can see how stale the page is
        file.write("<br/> Last update %s" % str(datetime.datetime.now()))
        file.write("<br/><br/>")
        file.write("Monitoring machine %s with database %s" % (trading_server_description, dbase_description))
        file.write("<br/><br/>")
        # Table of processes, then the recent log messages
        process_observatory.process_dict_to_html_table(file)
        file.write("<br/><br/>")
        process_observatory.log_messages_to_html(file)
        file.write("<br/><br/>")
def pd_readcsv_frompackage(filename):
    """
    Run pd_readcsv on a file in the python package

    :param filename: location in project directory of file, in 'dot' format
        eg systems.provided.tests.csv
    :type filename: str

    :returns: pd.DataFrame
    """
    return pd_readcsv(get_filename_for_package(filename))
def get_private_config_as_dict(filename: str = arg_not_supplied) -> dict:
    """
    Load the private config yaml file as a dict.

    :param filename: 'dot' format filename; defaults to PRIVATE_CONFIG_FILE
    :return: dict; empty if the file does not exist
    """
    resolved_name = PRIVATE_CONFIG_FILE if filename is arg_not_supplied else filename

    # Guard clause: absent private config just means an empty dict
    if not does_file_exist(resolved_name):
        print(
            "Private configuration %s does not exist; no problem if running in sim mode"
            % resolved_name
        )
        return {}

    private_file = get_filename_for_package(resolved_name)
    with open(private_file) as file_to_parse:
        parsed_config = yaml.load(file_to_parse, Loader=yaml.FullLoader)

    return parsed_config
def _create_config_from_item(self, config_item):
    """
    Populate the config from a nested dict, or from a yaml filename given
    in 'dot' format.

    :param config_item: dict of config values, or str filename of a yaml file
    """
    if isinstance(config_item, dict):
        # its a dict
        self._create_config_from_dict(config_item)
    elif isinstance(config_item, str):
        # must be a file YAML'able, from which we load the
        filename = get_filename_for_package(config_item)
        with open(filename) as file_to_parse:
            # FIX: explicit Loader — yaml.load without one is deprecated in
            # pyyaml>=5.1; FullLoader matches the other config readers here
            dict_to_parse = yaml.load(file_to_parse, Loader=yaml.FullLoader)
        self._create_config_from_dict(dict_to_parse)
    else:
        error_msg = "Can only create a config with a nested dict or the string of a 'yamable' filename, or a list comprising these things"
        self.log.critical(error_msg)
def _create_config_from_item(self, config_item):
    """
    Populate the config from a nested dict, or from a yaml filename given
    in 'dot' format.

    :param config_item: dict of config values, or str filename of a yaml file
    """
    if isinstance(config_item, dict):
        # its a dict
        self._create_config_from_dict(config_item)
    elif isinstance(config_item, str):
        # must be a file YAML'able, from which we load the
        filename = get_filename_for_package(config_item)
        with open(filename) as file_to_parse:
            # FIX: explicit Loader — yaml.load without one is deprecated in
            # pyyaml>=5.1; FullLoader matches the other config readers here
            dict_to_parse = yaml.load(file_to_parse, Loader=yaml.FullLoader)
        self._create_config_from_dict(dict_to_parse)
    else:
        error_msg = "Can only create a config with a nested dict or the string of a 'yamable' filename, or a list comprising these things"
        self.log.critical(error_msg)
def pickle(self, relativefilename):
    """
    Save everything in the cache to a pickle EXCEPT 'not picklable' items

    :param relativefilename: cache location filename in 'dot' format eg
        'systems.basesystem.py' is this file
    :type relativefilename: str

    :returns: None
    """
    target_path = get_filename_for_package(relativefilename)

    # Only the cache references flagged as picklable are saved
    picklable_refs = self._get_pickable_items()
    cache_subset = self.partial_cache(picklable_refs)

    with open(target_path, "wb+") as fhandle:
        pickle.dump(cache_subset, fhandle)
def pickle_cache(self, filename):
    """
    Save everything in the cache to a pickle EXCEPT 'nopickle' items

    :param filename: cache location filename in 'dot' format eg
        'systems.basesystem.py' is this file
    :type filename: str

    :returns: None
    """
    # Everything with data, minus the items flagged as unpicklable
    excluded_items = self.get_nopickle_items()
    names_to_pickle = [
        name
        for name in self.get_items_with_data()
        if name not in excluded_items
    ]

    cache_subset = self.partial_cache(names_to_pickle)
    with open(get_filename_for_package(filename), "wb") as fhandle:
        pickle.dump(cache_subset, fhandle)
def __init__(self, config_file=ROLLS_CONFIG_FILE):
    """
    Roll data backed by the given csv config file.

    :param config_file: 'dot' format filename of the rolls config csv
    """
    super().__init__()
    self._config_file = get_filename_for_package(config_file)
import yaml

from syscore.fileutils import get_filename_for_package

QUANDL_PRIVATE_KEY_FILE = get_filename_for_package("private.private_config.yaml")


def load_private_key(key_file=QUANDL_PRIVATE_KEY_FILE, dict_key='quandl_key'):
    """
    Tries to load a private key

    :param key_file: yaml file to read the key from
    :param dict_key: name of the key inside the yaml dict
    :return: key, or None if it can't be read
    """
    try:
        with open(key_file) as file_to_parse:
            # FIX: explicit Loader — yaml.load without one is deprecated
            # in pyyaml>=5.1
            yaml_dict = yaml.load(file_to_parse, Loader=yaml.FullLoader)
        key = yaml_dict[dict_key]
    except Exception:
        # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt.
        # no private key: missing file, bad yaml, or absent dict_key
        print("No private key found for QUANDL - you will be subject to data limits")
        key = None

    return key
def __init__(self, config_file=ROLLS_CONFIG_FILE):
    """
    Roll data for initialising system config, read from a csv config file.

    :param config_file: 'dot' format filename of the rolls config csv
    """
    super().__init__()
    resolved_config_file = get_filename_for_package(config_file)
    self._config_file = resolved_config_file
    self.name = "Roll data for initialising system config"
def _filename_given_instrument_code(self, instrument_code):
    # 'dot' format path: <datapath>.<instrument_code>.csv
    dotted_path = "%s.%s.csv" % (self._datapath, instrument_code)
    return get_filename_for_package(dotted_path)
def _filename_given_fx_code(self, code):
    # 'dot' format path: <datapath>.<code>.csv
    dotted_path = ".".join([self._datapath, code, "csv"])
    return get_filename_for_package(dotted_path)
import yaml

from syscore.fileutils import get_filename_for_package
from syscore.objects import missing_data, arg_not_supplied
from systems.defaults import get_default_config_key_value, get_system_defaults, DEFAULT_FILENAME

PRIVATE_CONFIG_FILE = get_filename_for_package("private.private_config.yaml")


def get_private_config():
    """
    Load the private config yaml file as a dict.

    :return: dict; empty if the file is missing or unreadable
    """
    try:
        with open(PRIVATE_CONFIG_FILE) as file_to_parse:
            config_dict = yaml.load(file_to_parse, Loader=yaml.FullLoader)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt.
        # Missing private config is fine eg when running in sim mode
        config_dict = {}

    return config_dict


def get_private_config_key_value(key_name, private_config_dict=arg_not_supplied, raise_error=False):
    """
    Look up key_name in the private config.

    :param key_name: key to look up
    :param private_config_dict: pass a dict to avoid re-reading the file
    :param raise_error: if True, raise KeyError when the key is absent
    :return: the value, or missing_data if absent and raise_error is False
    :raises KeyError: if the key is absent and raise_error is True
    """
    if private_config_dict is arg_not_supplied:
        private_config_dict = get_private_config()

    key_value = private_config_dict.get(key_name, missing_data)
    if key_value is missing_data and raise_error:
        # BUG FIX: format string had one %s but a two-element argument
        # tuple (raising TypeError instead of KeyError), and interpolated
        # key_value rather than key_name
        raise KeyError(
            "Can't find key %s in private config .yaml file %s"
            % (key_name, PRIVATE_CONFIG_FILE)
        )

    return key_value
def _filename_given_fx_code(self, code):
    # 'dot' format path: <datapath>.<code>.csv
    dotted_path = "%s.%s.csv" % (self._datapath, code)
    return get_filename_for_package(dotted_path)
from collections import namedtuple import pandas as pd from sysdata.fx.spotfx import fxPricesData from sysobjects.spot_fx_prices import fxPrices from syslogdiag.log import logtoscreen from syscore.fileutils import get_filename_for_package from syscore.objects import missing_file, missing_instrument IB_CCY_CONFIG_FILE = get_filename_for_package( "sysbrokers.IB.ib_config_spot_FX.csv") ibFXConfig = namedtuple("ibFXConfig", ["ccy1", "ccy2", "invert"]) class ibFxPricesData(fxPricesData): def __init__(self, ibconnection, log=logtoscreen("ibFxPricesData")): self._ibconnection = ibconnection super().__init__(log=log) def __repr__(self): return "IB FX price data" @property def ibconnection(self): return self._ibconnection def get_list_of_fxcodes(self) -> list: config_data = self._get_ib_fx_config() if config_data is missing_file: self.log.warn( "Can't get list of fxcodes for IB as config file missing")
import datetime import socket from syscore.dateutils import SECONDS_PER_HOUR from syscore.genutils import str2Bool from sysdata.data_blob import dataBlob from sysdata.mongodb.mongo_process_control import mongoControlProcessData import yaml from syscore.fileutils import get_filename_for_package from syscore.objects import missing_data, arg_not_supplied PRIVATE_CONTROL_CONFIG_FILE = get_filename_for_package( "private.private_control_config.yaml") PUBLIC_CONTROL_CONFIG_FILE = get_filename_for_package( "syscontrol.control_config.yaml") class dataControlProcess(object): def __init__(self, data=arg_not_supplied): # Check data has the right elements to do this if data is arg_not_supplied: data = dataBlob() data.add_class_object(mongoControlProcessData) self.data = data def get_dict_of_control_processes(self): return self.data.db_control_process.get_dict_of_control_processes() def check_if_okay_to_start_process(self, process_name):
def _filename_given_key_name(self, keyname: str):
    # one csv per key inside the datapath directory
    csv_name = "%s.csv" % keyname
    return get_filename_for_package(self._datapath, csv_name)
""" from sysobjects.contracts import futuresContract from sysdata.futures.futures_per_contract_prices import ( futuresContractPriceData, futuresContractPrices, ) from syscore.fileutils import get_filename_for_package from sysdata.quandl.quandl_utils import load_private_key import quandl import pandas as pd QUANDL_FUTURES_CONFIG_FILE = get_filename_for_package( "sysdata.quandl.QuandlFuturesConfig.csv" ) quandl.ApiConfig.api_key = load_private_key() class quandlFuturesConfiguration(object): def __init__(self, config_file=QUANDL_FUTURES_CONFIG_FILE): self._config_file = config_file def get_list_of_instruments(self): config_data = self._get_config_information() return list(config_data.index)
from pymongo import MongoClient, ASCENDING, IndexModel from copy import copy import numpy as np import yaml from syscore.fileutils import get_filename_for_package from syscore.genutils import get_safe_from_dict MONGO_CONFIG_FILE = get_filename_for_package( 'sysproduction.config.mongo_config.yaml') LIST_OF_MONGO_PARAMS = ['db', 'host'] # CHANGE THESE IN THE PRIVATE CONFIG FILE, NOT HERE. SEE THE PRECEDENCE IN MONGO DEFAULTS DEFAULT_MONGO_PARAMS = dict(db='production', host='localhost') # DO NOT CHANGE THIS VALUE!!!! IT WILL SCREW UP ARCTIC DEFAULT_MONGO_PORT = 27017 MONGO_ID_STR = '_id_' MONGO_ID_KEY = '_id' from syscore.fileutils import PRIVATE_CONFIG_FILE def mongo_defaults(mongo_config_file=MONGO_CONFIG_FILE, private_config_file=PRIVATE_CONFIG_FILE, **kwargs): """ Returns mongo configuration with following precedence 1- if passed in arguments: db, host, port, data_map, collection_suffix - use that
import pandas as pd from syscore.fileutils import get_filename_for_package from syscore.genutils import value_or_npnan, NOT_REQUIRED from sysdata.futures.contracts import futuresContractData from sysdata.futures.instruments import futuresInstrument from sysdata.futures.contract_dates_and_expiries import expiryDate from syslogdiag.log import logtoscreen from syscore.objects import missing_contract, missing_instrument, missing_file IB_FUTURES_CONFIG_FILE = get_filename_for_package( "sysbrokers.IB.ibConfigFutures.csv") class ibFuturesContractData(futuresContractData): """ Extends the baseData object to a data source that reads in and writes prices for specific futures contracts This gets HISTORIC data from interactive brokers. It is blocking code In a live production system it is suitable for running on a daily basis to get end of day prices """ def __init__(self, ibconnection, log=logtoscreen("ibFuturesContractData")): setattr(self, "ibconnection", ibconnection) setattr(self, "log", log) def __repr__(self): return "IB Futures per contract data %s" % str(self.ibconnection) def get_brokers_instrument_code(self, instrument_code): return get_instrument_object_from_config(
def _filename_given_instrument_code(self, instrument_code):
    # one csv per instrument inside the datapath directory
    csv_name = "%s.csv" % instrument_code
    return get_filename_for_package(self._datapath, csv_name)