def __init__(self, configuration):
    """Keep the simulation settings and open a database access client.

    A custom Quandl configuration under configuration['env']['quandl'],
    when present, is forwarded to the DataFeed constructor; otherwise a
    default DataFeed is built.
    """
    self.configuration = configuration
    env = configuration['env']
    if 'quandl' in env:
        self.datafeed = DataFeed(env['quandl'])
    else:
        self.datafeed = DataFeed()
def __init__(self, config_dir='', offline=False):
    """Set up configuration containers and, optionally, the messaging server.

    Parameters
    ----------
    config_dir: str
        Location of the configuration files; empty string defaults to
        $QTRADE/config (raises KeyError if QTRADE is unset).
    offline: bool
        When True, do not create the internal ZMQ server.
    """
    #NOTE timezone as parameter
    super(Setup, self).__init__()
    # os.path.join is portable, unlike the previous hand-rolled '/'.join
    self.configuration_folder = config_dir if config_dir else \
        os.path.join(os.environ['QTRADE'], 'config')
    # Config data structures
    self.config_backtest = dict()
    self.config_strategie = dict()
    self.config_environment = self._inspect_environment()
    # Client for easy mysql database access
    self.datafeed = DataFeed()
    self.offline = offline
    if not offline:
        # Makes the simulator able to receive configuration and send
        # information to remote users and processes like a web frontend
        self.server = network.ZMQ_Dealer(id=self.__class__.__name__)
def __init__(self, country_code=None):
    """Initialize localization context and database access.

    Parameters
    ----------
    country_code: str or None
        Locale used to pick local conventions for language, dates and
        currencies. None stands for 'fr'.
    """
    # NOTE(review): International(country_code) setup is currently disabled
    #self.locatioon = world.International(country_code)
    self.datafeed = DataFeed()
def __init__(self, parameters):
    """Portfolio manager backed by R optimization routines.

    Parameters
    ----------
    parameters: dict
        Settings forwarded to the base PortfolioManager.
    """
    PortfolioManager.__init__(self, parameters)
    # Database client used for symbols retrieval
    self.feeds = DataFeed()
    # Bridge to R: load the optimization helpers once at startup
    self.r = robjects.r
    r_script = '/'.join((os.environ['QTRADE'],
                         'neuronquant/ai/opt_utils.R'))
    self.r('source("{}")'.format(r_script))
def __init__(self, data_descriptor, **kwargs):
    """Configure the source from a data description.

    Parameters
    ----------
    data_descriptor: dict
        Must hold 'tickers' and a pandas DatetimeIndex under 'index'.
    kwargs:
        Optional 'sids', 'start' and 'end' overrides; each one defaults
        to the corresponding descriptor content.
    """
    assert isinstance(data_descriptor['index'],
                      pd.tseries.index.DatetimeIndex)
    self.data_descriptor = data_descriptor
    # Unpack config dictionary, falling back on the descriptor content
    index = data_descriptor['index']
    self.sids = kwargs.get('sids', data_descriptor['tickers'])
    self.start = kwargs.get('start', index[0])
    self.end = kwargs.get('end', index[-1])
    # Hash_value for downstream sorting.
    self.arg_string = hash_args(data_descriptor, **kwargs)
    self._raw_data = None
    self.feed = DataFeed()
def __init__(self, *args, **kwargs):
    """Gather backtest artefacts for later analysis.

    Keyword parameters (all optional):
        datafeed: database client, defaults to a fresh DataFeed
        returns: portfolio returns needed by the R analysis
        results: final risk measurements returned by the backtester
        metrics: simulation rolling performance
        configuration: the simulation's parameters
    """
    super(Analyze, self).__init__()
    # MySQL Database client; only build a default one when the caller
    # did not provide any (avoids opening a useless connection).
    self.datafeed = kwargs.pop('datafeed') if 'datafeed' in kwargs else DataFeed()
    # dict.pop with a default replaces the verbose
    # "pop(key) if key in kwargs else None" pattern — same behavior.
    self.returns = kwargs.pop('returns', None)
    self.results = kwargs.pop('results', None)
    self.metrics = kwargs.pop('metrics', None)
    self.configuration = kwargs.pop('configuration', None)
def smart_tickers_select(tickers_description, exchange=''):
    """Turn a comma separated tickers description into a list of
    explicit, usable symbols.

    A description starting with 'random' (e.g. 'random,5') picks that
    many stocks from the database, optionally on the given exchange(s).
    """
    # Individual fields are comma separated within the string
    fields = tickers_description.split(',')
    if fields[0] != 'random':
        return fields
    # Random selection mode: the second field must be the (non-zero,
    # integer) number of stocks to pick up
    assert len(fields) == 2
    assert int(fields[1])
    # Pick up stocks on specified (or not) market exchange
    return DataFeed().random_stocks(int(fields[1]),
                                    exchange=exchange.split(','))
def __init__(self, data, **kwargs):
    """Prepare the data source from a description dictionary.

    Parameters
    ----------
    data: dict
        Must hold 'tickers' and a pandas DatetimeIndex under 'index'.
    kwargs:
        Optional 'sids', 'start' and 'end' overrides; each one defaults
        to the corresponding dictionary content.
    """
    assert isinstance(data['index'], pd.tseries.index.DatetimeIndex)
    self.data = data
    # Unpack config dictionary with default values.
    index = data['index']
    self.sids = kwargs.get('sids', data['tickers'])
    self.start = kwargs.get('start', index[0])
    self.end = kwargs.get('end', index[-1])
    # Business-days index spanning the whole simulation period
    self.fake_index = pd.date_range(self.start, self.end,
                                    freq=pd.datetools.BDay())
    # Hash_value for downstream sorting.
    self.arg_string = hash_args(data, **kwargs)
    self._raw_data = None
    self.remote = Fetcher()
    self.feed = DataFeed()
import pandas.rpy.common as com
from neuronquant.data.datafeed import DataFeed

# R session and SVM forecast helper functions
r = robjects.r
svmforecast_lib = 'e1071.R'
r('source("%s")' % svmforecast_lib)
#quantmod = importr('quantmod')

# Fetch the last `history` days of quotes for the symbol
symbol = 'GOOG'
history = 500
today = datetime.datetime.now()
start_date = today - pd.datetools.Day(history)

#r('require(quantmod)')
#tt = r('get( getSymbols("{}", from="{}"))'.format(symbol, start_date.strftime(format='%Y-%m-%d')))
returns = DataFeed().quotes(symbol, start_date=start_date, end_date=today)
#returns = data.pct_change().fillna(0.0)
# The R helpers expect a 'Close' column
returns = returns.rename(columns={symbol: "Close"})
r_quotes = com.convert_to_r_dataframe(returns)

# NOTE: leftover debugger breakpoint (import ipdb; ipdb.set_trace())
# removed — it unconditionally halted every run of this script.
data_matrix = r('svmFeatures')(r_quotes)
#rets = r('na.trim( ROC(Cl({}), type="discrete"))'.format('tt'))
#data_matrix = r('svmFeatures')(tt)
#data_df = pd.rpy.common.convert_robj(data_matrix)
def __init__(self, data=None):
    """Build the object with a fresh database access client.

    The `data` parameter is not read by this constructor — presumably
    kept for interface compatibility.
    """
    #NOTE Allowing different data access ?
    self.datafeed = DataFeed()