def run(self): logger.debug("Start analyizing %s ..." % self.valuesFile) with open(self.valuesFile, 'r') as fin: reader = csv.reader(fin) dates = [] values = [] for row in reader: date = dt.datetime.strptime(row[0], '%Y-%m-%dT%H:%M:%S') dates.append(date) value = float(row[-1]) values.append(value) self.df_price = pd.DataFrame(data=values, index=dates, columns=[PORT_NAME]) startDate = self.df_price.index[0] endDate = self.df_price.index[-1] def receive(symbol, df): logger.debug("Baseline data received") self.df_price[symbol] = pd.Series(data=df['close'].values, index=self.df_price.index) self.analyze() import HistoricalData HistoricalData.request(self.baselineSymbol, startDate, endDate, self.data_type, self.bar_size, receive)
def run(self):
    self.orders = []
    self.symbols = []
    self.shares = {}
    with open(self.ordersFile, 'r') as fin:
        reader = csv.reader(fin)
        for row in reader:
            # Columns: year, month, day, symbol, action, quantity;
            # the order time is pinned to 16:00 (market close)
            date = dt.datetime(int(row[0]), int(row[1]), int(row[2]), 16)
            symbol = row[3]
            action = row[4]
            quantity = float(row[5])
            order = OrderWrapper(date, symbol, Orders.MarketOrder(action, quantity))
            self.orders.append(order)
            if order.symbol not in self.symbols:
                self.symbols.append(order.symbol)
                self.shares[order.symbol] = 0
    self.orders = sorted(self.orders, key=lambda order: order.date)
    startDate = self.orders[0].date
    endDate = self.orders[-1].date
    logger.debug("Needing %s", self.symbols)

    def doSimulate(df):
        self.df_data = df
        self.simulate()

    HistoricalData.requestMultiple(self.symbols, startDate, endDate,
                                   "ADJUSTED_LAST", "1 DAY", doSimulate)
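# Likewise, a sketch of the orders-file rows parsed above: year, month, day,
# symbol, action, and quantity, with the time of day pinned to 16:00 by the
# reader (sample values only).
#
#   2013,1,2,AAPL,Buy,100
#   2013,1,9,AAPL,Sell,100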
def getSchedule(self, sid, ordersize, alg):
    if hd.updateStock(str(sid)) != 0:
        self.sidNotExsist.emit()
        return
    trade_style = 0  # not implemented yet
    self.schedPrepared.emit(td.tradeStock(sid, int(ordersize), trade_style, alg))
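# A minimal sketch of how the signals used by getSchedule() might be declared,
# assuming PyQt5-style signals on a QObject subclass; the class name and the
# payload type are assumptions, not taken from the original code.
from PyQt5.QtCore import QObject, pyqtSignal

class ScheduleWorker(QObject):
    sidNotExsist = pyqtSignal()         # emitted when the stock ID is unknown
    schedPrepared = pyqtSignal(object)  # emitted with the prepared trade schedule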
def restore(self):
    '''Starts the restore process from a file.'''
    backup = self.restoreFromFile()
    recordCount = len(backup)
    if recordCount > 0:
        self.LOGGER.info(str(recordCount) + " record(s) found. Saving to DB")
        for record in backup:
            # Treat the backup record as valid until a check fails
            hDataValid = True
            hData = record
            if self.VALIDATOR is not None and self.CONFIG.getBooleanConfig("Tolerence", "enabled"):
                validatedData = self.VALIDATOR.validateData(hData)
                hDataValid = validatedData[0]
                # Keep the validated object; it may have been cleansed
                if hDataValid is True:
                    hData = validatedData[1]
            if hDataValid and self.CONFIG.getBooleanConfig("Trigger", "enabled"):
                # Check trigger conditions, which report whether the data
                # should be recorded
                hDataValid = self.TRIGGER.checkTriggers(hData)
            if hDataValid:
                self.LOGGER.info("Inserting: " + str(hData.__dict__))
                HistoricalData.insertData(hData)
            else:
                self.LOGGER.info("Skipped backup record")
        # Remove the backup file to prevent duplicate data from being restored
        self.LOGGER.info("Restore from backup complete.")
        self.LOGGER.info("Removing backup file.")
        self.LOGGER.info("File deleted? " + str(self.deleteBackupFile()))
def checkTriggers(self, historicalData):
    '''Checks whether the received data meets any one of the trigger
    conditions. Returns True if a trigger is met.'''
    trigger = False
    # Get the last recorded data point, used as the baseline for the triggers
    previousDataPoint = HistoricalData.getLastHistoricalData(historicalData)
    # Check if there was data in the DB
    if previousDataPoint is not None:
        try:
            # Collect all channel data for logging
            channel = ""
            for key, value in previousDataPoint.energy.items():
                channel += key + "=" + str(value) + "w "
            self.LOGGER.info("Last data point for " + previousDataPoint.name +
                             " app_id=" + str(previousDataPoint.applianceId) +
                             " type=" + str(previousDataPoint.sensorType) +
                             " at " + str(previousDataPoint.time) +
                             " was " + channel +
                             str(previousDataPoint.temperature) + "c")
            # Check timeout
            if self.checkTimeTrigger(previousDataPoint):
                trigger = True
            # Check energy variation
            if self.checkEnergyTrigger(historicalData, previousDataPoint):
                trigger = True
            # Check temperature variation
            if self.checkTemperatureTrigger(historicalData, previousDataPoint):
                trigger = True
        except AttributeError as ae:
            self.LOGGER.error("Caught error: " + str(ae))
            trigger = False
    else:
        # No previous data point found, so always record this reading
        self.LOGGER.info("No data history for device " + historicalData.name +
                         " on app_id=" + str(historicalData.applianceId) +
                         " type=" + str(historicalData.sensorType))
        trigger = True
    # trigger stays False when historical data existed but no condition was met
    return trigger
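# For illustration, one of the helper checks above might look roughly like
# this energy-variation trigger; the threshold parameter and the percentage
# logic are assumptions, not the project's actual implementation.
def checkEnergyTrigger(self, historicalData, previousDataPoint, threshold=0.10):
    '''Returns True if any channel's reading moved by more than `threshold`
    (as a fraction) relative to the previous data point (hypothetical sketch).'''
    for ch, watts in historicalData.energy.items():
        previous = previousDataPoint.energy.get(ch)
        if previous and abs(watts - previous) / float(previous) > threshold:
            return True
    return False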
def testBollinger(ls_symbols, s_market_sym, dt_start, dt_end, f_starting_cash,
                  f_amount_per_trade, n_band_width, n_bar_to_look_back,
                  bar_size, bs_strategy):
    if not os.path.isdir('strategyTest'):
        os.mkdir('strategyTest')
    # Use a timestamp as the run ID ('id' would shadow the builtin)
    run_id = dt.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
    s_bollinger_index_out_file = 'strategyTest/bollinger-index-' + run_id + '.csv'
    s_plot_out_file_prefix = 'strategyTest/bollinger-' + run_id
    s_orders_out_file = 'strategyTest/bollinger-orders-' + run_id + '.csv'
    s_values_out_file = 'strategyTest/bollinger-values-' + run_id + '.csv'

    def doTest(df_price):
        from BollingerBandAnalysis import BollingerBandAnalysis
        BollingerBandAnalysis(df_price, s_bollinger_index_out_file,
                              s_plot_out_file_prefix, s_orders_out_file,
                              s_values_out_file)\
            .run(n_band_width, n_bar_to_look_back, f_starting_cash,
                 f_amount_per_trade, bs_strategy)
        PortfolioAnalyzer(s_values_out_file, s_market_sym).run()

    HistoricalData.requestMultiple(ls_symbols, dt_start, dt_end,
                                   'ADJUSTED_LAST', bar_size, doTest)
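# An illustrative invocation, assuming dt is the datetime module; the symbols,
# dates, and strategy parameters below are sample values only.
#
#   testBollinger(['AAPL', 'GOOG'], 'SPY',
#                 dt.datetime(2012, 1, 1), dt.datetime(2012, 12, 31),
#                 f_starting_cash=100000.0, f_amount_per_trade=10000.0,
#                 n_band_width=1.0, n_bar_to_look_back=20,
#                 bar_size='1 day', bs_strategy=None)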
def test(ls_symbols, s_market_sym, dt_start, dt_end, f_starting_cash):
    if not os.path.isdir('strategyTest'):
        os.mkdir('strategyTest')
    run_id = dt.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
    profilerFile = 'strategyTest/EventStudy_' + run_id + '.pdf'
    ordersFile = 'strategyTest/orders_' + run_id + '.csv'
    valuesFile = 'strategyTest/values_' + run_id + '.csv'

    def analyze():
        PortfolioAnalyzer(valuesFile, s_market_sym).run()

    def doTest(df_price):
        from SampleStrategy import SampleStrategy
        SampleStrategy(df_price, s_market_sym, profilerFile, ordersFile).studyEvents()
        MarketSimulator(f_starting_cash, ordersFile, valuesFile, 1, analyze).run()

    ls_symbols.append(s_market_sym)
    HistoricalData.requestMultiple(ls_symbols, dt_start, dt_end,
                                   'ADJUSTED_LAST', '1 day', doTest)
def getCurrentXMLData(self, xmlDoc):
    '''Creates and populates a HistoricalData object from XML data.'''
    data = HistoricalData()
    if xmlDoc is not None:
        for msgNode in xmlDoc.getElementsByTagName('msg'):
            dataNode = msgNode.childNodes
            # Populate the object with the data under the parent node
            for i in range(0, dataNode.length):
                try:
                    if dataNode[i].nodeName == 'src':
                        self.LOGGER.info(str(i) + ': name=' + dataNode[i].firstChild.nodeValue)
                        data.name = dataNode[i].firstChild.nodeValue
                    elif dataNode[i].nodeName == 'dsb':
                        self.LOGGER.info(str(i) + ': dsb=' + dataNode[i].firstChild.nodeValue)
                        data.dsb = int(dataNode[i].firstChild.nodeValue)
                    elif dataNode[i].nodeName == 'id':
                        self.LOGGER.info(str(i) + ': appliance id=' + dataNode[i].firstChild.nodeValue)
                        data.channel_frequency = int(dataNode[i].firstChild.nodeValue)
                    elif dataNode[i].nodeName == 'sensor':
                        self.LOGGER.info(str(i) + ': sensor=' + dataNode[i].firstChild.nodeValue)
                        data.applianceId = dataNode[i].firstChild.nodeValue
                    elif dataNode[i].nodeName == 'type':
                        self.LOGGER.info(str(i) + ': type=' + dataNode[i].firstChild.nodeValue)
                        data.sensorType = int(dataNode[i].firstChild.nodeValue)
                    elif dataNode[i].nodeName == 'time':
                        self.LOGGER.info(str(i) + ': time=' + str(dataNode[i].firstChild.nodeValue))
                        data.time = dataNode[i].firstChild.nodeValue
                    elif dataNode[i].nodeName == 'tmpr':
                        self.LOGGER.info(str(i) + ': tmpr=' + dataNode[i].firstChild.nodeValue)
                        data.temperature = float(dataNode[i].firstChild.nodeValue)
                    # Match any channel node (ch1, ch2, ...)
                    elif dataNode[i].nodeName[:2] == 'ch':
                        self.LOGGER.info(str(i) + ': ch' + dataNode[i].nodeName[2:] + '=' +
                                         self.getXMLChannel(dataNode[i].childNodes))
                        data.energy[dataNode[i].nodeName] = int(self.getXMLChannel(dataNode[i].childNodes))
                except (AttributeError, ValueError):
                    self.LOGGER.error('Skipped due to data error: ' + str(dataNode[i].nodeName))
    return data
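# For reference, a sketch of the kind of <msg> element getCurrentXMLData()
# walks; the tag names match the branches above, but the values and the inner
# <watts> channel markup are illustrative. Such a document can be built with
# xml.dom.minidom:
#
#   from xml.dom.minidom import parseString
#   xmlDoc = parseString(
#       '<msg><src>monitor-v1</src><dsb>00089</dsb><time>13:02:39</time>'
#       '<tmpr>18.7</tmpr><sensor>1</sensor><id>01234</id><type>1</type>'
#       '<ch1><watts>00345</watts></ch1></msg>')
#   data = parser.getCurrentXMLData(xmlDoc)   # `parser` is a hypothetical instance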
def checkNewAppliance(self, historicalData):
    '''Checks whether the data will be appended to an existing appliance or
    will insert a new one. Returns True for a new appliance, False otherwise.'''
    import HistoricalData
    # Retrieve the device ID from the database, if one exists; no ID means a
    # new device record will be created on insert, otherwise the data will
    # be appended to the existing device
    deviceId = HistoricalData.getDeviceId(historicalData.name,
                                          historicalData.applianceId,
                                          historicalData.sensorType)
    return deviceId is None
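# Example use, e.g. to log when a reading will create a new device record
# (`checker` is a hypothetical instance of the class above):
#
#   if checker.checkNewAppliance(hData):
#       LOGGER.info("New appliance detected: " + hData.name)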
# -*- coding: utf-8 -*-
import HistoricalData
import Trade
import MySQL

INTERVAL_MINUTE = 5  # this number should be a divisor of 60
INTERVAL_NUM = 240 // INTERVAL_MINUTE  # integer division: bars per trading day
Train_day_num = 10
Read_data_day_num = 40

if __name__ == '__main__':
    #stockID = raw_input("Enter the stockID you want to buy : ")
    stockID = 'sh601988'
    buy_num = 50
    trading_style = 0  # style: 0 = normal, 1 = aggressive, 2 = passive
    HistoricalData.updateStock(stockID)
    Trade.tradeStock(stockID, buy_num, trading_style)
    #trainVWAP(stockID)
def run():
    '''Reads, parses and stores data.'''
    global CONFIG
    global LOGGER
    global VALIDATOR
    global TRIGGER
    global DEVICE
    global OFFLINE
    global OFFLINEMODE
    # Read data from USB/serial
    data = DEVICE.read()
    # Parse XML data from the device
    xmlParser = Parser()
    hData = xmlParser.parseXML(data)
    # Treat the data as valid until a check fails
    hDataValid = True
    if hData is not None:
        # Check time override
        if CONFIG.getBooleanConfig("Application", "useSystemTime"):
            # Override the device time with the system date and time
            hData.time = datetime.now()
        else:
            try:
                # Parse the time from the device, prepending the system date
                tempDate = date.today().isoformat() + " " + hData.time
                hData.time = datetime.strptime(tempDate, "%Y-%m-%d %H:%M:%S")
            except ValueError:
                # Unable to parse the time from the device
                LOGGER.error("Error parsing time from device '" + hData.time + "'")
        # If error checking is enabled, validate the data
        if VALIDATOR is not None and CONFIG.getBooleanConfig("Tolerence", "enabled"):
            try:
                validatedData = VALIDATOR.validateData(hData)
                hDataValid = validatedData[0]
                # If the data is valid, keep the returned object: it may have
                # been cleansed, e.g. some channels in the reading may fail the
                # validation parameters while others pass
                if hDataValid is True:
                    hData = validatedData[1]
            except ConnectionException:
                # Fall back to an offline backup, or shut down gracefully, when
                # the database connection needed for validation is unavailable
                if CONFIG.getBooleanConfig("Application", "enableOffline"):
                    OFFLINEMODE = True
                    OFFLINE.backup(hData)
                else:
                    shutdown()
        # Only check trigger conditions if they are enabled, the mode is not
        # offline, and the data passed the tolerance check
        if OFFLINEMODE is False and hDataValid and CONFIG.getBooleanConfig("Trigger", "enabled"):
            try:
                hDataValid = TRIGGER.checkTriggers(hData)
            except ConnectionException:
                # Same fallback when the trigger check cannot reach the database
                if CONFIG.getBooleanConfig("Application", "enableOffline"):
                    OFFLINEMODE = True
                    OFFLINE.backup(hData)
                else:
                    shutdown()
        # Insert the data if it passed all checks and the mode is not offline
        if OFFLINEMODE is False:
            if hDataValid:
                try:
                    HistoricalData.insertData(hData)
                except ConnectionException:
                    # Same fallback when the insert cannot reach the database
                    if CONFIG.getBooleanConfig("Application", "enableOffline"):
                        OFFLINEMODE = True
                        OFFLINE.backup(hData)
                    else:
                        shutdown()
            else:
                LOGGER.info("Skipped")
        else:
            LOGGER.info("Offline mode: Active")
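# A minimal sketch of how run() might be driven, assuming the surrounding
# script polls the device in a loop; the loop shape is an assumption, not
# from the original source.
#
#   while True:
#       run()   # read, parse, validate, trigger-check, and store one reading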