def getTradesForDrillDown(self, sym, time, state):
    """Return the trades surrounding an anomaly for drill-down display.

    Fetches every trade for *sym* whose timestamp falls within three hours
    either side of *time*, ordered oldest first.

    :param sym:   ticker symbol to filter on
    :param time:  anomaly timestamp string, with or without fractional seconds
    :param state: 1 selects the live table, anything else the static table
    :returns:     list of ``mtrade.TradeData`` objects, ascending by time
    """
    # Pick the table that matches the requested mode.
    ttable = "trans_live" if state == 1 else "trans_static"

    # The timestamp may or may not carry microseconds; try the richer
    # format first and fall back to whole seconds.
    try:
        anchor = datetime.strptime(time, "%Y-%m-%d %H:%M:%S.%f")
    except ValueError:
        anchor = datetime.strptime(time, "%Y-%m-%d %H:%M:%S")

    # Symmetric three-hour window around the anomaly.
    # TODO change if necessary
    window = timedelta(hours=3)
    lower = anchor - window
    upper = anchor + window

    query = (
        "SELECT time,buyer,seller,price,volume,currency,symbol,sector,"
        "bidPrice,askPrice FROM " + ttable +
        " WHERE(symbol=%s AND unix_timestamp(time) BETWEEN "
        "unix_timestamp(%s) AND unix_timestamp(%s)) "
        "ORDER BY unix_timestamp(time) ASC"
    )
    rows = self.query(query, [sym, lower, upper])
    return [mtrade.to_TradeData(row) for row in rows]
def getTradesByPerson(self, person, sym, state):
    """Return every trade in *sym* where *person* was buyer or seller.

    :param person: trader identifier matched against both sides of the trade
    :param sym:    ticker symbol to filter on
    :param state:  1 selects the live table, anything else the static table
    :returns:      list of ``mtrade.TradeData`` objects, ascending by time
    """
    table = "trans_live" if state == 1 else "trans_static"
    query = (
        "SELECT time,buyer,seller,price,volume,currency,symbol,sector,"
        "bidPrice,askPrice FROM " + table +
        " WHERE(symbol=%s AND (buyer=%s OR seller=%s)) "
        "ORDER BY unix_timestamp(time) ASC"
    )
    # The same identifier is bound twice: once for the buyer column,
    # once for the seller column.
    rows = self.query(query, [sym, person, person])
    return [mtrade.to_TradeData(row) for row in rows]
def getAnomalyById(self, id, state):
    """Look up a single anomaly (and its underlying trade) by anomaly id.

    Useful for the drill-down view.

    :param id:    primary key of the anomaly row
    :param state: 1 selects the live tables, anything else the static tables
    :returns:     an ``mtrade.Anomaly`` wrapping the joined trade data
    """
    if state == 1:
        table1, table2 = "anomalies_live", "trans_live"
    else:
        table1, table2 = "anomalies_static", "trans_static"

    # Join the anomaly row onto its trade so one query yields everything.
    query = (
        "SELECT category,time,buyer,seller,price,volume,currency,symbol,"
        "sector,bidPrice,askPrice FROM " + table1 + " JOIN " + table2 +
        " ON " + table2 + ".id=" + table1 + ".tradeid"
        " WHERE " + table1 + ".id=%s"
    )
    record = self.query(query, [id])[0]
    # First column is the category; the remainder is the trade tuple.
    trade = mtrade.to_TradeData(record[1:])
    return mtrade.Anomaly(id, trade, record[0])
def getAnomalies(self, done, state):
    """Return all anomalies filtered by whether action has been taken.

    :param done:  value matched against the ``actiontaken`` column
    :param state: 1 selects the live tables, anything else the static tables
    :returns:     list of ``mtrade.Anomaly`` objects with embedded trade data
    """
    if state == 1:
        atable, ttable = "anomalies_live", "trans_live"
    else:
        atable, ttable = "anomalies_static", "trans_static"

    query = (
        "SELECT " + atable + ".id,tradeid,category,time,buyer,seller,price,"
        "volume,currency,symbol,sector,bidPrice,askPrice FROM " + atable +
        " JOIN " + ttable + " ON " + ttable + ".id=" + atable + ".tradeid"
        " WHERE actiontaken=%s"
    )
    anomalies = []
    for row in self.query(query, [done]):
        # row layout: [anomaly id, tradeid, category, <trade columns...>]
        trade = mtrade.to_TradeData(row[3:])
        anomalies.append(mtrade.Anomaly(row[0], trade, row[2]))
    return anomalies
def parsefile(self):
    """Bulk-load trades.csv into the static trades table, then queue each
    row for processing.

    Two passes over the same file:

    1. A MySQL ``LOAD DATA LOCAL INFILE`` pushes the whole CSV straight
       into ``trans_static`` (fast server-side ingest; header row skipped
       via ``ignore 1 lines``).
    2. A Python-side re-read feeds every trade onto the shared static
       queue so the processing pipeline sees the same data in memory.

    Side effects: sets the module-level ``_loadedfile`` flag to 1 and
    pushes ``mtrade.TradeData`` objects onto ``_staticq`` under ``_qlock``.
    """
    global _qlock
    global _staticq
    global _loadedfile
    print("Prepared File")
    # Immediately read straight to database.
    db = database.Database()
    print("Starting to read in the file")
    startTime = time.time()
    db.action(
        "load data local infile 'trades.csv' into table trans_static fields terminated by ',' lines terminated by '\n' ignore 1 lines (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9, @col10) set id=NULL, time=@col1, buyer=@col2, seller=@col3, price=@col4, volume=@col5, currency=@col6, symbol=@col7, sector=@col8, bidPrice=@col9, askPrice=@col10",
        [])
    print("Took " + str(time.time() - startTime) + " seconds to complete")
    _loadedfile = 1

    # Second pass: feed the queue that the processing thread drains.
    with open('trades.csv', 'r') as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        for row in reader:
            # Skip the header row (identified by its 'buyer' column label).
            if row[1] == 'buyer':
                continue
            # csv.reader already yields str values, so no per-column
            # str() coercion is needed. Build the trade before taking the
            # lock to keep the critical section minimal, and use the lock
            # as a context manager so it is released even if put() raises.
            trade = mtrade.to_TradeData(row)
            with _qlock:
                _staticq.put(trade)