def crawl():
    #initiate own classes
    postgres = postgresdbAccess.postgresAccess()
    trader = tradingEngineAccess.tradingAccess()
    test = validationAccess.liveAccess()

    #connect to binance
    client = Client(apiKey, apiSecret, {'timeout': 600})

    #short pause so the freshly opened API connection is not hammered right away
    time.sleep(1)

    #get current market data
    tickers = client.get_ticker()

    #count trades that are still open (takeprofit set, no result yet)
    countOpen = postgres.sqlQuery("SELECT count(*) FROM " + dbTable +
                                  " WHERE resultpercent IS NULL" +
                                  " AND takeprofit IS NOT NULL;")

    for i in range(len(tickers)):
        intermDict = buildPairDict(tickers, i)

        #don't write data when it is not usable
        if intermDict['askPrice'] <= 0 or intermDict['highPrice'] <= 0:
            continue
        #skip flat pairs, leveraged UP/DOWN tokens and pairs outside the base currency
        if (1.015 >= intermDict['lowPrice'] / intermDict['highPrice'] >= 0.985
                or "UP" in intermDict['symbol']
                or "DOWN" in intermDict['symbol']
                or not intermDict['symbol'].endswith(baseCurrency)):
            continue

        #write intermDict to database
        postgres.insertRow(intermDict)

        #run calculation
        trader.runCalculation(intermDict)

    #check if any open trades meet the conditions to be closed yet
    if int(countOpen[0][0]) > 0:
        test.validate()

    #close database connection
    postgres.databaseClose()
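#Hedged sketch, not the original helper: buildPairDict() is called above but
#defined elsewhere. Assuming it simply maps one entry of the Binance 24h ticker
#list to a dict with float prices, it might look roughly like this; the exact
#field set is an assumption based on the keys the crawler reads.
def buildPairDictSketch(tickers, i):
    t = tickers[i]
    return {
        'symbol': t['symbol'],
        'askPrice': float(t['askPrice']),
        'bidPrice': float(t['bidPrice']),
        'lowPrice': float(t['lowPrice']),
        'highPrice': float(t['highPrice']),
    }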
def validate(self):
    self.postgres = postgresdbAccess.postgresAccess()

    #fetch all trades that are still open (takeprofit set, no result yet)
    sql = ("SELECT id, symbol, takeprofit, stoploss, askprice, positioncost" +
           " FROM " + self.dbTable +
           " WHERE resultpercent IS NULL AND takeprofit IS NOT NULL;")
    self.bA = pd.DataFrame(self.postgres.sqlQuery(sql))
    self.bA[0] = pd.to_numeric(self.bA[0])
    self.bA[2] = pd.to_numeric(self.bA[2])
    self.bA[3] = pd.to_numeric(self.bA[3])
    self.bA[4] = pd.to_numeric(self.bA[4])
    self.bA[5] = pd.to_numeric(self.bA[5])

    for i, row in self.bA.iterrows():
        #all price rows recorded for this symbol after the trade was opened
        sql = ("SELECT id, askprice, bidprice FROM " + self.dbTable +
               " WHERE id > '" + str(self.bA[0][i]) +
               "' AND symbol = '" + str(self.bA[1][i]) + "';")
        validated = pd.DataFrame(self.postgres.sqlQuery(sql))

        if len(validated) > 0:
            validated = validated.apply(pd.to_numeric, errors='coerce')

            #rows where the ask price crossed the take-profit or stop-loss level
            tpData = validated[validated[1] >= self.bA[2][i]]
            slData = validated[validated[1] <= self.bA[3][i]]

            #close the trade on whichever level was crossed first (lowest id)
            if (len(slData) > 0 and len(tpData) > 0
                    and slData[0].min() < tpData[0].min()):
                self.closeTrade(slData, i)
            elif (len(slData) > 0 and len(tpData) > 0
                  and slData[0].min() > tpData[0].min()):
                self.closeTrade(tpData, i)
            elif len(slData) > 0:
                self.closeTrade(slData, i)
            elif len(tpData) > 0:
                self.closeTrade(tpData, i)

    self.postgres.databaseClose()
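#Hedged illustration (made-up values) of the close decision above: the row id
#serves as a time proxy, so the subset with the smaller minimum id was crossed
#first and decides whether the trade closes at the stop-loss or the take-profit.
import pandas as pd

tpDataExample = pd.DataFrame({0: [120, 135]})   #ids where askprice >= takeprofit
slDataExample = pd.DataFrame({0: [110, 140]})   #ids where askprice <= stoploss
#110 < 120, so the stop-loss was hit first and the slData branch would fire
assert slDataExample[0].min() < tpDataExample[0].min()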
def runCalculation(self, tick):
    #only trade the whitelisted symbols
    if tick['symbol'] not in [
            "ENJBNB", "AAVEBNB", "NEARBNB", "ROSEBNB", "SOLBNB"
    ]:
        return

    self.postgres = postgresdbAccess.postgresAccess()

    #fetch the last 24 hours of ask prices for this symbol
    sql = ("SELECT id, askprice FROM " + self.dbTable +
           " WHERE symbol LIKE '" + tick['symbol'] +
           "' AND time > NOW() - INTERVAL '24 hours';")
    largeData = pd.DataFrame(self.postgres.sqlQuery(sql))

    #only run the calculation once data for 1000 minutes (roughly 18 hours
    #including calculation time) exists, to avoid buying fresh Binance listings
    #(usually unprofitable at the beginning)
    if len(largeData) > 1000:
        #convert columns id and askprice to float
        largeData = largeData.apply(pd.to_numeric,
                                    errors='coerce',
                                    downcast='float')

        #calculate the high-low range of the window
        diff = largeData[1].max() - largeData[1].min()

        #calculate Fibonacci retracement levels plus a +/-0.1% tolerance band
        fibRetracement = pd.DataFrame(self.fibLvl)
        maxAsk = largeData[1].max()
        fibRetracement[1] = maxAsk - diff * fibRetracement[0]
        fibRetracement[2] = fibRetracement[1] * 0.999
        fibRetracement[3] = fibRetracement[1] * 1.001

        #check whether an open trade already exists for this symbol
        sql = ("SELECT count(*) FROM " + self.dbTable +
               " WHERE takeprofit IS NOT NULL AND resultpercent IS NULL" +
               " AND symbol LIKE '" + tick['symbol'] + "';")

        #correlation of id (a time proxy) and price
        corValue = largeData[0].corr(largeData[1])

        #get statistical parameters
        statisticsTools = {}
        statisticsTools["stDev"] = statistics.stdev(largeData[1])
        statisticsTools["skew"] = skew(largeData[1])
        statisticsTools["kurtosis"] = kurtosis(largeData[1])

        #open a trade if no open trade exists for the symbol and the current
        #ask price sits inside the tolerance band of level 7, 8 or 9
        for i in [7, 8, 9]:
            if (int(self.postgres.sqlQuery(sql)[0][0]) == 0
                    and float(tick['askPrice']) <= fibRetracement[3][i]
                    and float(tick['askPrice']) >= fibRetracement[2][i]):
                self.openTrade(fibRetracement, i, largeData, corValue, tick,
                               statisticsTools)

    self.postgres.databaseClose()
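#Hedged standalone sketch of the retracement arithmetic used above, assuming
#self.fibLvl is a list of ratios such as [0, 0.236, 0.382, 0.5, 0.618]; the
#ratio values and the function name are illustrative only.
import pandas as pd

def fibLevelsSketch(fibLvl, maxAsk, minAsk):
    fib = pd.DataFrame(fibLvl)
    diff = maxAsk - minAsk
    fib[1] = maxAsk - diff * fib[0]   #retracement price for each ratio
    fib[2] = fib[1] * 0.999           #lower tolerance bound
    fib[3] = fib[1] * 1.001           #upper tolerance bound
    return fib

#fibLevelsSketch([0, 0.236, 0.382, 0.5, 0.618], maxAsk=1.10, minAsk=1.00)
#yields level prices 1.10, 1.0764, 1.0618, 1.05 and 1.0382 with their bands.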
def backtest():
    #initiate empty open positions
    openPositions = {}

    #initiate postgresdb class
    postgres = postgresdbAccess.postgresAccess()

    #get unique symbol list
    sql = ("SELECT distinct symbol FROM " + dbTable + ";")
    uniqueSymbol = pd.DataFrame(postgres.sqlQuery(sql))
    uniqueSymbol = uniqueSymbol[0]

    #initiate progress counter
    percentBar = 0

    #loop over every symbol in the symbol list
    for symbol in uniqueSymbol:
        #print progress
        print(percentBar, " / ", len(uniqueSymbol))
        percentBar = percentBar + 1

        #skip if symbol is not to be considered
        #null askprice is sorted out by the crawler already
        if not (symbol.endswith(baseCurrency) and len(symbol) < 11):
            continue

        #if an open position was left over from the previous symbol,
        #simulate its close at the last row seen for that symbol
        if openPositions:
            writeTrade(openPositions, row, postgres)
            openPositions = {}

        #query every row for this symbol
        sql = ("SELECT id, time, symbol, askprice, " +
               "bidprice, highprice, quotevolume, " +
               "pricechangepercent, weightedavgprice, openprice" +
               " FROM " + dbTable + " WHERE" +
               " symbol like '" + symbol + "' order by id;")
        print(sql)
        bigData = pd.DataFrame(postgres.sqlQuery(sql))
        bigData[0] = pd.to_numeric(bigData[0], errors='coerce', downcast='float')
        bigData[1] = pd.to_datetime(bigData[1])
        bigData[3] = pd.to_numeric(bigData[3], errors='coerce', downcast='float')
        bigData[4] = pd.to_numeric(bigData[4], errors='coerce', downcast='float')
        bigData[6] = pd.to_numeric(bigData[6], errors='coerce', downcast='float')
        bigData[7] = pd.to_numeric(bigData[7], errors='coerce', downcast='float')
        bigData[8] = pd.to_numeric(bigData[8], errors='coerce', downcast='float')
        bigData[9] = pd.to_numeric(bigData[9], errors='coerce', downcast='float')

        #start of the 33-hour lookback window for each row
        bigData[5] = bigData[1] - timedelta(hours=33)

        #loop over every row
        for index, row in bigData.iterrows():
            #only evaluate rows that already have a full lookback window behind them
            before_start_date = bigData[1] <= row[5]
            if len(bigData.loc[before_start_date]) > 0:
                #get the data inside the lookback window up to the current row
                after_start_date = bigData[1] >= row[5]
                before_end_date = bigData[1] <= row[1]
                between_two_dates = after_start_date & before_end_date
                fibDates = bigData.loc[between_two_dates]

                diff = fibDates[3].max() - fibDates[3].min()

                #calculate Fibonacci retracement levels plus a +/-0.1% tolerance band
                fibRetracement = pd.DataFrame(fibLvl)
                maxAsk = fibDates[3].max()
                fibRetracement[1] = maxAsk - diff * fibRetracement[0]
                fibRetracement[2] = fibRetracement[1] * 0.999
                fibRetracement[3] = fibRetracement[1] * 1.001

                #calculate correlation of id and ask price
                corValue = fibDates[0].corr(fibDates[3])

                #get statistical parameters
                statisticsTools = {}
                statisticsTools["stDev"] = statistics.stdev(fibDates[3])
                statisticsTools["skew"] = skew(fibDates[3])
                statisticsTools["kurtosis"] = kurtosis(fibDates[3])

                #if an open position exists, check if it can be closed
                if openPositions:
                    if (row[3] <= openPositions['stopLoss']
                            or row[3] >= openPositions['takeProfit']):
                        writeTrade(openPositions, row, postgres)
                        #clear open position
                        openPositions = {}
                else:
                    #loop over the considered Fibonacci retracement levels
                    for i in [7]:
                        #check if the buy requirements are met
                        if row[7] <= -10:
                            #if (statisticsTools["skew"] <= -0.1 and
                            #        row[3] >= fibRetracement[2][i] and
                            #        row[3] <= fibRetracement[3][i]):
                            openPositions['startId'] = fibDates[0].min()
                            openPositions['id'] = row[0]
                            openPositions['midId'] = fibDates[0].max()
                            openPositions['bidPrice'] = row[4]
                            openPositions['fibLevel'] = i
                            openPositions['symbol'] = symbol
                            openPositions['askPrice'] = row[3]
                            openPositions['corValue'] = corValue
                            openPositions['stDev'] = statisticsTools["stDev"]
                            openPositions['skew'] = statisticsTools["skew"]
                            openPositions['kurtosis'] = statisticsTools["kurtosis"]
                            openPositions['takeProfit'] = fibRetracement[2][i + 5]
                            openPositions['stopLoss'] = fibRetracement[2][i - 7]

    #close database connection
    postgres.databaseClose()
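#Hedged standalone example of the windowing used in the backtest loop above:
#for each row, only rows whose timestamp falls inside the lookback window
#ending at that row feed the retracement and statistics. A 2-hour window and
#toy prices are used here instead of the 33 hours above, for brevity.
import pandas as pd
from datetime import timedelta

df = pd.DataFrame({1: pd.date_range("2022-01-01", periods=5, freq="h"),
                   3: [1.00, 1.02, 0.99, 1.05, 1.01]})
df[5] = df[1] - timedelta(hours=2)

rowExample = df.iloc[4]
mask = (df[1] >= rowExample[5]) & (df[1] <= rowExample[1])
window = df.loc[mask]
print(window[3].max() - window[3].min())   #the 'diff' fed into the fib levels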
def initiateTable():
    postgres = postgresdbAccess.postgresAccess()
    postgres.tableCreate(cols)
    postgres.databaseClose()
    print("SUCCESS: initiateTable()")
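#Hedged usage sketch, not part of the original code: a minimal entry point that
#wires the functions above together, assuming initiateTable() and crawl() live
#in the same script. One crawl per minute is an assumption, roughly in line
#with the "data for 1000 minutes" check in runCalculation().
import time

if __name__ == "__main__":
    initiateTable()
    while True:
        crawl()
        time.sleep(60)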