def parse(self, response):
    """Parse an open-trades page and yield one Operation item per trade row.

    Periodically snapshots the raw HTML to disk for post-mortem debugging,
    then walks the `openTrades` table (header and footer rows excluded).

    Args:
        response: scrapy Response for a system page; the system id is taken
            from the last path segment of the URL.

    Yields:
        Operation items keyed by a hash of the parsed trade fields.
    """
    system_id = response.url.split("/")[-1]

    def _dump_html(suffix):
        # Snapshot the raw page so parse failures can be reproduced later.
        # `with` guarantees the handle is closed even if the write fails
        # (the original open/write/close leaked on error).
        with open(f"{BASE_DIR}/parsed/{system_id}-{dt.now()}-{suffix}.html",
                  "w+") as fh:
            fh.write(response.text)

    # Dump the full page roughly every 5 minutes (minute 20 is skipped --
    # the old comment claimed "every 30 minutes"; TODO confirm intent).
    if dt.now().minute in [0, 5, 10, 15, 25, 30, 35, 40, 45, 50, 55]:
        _dump_html("ALL")

    noData = response.selector.xpath(
        '//div[@id="openTrades"]/div/span/text()').extract_first()
    if noData == 'No data to display.':
        self.logger.info('No open trades found for %s', response.url)
        return
    self.logger.info('Open trades found for %s', response.url)

    operations = response.selector.xpath('//div[@id="openTrades"]/table/tr')
    if not len(operations):
        # Some pages wrap the rows in an explicit <tbody>.
        operations = response.selector.xpath(
            '//div[@id="openTrades"]/table/tbody/tr')
    self.logger.info('Operations: %d', len(operations))

    if len(operations) < 3:  # header, [...], footer
        self.logger.info('Empty operations set')
        return

    persistentstore = PersistentStore()
    persistentstore.set_inited()  # items are available to compare for closing
    column_names = operations[0]
    for operation in operations[1:-1]:  # skip header and total rows
        self.logger.info('processing operation...')
        if dt.now().minute in [0, 30]:  # log parsed html every 30 minutes
            _dump_html("OPENED")
        try:
            openDateRaw = operation.xpath(
                'td[not(@style="display:none")][%i]//text()'
                % self.get_column_index_by_name(column_names, 'Open Date')
            ).extract_first().strip()
            symbolRaw = operation.xpath(
                'td[not(@style="display:none")][%i]//span//text()'
                % self.get_column_index_by_name(column_names, 'Symbol')
            ).extract_first()
            actionRaw = operation.xpath(
                'td[not(@style="display:none")][%i]//text()'
                % self.get_column_index_by_name(column_names, 'Action')
            ).extract_first().strip()
            lotsRaw = 5  # placeholder -- real lot size is not parsed yet
            openPriceRaw = operation.xpath(
                'td[not(@style="display:none")][%i]//text()'
                % self.get_column_index_by_name(column_names, 'Open Price')
            ).extract_first().strip()
            if not openPriceRaw:  # already stripped above
                openPriceRaw = 0
            self.logger.info(
                'openDate: %s, symbolRaw: %s, actionRaw: %s, lotsRaw: %s, openPriceRaw: %s',
                openDateRaw, symbolRaw, actionRaw, lotsRaw, openPriceRaw)

            item = Operation()
            # The hash identifies a trade across crawl cycles.
            str_hash = (str(openDateRaw) + str(symbolRaw) + str(actionRaw)
                        + str(lotsRaw) + str(openPriceRaw) + system_id)
            item['hashid'] = self.get_hash(str_hash)
            item['timeOrderDiscovered'] = time.time()
            item['timeTradeOpened'] = None
            item['timeTradeClosed'] = None
            # NOTE: key typo ("Attemped") kept -- other code reads this key.
            item['timeTradeAttempedOpened'] = None
            item['symbol'] = symbolRaw
            item['action'] = actionRaw
            item['lots'] = 1  # TODO fix this
            item['openPrice'] = float(openPriceRaw)
            item['closePrice'] = None
            item['url'] = response.url
            item['profit'] = None
            item['orderid'] = None
            item['tradeid'] = None
            self.logger.info('Position parsing completed')
            yield item
        except Exception:
            # Narrowed from a bare `except:`; best-effort per row, but log
            # the traceback instead of swallowing it silently.
            self.logger.exception('Exception parsing position')
def _crawl(result):
    """Run one crawl cycle, then reconcile parsed items with the broker.

    Opens market orders for newly discovered positions, closes positions
    that disappeared from the source, and re-schedules itself on the crawl
    Deferred so the cycle repeats after a 60 second pause.

    Args:
        result: previous callback result (unused; callback-chain plumbing).

    Returns:
        The crawl Deferred with restart callbacks and an errback attached.
    """
    deferred = process.crawl('rofxnet', domain='rofxnet')
    persistentstore = PersistentStore()
    logging.info('Persistent store...')
    persistentstore.log(logging)
    logging.info('Parsing items...')
    try:
        to_be_opened = persistentstore.to_be_opened_items()
        to_be_closed = persistentstore.to_be_closed_items()
        logging.info('To be opened: %d', len(to_be_opened))
        logging.info('To be closed: %d', len(to_be_closed))
        # Re-apply the order->trade mapping remembered from earlier cycles.
        for k, v in orderTradeMap.items():
            persistentstore.tag_trade_id(k, v)
        for key, item in to_be_opened.items():
            # Fuzzy-map the scraped symbol to a broker symbol (EURUSD > EUR/USD).
            matches = get_close_matches(item['symbol'], symbols)
            symbol = next(iter(matches), None)
            if not symbol:
                continue
            is_buy = (item['action'].upper() == 'BUY')
            lots = item['lots']
            order = con.open_trade(symbol=symbol, is_buy=is_buy, rate=0,
                                   is_in_pips=False, amount=lots,
                                   time_in_force='GTC', order_type='AtMarket')
            if order is None:
                # BUG FIX: the old code tested `order != 0` first, which is
                # True for None and crashed on `order.__orderId__` (the error
                # was then eaten by a bare except). The server sometimes
                # returns None even though the order WAS opened, so recover
                # the trade from the open-positions table instead.
                fake_order_id = int(time.time())
                persistentstore.tag_attemped_opened(key, fake_order_id)
                logging.info(
                    'Server returned None instead of Order but order was opened'
                )
                logging.info('Attemping manual grep from trades table.')
                trades = con.get_open_positions()
                trades = trades.loc[
                    (trades['currency'] == symbol) & (trades['isBuy'] == is_buy)
                ].sort_values(by=['time'])  # open trades for given parameters
                for index, trade in trades.iterrows():
                    trade_id = trade['tradeId']
                    # Skip trades already bound to another item.
                    existing = persistentstore.get_item_by_trade_id(trade_id)
                    if existing is None:
                        persistentstore.tag_opened(key, str(fake_order_id),
                                                   time.time())
                        orderTradeMap[str(fake_order_id)] = str(trade_id)
                        persistentstore.tag_trade_id(fake_order_id, trade_id)
                        break
            elif order != 0:
                # Deduplicated: tag_opened was previously called twice here.
                persistentstore.tag_opened(key, str(order.__orderId__),
                                           time.time())
                logging.info('New order opened: %s', key)
        for hashid in to_be_closed:
            item = persistentstore.get_item_by_id(hashid)
            lots = item['lots']
            con.close_trade(trade_id=item['tradeid'], amount=lots)
            profit = 0  # TODO: read the realized profit from the broker
            persistentstore.tag_closed(hashid, profit, time.time())
            logging.info('Position closed: %s', hashid)
    except Exception:
        # Narrowed from a bare `except:`; keep best-effort semantics but log
        # the traceback so failures are diagnosable.
        logging.exception('Exception parsing item')
    persistentstore.clear_parsed_items()
    persistentstore.dump()
    deferred.addCallback(
        lambda results: logging.info('waiting 60 seconds before restart...'))
    deferred.addCallback(sleep, seconds=60)
    deferred.addCallback(_crawl)
    deferred.addErrback(crash)  # <-- add errback here
    return deferred
def process_item(self, item, spider):
    """Record every scraped item in the shared persistent store.

    Args:
        item: the scraped item to persist.
        spider: the spider that produced the item (unused).

    Returns:
        The item unchanged, so downstream pipeline stages still receive it.
    """
    store = PersistentStore()
    store.add_parsed_item(item)
    return item