class BitsharesExplorerParser(BaseRin):
    """Parses assets and markets from a Bitshares explorer API and writes
    sufficiently liquid trading pairs to a dated ``pairs-*.lst`` file.
    """

    _logger = logging.getLogger('Rin.BitsharesExplorerParser')
    _lock = asyncio.Lock()
    _date = utils.get_today_date()
    # Previous run's pairs file, used as a fallback when parsing fails.
    _old_file = utils.get_file(BaseRin.output_dir,
                               utils.get_dir_file(BaseRin.output_dir, 'pairs'))
    _new_file = utils.get_file(BaseRin.output_dir, f'pairs-{_date}.lst')
    _pairs_count = 0

    def __init__(self, loop):
        self._ioloop = loop
        self._bts_price_in_usd = BTSPriceParser(loop).get_bts_price_in_usd()
        # Volume thresholds are configured in USD; convert once to BTS terms.
        self._overall_min_daily_vol = \
            self.overall_min_daily_volume / self._bts_price_in_usd
        self._assets_url = self.explorer_uri + '/assets'
        self._assets_markets_url = self.explorer_uri + '/get_markets?asset_id={}'
        # BUGFIX: was 'base={}"e={}' — a mangled '&quote=' (HTML-entity
        # decoding of '&quot'); the quote parameter was never sent correctly.
        self._market_data_url = self.explorer_uri + '/get_volume?base={}&quote={}'

    async def _check_pair_on_valid(self, pair, base_price):
        """Write `pair` to the new file when its daily volume, valued via
        `base_price`, exceeds the per-pair threshold."""
        market_data = await self.get_data(self._market_data_url.format(*pair),
                                          delay=5, logger=self._logger,
                                          json=True)
        if float(market_data['base_volume']) * float(base_price) > \
                self.pair_min_daily_volume:
            await self.write_data('{}:{}'.format(*pair), self._new_file,
                                  self._lock)
            self._pairs_count += 1

    async def _get_valid_pairs(self, asset_info):
        """Fetch every market of one asset and validate each pair."""
        asset_markets_data = await self.get_data(
            self._assets_markets_url.format(asset_info.id),
            delay=5, logger=self._logger, json=True)
        # market[1] looks like 'BASE/QUOTE' — TODO confirm against live API.
        pairs = [market[1].strip().split('/') for market in asset_markets_data]
        # Plain loop instead of a side-effect-only list comprehension;
        # sequential on purpose, requests are throttled via `delay`.
        for pair in pairs:
            await self._check_pair_on_valid(pair, asset_info.price)

    async def _get_valid_assets(self):
        """Return (id, price) records for assets above the overall volume
        floor."""
        assets_data = await self.get_data(self._assets_url, delay=2,
                                          logger=self._logger, json=True)
        AssetInfo = namedtuple('AssetsInfo', ['id', 'price'])
        # Positions: asset[2]=id, asset[3]=price, asset[4]=daily volume
        # (per the explorer response layout — TODO confirm).
        assets = [
            AssetInfo(asset[2], asset[3]) for asset in assets_data
            if float(asset[4]) > self._overall_min_daily_vol
        ]
        self._logger.info(f'Parsed: {len(assets)} assets.')
        return assets

    def start_parsing(self):
        """Entry point: parse assets, then their pairs.

        Returns FileData(file, new_version) on success; on failure delegates
        to `actions_when_error` with the previous file.
        """
        try:
            task = self._ioloop.create_task(self._get_valid_assets())
            assets_info = (self._ioloop.run_until_complete(
                asyncio.gather(task)))[0]
            tasks = (self._ioloop.create_task(self._get_valid_pairs(asset_info))
                     for asset_info in assets_info)
            self._ioloop.run_until_complete(asyncio.gather(*tasks))
            utils.remove_file(self._old_file)
            self._logger.info(f'Parsed: {self._pairs_count} pairs.')
            FileData = namedtuple('FileData', ['file', 'new_version'])
            return FileData(self._new_file, True)
        except TypeError:
            self._logger.exception('JSON data retrieval error.')
            return self.actions_when_error(self._old_file)
        except Exception:
            # BUGFIX: the exception was passed as an extra %-arg with no
            # placeholder, which itself raised during log formatting;
            # logger.exception already records the traceback.
            self._logger.exception('Exception occurred while parsing.')
            return self.actions_when_error(self._old_file)
class BTSPriceParser(BaseRin):
    """Obtains the current BTS price in USD, preferring a ticker node and
    falling back to scraping coingecko; persists the value to a dated file."""

    _logger = logging.getLogger('Rin.BTSPriceParser')
    # Russian-locale coingecko price page for BTS/USD (fallback source).
    _site_url = 'https://www.coingecko.com/ru/%D0%B4%D0%B8%D0%BD%D0%B0%D0%BC%D0%B8%D0%BA%D0%B0_%D1%86%D0%B5%D0%BD' \
                '/bitshares/usd'
    # BUGFIX: was 'base=USD"e=BTS' — a mangled '&quote=' (HTML-entity
    # decoding of '&quot'); the quote parameter was broken.
    _node_url = 'http://185.208.208.184:5000/get_ticker?base=USD&quote=BTS'
    _lock = asyncio.Lock()
    _date = utils.get_today_date()
    _old_file = utils.get_file(
        BaseRin.output_dir, utils.get_dir_file(BaseRin.output_dir, 'bst_price'))
    _new_file = utils.get_file(BaseRin.output_dir, f'bst_price-{_date}.lst')

    def __init__(self, loop):
        self.ioloop = loop

    async def _get_price_from_node(self):
        """Return the latest USD/BTS price from the ticker node, or None
        when the node reports an error."""
        response = await self.get_data(self._node_url, delay=2,
                                       logger=self._logger, json=True)
        try:
            return float(response['latest'])
        except KeyError:
            # Node errors come back under the 'detail' key.
            self._logger.warning(response['detail'])

    async def _parse_price_from_site(self):
        """Scrape the BTS price from the coingecko page."""
        html = await self.get_data(self._site_url, delay=2,
                                   logger=self._logger)
        bs_obj = BeautifulSoup(html, 'lxml')
        # RU locale: ',' is the decimal separator, spaces group thousands.
        price = bs_obj.find('span', {'data-coin-symbol': 'bts'}).get_text() \
            .replace('$', '').replace(',', '.').replace(' ', '').strip()
        return float(price)

    async def _get_price(self):
        """Try each price source in order; persist and return the first
        truthy result, otherwise fall back to the previous file's value."""
        methods = [self._get_price_from_node, self._parse_price_from_site]
        for method in methods:
            price = await method()
            if price:
                await self.write_data(str(price), self._new_file, self._lock)
                return price
        self._logger.warning('Could not get BTS price in USD.')
        return self.actions_when_error(self._old_file, value_from_file=True)

    def get_bts_price_in_usd(self):
        """Entry point: run the price lookup on the event loop; on any
        failure return the previous file's value."""
        task = self.ioloop.create_task(self._get_price())
        try:
            price = self.ioloop.run_until_complete(asyncio.gather(task))
        except ValueError:
            self._logger.exception('Could not convert parsed price to float.')
            return self.actions_when_error(self._old_file,
                                           value_from_file=True)
        except AttributeError:
            self._logger.exception('Could not get price from html.')
            return self.actions_when_error(self._old_file,
                                           value_from_file=True)
        except TypeError:
            self._logger.exception('HTML data retrieval error.')
            return self.actions_when_error(self._old_file,
                                           value_from_file=True)
        except Exception:
            # BUGFIX: the exception was passed as an extra %-arg with no
            # placeholder, breaking log-record formatting.
            self._logger.exception('Exception occurred while getting BTS price.')
            return self.actions_when_error(self._old_file,
                                           value_from_file=True)
        else:
            utils.remove_file(self._old_file)
            # asyncio.gather returns a list of results — unwrap element 0.
            self._logger.info(f'BTS price is ${price[0]}.')
            return price[0]
class ChainsCreator(BaseRin):
    """Builds 3-hop trading chains (A->B->C->A) around each main asset from
    the freshest available pairs file."""

    _logger = logging.getLogger('Rin.ChainsCreator')
    _lock = asyncio.Lock()
    _main_assets = ['BTS', 'BRIDGE.BTC', 'CNY', 'USD']
    _old_file = utils.get_file(BaseRin.output_dir,
                               utils.get_dir_file(BaseRin.output_dir, 'chains'))
    _date = utils.get_today_date()
    _new_file = utils.get_file(BaseRin.output_dir, f'chains-{_date}.lst')
    _chains_count = 0

    def __init__(self, loop):
        self._ioloop = loop
        self._blacklisted_assets = self.get_blacklisted_assets()
        self._file_with_pairs = self._get_file_with_pairs()

    def _get_file_with_pairs(self):
        """Return a freshly parsed pairs file if any parser produced one,
        otherwise the first stale fallback path."""
        parsers = [BitsharesExplorerParser, CryptofreshParser]
        file_with_pairs = []
        for parser in parsers:
            file_data = parser(self._ioloop).start_parsing()
            try:
                # Success: parsers return FileData(file, new_version) ...
                if file_data.new_version:
                    return file_data.file
            except AttributeError:
                # ... fallback: a bare path with no .new_version attribute.
                file_with_pairs.append(file_data)
        return file_with_pairs[0]

    async def _check_chain_on_entry_in_blacklist(self, chain):
        """Return True if any asset of the chain is blacklisted
        (falls through to None otherwise)."""
        for asset in chain:
            if asset in self._blacklisted_assets:
                return True

    @staticmethod
    async def _get_chain_with_ids(pygram_obj, *args):
        """Convert the six asset names of a chain to ids.

        args is (A, B, B, C, C, A): every asset appears twice — at an even
        index and at the odd index preceding the next even one (with
        wrap-around, so args[-1] pairs with args[0]).  Converting only
        args[0], args[2], args[4] and writing each id to positions i and
        i-1 fills all six slots with one conversion per distinct asset.
        """
        chains_with_ids = list(args)
        for i in range(0, len(args), 2):
            chains_with_ids[i] = chains_with_ids[i - 1] = \
                await pygram_obj.convert_name_to_id(args[i])
        return '{}:{} {}:{} {}:{}'.format(*chains_with_ids), chains_with_ids

    @staticmethod
    async def _adjust_asset_location_in_seq(asset, seq):
        """Reverse `seq` in place if needed so that it starts with `asset`."""
        if seq[0] != asset:
            seq.reverse()
        return seq

    async def _create_chains_for_asset(self, main_asset, pairs):
        """Enumerate all A->B->C->A chains with A == main_asset and write
        the non-blacklisted, not-yet-seen ones to the new chains file."""
        chains = []
        pygram_asset = Asset()
        await pygram_asset.connect()
        for pair in pairs:
            if main_asset in pair:
                # Copy after orienting: _adjust_asset_location_in_seq
                # reverses the shared list in place.
                main = (await self._adjust_asset_location_in_seq(
                    main_asset, pair)).copy()
                for pair2 in pairs:
                    if main[1] in pair2 and main_asset not in pair2:
                        secondary = (await self._adjust_asset_location_in_seq(
                            main[1], pair2)).copy()
                        for pair3 in pairs:
                            if secondary[1] in pair3 and main_asset in pair3:
                                tertiary = (
                                    await self._adjust_asset_location_in_seq(
                                        secondary[1], pair3)).copy()
                                chain = await self._get_chain_with_ids(
                                    pygram_asset,
                                    *main, *secondary, *tertiary)
                                if chain[0] not in chains:
                                    chains.append(chain[0])
                                    self._chains_count += 1
                                    if not await self._check_chain_on_entry_in_blacklist(chain[1]):
                                        await self.write_data(
                                            chain[0], self._new_file,
                                            lock=self._lock)
        await pygram_asset.close()

    @staticmethod
    def _remove_pairs_duplicates_from_seq(seq):
        """Split 'A:B' strings and drop reversed/exact duplicates.

        BUGFIX: the previous version reversed elements and deleted from the
        list while iterating over it, and its membership test (`el in
        new_seq` after `el.reverse()`) always matched `el` itself — which
        deleted arbitrary valid pairs.  Now the first occurrence of each
        unordered pair is kept, in its original orientation (downstream
        code re-orients pairs as needed).
        """
        seen = set()
        result = []
        for pair_str in seq:
            pair = pair_str.split(':')
            key = frozenset(pair)
            if key not in seen:
                seen.add(key)
                result.append(pair)
        return result

    def start_creating_chains(self):
        """Entry point: build chains for every main asset.

        Returns the new chains file, or the fallback file on error.
        """
        try:
            pairs_lst = self._remove_pairs_duplicates_from_seq(
                self.get_data_from_file(self._file_with_pairs)
            )
            tasks = [self._ioloop.create_task(
                self._create_chains_for_asset(asset, pairs_lst))
                for asset in self._main_assets]
            self._ioloop.run_until_complete(asyncio.wait(tasks))
        except Exception:
            # BUGFIX: the exception was passed as an extra %-arg with no
            # placeholder, which broke log-record formatting.
            self._logger.exception('Exception occurred while creating chains.')
            return self.actions_when_error(self._old_file)
        else:
            utils.remove_file(self._old_file)
            self._logger.info(f'Created: {self._chains_count} chains.')
            return self._new_file
class DefaultBTSFee(VolLimits):
    """Computes the default BTS order-creation fee and its equivalent in
    each configured limit asset, persisting the result to a dated file."""

    _logger = logging.getLogger('Rin.DefaultBTSFee')
    _lock = asyncio.Lock()
    _old_file = utils.get_file(
        VolLimits.output_dir,
        utils.get_dir_file(VolLimits.output_dir, 'btsdefaultfee'))
    _date = utils.get_today_date()
    _new_file = utils.get_file(VolLimits.output_dir,
                               f'btsdefaultfee-{_date}.lst')
    # Lifetime members pay 20% of the nominal blockchain fee.
    _lifetime_member_percent = 0.2
    # Human-readable summary set by _get_converted_order_fee.
    _fees = None

    def __init__(self, ioloop):
        self._ioloop = ioloop
        super().__init__(self._ioloop)

    async def _get_converted_order_fee(self):
        """Fetch the order-creation fee (in BTS) and convert it into every
        limit asset; writes the {asset: fee} mapping as JSON and returns it.
        """
        assets = VolLimits.volume_limits.keys()
        # Price of each non-BTS asset quoted against BTS ('1.3.0').
        prices = await asyncio.gather(*[
            self._get_asset_price(asset, '1.3.0')
            for asset in assets if asset != '1.3.0'
        ])
        node = await Blockchain().connect(ws_node=VolLimits.wallet_uri)
        # Fee for three orders at the lifetime-member discount.
        order_create_fee = (await node.get_global_properties(
            create_order_fee=True)) * self._lifetime_member_percent * 3
        await node.close()
        # Re-align prices with the asset keys: assumes '1.3.0' is the FIRST
        # key of volume_limits — TODO confirm.
        prices.insert(0, order_create_fee)
        final_fees = {}
        for asset, price in zip(assets, prices):
            if asset == '1.3.0':
                # BTS itself needs no conversion.
                final_fees[asset] = price
            else:
                converted = (Decimal(order_create_fee) * Decimal(price)) \
                    .quantize(Decimal('0.00000000'), rounding=ROUND_HALF_UP)
                final_fees[asset] = float(converted)
        self._fees = '{}:{} {}:{} {}:{} {}:{}' \
            .format(*itertools.chain(*final_fees.items()))
        await self.write_data(ujson.dumps(final_fees), self._new_file,
                              self._lock)
        return final_fees

    def get_converted_default_bts_fee(self):
        """Entry point: compute the converted fees; on connection errors
        fall back to the previous file's JSON."""
        fee_task = self._ioloop.create_task(self._get_converted_order_fee())
        try:
            converted_fees = self._ioloop.run_until_complete(
                asyncio.gather(fee_task))[0]
        except ClientConnectionError:
            self._logger.exception(
                'Client connection error occurred while getting converted default bts fee'
            )
            return ujson.loads(
                self.actions_when_errors_with_read_data(self._old_file)[0])
        else:
            utils.remove_file(self._old_file)
            self._logger.info(
                f'Successfully got prices and calculate fees: {self._fees}\n')
            return converted_fees
class VolLimits(BaseRin):
    """Converts the configured per-asset volume limits into asset amounts
    using current ticker prices and persists them to a dated file."""

    _lock = asyncio.Lock()
    _logger = logging.getLogger('Rin.VolLimits')
    # BUGFIX: was 'base={}"e={}' — a mangled '&quote=' (HTML-entity
    # decoding of '&quot'); the quote parameter was broken.
    _url = 'http://185.208.208.184:5000/get_ticker?base={}&quote={}'
    _old_file = utils.get_file(
        BaseRin.output_dir,
        utils.get_dir_file(BaseRin.output_dir, 'vol_limits'))
    _date = utils.get_today_date()
    _new_file = utils.get_file(BaseRin.output_dir, f'vol_limits-{_date}.lst')
    # Human-readable summary set by _get_limits.
    _vol_limits_pattern = None

    def __init__(self, loop):
        self._ioloop = loop

    async def _calculate_limits(self, prices):
        """Multiply each configured limit by its asset price.

        `prices` holds one price per non-'1.3.121' asset in volume_limits
        key order; the `break` assumes '1.3.121' is the LAST key — TODO
        confirm.
        """
        limits = {}
        for i, (key, val) in enumerate(self.volume_limits.items()):
            if key == '1.3.121':
                # The reference asset needs no conversion.
                limits[key] = val
                break
            # BUGFIX: quantize the PRODUCT to 8 decimal places, not just the
            # price — consistent with DefaultBTSFee._get_converted_order_fee.
            limits[key] = float(
                (Decimal(val) * Decimal(prices[i])).quantize(
                    Decimal('0.00000000'), rounding=ROUND_HALF_UP))
        return limits

    async def _get_asset_price(self, base_asset, quote_asset):
        """Return the latest price for base/quote from the ticker node,
        or None when the node reports an error."""
        response = await self.get_data(self._url.format(
            base_asset, quote_asset), logger=None, delay=1, json=True)
        try:
            return response['latest']
        except KeyError:
            # Node errors come back under the 'detail' key.
            self._logger.warning(response['detail'])

    async def _get_limits(self):
        """Fetch prices for all limit assets and compute converted limits;
        writes the mapping as JSON and returns it."""
        assets = self.volume_limits.keys()
        prices = await asyncio.gather(*[
            self._get_asset_price(asset, '1.3.121')
            for asset in assets if asset != '1.3.121'
        ])
        vol_limits = await self._calculate_limits(prices)
        self._vol_limits_pattern = '{}:{} {}:{} {}:{} {}:{}'\
            .format(*itertools.chain(*vol_limits.items()))
        await self.write_data(ujson.dumps(vol_limits), self._new_file,
                              self._lock)
        return vol_limits

    def get_volume_limits(self):
        """Entry point: compute the limits; on connection errors fall back
        to the previous file's JSON."""
        tasks = [self._ioloop.create_task(self._get_limits())]
        try:
            vol_limits = self._ioloop.run_until_complete(
                asyncio.gather(*tasks))[0]
        except ClientConnectionError:
            self._logger.exception(
                'Client connection error occurred while getting volume limits.'
            )
            return ujson.loads(
                self.actions_when_errors_with_read_data(self._old_file)[0])
        else:
            utils.remove_file(self._old_file)
            self._logger.info(
                f'Successfully got prices and calculate limits: {self._vol_limits_pattern}'
            )
            return vol_limits
class ChainsWithGatewayPairFees(BaseRin):
    """Attaches per-market gateway fees to every chain produced by
    ChainsCreator and persists the combined records."""

    _url = 'https://wallet.bitshares.org/#/market/{}_{}'
    _logger = logging.getLogger('Rin.ChainsWithGatewayPairFees')
    _lock = asyncio.Lock()
    _old_file = utils.get_file(
        BaseRin.output_dir,
        utils.get_dir_file(BaseRin.output_dir, 'chains_with_fees'))
    _date = utils.get_today_date()
    _new_file = utils.get_file(BaseRin.output_dir,
                               f'chains_with_fees-{_date}.lst')

    def __init__(self, loop):
        self._ioloop = loop
        self._file_with_chains = ChainsCreator(
            self._ioloop).start_creating_chains()
        self._fees_count = 0

    async def _get_fees_for_chain(self, chain):
        """Return a float64 array of the market-fee fraction of the quote
        asset of every pair in `chain`."""
        assets_objs = [Asset() for _ in range(len(chain))]
        # Plain loops instead of side-effect-only list comprehensions.
        for asset_obj in assets_objs:
            await asset_obj.connect(self.wallet_uri)
        raw_chain_fees = await asyncio.gather(
            *(obj.get_asset_info(pair.split(':')[1])
              for obj, pair in zip(assets_objs, chain)))
        for asset_obj in assets_objs:
            await asset_obj.close()
        # market_fee_percent is expressed in percent; convert to a fraction.
        return np.array(
            [float(fee['options']['market_fee_percent']) / 100.0
             for fee in raw_chain_fees],
            dtype=self.dtype_float64)

    async def _get_chain_fees(self, chain):
        """Fetch fees for one chain, persist 'pair pair pair fee fee fee',
        and return a ChainAndFees record."""
        fees = await self._get_fees_for_chain(chain)
        data = '{} {} {} {} {} {}'.format(*itertools.chain(chain, fees))
        await self.write_data(data, self._new_file, self._lock)
        self._fees_count += 3  # one fee per pair of the 3-pair chain
        ChainAndFees = namedtuple('ChainAndFees', ['chain', 'fees'])
        return ChainAndFees(tuple(chain), fees)

    def _final_data_preparation(self, data):
        """Rebuild ChainAndFees records from raw file rows (fallback path).

        Each `el` is sliced twice, so it must be an indexable sequence:
        items 0-2 are pairs, the rest fees — presumably the row shape of
        get_transformed_data; TODO confirm.
        """
        ChainAndFees = namedtuple('ChainAndFees', ['chain', 'fees'])
        for el in data:
            arr = np.array([*itertools.islice(el, 3, None)],
                           dtype=self.dtype_float64)
            yield ChainAndFees(tuple(itertools.islice(el, 0, 3)), arr)

    def get_chains_with_fees(self):
        """Entry point: gather fees for all chains concurrently; on
        connection failure rebuild records from the previous file."""
        chains = self.get_transformed_data(self._file_with_chains)
        chains_num = len(chains)
        tasks = [
            self._ioloop.create_task(self._get_chain_fees(chain))
            for chain in chains
        ]
        try:
            chains_and_fees = self._ioloop.run_until_complete(
                asyncio.gather(*tasks))
        except ClientConnectionError:
            self._logger.error(
                'Client connection error occurred while getting chain fees.')
            return self._final_data_preparation(
                self.get_transformed_data(self._old_file, generator=True))
        else:
            utils.remove_file(self._old_file)
            self._logger.info(
                f'Successfully got {self._fees_count} fees for {chains_num} chains.'
            )
            return chains_and_fees
class CryptofreshParser(BaseRin):
    """Scrapes cryptofresh.com for assets and pairs with sufficient daily
    volume and writes valid pairs to a dated ``pairs-*.lst`` file."""

    _logger = logging.getLogger('Rin.CryptofreshParser')
    _main_page_url = 'https://cryptofresh.com/assets'
    _assets_url = 'https://cryptofresh.com{}'
    _lock = asyncio.Lock()
    _date = utils.get_today_date()
    # Previous run's pairs file, used as a fallback when parsing fails.
    _old_file = utils.get_file(BaseRin.output_dir,
                               utils.get_dir_file(BaseRin.output_dir, 'pairs'))
    _new_file = utils.get_file(BaseRin.output_dir, f'pairs-{_date}.lst')
    _pairs_count = 0

    def __init__(self, loop):
        self._ioloop = loop

    @staticmethod
    async def _get_volume(str_):
        """Extract the third $-amount found in a table row as a float.

        Raises IndexError when the row holds fewer than three amounts.
        """
        pattern = re.compile(r'(\$\d+([,.]?\d+)*)')
        res = re.findall(pattern, str_)[2]
        # Strip the '$' sign and thousands commas before converting.
        return float(re.sub(r'\$?,?', '', res[0]).strip())

    @staticmethod
    async def _get_asset(str_, find_asset=False):
        """Extract either an asset link ('/a/NAME', find_asset=True) or a
        'BASE : QUOTE' market pair from a table row."""
        pattern = re.compile(r'/a/\w+\.?\w+') if find_asset \
            else re.compile(r'\w+\.?\w+ : \w+\.?\w+')
        return re.findall(pattern, str_)[0].replace(' ', '').strip()

    async def _get_valid_data(self, html, min_volume, find_asset=False):
        """Walk the (volume-sorted) table: collect asset links when
        find_asset is True, otherwise write qualifying pairs to the new
        file; stop at the first row below `min_volume`."""
        bs_obj = BeautifulSoup(html, 'lxml')
        table = bs_obj.find('tbody')
        valid_assets = []
        for elem in table.find_all('tr'):
            data = await self._get_asset(str(elem), find_asset)
            try:
                vol = await self._get_volume(str(elem))
            except IndexError:
                # A row without a volume column ends the useful data.
                break
            if vol > min_volume:
                if not find_asset:
                    await self.write_data(data, self._new_file, self._lock)
                    self._pairs_count += 1
                    continue
                valid_assets.append(data)
            else:
                # Table is presumably sorted by volume descending, so stop
                # at the first row under the threshold — TODO confirm.
                break
        if find_asset:
            self._logger.info(f'Parsed: {len(valid_assets)} assets.')
            return valid_assets

    def start_parsing(self):
        """Entry point: scrape the asset index, then each asset's markets.

        Returns FileData(new_file, True) on success, the old file path when
        no asset passes the volume floor, or the error fallback otherwise.
        """
        try:
            task = self._ioloop.create_task(
                self.get_data(self._main_page_url, delay=2,
                              logger=self._logger)
            )
            assets_page_html = self._ioloop.run_until_complete(
                asyncio.gather(task))
            task = self._ioloop.create_task(
                self._get_valid_data(*assets_page_html,
                                     self.overall_min_daily_volume, True)
            )
            assets = self._ioloop.run_until_complete(asyncio.gather(task))[0]
            if assets:
                tasks = (self._ioloop.create_task(
                    self.get_data(self._assets_url.format(asset),
                                  delay=30, logger=self._logger))
                    for asset in assets)
                htmls = self._ioloop.run_until_complete(
                    asyncio.gather(*tasks))
                tasks = (self._ioloop.create_task(
                    self._get_valid_data(html_, self.pair_min_daily_volume))
                    for html_ in htmls)
                self._ioloop.run_until_complete(asyncio.wait(tasks))
                utils.remove_file(self._old_file)
                self._logger.info(f'Parsed: {self._pairs_count} pairs.')
                FileData = namedtuple('FileData', ['file', 'new_version'])
                return FileData(self._new_file, True)
            else:
                self._logger.info('Cryptofresh assets is corrupted (low vol).')
                return self._old_file
        except TypeError:
            self._logger.exception('HTML data retrieval error.')
            return self.actions_when_error(self._old_file)
        except Exception:
            # BUGFIX: the exception was passed as an extra %-arg with no
            # placeholder, which itself raised during log formatting;
            # logger.exception already records the traceback.
            self._logger.exception('Exception occurred.')
            return self.actions_when_error(self._old_file)