def test_lexical_analyze_divide_single_line_multi_line(self):
    # Test divide, single_line_comment, and multi_line_comment
    source_code = """var a = 1 / 2
// Hello World
/* Bye
World */
"""

    with open("testing.simc", "w") as file:
        file.write(source_code)

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    divide = Token("divide", "", 1)
    single_line_comment = Token("single_line_comment", " Hello World", 2)
    multi_line_comment = Token(
        "multi_line_comment",
        """ Bye
World """,
        3,
    )

    self.assertEqual(tokens[4], divide)
    self.assertEqual(tokens[7], single_line_comment)
    self.assertEqual(tokens[-4], multi_line_comment)
def get_price(web3, bpool_address, tokenin_address, tokenout_address):
    """Get price at a balancer pool located at address bpool_address.
    Price is given as the number of tokenin required to buy a single
    tokenout."""
    if is_pool_empty(web3, bpool_address):
        return 0

    if not is_token_in_exchange(
            Token.from_address(tokenin_address).symbol, bpool_address):
        return 0
    if not is_token_in_exchange(
            Token.from_address(tokenout_address).symbol, bpool_address):
        return 0

    bpool = web3.eth.contract(address=bpool_address, abi=bpool_abi)

    tokenin_decimals = Token().from_address(tokenin_address).decimals
    tokenout_decimals = Token().from_address(tokenout_address).decimals

    # Get spot price - it represents the ratio of one asset to another in terms of wei
    # on both sides. It's the number of wei you'll receive of one asset per wei of the
    # other asset. It is represented as an 18-decimal number so we divide that out.
    # mathematically: spot_price = num_wei_output_token / one_wei_input_token
    spot_price = (
        10**-18 *
        bpool.functions.getSpotPrice(tokenin_address, tokenout_address).call())

    # since spot price is in terms of wei, we multiply out the two tokens' decimals
    # to get price in 'normal' units
    spot_price_converted = spot_price * (10**(tokenout_decimals - tokenin_decimals))

    # print(f"{spot_price_converted} units of {tokenin_address} buys 1 unit of {tokenout_address}")
    return spot_price_converted
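# A minimal worked example (not from the source) of the decimal conversion above,
# assuming a hypothetical pair: tokenin with 18 decimals and tokenout with 8 decimals,
# where 1 whole tokenout costs 0.035 whole tokenin.
tokenin_decimals = 18
tokenout_decimals = 8
raw_spot_price = 350_000_000 * 10**18    # hypothetical getSpotPrice() value (wei ratio in 18-decimal fixed point)
spot_price = 10**-18 * raw_spot_price    # 3.5e8 wei of tokenin per wei of tokenout
spot_price_converted = spot_price * 10**(tokenout_decimals - tokenin_decimals)
print(spot_price_converted)              # roughly 0.035 tokenin per whole tokenout (up to float rounding)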
async def _get_liquidity_and_price_at_pool_addr(self, exchange_address):
    """Get liquidity (in eth, dai, and tokens) and get price of token priced
    in eth and dai. Returns a 5-tuple."""
    liquidity_eth = 0
    liquidity_dai = 0
    liquidity_tokens = 0
    price_eth = 0
    price_dai = 0

    for token_address, token_amount in get_reserves(self._w3, exchange_address):
        if token_address.lower() == Token("WETH").address.lower():
            liquidity_eth = token_amount
        if token_address.lower() == Token("DAI").address.lower():
            liquidity_dai = token_amount
        if token_address.lower() == self._currency_address.lower():
            liquidity_tokens = token_amount

    if liquidity_tokens == 0:
        # no liquidity; don't bother updating price
        pass
    else:
        price_eth = get_price(self._w3, exchange_address,
                              Token("WETH").address, self._currency_address)
        price_dai = get_price(self._w3, exchange_address,
                              Token("DAI").address, self._currency_address)

    return liquidity_eth, liquidity_dai, liquidity_tokens, price_eth, price_dai
def check_playable_position(self, number):
    if self.dico_sum_position[number] < 8:
        self.dico_sum_position[number] += 1
        self.player_list[self.current_player()].list_token.append(
            Token(int(number), self.dico_sum_position[number]))
        Player.total_token.append(
            (Token(int(number), self.dico_sum_position[number]),
             self.player_list[self.current_player()]))
        return True
    return False
def getExchangeAddressForTokenPair(first_token_name, second_token_name):
    token_addresses = sorted([
        Token().from_symbol(first_token_name).address.lower(),
        Token().from_symbol(second_token_name).address.lower()
    ])
    for token1_name, token2_name, address in exchanges:
        if (token1_name in [first_token_name, second_token_name]
                and token2_name in [first_token_name, second_token_name]):
            return (address,
                    Token().from_address(token_addresses[0]).symbol,
                    Token().from_address(token_addresses[1]).symbol)
    raise PairNotDefinedError(
        f"No pair {first_token_name}-{second_token_name} found")
def get_reserves(web3, token0_name, token1_name):
    """Get the reserves, in tokens, of a particular uniswap v2 pool."""
    exchange_address, first_token_name, second_token_name = getExchangeAddressForTokenPair(
        token0_name, token1_name)
    exchange = web3.eth.contract(address=exchange_address, abi=exchange_abi)
    reserves = exchange.functions.getReserves().call()
    reserves[0] = reserves[0] / 10**Token().from_symbol(first_token_name).decimals
    reserves[1] = reserves[1] / 10**Token().from_symbol(second_token_name).decimals

    if token0_name == second_token_name:
        reserves[0], reserves[1] = reserves[1], reserves[0]

    return reserves[0], reserves[1]
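# A hedged arithmetic sketch (hypothetical numbers, not from the source) of the
# normalization above, assuming the pool pairs an 8-decimal token with an 18-decimal token:
reserves = [1_500_000_000_000, 3 * 10**18]    # hypothetical raw getReserves() values
reserves[0] = reserves[0] / 10**8             # -> 15000.0 whole units of the first token
reserves[1] = reserves[1] / 10**18            # -> 3.0 whole units of the second token
# If the caller passed the token names in the opposite order to the pool's canonical
# ordering, the two values are swapped before being returned.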
def __init__(self, currency_symbol):
    super().__init__()
    try:
        # self._exchange_addresses = getExchangeAddressesForToken(currency_symbol)
        self._decimals = Token().from_symbol(currency_symbol).decimals
    except IndexError:
        raise RuntimeError(
            "Unknown currency_symbol {}, need to add address to token_class.py"
            .format(currency_symbol))
    self.currency_symbol = currency_symbol
    self.exchange_name = "Uniswap v3"
    self.command_names = ["uniswap"]
    self.short_url = "https://bit.ly/35nae4n"  # main uniswap pre-selected to 0xbtc
    self.volume_eth = 0
    self.show_yield = True
    self.hourly_volume_tokens = []  # list of volume for each of the last N hours
    self._time_volume_last_updated = 0
    self._w3 = Web3(Web3.HTTPProvider(ETHEREUM_NODE_URL))
    self._uniswap_api = Uniswap(address=None,
                                private_key=None,
                                version=3,
                                web3=self._w3)
async def _update_volume(self):
    volume_eth = 0
    volume_dai = 0
    volume_tokens = 0

    for exchange_address in self._exchange_addresses:
        pool_volume = get_volume(self._w3, exchange_address, num_hours=24)

        for token_address in pool_volume.keys():
            if token_address.lower() == Token("WETH").address.lower():
                volume_eth += pool_volume[token_address]
            if token_address.lower() == Token("DAI").address.lower():
                volume_dai += pool_volume[token_address]
            elif token_address.lower() == self._currency_address.lower():
                volume_tokens += pool_volume[token_address]

    self.volume_eth = volume_eth
    self.volume_usd = volume_dai
    self.volume_tokens = volume_tokens
def __init__(self, currency_symbol):
    super().__init__()
    try:
        self._exchange_addresses = getExchangeAddressesForToken(currency_symbol)
        self._decimals = Token().from_symbol(currency_symbol).decimals
    except IndexError:
        raise RuntimeError(
            "Unknown currency_symbol {}, need to add address to uniswap_v2.py"
            .format(currency_symbol))
    self.currency_symbol = currency_symbol
    self.exchange_name = "Uniswap v2"
    self.command_names = ["uniswapv2", "univ2", "uniswap v2", "uni v2"]
    self.short_url = "https://bit.ly/3wPyeu5"  # main uniswap pre-selected to 0xbtc
    self.volume_eth = 0
    self.show_yield = True
    self.hourly_volume_tokens = []  # list of volume for each of the last N hours
    self._time_volume_last_updated = 0
    self._w3 = Web3(Web3.HTTPProvider(ETHEREUM_NODE_URL))
    self._exchanges = [
        self._w3.eth.contract(address=a, abi=exchange_abi)
        for a in self._exchange_addresses
    ]
class TestTokenClass(unittest.TestCase):
    def setUp(self):
        self.token = Token("number", 1, 2)
        self.other = Token("number", 2, 2)

    def test__str__(self):
        self.assertEqual(str(self.token), "Token('number', '1', '2')")

    def test__eq__(self):
        self.assertTrue(self.token != self.other)

    def test_token2dig(self):
        self.assertEqual(self.token.token2dig("string"), 2)
        self.assertEqual(self.token.token2dig("multiply"), 11)
        self.assertEqual(self.token.token2dig("assignment"), 8)
        self.assertEqual(self.token.token2dig("while"), 22)
        self.assertEqual(self.token.token2dig("hello"), 0)
def test_lexical_analyze_assignment_equal(self):
    # Test assignment and equal
    source_code = "var a = 1 == 1"

    with open("testing.simc", "w") as file:
        file.write(source_code)

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    assignment = Token("assignment", "", 1)
    equal = Token("equal", "", 1)

    self.assertEqual(tokens[2], assignment)
    self.assertEqual(tokens[-2], equal)
def get_volume(web3, bpool_address, num_hours=24):
    """Get total volume in a balancer pool for all tokens in the pool.
    Returns a dictionary like:
    {
        "token_address": token_volume,
        "token_address": token_volume
    }
    """
    bpool = web3.eth.contract(address=bpool_address, abi=bpool_abi)
    swap_topic = "0x908fb5ee8f16c6bc9bc3690973819f32a4d4b10188134543c88706e0e1d43378"
    token_volumes = defaultdict(int)
    current_eth_block = web3.eth.blockNumber

    for event in web3.eth.getLogs({
            'fromBlock': current_eth_block - (int(60 * 60 * num_hours / SECONDS_PER_ETH_BLOCK)),
            'toBlock': current_eth_block - 1,
            'address': bpool_address}):
        topic0 = web3.toHex(event['topics'][0])
        if topic0 == swap_topic:
            # print('swap in tx', web3.toHex(event['transactionHash']))
            receipt = web3.eth.getTransactionReceipt(event['transactionHash'])
            parsed_logs = bpool.events.LOG_SWAP().processReceipt(receipt)

            # one TX may contain multiple logs - so find the correct one
            correct_log = None
            for log in parsed_logs:
                if log.address.lower() == bpool.address.lower():
                    correct_log = log
            if correct_log is None:
                logging.warning('bad swap transaction {}'.format(
                    web3.toHex(event['transactionHash'])))
                continue

            tokenAmountIn = correct_log.args.tokenAmountIn
            tokenAmountOut = correct_log.args.tokenAmountOut
            tokenIn = correct_log.args.tokenIn
            tokenOut = correct_log.args.tokenOut

            # print(f"swap {tokenAmountIn} of {tokenIn} for {tokenAmountOut} of {tokenOut}")
            token_volumes[tokenIn] += tokenAmountIn
            token_volumes[tokenOut] += tokenAmountOut
            continue
        else:
            # we only care about swaps, so ignore all else
            continue
            # logging.debug('unknown topic txhash {}'.format(web3.toHex(event['transactionHash'])))
            # logging.debug('unknown topic topic0 {}'.format(topic0))

    for token_address in token_volumes.keys():
        token_volumes[token_address] /= 10**Token().from_address(token_address).decimals

    return token_volumes
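# A rough worked example (assumption: SECONDS_PER_ETH_BLOCK is about 13, a common
# pre-merge estimate; the real constant lives elsewhere in the project) of the
# block-range arithmetic used for the getLogs() filter above.
SECONDS_PER_ETH_BLOCK = 13
num_hours = 24
blocks_to_scan = int(60 * 60 * num_hours / SECONDS_PER_ETH_BLOCK)
print(blocks_to_scan)    # -> 6646, i.e. getLogs() scans roughly the last day of blocks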
def get_reserves(web3, token0_name, token1_name, fee):
    """Get the reserves, in tokens, of a particular uniswap v3 pool."""
    exchange_address, first_token_name, second_token_name = getExchangeAddressForTokenPair(
        token0_name, token1_name, fee)

    token0_contract = web3.eth.contract(
        address=Token().from_symbol(token0_name).address, abi=erc20_abi)
    token0_balance = (
        token0_contract.functions.balanceOf(exchange_address).call()
        / 10**Token().from_symbol(token0_name).decimals)

    token1_contract = web3.eth.contract(
        address=Token().from_symbol(token1_name).address, abi=erc20_abi)
    token1_balance = (
        token1_contract.functions.balanceOf(exchange_address).call()
        / 10**Token().from_symbol(token1_name).decimals)

    return token0_balance, token1_balance
def scanTokens(self):
    while not self.isAtEnd():
        # We are at the beginning of the next lexeme.
        self.start = self.current
        self.scanToken()

    # last token in token list, end of input
    self.tokens.append(Token(TokenType.EOF, "", None, self.line))
    return self.tokens
def add_spaces(cls, current, spaces, tokens, id, new_lines, res):
    if current in spaces:
        tokens[id] = Token(u' ')
        id += 1
        current += 1
        current, tokens, id = cls.add_new_lines(current, new_lines, tokens, id, res, spaces)
        current, tokens, id = cls.add_r_lines(current, res, tokens, id, spaces, new_lines)
        current, tokens, id = cls.add_spaces(current, spaces, tokens, id, new_lines, res)
    return current, tokens, id
def test_lexical_analyze_left_right_paren_call_end(self):
    # Test left_paren, right_paren, and call_end
    source_code = "var a = (1)"

    with open("testing.simc", "w") as file:
        file.write(source_code)

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    left_paren = Token("left_paren", "", 1)
    right_paren = Token("right_paren", "", 1)
    call_end = Token("call_end", "", 1)

    self.assertEqual(tokens[3], left_paren)
    self.assertEqual(tokens[5], right_paren)
    self.assertEqual(tokens[6], call_end)
def test_lexical_analyze_modulus_equal_modulus(self):
    # Test modulus_equal and modulus
    source_code = """var a = 1 % 2
a %= 3
"""

    with open("testing.simc", "w") as file:
        file.write(source_code)

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    modulus_equal = Token("modulus_equal", "", 2)
    modulus = Token("modulus", "", 1)

    self.assertEqual(tokens[8], modulus_equal)
    self.assertEqual(tokens[4], modulus)
def __init__(self, currency_symbol):
    super().__init__()
    self._exchange_addresses = get_exchange_addresses_for_token(currency_symbol)
    if len(self._exchange_addresses) == 0:
        raise RuntimeError(
            "Unknown currency_symbol {}, need to add address to balancer.py"
            .format(currency_symbol))
    self._currency_address = Token().from_symbol(currency_symbol).address
    self._decimals = Token().from_symbol(currency_symbol).decimals
    self.currency_symbol = currency_symbol
    self.exchange_name = "Balancer"
    self.command_names = ["balancer"]
    self.short_url = "https://bit.ly/3mp1qCS"  # balancer configured to eth->0xbtc
    self._time_volume_last_updated = 0
    self._w3 = Web3(Web3.HTTPProvider(ETHEREUM_NODE_URL))
def test_lexical_analyze_multiply_equal_multiply(self):
    # Test multiply_equal and multiply
    source_code = """var a = 1 * 2
a *= 1
"""

    with open("testing.simc", "w") as file:
        file.write(source_code)

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    multiply_equal = Token("multiply_equal", "", 2)
    multiply = Token("multiply", "", 1)

    self.assertEqual(tokens[8], multiply_equal)
    self.assertEqual(tokens[4], multiply)
async def _get_price_and_liquidity_for_pair(self, token0_address, token1_address, fee):
    paired_token_address = (
        token0_address
        if token1_address.lower() == Token().from_symbol(self.currency_symbol).address.lower()
        else token1_address)
    paired_token_symbol = Token().from_address(paired_token_address).symbol
    liquidity_tokens, liquidity_pair = get_reserves(
        self._w3, self.currency_symbol, paired_token_symbol, fee)

    # bail early if the number of tokens LP'd is very small
    # TODO: this should probably be configurable. Or generated automatically
    #       based on some USD value, not token value
    if liquidity_tokens < _MINIMUM_ALLOWED_LIQUIDITY_IN_TOKENS:
        raise NoLiquidityException(
            f"Less than {_MINIMUM_ALLOWED_LIQUIDITY_IN_TOKENS} tokens LP'd for exchange contract."
        )

    # get price of paired token (in USD) to determine price of
    # <self.currency_symbol> in USD. Strategy changes depending on the pair.
    price_in_paired_token = get_price(self._uniswap_api, paired_token_symbol,
                                      self.currency_symbol, fee)

    if paired_token_symbol == "WETH":
        paired_token_price_in_usd = self.eth_price_usd
    else:
        # get the paired token's price in Eth. If there is less than $500 in
        # liquidity to determine this, then skip this pair when determining price.
        liquidity_eth_of_paired_token, _ = get_reserves(
            self._w3, "WETH", paired_token_symbol, _DEFAULT_PAIR_FEE)
        if liquidity_eth_of_paired_token < 500 / self.eth_price_usd:
            raise NoLiquidityException(
                f"Less than {500} USD LP'd for paired token {paired_token_symbol}, pair token price not considered accurate. Skipping pair."
            )
        else:
            paired_token_price_in_eth = get_price(self._uniswap_api, "WETH",
                                                  paired_token_symbol,
                                                  _DEFAULT_PAIR_FEE)
            paired_token_price_in_usd = paired_token_price_in_eth * self.eth_price_usd

    price_in_usd = price_in_paired_token * paired_token_price_in_usd
    return price_in_usd, liquidity_tokens
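# A hedged arithmetic sketch (hypothetical prices, not from the source) of the price
# composition above for a non-WETH pair: the token trades at 0.0005 of the paired token,
# the paired token trades at 20 ETH, and ETH is assumed to be 2000 USD.
eth_price_usd = 2000.0
price_in_paired_token = 0.0005                                          # token priced in the paired token
paired_token_price_in_eth = 20.0                                        # paired token priced in ETH
paired_token_price_in_usd = paired_token_price_in_eth * eth_price_usd   # 40000.0 USD
price_in_usd = price_in_paired_token * paired_token_price_in_usd        # 20.0 USD per token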
def get_list_of_terms(tokenizedTermList, docId):
    token_list = list()

    # Loops through all the terms in the document and adds them to the list
    # with their associated docId
    for term in tokenizedTermList:
        term = normalize(term)
        if term != '':
            tokenObj = Token(term, docId)
            token_list.append(tokenObj)
            # To remove duplicates uncomment the following
            # term_dict.append(term)

    return token_list
def keyword_identifier(source_code, i, table, scanner_obj):
    """
    Process keywords and identifiers in source code

    Params
    ======
    source_code (str)     : The string containing pulse source code
    i (int)               : The current index in the source code
    table (SymbolTable)   : Symbol table constructed holding information about identifiers and constants
    scanner_obj (Scanner) : Instance of Scanner class

    Returns
    =======
    (Token) : The token generated for the keyword or identifier
    (int)   : Current position in source code
    """
    value = ""

    # Loop until we get a non-alphanumeric character
    while is_alnum(source_code[i]):
        value += source_code[i]
        i += 1

    # Check if value is a keyword or not
    if is_keyword(value):
        return Token(value, "", scanner_obj.line_num), i

    # Check if identifier is in symbol table
    id = table.get_by_symbol(value)

    # If identifier is not in symbol table then give it a placeholder datatype var
    if id == -1:
        id = table.entry(value, "var", "variable")

    # Return the id token and current index in source code
    return Token("id", id, scanner_obj.line_num), i
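# A self-contained sketch (not the project's scanner) of the keyword-vs-identifier
# split above: scan an alphanumeric run, then branch on an assumed keyword set.
KEYWORDS = {"var", "fun", "while", "print"}    # illustrative subset only

def scan_word(source, i):
    value = ""
    while source[i].isalnum():
        value += source[i]
        i += 1
    kind = value if value in KEYWORDS else "id"
    return kind, value, i

assert scan_word("fun\0", 0) == ("fun", "fun", 3)
assert scan_word("total1 \0", 0) == ("id", "total1", 6)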
def numeric_val(source_code, i, table, scanner_obj):
    """
    Processes numeric values in the source code

    Params
    ======
    source_code (str)     : The string containing simc source code
    i (int)               : The current index in the source code
    table (SymbolTable)   : Symbol table constructed holding information about identifiers and constants
    scanner_obj (Scanner) : Instance of Scanner class

    Returns
    =======
    (Token) : The token generated for the numeric constant
    (int)   : Current position in source code
    """
    numeric_constant = ""

    # Loop until we get a non-digit character
    while is_digit(source_code[i]):
        numeric_constant += source_code[i]
        i += 1

    # If a numeric constant contains more than 1 decimal point (.) then that is invalid
    if numeric_constant.count(".") > 1:
        error(
            "Invalid numeric constant, cannot have more than one decimal point in a"
            " number!",
            scanner_obj.line_num,
        )

    # Check the length after . to distinguish between float and double
    length = len(numeric_constant.split(".")[1]) if "." in numeric_constant else 0

    # Determine type of numeric value
    type = "int"
    if length != 0:
        if length <= 7:
            type = "float"
        else:
            type = "double"

    # Make entry in symbol table
    id = table.entry(numeric_constant, type, "constant")

    # Return number token and current index in source code
    return Token("number", id, scanner_obj.line_num), i
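# A minimal self-contained sketch (not the project's code) of the float/double
# heuristic above: count the digits after the decimal point and pick a type.
def classify_numeric(numeric_constant):
    length = len(numeric_constant.split(".")[1]) if "." in numeric_constant else 0
    if length == 0:
        return "int"
    return "float" if length <= 7 else "double"

assert classify_numeric("3") == "int"
assert classify_numeric("3.1415") == "float"
assert classify_numeric("3.141592653") == "double"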
def test_lexical_analyze_left_right_brace_newline(self):
    # Test left_brace, right_brace, and newline
    source_code = """if(1 == 1) {
    print(1)
}
"""

    with open("testing.simc", "w") as file:
        file.write(source_code)

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    left_brace = Token("left_brace", "", 1)
    right_brace = Token("right_brace", "", 3)
    newline = Token("newline", "", 1)

    self.assertEqual(tokens[7], left_brace)
    self.assertEqual(tokens[-2], right_brace)
    self.assertEqual(tokens[8], newline)
def test_numeric_val_int(self):
    source_code = "3\0"
    i = 0
    table = SymbolTable()
    line_num = 1

    token, _ = numeric_val(source_code, i, table, line_num)
    other = Token("number", 1, 1)

    self.assertEqual(token, other)
    self.assertEqual(table.symbol_table, {1: ["3", "int", "constant"]})
def get_price(uniswap_api, token0_name, token1_name, fee):
    """Get the price at a particular uniswap v3 pool, in terms of token0 / token1."""
    if token0_name == "ETH":
        token0_address = "0x0000000000000000000000000000000000000000"
        token0_decimals = 18
    else:
        token0_address = Token().from_symbol(token0_name).address
        token0_decimals = Token().from_symbol(token0_name).decimals

    if token1_name == "ETH":
        token1_address = "0x0000000000000000000000000000000000000000"
        token1_decimals = 18
    else:
        token1_address = Token().from_symbol(token1_name).address
        token1_decimals = Token().from_symbol(token1_name).decimals

    price = (uniswap_api.get_price_input(token1_address, token0_address,
                                         1 * 10**token1_decimals, fee)
             / 10**token0_decimals)
    return price
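# A hedged arithmetic sketch (hypothetical numbers, not from the source) of the
# scaling above, assuming token0 has 18 decimals and token1 has 8 decimals:
token0_decimals = 18
token1_decimals = 8
raw_quote = 35_000_000_000_000_000    # hypothetical get_price_input() result: wei of token0
                                      # quoted for selling 1 * 10**token1_decimals of token1
price = raw_quote / 10**token0_decimals
print(price)                          # -> 0.035 token0 per whole token1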
def tokenize(input_text):
    nltk.download('stopwords')
    stopwords = set(nltk.corpus.stopwords.words('english'))

    sentences = split_input_text_into_sentences(input_text)

    all_tokens_of_all_sentences = []
    for sent_index, sent in enumerate(sentences):
        tokens_in_this_sentence = []
        for word_index, word in enumerate(sent.split()):
            token = Token()
            token.original_word = word
            token.word_without_punctuations = remove_surrounding_punctuations(word).lower()
            # flag stopwords so later stages can skip them
            token.is_stopword = token.word_without_punctuations in stopwords
            tokens_in_this_sentence.append(token)
        all_tokens_of_all_sentences.append(tokens_in_this_sentence)

    set_parts_of_speech_in_tokens(all_tokens_of_all_sentences)
    return all_tokens_of_all_sentences
def test_string_val_char(self):
    source_code = '"h"\\0'
    i = 0
    table = SymbolTable()
    line_num = 1

    token, _ = string_val(source_code, i, table, line_num)
    other = Token("string", 1, 1)

    self.assertEqual(token, other)
    self.assertEqual(table.symbol_table, {1: ["'h'", "char", "constant"]})
def test_lexical_analyze_less_than_less_than_equal_left_shift(self):
    # Test less_than, less_than_equal, left_shift
    source_code = """1 < 2
1 <= 2
1 << 2
"""

    with open("testing.simc", "w") as file:
        file.write(source_code)

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    less_than = Token("less_than", "", 1)
    less_than_equal = Token("less_than_equal", "", 2)
    left_shift = Token("left_shift", "", 3)

    self.assertEqual(tokens[1], less_than)
    self.assertEqual(tokens[5], less_than_equal)
    self.assertEqual(tokens[9], left_shift)
def test_keyword_identifier_keyword(self):
    # Test a keyword
    source_code = "fun\\0"
    i = 0
    table = SymbolTable()
    line_num = 1

    token, _ = keyword_identifier(source_code, i, table, line_num)
    other = Token("fun", "", 1)

    self.assertEqual(token, other)