async def add_token(self, *, contract_address, name=None, symbol=None, decimals=None):
    """Register a custom ERC20 token for the authenticated user.

    Looks the token up (validating it is a real ERC20 contract), then
    upserts a row in ``token_balances`` with visibility level 2 so the
    token shows up in the user's wallet.

    :param contract_address: address of the ERC20 contract to add
    :param name: optional display name override
    :param symbol: optional symbol override
    :param decimals: optional decimals override
    :returns: the token dict returned by ``get_token``
    :raises JsonRPCInvalidParamsError: if the request is unauthenticated
    :raises JsonRPCError: if the address is not a valid ERC20 token
    """
    if not self.user_toshi_id:
        raise JsonRPCInvalidParamsError(data={'id': 'bad_arguments', 'message': "Missing authorisation"})

    token = await self.get_token(contract_address)
    if token is None:
        raise JsonRPCError(None, -32000, "Invalid ERC20 Token",
                           {'id': 'bad_arguments', 'message': "Invalid ERC20 Token"})

    # prefer the canonical address reported by the token lookup
    contract_address = token['contract_address']

    if 'balance' in token:
        balance = token['balance']
    else:
        log.warning("didn't find a balance when adding custom token: {}".format(contract_address))
        balance = '0x0'

    upsert_sql = (
        "INSERT INTO token_balances (eth_address, contract_address, name, symbol, decimals, balance, visibility) "
        "VALUES ($1, $2, $3, $4, $5, $6, $7) "
        "ON CONFLICT (eth_address, contract_address) DO UPDATE "
        "SET name = EXCLUDED.name, symbol = EXCLUDED.symbol, decimals = EXCLUDED.decimals, balance = EXCLUDED.balance, visibility = EXCLUDED.visibility")
    async with self.db:
        await self.db.execute(upsert_sql, self.user_toshi_id, contract_address,
                              name, symbol, decimals, balance, 2)
        await self.db.commit()
    return token
async def wait_for_migration(con, poll_frequency=1):
    """Block until the database schema version matches the latest migration.

    Finds the latest expected database version (by scanning for
    ``sql/migrate_NNNNNNNN.sql`` files) and only exits once the version
    stored in the database matches. Use in sub processes that depend on a
    main process handling database migration.

    :param con: asyncpg connection used to poll ``database_version``
    :param poll_frequency: seconds to sleep between polls
    """
    if not os.path.exists("sql/create_tables.sql"):
        log.warning(
            "Missing sql/create_tables.sql: cannot initialise database")
        return
    # scan for the highest-numbered migration script on disk
    version = 0
    while True:
        version += 1
        if not os.path.exists("sql/migrate_{:08}.sql".format(version)):
            version -= 1
            break
    while True:
        try:
            row = await con.fetchrow(
                "SELECT version_number FROM database_version LIMIT 1")
            # fix: fetchrow returns None when the table exists but is empty;
            # indexing None raised TypeError and crashed the waiter
            if row is not None and version == row['version_number']:
                break
        except asyncpg.exceptions.UndefinedTableError:
            # if this happens, it could just be the first time starting the app,
            # just keep waiting
            pass
        # fix: the original called .format(version) on a string with no
        # placeholder; include the version we're waiting for
        log.info("waiting for database migration... (expecting version {})".format(version))
        # wait some time before checking again
        await asyncio.sleep(poll_frequency)
    # done!
    log.info("got database version: {}".format(version))
    return
def __init__(self, urls, **kwargs):
    """Set up the tornado Application.

    Pulls ``cookie_secret`` and ``debug`` from the project config (a
    ``cookie_secret`` kwarg takes precedence), creates a thread pool
    executor sized from the ``[executor]`` config section, and optionally
    wires up a Mixpanel analytics instance when ``[mixpanel] token`` is
    configured.

    :param urls: tornado URL spec list passed through to the base class
    """
    cookie_secret = kwargs.pop('cookie_secret', None)
    if cookie_secret is None:
        cookie_secret = config['general'].get('cookie_secret', None)
    super(Application, self).__init__(
        urls,
        debug=config['general'].getboolean('debug'),
        cookie_secret=cookie_secret,
        **kwargs)

    if 'executor' in config:
        max_workers = config['executor'].getint('max_workers', None)
    else:
        max_workers = None
    self.executor = concurrent.futures.ThreadPoolExecutor(
        max_workers=max_workers)

    if 'mixpanel' in config and 'token' in config['mixpanel']:
        try:
            from toshi.analytics import TornadoMixpanelConsumer
            import mixpanel
            self.mixpanel_consumer = TornadoMixpanelConsumer()
            self.mixpanel_instance = mixpanel.Mixpanel(
                config['mixpanel']['token'], consumer=self.mixpanel_consumer)
        except ImportError:
            # fix: was a bare `except:` which silently swallowed *any* error
            # (bad token, consumer bugs, even KeyboardInterrupt) while the
            # message only describes a missing library; only catch import
            # failures here so real errors surface
            log.warning(
                "Mixpanel is configured, but the mixpanel-python library hasn't been installed"
            )
            self.mixpanel_instance = None
    else:
        self.mixpanel_instance = None
async def post(self):
    """Register a list of ethereum addresses for notifications.

    Expects a JSON body with a non-empty ``addresses`` list of valid
    ethereum addresses. Reuses the caller's existing (legacy) service /
    registration id if one is on record, otherwise stores the addresses
    under the ``'LEGACY'`` placeholder. Responds 204 on success.

    :raises JSONHTTPError: 400 when ``addresses`` is missing, empty, or
        contains an invalid address.
    """
    toshi_id = self.verify_request()
    payload = self.json

    addresses = payload['addresses'] if 'addresses' in payload else []
    if len(addresses) == 0 or not all(validate_address(addr) for addr in addresses):
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'bad_arguments',
                'message': 'Bad Arguments'
            }]
        })

    async with self.db:
        # see if this toshi_id is already registered, listening to it's own toshi_id
        rows = await self.db.fetch(
            "SELECT * FROM notification_registrations "
            "WHERE toshi_id = $1 AND eth_address = $1 AND service != 'ws'",
            toshi_id)
        if rows:
            if len(rows) > 1:
                log.warning(
                    "LEGACY REGISTRATION FOR '{}' HAS MORE THAN ONE DEVICE OR SERVICE"
                    .format(toshi_id))
            service = rows[0]['service']
            registration_id = rows[0]['registration_id']
        else:
            service = registration_id = 'LEGACY'

        # simply store all the entered addresses with no service/registrations id
        insert_sql = (
            "INSERT INTO notification_registrations (toshi_id, service, registration_id, eth_address) "
            "VALUES ($1, $2, $3, $4) ON CONFLICT (toshi_id, service, registration_id, eth_address) DO NOTHING")
        for address in addresses:
            await self.db.execute(insert_sql, toshi_id, service, registration_id, address)
        await self.db.commit()

    self.set_status(204)
def get_client_type(self):
    """Classify the calling client from its User-Agent header.

    :returns: ``IOS_CLIENT`` for agents starting with ``Toshi/``,
        ``ANDROID_CLIENT`` for agents starting with ``Android``,
        otherwise ``UNKNOWN_CLIENT``.
    """
    # fix: headers['User-Agent'] raised KeyError when the header was
    # missing; .get() makes header-less requests fall through to UNKNOWN
    agent = self.request.headers.get('User-Agent', '')
    # note: the original wrapped this in try/except ValueError, but no
    # statement here can raise ValueError, so the handler was dead code
    if agent.startswith("Toshi/"):
        return IOS_CLIENT
    elif agent.startswith("Android"):
        return ANDROID_CLIENT
    return UNKNOWN_CLIENT
def should_filter_special_dapps(self):
    """Return True if the client is too old to see special dapps.

    Parses the build number out of the User-Agent header: iOS builds
    <= 150 and Android builds <= 34 are filtered. Unknown or unparseable
    agents are not filtered.

    :returns: bool
    """
    # fix: headers['User-Agent'] raised KeyError when the header was
    # missing; .get() makes header-less requests return False instead
    agent = self.request.headers.get('User-Agent', '')
    try:
        if agent.startswith("Toshi/"):
            # e.g. "Toshi/123 (...)" -> 123
            ios_build = int(agent.split(' ')[0].split('/')[1])
            return ios_build <= 150
        elif agent.startswith("Android"):
            # e.g. "Android...:34" -> 34
            android_build = int(agent.split(":")[-1])
            return android_build <= 34
        return False
    except ValueError:
        # build segment wasn't an integer
        log.warning("Got unexpected user agent: {}".format(agent))
        return False
async def _run(self):
    """Poll redis for results on all pending login keys until none remain.

    Each pending key has a matching future in ``self._futures``; a redis
    ``blpop`` result resolves that future, and futures older than
    ``LOGIN_TOKEN_EXPIRY`` are failed with ``TimeoutError``. Sets
    ``self._running = False`` on exit.
    """
    while len(self._keys) > 0:
        try:
            # I'm worried here about attacks on the login endpoint that
            # would fill the keys list and block further login attempts
            # and/or use up all the memory on the server. This block
            # is in place to warn and debug issues should this end up
            # happening
            if len(self._keys) > 500:
                grouping = len(self._keys) // 500
                if grouping > self._posted_warning:
                    log.warning(
                        "Login keys list has reached {} keys".format(
                            len(self._keys)))
                    self._posted_warning = grouping
                elif grouping < self._posted_warning:
                    log.warning(
                        "Login keys list has returned to {} keys".format(
                            len(self._keys)))
                    self._posted_warning = grouping
            elif self._posted_warning > 0:
                log.warning(
                    "Login keys list length has returned to {} keys".
                    format(len(self._keys)))
                self._posted_warning = 0
            # poll in batches of 500 keys
            allkeys = self._keys[:]
            for offset in range(0, len(allkeys), 500):
                keys = allkeys[offset:offset + 500]
                if len(keys) == 0:
                    continue
                # the timeout here will cause <1 second latency on new login
                # requests.
                result = await get_redis_connection().blpop(
                    *keys, timeout=1, encoding='utf-8')
                if result:
                    key, result = result
                    self._keys.remove(key)
                    keys.remove(key)
                    if key not in self._futures:
                        log.warning("got result for missing login key")
                    else:
                        future = self._futures.pop(key)
                        future.set_result(result)
                # cleanup stale keys
                for key in keys:
                    if key not in self._futures:
                        # fix: previously fell through to the
                        # self._futures[key] lookup below after removing a
                        # future-less key, raising KeyError
                        self._keys.remove(key)
                        continue
                    future = self._futures[key]
                    if asyncio.get_event_loop().time(
                    ) - future._time > LOGIN_TOKEN_EXPIRY:
                        # fix: was `future.set_excetion` (typo) which raised
                        # AttributeError instead of expiring the login
                        future.set_exception(TimeoutError())
                    if future.done():
                        del self._futures[key]
                        self._keys.remove(key)
        except:
            # deliberately broad: this loop must survive any redis/runtime
            # error and keep servicing the remaining keys
            log.exception("error while checking logins")
            if len(self._keys) > 0:
                await asyncio.sleep(1)
    self._running = False
async def post(self):
    """Build an unsigned transaction skeleton from the request's JSON body.

    Normalises the many accepted spellings of the transaction fields
    (camelCase, legacy names) to the canonical keyword arguments of
    ``create_transaction_skeleton`` and writes its result. Responds 400
    with a structured error on invalid input.
    """
    # accepted alias -> canonical argument name, applied in this order so
    # that later aliases win when several are supplied
    key_aliases = (
        ('from', 'from_address'),
        ('gasPrice', 'gas_price'),
        ('gasprice', 'gas_price'),
        ('startgas', 'gas'),
        ('gasLimit', 'gas'),
        ('networkId', 'network_id'),
        ('chainId', 'network_id'),
        ('chain_id', 'network_id'),
        ('tokenAddress', 'token_address'),
    )
    try:
        # 'to' is special: when absent entirely we must still pass an
        # explicit to_address=None
        if 'to' in self.json:
            self.json['to_address'] = self.json.pop('to')
        elif 'to_address' not in self.json:
            self.json['to_address'] = None

        for alias, canonical in key_aliases:
            if alias in self.json:
                self.json[canonical] = self.json.pop(alias)

        result = await ToshiEthJsonRPC(
            None, self.application,
            self.request).create_transaction_skeleton(**self.json)
    except JsonRPCError as e:
        log.warning("/tx/skel failed: " + json_encode(e.data) +
                    "\" -> arguments: " + json_encode(self.json) + "\"")
        raise JSONHTTPError(400, body={'errors': [e.data]})
    except TypeError:
        # unexpected keyword arguments end up here
        log.warning("/tx/skel failed: bad arguments \"" +
                    json_encode(self.json) + "\"")
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'bad_arguments',
                'message': 'Bad Arguments'
            }]
        })
    self.write(result)
async def wrapper(self, *args, **kwargs): psql = POSTGRESQL_FACTORY() # this fixes a regression in the testing.commons library that causes # the setup method to be called multiple times when `cache_initialize_db` # is used without an init_handler psql.setup() psql.start() config['database'] = psql.dsn() config['database']['ssl'] = '0' set_database_pool(None) self.pool = await prepare_database() try: f = fn(self, *args, **kwargs) if asyncio.iscoroutine(f): await f # wait for all the connections to be released if hasattr(self.pool, '_con_count'): # pre 0.10.0 con_count = lambda: self.pool._con_count elif hasattr(self.pool, '_holders'): # post 0.10.0 con_count = lambda: len(self.pool._holders) else: raise Exception( "Don't know how to get connection pool count") err_count = 0 while con_count() != self.pool._queue.qsize(): # if there are connections still in use, there should be some # other things awaiting to be run. this simply pass control back # to the ioloop to continue execution, looping until all the # connections are released. err_count += 1 if err_count > 5: log.warning("database connections still unreleased") await asyncio.sleep(0.1) finally: await self.pool.close() set_database_pool(None) psql.stop(_signal=signal.SIGKILL) del config['database']
def request_to_migrate(address):
    """Ask the wallet service (at $WALLET_URL) to migrate the given address.

    Sends a JSON POST with HTTP Basic auth built from the
    WALLET_BASIC_AUTH_USERNAME / WALLET_BASIC_AUTH_PASSWORD env vars.

    NOTE(review): AsyncHTTPClient().fetch() returns a future that is never
    awaited, so this is fire-and-forget — the except below only catches
    errors raised while *starting* the request, not HTTP failures.
    Presumably intentional; confirm.
    """
    data = {"toshiId": address}
    url = os.getenv('WALLET_URL', None)
    username = os.getenv('WALLET_BASIC_AUTH_USERNAME')
    password = os.getenv('WALLET_BASIC_AUTH_PASSWORD')
    try:
        tornado.httpclient.AsyncHTTPClient().fetch(
            url,
            method="POST",
            headers={
                "Content-Type": "application/json",
                # standard Basic auth header: base64("username:password")
                "Authorization": 'Basic %s' % (binascii.b2a_base64(
                    ('%s:%s' % (username, password)
                     ).encode('ascii')).decode('ascii').strip())
            },
            body=tornado.escape.json_encode(data))
    except Exception as e:
        log.warning("Error in request_to_migrate. toshi_id: {} \"{}\"".format(
            address, str(e)))
async def create_transaction_skeleton(self, *, to_address, from_address, value=0, nonce=None, gas=None, gas_price=None, data=None, token_address=None):
    """Build an unsigned ethereum transaction from user-supplied parameters.

    Validates addresses (including EIP-55 checksums for mixed-case input),
    resolves gas price / gas / nonce defaults, and optionally rewrites the
    call into an ERC20 ``transfer`` when ``token_address`` is given.
    ``value`` may be the string "max" to send the entire available balance.

    :returns: dict with hex-encoded ``tx``, ``gas``, ``gas_price``,
        ``nonce`` and ``value`` (the token value when a token transfer).
    :raises JsonRPCInvalidParamsError: on any invalid/unusable parameter.
    :raises JsonRPCInsufficientFundsError: when a token transfer exceeds the
        sender's recorded token balance.
    """
    if not validate_address(from_address):
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_from_address',
            'message': 'Invalid From Address'
        })
    if to_address is not None and not validate_address(to_address):
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_to_address',
            'message': 'Invalid To Address'
        })
    # mixed-case addresses must carry a valid EIP-55 checksum
    if from_address != from_address.lower(
    ) and not checksum_validate_address(from_address):
        raise JsonRPCInvalidParamsError(
            data={
                'id': 'invalid_from_address',
                'message': 'Invalid From Address Checksum'
            })
    if to_address is not None and to_address != to_address.lower(
    ) and not checksum_validate_address(to_address):
        raise JsonRPCInvalidParamsError(
            data={
                'id': 'invalid_to_address',
                'message': 'Invalid To Address Checksum'
            })
    # check if we should ignore the given gasprice
    # NOTE: only meant to be here while cryptokitty fever is pushing
    # up gas prices... this shouldn't be permanent
    # anytime the nonce is also set, use the provided gas (this is to
    # support easier overwriting of transactions)
    if gas_price is not None and nonce is None:
        async with self.db:
            whitelisted = await self.db.fetchrow(
                "SELECT 1 FROM from_address_gas_price_whitelist WHERE address = $1",
                from_address)
            if not whitelisted:
                whitelisted = await self.db.fetchrow(
                    "SELECT 1 FROM to_address_gas_price_whitelist WHERE address = $1",
                    to_address)
            if not whitelisted:
                # not whitelisted: discard the caller's gas price and use ours
                gas_price = None
    if gas_price is None:
        # try and use cached gas station gas price
        gas_station_gas_price = await self.redis.get(
            'gas_station_standard_gas_price')
        if gas_station_gas_price:
            gas_price = parse_int(gas_station_gas_price)
        if gas_price is None:
            gas_price = config['ethereum'].getint('default_gasprice',
                                                  DEFAULT_GASPRICE)
    else:
        gas_price = parse_int(gas_price)
        if gas_price is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_gas_price',
                'message': 'Invalid Gas Price'
            })
    if gas is not None:
        gas = parse_int(gas)
        if gas is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_gas',
                'message': 'Invalid Gas'
            })
    if nonce is None:
        # check cache for nonce
        nonce = await self.get_transaction_count(from_address)
    else:
        nonce = parse_int(nonce)
        if nonce is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_nonce',
                'message': 'Invalid Nonce'
            })
    # normalise data into bytes (accepts int or hex string)
    if data is not None:
        if isinstance(data, int):
            data = hex(data)
        if isinstance(data, str):
            try:
                data = data_decoder(data)
            except binascii.Error:
                pass
        if not isinstance(data, bytes):
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_data',
                'message': 'Invalid Data field'
            })
    else:
        data = b''
    # flag to force arguments into an erc20 token transfer
    if token_address is not None:
        if not validate_address(token_address):
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'invalid_token_address',
                    'message': 'Invalid Token Address'
                })
        if data != b'':
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'bad_arguments',
                    'message': 'Cannot include both data and token_address'
                })
        if isinstance(value, str) and value.lower() == "max":
            # get the balance in the database
            async with self.db:
                value = await self.db.fetchval(
                    "SELECT value FROM token_balances "
                    "WHERE contract_address = $1 AND eth_address = $2",
                    token_address, from_address)
            if value is None:
                # get the value from the ethereum node
                # (0x70a08231 = balanceOf(address) selector)
                data = "0x70a08231000000000000000000000000" + from_address[
                    2:].lower()
                value = await self.eth.eth_call(to_address=token_address,
                                                data=data)
        value = parse_int(value)
        if value is None or value < 0:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_value',
                'message': 'Invalid Value'
            })
        # rewrite into a transfer(to, value) call on the token contract
        # (0xa9059cbb = transfer(address,uint256) selector)
        data = data_decoder(
            "0xa9059cbb000000000000000000000000{}{:064x}".format(
                to_address[2:].lower(), value))
        token_value = value
        value = 0
        to_address = token_address
    elif value:
        if value == "max":
            network_balance, balance, _, _ = await self.get_balances(
                from_address)
            if gas is None:
                code = await self.eth.eth_getCode(to_address)
                if code:
                    # we might have to do some work
                    try:
                        gas = await self.eth.eth_estimateGas(from_address,
                                                             to_address,
                                                             data=data,
                                                             value=0)
                    except JsonRPCError:
                        # no fallback function implemented in the contract means no ether can be sent to it
                        raise JsonRPCInvalidParamsError(
                            data={
                                'id': 'invalid_to_address',
                                'message': 'Cannot send payments to that address'
                            })
                    attempts = 0
                    # because the default function could do different things based on the eth sent, we make sure
                    # the value is suitable. if we get different values 3 times abort
                    while True:
                        if attempts > 2:
                            log.warning(
                                "Hit max attempts trying to get max value to send to contract '{}'"
                                .format(to_address))
                            raise JsonRPCInvalidParamsError(
                                data={
                                    'id': 'invalid_to_address',
                                    'message': 'Cannot send payments to that address'
                                })
                        value = balance - (gas_price * gas)
                        try:
                            gas_with_value = await self.eth.eth_estimateGas(
                                from_address,
                                to_address,
                                data=data,
                                value=value)
                        except JsonRPCError:
                            # no fallback function implemented in the contract means no ether can be sent to it
                            raise JsonRPCInvalidParamsError(
                                data={
                                    'id': 'invalid_to_address',
                                    'message': 'Cannot send payments to that address'
                                })
                        if gas_with_value != gas:
                            # estimate changed with the value attached; retry
                            gas = gas_with_value
                            attempts += 1
                            continue
                        else:
                            break
                else:
                    # normal address, 21000 gas per transaction
                    gas = 21000
                    value = balance - (gas_price * gas)
            else:
                # preset gas, run with it!
                value = balance - (gas_price * gas)
        else:
            value = parse_int(value)
            if value is None or value < 0:
                raise JsonRPCInvalidParamsError(data={
                    'id': 'invalid_value',
                    'message': 'Invalid Value'
                })
    if gas is None:
        try:
            gas = await self.eth.eth_estimateGas(from_address,
                                                 to_address,
                                                 data=data,
                                                 value=value)
        except JsonRPCError:
            # this can occur if sending a transaction to a contract that doesn't match a valid method
            # and the contract has no default method implemented.
            # this can also happen if the current state of the blockchain means that submitting the
            # transaction would fail (abort).
            if token_address is not None:
                # when dealing with erc20, this usually means the user's balance for that token isn't
                # high enough, check that and throw an error if it's the case, and if not fall
                # back to the standard invalid_data error
                async with self.db:
                    bal = await self.db.fetchval(
                        "SELECT value FROM token_balances "
                        "WHERE contract_address = $1 AND eth_address = $2",
                        token_address, from_address)
                if bal is not None:
                    bal = parse_int(bal)
                    if bal < token_value:
                        raise JsonRPCInsufficientFundsError(
                            data={
                                'id': 'insufficient_funds',
                                'message': 'Insufficient Funds'
                            })
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'invalid_data',
                    'message': 'Unable to estimate gas for contract call'
                })
        # if data is present, buffer gas estimate by 20%
        if len(data) > 0:
            gas = int(gas * 1.2)
    try:
        tx = create_transaction(nonce=nonce,
                                gasprice=gas_price,
                                startgas=gas,
                                to=to_address,
                                value=value,
                                data=data,
                                network_id=self.network_id)
    except InvalidTransaction as e:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_transaction',
            'message': str(e)
        })
    if tx.intrinsic_gas_used > gas:
        raise JsonRPCInvalidParamsError(
            data={
                'id': 'invalid_transaction',
                'message': 'Transaction gas is too low. There is not enough gas to cover minimal cost of the transaction (minimal: {}, got: {}). Try increasing supplied gas.'
                .format(tx.intrinsic_gas_used, gas)
            })
    transaction = encode_transaction(tx)
    return {
        "tx": transaction,
        "gas": hex(gas),
        "gas_price": hex(gas_price),
        "nonce": hex(nonce),
        "value": hex(token_value) if token_address else hex(value)
    }
async def create_tables(con):
    """Initialise the database schema and apply any pending migrations.

    On a fresh database: creates the ``database_version`` table, runs
    ``sql/create_tables.sql``, and sanity-checks that the create script set
    the version to match the latest migration script on disk. On an
    existing database: applies each ``sql/migrate_NNNNNNNN.sql`` above the
    current version in order, recording the last successfully-applied
    version before re-raising any migration failure.
    """
    # make sure the create tables script exists
    if not os.path.exists("sql/create_tables.sql"):
        log.warning(
            "Missing sql/create_tables.sql: cannot initialise database")
        return
    try:
        row = await con.fetchrow(
            "SELECT version_number FROM database_version LIMIT 1")
        version = row['version_number']
        log.info("got database version: {}".format(version))
    except asyncpg.exceptions.UndefinedTableError:
        # fresh DB path
        await con.execute(
            "CREATE TABLE database_version (version_number INTEGER)")
        await con.execute(
            "INSERT INTO database_version (version_number) VALUES (0)")
        # fresh database, nothing to migrate
        with open("sql/create_tables.sql") as create_tables_file:
            sql = create_tables_file.read()
        await con.execute(sql)
        # verify that if there are any migration scripts, that the
        # database_version table has been updated appropriately
        version = 0
        while True:
            version += 1
            if not os.path.exists("sql/migrate_{:08}.sql".format(version)):
                version -= 1
                break
        if version > 0:
            row = await con.fetchrow(
                "SELECT version_number FROM database_version LIMIT 1")
            if row['version_number'] != version:
                log.warning(
                    "Warning, migration scripts exist but database version has not been set in create_tables.sql"
                )
                log.warning(
                    "DB version: {}, latest migration script: {}".format(
                        row['version_number'], version))
        # create_tables.sql is expected to produce the latest schema, so
        # skip the migration loop entirely on the fresh-DB path
        return
    # check for migration files
    exception = None
    while True:
        version += 1
        fn = "sql/migrate_{:08}.sql".format(version)
        if os.path.exists(fn):
            log.info("applying migration script: {:08}".format(version))
            with open(fn) as migrate_file:
                sql = migrate_file.read()
            try:
                await con.execute(sql)
            except Exception as e:
                # stop at the failed script but still record how far we got
                version -= 1
                exception = e
                break
        else:
            version -= 1
            break
    await con.execute("UPDATE database_version SET version_number = $1",
                      version)
    if exception:
        # re-raise the migration failure after persisting the version
        raise exception