def __init__(self, env, master_server: MasterServer, port, methods: AsyncMethods):
    """Create a JSON-RPC server listening on *port*.

    Every handler in *methods* is re-bound to this instance so that, when
    dispatched, it can reach server state (``self.master``, ``self.env``,
    ``self.counters``, ...).
    """
    self.loop = asyncio.get_event_loop()
    self.port = port
    self.env = env
    self.master = master_server
    self.counters = {}
    # Bind RPC handler functions to this instance.
    self.handlers = AsyncMethods()
    for name in methods:
        unbound = methods[name]
        self.handlers[name] = unbound.__get__(self, self.__class__)
def __init__(self, config=None, db=None):
    """Initialise the Yo application container.

    Creates the event loop and the aiohttp application, exposes the shared
    config/db/app context under ``web_app['config']``, and prepares an
    empty JSON-RPC dispatcher.
    """
    self.config = config
    self.db = db
    self.services = {}
    self.service_tasks = {}
    self.running = False
    self.loop = asyncio.get_event_loop()
    self.web_app = web.Application(loop=self.loop)
    # Shared context for aiohttp request handlers.
    self.web_app['config'] = {
        'yo_config': self.config,
        'yo_db': self.db,
        'yo_app': self,
    }
    self.api_methods = AsyncMethods()
def run(host=None, port=None, database_url=None, database_extra=None, app_extra=None, **kwargs):
    """Configure and run the sbds JSON-RPC aiohttp application.

    Extra keyword arguments are folded into the app config; *app_extra*
    is forwarded to ``web.run_app``.
    """
    extra = app_extra or {}

    # Layout basic aiohttp config and context.
    app = web.Application()
    config = {}
    if kwargs:
        config.update(**kwargs)
    config['database_url'] = database_url
    config['database_extra'] = database_extra
    app['config'] = config
    app['db'] = None  # this will be defined by init_pg at app startup

    # App lifecycle callbacks.
    app.on_startup.append(init_pg)
    app.on_cleanup.append(on_cleanup)

    # App routes.
    app.router.add_post('/', handle_api)
    app.router.add_get('/.well-known/healthcheck.json', healthcheck_handler)
    app.router.add_get('/health', healthcheck_handler)

    # JSON-RPC method dispatcher, placed in the app context.
    jsonrpc_methods = AsyncMethods()
    jsonrpc_methods.add(api_healthcheck, 'sbds.health')
    # TODO add additional methods here
    app['jsonrpc_methods_dispatcher'] = jsonrpc_methods

    # Run the aiohttp webapp.
    web.run_app(app, host=host, port=port, **extra)
def __init__(self, config=None, db=None):
    """Set up the yo API server: event loop, aiohttp routes, and the
    JSON-RPC method table (all yo.* calls plus a 'health' check).
    """
    self.config = config
    self.db = db
    self.services = {}
    self.service_tasks = {}
    self.loop = asyncio.get_event_loop()
    self.web_app = web.Application(loop=self.loop)
    self.api_methods = AsyncMethods()
    self.running = False

    # HTTP entry points: JSON-RPC POST endpoint and health check.
    self.web_app.router.add_post('/', self.handle_api)
    self.web_app.router.add_get('/.well-known/healthcheck.json', self.healthcheck_handler)

    # JSON-RPC method registrations (coroutine, exposed name).
    for func, name in (
        (yo.api_methods.api_get_notifications, 'yo.get_db_notifications'),
        (yo.api_methods.api_mark_read, 'yo.mark_read'),
        (yo.api_methods.api_mark_unread, 'yo.mark_unread'),
        (yo.api_methods.api_mark_shown, 'yo.mark_shown'),
        (yo.api_methods.api_mark_unshown, 'yo.mark_unshown'),
        (yo.api_methods.api_get_transports, 'yo.get_transports'),
        (yo.api_methods.api_set_transports, 'yo.set_transports'),
        (self.api_healthcheck, 'health'),
    ):
        self.api_methods.add(func, name)
def __init__(self, database_url=None, loop=None, http_host=None, http_port=None):
    """Initialise the API service on top of the base service.

    Wires up the aiohttp application, its routes, and the JSON-RPC
    dispatcher with every supported yo.* method plus 'health'.
    """
    super().__init__(database_url=database_url, loop=loop)
    self.host = http_host
    self.port = http_port
    self.web_app = web.Application(loop=self.loop)
    self.api_methods = AsyncMethods()

    # Routes: JSON-RPC endpoint plus the two health-check URLs.
    router = self.web_app.router
    router.add_post('/', self.handle_api)
    router.add_get('/.well-known/healthcheck.json', self.healthcheck_handler)
    router.add_get('/health', self.healthcheck_handler)

    # JSON-RPC method table (coroutine, exposed name).
    for func, name in (
        (api_get_notifications, 'yo.get_db_notifications'),
        (api_mark_read, 'yo.mark_read'),
        (api_mark_unread, 'yo.mark_unread'),
        (api_mark_shown, 'yo.mark_shown'),
        (api_mark_unshown, 'yo.mark_unshown'),
        (api_get_transports, 'yo.get_transports'),
        (api_set_transports, 'yo.set_transports'),
        (self.api_healthcheck, 'health'),
    ):
        self.api_methods.add(func, name)
def build_methods():
    """Build a map of all supported hive_api/condenser_api calls."""
    methods = AsyncMethods()

    # hive methods, registered under their bare function names.
    # --- disabled until #92:
    #   hive_api.get_followers, hive_api.get_following,
    #   hive_api.get_follow_count, hive_api.get_user_feed,
    #   hive_api.get_blog_feed, hive_api.get_discussions_by_sort_and_tag,
    #   hive_api.get_related_posts
    for fn in (
        hive_api.db_head_state,
        hive_api.payouts_total,
        hive_api.payouts_last_24h,
    ):
        methods.add(fn)
        #methods.add(fn, 'hive.' + fn.__name__) #test jussi-ns w/o jussi

    # condenser_api methods, each registered twice: namespaced and bare.
    for fn in (
        condenser_api.call,
        condenser_api.get_followers,
        condenser_api.get_following,
        condenser_api.get_follow_count,
        condenser_api.get_discussions_by_trending,
        condenser_api.get_discussions_by_hot,
        condenser_api.get_discussions_by_promoted,
        condenser_api.get_discussions_by_created,
        condenser_api.get_discussions_by_blog,
        condenser_api.get_discussions_by_feed,
        condenser_api.get_discussions_by_comments,
        condenser_api.get_replies_by_last_update,
        condenser_api.get_content,
        condenser_api.get_content_replies,
        condenser_api.get_state,
    ):
        # todo: unclear if appbase uses condenser_api.call vs call.condenser_api
        methods.add(fn, 'condenser_api.' + fn.__name__)
        # todo: temporary non-appbase endpoint (remove once appbase is in prod)
        methods.add(fn, fn.__name__)

    return methods
def start_test_server(cls, env, master_server):
    """Start a server exposing BOTH the public and private RPC method sets
    on the public JSON-RPC port (intended for tests only).
    """
    combined = AsyncMethods()
    for handler in list(public_methods.values()) + list(private_methods.values()):
        combined.add(handler)
    server = cls(env, master_server, env.cluster_config.JSON_RPC_PORT, combined)
    server.start()
    return server
def build_methods():
    """Register all supported hive_api/condenser_api calls.

    Returns an AsyncMethods dispatcher exposing every hive method under a
    'hive.' prefix, every condenser method under a 'condenser_api.'
    prefix, plus the bare condenser_api_call entry.
    """
    methods = AsyncMethods()

    # Plain loops instead of list comprehensions used purely for their
    # side effects: those built and discarded throwaway lists and needed a
    # pylint expression-not-assigned suppression.
    hive_methods = (
        hive_api.db_head_state,
        hive_api.payouts_total,
        hive_api.payouts_last_24h,
        hive_api.get_accounts,
        hive_api.get_accounts_ac,
        # --- disabled until #92
        #hive_api.get_followers,
        #hive_api.get_following,
        #hive_api.get_follow_count,
        #hive_api.get_user_feed,
        #hive_api.get_blog_feed,
        #hive_api.get_discussions_by_sort_and_tag,
        #hive_api.get_related_posts,
    )
    for method in hive_methods:
        methods.add(method, 'hive.' + method.__name__)

    condenser_methods = (
        condenser_api.get_followers,
        condenser_api.get_following,
        condenser_api.get_follow_count,
        condenser_api.get_content,
        condenser_api.get_content_replies,
        condenser_api_get_state,
        condenser_api_get_trending_tags,
        condenser_api.get_discussions_by_trending,
        condenser_api.get_discussions_by_hot,
        condenser_api.get_discussions_by_promoted,
        condenser_api.get_discussions_by_created,
        condenser_api.get_discussions_by_blog,
        condenser_api.get_discussions_by_feed,
        condenser_api.get_discussions_by_comments,
        condenser_api.get_replies_by_last_update,
        condenser_api.get_discussions_by_author_before_date,
        condenser_api.get_blog,
        condenser_api.get_blog_entries,
    )
    for method in condenser_methods:
        methods.add(method, 'condenser_api.' + method.__name__)

    methods.add(condenser_api_call)
    return methods
# NOTE(review): The chunk below is `class JSONRPCServer` (a quarkchain-style
# JSON-RPC front end over aiohttp) with its original newlines stripped, so it
# no longer parses as Python; method definitions straddle the mangled line
# boundaries. Left byte-identical pending re-formatting from version control.
#
# What the visible tokens show, in order:
#   - classmethod constructors: start_public_server / start_private_server /
#     start_test_server (the last merges public_methods and private_methods).
#   - __init__: captures loop/port/env/master, a per-method call counter
#     dict, and re-binds each AsyncMethods handler to the instance via
#     func.__get__(self, self.__class__).
#   - __handle: parses the request body, bumps self.counters[method], and
#     dispatches via armor(...) so the handler is not cancelled when aiohttp
#     loses the client connection (per the inline comment).
#   - start / shutdown: aiohttp AppRunner + TCPSite on 0.0.0.0:<port>, with
#     aiohttp_cors configured for POST/PUT.
#   - Public RPC handlers registered via @public_methods.add: echoQuantity,
#     echoData, networkInfo, getTransactionCount, getBalance, getAccountData,
#     sendUnsigedTransaction (sic — typo preserved; it is the registered RPC
#     name), sendTransaction, sendRawTransaction, getRootBlockById,
#     getRootBlockByHeight, getMinorBlockById, getMinorBlockByHeight,
#     getTransactionById, call, estimateGas, getTransactionReceipt, getLogs,
#     getStorageAt, getCode, getTransactionsByAddress, getJrpcCalls,
#     gasPrice, submitWork, getWork, plus eth_* compatibility wrappers.
#   - Private RPC handlers via @private_methods.add: getNextBlockToMine,
#     addBlock, getPeers, getSyncStats, getStats, getBlockCount,
#     createTransactions, setTargetBlockTime, setMining, getJrpcCalls.
#   - Internal helpers: _convert_eth_call_data, _get_logs,
#     _call_or_estimate_gas.
# Decoder/encoder helpers (quantity_decoder, address_decoder, ...) and the
# public_methods/private_methods registries are defined elsewhere in the file.
class JSONRPCServer: @classmethod def start_public_server(cls, env, master_server): server = cls(env, master_server, env.cluster_config.JSON_RPC_PORT, public_methods) server.start() return server @classmethod def start_private_server(cls, env, master_server): server = cls( env, master_server, env.cluster_config.PRIVATE_JSON_RPC_PORT, private_methods, ) server.start() return server @classmethod def start_test_server(cls, env, master_server): methods = AsyncMethods() for method in public_methods.values(): methods.add(method) for method in private_methods.values(): methods.add(method) server = cls(env, master_server, env.cluster_config.JSON_RPC_PORT, methods) server.start() return server def __init__(self, env, master_server: MasterServer, port, methods: AsyncMethods): self.loop = asyncio.get_event_loop() self.port = port self.env = env self.master = master_server self.counters = dict() # Bind RPC handler functions to this instance self.handlers = AsyncMethods() for rpc_name in methods: func = methods[rpc_name] self.handlers[rpc_name] = func.__get__(self, self.__class__) async def __handle(self, request): request = await request.text() Logger.info(request) d = dict() try: d = json.loads(request) except Exception: pass method = d.get("method", "null") if method in self.counters: self.counters[method] += 1 else: self.counters[method] = 1 # Use armor to prevent the handler from being cancelled when # aiohttp server loses connection to client response = await armor(self.handlers.dispatch(request)) if "error" in response: Logger.error(response) if response.is_notification: return web.Response() return web.json_response(response, status=response.http_status) def start(self): app = web.Application(client_max_size=JSON_RPC_CLIENT_REQUEST_MAX_SIZE) cors = aiohttp_cors.setup(app) route = app.router.add_post("/", self.__handle) cors.add( route, { "*": aiohttp_cors.ResourceOptions( allow_credentials=True, expose_headers=("X-Custom-Server-Header", ), allow_methods=["POST", "PUT"], 
allow_headers=("X-Requested-With", "Content-Type"), ) }, ) self.runner = web.AppRunner(app, access_log=None) self.loop.run_until_complete(self.runner.setup()) site = web.TCPSite(self.runner, "0.0.0.0", self.port) self.loop.run_until_complete(site.start()) def shutdown(self): self.loop.run_until_complete(self.runner.cleanup()) # JSON RPC handlers @public_methods.add @decode_arg("quantity", quantity_decoder) @encode_res(quantity_encoder) async def echoQuantity(self, quantity): return quantity @public_methods.add @decode_arg("data", data_decoder) @encode_res(data_encoder) async def echoData(self, data): return data @public_methods.add async def networkInfo(self): return { "networkId": quantity_encoder(self.master.env.quark_chain_config.NETWORK_ID), "shardSize": quantity_encoder(self.master.get_shard_size()), "syncing": self.master.is_syncing(), "mining": self.master.is_mining(), "shardServerCount": len(self.master.slave_pool), } @public_methods.add @decode_arg("address", address_decoder) @decode_arg("block_height", block_height_decoder) @encode_res(quantity_encoder) async def getTransactionCount(self, address, block_height=None): account_branch_data = await self.master.get_primary_account_data( Address.deserialize(address), block_height) return account_branch_data.transaction_count @public_methods.add @decode_arg("address", address_decoder) @decode_arg("block_height", block_height_decoder) async def getBalance(self, address, block_height=None): account_branch_data = await self.master.get_primary_account_data( Address.deserialize(address), block_height) branch = account_branch_data.branch balance = account_branch_data.balance return { "branch": quantity_encoder(branch.value), "shard": quantity_encoder(branch.get_shard_id()), "balance": quantity_encoder(balance), } @public_methods.add @decode_arg("address", address_decoder) @decode_arg("block_height", block_height_decoder) async def getAccountData(self, address, block_height=None, include_shards=False): # do not allow 
specify height if client wants info on all shards if include_shards and block_height is not None: return None address = Address.deserialize(address) if not include_shards: account_branch_data = await self.master.get_primary_account_data( address, block_height) branch = account_branch_data.branch balance = account_branch_data.balance count = account_branch_data.transaction_count primary = { "branch": quantity_encoder(branch.value), "shard": quantity_encoder(branch.get_shard_id()), "balance": quantity_encoder(balance), "transactionCount": quantity_encoder(count), "isContract": account_branch_data.is_contract, } return {"primary": primary} branch_to_account_branch_data = await self.master.get_account_data( address) shard_size = self.master.get_shard_size() shards = [] for shard in range(shard_size): branch = Branch.create(shard_size, shard) account_branch_data = branch_to_account_branch_data[branch] data = { "branch": quantity_encoder(account_branch_data.branch.value), "shard": quantity_encoder(account_branch_data.branch.get_shard_id()), "balance": quantity_encoder(account_branch_data.balance), "transactionCount": quantity_encoder(account_branch_data.transaction_count), "isContract": account_branch_data.is_contract, } shards.append(data) if shard == address.get_shard_id(shard_size): primary = data return {"primary": primary, "shards": shards} @public_methods.add async def sendUnsigedTransaction(self, **data): """ Returns the unsigned hash of the evm transaction """ if not isinstance(data, dict): raise InvalidParams("Transaction must be an object") def get_data_default(key, decoder, default=None): if key in data: return decoder(data[key]) return default nonce = get_data_default("nonce", quantity_decoder, None) to = get_data_default("to", recipient_decoder, b"") startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) gasprice = get_data_default("gasPrice", quantity_decoder, DEFAULT_GASPRICE) value = get_data_default("value", quantity_decoder, 0) data_ = 
get_data_default("data", data_decoder, b"") from_full_shard_id = get_data_default("fromFullShardId", full_shard_id_decoder, None) to_full_shard_id = get_data_default("toFullShardId", full_shard_id_decoder, None) if nonce is None: raise InvalidParams("nonce is missing") if from_full_shard_id is None: raise InvalidParams("fromFullShardId is missing") if to_full_shard_id is None: to_full_shard_id = from_full_shard_id evm_tx = EvmTransaction( nonce, gasprice, startgas, to, value, data_, from_full_shard_id=from_full_shard_id, to_full_shard_id=to_full_shard_id, network_id=self.master.env.quark_chain_config.NETWORK_ID, ) return { "txHashUnsigned": data_encoder(evm_tx.hash_unsigned), "nonce": quantity_encoder(evm_tx.nonce), "to": data_encoder(evm_tx.to), "fromFullShardId": full_shard_id_encoder(evm_tx.from_full_shard_id), "toFullShardId": full_shard_id_encoder(evm_tx.to_full_shard_id), "value": quantity_encoder(evm_tx.value), "gasPrice": quantity_encoder(evm_tx.gasprice), "gas": quantity_encoder(evm_tx.startgas), "data": data_encoder(evm_tx.data), "networkId": quantity_encoder(evm_tx.network_id), } @public_methods.add async def sendTransaction(self, data): def get_data_default(key, decoder, default=None): if key in data: return decoder(data[key]) return default to = get_data_default("to", recipient_decoder, b"") startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) gasprice = get_data_default("gasPrice", quantity_decoder, DEFAULT_GASPRICE) value = get_data_default("value", quantity_decoder, 0) data_ = get_data_default("data", data_decoder, b"") v = get_data_default("v", quantity_decoder, 0) r = get_data_default("r", quantity_decoder, 0) s = get_data_default("s", quantity_decoder, 0) nonce = get_data_default("nonce", quantity_decoder, None) to_full_shard_id = get_data_default("toFullShardId", full_shard_id_decoder, None) from_full_shard_id = get_data_default("fromFullShardId", full_shard_id_decoder, None) network_id = get_data_default( "networkId", 
quantity_decoder, self.master.env.quark_chain_config.NETWORK_ID) if nonce is None: raise InvalidParams("Missing nonce") if not (v and r and s): raise InvalidParams("Missing v, r, s") if from_full_shard_id is None: raise InvalidParams("Missing fromFullShardId") if to_full_shard_id is None: to_full_shard_id = from_full_shard_id evm_tx = EvmTransaction( nonce, gasprice, startgas, to, value, data_, v, r, s, from_full_shard_id=from_full_shard_id, to_full_shard_id=to_full_shard_id, network_id=network_id, ) tx = Transaction(code=Code.create_evm_code(evm_tx)) success = await self.master.add_transaction(tx) if not success: return None return id_encoder(tx.get_hash(), from_full_shard_id) @public_methods.add @decode_arg("tx_data", data_decoder) async def sendRawTransaction(self, tx_data): evm_tx = rlp.decode(tx_data, EvmTransaction) tx = Transaction(code=Code.create_evm_code(evm_tx)) success = await self.master.add_transaction(tx) if not success: return "0x" + bytes(32 + 4).hex() return id_encoder(tx.get_hash(), evm_tx.from_full_shard_id) @public_methods.add @decode_arg("block_id", data_decoder) async def getRootBlockById(self, block_id): try: block = self.master.root_state.db.get_root_block_by_hash( block_id, False) return root_block_encoder(block) except Exception: return None @public_methods.add async def getRootBlockByHeight(self, height=None): if height is not None: height = quantity_decoder(height) block = self.master.root_state.get_root_block_by_height(height) if not block: return None return root_block_encoder(block) @public_methods.add @decode_arg("block_id", id_decoder) @decode_arg("include_transactions", bool_decoder) async def getMinorBlockById(self, block_id, include_transactions=False): block_hash, full_shard_id = block_id shard_size = self.master.get_shard_size() branch = Branch.create(shard_size, (shard_size - 1) & full_shard_id) block = await self.master.get_minor_block_by_hash(block_hash, branch) if not block: return None return minor_block_encoder(block, 
include_transactions) @public_methods.add @decode_arg("shard", quantity_decoder) @decode_arg("include_transactions", bool_decoder) async def getMinorBlockByHeight(self, shard: int, height=None, include_transactions=False): shard_size = self.master.get_shard_size() if height is not None: height = quantity_decoder(height) if shard >= shard_size: raise InvalidParams( "shard is larger than shard size {} > {}".format( shard, shard_size)) branch = Branch.create(shard_size, shard) block = await self.master.get_minor_block_by_height(height, branch) if not block: return None return minor_block_encoder(block, include_transactions) @public_methods.add @decode_arg("tx_id", id_decoder) async def getTransactionById(self, tx_id): tx_hash, full_shard_id = tx_id shard_size = self.master.get_shard_size() branch = Branch.create(shard_size, (shard_size - 1) & full_shard_id) minor_block, i = await self.master.get_transaction_by_hash( tx_hash, branch) if not minor_block: return None if len(minor_block.tx_list) <= i: return None return tx_encoder(minor_block, i) @public_methods.add @decode_arg("block_height", block_height_decoder) async def call(self, data, block_height=None): return await self._call_or_estimate_gas(is_call=True, block_height=block_height, **data) @public_methods.add async def estimateGas(self, data): return await self._call_or_estimate_gas(is_call=False, **data) @public_methods.add @decode_arg("tx_id", id_decoder) async def getTransactionReceipt(self, tx_id): tx_hash, full_shard_id = tx_id shard_size = self.master.get_shard_size() branch = Branch.create(shard_size, (shard_size - 1) & full_shard_id) resp = await self.master.get_transaction_receipt(tx_hash, branch) if not resp: return None minor_block, i, receipt = resp return receipt_encoder(minor_block, i, receipt) @public_methods.add @decode_arg("shard", shard_id_decoder) async def getLogs(self, data, shard): return await self._get_logs(data, shard, decoder=address_decoder) @public_methods.add @decode_arg("address", 
address_decoder) @decode_arg("key", quantity_decoder) @decode_arg("block_height", block_height_decoder) # TODO: add block number async def getStorageAt(self, address, key, block_height=None): res = await self.master.get_storage_at(Address.deserialize(address), key, block_height) return data_encoder(res) if res is not None else None @public_methods.add @decode_arg("address", address_decoder) @decode_arg("block_height", block_height_decoder) async def getCode(self, address, block_height=None): res = await self.master.get_code(Address.deserialize(address), block_height) return data_encoder(res) if res is not None else None @public_methods.add @decode_arg("address", address_decoder) @decode_arg("start", data_decoder) @decode_arg("limit", quantity_decoder) async def getTransactionsByAddress(self, address, start="0x", limit="0xa"): """ "start" should be the "next" in the response for fetching next page. "start" can also be "0x" to fetch from the beginning (i.e., latest). "start" can be "0x00" to fetch the pending outgoing transactions. 
""" address = Address.create_from(address) if limit > 20: limit = 20 result = await self.master.get_transactions_by_address( address, start, limit) if not result: return None tx_list, next = result txs = [] for tx in tx_list: txs.append({ "txId": id_encoder(tx.tx_hash, tx.from_address.full_shard_id), "fromAddress": address_encoder(tx.from_address.serialize()), "toAddress": address_encoder(tx.to_address.serialize()) if tx.to_address else "0x", "value": quantity_encoder(tx.value), "blockHeight": quantity_encoder(tx.block_height), "timestamp": quantity_encoder(tx.timestamp), "success": tx.success, }) return {"txList": txs, "next": data_encoder(next)} @public_methods.add async def getJrpcCalls(self): return self.counters @public_methods.add async def gasPrice(self, shard): shard = shard_id_decoder(shard) if shard is None: return None branch = Branch.create(self.master.get_shard_size(), shard) ret = await self.master.gas_price(branch) if ret is None: return None return quantity_encoder(ret) @public_methods.add @decode_arg("shard", shard_id_decoder) @decode_arg("header_hash", hash_decoder) @decode_arg("nonce", quantity_decoder) @decode_arg("mixhash", hash_decoder) async def submitWork(self, shard, header_hash, nonce, mixhash): branch = None # `None` means getting work from root chain if shard is not None: branch = Branch.create(self.master.get_shard_size(), shard) return await self.master.submit_work(branch, header_hash, nonce, mixhash) @public_methods.add @decode_arg("shard", shard_id_decoder) async def getWork(self, shard): branch = None # `None` means getting work from root chain if shard is not None: branch = Branch.create(self.master.get_shard_size(), shard) ret = await self.master.get_work(branch) if ret is None: return None return [ data_encoder(ret.hash), quantity_encoder(ret.height), quantity_encoder(ret.difficulty), ] ######################## Ethereum JSON RPC ######################## @public_methods.add async def net_version(self): return 
quantity_encoder(self.master.env.quark_chain_config.NETWORK_ID) @public_methods.add async def eth_gasPrice(self, shard): return await self.gasPrice(shard) @public_methods.add @decode_arg("block_height", block_height_decoder) @decode_arg("include_transactions", bool_decoder) async def eth_getBlockByNumber(self, block_height, include_transactions): """ NOTE: only support block_id "latest" or hex """ def block_transcoder(block): """ QuarkChain Block => ETH Block """ return { **block, "number": block["height"], "parentHash": block["hashPrevMinorBlock"], "sha3Uncles": "", "logsBloom": "", "transactionsRoot": block["hashMerkleRoot"], # ? "stateRoot": block["hashEvmStateRoot"], # ? } block = await self.master.get_minor_block_by_height( block_height, Branch.create(self.master.get_shard_size(), 0)) if block is None: return None return block_transcoder(minor_block_encoder(block)) @public_methods.add @decode_arg("address", eth_address_to_quarkchain_address_decoder) @decode_arg("shard", shard_id_decoder) @encode_res(quantity_encoder) async def eth_getBalance(self, address, shard=None): address = Address.deserialize(address) if shard is not None: address = Address(address.recipient, shard) account_branch_data = await self.master.get_primary_account_data( address) balance = account_branch_data.balance return balance @public_methods.add @decode_arg("address", eth_address_to_quarkchain_address_decoder) @decode_arg("shard", shard_id_decoder) @encode_res(quantity_encoder) async def eth_getTransactionCount(self, address, shard=None): address = Address.deserialize(address) if shard is not None: address = Address(address.recipient, shard) account_branch_data = await self.master.get_primary_account_data( address) return account_branch_data.transaction_count @public_methods.add @decode_arg("address", eth_address_to_quarkchain_address_decoder) @decode_arg("shard", shard_id_decoder) async def eth_getCode(self, address, shard=None): addr = Address.deserialize(address) if shard is not None: 
addr = Address(addr.recipient, shard) res = await self.master.get_code(addr, None) return data_encoder(res) if res is not None else None @public_methods.add @decode_arg("shard", shard_id_decoder) async def eth_call(self, data, shard=None): """ Returns the result of the transaction application without putting in block chain """ data = self._convert_eth_call_data(data, shard) return await self.call(data) @public_methods.add async def eth_sendRawTransaction(self, tx_data): return await self.sendRawTransaction(tx_data) @public_methods.add async def eth_getTransactionReceipt(self, tx_id): return await self.getTransactionReceipt(tx_id) @public_methods.add @decode_arg("shard", shard_id_decoder) async def eth_estimateGas(self, data, shard): data = self._convert_eth_call_data(data, shard) return await self.estimateGas(**data) @public_methods.add @decode_arg("shard", shard_id_decoder) async def eth_getLogs(self, data, shard): return await self._get_logs( data, shard, decoder=eth_address_to_quarkchain_address_decoder) @public_methods.add @decode_arg("address", eth_address_to_quarkchain_address_decoder) @decode_arg("key", quantity_decoder) @decode_arg("shard", shard_id_decoder) async def eth_getStorageAt(self, address, key, shard=None): addr = Address.deserialize(address) if shard is not None: addr = Address(addr.recipient, shard) res = await self.master.get_storage_at(addr, key, None) return data_encoder(res) if res is not None else None ######################## Private Methods ######################## @private_methods.add @decode_arg("coinbase_address", address_decoder) @decode_arg("shard_mask_value", quantity_decoder) async def getNextBlockToMine(self, coinbase_address, shard_mask_value, prefer_root=False): address = Address.deserialize(coinbase_address) is_root_block, block = await self.master.get_next_block_to_mine( address, shard_mask_value, prefer_root=prefer_root) if not block: return None return { "isRootBlock": is_root_block, "blockData": 
data_encoder(block.serialize()), } @private_methods.add @decode_arg("branch", quantity_decoder) @decode_arg("block_data", data_decoder) async def addBlock(self, branch, block_data): if branch == 0: block = RootBlock.deserialize(block_data) return await self.master.add_root_block_from_miner(block) return await self.master.add_raw_minor_block(Branch(branch), block_data) @private_methods.add async def getPeers(self): peer_list = [] for peer_id, peer in self.master.network.active_peer_pool.items(): peer_list.append({ "id": data_encoder(peer_id), "ip": quantity_encoder(int(peer.ip)), "port": quantity_encoder(peer.port), }) return {"peers": peer_list} @private_methods.add async def getSyncStats(self): return self.master.synchronizer.get_stats() @private_methods.add async def getStats(self): # This JRPC doesn't follow the standard encoding return await self.master.get_stats() @private_methods.add async def getBlockCount(self): # This JRPC doesn't follow the standard encoding return self.master.get_block_count() @private_methods.add async def createTransactions(self, **load_test_data): """Create transactions for load testing""" def get_data_default(key, decoder, default=None): if key in load_test_data: return decoder(load_test_data[key]) return default num_tx_per_shard = load_test_data["numTxPerShard"] x_shard_percent = load_test_data["xShardPercent"] to = get_data_default("to", recipient_decoder, b"") startgas = get_data_default("gas", quantity_decoder, DEFAULT_STARTGAS) gasprice = get_data_default("gasPrice", quantity_decoder, int(DEFAULT_GASPRICE / 10)) value = get_data_default("value", quantity_decoder, 0) data = get_data_default("data", data_decoder, b"") # FIXME: can't support specifying full shard ID to 0. 
currently is regarded as not set from_full_shard_id = get_data_default("fromFullShardId", full_shard_id_decoder, 0) # build sample tx evm_tx_sample = EvmTransaction( 0, gasprice, startgas, to, value, data, from_full_shard_id=from_full_shard_id, ) tx = Transaction(code=Code.create_evm_code(evm_tx_sample)) return await self.master.create_transactions(num_tx_per_shard, x_shard_percent, tx) @private_methods.add async def setTargetBlockTime(self, root_block_time=0, minor_block_time=0): """0 will not update existing value""" return await self.master.set_target_block_time(root_block_time, minor_block_time) @private_methods.add async def setMining(self, mining): """Turn on / off mining""" return await self.master.set_mining(mining) @private_methods.add async def getJrpcCalls(self): return self.counters @staticmethod def _convert_eth_call_data(data, shard): to_address = Address.create_from( eth_address_to_quarkchain_address_decoder(data["to"])) if shard: to_address = Address(to_address.recipient, shard) data["to"] = "0x" + to_address.serialize().hex() if "from" in data: from_address = Address.create_from( eth_address_to_quarkchain_address_decoder(data["from"])) if shard: from_address = Address(from_address.recipient, shard) data["from"] = "0x" + from_address.serialize().hex() return data async def _get_logs(self, data, shard, decoder: Callable[[str], bytes]): start_block = data.get("fromBlock", "latest") end_block = data.get("toBlock", "latest") # TODO: not supported yet for "earliest" or "pending" block if (isinstance(start_block, str) and start_block != "latest") or (isinstance(end_block, str) and end_block != "latest"): return None # parse addresses / topics addresses, topics = [], [] if "address" in data: if isinstance(data["address"], str): addresses = [Address.deserialize(decoder(data["address"]))] elif isinstance(data["address"], list): addresses = [ Address.deserialize(decoder(a)) for a in data["address"] ] if shard is not None: addresses = [Address(a.recipient, 
shard) for a in addresses] if "topics" in data: for topic_item in data["topics"]: if isinstance(topic_item, str): topics.append([data_decoder(topic_item)]) elif isinstance(topic_item, list): topics.append([data_decoder(tp) for tp in topic_item]) branch = Branch.create(self.master.get_shard_size(), shard) logs = await self.master.get_logs(addresses, topics, start_block, end_block, branch) if logs is None: return None return loglist_encoder(logs) async def _call_or_estimate_gas(self, is_call: bool, **data): """ Returns the result of the transaction application without putting in block chain """ if not isinstance(data, dict): raise InvalidParams("Transaction must be an object") def get_data_default(key, decoder, default=None): if key in data: return decoder(data[key]) return default to = get_data_default("to", address_decoder, None) if to is None: raise InvalidParams("Missing to") to_full_shard_id = int.from_bytes(to[20:], "big") gas = get_data_default("gas", quantity_decoder, 0) gas_price = get_data_default("gasPrice", quantity_decoder, 0) value = get_data_default("value", quantity_decoder, 0) data_ = get_data_default("data", data_decoder, b"") sender = get_data_default("from", address_decoder, b"\x00" * 20 + to[20:]) sender_address = Address.create_from(sender) network_id = self.master.env.quark_chain_config.NETWORK_ID nonce = 0 # slave will fill in the real nonce evm_tx = EvmTransaction( nonce, gas_price, gas, to[:20], value, data_, from_full_shard_id=sender_address.full_shard_id, to_full_shard_id=to_full_shard_id, network_id=network_id, ) tx = Transaction(code=Code.create_evm_code(evm_tx)) if is_call: res = await self.master.execute_transaction( tx, sender_address, data["block_height"]) return data_encoder(res) if res is not None else None else: # estimate gas res = await self.master.estimate_gas(tx, sender_address) return quantity_encoder(res) if res is not None else None
# NOTE(review): fragment cut at both edges — it opens with the tail of some
# try/except from an unseen definition and ends mid-call inside
# start_private_server ("server = cls(" is truncated). Left byte-identical;
# restore from version control rather than guessing the missing parts.
#
# Complete pieces visible in this fragment:
#   - eth_address_to_quarkchain_address_decoder: validates a 40-hex-char ETH
#     address and appends a full-shard-id suffix built from every 5th byte
#     pair of the hex string before delegating to address_decoder.
#   - module-level public_methods / private_methods AsyncMethods registries
#     (the targets of the @public_methods.add / @private_methods.add
#     decorators used by the JSONRPCServer handlers elsewhere in this file).
#   - the start of a duplicate `class JSONRPCServer` definition
#     (start_public_server complete, start_private_server truncated).
except Exception: return None def eth_address_to_quarkchain_address_decoder(hex_str): eth_hex = hex_str[2:] if len(eth_hex) != 40: raise InvalidParams("Addresses must be 40 or 0 bytes long") full_shard_id_hex = "" for i in range(4): index = i * 10 full_shard_id_hex += eth_hex[index:index + 2] return address_decoder("0x" + eth_hex + full_shard_id_hex) public_methods = AsyncMethods() private_methods = AsyncMethods() # noinspection PyPep8Naming class JSONRPCServer: @classmethod def start_public_server(cls, env, master_server): server = cls(env, master_server, env.cluster_config.JSON_RPC_PORT, public_methods) server.start() return server @classmethod def start_private_server(cls, env, master_server): server = cls(
class YoAPIServer(YoBaseService):
    """JSON-RPC API service for yo.

    Serves the notification API over HTTP POST on '/' and a healthcheck on
    GET '/.well-known/healthcheck.json' and '/health', via an aiohttp app.
    """

    service_name = 'api_server'

    def __init__(self, database_url=None, loop=None, http_host=None,
                 http_port=None):
        """Build the aiohttp app and register routes and RPC methods.

        :param database_url: passed through to YoBaseService
        :param loop: event loop, passed through to YoBaseService
        :param http_host: interface to bind the HTTP server to
        :param http_port: port to bind the HTTP server to
        """
        super().__init__(database_url=database_url, loop=loop)
        self.host = http_host
        self.port = http_port
        self.web_app = web.Application(loop=self.loop)
        self.api_methods = AsyncMethods()
        # HTTP routes: JSON-RPC on POST /, healthcheck on two GET paths.
        self.web_app.router.add_post('/', self.handle_api)
        self.web_app.router.add_get('/.well-known/healthcheck.json',
                                    self.healthcheck_handler)
        self.web_app.router.add_get('/health', self.healthcheck_handler)
        # JSON-RPC method registrations.
        self.api_methods.add(api_get_notifications, 'yo.get_db_notifications')
        self.api_methods.add(api_mark_read, 'yo.mark_read')
        self.api_methods.add(api_mark_unread, 'yo.mark_unread')
        self.api_methods.add(api_mark_shown, 'yo.mark_shown')
        self.api_methods.add(api_mark_unshown, 'yo.mark_unshown')
        self.api_methods.add(api_get_transports, 'yo.get_transports')
        self.api_methods.add(api_set_transports, 'yo.set_transports')
        self.api_methods.add(self.api_healthcheck, 'health')

    async def healthcheck_handler(self, request):
        """aiohttp handler wrapping api_healthcheck() as a JSON response."""
        return web.json_response(await self.api_healthcheck())

    async def handle_api(self, request):
        """Dispatch an incoming JSON-RPC request to the registered methods."""
        payload = await request.json()
        context = {'app': self}
        response = await self.api_methods.dispatch(payload, context=context)
        return json_response(response)

    @staticmethod
    async def api_healthcheck():
        """Return service status plus build/deploy metadata."""
        return {
            'status': 'OK',
            'source_commit': os.environ.get('SOURCE_COMMIT'),
            'docker_tag': os.environ.get('DOCKER_TAG'),
            'datetime': datetime.datetime.utcnow().isoformat()
        }

    async def main_task(self):
        """Start serving the web app on the service's (running) event loop.

        BUG FIX: the original called web.run_app() here. run_app() is a
        blocking helper that drives its own event loop via
        run_until_complete(), so calling it from inside a coroutine raises
        RuntimeError ("This event loop is already running"). AppRunner +
        TCPSite start the server on the current loop and return control.
        """
        runner = web.AppRunner(self.web_app)
        await runner.setup()
        site = web.TCPSite(runner, host=self.host, port=self.port)
        await site.start()
class YoApp:
    """Top-level yo application.

    Owns the aiohttp web app, database handle, JSON-RPC method dispatcher,
    and the registry of background services plus their startup tasks.
    """

    def __init__(self, config=None, db=None):
        """Wire the aiohttp app and dispatcher.

        :param config: yo config object (must provide get_listen_host/port)
        :param db: database handle shared with request handlers
        """
        self.config = config
        self.db = db
        self.services = {}
        self.service_tasks = {}
        self.loop = asyncio.get_event_loop()
        self.web_app = web.Application(loop=self.loop)
        self.web_app['config'] = {
            'yo_config': self.config,
            'yo_db': self.db,
            'yo_app': self
        }
        self.api_methods = AsyncMethods()
        self.running = False

    async def handle_api(self, request):
        """Dispatch a JSON-RPC request to the registered API methods."""
        req_app = request.app
        payload = await request.json()
        logger.debug('Incoming request: %s', payload)
        if 'params' not in payload:
            payload['params'] = {}  # fix for API methods that have no params
        context = {'yo_db': req_app['config']['yo_db']}
        response = await self.api_methods.dispatch(payload, context=context)
        return web.json_response(response)

    def add_api_method(self, func, func_name):
        """Register *func* under the namespaced name 'yo.<func_name>'."""
        logger.debug('Adding API method %s', func_name)
        self.api_methods.add(func, name='yo.%s' % func_name)

    # pylint: disable=unused-argument
    async def start_background_tasks(self, app):
        """aiohttp on_startup hook: launch every registered service task."""
        logger.info('Starting tasks...')
        for name, task_factory in self.service_tasks.items():
            logger.info('Starting %s', name)
            self.web_app['service_task:%s' % name] = \
                self.web_app.loop.create_task(task_factory())
    # pylint: enable=unused-argument

    @staticmethod
    async def api_healthcheck():
        """Return service status plus build/deploy metadata."""
        return {
            'status': 'OK',
            'source_commit': os.environ.get('SOURCE_COMMIT'),
            'docker_tag': os.environ.get('DOCKER_TAG'),
            'datetime': datetime.datetime.utcnow().isoformat()
        }

    # pylint: disable=unused-argument
    async def healthcheck_handler(self, request):
        """aiohttp handler wrapping api_healthcheck() as a JSON response."""
        return web.json_response(await self.api_healthcheck())
    # pylint: enable=unused-argument

    @staticmethod
    async def handle_options(request):
        """CORS preflight handler: 204 for allowed origins, 403 otherwise.

        BUG FIX: the original used request.headers['Origin'], which raises
        KeyError (-> HTTP 500) when the header is absent; .get() lets
        requests without an Origin header fall through to the 403 branch.
        """
        origin = request.headers.get('Origin')
        if origin in ALLOWED_ORIGINS:
            response = web.Response(status=204, headers={
                'Access-Control-Allow-Methods': 'POST',
                'Access-Control-Allow-Origin': origin,
                'Access-Control-Allow-Headers': '*'
            })
        else:
            response = web.Response(status=403)
        return response

    # pylint: disable=unused-argument
    async def setup_standard_api(self, app):
        """aiohttp on_startup hook: register built-in routes and methods."""
        self.add_api_method(self.api_healthcheck, 'healthcheck')
        self.web_app.router.add_post('/', self.handle_api)
        self.web_app.router.add_get('/.well-known/healthcheck.json',
                                    self.healthcheck_handler)
    # pylint: enable=unused-argument

    def run(self):
        """Blocking entry point: install startup hooks and serve forever."""
        self.running = True
        self.web_app.on_startup.append(self.start_background_tasks)
        self.web_app.on_startup.append(self.setup_standard_api)
        web.run_app(self.web_app,
                    host=self.config.get_listen_host(),
                    port=self.config.get_listen_port())

    def add_service(self, service_kls):
        """Instantiate *service_kls*, register its task and init its API."""
        logger.debug('Adding service %s', service_kls)
        service = service_kls(yo_app=self, config=self.config, db=self.db)
        name = service.get_name()
        self.service_tasks[name] = service.async_task
        service.yo_app = self
        self.services[name] = service
        service.init_api()

    async def invoke_private_api(self, service=None, api_method=None,
                                 **kwargs):
        """Call a private API method on a local service by name.

        Returns an {'error': ...} dict (rather than raising) when the
        service or method is unknown.
        """
        # TODO - add support for URLs other than :local:
        if service not in self.services:
            return {'error': 'No such service found!'}
        if api_method not in self.services[service].private_api_methods:
            return {'error': 'No such method in service'}
        return await self.services[service].private_api_methods[api_method](
            **kwargs)
# NOTE(review): top-level JSON-RPC wiring for hive, CUT mid-statement at
# BOTH ends of this collapsed line: it opens inside a method tuple
# (`hive_api.payouts_last_24h)`) and ends inside a subscript
# (`app['config'][`). In between it builds the condenser_api method tuple,
# registers `call` plus all methods on the `methods` and `legacy_methods`
# AsyncMethods dispatchers (condenser methods namespaced as
# 'condenser_api.<name>'), and seeds the aiohttp app config with block/row
# limits. On this collapsed line the trailing `legacy_methods.add(m)` binds
# to the second for-loop's body, but the original multi-line indentation is
# lost here — TODO confirm loop membership against the original file.
# Left byte-identical because both ends lack their context.
hive_api.payouts_last_24h) jrpc_condenser = (condenser_api.get_followers, condenser_api.get_following, condenser_api.get_follow_count, condenser_api.get_discussions_by_trending, condenser_api.get_discussions_by_hot, condenser_api.get_discussions_by_promoted, condenser_api.get_discussions_by_created, condenser_api.get_discussions_by_blog, condenser_api.get_discussions_by_feed, condenser_api.get_discussions_by_comments, condenser_api.get_replies_by_last_update, condenser_api.get_content, condenser_api.get_content_replies, condenser_api.get_state) methods = AsyncMethods() legacy_methods = AsyncMethods() legacy_methods.add(condenser_api.call, 'call') methods.add(condenser_api.call, 'call') for m in jrpc_methods: methods.add(m) for m in jrpc_condenser: methods.add(m, 'condenser_api.' + m.__name__) legacy_methods.add(m) app = web.Application() app['config'] = dict() app['config']['hive.MAX_BLOCK_NUM_DIFF'] = 10 app['config']['hive.MAX_DB_ROW_RESULTS'] = 100000 app['config'][
class YoApp:
    """Application container wiring together the aiohttp web app, database
    handle, JSON-RPC dispatcher and the registered background services."""

    def __init__(self, config=None, db=None):
        """Set up the web app, routes and JSON-RPC method table."""
        self.config = config
        self.db = db
        self.services = {}
        self.service_tasks = {}
        self.loop = asyncio.get_event_loop()
        self.web_app = web.Application(loop=self.loop)
        self.api_methods = AsyncMethods()
        self.running = False
        self.web_app.router.add_post('/', self.handle_api)
        self.web_app.router.add_get('/.well-known/healthcheck.json',
                                    self.healthcheck_handler)
        # Register every RPC handler from a single table to keep the
        # name/handler pairing easy to scan.
        rpc_table = (
            (yo.api_methods.api_get_notifications, 'yo.get_db_notifications'),
            (yo.api_methods.api_mark_read, 'yo.mark_read'),
            (yo.api_methods.api_mark_unread, 'yo.mark_unread'),
            (yo.api_methods.api_mark_shown, 'yo.mark_shown'),
            (yo.api_methods.api_mark_unshown, 'yo.mark_unshown'),
            (yo.api_methods.api_get_transports, 'yo.get_transports'),
            (yo.api_methods.api_set_transports, 'yo.set_transports'),
            (self.api_healthcheck, 'health'),
        )
        for handler, rpc_name in rpc_table:
            self.api_methods.add(handler, rpc_name)

    async def handle_api(self, request):
        """Dispatch an incoming JSON-RPC request against the method table."""
        payload = await request.json()
        context = {'yo_db': self.db}
        result = await self.api_methods.dispatch(payload, context=context)
        return json_response(result)

    # pylint: disable=unused-argument
    async def start_background_tasks(self, app):
        """aiohttp on_startup hook: spawn each registered service task."""
        logger.info('starting tasks')
        # NOTE(review): nothing in this class populates self.service_tasks
        # (add_service below does not register one), so this loop may be a
        # no-op — confirm services register their tasks elsewhere.
        for task_name, task_factory in self.service_tasks.items():
            logger.info('starting service task', task=task_name)
            self.web_app['service_task:%s' % task_name] = \
                self.web_app.loop.create_task(task_factory())
    # pylint: enable=unused-argument

    @staticmethod
    async def api_healthcheck():
        """Return service status plus build/deploy metadata."""
        return {
            'status': 'OK',
            'source_commit': os.environ.get('SOURCE_COMMIT'),
            'docker_tag': os.environ.get('DOCKER_TAG'),
            'datetime': datetime.datetime.utcnow().isoformat()
        }

    # pylint: disable=unused-argument
    async def healthcheck_handler(self, request):
        """aiohttp handler wrapping api_healthcheck() as a JSON response."""
        return web.json_response(await self.api_healthcheck())
    # pylint: enable=unused-argument

    # pylint: disable=unused-argument
    async def on_cleanup(self, app):
        """aiohttp on_cleanup hook: shut all services down concurrently."""
        logger.info('executing on_cleanup signal handler')
        shutdown_futures = [svc.shutdown() for svc in self.services.values()]
        await asyncio.gather(*shutdown_futures)
    # pylint: enable=unused-argument

    def run(self):
        """Blocking entry point: install lifecycle hooks and serve forever."""
        self.running = True
        self.web_app.on_startup.append(self.start_background_tasks)
        self.web_app.on_cleanup.append(self.on_cleanup)
        web.run_app(self.web_app,
                    host=self.config.http_host,
                    port=self.config.http_port)

    def add_service(self, service_kls):
        """Instantiate *service_kls*, register it and init its API."""
        logger.debug('Adding service', service=service_kls.service_name)
        service = service_kls(yo_app=self, config=self.config, db=self.db)
        self.services[service.get_name()] = service
        service.init_api()
# NOTE(review): top-level method registration for hive's appbase-style API,
# CUT mid-tuple at the start (the assignment target of this condenser method
# tuple is not visible in this chunk). It registers every hive_api method
# under both its bare name and 'hive_api.<name>', every condenser method
# under 'condenser_api.<name>' and 'hive_api.condenser_api.<name>'
# (jussi-style path testing, per the inline TODOs), and builds a separate
# non_appbase_methods dispatcher for the legacy condenser endpoint. On this
# collapsed line everything after the first inline '#' is commented out —
# the original multi-line layout is lost here, so loop bodies cannot be
# reformatted with confidence. Left byte-identical for that reason.
condenser_api.get_follow_count, condenser_api.get_discussions_by_trending, condenser_api.get_discussions_by_hot, condenser_api.get_discussions_by_promoted, condenser_api.get_discussions_by_created, condenser_api.get_discussions_by_blog, condenser_api.get_discussions_by_feed, condenser_api.get_discussions_by_comments, condenser_api.get_replies_by_last_update, condenser_api.get_content, condenser_api.get_content_replies, condenser_api.get_state ) # Register hive_api methods and (appbase) condenser_api methods methods = AsyncMethods() for m in hive_methods: methods.add(m) methods.add(m, 'hive_api.' + m.__name__) # TODO: temp, for testing jussi-style path without jussi for m in condenser_methods: # note: unclear if appbase expects condenser_api.call or call.condenser_api methods.add(m, 'condenser_api.' + m.__name__) methods.add(m, 'hive_api.condenser_api.' + m.__name__) # TODO: temp, for testing jussi-style path without jussi # Register non-appbase condenser_api endpoint (remove after appbase in prod) non_appbase_methods = AsyncMethods() non_appbase_methods.add(condenser_api.call, 'condenser_api.non_appb.call') non_appbase_methods.add(condenser_api.call, 'hive_api.condenser_api.non_appb.call') # TODO: temp, for testing jussi-style path without jussi for m in condenser_methods: non_appbase_methods.add(m)