def __init__(self, config, registry_instance, worker_instance,
             work_order_instance, wo_receipt_instance):
    """
    Initialize the connector with instances of the registry, worker,
    work order and work order receipt implementation classes of the
    blockchain.
    @param config - dict containing connector configurations
    @param registry_instance - implementation class object for registry
    @param worker_instance - implementation class object for worker
    @param work_order_instance - implementation class object for work order
    @param wo_receipt_instance - implementation class object for
                                 work order receipt
    """
    self._registry_instance = registry_instance
    self._wo_receipt_instance = wo_receipt_instance
    # JSON RPC based worker and work order instances
    jrpc_worker_instance = JRPCWorkerRegistryImpl(config)
    jrpc_work_order_instance = JRPCWorkOrderImpl(config)
    self._worker_delegate = WorkerDelegate(
        config, jrpc_worker_instance, worker_instance)
    self._work_order_delegate = WorkOrderDelegate(
        jrpc_work_order_instance, work_order_instance)
    # List of active available worker ids in Avalon
    self._active_worker_ids = []
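# Usage sketch (assumptions: this __init__ belongs to the proxy-model
# generic client whose class name is not shown here, so
# ProxyModelGenericClient below is a placeholder; only implementation
# classes that appear elsewhere in this section are used, and the
# registry/receipt arguments are stubbed with None purely for
# illustration).
#
#     client = ProxyModelGenericClient(
#         config,
#         registry_instance=None,
#         worker_instance=FabricWorkerRegistryImpl(config),
#         work_order_instance=FabricWorkOrderImpl(config),
#         wo_receipt_instance=None)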
def __init__(self, config_file=None, config=None):
    """
    Parameters:
    config_file Optional configuration file path as a string
    config      Optional dictionary loaded from config_file

    Either config_file or config needs to be passed.
    If both are passed, then config takes precedence.
    """
    if config is not None:
        self.__config = config
    else:
        if not isfile(config_file):
            raise FileNotFoundError("File not found at path: {0}".format(
                realpath(config_file)))
        try:
            with open(config_file) as fd:
                self.__config = toml.load(fd)
        except IOError as e:
            # Catch exceptions related to reading the toml file,
            # except for the "file does not exist" case.
            if e.errno != errno.ENOENT:
                raise Exception('Could not open config file: %s' % e)

    self.__blockchain_type = self.__config['blockchain']['type']
    if self.__blockchain_type.lower() == "ethereum":
        self.__worker_registry_list = EthereumWorkerRegistryListImpl(
            self.__config)
    else:
        self.__worker_registry_list = None
    self.__worker_registry = JRPCWorkerRegistryImpl(self.__config)
    self.__work_order = JRPCWorkOrderImpl(self.__config)
    self.__work_order_receipts = JRPCWorkOrderReceiptImpl(self.__config)
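# Usage sketch (assumptions: the __init__ above belongs to the direct-model
# client class; AvalonDirectClient is a placeholder name, and the config
# path and listener URI below are illustrative). Either argument works;
# when both are given, the pre-loaded dict wins.
#
#     client_a = AvalonDirectClient(
#         config_file="sdk/avalon_sdk/tcf_connector.toml")
#
#     config = toml.load("sdk/avalon_sdk/tcf_connector.toml")
#     config['tcf']['json_rpc_uri'] = "http://localhost:1947"
#     client_b = AvalonDirectClient(config=config)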
def create_worker_registry_instance(self, blockchain_type, config):
    # Create a worker registry instance for the direct/proxy model
    if blockchain_type == 'fabric':
        return FabricWorkerRegistryImpl(config)
    elif blockchain_type == 'ethereum':
        return EthereumWorkerRegistryImpl(config)
    else:
        return JRPCWorkerRegistryImpl(config)
def _create_worker_registry_instance(blockchain_type, config):
    # Create a worker registry instance for the direct/proxy model
    if constants.proxy_mode and blockchain_type == 'fabric':
        return FabricWorkerRegistryImpl(config)
    elif constants.proxy_mode and blockchain_type == 'ethereum':
        return EthereumWorkerRegistryImpl(config)
    else:
        logger.info("Direct SDK code path\n")
        return JRPCWorkerRegistryImpl(config)
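# Usage sketch for the factory above (assumptions: `constants.proxy_mode`
# is a module-level flag and `config` is the parsed tcf_connector.toml
# dict, both provided by the surrounding module; the worker_lookup call
# mirrors the direct-model usage shown elsewhere in this section and is
# illustrative only).
#
#     blockchain_type = config['blockchain']['type']
#     worker_registry = _create_worker_registry_instance(
#         blockchain_type, config)
#     lookup_result = worker_registry.worker_lookup(
#         worker_type=WorkerType.TEE_SGX,
#         id=jrpc_req_id)  # jrpc_req_id: any integer JSON RPC request id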
def start(self):
    logging.info("Ethereum Connector service started")
    # Fetch the first worker's details from the shared KV (via direct API)
    # and add the worker to the blockchain.
    # TODO: Fetch all workers from the shared KV and the blockchain
    # and do a 2-way sync.
    jrpc_worker_registry = JRPCWorkerRegistryImpl(self._config)
    worker_ids_onchain = self._lookup_workers_onchain()
    self.__worker_ids = self._lookup_workers_in_kv_storage(
        jrpc_worker_registry)
    self._add_update_worker_to_chain(worker_ids_onchain,
                                     jrpc_worker_registry)
    # Start an event listener that listens for events from the proxy
    # blockchain, extracts the request payload and makes a request
    # to the avalon-listener
    w3 = BlockchainInterface(self._config)
    contract = self._work_order_contract_instance_evt
    # Listening only for the workOrderSubmitted event for now
    listener = w3.newListener(contract, "workOrderSubmitted")
    try:
        daemon = EventProcessor(self._config)
        asyncio.get_event_loop().run_until_complete(
            daemon.start(
                listener,
                self.handleEvent,
                account=None,
                contract=contract,
            ))
    except KeyboardInterrupt:
        asyncio.get_event_loop().run_until_complete(daemon.stop())
def Main(args=None):
    options = _parse_command_line(args)

    config = _parse_config_file(options.config)
    if config is None:
        logger.error("\n Error in parsing config file: {}\n".format(
            options.config))
        sys.exit(-1)

    # mode should be one of listing or registry (default)
    mode = options.mode

    # Http JSON RPC listener uri
    uri = options.uri
    if uri:
        config["tcf"]["json_rpc_uri"] = uri

    # Address of smart contract
    address = options.address
    if address:
        if mode == "listing":
            config["ethereum"]["direct_registry_contract_address"] = \
                address
        elif mode == "registry":
            logger.error(
                "\n Only Worker registry listing address is supported. " +
                "Worker registry address is unsupported \n")
            sys.exit(-1)

    # worker id
    worker_id = options.worker_id
    worker_id_hex = options.worker_id_hex
    worker_id = worker_id_hex if not worker_id \
        else hex_utils.get_worker_id_from_name(worker_id)

    # workload id of worker
    workload_id = options.workload_id
    if not workload_id:
        logger.error("\nWorkload id is mandatory\n")
        sys.exit(-1)

    # work order input data
    in_data = options.in_data

    # Option to send input data in plain text
    in_data_plain_text = options.in_data_plain

    # show receipt in output
    show_receipt = options.receipt

    # show decrypted result in output
    show_decrypted_output = options.decrypted_output

    # requester signature for work order requests
    requester_signature = options.requester_signature

    # setup logging
    config["Logging"] = {"LogFile": "__screen__", "LogLevel": "INFO"}
    plogger.setup_loggers(config.get("Logging", {}))
    sys.stdout = plogger.stream_to_logger(
        logging.getLogger("STDOUT"), logging.DEBUG)
    sys.stderr = plogger.stream_to_logger(
        logging.getLogger("STDERR"), logging.WARN)

    logger.info("******* Hyperledger Avalon Generic client *******")

    if mode == "registry" and address:
        logger.error("\n Worker registry contract address is unsupported \n")
        sys.exit(-1)

    # Retrieve JSON RPC uri from registry list
    if not uri and mode == "listing":
        uri = _retrieve_uri_from_registry_list(config)
        if uri is None:
            logger.error("\n Unable to get http JSON RPC uri \n")
            sys.exit(-1)

    # Prepare worker
    # JRPC request id. Choose any integer value
    jrpc_req_id = 31
    worker_registry = JRPCWorkerRegistryImpl(config)
    if not worker_id:
        # Get first worker from worker registry
        worker_id = _lookup_first_worker(worker_registry, jrpc_req_id)
        if worker_id is None:
            logger.error("\n Unable to get worker \n")
            sys.exit(-1)

    # Retrieve worker details
    jrpc_req_id += 1
    worker_retrieve_result = worker_registry.worker_retrieve(
        worker_id, jrpc_req_id)
    logger.info("\n Worker retrieve response: {}\n".format(
        json.dumps(worker_retrieve_result, indent=4)))

    if "error" in worker_retrieve_result:
        logger.error("Unable to retrieve worker details\n")
        sys.exit(1)

    # Create session key and iv to sign work order request
    session_key = crypto_utility.generate_key()
    session_iv = crypto_utility.generate_iv()

    # Initializing Worker Object
    worker_obj = worker_details.SGXWorkerDetails()
    worker_obj.load_worker(worker_retrieve_result['result']['details'])

    # Do worker verification
    _do_worker_verification(worker_obj)

    logger.info(
        "**********Worker details Updated with Worker ID" +
        "*********\n%s\n", worker_id)

    # Create work order
    if in_data_plain_text:
        # As per TC spec, if encryptedDataEncryptionKey is "-" then
        # input data is not encrypted
        encrypted_data_encryption_key = "-"
    else:
        # As per TC spec, if encryptedDataEncryptionKey is not
        # provided then set it to None which means
        # use default session key to encrypt input data
        encrypted_data_encryption_key = None

    wo_params = _create_work_order_params(
        worker_id, workload_id, in_data, worker_obj.encryption_key,
        session_key, session_iv, encrypted_data_encryption_key)

    client_private_key = crypto_utility.generate_signing_keys()
    if requester_signature:
        # Add requester signature and requester verifying_key
        if wo_params.add_requester_signature(client_private_key) is False:
            logger.info("Work order request signing failed")
            exit(1)

    # Submit work order
    logger.info("Work order submit request : %s, \n \n ",
                wo_params.to_jrpc_string(jrpc_req_id))
    work_order = JRPCWorkOrderImpl(config)
    jrpc_req_id += 1
    response = work_order.work_order_submit(
        wo_params.get_work_order_id(),
        wo_params.get_worker_id(),
        wo_params.get_requester_id(),
        wo_params.to_string(),
        id=jrpc_req_id)
    logger.info("Work order submit response : {}\n ".format(
        json.dumps(response, indent=4)))

    if "error" in response and response["error"]["code"] != \
            WorkOrderStatus.PENDING:
        sys.exit(1)

    # Create receipt
    wo_receipt = JRPCWorkOrderReceiptImpl(config)
    if show_receipt:
        jrpc_req_id += 1
        _create_work_order_receipt(wo_receipt, wo_params,
                                   client_private_key, jrpc_req_id)

    # Retrieve work order result
    jrpc_req_id += 1
    res = work_order.work_order_get_result(
        wo_params.get_work_order_id(), jrpc_req_id)
    logger.info("Work order get result : {}\n ".format(
        json.dumps(res, indent=4)))

    # Check if result field is present in work order response
    if "result" in res:
        # Verify work order response signature
        if _verify_wo_res_signature(
                res['result'],
                worker_obj.verification_key,
                wo_params.get_requester_nonce()) is False:
            logger.error("Work order response signature verification Failed")
            sys.exit(1)
        # Decrypt work order response
        if show_decrypted_output:
            decrypted_res = crypto_utility.decrypted_response(
                res['result'], session_key, session_iv)
            logger.info("\nDecrypted response:\n {}".format(decrypted_res))
    else:
        logger.error("\n Work order get result failed {}\n".format(res))
        sys.exit(1)

    if show_receipt:
        # Retrieve receipt
        jrpc_req_id += 1
        retrieve_wo_receipt = _retrieve_work_order_receipt(
            wo_receipt, wo_params, jrpc_req_id)
        # Verify receipt signature
        if "result" in retrieve_wo_receipt:
            if _verify_receipt_signature(retrieve_wo_receipt) is False:
                logger.error("Receipt signature verification Failed")
                sys.exit(1)
        else:
            logger.info("Work Order receipt retrieve failed")
            sys.exit(1)
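# A minimal entry point for the client above (assumption: _parse_command_line
# treats args=None as "parse sys.argv", as argparse does by default; the
# project's actual entry point may differ).
if __name__ == "__main__":
    Main()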
class DirectModelGenericClient(BaseGenericClient):
    """
    Generic client class to test the end to end flow for the direct model.
    """

    def __init__(self, config):
        super().__init__()
        self._config = config
        self._worker_registry_list = None
        self._worker_instance = JRPCWorkerRegistryImpl(self._config)
        self._work_order_instance = JRPCWorkOrderImpl(self._config)
        self._work_order_receipt = JRPCWorkOrderReceiptImpl(self._config)

    def _get_random_jrpc_id(self):
        return random.randint(1, 10000)

    def retrieve_uri_from_registry_list(self, config):
        # Retrieve the http JSON RPC listener uri from the registry
        # in case of the direct model
        logging.info(
            "\n Retrieve Http JSON RPC listener uri from registry \n")
        # Get the blockchain type
        blockchain_type = config['blockchain']['type']
        if blockchain_type == "Ethereum":
            self._worker_registry_list = \
                EthereumWorkerRegistryListImpl(config)
        else:
            logging.error("\n Worker registry list is currently "
                          "supported only for the "
                          "Ethereum blockchain \n")
            return None

        # Lookup returns a tuple: the first element is the number of
        # registries, the second is a lookup tag and the third is the
        # list of organization ids.
        registry_count, lookup_tag, registry_list = \
            self._worker_registry_list.registry_lookup()
        logging.info("\n Registry lookup response: registry count: {} "
                     "lookup tag: {} registry list: {}\n".format(
                         registry_count, lookup_tag, registry_list))
        if registry_count == 0:
            logging.error("No registries found")
            return None
        # Retrieve the first registry's details.
        registry_retrieve_result = \
            self._worker_registry_list.registry_retrieve(
                registry_list[0])
        logging.info("\n Registry retrieve response: {}\n".format(
            registry_retrieve_result))
        return registry_retrieve_result[0]

    def get_worker_details(self, worker_id):
        """
        Fetch worker details for the given worker id
        """
        worker_retrieve_res = self._worker_instance.worker_retrieve(
            worker_id, self._get_random_jrpc_id())
        if worker_retrieve_res and "result" in worker_retrieve_res:
            status = worker_retrieve_res["result"]["status"]
            details = worker_retrieve_res["result"]["details"]
            logging.info("\n Worker retrieve: worker status {} "
                         "details : {}\n".format(
                             status, json.dumps(details, indent=4)))
            if status == WorkerStatus.ACTIVE.value:
                # Initializing Worker Object
                worker_obj = SGXWorkerDetails()
                worker_obj.load_worker(details)
                return worker_obj
            else:
                logging.error("Worker is not active")
        else:
            return None

    def get_work_order_result(self, work_order_id):
        """
        Retrieve the work order result for the given work order id
        """
        work_order_res = self._work_order_instance.work_order_get_result(
            work_order_id, self._get_random_jrpc_id())
        logging.info("Work order get result {}".format(
            json.dumps(work_order_res, indent=4)))
        if work_order_res and "result" in work_order_res:
            return True, work_order_res
        return False, work_order_res

    def submit_work_order(self, wo_params):
        """
        Submit a work order request
        """
        jrpc_id = self._get_random_jrpc_id()
        wo_request = wo_params.to_string()
        logging.info("\n Work order submit request {}\n".format(
            wo_params.to_jrpc_string(jrpc_id)))
        wo_submit_res = self._work_order_instance.work_order_submit(
            wo_params.get_work_order_id(),
            wo_params.get_worker_id(),
            wo_params.get_requester_id(),
            wo_request,
            jrpc_id)
        logging.info("Work order submit response : {}\n ".format(
            json.dumps(wo_submit_res, indent=4)))
        if wo_submit_res:
            # In asynchronous mode
            if "error" in wo_submit_res:
                if wo_submit_res["error"]["code"] == WorkOrderStatus.PENDING:
                    return True, wo_submit_res
            # In synchronous mode
            elif "result" in wo_submit_res:
                return True, wo_submit_res
            else:
                return False, wo_submit_res
        else:
            return False, wo_submit_res

    def create_work_order_receipt(self, wo_params, client_private_key):
        # Create a work order receipt object using the
        # WorkOrderReceiptRequest class.
        # This function sends a WorkOrderReceiptCreate JSON RPC request.
        wo_request = json.loads(
            wo_params.to_jrpc_string(self._get_random_jrpc_id()))
        wo_receipt_request_obj = WorkOrderReceiptRequest()
        wo_create_receipt = wo_receipt_request_obj.create_receipt(
            wo_request,
            ReceiptCreateStatus.PENDING.value,
            client_private_key)
        logging.info("Work order create receipt request : {} \n \n ".format(
            json.dumps(wo_create_receipt, indent=4)))
        # Submit the work order create receipt jrpc request
        wo_receipt_resp = self._work_order_receipt.work_order_receipt_create(
            wo_create_receipt["workOrderId"],
            wo_create_receipt["workerServiceId"],
            wo_create_receipt["workerId"],
            wo_create_receipt["requesterId"],
            wo_create_receipt["receiptCreateStatus"],
            wo_create_receipt["workOrderRequestHash"],
            wo_create_receipt["requesterGeneratedNonce"],
            wo_create_receipt["requesterSignature"],
            wo_create_receipt["signatureRules"],
            wo_create_receipt["receiptVerificationKey"],
            self._get_random_jrpc_id())
        logging.info("Work order create receipt response : {} \n \n ".format(
            wo_receipt_resp))

    def retrieve_work_order_receipt(self, wo_id):
        """
        Retrieve the work order receipt for the given work order id
        """
        receipt_res = self._work_order_receipt.work_order_receipt_retrieve(
            wo_id, id=self._get_random_jrpc_id())
        logging.info("\n Retrieve receipt response:\n {}".format(
            json.dumps(receipt_res, indent=4)))
        # Retrieve the last update to the receipt by passing a maximum
        # update index (1 << 32)
        receipt_update_retrieve = \
            self._work_order_receipt.work_order_receipt_update_retrieve(
                wo_id, None, 1 << 32, id=self._get_random_jrpc_id())
        logging.info("\n Last update to receipt is:\n {}".format(
            json.dumps(receipt_update_retrieve, indent=4)))
        return receipt_update_retrieve

    def verify_receipt_signature(self, receipt_update_retrieve):
        """
        Verify the work order receipt signature
        """
        signer = worker_signing.WorkerSign()
        status = signer.verify_update_receipt_signature(
            receipt_update_retrieve['result'])
        if status == SignatureStatus.PASSED:
            logging.info(
                "Work order receipt retrieve signature verification " +
                "successful")
        else:
            logging.error(
                "Work order receipt retrieve signature verification failed!!")
            return False
        return True
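# End-to-end usage sketch for DirectModelGenericClient (assumptions: the
# config dict comes from tcf_connector.toml, and wo_params is a work order
# parameters object built elsewhere, e.g. by a BaseGenericClient helper;
# names below are illustrative, not the project's actual test driver).
#
#     client = DirectModelGenericClient(config)
#     worker_obj = client.get_worker_details(worker_id)
#     submitted, submit_res = client.submit_work_order(wo_params)
#     if submitted:
#         done, result_res = client.get_work_order_result(
#             wo_params.get_work_order_id())
#     receipt = client.retrieve_work_order_receipt(
#         wo_params.get_work_order_id())
#     client.verify_receipt_signature(receipt)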
class FabricConnector():
    """
    Fabric blockchain connector
    """

    def __init__(self, listener_url):
        tcf_home = environ.get("TCF_HOME", "../../../")
        config_file = tcf_home + "/sdk/avalon_sdk/tcf_connector.toml"
        if not path.isfile(config_file):
            raise FileNotFoundError("File not found at path: {0}".format(
                path.realpath(config_file)))
        try:
            with open(config_file) as fd:
                self.__config = toml.load(fd)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise Exception("Could not open config file: %s" % e)
        self.__config['tcf']['json_rpc_uri'] = listener_url
        self.__fabric_worker = FabricWorkerRegistryImpl(self.__config)
        self.__fabric_work_order = FabricWorkOrderImpl(self.__config)
        self.__jrpc_worker = JRPCWorkerRegistryImpl(self.__config)
        self.__jrpc_work_order = JRPCWorkOrderImpl(self.__config)
        # List of active available worker ids in Avalon
        self.__worker_ids = []
        # Wait time in sec
        self.WAIT_TIME = 31536000
        nest_asyncio.apply()

    def start(self):
        self.sync_worker()
        loop = asyncio.get_event_loop()
        tasks = self.get_work_order_event_handler_tasks()
        loop.run_until_complete(
            asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED))
        loop.close()

    def sync_worker(self):
        """
        Check for existing workers and update them in the Fabric blockchain.
        """
        # Get all TEE Intel SGX based worker ids from the Fabric blockchain
        worker_ids_onchain = self._lookup_workers_onchain()
        # Get all Intel SGX TEE based worker ids from the shared KV
        self.__worker_ids = self._lookup_workers_in_kv_storage()
        # If a worker id exists in the shared KV, update the worker's
        # details field on chain; otherwise register the worker on the
        # blockchain. Workers that are on chain but no longer in the
        # shared KV are set to Decommissioned status.
        self._add_update_worker_to_chain(worker_ids_onchain,
                                         self.__worker_ids)

    def get_work_order_event_handler_tasks(self):
        """
        Sync work orders with the blockchain:
        1. Listen to the work order submit event
        2. Submit the work order request to the listener
        3. Wait for the work order result
        4. Update the work order result in Fabric
        """
        event_handler = self.__fabric_work_order.\
            get_work_order_submitted_event_handler(
                self.workorder_event_handler_func
            )
        if event_handler:
            tasks = [
                event_handler.start_event_handling(),
                event_handler.stop_event_handling(int(self.WAIT_TIME))
            ]
            return tasks
        else:
            logging.info("Getting the work order submitted event handler "
                         "failed")
            return None

    def workorder_event_handler_func(self, event, block_num, txn_id, status):
        logging.info("Event payload: {}\n Block number: {}\n"
                     "Transaction id: {}\n Status {}".format(
                         event, block_num, txn_id, status))
        jrpc_req_id = 301
        # Extract the work order id from the event payload
        payload_string = event['payload'].decode("utf-8")
        work_order_req = json.loads(payload_string)
        work_order_id = work_order_req['workOrderId']
        # Submit the work order to the listener if the worker id from
        # the event matches one of the available worker ids
        if work_order_req['workerId'] in self.__worker_ids:
            logging.info("Submitting work order to listener")
            response = self.__jrpc_work_order.work_order_submit(
                work_order_req['workOrderId'],
                work_order_req['workerId'],
                work_order_req['requesterId'],
                work_order_req["workOrderRequest"],
                id=jrpc_req_id)
            logging.info("Work order submit response {}".format(response))
            if response and 'error' in response and \
                    response['error']['code'] == \
                    WorkOrderStatus.PENDING.value:
                # Get the work order result
                jrpc_req_id += 1
                work_order_result = \
                    self.__jrpc_work_order.work_order_get_result(
                        work_order_req['workOrderId'],
                        jrpc_req_id
                    )
                logging.info(
                    "Work order get result {}".format(work_order_result))
            # With synchronous work order processing, work order submit
            # returns the result directly
            elif response and 'result' in response:
                work_order_result = response
            else:
                logging.info("work_order_submit failed")
                work_order_result = None
            if work_order_result:
                logging.info("Commit work order result to blockchain")
                # Chaincode call to store the result in the blockchain
                status = self.__fabric_work_order.work_order_complete(
                    work_order_id,
                    json.dumps(work_order_result))
                if status == ContractResponse.SUCCESS:
                    # Remove the entry from the work order list
                    logging.info(
                        "Chaincode invoke call work_order_complete success")
                else:
                    logging.info(
                        "Chaincode invoke call work_order_complete failed")

    def _lookup_workers_in_kv_storage(self):
        """
        Retrieve worker ids from the shared KV using the worker_lookup
        direct API.
        Returns a list of worker ids.
        """
        jrpc_req_id = random.randint(0, 100000)
        worker_lookup_result = self.__jrpc_worker.worker_lookup(
            worker_type=WorkerType.TEE_SGX, id=jrpc_req_id)
        logging.info("\nWorker lookup response from kv storage : {}\n".format(
            json.dumps(worker_lookup_result, indent=4)))
        if "result" in worker_lookup_result and \
                "ids" in worker_lookup_result["result"].keys():
            if worker_lookup_result["result"]["totalCount"] != 0:
                return worker_lookup_result["result"]["ids"]
            else:
                logging.error("No workers found in kv storage")
        else:
            logging.error("Failed to lookup worker in kv storage")
        return []

    def _retrieve_worker_details_from_kv_storage(self, worker_id):
        """
        Retrieve worker details from the shared KV using the direct
        JSON RPC API.
        Returns the worker details in JSON string format.
        """
        jrpc_req_id = random.randint(0, 100000)
        worker_info = self.__jrpc_worker.worker_retrieve(
            worker_id, jrpc_req_id)
        logging.info("Worker retrieve response from kv storage: {}".format(
            json.dumps(worker_info, indent=4)))
        if "error" in worker_info:
            logging.error("Unable to retrieve worker details from kv storage")
            return ""
        else:
            return worker_info["result"]

    def _lookup_workers_onchain(self):
        """
        Lookup all workers on chain to sync up with the KV storage.
        Returns a list of worker ids.
        """
        worker_lookup_result = self.__fabric_worker.worker_lookup(
            worker_type=WorkerType.TEE_SGX)
        logging.info("Worker lookup response from blockchain: {}\n".format(
            json.dumps(worker_lookup_result, indent=4)))
        if worker_lookup_result and worker_lookup_result[0] > 0:
            return worker_lookup_result[2]
        else:
            logging.info("No workers found in fabric blockchain")
            return []

    def _add_update_worker_to_chain(self, wids_onchain, wids_kv):
        """
        Add or update workers in the Fabric blockchain.
        """
        for wid in wids_kv:
            worker_info = self._retrieve_worker_details_from_kv_storage(wid)
            worker_id = wid
            worker_type = WorkerType(worker_info["workerType"])
            org_id = worker_info["organizationId"]
            app_type_id = worker_info["applicationTypeId"]
            details = json.dumps(worker_info["details"])

            result = None
            if wid in wids_onchain:
                logging.info(
                    "Updating worker {} on fabric blockchain".format(wid))
                result = self.__fabric_worker.worker_update(
                    worker_id, details)
            else:
                logging.info(
                    "Adding new worker {} to fabric blockchain".format(wid))
                result = self.__fabric_worker.worker_register(
                    worker_id, worker_type, org_id, [app_type_id], details)
            if result != ContractResponse.SUCCESS:
                logging.error("Error while adding/updating worker in the"
                              " fabric blockchain")

        for wid in wids_onchain:
            # Mark all stale workers on the blockchain as decommissioned
            if wid not in wids_kv:
                worker = self.__fabric_worker.worker_retrieve(wid)
                # worker_retrieve returns a tuple whose first element
                # is the worker status.
                worker_status_onchain = worker[0]
                # If the worker is not already decommissioned, mark it
                # decommissioned as it is no longer available in the
                # KV storage.
                if worker_status_onchain != WorkerStatus.DECOMMISSIONED.value:
                    update_status = self.__fabric_worker.worker_set_status(
                        wid, WorkerStatus.DECOMMISSIONED)
                    if update_status == ContractResponse.SUCCESS:
                        logging.info("Marked worker " + wid +
                                     " as decommissioned on" +
                                     " fabric blockchain")
                    else:
                        logging.info("Failed to update worker " + wid)
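# Usage sketch for FabricConnector (assumptions: an avalon-listener is
# reachable at the illustrative URL below, TCF_HOME points at the Avalon
# source tree, and the Fabric network configured in tcf_connector.toml is
# up; start() blocks, syncing workers and then handling work order events
# until the event handler times out).
#
#     connector = FabricConnector("http://avalon-listener:1947")
#     connector.start()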