def __init__(self, config_file=None, config=None):
    """
    "config_file" is the config file path as a string.
    "config" is a dictionary loaded from config_file.
    Either config_file or config needs to be passed.
    If both are passed, config takes precedence.
    """
    if config is not None:
        self.__config = config
    else:
        if not isfile(config_file):
            raise FileNotFoundError("File not found at path: {0}"
                                    .format(realpath(config_file)))
        try:
            with open(config_file) as fd:
                self.__config = toml.load(fd)
        except IOError as e:
            # Catch exceptions related to the TOML file format,
            # except for the file-does-not-exist case.
            if e.errno != errno.ENOENT:
                raise Exception('Could not open config file: %s' % e)
    self.__blockchain_type = self.__config['blockchain']['type']
    if self.__blockchain_type.lower() == "ethereum":
        self.__worker_registry_list = EthereumWorkerRegistryListImpl(
            self.__config)
    else:
        self.__worker_registry_list = None
    self.__worker_registry = JRPCWorkerRegistryImpl(self.__config)
    self.__work_order = JRPCWorkOrderImpl(self.__config)
    self.__work_order_receipts = JRPCWorkOrderReceiptImpl(self.__config)
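# Usage sketch (illustrative): "DirectClient" stands in for whichever class
# owns the __init__ above. 'blockchain.type' is read directly by __init__;
# 'tcf.json_rpc_uri' and its value are assumptions about what the JRPC
# implementations expect.
config = {
    "blockchain": {"type": "ethereum"},                # selects the Ethereum registry list
    "tcf": {"json_rpc_uri": "http://localhost:1947"}   # assumed listener endpoint
}
# Either pass a pre-loaded dictionary (takes precedence) ...
client = DirectClient(config=config)
# ... or pass a TOML file path and let the constructor load it.
client = DirectClient(config_file="tcf_connector.toml")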
def _create_work_order_instance(blockchain_type, config):
    # Create a work order instance for the proxy (Fabric/Ethereum)
    # or direct model
    if blockchain_type == 'fabric':
        return FabricWorkOrderImpl(config)
    elif blockchain_type == 'ethereum':
        return EthereumWorkOrderProxyImpl(config)
    else:
        return JRPCWorkOrderImpl(config)
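# Usage sketch (illustrative): the dispatcher above is typically driven by the
# 'blockchain.type' value from the loaded config (shape assumed); 'fabric' and
# 'ethereum' select the proxy-model implementations, anything else falls back
# to the direct JSON RPC work order implementation.
blockchain_type = config['blockchain']['type'].lower()
work_order = _create_work_order_instance(blockchain_type, config)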
def __init__(self, listener_url):
    tcf_home = environ.get("TCF_HOME", "../../../")
    config_file = tcf_home + "/sdk/avalon_sdk/tcf_connector.toml"
    if not path.isfile(config_file):
        raise FileNotFoundError("File not found at path: {0}".format(
            path.realpath(config_file)))
    try:
        with open(config_file) as fd:
            self.__config = toml.load(fd)
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise Exception("Could not open config file: %s" % e)
    self.__config['tcf']['json_rpc_uri'] = listener_url
    self.__fabric_worker = FabricWorkerRegistryImpl(self.__config)
    self.__fabric_work_order = FabricWorkOrderImpl(self.__config)
    self.__jrpc_worker = JRPCWorkerRegistryImpl(self.__config)
    self.__jrpc_work_order = JRPCWorkOrderImpl(self.__config)
    # Wait time in seconds (one year)
    self.WAIT_TIME = 31536000
    nest_asyncio.apply()
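# Illustrative shape of the loaded tcf_connector.toml (values invented): the
# constructor above only overwrites 'tcf.json_rpc_uri' with listener_url; any
# Fabric network settings needed by FabricWorkerRegistryImpl and
# FabricWorkOrderImpl live in the same file and are omitted here.
example_config = {
    "tcf": {
        "json_rpc_uri": "http://avalon-listener:1947"  # replaced by listener_url
    }
    # "fabric": {...}  # network profile keys assumed, contents not shown
}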
def _submit_work_order_and_get_result(self, work_order_id, worker_id,
                                      requester_id, work_order_params):
    """
    Submit a work order using the work_order_submit direct API
    and fetch its result.
    """
    work_order_impl = JRPCWorkOrderImpl(self._config)
    response = work_order_impl.work_order_submit(
        work_order_id, worker_id, requester_id, work_order_params,
        id=random.randint(0, 100000))
    logging.info("Work order submit response : {}".format(
        json.dumps(response, indent=4)))
    work_order_result = work_order_impl.work_order_get_result(
        work_order_id, id=random.randint(0, 100000))
    logging.info("Work order get result : {}".format(
        json.dumps(work_order_result, indent=4)))
    return work_order_result
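# Consumption sketch (all names hypothetical): the helper above returns the
# JSON RPC envelope of work_order_get_result, so callers normally branch on
# "result" versus "error" before using the output.
import logging

wo_result = client._submit_work_order_and_get_result(
    work_order_id, worker_id, requester_id, wo_params.to_string())
if wo_result and "result" in wo_result:
    logging.info("Work order succeeded: %s", wo_result["result"])
else:
    logging.error("Work order failed: %s", wo_result)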
class FabricConnector():
    """
    Fabric blockchain connector
    """
    def __init__(self, listener_url):
        tcf_home = environ.get("TCF_HOME", "../../../")
        config_file = tcf_home + "/sdk/avalon_sdk/tcf_connector.toml"
        if not path.isfile(config_file):
            raise FileNotFoundError("File not found at path: {0}".format(
                path.realpath(config_file)))
        try:
            with open(config_file) as fd:
                self.__config = toml.load(fd)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise Exception("Could not open config file: %s" % e)
        self.__config['tcf']['json_rpc_uri'] = listener_url
        self.__fabric_worker = FabricWorkerRegistryImpl(self.__config)
        self.__fabric_work_order = FabricWorkOrderImpl(self.__config)
        self.__jrpc_worker = JRPCWorkerRegistryImpl(self.__config)
        self.__jrpc_work_order = JRPCWorkOrderImpl(self.__config)
        # Wait time in seconds (one year)
        self.WAIT_TIME = 31536000
        nest_asyncio.apply()

    def start(self):
        self.sync_worker()
        loop = asyncio.get_event_loop()
        tasks = self.get_work_order_event_handler_tasks()
        loop.run_until_complete(
            asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED))
        loop.close()

    def sync_worker(self):
        """
        Check for existing workers and update them on the Fabric blockchain
        """
        # Get all Intel SGX TEE based worker ids from the Fabric blockchain
        worker_ids_onchain = self._lookup_workers_onchain()
        # Get all Intel SGX TEE based worker ids from the shared KV storage
        worker_ids_kv = self._lookup_workers_in_kv_storage()
        # If a worker id exists in the shared KV storage, update its details
        # on chain; otherwise register the worker on the blockchain.
        # Workers present on the blockchain but missing from the shared KV
        # storage are set to Decommissioned status.
        self._add_update_worker_to_chain(worker_ids_onchain, worker_ids_kv)

    def get_work_order_event_handler_tasks(self):
        """
        Sync work orders with the blockchain:
        1. Listen for the work order submit event
        2. Submit the work order request to the listener
        3. Wait for the work order result
        4. Update the work order result to Fabric
        """
        event_handler = self.__fabric_work_order.\
            get_work_order_submitted_event_handler(
                self.workorder_event_handler_func)
        if event_handler:
            tasks = [
                event_handler.start_event_handling(),
                event_handler.stop_event_handling(int(self.WAIT_TIME))
            ]
            return tasks
        else:
            logging.info(
                "Getting the work order submitted event handler failed")
            return None

    def workorder_event_handler_func(self, event, block_num, txn_id, status):
        logging.info("Event payload: {}\n Block number: {}\n"
                     "Transaction id: {}\n Status {}".format(
                         event, block_num, txn_id, status))
        jrpc_req_id = 301
        # Extract the work order request from the event payload
        payload_string = event['payload'].decode("utf-8")
        work_order_req = json.loads(payload_string)
        work_order_id = work_order_req['workOrderId']
        # Submit the work order to the listener
        logging.info("Submitting work order to the listener")
        response = self.__jrpc_work_order.work_order_submit(
            work_order_req['workOrderId'],
            work_order_req['workerId'],
            work_order_req['requesterId'],
            work_order_req["workOrderRequest"],
            id=jrpc_req_id)
        logging.info("Work order submit response {}".format(response))
        if response and 'error' in response and \
                response['error']['code'] == WorkOrderStatus.PENDING.value:
            # Get the work order result
            jrpc_req_id += 1
            work_order_result = \
                self.__jrpc_work_order.work_order_get_result(
                    work_order_req['workOrderId'],
                    jrpc_req_id)
            logging.info("Work order get result {}".format(work_order_result))
            if work_order_result:
                logging.info("Commit work order result to blockchain")
                # Invoke chaincode to store the result on the blockchain
                status = self.__fabric_work_order.work_order_complete(
                    work_order_id, json.dumps(work_order_result))
                if status == ContractResponse.SUCCESS:
                    # Remove the entry from the work order list
                    logging.info(
                        "Chaincode invoke call work_order_complete succeeded")
                else:
                    logging.info(
                        "Chaincode invoke call work_order_complete failed")
            else:
                logging.info("work_order_get_result failed")
        else:
            logging.info("work_order_submit failed")

    def _lookup_workers_in_kv_storage(self):
        """
        Retrieve the worker ids from the shared KV storage using the
        worker_lookup direct API.
        Returns a list of worker ids.
        """
        jrpc_req_id = random.randint(0, 100000)
        worker_lookup_result = self.__jrpc_worker.worker_lookup(
            worker_type=WorkerType.TEE_SGX, id=jrpc_req_id)
        logging.info("\nWorker lookup response from KV storage : {}\n".format(
            json.dumps(worker_lookup_result, indent=4)))
        if "result" in worker_lookup_result and \
                "ids" in worker_lookup_result["result"].keys():
            if worker_lookup_result["result"]["totalCount"] != 0:
                return worker_lookup_result["result"]["ids"]
            else:
                logging.error("No workers found in KV storage")
        else:
            logging.error("Failed to lookup worker in KV storage")
        return []

    def _retrieve_worker_details_from_kv_storage(self, worker_id):
        """
        Retrieve worker details from the shared KV storage using the
        direct JSON RPC API.
        Returns the worker details in JSON string format.
        """
        jrpc_req_id = random.randint(0, 100000)
        worker_info = self.__jrpc_worker.worker_retrieve(
            worker_id, jrpc_req_id)
        logging.info("Worker retrieve response from KV storage: {}".format(
            json.dumps(worker_info, indent=4)))
        if "error" in worker_info:
            logging.error("Unable to retrieve worker details from KV storage")
            return ""
        else:
            return worker_info["result"]

    def _lookup_workers_onchain(self):
        """
        Look up all workers on chain to sync up with the KV storage.
        Returns a list of worker ids.
        """
        worker_lookup_result = self.__fabric_worker.worker_lookup(
            worker_type=WorkerType.TEE_SGX)
        logging.info("Worker lookup response from blockchain: {}\n".format(
            json.dumps(worker_lookup_result, indent=4)))
        if worker_lookup_result and worker_lookup_result[0] > 0:
            return worker_lookup_result[2]
        else:
            logging.info("No workers found in Fabric blockchain")
            return []

    def _add_update_worker_to_chain(self, wids_onchain, wids_kv):
        """
        Add or update workers on the Fabric blockchain.
        """
        for wid in wids_kv:
            worker_info = self._retrieve_worker_details_from_kv_storage(wid)
            worker_id = wid
            worker_type = WorkerType(worker_info["workerType"])
            org_id = worker_info["organizationId"]
            app_type_id = worker_info["applicationTypeId"]
            details = json.dumps(worker_info["details"])
            result = None
            if wid in wids_onchain:
                logging.info(
                    "Updating worker {} on Fabric blockchain".format(wid))
                result = self.__fabric_worker.worker_update(
                    worker_id, details)
            else:
                logging.info(
                    "Adding new worker {} to Fabric blockchain".format(wid))
                result = self.__fabric_worker.worker_register(
                    worker_id, worker_type, org_id, [app_type_id], details)
            if result != ContractResponse.SUCCESS:
                logging.error("Error while adding/updating worker on the"
                              " Fabric blockchain")
        for wid in wids_onchain:
            # Mark all stale workers on the blockchain as decommissioned
            if wid not in wids_kv:
                worker = self.__fabric_worker.worker_retrieve(wid)
                # worker_retrieve returns a tuple whose first element
                # denotes the status of the worker.
                worker_status_onchain = worker[0]
                # If the worker is not already decommissioned, mark it
                # decommissioned as it is no longer available in the
                # KV storage.
                if worker_status_onchain != WorkerStatus.DECOMMISSIONED.value:
                    update_status = self.__fabric_worker.worker_set_status(
                        wid, WorkerStatus.DECOMMISSIONED)
                    if update_status == ContractResponse.SUCCESS:
                        logging.info("Marked worker " + wid +
                                     " as decommissioned on the"
                                     " Fabric blockchain")
                    else:
                        logging.info("Updating worker " + wid + " failed")
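# Minimal usage sketch, assuming an Avalon listener is reachable at the URL
# below (value illustrative): FabricConnector syncs workers once and then
# processes work order events until WAIT_TIME expires.
if __name__ == "__main__":
    connector = FabricConnector("http://avalon-listener:1947")
    connector.start()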
class FabricConnector():
    """
    Fabric blockchain connector
    """
    def __init__(self, listener_url):
        tcf_home = environ.get("TCF_HOME", "../../../")
        config_file = tcf_home + "/sdk/avalon_sdk/tcf_connector.toml"
        if not path.isfile(config_file):
            raise FileNotFoundError("File not found at path: {0}".format(
                path.realpath(config_file)))
        try:
            with open(config_file) as fd:
                self.__config = toml.load(fd)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise Exception("Could not open config file: %s" % e)
        self.__config['tcf']['json_rpc_uri'] = listener_url
        self.__fabric_worker = FabricWorkerRegistryImpl(self.__config)
        self.__fabric_work_order = FabricWorkOrderImpl(self.__config)
        self.__jrpc_worker = JRPCWorkerRegistryImpl(self.__config)
        self.__jrpc_work_order = JRPCWorkOrderImpl(self.__config)
        # Wait time in seconds (one year)
        self.WAIT_TIME = 31536000
        nest_asyncio.apply()

    def start(self):
        self.sync_worker()
        loop = asyncio.get_event_loop()
        tasks = self.get_work_order_event_handler_tasks()
        loop.run_until_complete(
            asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED))
        loop.close()

    def sync_worker(self):
        """
        Check for an existing worker and update it on the Fabric blockchain
        """
        worker_type = WorkerType.TEE_SGX
        req_id = 15
        lookup_result = self.__jrpc_worker.worker_lookup(
            worker_type=worker_type, id=req_id)
        logging.info("worker lookup result {}".format(lookup_result))
        if lookup_result and 'result' in lookup_result:
            # Since there is currently only one worker, get the first one
            if lookup_result['result']['totalCount'] > 0:
                worker_id = lookup_result['result']['ids'][0]
                worker_result = self.__jrpc_worker.worker_retrieve(
                    worker_id, req_id + 1)
                logging.info(
                    "worker retrieve result {}".format(worker_result))
                if worker_result and 'result' in worker_result:
                    worker = worker_result['result']
                    # Add the worker to the Fabric blockchain
                    status = self.__fabric_worker.worker_register(
                        worker_id,
                        WorkerType(int(worker['workerType'])),
                        worker['organizationId'],
                        [worker['applicationTypeId']],
                        json.dumps(worker['details']))
                    if status == 0:
                        logging.info("Added worker to Fabric blockchain")
                    else:
                        logging.info(
                            "Failed to add worker to Fabric blockchain")
                else:
                    logging.info("Failed to retrieve worker")
            else:
                logging.info("No workers are available!")
        else:
            logging.info("Failed to lookup workers")

    def get_work_order_event_handler_tasks(self):
        """
        Sync work orders with the blockchain:
        1. Listen for the work order submit event
        2. Submit the work order request to the listener
        3. Wait for the work order result
        4. Update the work order result to Fabric
        """
        event_handler = self.__fabric_work_order.\
            get_work_order_submitted_event_handler(
                self.workorder_event_handler_func)
        if event_handler:
            tasks = [
                event_handler.start_event_handling(),
                event_handler.stop_event_handling(int(self.WAIT_TIME))
            ]
            return tasks
        else:
            logging.info(
                "Getting the work order submitted event handler failed")
            return None

    def workorder_event_handler_func(self, event, block_num, txn_id, status):
        logging.info("Event payload: {}\n Block number: {}\n"
                     "Transaction id: {}\n Status {}".format(
                         event, block_num, txn_id, status))
        jrpc_req_id = 301
        # Extract the work order request from the event payload
        payload_string = event['payload'].decode("utf-8")
        logging.info("work order request {}\n {}".format(
            payload_string, type(payload_string)))
        work_order_req = json.loads(payload_string)
        work_order_id = work_order_req['workOrderId']
        # Submit the work order to the listener
        logging.info("Submitting work order to the listener")
        response = self.__jrpc_work_order.work_order_submit(
            work_order_req['workOrderId'],
            work_order_req['workerId'],
            work_order_req['requesterId'],
            work_order_req["workOrderRequest"],
            id=jrpc_req_id)
        logging.info("Work order submit response {}".format(response))
        if response and 'error' in response and \
                response['error']['code'] == WorkOrderStatus.PENDING.value:
            # Get the work order result
            jrpc_req_id += 1
            work_order_result = self.__jrpc_work_order.work_order_get_result(
                work_order_req['workOrderId'], jrpc_req_id)
            logging.info("Work order get result {}".format(work_order_result))
            if work_order_result and 'result' in work_order_result:
                logging.info("Commit work order result to blockchain")
                # Invoke chaincode to store the result on the blockchain
                status = self.__fabric_work_order.work_order_complete(
                    work_order_id, json.dumps(work_order_result['result']))
                if status == 0:
                    # Remove the entry from the work order list
                    logging.info(
                        "Chaincode invoke call work_order_complete succeeded")
                else:
                    logging.info(
                        "Chaincode invoke call work_order_complete failed")
            else:
                logging.info("work_order_get_result failed")
        else:
            logging.info("work_order_submit failed")
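# Illustrative shape of the chaincode event consumed by
# workorder_event_handler_func above (values invented): the handler decodes
# event['payload'] as UTF-8 JSON and reads these four keys before forwarding
# the request to the listener.
import json

example_event = {
    "payload": json.dumps({
        "workOrderId": "0x1234",          # invented id
        "workerId": "0xabcd",             # invented id
        "requesterId": "0x5678",          # invented id
        "workOrderRequest": "{...}"       # serialized work order request (elided)
    }).encode("utf-8")
}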
def Main(args=None):
    options = _parse_command_line(args)
    config = _parse_config_file(options.config)
    if config is None:
        logger.error("\n Error in parsing config file: {}\n".format(
            options.config))
        sys.exit(-1)
    # mode should be one of listing or registry (default)
    mode = options.mode
    # HTTP JSON RPC listener URI
    uri = options.uri
    if uri:
        config["tcf"]["json_rpc_uri"] = uri
    # Address of the smart contract
    address = options.address
    if address:
        if mode == "listing":
            config["ethereum"]["direct_registry_contract_address"] = \
                address
        elif mode == "registry":
            logger.error(
                "\n Only Worker registry listing address is supported. " +
                "Worker registry address is unsupported \n")
            sys.exit(-1)
    # Worker id
    worker_id = options.worker_id
    # Workload id of the worker
    workload_id = options.workload_id
    if not workload_id:
        logger.error("\nWorkload id is mandatory\n")
        sys.exit(-1)
    # Work order input data
    in_data = options.in_data
    # Show receipt in output
    show_receipt = options.receipt
    # Show decrypted result in output
    show_decrypted_output = options.decrypted_output
    # Requester signature for work order requests
    requester_signature = options.requester_signature
    # Set up logging
    config["Logging"] = {"LogFile": "__screen__", "LogLevel": "INFO"}
    plogger.setup_loggers(config.get("Logging", {}))
    sys.stdout = plogger.stream_to_logger(logging.getLogger("STDOUT"),
                                          logging.DEBUG)
    sys.stderr = plogger.stream_to_logger(logging.getLogger("STDERR"),
                                          logging.WARN)
    logger.info("******* Hyperledger Avalon Generic client *******")
    if mode == "registry" and address:
        logger.error("\n Worker registry contract address is unsupported \n")
        sys.exit(-1)
    # Retrieve the JSON RPC URI from the registry list
    if not uri and mode == "listing":
        uri = _retrieve_uri_from_registry_list(config)
        if uri is None:
            logger.error("\n Unable to get http JSON RPC uri \n")
            sys.exit(-1)
    # Prepare worker
    # JRPC request id; choose any integer value
    jrpc_req_id = 31
    worker_registry = JRPCWorkerRegistryImpl(config)
    if not worker_id:
        # Get the first worker from the worker registry
        worker_id = _lookup_first_worker(worker_registry, jrpc_req_id)
        if worker_id is None:
            logger.error("\n Unable to get worker \n")
            sys.exit(-1)
    # Retrieve worker details
    jrpc_req_id += 1
    worker_retrieve_result = worker_registry.worker_retrieve(
        worker_id, jrpc_req_id)
    logger.info("\n Worker retrieve response: {}\n".format(
        json.dumps(worker_retrieve_result, indent=4)))
    if "error" in worker_retrieve_result:
        logger.error("Unable to retrieve worker details\n")
        sys.exit(1)
    # Create session key and iv to sign the work order request
    session_key = crypto_utility.generate_key()
    session_iv = crypto_utility.generate_iv()
    # Initialize the worker object
    worker_obj = worker_details.SGXWorkerDetails()
    worker_obj.load_worker(worker_retrieve_result)
    logger.info(
        "**********Worker details Updated with Worker ID" +
        "*********\n%s\n", worker_id)
    # Create work order
    wo_params = _create_work_order_params(worker_id, workload_id,
                                          in_data,
                                          worker_obj.encryption_key,
                                          session_key, session_iv)
    client_private_key = crypto_utility.generate_signing_keys()
    if requester_signature:
        # Add requester signature and requester verifying key
        if wo_params.add_requester_signature(client_private_key) is False:
            logger.info("Work order request signing failed")
            exit(1)
    # Submit work order
    logger.info("Work order submit request : %s, \n \n ",
                wo_params.to_jrpc_string(jrpc_req_id))
    work_order = JRPCWorkOrderImpl(config)
    jrpc_req_id += 1
    response = work_order.work_order_submit(
        wo_params.get_work_order_id(),
        wo_params.get_worker_id(),
        wo_params.get_requester_id(),
        wo_params.to_string(),
        id=jrpc_req_id)
    logger.info("Work order submit response : {}\n ".format(
        json.dumps(response, indent=4)))
    if "error" in response and response["error"]["code"] != \
            WorkOrderStatus.PENDING:
        sys.exit(1)
    # Create receipt
    wo_receipt = JRPCWorkOrderReceiptImpl(config)
    if show_receipt:
        jrpc_req_id += 1
        _create_work_order_receipt(wo_receipt, wo_params,
                                   client_private_key, jrpc_req_id)
    # Retrieve work order result
    jrpc_req_id += 1
    res = work_order.work_order_get_result(
        wo_params.get_work_order_id(), jrpc_req_id)
    logger.info("Work order get result : {}\n ".format(
        json.dumps(res, indent=4)))
    # Check if the result field is present in the work order response
    if "result" in res:
        # Verify the work order response signature
        if _verify_wo_res_signature(res,
                                    worker_obj.verification_key) is False:
            logger.error(
                "Work order response signature verification Failed")
            sys.exit(1)
        # Decrypt the work order response
        if show_decrypted_output:
            decrypted_res = crypto_utility.decrypted_response(
                res, session_key, session_iv)
            logger.info("\nDecrypted response:\n {}".format(decrypted_res))
    else:
        logger.error("\n Work order get result failed {}\n".format(res))
        sys.exit(1)
    if show_receipt:
        # Retrieve receipt
        jrpc_req_id += 1
        retrieve_wo_receipt = _retrieve_work_order_receipt(
            wo_receipt, wo_params, jrpc_req_id)
        # Verify receipt signature
        if _verify_receipt_signature(retrieve_wo_receipt) is False:
            logger.error("Receipt signature verification Failed")
            sys.exit(1)
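# Invocation sketch: the flag names below are assumptions inferred from the
# option attributes Main() reads (options.workload_id, options.in_data,
# options.decrypted_output); check _parse_command_line for the exact spelling.
if __name__ == "__main__":
    Main([
        "--workload_id", "echo-result",   # assumed workload name
        "--in_data", "Hello Avalon",
        "--decrypted_output"              # assumed boolean flag
    ])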