def paymentService():
    """Handle a service-payment request: decode the POST body and trigger payment for the client address."""
    # Decode raw request bytes into a JSON object.
    payload = TypesUtil.string_to_json(TypesUtil.bytes_to_string(request.data))
    # Issue the service payment for the requesting client address.
    mySrvExchange.service_payment(payload['client_addr'])
    return jsonify({'paymentService': 'Succeed'}), 201
def test_query(tx_json):
    """Query the ABCI app via RPC and print the decoded key/value from the response.

    Args:
        tx_json: query parameters forwarded to PRC_Client.abci_query().
    """
    query_ret = PRC_Client.abci_query(tx_json)
    print(query_ret)

    # key/value come back base64-encoded; decode before display.
    key_str = query_ret['result']['response']['key']
    # fix: identity comparison with None (was `key_str != None`)
    if key_str is not None:
        print("key:" + TypesUtil.base64_to_ascii(key_str))

    value_str = query_ret['result']['response']['value']
    if value_str is not None:
        str_ascii = TypesUtil.base64_to_ascii(value_str)
        print("value:" + str_ascii)
def test_ABACRules():
    """Exercise the ABACRuleManager API: create table, insert a rule, select entries and build an update."""
    #test Api
    path_db = 'ABAC.db'
    # new Userdata table
    ABACRuleManager.create_table(path_db)
    #ABACRuleManager.remove_table(path_db)
    #test insert user data
    rule_arg = {}
    rule_arg['Name'] = "rule2"
    rule_arg['AttrUser'] = "******"
    rule_arg['AttrAction'] = "GET"
    rule_arg['AttrResource'] = "/test/api/v1.0/dt"
    #define environmental attribute
    env_time = {}
    env_time['type'] = 'Timespan'
    env_time['value'] = {}
    env_time['value']['start'] = '14:12:32'
    env_time['value']['end'] = '23:12:32'
    # environment attribute is persisted as a JSON string column
    rule_arg['AttrEnvironment'] = TypesUtil.json_to_string(env_time)
    ABACRuleManager.insert_entry(path_db, rule_arg)
    #search test
    rules_list = ABACRuleManager.select_Allentry(path_db)
    print(rules_list)
    print(TypesUtil.string_to_json(rules_list[0]['AttrEnvironment'])['type'])
    rules_entry = ABACRuleManager.select_ByName(path_db, 'rule1')
    print(rules_entry)
    #build up fields condition
    field_data = {}
    #field_data['Name'] = "rule1"
    field_data['AttrUser'] = "******"
    field_data['AttrAction'] = "GET"
    field_data['AttrResource'] = "/test/api/v1.0/dt/project"
    #define environmental attribute
    env_time = {}
    env_time['type'] = 'Timespan'
    env_time['value'] = {}
    env_time['value']['start'] = '8:12:32'
    env_time['value']['end'] = '14:12:32'
    #field_data['AttrEnvironment'] = TypesUtil.json_to_string(env_time)
    #rules_entry = ABACRuleManager.select_ByFieldname(path_db, field_data)
    #print(rules_entry)
    #update test
    # NOTE(review): update_arg aliases rule_arg (same dict object) — presumably intentional here
    update_arg = rule_arg
    update_arg['AttrUser'] = "******"
    update_arg['AttrResource'] = "/test/api/v1.0/dt/project"
    env_update = TypesUtil.string_to_json(update_arg['AttrEnvironment'])
    env_update['value']['start'] = "13:12:32"
    env_update['value']['end'] = "22:12:32"
def commitService():
    """Commit a service transaction: subscriber commit (op_state==1) or publisher commit otherwise."""
    # Decode the JSON request body.
    body = TypesUtil.string_to_json(TypesUtil.bytes_to_string(request.data))
    op_state = body['op_state']
    balance = body['balance']
    # Dispatch on the operation mode flag.
    if op_state == 1:
        mySrvExchange.subscriber_commit(balance)
    else:
        mySrvExchange.publisher_commit()
    return jsonify({'commitService': 'Succeed'}), 201
def registerService():
    """Register a service party: recipient update (op_state==1) or provider update otherwise."""
    # Decode the JSON request body.
    body = TypesUtil.string_to_json(TypesUtil.bytes_to_string(request.data))
    client_addr = body['client_addr']
    op_state = body['op_state']
    service_info = body['service_info']
    # Recipient vs provider registration, selected by op_state.
    if op_state == 1:
        mySrvExchange.updateRecipient(client_addr, service_info)
    else:
        mySrvExchange.updateProvider(client_addr, service_info)
    return jsonify({'registerService': 'Succeed'}), 201
def updateBroker():
    """Bind a client address to a zone as subscriber (op_state==1) or publisher otherwise."""
    # Decode the JSON request body.
    body = TypesUtil.string_to_json(TypesUtil.bytes_to_string(request.data))
    client_addr = body['client_addr']
    op_state = body['op_state']
    zone_id = body['zid']
    # Subscriber vs publisher role update, selected by op_state.
    if op_state == 1:
        mySrvExchange.updateSubscriber(client_addr, zone_id)
    else:
        mySrvExchange.updatePublisher(client_addr, zone_id)
    return jsonify({'updateBroker': 'Succeed'}), 201
def test_access():
    """List users from the RBAC database and dump the first access-right entry of the second user."""
    db_path = 'RBAC.db'
    # Load every user record.
    all_users = UsersManager.select_Allentry(db_path)
    print(all_users)
    # Look up the access rights attached to the second user's name.
    access_rows = AccessManager.select_ByName(db_path, all_users[1]['Name'])
    print(access_rows)
    # AccessRight is stored as a JSON-list string; decode it for inspection.
    rights = TypesUtil.string_to_jsonlist(access_rows[0]['AccessRight'])
    print(rights)
    print(rights[0]['conditions']['value']['start'])
def generateAdmin():
    """Build the 'admin' role record: role name plus a JSON-list string of two timespan-guarded access rights."""
    # Each access right grants GET on one resource within a time window.
    # Key insertion order matches the original construction so serialization is unchanged.
    access_rights = [
        {
            'resource': '/test/api/v1.0/dt',
            'action': 'GET',
            'conditions': {
                'value': {'start': '16:12:32', 'end': '23:32:32'},
                'type': 'Timespan',
            },
        },
        {
            'resource': '/test/api/v1.0/dt/project',
            'action': 'GET',
            'conditions': {
                'value': {'start': '8:12:32', 'end': '16:32:32'},
                'type': 'Timespan',
            },
        },
    ]
    # Role record layout: [name, serialized access-right list].
    return ['admin', TypesUtil.jsonlist_to_string(access_rights)]
def test_CapACToken(): addr_list = './addr_list.json' # ========== get host account ========= accounts = myCapACToken.getAccounts() # ========== Get account address ========= node1_address = myCapACToken.getAddress('PI_Node_1', addr_list) node2_address = myCapACToken.getAddress('TKB1_node1', addr_list) print("Account: " + node2_address) # ============== Read token data using CapACToken call getCapTokenStatus() ======== '''token_data=myCapACToken.getCapTokenStatus(node1_address); CapACToken.print_tokendata(token_data)''' # ========= Read token data using CapPolicy function get_token() ============= token_data = CapPolicy.get_token(node1_address) print(token_data['VZone_master']) ac = TypesUtil.string_to_json(token_data['authorization']) print(ac['resource']) # ========= Write token data to 'token.dat' ============= #FileUtil.AddLine('token.dat', TypesUtil.json_to_string(token_data)) # ========= Read token data from 'token.dat' ============= '''read_token=FileUtil.ReadLines('token.dat') json_token=TypesUtil.string_to_json(read_token[0]) print(json_token['initialized']) print(json_token['issuedate'])''' print(CapPolicy.is_token_valid(token_data))
def test_pyca():
    """Smoke-test the Crypto_Hash helpers: hash one message, then verify against a different one."""
    # transfer string data to bytes block (fix: dropped stray semicolons and dead trailing `pass`)
    bytes_block = TypesUtil.string_to_bytes('samuel')
    hash_value = Crypto_Hash.generate_hash(bytes_block)
    # Deliberate mismatch ('samuelx' vs hashed 'samuel'): prints the negative verification result.
    print(Crypto_Hash.verify_hash(hash_value, b'samuelx'))
def tx_evaluate(model_name):
    '''
    Launch tx and evaluate tx committed time

    Args:
        model_name: model file
    Returns:
        tx committed result
    '''
    # 1) Load model from file
    model = ModelUtils.load_model(model_name)

    # 2) calculate hash value for model
    hash_value = ModelUtils.hash_model(model)

    # 3) evaluate tx committed time
    start_time = time.time()
    logger.info("tx hashed model: {} to blockchain...\n".format(model_name))

    # -------- prepare parameter for tx ------------
    tx_json = {}
    key_str = model_name
    value_str = TypesUtil.string_to_hex(hash_value)
    tx_data = key_str + "=" + value_str
    # --------- build parameter string: tx=? --------
    tx_json['tx'] = '"' + tx_data + '"'
    tx_ret = Tender_RPC.broadcast_tx_commit(tx_json)

    exec_time = time.time() - start_time
    # fix: removed spurious second argument ('.3f') that str.format() silently ignored
    logger.info("tx committed time: {:.3f}\n".format(exec_time))
    FileUtil.save_testlog('test_results', 'exec_tx_commit_tendermint.log', format(exec_time, '.3f'))
    return tx_ret
def hash_dataset(dataset, keep_labels):
    '''
    Generate hash value of loaded dataset (tensor-->numpy-->string)
    Args:
        dataset: dataset object yielding (data_tensor, target_tensor) pairs
        keep_labels: only samples whose target is contained in this collection are hashed
    Returns:
        Binary hash value
    '''
    str_dataset = []
    # For each dataset tuple, serialize [data, target] into a string
    logger.info("For each dataset's touple to get str_dataset...\n")
    # fix: removed unused counter `i = 0` left over from earlier code
    for data_tensor, target_tensor in dataset:
        # skip samples whose label was filtered out
        if target_tensor in keep_labels:
            # convert to tuple [data, target]
            value_np = [data_tensor, target_tensor]
            # convert to string, which is used for hash function
            str_dataset.append(str(value_np))

    # convert string to byte before hash operation
    bytes_block = TypesUtil.string_to_bytes(str(str_dataset))

    # generate hash value based on byte string
    hash_value = Crypto_Hash.generate_hash(bytes_block)

    return hash_value
def hash_model(model):
    '''
    Generate hash value of model data (tensor-->numpy-->string)
    Args:
        model: tensor model object
    Returns:
        Binary hash value
    '''
    logger.info("For each model's state_dict to get str_model...\n")
    # Serialize every parameter tensor as [name, stringified numpy array].
    state = model.state_dict()
    str_model = [[param_name, str(state[param_name].numpy())] for param_name in state]
    # Hash the byte form of the serialized parameter list.
    serialized = TypesUtil.string_to_bytes(str(str_model))
    return Crypto_Hash.generate_hash(serialized)
def string_test():
    """Upload a string (plain value or key:value JSON) via RPC_Curl, then download it back by swarm hash."""
    #==================== test upload string =========================
    tx_json = {}
    # tx_size = 128*1024
    # tx_data = TypesUtil.string_to_hex(os.urandom(tx_size))
    kv_mode = 1
    if kv_mode == 0:
        # ----------------- 1) value --------------
        tx_data = "This is samuel test message!"
    else:
        # ----------------- 2) key:value --------------
        json_value = {}
        json_value['id'] = '1'
        json_value['name'] = "samuel_xu"
        json_value['age'] = 28
        tx_data = TypesUtil.json_to_string(json_value)
    tx_json['data'] = tx_data
    post_ret = RPC_Curl.upload_string(tx_json)
    print(post_ret)

    #==================== test download string =========================
    # fix: removed unused `query_json = {}` local
    # use string hash
    # swarm_hash = '4c9293963f4c1e9b7cd3e9e3a45d41ec8a961278be2701ce071317d4832d3bca'
    swarm_hash = post_ret['results']
    query_ret = RPC_Curl.download_string(swarm_hash)
    print(query_ret)
def verify_hashmodel(model_name):
    '''
    Verify model hash value by querying blockchain
    Args:
        model_name: model file
    Returns:
        Verified result: True or False
    '''
    # 1) Load model from file
    ls_time_exec = []
    start_time = time.time()
    model = ModelUtils.load_model(model_name)
    ls_time_exec.append(format(time.time() - start_time, '.3f'))

    # 2) Calculate hash value of model
    start_time = time.time()
    hash_value = ModelUtils.hash_model(model)
    ls_time_exec.append(format(time.time() - start_time, '.3f'))
    model_hash = {}
    model_hash[model_name] = str(hash_value)

    # 3) Read token data using call
    query_json = {}
    query_json['data'] = '"' + model_name + '"'
    start_time = time.time()
    query_ret = Tender_RPC.abci_query(query_json)
    ls_time_exec.append(format(time.time() - start_time, '.3f'))

    # -------- parse value from response and display it ------------
    key_str = query_ret['result']['response']['key']
    value_str = query_ret['result']['response']['value']
    logger.info("Fetched model hash value:")
    logger.info("model: {}".format(TypesUtil.base64_to_ascii(key_str)))
    # fix: identity comparison with None (was `value_str != None`)
    if value_str is not None:
        query_hash_value = TypesUtil.hex_to_string(TypesUtil.base64_to_ascii(value_str))
    else:
        query_hash_value = ''
    logger.info("value: {}".format(query_hash_value))

    # Prepare log message
    str_time_exec = " ".join(ls_time_exec)
    FileUtil.save_testlog('test_results', 'exec_verify_hashmodel_tendermint.log', str_time_exec)

    # 4) return verify hash model result
    return model_hash[model_name] == str(query_hash_value)
def negotiateService():
    """Negotiate a service deal: withdraw (op_state==2), deposit (op_state==1) or provider confirm (other)."""
    # Decode the JSON request body.
    body = TypesUtil.string_to_json(TypesUtil.bytes_to_string(request.data))
    client_addr = body['client_addr']
    op_state = body['op_state']
    time_currency = body['time_currency']
    # Route the request by operation code.
    if op_state == 2:
        mySrvExchange.recipient_withdraw(client_addr)
    elif op_state == 1:
        mySrvExchange.recipient_deposit(client_addr, time_currency)
    else:
        mySrvExchange.provider_confirm(client_addr)
    return jsonify({'negotiateService': 'Succeed'}), 201
def getAccount():
    """Return account data (uid, balance, status) for a client address, logging the contract-call latency."""
    # Decode the JSON request body.
    body = TypesUtil.string_to_json(TypesUtil.bytes_to_string(request.data))
    client_addr = body['client_addr']

    # Time the contract query and append the latency (ms) to the test log.
    start_time = time.time()
    service_data = mySrvExchange.getAccount(client_addr)
    elapsed = time.time() - start_time
    FileUtil.save_testlog('test_results', 'exec_getAccount.log', format(elapsed * 1000, '.3f'))

    # Shape the on-chain tuple into the JSON response payload.
    account = {
        'account': client_addr,
        'uid': service_data[0],
        'balance': service_data[1],
        'status': service_data[2],
    }
    return jsonify({'result': 'Succeed', 'data': account}), 201
def verify_indexToken(str_index, filepath):
    '''
    Verify an index token by comparing the on-chain hash with the hash of the local index file.

    Args:
        str_index: index key used to fetch the token from the contract
        filepath: local index file whose extracted data is hashed
    Returns:
        True if the recomputed hash equals the token's stored hash (token_data[1])
    '''
    # Define ls_time_exec to save executing time to log
    ls_time_exec = []

    # mark the start time
    start_time = time.time()
    # 1) read index data in contract (fix: dropped stray trailing semicolon)
    token_data = mytoken.getIndexToken(str_index)
    #print(token_data)
    # calculate computational cost
    exec_time = time.time() - start_time
    ls_time_exec.append(format(exec_time * 1000, '.3f'))
    print("Execution time of getIndexToken is:%2.6f" % (exec_time))

    # mark the start time
    start_time = time.time()
    # 2) extract data from index file
    indexData = IndexPolicy.ExtractData(filepath)
    str_value = str(indexData)
    exec_time = time.time() - start_time
    ls_time_exec.append(format(exec_time * 1000, '.3f'))
    print("Execution time of extract Index is:%2.6f" % (exec_time))

    # mark the start time
    start_time = time.time()
    # 3) calculate hash value of str_value
    # transfer string data to bytes block (fix: dropped stray trailing semicolon)
    bytes_block = TypesUtil.string_to_bytes(str_value)
    hash_value = Crypto_Hash.generate_hash(bytes_block)
    # compare recomputed hash against the hash recorded in the token
    ret_indexAuth = (str(hash_value) == token_data[1])
    exec_time = time.time() - start_time
    ls_time_exec.append(format(exec_time * 1000, '.3f'))
    print("Execution time of verifyIndex is:%2.6f" % (exec_time))

    #transfer list to string
    str_time_exec = " ".join(ls_time_exec)
    #print(str_time_exec)
    FileUtil.AddLine('exec_time_authIndex.log', str_time_exec)

    #return index authentication result
    return ret_indexAuth
def verify_hashmodel(model_name, target_address="0.0.0.0:8080"):
    '''
    Verify model hash value by querying blockchain
    Args:
        model_name: model file
        target_address: host:port of the microchain node to query
    Returns:
        Verified result: True or False
    '''
    timings = []

    # 1) Load model from file
    t0 = time.time()
    model = ModelUtils.load_model(model_name)
    timings.append(format(time.time() - t0, '.3f'))

    # 2) Calculate hash value of model
    t0 = time.time()
    hash_value = ModelUtils.hash_model(model)
    timings.append(format(time.time() - t0, '.3f'))
    model_hash = {model_name: str(hash_value)}

    # 3) Query the blockchain for the recorded hash of this model
    query_json = {model_name: str(hash_value)}
    # print(query_json)
    t0 = time.time()
    query_ret = Micro_RPC.tx_query(target_address, query_json)
    timings.append(format(time.time() - t0, '.3f'))

    # -------- parse value from response and display it ------------
    verify_result = False
    # print(query_ret)
    logger.info("Fetched model hash value:")
    if query_ret != {}:
        tx_json = TypesUtil.string_to_json(query_ret)
        for _name, _value in tx_json.items():
            logger.info("model: {}".format(_name))
            logger.info("value: {}".format(_value))
        verify_result = True

    # Persist the three timing samples (load, hash, query) to the test log.
    FileUtil.save_testlog('test_results', 'exec_verify_hashmodel_microchain.log', " ".join(timings))

    # 4) return verify hash model result
    return verify_result
def is_access_valid(token_data, acess_args=''): ret = False #print(token_data) #query access right associated to role from local database #rule_entry=ABACRuleManager.select_ByName(path_db, 'rule2') #print(rule_entry) #Find rule associated to resource field_data = {} field_data['AttrResource'] = str(acess_args['url_rule']) rules_entry = ABACRuleManager.select_ByFieldname(path_db, field_data) if (len(rules_entry) > 0): #print(rules_entry) token_attribute = TypesUtil.string_to_json(token_data['attribute']) #print(token_attribute['attrUser'][0]) #print(token_attribute) for rule_entry in rules_entry: json_env = TypesUtil.string_to_json( rule_entry['AttrEnvironment']) #print(json_env['value']) if (rule_entry['AttrUser'] in token_attribute['attrUser'] and acess_args['method'] in token_attribute['attrAction'] and str(acess_args['url_rule']) in token_attribute['attrResource'] and ABACPolicy.is_condition_valid(json_env)): #print(token_attribute['']) ret = True break '''print(rule_entry['AttrUser'] in token_attribute['attrUser']) print(acess_args['method'] in token_attribute['attrAction']) print(str(acess_args['url_rule']) in token_attribute['attrResource']) print(ABACPolicy.is_condition_valid(json_env))''' return ret
def is_access_valid(token_data, acess_args=''):
    """Validate a capability token's authorization against the requested method, resource and conditions."""
    #token_authorization = token_data[2][1]
    # The 'authorization' field is a JSON string describing one access right.
    grant = TypesUtil.string_to_json(token_data['authorization'])
    #print(grant)
    # Guard clauses preserve the original short-circuit order: action, then resource, then condition.
    if grant['action'] != acess_args['method']:
        return False
    if grant['resource'] != str(acess_args['url_rule']):
        return False
    return bool(CapPolicy.is_condition_valid(grant['conditions']))
def test_IndexAuth(): addr_list = './addr_list.json' #set sample record record_block={} record_block['id']='1' #record_block['value']='samuelxu' #extract data from index file filepath = './features/0_2_person1/13.2.52.txt' filepath0 = './features/0_2_person1/13.4.53.txt' indexData=IndexPolicy.ExtractData(filepath) record_block['value']=str(indexData) #1) read token data token_data=mytoken.getIndexToken(record_block['id']) print(token_data) node_data=mytoken.getAuthorizedNodes(); print(node_data) json_token = IndexPolicy.get_indexToken(record_block['id']) print(json_token) #2) get hash value for index # transfer string data to bytes block bytes_block = TypesUtil.string_to_bytes(record_block['value']) hash_value = Crypto_Hash.generate_hash(bytes_block) hash_str = str(hash_value) '''print(hash_value) print(hash_str) print(token_data[1])''' #3) set index token #mytoken.setIndexToken(record_block['id'], hash_str); #4) set authrozied nodes node_address = IndexToken.getAddress('TKB1_node1', addr_list) #mytoken.addAuthorizedNodes(node_address) #mytoken.removeAuthorizedNodes(node_address) #5) verify hash #hash_token = token_data[1] #print(Crypto_Hash.verify_hash(hash_value, bytes_block)) print(IndexPolicy.verify_indexToken(record_block['id'],filepath))
def upload_data():
    """Accept a JSON body with a 'data' field and push it to swarm via the curl RPC helper."""
    # Decode the request body and pull out the payload to store.
    raw = TypesUtil.bytes_to_string(request.data)
    tx_json = json.loads(raw)['data']
    if tx_json == {}:
        abort(401, {'error': 'No parameter data'})

    # -------------- call curl API to send data ------------------
    post_ret = RPC_Curl.upload_string(tx_json)

    # Report the swarm hash on success, an empty string otherwise.
    response = {'data': post_ret['results'] if post_ret['status'] == 200 else ''}
    return jsonify(response), 200
def is_access_valid(token_data, acess_args=''):
    """Check the token's role against the RBAC access rights stored in the local database."""
    # Fetch the role's access-right rows from the local database.
    user_access = RolesManager.select_ByName(path_db, token_data['role'])
    #print(user_access)
    # AccessRight is a JSON-list string of {action, resource, conditions} grants.
    grants = TypesUtil.string_to_jsonlist(user_access[0]['AccessRight'])
    #print(grants)
    # Access is valid when any grant matches method + resource and its condition holds.
    return any(
        grant['action'] == acess_args['method']
        and grant['resource'] == str(acess_args['url_rule'])
        and RBACPolicy.is_condition_valid(grant['conditions'])
        for grant in grants
    )
def download_data():
    """Accept a JSON body with a swarm 'hash' and return the downloaded string content."""
    # Decode the request body.
    raw = TypesUtil.bytes_to_string(request.data)
    tx_json = json.loads(raw)
    if tx_json == {}:
        abort(401, {'error': 'No parameter data'})

    # -------------- call curl API to query data ------------------
    swarm_hash = tx_json['hash']
    query_ret = RPC_Curl.download_string(swarm_hash)

    # Empty data field signals a failed fetch.
    result = query_ret['results'] if query_ret['status'] == 200 else ''
    return jsonify({'data': result}), 200
def download_file():
    """Download a swarm file by hash and return its content, or a 402 error JSON on failure.

    Expects a JSON body with 'hash', 'file_name' and 'download_file' (local target path).
    """
    # parse data from request.data
    req_data = TypesUtil.bytes_to_string(request.data)
    tx_json = json.loads(req_data)
    if tx_json == {}:
        abort(401, {'error': 'No parameter data'})

    # -------------- call curl API to query data ------------------
    swarm_hash = tx_json['hash']
    file_name = tx_json['file_name']
    # fix: local no longer shadows this view function's own name (was `download_file = ...`)
    target_file = tx_json['download_file']
    query_ret = RPC_Curl.download_file(swarm_hash, file_name, target_file)

    response = {}
    # build response given query result
    if query_ret['status'] == 200:
        # response['data'] = query_ret['results']
        return query_ret['content'], 200
    else:
        response['error'] = 'Cannot download file: {}'.format(file_name)
        return jsonify(response), 402
if(test_fun == 0): #==================== test upload data ========================= if(kv_mode==0): # ----------------- 1) value -------------- # tx_size = 128 # tx_data = TypesUtil.string_to_hex(os.urandom(tx_size)) tx_data = "This is samuel test message!" else: # ----------------- 2) key:value -------------- json_value={} json_value['id']='1' json_value['name']="samuel_xu" json_value['age']=28 tx_data = TypesUtil.json_to_string(json_value) tx_json['data']=tx_data post_ret = test_upload_data(target_address, tx_json) print(post_ret) #==================== test download data ========================= # swarm_hash = '4c9293963f4c1e9b7cd3e9e3a45d41ec8a961278be2701ce071317d4832d3bca' swarm_hash = post_ret['data'] query_ret = test_download_data(target_address,swarm_hash) print(query_ret) elif(test_fun == 1): #==================== test upload file ========================= if(op_status==0): file_name = "test_data.txt" else:
print(json_data['resource']) print(json_data['action']) print(json_data['conditions'])''' #Send transact #mytoken.initAttributeToken(accountAddr); #mytoken.setAttributeToken_isValid(accountAddr, True) #set issue date and expired date nowtime = datetime.datetime.now() #calculate issue_time and expire_time issue_time = DatetimeUtil.datetime_timestamp(nowtime) duration = DatetimeUtil.datetime_duration(0, 1, 0, 0) expire_time = DatetimeUtil.datetime_timestamp(nowtime + duration) #mytoken.setAttributeToken_expireddate(accountAddr, issue_time, expire_time) #set delegation right #mytoken.setAttributeToken_delegateDepth(accountAddr, 4) #mytoken.setAttributeToken_delegatee(accountAddr, '0x9c2da23272c8fec791c54febd0507fb519730cee') #mytoken.setAttributeToken_revokeDelegate(accountAddr, '0x9c2da23272c8fec791c54febd0507fb519730cee') #set attribute attribute = generateAttribute() str_attribute = TypesUtil.json_to_string(attribute) #mytoken.setAttributeToken_Attribute(accountAddr, str_attribute) #read attribute '''read_attr=TypesUtil.string_to_json(token_data[2][1]) print(read_attr['attrEnvironment'][1]['value'])''' pass
def is_valid_access_request(req_args):
    """Full CapAC access check for an incoming request: fetch token, validate token, validate access.

    req_args: Flask request object; reads json['host_address'], json['url_rule'] and .method.
    Returns True only when both the token and the requested access are valid.
    """
    #Get account address
    addr_client = req_args.json['host_address']
    url_rule = req_args.json['url_rule']

    #Define ls_time_exec to save executing time to log
    ls_time_exec = []

    # define branch control flag
    query_src = 0  # smart contract:0, local cache:1
    is_cachetoken = 0  # cache data:1, not cache data:0

    #get token data
    start_time = time.time()
    if (query_src == 0):
        # ----------a) query token from smart contract ------------
        token_data = CapPolicy.get_token(addr_client)
        #print(token_data)
        if (is_cachetoken == 1):
            # 2) Save token data to local token.dat
            FileUtil.AddLine('ACtoken.dat', TypesUtil.json_to_string(token_data))
    else:
        # ----------b) read authToken from local cached file ------------
        read_token = FileUtil.ReadLines('ACtoken.dat')
        token_data = TypesUtil.string_to_json(read_token[0])
        #print(token_data)
    exec_time = time.time() - start_time
    ls_time_exec.append(format(exec_time * 1000, '.3f'))
    print("Execution time of get_token is:%2.6f" % (exec_time))

    #extract access action from request
    access_data = {}
    access_data['url_rule'] = url_rule
    access_data['method'] = req_args.method
    #print(access_data)

    # validate the token itself (timed); bail out early on failure
    start_time = time.time()
    if (not CapPolicy.is_token_valid(token_data)):
        print('token valid fail')
        return False
    exec_time = time.time() - start_time
    ls_time_exec.append(format(exec_time * 1000, '.3f'))
    print("Execution time of is_token_valid is:%2.6f" % (exec_time))

    # validate the requested access against the token (timed); bail out on failure
    start_time = time.time()
    if (not CapPolicy.is_access_valid(token_data, access_data)):
        print('access valid fail')
        return False
    exec_time = time.time() - start_time
    ls_time_exec.append(format(exec_time * 1000, '.3f'))
    print("Execution time of is_access_valid is:%2.6f" % (exec_time))

    #transfer list to string
    str_time_exec = " ".join(ls_time_exec)
    #print(str_time_exec)
    FileUtil.AddLine('capac_exec_time_server.log', str_time_exec)

    return True
def verify_AuthToken(req_args):
    """Authenticate a client by comparing its virtual-zone ID against the host's (VZone membership check).

    req_args: Flask request object; reads json['host_address'].
    Returns True when host and client report the same VZoneID.
    """
    # extract client address from req_args
    #addr_client = req_args['host_address']
    addr_client = req_args.json['host_address']
    #print(addr_client)

    # Define ls_time_exec to save executing time to log
    ls_time_exec = []

    # define branch control flag
    query_src = 0  # smart contract:0, local cache:1
    is_cachetoken = 0  # cache data:1, not cache data:0

    # mark the start time
    start_time = time.time()

    if (query_src == 0):
        # ----------a) query token from smart contract ------------
        # 1) get host Vnode data in contract
        accounts = myAuthToken.getAccounts()
        json_VNode_host = AuthPolicy.get_VNodeInfo(accounts[0])
        #2) get client Vnode in contract
        json_VNode_client = AuthPolicy.get_VNodeInfo(addr_client)
        #print(json_VNode_host)
        #print(json_VNode_client)
        if (is_cachetoken == 1):
            json_authToken = {}
            json_authToken['host'] = json_VNode_host
            json_authToken['client'] = json_VNode_client
            #print(json_authToken)
            # 2) Save token data to local token.dat
            FileUtil.AddLine('authToken.dat', TypesUtil.json_to_string(json_authToken))
    else:
        # ----------b) read authToken from local cached file ------------
        # 3) read token from local data, low overload
        read_token = FileUtil.ReadLines('authToken.dat')
        token_data = TypesUtil.string_to_json(read_token[0])
        json_VNode_host = token_data['host']
        json_VNode_client = token_data['client']

    print("localhost: %s | client: %s" % (json_VNode_host, json_VNode_client))

    #3) authicate identity based on token
    # compare: same virtual zone ID means the client is authenticated
    ret_indexAuth = (json_VNode_host['VZoneID'] == json_VNode_client['VZoneID'])

    # calculate computational cost
    exec_time = time.time() - start_time
    ls_time_exec.append(format(exec_time * 1000, '.3f'))
    print("Execution time of %s authentication is:%2.6f" % (addr_client, exec_time))

    #transfer list to string
    str_time_exec = " ".join(ls_time_exec)
    #print(str_time_exec)
    FileUtil.AddLine('auth_exec_time_server.log', str_time_exec)

    #return index authentication result
    return ret_indexAuth