Example #1
def getService():

    start_time = time.time()
    service_data = mySrvExchange.getService()
    exec_time = time.time() - start_time
    FileUtil.save_testlog('test_results', 'exec_getService.log',
                          format(exec_time * 1000, '.3f'))

    json_data = {}

    json_data['dealer'] = {}
    json_data['dealer']['uid'] = service_data[0]
    json_data['dealer']['balance'] = service_data[1]

    json_data['provider'] = {}
    json_data['provider']['vid'] = service_data[2]
    json_data['provider']['serviceinfo'] = service_data[3]
    json_data['provider']['status'] = service_data[4]

    json_data['recipient'] = {}
    json_data['recipient']['vid'] = service_data[5]
    json_data['recipient']['serviceinfo'] = service_data[6]
    json_data['recipient']['status'] = service_data[7]

    return jsonify({'result': 'Succeed', 'data': json_data}), 201
Example #2
def getBroker():

    start_time = time.time()
    publisher_info = mySrvExchange.getPublisher()
    subscriber_info = mySrvExchange.getSubscriber()
    exec_time = time.time() - start_time
    FileUtil.save_testlog('test_results', 'exec_getBroker.log',
                          format(exec_time * 1000, '.3f'))

    host_account = mySrvExchange.getHostAccounts()[0]
    host_balance = mySrvExchange.getHostBalance(host_account)

    json_data = {}

    json_data['host'] = {}
    json_data['host']['account'] = host_account
    json_data['host']['balance'] = format(host_balance, '.3f')

    json_data['publisher'] = {}
    json_data['publisher']['vid'] = publisher_info[0]
    json_data['publisher']['zid'] = publisher_info[1]
    json_data['publisher']['status'] = publisher_info[2]
    json_data['publisher']['balance'] = publisher_info[3]
    json_data['publisher']['txs'] = publisher_info[4]

    json_data['subscriber'] = {}
    json_data['subscriber']['vid'] = subscriber_info[0]
    json_data['subscriber']['zid'] = subscriber_info[1]
    json_data['subscriber']['status'] = subscriber_info[2]
    json_data['subscriber']['balance'] = subscriber_info[3]
    json_data['subscriber']['txs'] = subscriber_info[4]

    return jsonify({'result': 'Succeed', 'data': json_data}), 201
Example #3
    def tx_evaluate(model_name):
        '''
        Launch tx and evaluate tx committed time

        Args:
            model_name: model file
        Returns:
            tx committed result
        '''
        # 1) Load model from file
        model = ModelUtils.load_model(model_name)

        # 2) Calculate hash value for model
        hash_value = ModelUtils.hash_model(model)

        # 3) Evaluate tx committed time
        start_time = time.time()
        logger.info("tx hashed model: {} to blockchain...\n".format(model_name))
        # -------- prepare parameter for tx ------------
        tx_json = {}
        key_str = model_name
        value_str = TypesUtil.string_to_hex(hash_value)
        tx_data = key_str + "=" + value_str
        # --------- build parameter string: tx=? --------
        tx_json['tx'] = '"' + tx_data + '"'
        tx_ret = Tender_RPC.broadcast_tx_commit(tx_json)
        exec_time = time.time() - start_time
        logger.info("tx committed time: {:.3f}\n".format(exec_time))
        FileUtil.save_testlog('test_results', 'exec_tx_commit_tendermint.log', format(exec_time, '.3f'))

        return tx_ret
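
A minimal driver sketch for the helper above, assuming tx_evaluate is callable at this level (e.g. exposed as a static method) and that Tender_RPC already points at a running Tendermint endpoint; the model file name is borrowed from Example #4 and is otherwise an assumption:

# Hypothetical usage sketch -- everything except tx_evaluate itself is an assumption.
tx_ret = tx_evaluate('mnist_cnn.pt')
print("tx committed:", tx_ret)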
Example #4
def test_maskedModel(args):
    use_cuda = args.cuda and torch.cuda.is_available()

    torch.manual_seed(args.seed)

    device = torch.device("cuda" if use_cuda else "cpu")
    kwargs = {"num_workers": 1, "pin_memory": True} if use_cuda else {}

    logger.info("test_loader setup...\n")
    test_loader = torch.utils.data.DataLoader(
        datasets.MNIST(
            root="./data",
            train=False,
            download=True,
            transform=transforms.Compose([
                transforms.ToTensor(),
                transforms.Normalize((0.1307, ), (0.3081, ))
            ]),
        ),
        batch_size=args.test_batch_size,
        shuffle=True,
        **kwargs,
    )

    model = ModelUtils.load_model("mnist_cnn.pt", False)

    for i in range(args.tx_round):
        if (args.mask_model == 0):
            # test mask input
            logger.info("Test run:{}".format(i + 1))
            start_time = time.time()
            masked_model = ModelUtils.mask_model(model, 0.1)
            str_time_exec = format((time.time() - start_time) * 1000, '.3f')
            FileUtil.save_testlog('test_results', 'mask_model.log',
                                  str_time_exec)

            if (args.eval_model):
                logger.info("test masked module...\n")
                ModelUtils.evaluate_model(masked_model, device, test_loader)

                unmask_model = ModelUtils.mask_model(masked_model, -0.1)

                logger.info("test unmasked module...\n")
                ModelUtils.evaluate_model(unmask_model, device, test_loader)
        else:
            # test sum masked value
            logger.info("Test run:{}".format(i + 1))
            model_list = []
            for _ in range(1, 21):
                model_list.append(model)

            start_time = time.time()
            fedavg_model = ModelUtils.FedAvg_model(model_list)
            str_time_exec = format((time.time() - start_time) * 1000, '.3f')
            FileUtil.save_testlog('test_results', 'sum_model.log',
                                  str_time_exec)

            if (args.eval_model):
                logger.info("test fedavg_model module...\n")
                ModelUtils.evaluate_model(fedavg_model, device, test_loader)
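
test_maskedModel only reads a handful of attributes from args; below is a minimal sketch of an argument object that satisfies it (the field names are taken from the code above, the values are illustrative assumptions):

import argparse

# Hypothetical arguments -- only the fields read by test_maskedModel.
args = argparse.Namespace(
    cuda=False,          # run on CPU
    seed=1,              # passed to torch.manual_seed
    test_batch_size=64,  # DataLoader batch size
    tx_round=1,          # number of timed runs
    mask_model=0,        # 0: mask/unmask path, otherwise FedAvg path
    eval_model=False,    # also evaluate accuracy on MNIST
)
test_maskedModel(args)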
Example #5
    def verify_hashmodel(model_name, target_address="0.0.0.0:8080"):
        '''
        Verify model hash value by querying blockchain

        Args:
            model_name: model file
        Returns:
            Verified result: True or False
        '''
        # 1) Load model from file
        ls_time_exec = []
        start_time = time.time()
        model = ModelUtils.load_model(model_name)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        # 2) Calculate hash value of model
        start_time = time.time()
        hash_value = ModelUtils.hash_model(model)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        model_hash = {}
        model_hash[model_name] = str(hash_value)

        # 3) Read token data using call
        query_json = {}
        value_str = str(hash_value)
        query_json[model_name] = value_str
        # print(query_json)
        start_time = time.time()
        query_ret = Micro_RPC.tx_query(target_address, query_json)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        # -------- parse value from response and display it ------------
        verify_result = False
        # print(query_ret)
        logger.info("Fetched model hash value:")
        if query_ret != {}:
            tx_json = TypesUtil.string_to_json(query_ret)
            for _name, _value in tx_json.items():
                logger.info("model: {}".format(_name))
                logger.info("value: {}".format(_value))
            verify_result = True

        # Prepare log message
        str_time_exec = " ".join(ls_time_exec)
        FileUtil.save_testlog('test_results', 'exec_verify_hashmodel_microchain.log', str_time_exec)

        # 4) Return verify hash model result
        return verify_result
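
A hedged usage sketch for the verifier above, assuming it is callable directly and a Microchain node is listening on the default address from the signature; the model file name is an assumption:

# Hypothetical usage sketch.
ok = verify_hashmodel('mnist_cnn.pt', target_address='0.0.0.0:8080')
print("hash verified:", ok)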
Example #6
def test_ServiceAccess(host_ip):
    # set host id address
    filepath = './features/0_2_person1/13.2.52.txt'
    filepath0 = './features/6_10_person/14.52.38.txt'

    addr_list = '../../node_data/addr_list.json'

    project_id = 1
    index_id = 1
    node_name = 'Desk_PI_Plus_Sam1'
    node_address = SrvAPI.getAddress(node_name, addr_list)

    # construct data argument
    data_args = {}
    data_args['project_id'] = project_id
    data_args['host_ip'] = host_ip
    data_args['index_id'] = index_id
    data_args['filepath'] = filepath
    data_args['host_address'] = node_address
    data_args['url_rule'] = '/BlendCAC/api/v1.0/getCapToken'
    # set project id
    project_id = 3

    start_time = time.time()

    #------------------ test data access service API ------------------
    test_search(data_args)

    end_time = time.time()
    exec_time = end_time - start_time

    time_exec = format(exec_time * 1000, '.3f')
    print("Execution time is:%2.6f" % (exec_time))

    FileUtil.AddLine('exec_time_client.log', time_exec)
Example #7
def test_func(param_str):
    filepath = './features/0_2_person1/13.2.52.txt'
    filepath0 = './features/0_2_person1/13.4.53.txt'

    addr_list = './addr_list.json'
    param_args = param_str.split(',')

    index_id = param_args[0]
    node_name = param_args[1]
    node_address = SrvAPI.getAddress(node_name, addr_list)

    # construct data argument
    #data_args = {}
    #data_args ['host_address'] = node_address
    #data_args ['url_rule'] = '/BlendCAC/api/v1.0/getCapToken'

    start_time = time.time()

    print(SrvAPI.getIndexToken(index_id))
    print(SrvAPI.getAuthorizedNodes())
    print(SrvAPI.verify_indexToken(index_id, filepath))

    print(SrvAPI.getCapToken(node_address))
    #print(SrvAPI.isValidAccess(req_args))

    end_time = time.time()

    #calculate exec time
    exec_time = end_time - start_time

    time_exec = format(exec_time * 1000, '.3f')
    print("Execution time is:%2.6f" % (exec_time))

    FileUtil.AddLine('exec_time_client.log', time_exec)
Example #8
    def verify_hashmodel(model_name):
        '''
        Verify model hash value by querying blockchain

        Args:
            model_name: model file
        Returns:
            Verified result: True or False
        '''
        # 1) Load model from file
        ls_time_exec = []
        start_time = time.time()
        model = ModelUtils.load_model(model_name)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        # 2) Calculate hash value of model
        start_time = time.time()
        hash_value = ModelUtils.hash_model(model)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        model_hash = {}
        model_hash[model_name] = str(hash_value)

        # 3) Read token data using call
        query_json = {}
        query_json['data'] = '"' + model_name + '"'
        start_time = time.time()
        query_ret = Tender_RPC.abci_query(query_json)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        # -------- parse value from response and display it ------------
        key_str = query_ret['result']['response']['key']
        value_str = query_ret['result']['response']['value']
        logger.info("Fetched model hash value:")
        logger.info("model: {}".format(TypesUtil.base64_to_ascii(key_str)))
        if value_str is not None:
            query_hash_value = TypesUtil.hex_to_string(TypesUtil.base64_to_ascii(value_str))
        else:
            query_hash_value = ''
        logger.info("value: {}".format(query_hash_value))

        # Prepare log message
        str_time_exec = " ".join(ls_time_exec)
        FileUtil.save_testlog('test_results', 'exec_verify_hashmodel_tendermint.log', str_time_exec)

        # 4) Return verify hash model result
        return model_hash[model_name] == str(query_hash_value)
Example #9
	def ExtractData(filepath):
		# Read the index file and split each line (without its trailing newline) on ';'
		ls_lines = FileUtil.ReadLines(filepath)
		ls_record = []
		for line in ls_lines:
			#print(line[:-1].split(';'))
			ls_record.append(line[:-1].split(';'))

		return ls_record
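
ExtractData expects a plain-text file whose lines are ';'-separated records; a short sketch of the assumed input format and the resulting structure, calling it through IndexPolicy as Example #13 does (the file contents shown are purely illustrative):

# Given a file whose lines look like (illustrative):
#   0;2;person1
#   1;5;person2
# ExtractData drops each trailing newline and splits on ';', returning:
#   [['0', '2', 'person1'], ['1', '5', 'person2']]
records = IndexPolicy.ExtractData('./features/0_2_person1/13.2.52.txt')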
Example #10
    def tx_evaluate(model_name, target_address="0.0.0.0:8080"):
        '''
        Launch tx and evaluate tx committed time

        Args:
            model_name: model file
        Returns:
            tx committed result
        '''
        # 1) Load model from file
        model = ModelUtils.load_model(model_name)

        # 2) Calculate hash value for model
        hash_value = ModelUtils.hash_model(model)

        # 3) Evaluate tx committed time
        tx_time = 0.0
        start_time = time.time()
        logger.info("tx hashed model: {} to blockchain...\n".format(model_name))
        # -------- prepare transaction data ------------
        tx_json = {}
        # value_str = TypesUtil.string_to_hex(hash_value)
        value_str = str(hash_value)
        tx_json[model_name] = value_str
        # print(tx_json)
        tx_ret = Micro_RPC.broadcast_tx_commit(target_address, tx_json)

        # Poll the ledger until the record shows up or the timeout expires
        while True:
            query_ret = Micro_RPC.tx_query(target_address, tx_json)

            if query_ret != {}:
                break
            time.sleep(0.5)
            tx_time += 0.5
            if tx_time >= TX_TIMEOUT:
                logger.info("Timeout, tx commit fail.")
                return False

        exec_time = time.time() - start_time
        logger.info("tx committed time: {:.3f}\n".format(exec_time))
        FileUtil.save_testlog('test_results', 'exec_tx_commit_microchain.log', format(exec_time, '.3f'))

        return tx_ret
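
This helper and the Ethereum variant in Example #14 poll the ledger until the record appears or TX_TIMEOUT elapses; TX_TIMEOUT is not defined in either snippet, so the line below is only an assumed module-level constant:

# Assumed module-level constant (seconds); tune it to the chain's commit latency.
TX_TIMEOUT = 30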
Example #11
    def is_valid_access_request(req_args):
        #Get account address
        accountAddr = RBACToken.getAddress(
            'sam_miner_win7_0', '../CapbilityToken/test/addr_list.json')

        #Define ls_time_exec to save executing time to log
        ls_time_exec = []

        #get token data
        start_time = time.time()

        # 1) get token from smart contract, high overload
        token_data = RBACPolicy.get_token(accountAddr)

        # 2) Save token data to local token.dat
        #FileUtil.AddLine('RBAC_token.dat', TypesUtil.json_to_string(token_data))

        # 3) read token from local data, low overload
        # read_token = FileUtil.ReadLines('RBAC_token.dat')
        # token_data = TypesUtil.string_to_json(read_token[0])
        #print(token_data)

        exec_time = time.time() - start_time
        ls_time_exec.append(format(exec_time * 1000, '.3f'))
        print("Execution time of get_token is:%2.6f" % (exec_time))

        #extract access action from request
        access_data = {}
        access_data['url_rule'] = req_args.url_rule
        access_data['method'] = req_args.method
        #print(access_data)

        start_time = time.time()
        if (not RBACPolicy.is_token_valid(token_data)):
            print('token valid fail')
            return False
        exec_time = time.time() - start_time
        ls_time_exec.append(format(exec_time * 1000, '.3f'))
        print("Execution time of is_token_valid is:%2.6f" % (exec_time))

        start_time = time.time()
        if (not RBACPolicy.is_access_valid(token_data, access_data)):
            print('access valid fail')
            return False
        exec_time = time.time() - start_time
        ls_time_exec.append(format(exec_time * 1000, '.3f'))
        print("Execution time of is_access_valid is:%2.6f" % (exec_time))

        #transfer list to string
        str_time_exec = " ".join(ls_time_exec)
        #print(str_time_exec)
        FileUtil.AddLine('exec_time_server.log', str_time_exec)

        return True
Example #12
def getAccount():
    # parse data from request.data
    req_data = TypesUtil.bytes_to_string(request.data)
    json_data = TypesUtil.string_to_json(req_data)

    client_addr = json_data['client_addr']

    start_time = time.time()
    service_data = mySrvExchange.getAccount(client_addr)
    exec_time = time.time() - start_time
    FileUtil.save_testlog('test_results', 'exec_getAccount.log',
                          format(exec_time * 1000, '.3f'))

    json_data = {}

    json_data['account'] = client_addr
    json_data['uid'] = service_data[0]
    json_data['balance'] = service_data[1]
    json_data['status'] = service_data[2]

    return jsonify({'result': 'Succeed', 'data': json_data}), 201
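
getAccount parses the raw request body as JSON and reads a single client_addr key; a hedged client-side sketch (the URL, port, and HTTP method are assumptions, only the key name comes from the handler above):

import json
import requests

# Hypothetical client call against the Flask service hosting getAccount.
payload = {'client_addr': '0x0000000000000000000000000000000000000000'}
resp = requests.get('http://127.0.0.1:8080/getAccount', data=json.dumps(payload))
print(resp.status_code, resp.json())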
Example #13
	def verify_indexToken(str_index, filepath):
		# Define ls_time_exec to save executing time to log
		ls_time_exec = []

		# mark the start time
		start_time = time.time()

		# 1) read index data in contract
		token_data = mytoken.getIndexToken(str_index)
		#print(token_data)

		# calculate computational cost
		exec_time = time.time() - start_time
		ls_time_exec.append(format(exec_time * 1000, '.3f'))
		print("Execution time of getIndexToken is:%2.6f" % (exec_time))

		# mark the start time
		start_time = time.time()

		# 2) extract data from index file
		indexData = IndexPolicy.ExtractData(filepath)
		str_value = str(indexData)

		# calculate computational cost
		exec_time = time.time() - start_time
		ls_time_exec.append(format(exec_time * 1000, '.3f'))
		print("Execution time of extract Index is:%2.6f" % (exec_time))

		# mark the start time
		start_time = time.time()

		# 3) calculate hash value of str_value
		# transfer string data to bytes block
		bytes_block = TypesUtil.string_to_bytes(str_value)
		hash_value = Crypto_Hash.generate_hash(bytes_block)

		# compare the hash against the on-chain token record
		ret_indexAuth = (str(hash_value) == token_data[1])

		# calculate computational cost
		exec_time = time.time() - start_time
		ls_time_exec.append(format(exec_time * 1000, '.3f'))
		print("Execution time of verifyIndex is:%2.6f" % (exec_time))

		# transfer list to string
		str_time_exec = " ".join(ls_time_exec)
		#print(str_time_exec)
		FileUtil.AddLine('exec_time_authIndex.log', str_time_exec)

		# return index authentication result
		return ret_indexAuth
Example #14
    def tx_evaluate(model_name):
        '''
        Launch tx and evaluate tx committed time

        Args:
            model_name: model file
        Returns:
            tx committed result
        '''
        # 1) Load model from file
        model = ModelUtils.load_model(model_name)
        # 2) Calculate hash value for model
        hash_value = ModelUtils.hash_model(model)

        # 3) Evaluate tx committed time
        token_data = mytoken.getIndexToken(model_name)
        original_id = token_data[0]

        logger.info("tx hashed model: {} to blockchain...\n".format(model_name))
        tx_time = 0.0
        start_time = time.time()
        mytoken.setIndexToken(model_name, str(hash_value))
        # Poll the contract until the token id advances or the timeout expires
        while True:
            token_data = mytoken.getIndexToken(model_name)
            new_id = token_data[0]
            if new_id > original_id:
                IndexToken.print_tokendata(token_data)
                break
            time.sleep(0.1)
            tx_time += 0.1
            if tx_time >= TX_TIMEOUT:
                logger.info("Timeout, tx commit fail.")
                return False

        exec_time = time.time() - start_time
        logger.info("tx committed time: {:.3f}\n".format(exec_time))
        FileUtil.save_testlog('test_results', 'exec_tx_commit_ethereum.log', format(exec_time, '.3f'))

        return True
Example #15
    def verify_hashmodel(model_name):
        '''
        Verify model hash value by querying blockchain

        Args:
            model_name: model file
        Returns:
            Verified result: True or False
        '''
        # 1) Load model from file
        ls_time_exec = []
        start_time = time.time()
        model = ModelUtils.load_model(model_name)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        # 2) Calculate hash value of model
        start_time = time.time()
        hash_value = ModelUtils.hash_model(model)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        model_hash = {}
        model_hash[model_name] = str(hash_value)

        # -------- display contract information -------------
        mytoken.Show_ContractInfo()

        # 3) Read token data using call
        start_time = time.time()
        token_data = mytoken.getIndexToken(model_name)
        IndexToken.print_tokendata(token_data)
        ls_time_exec.append(format(time.time() - start_time, '.3f'))

        # Prepare log message
        str_time_exec = " ".join(ls_time_exec)
        FileUtil.save_testlog('test_results', 'exec_verify_hashmodel_ethereum.log', str_time_exec)

        # 4) Return verify hash model result
        return model_hash[model_name] == token_data[1]
Example #16
def test_func(host_ip, index_id):
    filepath = './features/0_2_person1/13.2.52.txt'
    filepath0 = './features/0_2_person1/13.4.53.txt'

    start_time = time.time()

    print(WSClient.getIndexToken(host_ip, index_id))
    print(WSClient.getAuthorizedNodes(host_ip))
    print(WSClient.verify_indexToken(host_ip, index_id, filepath))

    end_time = time.time()

    #calculate exec time
    exec_time = end_time - start_time

    time_exec = format(exec_time * 1000, '.3f')
    print("Execution time is:%2.6f" % (exec_time))

    FileUtil.AddLine('exec_time_client.log', time_exec)
Example #17
def test_CapAC():

    #params = {'project_id':'2'}
    data_args = {'project_id': '2'}

    start_time = time.time()

    #print token_data
    #test_add(data_args)
    #test_update(data_args)
    #test_delete(data_args)
    test_search(data_args)

    end_time = time.time()
    exec_time = end_time - start_time

    time_exec = format(exec_time * 1000, '.3f')
    print("Execution time is:%2.6f" % (exec_time))

    FileUtil.AddLine('exec_time_client.log', time_exec)
    # print WSClient.Get_Datasets('http://128.226.78.217/test/api/v1.0/dt', data_args)
Example #18
def test_func(host_ip, node_name):
    addr_list = '../../node_data/addr_list.json'
    node_address = WSClient.getAddress(node_name, addr_list)

    # construct data argument
    data_args = {}
    data_args['host_ip'] = host_ip
    data_args['host_address'] = node_address
    data_args['url_rule'] = '/BlendCAC/api/v1.0/getCapToken'

    start_time = time.time()

    print(WSClient.getCapToken(data_args))
    print(WSClient.isValidAccess(data_args))

    end_time = time.time()

    #calculate exec time
    exec_time = end_time - start_time

    time_exec = format(exec_time * 1000, '.3f')
    print("Execution time is:%2.6f" % (exec_time))

    FileUtil.AddLine('exec_time_client.log', time_exec)
Example #19
    if (kv_mode == 0):
        # ----------------- 1) value --------------
        # tx_data = "samuel"
        tx_data = tx_value
    else:
        # ----------------- 2) key:value --------------
        json_value = {}
        json_value['name'] = "samuel_xu999"
        json_value['age'] = 36
        key_str = 'id'
        value_str = TypesUtil.json_to_tx(json_value)
        # print(value_str)

        # In tx_data, " must be replaced with ' for JSON values, e.g.: 'id={\'address\':\'hamilton\'}'
        tx_data = key_str + "=" + value_str

    # --------- build parameter string: tx=? --------
    tx_json['tx'] = '"' + tx_data + '"'
    # print(tx_json)

    start_time = time.time()
    # ---------------- deliver tx --------------
    test_tx_commit(tx_json)
    exec_time = time.time() - start_time
    print(format(exec_time * 1000, '.3f'))
    FileUtil.save_testlog('test_results', 'exec_time.log',
                          format(exec_time * 1000, '.3f'))

    pass
Example #20
    def is_valid_access_request(req_args):
        #Get account address
        addr_client = req_args.json['host_address']
        url_rule = req_args.json['url_rule']

        #Define ls_time_exec to save executing time to log
        ls_time_exec = []

        # define branch control flag
        query_src = 0  # smart contract:0, local cache:1
        is_cachetoken = 0  # cache data:1, not cache data:0

        #get token data
        start_time = time.time()

        if (query_src == 0):
            # ----------a) query token from smart contract ------------
            token_data = CapPolicy.get_token(addr_client)
            #print(token_data)

            if (is_cachetoken == 1):
                # 2) Save token data to local token.dat
                FileUtil.AddLine('ACtoken.dat',
                                 TypesUtil.json_to_string(token_data))
        else:
            # ----------b) read authToken from local cached file ------------
            read_token = FileUtil.ReadLines('ACtoken.dat')
            token_data = TypesUtil.string_to_json(read_token[0])
            #print(token_data)

        exec_time = time.time() - start_time
        ls_time_exec.append(format(exec_time * 1000, '.3f'))
        print("Execution time of get_token is:%2.6f" % (exec_time))

        #extract access action from request
        access_data = {}
        access_data['url_rule'] = url_rule
        access_data['method'] = req_args.method
        #print(access_data)

        start_time = time.time()
        if (not CapPolicy.is_token_valid(token_data)):
            print('token valid fail')
            return False
        exec_time = time.time() - start_time
        ls_time_exec.append(format(exec_time * 1000, '.3f'))
        print("Execution time of is_token_valid is:%2.6f" % (exec_time))

        start_time = time.time()
        if (not CapPolicy.is_access_valid(token_data, access_data)):
            print('access valid fail')
            return False
        exec_time = time.time() - start_time
        ls_time_exec.append(format(exec_time * 1000, '.3f'))
        print("Execution time of is_access_valid is:%2.6f" % (exec_time))

        #transfer list to string
        str_time_exec = " ".join(ls_time_exec)
        #print(str_time_exec)
        FileUtil.AddLine('capac_exec_time_server.log', str_time_exec)

        return True
Example #21
    def verify_AuthToken(req_args):
        # extract client address from req_args
        #addr_client = req_args['host_address']
        addr_client = req_args.json['host_address']
        #print(addr_client)

        # Define ls_time_exec to save executing time to log
        ls_time_exec = []

        # define branch control flag
        query_src = 0  # smart contract:0, local cache:1
        is_cachetoken = 0  # cache data:1, not cache data:0

        # mark the start time
        start_time = time.time()

        if (query_src == 0):
            # ----------a) query token from smart contract ------------
            # 1) get host Vnode data in contract
            accounts = myAuthToken.getAccounts()
            json_VNode_host = AuthPolicy.get_VNodeInfo(accounts[0])

            #2) get client Vnode in contract
            json_VNode_client = AuthPolicy.get_VNodeInfo(addr_client)
            #print(json_VNode_host)
            #print(json_VNode_client)

            if (is_cachetoken == 1):
                json_authToken = {}
                json_authToken['host'] = json_VNode_host
                json_authToken['client'] = json_VNode_client
                #print(json_authToken)

                # 2) Save token data to local token.dat
                FileUtil.AddLine('authToken.dat',
                                 TypesUtil.json_to_string(json_authToken))
        else:
            # ----------b) read authToken from local cached file ------------
            # 3) read token from local data, low overload
            read_token = FileUtil.ReadLines('authToken.dat')
            token_data = TypesUtil.string_to_json(read_token[0])
            json_VNode_host = token_data['host']
            json_VNode_client = token_data['client']

        print("localhost: %s | client: %s" %
              (json_VNode_host, json_VNode_client))

        #3) authenticate identity based on token
        # compare
        ret_indexAuth = (
            json_VNode_host['VZoneID'] == json_VNode_client['VZoneID'])

        # calculate computational cost
        exec_time = time.time() - start_time
        ls_time_exec.append(format(exec_time * 1000, '.3f'))
        print("Execution time of %s authentication is:%2.6f" %
              (addr_client, exec_time))

        #transfer list to string
        str_time_exec = " ".join(ls_time_exec)
        #print(str_time_exec)
        FileUtil.AddLine('auth_exec_time_server.log', str_time_exec)

        #return index authentication result
        return ret_indexAuth
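
The authentication decision above reduces to comparing the VZoneID field of the host and client VNode records; a minimal sketch of the shape this comparison assumes (only the VZoneID key comes from the code, the values are hypothetical):

# Hypothetical VNode records as returned by AuthPolicy.get_VNodeInfo().
json_VNode_host = {'VZoneID': 'zone-1'}
json_VNode_client = {'VZoneID': 'zone-1'}

# Same virtual zone -> authenticated.
assert json_VNode_host['VZoneID'] == json_VNode_client['VZoneID']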