def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Send a deploy built from CLI arguments; optionally wait until it is processed."""
    # All of these CLI options are forwarded verbatim to the client call.
    option_names = (
        "private_key", "algorithm",
        "payment", "payment_args", "payment_amount", "payment_hash",
        "payment_name", "payment_package_hash", "payment_package_name",
        "payment_entry_point", "payment_version",
        "session", "session_args", "session_hash", "session_name",
        "session_package_hash", "session_package_name",
        "session_entry_point", "session_version",
        "ttl_millis", "dependencies", "chain_name",
    )
    forwarded = {name: args.get(name) for name in option_names}
    # "from" is a Python keyword, so it maps to the from_addr parameter.
    deploy_hash = casperlabs_client.deploy(from_addr=args.get("from"), **forwarded)
    print(f"Success! Deploy {deploy_hash} deployed")
    if args.get("wait_for_processed", False):
        deploy_info = casperlabs_client.show_deploy(
            deploy_hash,
            full_view=False,
            wait_for_processed=True,
            timeout_seconds=args.get("timeout_seconds", consts.STATUS_TIMEOUT),
        )
        print(reformat.hexify(deploy_info))
def account_keys_directory():
    """Yield a temporary directory populated with one key pair per supported algorithm."""
    with tempfile.TemporaryDirectory() as directory:
        client = CasperLabsClient()
        # Prefix each file set with its algorithm name so the pairs don't collide.
        for algorithm in SUPPORTED_KEY_ALGORITHMS:
            client.keygen(directory, algorithm=algorithm, filename_prefix=algorithm)
        yield Path(directory)
def test_grpc_encryption_python_lib(encrypted_two_node_network):
    """Smoke-test TLS-encrypted gRPC by listing blocks through the Python client."""
    node = encrypted_two_node_network.docker_nodes[0]
    # $TAG_NAME set => running inside docker, so reach the node via container name.
    host = os.environ.get("TAG_NAME", None) and node.container_name or "localhost"
    certificate = node.config.tls_certificate_local_path()
    client = CasperLabsClient(
        host,
        node.grpc_external_docker_port,
        node.grpc_internal_docker_port,
        extract_common_name(certificate),
        certificate,
    )
    blocks = list(client.showBlocks(1))
    assert len(blocks) > 0
    logging.debug(f"{blocks}")
def account_keys_directory():
    """Yield a temporary directory holding key files for every supported algorithm.

    keygen writes fixed filenames on each call, so after every generation the
    files are renamed with the algorithm as a prefix to avoid being overwritten
    by the next iteration.
    """
    with tempfile.TemporaryDirectory() as directory:
        directory_path = Path(directory)
        # One client instance serves all keygen calls; the original rebuilt it
        # on every loop iteration for no benefit (the sibling fixture that uses
        # filename_prefix already hoists it).
        client = CasperLabsClient()
        for key_algorithm in SUPPORTED_KEY_ALGORITHMS:
            client.keygen(directory, algorithm=key_algorithm)
            for file_name in (
                PUBLIC_KEY_FILENAME,
                PUBLIC_KEY_HEX_FILENAME,
                PRIVATE_KEY_FILENAME,
            ):
                shutil.move(
                    directory_path / file_name,
                    directory_path / f"{key_algorithm}{file_name}",
                )
        yield directory_path
def test_basic_transfer_to_node_comm_failure(account_keys_directory, algorithm):
    """Build a transfer end-to-end and confirm the only failure is the connection."""
    client = CasperLabsClient()
    private_key_path = (
        account_keys_directory / f"{algorithm}{ACCOUNT_PRIVATE_KEY_FILENAME_SUFFIX}"
    )
    with pytest.raises(InternalError) as excinfo:
        client.transfer(
            from_addr="12" * 32,
            target_account="00" * 32,
            amount=10000,
            private_key=private_key_path,
        )
    # Any other error would mean the deploy build itself is broken.
    assert "failed to connect" in str(excinfo.value)
def __init__(self, node: "DockerNode"):  # NOQA
    """Wrap a CasperLabsClient pointed at the given docker node."""
    super(PythonClient, self).__init__()
    self.node = node
    self.abi = ABI
    # $TAG_NAME being set means we run inside docker (see docker_run_test.sh),
    # so the node is addressed by container name instead of localhost.
    host = os.environ.get("TAG_NAME", None) and node.container_name or "localhost"
    self.client = CasperLabsClient(
        host=host,
        internal_port=node.grpc_internal_docker_port,
        port=node.grpc_external_docker_port,
    )
    logging.info(
        f"PythonClient(host={self.client.host}, "
        f"port={node.grpc_external_docker_port}, "
        f"internal_port={node.grpc_internal_docker_port})"
    )
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Stream node events chosen by CLI filters, printing each in the requested format."""
    kwargs = {
        key: args.get(key)
        for key in (
            "all", "block_added", "block_finalized",
            "deploy_added", "deploy_discarded", "deploy_requeued",
            "deploy_processed", "deploy_finalized", "deploy_orphaned",
            "min_event_id", "max_event_id",
        )
    }
    # Hash filters arrive as single CLI values and must be wrapped into lists.
    kwargs["account_public_key_hashes"] = _add_to_list(args.get("account_hash"))
    kwargs["deploy_hashes"] = _add_to_list(args.get("deploy_hash"))
    output_format = args.get("format")
    for event in casperlabs_client.stream_events(**kwargs):
        if output_format == "binary":
            print(base64.b64encode(event.SerializeToString()).decode())
        elif output_format == "json":
            print(reformat.jsonify(event))
        else:
            print(reformat.hexify(event))
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Build an unsigned deploy from CLI arguments and write it out as binary."""
    passthrough = (
        "payment", "session", "public_key", "session_args", "payment_args",
        "payment_amount", "payment_hash", "payment_name",
        "payment_package_hash", "payment_package_name",
        "session_hash", "session_name",
        "session_package_hash", "session_package_name",
        "ttl_millis", "dependencies", "chain_name", "algorithm",
    )
    deploy = casperlabs_client.make_deploy(
        from_addr=args.get("from"),
        **{name: args.get(name) for name in passthrough},
    )
    data = deploy.SerializeToString()
    deploy_path = args.get("deploy_path")
    if deploy_path:
        io.write_binary_file(deploy_path, data)
    else:
        # No output path given: stream the serialized deploy to binary stdout.
        sys.stdout.buffer.write(data)
def test_not_only_one_target_arguments(target_account, target_purse):
    """transfer() must reject a call that gives both target_purse and target_account."""
    client = CasperLabsClient()
    with pytest.raises(InternalError):
        client.transfer(
            amount=100,
            target_purse=target_purse,
            target_account=target_account,
            payment_amount=100000,
        )
def method(casperlabs_client: CasperLabsClient, _: Dict):
    """Print every known peer of the node in hex form, followed by a total count."""
    count = 0
    for count, peer in enumerate(casperlabs_client.show_peers(), 1):
        print(f"------------- node {count} ---------------")
        print(reformat.hexify(peer))
        print("-----------------------------------------------------")
    # count keeps the last index assigned by enumerate; 0 when there were no peers.
    print(f"count: {count}")
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Fetch a deploy by hash and print it in hex form."""
    wait = args.get("wait_for_processed", False)
    timeout = args.get("timeout_seconds", consts.STATUS_TIMEOUT)
    response = casperlabs_client.show_deploy(
        args.get("hash"),
        full_view=False,
        wait_for_processed=wait,
        timeout_seconds=timeout,
    )
    print(reformat.hexify(response))
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Render the block DAG; print only the first chunk when no output file is given."""
    out_target = args.get("out")
    for chunk in casperlabs_client.visualize_dag(
        args.get("depth"),
        out_target,
        args.get("show_justification_lines"),
        args.get("stream"),
    ):
        # Without --out the generator output goes to the console; one chunk is enough.
        if not out_target:
            print(chunk)
            break
def test_sign_deploy(account_keys_directory, algorithm):
    """Signing via a PEM file and via a key-holder object must produce identical deploys."""
    private_key_pem_path, public_key_pem_path = key_paths(
        algorithm, account_keys_directory
    )
    client = CasperLabsClient()
    deploy = client.make_deploy(
        public_key=public_key_pem_path,
        session_name="contract_name",
        algorithm=algorithm,
        payment_amount=100000,
    )
    key_holder = key_holders.key_holder_object(
        algorithm, private_key_pem_path=private_key_pem_path
    )
    signed_by_pem = client.sign_deploy(
        private_key_pem_file=private_key_pem_path, algorithm=algorithm, deploy=deploy
    )
    signed_by_key_holder = client.sign_deploy(
        key_holder=key_holder, algorithm=algorithm, deploy=deploy
    )
    assert signed_by_pem == signed_by_key_holder
def method(casperlabs_client: CasperLabsClient, args: dict):
    """Transfer tokens between accounts/purses; optionally wait until processed."""
    deploy_hash = casperlabs_client.transfer(
        amount=args.get("amount"),
        target_account=args.get("target_account"),
        target_purse=args.get("target_purse"),
        source_purse=args.get("source_purse"),
        from_addr=args.get("from_addr"),
        private_key=args.get("private_key"),
        ttl_millis=args.get("ttl_millis"),
        dependencies=args.get("dependencies"),
        chain_name=args.get("chain_name"),
    )
    print(f"Success! Deploy {deploy_hash} deployed")
    if args.get("wait_for_processed", False):
        # Use the snake_case API, consistent with the deploy command handler
        # (showDeploy is the legacy camelCase alias of show_deploy).
        deploy_info = casperlabs_client.show_deploy(
            deploy_hash,
            full_view=False,
            wait_for_processed=True,
            timeout_seconds=args.get("timeout_seconds", consts.STATUS_TIMEOUT),
        )
        print(reformat.hexify(deploy_info))
def run(self, argv):
    """Parse argv and dispatch to the selected sub-command's handler function."""
    args = self.parser.parse_args(argv)
    client = CasperLabsClient(
        args.host,
        args.port,
        args.port_internal,
        args.node_id,
        args.certificate_file,
    )
    return args.function(client, args)
def __init__(self, node: "DockerNode"):  # NOQA
    """Wrap a CasperLabsClient for a docker node, enabling TLS when configured."""
    super(PythonClient, self).__init__()
    self.node = node
    self.abi = ABI
    # TLS material is only supplied when the node has gRPC encryption enabled.
    if node.config.grpc_encryption:
        certificate_file = node.config.tls_certificate_local_path()
        node_id = extract_common_name(certificate_file)
    else:
        certificate_file = None
        node_id = None
    self.client = CasperLabsClient(
        host=node.node_host,
        port_internal=node.grpc_internal_docker_port,
        port=node.grpc_external_docker_port,
        node_id=node_id,
        certificate_file=certificate_file,
    )
    logging.info(
        f"PythonClient(host={self.client.host}, "
        f"port={node.grpc_external_docker_port}, "
        f"port_internal={node.grpc_internal_docker_port})"
    )
def run(self, argv):
    """Parse argv into a plain dict and dispatch to the selected handler.

    A dict (rather than a namespace) lets handlers share the args.get(...)
    interface with library callers.
    """
    args = vars(self.parser.parse_args(argv))
    client = CasperLabsClient(
        args.get("host"),
        args.get("port"),
        args.get("port_internal"),
        args.get("node_id"),
        args.get("certificate_file"),
    )
    return args["function"](client, args)
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Compute an account hash from a public key PEM and emit it as hex text."""
    account_hash = casperlabs_client.account_hash(
        algorithm=args.get("algorithm"),
        public_key_pem_path=args.get("public_key"),
    )
    hex_bytes = account_hash.hex().encode("UTF-8")
    file_path = args.get("file_path")
    if file_path:
        io.write_binary_file(file_path, hex_bytes)
    else:
        # No file requested: write the hex bytes straight to binary stdout.
        sys.stdout.buffer.write(hex_bytes)
def __init__(self, node: "DockerNode"):  # NOQA
    """Wrap a CasperLabsClient for a docker node, enabling TLS when configured."""
    super(PythonClient, self).__init__()
    self.node = node
    self.abi = ABI
    # $TAG_NAME being set means we run inside docker (see docker_run_test.sh).
    host = os.environ.get("TAG_NAME", None) and node.container_name or "localhost"
    # TLS material is only supplied when the node has gRPC encryption enabled.
    if node.config.grpc_encryption:
        certificate_file = node.config.tls_certificate_local_path()
        node_id = extract_common_name(certificate_file)
    else:
        certificate_file = None
        node_id = None
    self.client = CasperLabsClient(
        host=host,
        internal_port=node.grpc_internal_docker_port,
        port=node.grpc_external_docker_port,
        node_id=node_id,
        certificate_file=certificate_file,
    )
    logging.info(
        f"PythonClient(host={self.client.host}, "
        f"port={node.grpc_external_docker_port}, "
        f"internal_port={node.grpc_internal_docker_port})"
    )
def get_client_contract_hash(client: casperlabs_client.CasperLabsClient,
                             account: Account, bhash: str,
                             contract_name: str) -> str:
    """Return the on-chain hash of a deployed client-side contract.

    :param client: A python client instance.
    :param account: Account associated with the contract.
    :param bhash: Hash of the block which processed the deploy.
    :param contract_name: Name of the smart contract (within the wasm blob).
    :returns: Hex-encoded contract hash.
    :raises ValueError: When no named key matches contract_name.
    """
    state = client.queryState(bhash, account.public_key, "", keyType="address")
    matches = (
        named_key.key.hash.hash.hex()
        for named_key in state.account.named_keys
        if named_key.name == contract_name
    )
    contract_hash = next(matches, None)
    if contract_hash is None:
        raise ValueError(
            f"{contract_name} contract hash could not be found on-chain")
    return contract_hash
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Sign a deploy read from a file or stdin and write the signed deploy out.

    The deploy is binary protobuf, so stdin must be read through
    sys.stdin.buffer and stdout written through sys.stdout.buffer; the text
    streams used previously would raise TypeError (writing bytes to a text
    stream) or feed str into ParseFromString.
    """
    private_key = args.get("private_key")
    algorithm = args.get("algorithm")
    deploy_path = args.get("deploy_path")
    signed_deploy_path = args.get("signed_deploy_path")
    deploy = None
    if not deploy_path:
        # No input file given: read the serialized deploy from binary stdin.
        deploy = consensus.Deploy()
        deploy.ParseFromString(sys.stdin.buffer.read())
    signed_deploy = casperlabs_client.sign_deploy(
        private_key_pem_file=private_key,
        algorithm=algorithm,
        deploy=deploy,
        deploy_file=deploy_path,
    )
    serialized_deploy = signed_deploy.SerializeToString()
    if signed_deploy_path is None:
        sys.stdout.buffer.write(serialized_deploy)
    else:
        io.write_binary_file(signed_deploy_path, serialized_deploy)
def get_valid_block_hash(casperlabs_client: CasperLabsClient):
    """Get a valid block hash (bytes) from the current hack/docker network."""
    blocks = list(casperlabs_client.show_blocks(depth=8))
    # Use the last block yielded within the requested depth window.
    return blocks[-1].summary.block_hash
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Print every deploy contained in the block identified by the given hash."""
    block_hash = args.get("hash")
    deploys = casperlabs_client.show_deploys(block_hash, full_view=False)
    io.print_blocks(deploys, element_name="deploy")
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Generate validator keys into the given directory and report where they went."""
    directory = Path(args.get("directory")).resolve()
    casperlabs_client.validator_keygen(directory)
    print(
        f"Keys successfully created in directory: {str(directory.absolute())}")
class PythonClient(CasperLabsClientBase, LoggingMixin):
    """Test-harness wrapper around CasperLabsClient for a single docker node.

    Unknown attribute lookups fall through to the wrapped client (see
    __getattr__), so tests written against the raw client API keep working.
    """

    def __init__(self, node: "DockerNode"):  # NOQA
        super(PythonClient, self).__init__()
        self.node = node
        self.abi = ABI
        host = node.node_host
        # TLS material is only filled in when gRPC encryption is configured.
        certificate_file = None
        node_id = None
        if self.node.config.grpc_encryption:
            certificate_file = self.node.config.tls_certificate_local_path()
            node_id = extract_common_name(certificate_file)
        self.client = CasperLabsClient(
            host=host,
            port_internal=self.node.grpc_internal_docker_port,
            port=self.node.grpc_external_docker_port,
            node_id=node_id,
            certificate_file=certificate_file,
        )
        logging.info(f"PythonClient(host={self.client.host}, "
                     f"port={self.node.grpc_external_docker_port}, "
                     f"port_internal={self.node.grpc_internal_docker_port})")

    def __getattr__(self, name):
        """
        Compatibility with the times when this class was derived from CasperLabsClient,
        some tests may need to call methods of self.client directly
        """
        return getattr(self.client, name)

    @property
    def client_type(self) -> str:
        # Identifies this wrapper flavor to the test framework.
        return "python"

    def deploy(
        self,
        from_address: str = None,
        gas_price: int = 1,
        session_contract: Optional[Union[str, Path]] = None,
        payment_contract: Optional[Union[str, Path]] = Contract.STANDARD_PAYMENT,
        private_key: Optional[str] = None,
        public_key: Optional[str] = None,
        session_args: list = None,
        payment_args: bytes = DEFAULT_PAYMENT_ABI,
        alt_session_path: Optional[Path] = None,
        alt_payment_path: Optional[Path] = None,
    ) -> str:
        """Send a deploy, defaulting keys and address to the node's test account."""
        if session_contract is None:
            raise Exception("session_contract is required.")
        public_key = public_key or self.node.test_account.public_key_path
        private_key = private_key or self.node.test_account.private_key_path
        address = from_address or self.node.from_address
        # dApp test framework will specify alternate location for non-framework contracts
        session_path = (alt_session_path
                        if alt_session_path else self.node.resources_folder)
        session_contract_path = session_path / session_contract
        payment_path = (alt_payment_path
                        if alt_payment_path else self.node.resources_folder)
        payment_contract_path = payment_path / payment_contract
        logging.info(f"PY_CLIENT.deploy(from_address={address}, "
                     f"gas_price={gas_price}, "
                     f"payment_contract={payment_contract_path}, "
                     f"session_contract={session_contract_path}, "
                     f"private_key={private_key}, "
                     f"public_key={public_key} ")
        return self.client.deploy(
            bytes.fromhex(address),
            gas_price,
            payment_contract_path,
            session_contract_path,
            public_key,
            private_key,
            session_args,
            payment_args,
        )

    def propose(self):
        """Ask the node to propose a block from queued deploys."""
        logging.info(f"PY_CLIENT.propose() for {self.client.host}")
        return self.client.propose()

    def query_state(self, block_hash: str, key: str, path: str, key_type: str):
        """Query global state at the given block via the wrapped client."""
        return self.client.queryState(block_hash, key, path, key_type)

    def show_block(self, block_hash: str):
        return self.client.showBlock(block_hash)

    def show_blocks(self, depth: int):
        return self.client.showBlocks(depth)

    def get_blocks_count(self, depth: int) -> int:
        # Materializes the block stream; fine for the small depths used in tests.
        return len(list(self.show_blocks(depth)))

    def show_deploys(self, block_hash: str):
        return self.client.showDeploys(block_hash)

    def show_deploy(self, deploy_hash: str):
        return self.client.showDeploy(deploy_hash)

    def propose_with_retry(self, max_attempts: int, retry_seconds: int) -> str:
        """Propose, retrying on InternalError up to max_attempts times.

        Re-raises the last InternalError once the attempts are exhausted.
        """
        attempt = 0
        while True:
            try:
                return self.propose()
            except InternalError as ex:
                if attempt < max_attempts:
                    self.logger.debug("Could not propose; retrying later.")
                    attempt += 1
                    time.sleep(retry_seconds)
                else:
                    self.logger.debug("Could not propose; no more retries!")
                    raise ex

    def deploy_and_propose(self, **deploy_kwargs) -> str:
        """Deploy then propose once; returns the new block hash as hex."""
        if "from_address" not in deploy_kwargs:
            deploy_kwargs["from_address"] = self.node.from_address
        self.deploy(**deploy_kwargs)
        propose_output = self.propose()
        block_hash = propose_output.block_hash.hex()
        logging.info(
            f"The block hash: {block_hash} generated for {self.node.container_name}"
        )
        return block_hash

    def deploy_and_propose_with_retry(self, max_attempts: int,
                                      retry_seconds: int,
                                      **deploy_kwargs) -> str:
        """Deploy then propose with retries; returns the resulting block hash."""
        self.deploy(**deploy_kwargs)
        block_hash = self.propose_with_retry(max_attempts, retry_seconds)
        logging.info(
            f"The block hash: {block_hash} generated for {self.node.container_name}"
        )
        if block_hash is None:
            raise Exception("No block_hash received from propose_with_retry")
        return block_hash
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Print the balance of an account at a given block."""
    address = args.get("address")
    block_hash = args.get("block_hash")
    print(casperlabs_client.balance(address, block_hash))
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Send a previously built deploy file to the node and report its hash."""
    deploy_hash = casperlabs_client.send_deploy(
        deploy_file=args.get("deploy_path"))
    print(f"Success! Deploy {deploy_hash} deployed")
class PythonClient(CasperLabsClient, LoggingMixin):
    """Test-harness client for a docker node, derived from CasperLabsClient."""

    def __init__(self, node: "DockerNode"):  # NOQA
        super(PythonClient, self).__init__()
        self.node = node
        self.abi = ABI
        # If $TAG_NAME is set it means we are running in docker, see docker_run_test.sh
        host = (os.environ.get("TAG_NAME", None) and self.node.container_name
                or "localhost")
        # TLS material is only filled in when gRPC encryption is configured.
        certificate_file = None
        node_id = None
        if self.node.config.grpc_encryption:
            certificate_file = self.node.config.tls_certificate_local_path()
            node_id = extract_common_name(certificate_file)
        self.client = CasperLabsClient(
            host=host,
            internal_port=self.node.grpc_internal_docker_port,
            port=self.node.grpc_external_docker_port,
            node_id=node_id,
            certificate_file=certificate_file,
        )
        logging.info(f"PythonClient(host={self.client.host}, "
                     f"port={self.node.grpc_external_docker_port}, "
                     f"internal_port={self.node.grpc_internal_docker_port})")

    @property
    def client_type(self) -> str:
        # Identifies this wrapper flavor to the test framework.
        return "python"

    def deploy(
        self,
        from_address: str = None,
        gas_price: int = 1,
        session_contract: Optional[Union[str, Path]] = None,
        payment_contract: Optional[Union[str, Path]] = Contract.STANDARD_PAYMENT,
        private_key: Optional[str] = None,
        public_key: Optional[str] = None,
        session_args: list = None,
        payment_args: list = MAX_PAYMENT_ABI,
    ) -> str:
        """Send a deploy, defaulting keys and address to the node's test account."""
        # NOTE(review): assert is stripped under python -O; a raise would be safer.
        assert session_contract is not None
        public_key = public_key or self.node.test_account.public_key_path
        private_key = private_key or self.node.test_account.private_key_path
        address = from_address or self.node.from_address
        resources_path = self.node.resources_folder
        session_contract_path = str(resources_path / session_contract)
        payment_contract_path = str(resources_path / payment_contract)
        logging.info(
            f"PY_CLIENT.deploy(from_address={address}, gas_price={gas_price}, "
            f"payment_contract={payment_contract_path}, session_contract={session_contract_path}, "
            f"private_key={private_key}, "
            f"public_key={public_key} ")
        return self.client.deploy(
            bytes.fromhex(address),
            gas_price,
            payment_contract_path,
            session_contract_path,
            public_key,
            private_key,
            session_args,
            payment_args,
        )

    def propose(self):
        """Ask the node to propose a block from queued deploys."""
        logging.info(f"PY_CLIENT.propose() for {self.client.host}")
        return self.client.propose()

    def query_state(self, block_hash: str, key: str, path: str, key_type: str):
        """Query global state at the given block via the wrapped client."""
        return self.client.queryState(block_hash, key, path, key_type)

    def show_block(self, block_hash: str):
        return self.client.showBlock(block_hash)

    def show_blocks(self, depth: int):
        return self.client.showBlocks(depth)

    def get_blocks_count(self, depth: int) -> int:
        # Materializes the block stream; fine for the small depths used in tests.
        return len(list(self.show_blocks(depth)))

    def show_deploys(self, block_hash: str):
        return self.client.showDeploys(block_hash)

    def show_deploy(self, deploy_hash: str):
        return self.client.showDeploy(deploy_hash)

    def propose_with_retry(self, max_attempts: int, retry_seconds: int) -> str:
        """Propose, retrying on InternalError up to max_attempts times.

        Re-raises the last InternalError once the attempts are exhausted.
        """
        attempt = 0
        while True:
            try:
                return self.propose()
            except InternalError as ex:
                if attempt < max_attempts:
                    self.logger.debug("Could not propose; retrying later.")
                    attempt += 1
                    time.sleep(retry_seconds)
                else:
                    self.logger.debug("Could not propose; no more retries!")
                    raise ex

    def deploy_and_propose(self, **deploy_kwargs) -> str:
        """Deploy then propose once; returns the new block hash as hex."""
        if "from_address" not in deploy_kwargs:
            deploy_kwargs["from_address"] = self.node.from_address
        self.deploy(**deploy_kwargs)
        propose_output = self.propose()
        block_hash = propose_output.block_hash.hex()
        logging.info(
            f"The block hash: {block_hash} generated for {self.node.container_name}"
        )
        return block_hash

    def deploy_and_propose_with_retry(self, max_attempts: int,
                                      retry_seconds: int,
                                      **deploy_kwargs) -> str:
        """Deploy then propose with retries; returns the resulting block hash."""
        self.deploy(**deploy_kwargs)
        block_hash = self.propose_with_retry(max_attempts, retry_seconds)
        logging.info(
            f"The block hash: {block_hash} generated for {self.node.container_name}"
        )
        if block_hash is None:
            raise Exception("No block_hash received from propose_with_retry")
        return block_hash
def method(casperlabs_client: CasperLabsClient, args: Dict):
    """Fetch a single block by hash and print its full view."""
    block = casperlabs_client.show_block(args.get("hash"), full_view=True)
    return io.print_block(block)
from casperlabs_client import (CasperLabsClient, CasperMessage_pb2_grpc, CasperMessage_pb2, casper_pb2_grpc, empty_pb2) import mock_server import grpc RESOURCES_PATH = "../../../resources/" def resource(file_name): return os.path.join(os.path.dirname(os.path.realpath(__file__)), RESOURCES_PATH, file_name) CONTRACT = resource("old_wasm/test_helloname.wasm") PAYMENT = resource("old_wasm/test_helloname.wasm") SESSION = resource("old_wasm/test_helloname.wasm") HASH = 'd9d087fe5d22dbfa1bacb57d6da8d509f7191a216cee6a971de32463ff0f284f' client = CasperLabsClient(port = mock_server.CL_GRPC_PORT_EXTERNAL) @pytest.fixture() def casper_service(request): server = grpc.server(futures.ThreadPoolExecutor(max_workers=1)) casper_pb2_grpc.add_CasperServiceServicer_to_server(mock_server.CasperServiceServicer(), server) port = '[::]:' + str(mock_server.CL_GRPC_PORT_EXTERNAL) server.add_insecure_port(port) server.start() request.addfinalizer(lambda: server.stop(0)) @pytest.fixture() def mock_server_setup(request): server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))