def stop(self): """Kill the process.""" if not self.process or self.is_stopping: return self.is_stopping = True logger.info(f"Stopping '{self.process_name}' process.") self._kill_process() self.is_stopping = False self.process = None
def send_transaction(self, txn: TransactionAPI) -> ReceiptAPI:
    """
    Broadcast a signed transaction and wait for its receipt.

    Args:
        txn (``TransactionAPI``): The signed transaction to send.

    Returns:
        :class:`~ape.api.ReceiptAPI`: The confirmed receipt.
    """
    txn_hash = self.web3.eth.send_raw_transaction(txn.serialize_transaction())

    # Prefer the transaction's own confirmation requirement; fall back
    # to the network-level default when the transaction leaves it unset.
    if txn.required_confirmations is None:
        req_confs = self.network.required_confirmations
    else:
        req_confs = txn.required_confirmations

    receipt = self.get_transaction(txn_hash.hex(), required_confirmations=req_confs)
    logger.info(f"Confirmed {receipt.txn_hash} (gas_used={receipt.gas_used})")
    self._try_track_receipt(receipt)
    return receipt
def compile(self, contract_filepaths: List[Path]) -> Dict[str, ContractType]:
    """
    Invoke :meth:`ape.ape.compiler.CompilerAPI.compile` for each of the given files.
    For example, use the `ape-solidity plugin <https://github.com/ApeWorX/ape-solidity>`__
    to compile ``'.sol'`` files.

    Raises:
        :class:`~ape.exceptions.CompilerError`: When there is no compiler found for the
          given extension as well as when there is a contract-type collision
          across compilers.

    Args:
        contract_filepaths (List[pathlib.Path]): The list of files to compile,
          as ``pathlib.Path`` objects.

    Returns:
        Dict[str, ``ContractType``]: A mapping of contract names to their type.
    """
    extensions = self._get_contract_extensions(contract_filepaths)
    compiled_types = {}
    for extension in extensions:
        # Filter out in-source cache files from dependencies.
        paths_to_compile = []
        for path in contract_filepaths:
            if path.suffix != extension:
                continue
            if ".cache" in [p.name for p in path.parents]:
                continue
            paths_to_compile.append(path)

        for path in paths_to_compile:
            contract_path = _get_contract_path(path, self.config_manager.contracts_folder)
            logger.info(f"Compiling '{contract_path}'.")

        compiled_contracts = self.registered_compilers[extension].compile(
            paths_to_compile, base_path=self.config_manager.contracts_folder
        )
        for contract_type in compiled_contracts:
            # A given contract name may only come from one compiler plugin.
            if contract_type.name in compiled_types:
                raise CompilerError(
                    "ContractType collision across compiler plugins "
                    f"with contract name: {contract_type.name}"
                )

            compiled_types[contract_type.name] = contract_type

    return compiled_types  # type: ignore
def await_confirmations(self) -> "ReceiptAPI":
    """
    Wait for a transaction to be considered confirmed.

    Returns:
        :class:`~ape.api.ReceiptAPI`: The receipt that is now confirmed.
    """
    # Wait for nonce from provider to increment.
    sender_nonce = self.provider.get_nonce(self.sender)
    while sender_nonce == self.nonce:  # type: ignore
        time.sleep(1)
        sender_nonce = self.provider.get_nonce(self.sender)

    if self.required_confirmations == 0:
        # The transaction might not yet be confirmed but
        # the user is aware of this. Or, this is a development environment.
        return self

    confirmations_occurred = self._confirmations_occurred
    if confirmations_occurred >= self.required_confirmations:
        return self

    # If we get here, that means the transaction has been recently submitted.
    log_message = f"Submitted {self.txn_hash}"
    if self._explorer:
        explorer_url = self._explorer.get_transaction_url(self.txn_hash)
        if explorer_url:
            log_message = f"{log_message}\n{self._explorer.name} URL: {explorer_url}"

    logger.info(log_message)

    with ConfirmationsProgressBar(self.required_confirmations) as progress_bar:
        while confirmations_occurred < self.required_confirmations:
            confirmations_occurred = self._confirmations_occurred
            progress_bar.confs = confirmations_occurred

            if confirmations_occurred == self.required_confirmations:
                break

            # BUG FIX: ``int(self._block_time / 2)`` is 0 for block times
            # under 2 seconds, which made this a busy-loop hammering the
            # provider. Poll at most once per second instead.
            time_to_sleep = max(int(self._block_time / 2), 1)
            time.sleep(time_to_sleep)

    return self
def start(self, timeout: int = 20):
    """
    Start the process and wait for its RPC to be ready.

    Args:
        timeout (int): Seconds to wait for the RPC endpoint before
          the timeout context raises.
    """
    if self.is_connected:
        # Something is already serving RPC — attach to it rather than
        # spawning a second process.
        logger.info(f"Connecting to existing '{self.process_name}' process.")
        self.process = None  # Not managing the process.
    else:
        logger.info(f"Starting '{self.process_name}' process.")
        if platform.uname().system == "Linux":
            pre_exec_fn = _linux_set_death_signal
        else:
            pre_exec_fn = None

        self.process = _popen(*self.build_command(), preexec_fn=pre_exec_fn)

    # Poll until the RPC endpoint answers or the timeout trips.
    with RPCTimeoutError(self, seconds=timeout) as _timeout:
        while not self.is_connected:
            time.sleep(0.1)
            _timeout.check()
def connect(self):
    """Launch geth and block until its RPC endpoint is responsive."""
    message = f"Starting geth with RPC address '{self._hostname}:{self._port}'."
    logger.info(message)
    self.start()
    self.wait_for_rpc(timeout=60)
def connect(self):
    """
    Connect web3 to the configured URI, spawning an ephemeral local geth
    process when nothing is listening and the target network is local.

    Raises:
        ProviderError: When no node is running on a non-local network, or
          when the connected node's chain ID does not match the network's.
        ConnectionError: When the URI is not a local host, or the spawned
          geth process never becomes reachable.
        GethNotInstalledError: When ``geth`` is not on ``PATH``.
    """
    self._web3 = Web3(HTTPProvider(self.uri))
    if not self._web3.isConnected():
        # Nothing is serving at self.uri. Only a local network may be
        # bootstrapped automatically; anything else must already be running.
        if self.network.name != LOCAL_NETWORK_NAME:
            raise ProviderError(
                f"When running on network '{self.network.name}', "
                f"the Geth plugin expects the Geth process to already "
                f"be running on '{self.uri}'."
            )

        # Start an ephemeral geth process.
        parsed_uri = urlparse(self.uri)

        if parsed_uri.hostname not in ("localhost", "127.0.0.1"):
            raise ConnectionError(f"Unable to connect web3 to {parsed_uri.hostname}.")

        if not shutil.which("geth"):
            raise GethNotInstalledError()

        # Use mnemonic from test config
        config_manager = self.network.config_manager
        test_config = config_manager.get_config("test")
        mnemonic = test_config["mnemonic"]
        num_of_accounts = test_config["number_of_accounts"]

        self._geth = EphemeralGeth(
            self.data_folder,
            parsed_uri.hostname,
            parsed_uri.port,
            mnemonic,
            number_of_accounts=num_of_accounts,
        )
        self._geth.connect()

        # Re-check: if web3 still can't reach the node, tear the
        # spawned process back down before failing.
        if not self._web3.isConnected():
            self._geth.disconnect()
            raise ConnectionError("Unable to connect to locally running geth.")
    else:
        # Connected to an existing node — warn if it isn't actually geth.
        client_version = self._web3.clientVersion
        if "geth" in client_version.lower():
            logger.info(f"Connecting to existing Geth node at '{self.uri}'.")
        else:
            network_name = client_version.split("/")[0]
            logger.warning(f"Connecting Geth plugin to non-Geth network '{network_name}'.")

    self._web3.eth.set_gas_price_strategy(rpc_gas_price_strategy)

    def is_poa() -> bool:
        # A node is considered proof-of-authority when its reported eth
        # protocol config contains a 'clique' section.
        node_info: Mapping = self._node_info or {}
        chain_config = extract_nested_value(node_info, "protocols", "eth", "config")
        return chain_config is not None and "clique" in chain_config

    # If network is rinkeby, goerli, or kovan (PoA test-nets)
    # PoA chains need this middleware to tolerate their extraData field.
    if self._web3.eth.chain_id in (4, 5, 42) or is_poa():
        self._web3.middleware_onion.inject(geth_poa_middleware, layer=0)

    # Guard against pointing the provider at the wrong chain entirely.
    if self.network.name != LOCAL_NETWORK_NAME and self.network.chain_id != self.chain_id:
        raise ProviderError(
            "HTTP Connection does not match expected chain ID. "
            f"Are you connected to '{self.network.name}'?"
        )
def clone_repo(self, repo_path: str, target_path: Path, branch: Optional[str] = None) -> GitRepository:
    """
    Clone a repository from Github.

    Args:
        repo_path (str): The path on Github to the repository,
          e.g. ``OpenZeppelin/openzeppelin-contracts``.
        target_path (Path): The local path to store the repo.
        branch (Optional[str]): The branch to clone. Defaults to the default branch.

    Returns:
        pygit2.repository.Repository
    """
    repo = self.get_repo(repo_path)
    branch = branch or repo.default_branch
    logger.info(f"Cloning branch '{branch}' from '{repo.name}'.")

    class GitRemoteCallbacks(pygit2.RemoteCallbacks):
        # BUG FIX: the previous pattern ``[1-9]{1,2}% \([1-9]*/[1-9]*\)``
        # rejected any number containing a '0' (e.g. '10% (100/432)') and
        # three-digit '100%', so most progress lines were silently dropped.
        percentage_pattern = re.compile(r"\d{1,3}% \(\d+/\d+\)")  # e.g. '75% (324/432)'
        total_objects: int = 0
        current_objects_cloned: int = 0
        _progress_bar = None

        def sideband_progress(self, string: str):
            # Parse a line like 'Compressing objects: 0% (1/432)'
            string = string.lower()
            expected_prefix = "compressing objects:"
            if expected_prefix not in string:
                return

            progress_str = string.split(expected_prefix)[-1].strip()
            if not self.percentage_pattern.match(progress_str):
                return None

            progress_parts = progress_str.split(" ")
            fraction_str = progress_parts[1].lstrip("(").rstrip(")")
            fraction = fraction_str.split("/")
            # BUG FIX: ``str.split`` always yields at least one element, so
            # the old ``if not fraction`` check never fired and a malformed
            # pair could IndexError below; require exactly 'cloned/total'.
            if len(fraction) != 2:
                return

            total_objects = fraction[1]
            if not str(total_objects).isnumeric():
                return

            GitRemoteCallbacks.total_objects = int(total_objects)
            previous_value = GitRemoteCallbacks.current_objects_cloned
            new_value = int(fraction[0])
            GitRemoteCallbacks.current_objects_cloned = new_value

            # Create the progress bar lazily, once the total is known.
            if GitRemoteCallbacks.total_objects and not GitRemoteCallbacks._progress_bar:
                GitRemoteCallbacks._progress_bar = tqdm(range(GitRemoteCallbacks.total_objects))

            difference = new_value - previous_value
            if difference > 0:
                GitRemoteCallbacks._progress_bar.update(difference)  # type: ignore
                GitRemoteCallbacks._progress_bar.refresh()  # type: ignore

    # pygit2 cannot clone over the git:// protocol here; use HTTPS.
    url = repo.git_url.replace("git://", "https://")
    clone = pygit2.clone_repository(
        url, str(target_path), checkout_branch=branch, callbacks=GitRemoteCallbacks()
    )
    return clone