def main():
    """Entry point for the timelord launcher: spawn VDF client processes and run until shutdown."""
    if os.name == "nt":
        log.info("Timelord launcher not supported on Windows.")
        return

    root_path = DEFAULT_ROOT_PATH
    setproctitle("chia_timelord_launcher")
    net_config = load_config(root_path, "config.yaml")
    config = net_config["timelord_launcher"]
    initialize_logging("TLauncher", config["logging"], root_path)

    def _on_shutdown_signal():
        # Schedule async cleanup; kill_processes is defined elsewhere in this module.
        asyncio.create_task(kill_processes())

    event_loop = asyncio.get_event_loop()
    try:
        event_loop.add_signal_handler(signal.SIGINT, _on_shutdown_signal)
        event_loop.add_signal_handler(signal.SIGTERM, _on_shutdown_signal)
    except NotImplementedError:
        # Some platforms/event loops (e.g. non-Unix) cannot install signal handlers.
        log.info("signal handlers unsupported")

    try:
        event_loop.run_until_complete(spawn_all_processes(config, net_config))
    finally:
        log.info("Launcher fully closed.")
        event_loop.close()
async def async_run_daemon(root_path: Path) -> int:
    """Initialize and run the daemon WebSocket server; return a process exit code."""
    chia_init(root_path)
    config = load_config(root_path, "config.yaml")
    setproctitle("chia_daemon")
    initialize_logging("daemon", config["logging"], root_path)
    launch_lock = singleton(daemon_launch_lock_path(root_path))

    # Resolve the SSL material the daemon will serve with.
    crt_path = root_path / config["daemon_ssl"]["private_crt"]
    key_path = root_path / config["daemon_ssl"]["private_key"]
    ca_crt_path = root_path / config["private_ssl_ca"]["crt"]
    ca_key_path = root_path / config["private_ssl_ca"]["key"]

    # Announce the certificate locations on stdout so a parent process can read them.
    sys.stdout.flush()
    cert_announcement = {
        "message": "cert_path",
        "success": True,
        "cert": f"{crt_path}",
        "key": f"{key_path}",
        "ca_crt": f"{ca_crt_path}",
    }
    sys.stdout.write("\n" + dict_to_json_str(cert_announcement) + "\n")
    sys.stdout.flush()

    if launch_lock is None:
        print("daemon: already launching")
        return 2

    # TODO: clean this up, ensuring lockfile isn't removed until the listen port is open
    create_server_for_daemon(root_path)
    ws_server = WebSocketServer(root_path, ca_crt_path, ca_key_path, crt_path, key_path)
    await ws_server.start()
    assert ws_server.websocket_server is not None
    await ws_server.websocket_server.wait_closed()
    log.info("Daemon WebSocketServer closed")
    return 0
def plots_cmd(ctx: click.Context):
    """Create, add, remove and check your plots"""
    from chia.util.chia_logging import initialize_logging

    # Plot commands require an already-initialized config directory.
    config_root: Path = ctx.obj["root_path"]
    if not config_root.is_dir():
        raise RuntimeError("Please initialize (or migrate) your config directory with 'chia init'")
    # Plot tooling logs to stdout rather than the configured log file.
    initialize_logging("", {"log_stdout": True}, config_root)
def main():
    """Entry point for the DNS seeder: load config, build the DNS records, and serve until shutdown."""
    root_path = DEFAULT_ROOT_PATH
    config = load_config(root_path, "config.yaml", SERVICE_NAME)
    initialize_logging(SERVICE_NAME, config["logging"], root_path)

    # These module-level globals are read by the request-handling code elsewhere in this file.
    global D
    global ns
    global TTL
    global soa_record
    global ns_records

    D = DomainName(config["domain_name"])
    ns = DomainName(config["nameserver"])
    TTL = config["ttl"]
    soa_settings = config["soa"]
    soa_record = SOA(
        mname=ns,  # primary name server
        rname=soa_settings["rname"],  # email of the domain administrator
        times=(
            soa_settings["serial_number"],
            soa_settings["refresh"],
            soa_settings["retry"],
            soa_settings["expire"],
            soa_settings["minimum"],
        ),
    )
    ns_records = [NS(ns)]

    def _on_shutdown_signal():
        # NOTE(review): kill_processes is not defined in this block — presumably a
        # module-level coroutine; confirm it exists in this file.
        asyncio.create_task(kill_processes())

    event_loop = asyncio.get_event_loop()
    try:
        event_loop.add_signal_handler(signal.SIGINT, _on_shutdown_signal)
        event_loop.add_signal_handler(signal.SIGTERM, _on_shutdown_signal)
    except NotImplementedError:
        log.info("signal handlers unsupported")

    try:
        event_loop.run_until_complete(serve_dns())
    finally:
        event_loop.close()
async def async_run_daemon(root_path: Path, wait_for_unlock: bool = False) -> int:
    """Initialize and run the daemon WebSocket server; return a process exit code.

    When wait_for_unlock is True, key checking is deferred until the GUI unlocks
    the keyring, so chia_init skips its check_keys() call here.
    """
    chia_init(root_path, should_check_keys=(not wait_for_unlock))
    config = load_config(root_path, "config.yaml")
    setproctitle("chia_daemon")
    initialize_logging("daemon", config["logging"], root_path)
    launch_lock = singleton(daemon_launch_lock_path(root_path))

    # Resolve the SSL material the daemon will serve with.
    crt_path = root_path / config["daemon_ssl"]["private_crt"]
    key_path = root_path / config["daemon_ssl"]["private_key"]
    ca_crt_path = root_path / config["private_ssl_ca"]["crt"]
    ca_key_path = root_path / config["private_ssl_ca"]["key"]

    # Announce the certificate locations on stdout so a parent process can read them.
    sys.stdout.flush()
    cert_announcement = {
        "message": "cert_path",
        "success": True,
        "cert": f"{crt_path}",
        "key": f"{key_path}",
        "ca_crt": f"{ca_crt_path}",
    }
    sys.stdout.write("\n" + dict_to_json_str(cert_announcement) + "\n")
    sys.stdout.flush()

    if launch_lock is None:
        print("daemon: already launching")
        return 2

    # TODO: clean this up, ensuring lockfile isn't removed until the listen port is open
    create_server_for_daemon(root_path)
    ws_server = WebSocketServer(
        root_path, ca_crt_path, ca_key_path, crt_path, key_path, run_check_keys_on_unlock=wait_for_unlock
    )
    await ws_server.start()
    assert ws_server.websocket_server is not None
    await ws_server.websocket_server.wait_closed()
    log.info("Daemon WebSocketServer closed")
    return 0
def __init__(
    self,
    root_path,
    node: Any,
    peer_api: Any,
    node_type: NodeType,
    advertised_port: int,
    service_name: str,
    network_id: str,
    upnp_ports: Optional[List[int]] = None,
    server_listen_ports: Optional[List[int]] = None,
    connect_peers: Optional[List[PeerInfo]] = None,
    auth_connect_peers: bool = True,
    on_connect_callback: Optional[Callable] = None,
    rpc_info: Optional[Tuple[type, int]] = None,
    parse_cli_args=True,
    connect_to_daemon=True,
) -> None:
    """Set up a Chia service: config loading, logging, the ChiaServer, and bookkeeping state.

    The interface is unchanged except that the three list parameters now default to
    None (normalized to fresh empty lists below) instead of shared mutable [] defaults.
    """
    # Normalize mutable-default parameters: a literal [] default object is shared
    # across all calls and can leak state between service instances.
    upnp_ports = [] if upnp_ports is None else upnp_ports
    server_listen_ports = [] if server_listen_ports is None else server_listen_ports
    connect_peers = [] if connect_peers is None else connect_peers

    self.root_path = root_path
    self.config = load_config(root_path, "config.yaml")
    ping_interval = self.config.get("ping_interval")
    self.self_hostname = self.config.get("self_hostname")
    self.daemon_port = self.config.get("daemon_port")
    assert ping_interval is not None
    self._connect_to_daemon = connect_to_daemon
    self._node_type = node_type
    self._service_name = service_name
    self._rpc_task: Optional[asyncio.Task] = None
    self._rpc_close_task: Optional[asyncio.Task] = None
    self._network_id: str = network_id

    proctitle_name = f"chia_{service_name}"
    setproctitle(proctitle_name)
    self._log = logging.getLogger(service_name)
    # load_config_cli additionally applies command-line overrides to the service section.
    if parse_cli_args:
        service_config = load_config_cli(root_path, "config.yaml", service_name)
    else:
        service_config = load_config(root_path, "config.yaml", service_name)
    initialize_logging(service_name, service_config["logging"], root_path)

    self._rpc_info = rpc_info
    private_ca_crt, private_ca_key = private_ssl_ca_paths(root_path, self.config)
    chia_ca_crt, chia_ca_key = chia_ssl_ca_paths(root_path, self.config)

    inbound_rlp = self.config.get("inbound_rate_limit_percent")
    outbound_rlp = self.config.get("outbound_rate_limit_percent")
    # BUG FIX: the original wrote "if NodeType == NodeType.WALLET:", comparing the
    # enum *class* to a member — always False — so the wallet-specific rate-limit
    # overrides below were never applied. Compare the node_type argument instead.
    if node_type == NodeType.WALLET:
        inbound_rlp = service_config.get("inbound_rate_limit_percent", inbound_rlp)
        # NOTE(review): the outbound fallback is a hard-coded 60 while the inbound
        # fallback is the global setting — looks intentional for wallets, but confirm.
        outbound_rlp = service_config.get("outbound_rate_limit_percent", 60)
    assert inbound_rlp and outbound_rlp
    self._server = ChiaServer(
        advertised_port,
        node,
        peer_api,
        node_type,
        ping_interval,
        network_id,
        inbound_rlp,
        outbound_rlp,
        root_path,
        service_config,
        (private_ca_crt, private_ca_key),
        (chia_ca_crt, chia_ca_key),
        name=f"{service_name}_server",
    )
    # Hand the server back to the node if it supports it.
    f = getattr(node, "set_server", None)
    if f:
        f(self._server)
    else:
        self._log.warning(f"No set_server method for {service_name}")

    self._connect_peers = connect_peers
    self._auth_connect_peers = auth_connect_peers
    self._upnp_ports = upnp_ports
    self._server_listen_ports = server_listen_ports

    self._api = peer_api
    self._node = node
    self._did_start = False
    self._is_stopping = asyncio.Event()
    self._stopped_by_rpc = False

    self._on_connect_callback = on_connect_callback
    self._advertised_port = advertised_port
    self._reconnect_tasks: List[asyncio.Task] = []
    self.upnp: Optional[UPnP] = None
def __init__(
    self,
    config: Dict,
    pool_config: Dict,
    constants: ConsensusConstants,
    pool_store: Optional[AbstractPoolStore] = None,
    difficulty_function: Callable = get_new_difficulty,
):
    """Initialize the pool server state from its configuration dicts.

    Args:
        config: node/network-level configuration dict.
        pool_config: pool-specific configuration (info, fees, addresses, timing, RPC ports).
        constants: consensus constants for the target network.
        pool_store: optional pre-built store; when None, one is chosen from
            pool_config["store"] (MariadbPoolStore vs SqlitePoolStore).
        difficulty_function: callable used to recompute a farmer's difficulty.
    """
    # In-flight tasks that follow singleton state, keyed by launcher id.
    self.follow_singleton_tasks: Dict[bytes32, asyncio.Task] = {}
    # NOTE(review): this stores the logging *module*, not a logger instance,
    # and configures both basicConfig and initialize_logging — confirm intended.
    self.log = logging
    # If you want to log to a file: use filename='example.log', encoding='utf-8'
    self.log.basicConfig(level=logging.INFO)
    initialize_logging("pool", pool_config["logging"], pathlib.Path(pool_config["logging"]["log_path"]))

    # Set our pool info here (served to clients via the pool_info endpoint).
    self.info_default_res = pool_config["pool_info"]["default_res"]
    self.info_name = pool_config["pool_info"]["name"]
    self.info_logo_url = pool_config["pool_info"]["logo_url"]
    self.info_description = pool_config["pool_info"]["description"]
    self.welcome_message = pool_config["welcome_message"]

    self.config = config
    self.constants = constants

    # Select the persistence backend; the Mariadb store is imported lazily so the
    # dependency is only required when configured.
    if pool_config.get("store") == "MariadbPoolStore":
        from .store.mariadb_store import MariadbPoolStore

        self.store: AbstractPoolStore = pool_store or MariadbPoolStore()
    else:
        self.store: AbstractPoolStore = pool_store or SqlitePoolStore()

    self.pool_fee = pool_config["pool_fee"]

    # This number should be held constant and be consistent for every pool in the network. DO NOT CHANGE
    self.iters_limit = self.constants.POOL_SUB_SLOT_ITERS // 64

    # This number should not be changed, since users will put this into their singletons
    self.relative_lock_height = uint32(pool_config["relative_lock_height"])

    # TODO(pool): potentially tweak these numbers for security and performance
    # This is what the user enters into the input field. This exact value will be stored on the blockchain
    self.pool_url = pool_config["pool_url"]
    self.min_difficulty = uint64(pool_config["min_difficulty"])  # 10 difficulty is about 1 proof a day per plot
    self.default_difficulty: uint64 = uint64(pool_config["default_difficulty"])
    self.difficulty_function: Callable = difficulty_function

    # Queue of partial proofs waiting to be confirmed against the chain.
    self.pending_point_partials: Optional[asyncio.Queue] = None
    # Cache of recently credited points, to deduplicate resubmitted partials.
    self.recent_points_added: LRUCache = LRUCache(20000)

    # The time in minutes for an authentication token to be valid. See "Farmer authentication" in SPECIFICATION.md
    self.authentication_token_timeout: uint8 = pool_config["authentication_token_timeout"]

    # This is where the block rewards will get paid out to. The pool needs to support this address forever,
    # since the farmers will encode it into their singleton on the blockchain. WARNING: the default pool code
    # completely spends this wallet and distributes it to users, so don't put any additional funds in here
    # that you do not want to distribute. Even if the funds are in a different address than this one, they WILL
    # be spent by this code! So only put funds that you want to distribute to pool members here.
    # Using 2164248527
    self.default_target_puzzle_hash: bytes32 = bytes32(decode_puzzle_hash(pool_config["default_target_address"]))

    # The pool fees will be sent to this address. This MUST be on a different key than the target_puzzle_hash,
    # otherwise, the fees will be sent to the users. Using 690783650
    self.pool_fee_puzzle_hash: bytes32 = bytes32(decode_puzzle_hash(pool_config["pool_fee_address"]))

    # This is the wallet fingerprint and ID for the wallet spending the funds from `self.default_target_puzzle_hash`
    self.wallet_fingerprint = pool_config["wallet_fingerprint"]
    self.wallet_id = pool_config["wallet_id"]

    # We need to check for slow farmers. If farmers cannot submit proofs in time, they won't be able to win
    # any rewards either. This number can be tweaked to be more or less strict. More strict ensures everyone
    # gets high rewards, but it might cause some of the slower farmers to not be able to participate in the pool.
    self.partial_time_limit: int = pool_config["partial_time_limit"]

    # There is always a risk of a reorg, in which case we cannot reward farmers that submitted partials in that
    # reorg. That is why we have a time delay before changing any account points.
    self.partial_confirmation_delay: int = pool_config["partial_confirmation_delay"]

    # Only allow PUT /farmer per launcher_id every n seconds to prevent difficulty change attacks.
    self.farmer_update_blocked: set = set()
    self.farmer_update_cooldown_seconds: int = 600

    # These are the phs that we want to look for on chain, that we can claim to our pool
    self.scan_p2_singleton_puzzle_hashes: Set[bytes32] = set()

    # Don't scan anything before this height, for efficiency (for example pool start date)
    self.scan_start_height: uint32 = uint32(pool_config["scan_start_height"])

    # Interval for scanning and collecting the pool rewards
    self.collect_pool_rewards_interval = pool_config["collect_pool_rewards_interval"]

    # After this many confirmations, a transaction is considered final and irreversible
    self.confirmation_security_threshold = pool_config["confirmation_security_threshold"]

    # Interval for making payout transactions to farmers
    self.payment_interval = pool_config["payment_interval"]

    # We will not make transactions with more targets than this, to ensure our transaction gets into the blockchain
    # faster.
    self.max_additions_per_transaction = pool_config["max_additions_per_transaction"]

    # This is the list of payments that we have not sent yet, to farmers
    self.pending_payments: Optional[asyncio.Queue] = None

    # Keeps track of the latest state of our node
    self.blockchain_state = {"peak": None}

    # Whether or not the wallet is synced (required to make payments)
    self.wallet_synced = False

    # The fee to pay ( In mojo ) when claiming a block reward
    self.claim_fee: uint64 = uint64(pool_config.get("block_claim_fee", 0))

    # We target these many partials for this number of seconds. We adjust after receiving this many partials.
    self.number_of_partials_target: int = pool_config["number_of_partials_target"]
    self.time_target: int = pool_config["time_target"]

    # Tasks (infinite While loops) for different purposes
    self.confirm_partials_loop_task: Optional[asyncio.Task] = None
    self.collect_pool_rewards_loop_task: Optional[asyncio.Task] = None
    self.create_payment_loop_task: Optional[asyncio.Task] = None
    self.submit_payment_loop_task: Optional[asyncio.Task] = None
    self.get_peak_loop_task: Optional[asyncio.Task] = None

    # RPC clients/ports for talking to the full node and the pool wallet.
    self.node_rpc_client: Optional[FullNodeRpcClient] = None
    self.node_rpc_port = pool_config["node_rpc_port"]
    self.wallet_rpc_client: Optional[WalletRpcClient] = None
    self.wallet_rpc_port = pool_config["wallet_rpc_port"]