def get_config(self):
    logging.info("[-] Incoming RPC request - get_config")
    path = check_folder(dist_mode)
    if os.path.exists(os.path.join(path, CONFIG_FILE)):
        with open(os.path.join(path, CONFIG_FILE), "r") as f:
            try:
                config_vars = {}
                for line in f.readlines():
                    # "KEY=VALUE" lines become entries; bare keys become flags.
                    config_var = line.rstrip("\n").split("=", 1)
                    if len(config_var) > 1:
                        config_vars[config_var[0]] = config_var[1]
                    else:
                        config_vars[config_var[0]] = True
                result = {"type": "config", "data": config_vars}
                logging.debug(result)
                return json.dumps(result)
            except Exception:
                result = {
                    "type": "error",
                    "data": "error reading config file"
                }
                logging.debug(result)
                return json.dumps(result)
    else:
        result = {"type": "error", "data": "config file does not exist"}
        logging.info(result)
        return json.dumps(result)
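# Illustrative sketch of the key=value parsing used by get_config above.
# The sample lines are hypothetical and stand in for a real config file.
sample_lines = ["DEBUG\n", "DATABASE_HOST=127.0.0.1\n"]
parsed = {}
for sample in sample_lines:
    parts = sample.rstrip("\n").split("=", 1)
    parsed[parts[0]] = parts[1] if len(parts) > 1 else True
print(parsed)  # {'DEBUG': True, 'DATABASE_HOST': '127.0.0.1'}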
def start_log(log_level="INFO"):
    path = check_folder(dist_mode)
    logging.basicConfig(
        format='%(asctime)s %(levelname)s:%(message)s',
        filename=os.path.join(path, RPC_FILE),
        level=getattr(logging, log_level),
        filemode='w',
    )
def get_sync_status(self):
    logging.info("[-] Incoming RPC request - get_sync_status")
    path = check_folder(dist_mode)
    if os.path.exists(os.path.join(path, SYNC_FILE)):
        with open(os.path.join(path, SYNC_FILE), "r") as f:
            try:
                latest_block = get_latest_block_db()
                latest_block_height = latest_block.block_height
                # The sync file holds "TRUE" while a sync is in progress
                # and "FALSE" once it has completed.
                text = f.read()
                if text == "TRUE":
                    status = "syncing"
                elif text == "FALSE":
                    status = "not syncing"
                else:
                    status = "unknown"
                result = {
                    "type": "status",
                    "data": status + ": block height " +
                    str(latest_block_height)
                }
            except DatabaseQueryFailed as err:
                result = {"type": "error", "data": err.args[0]}
            except Exception:
                result = {"type": "error", "data": "error reading sync file"}
    else:
        result = {"type": "error", "data": "sync file does not exist"}
    logging.info(result)
    return json.dumps(result)
def parse_env():
    path = check_folder(is_dist_mode())
    env_path = os.path.join(path, ENV_FILE)
    if not os.path.exists(env_path):
        logging.info(
            "[*] .env file does not exist. Creating default .env file")
        with open(env_path, "w") as f:
            try:
                f.write("DATABASE_HOST=127.0.0.1\n")
                f.write("DATABASE_PORT=27017\n")
            except OSError:
                logging.critical("[!] Unable to write default .env file.")
                sys.exit(1)
    with open(env_path, "r") as f:
        try:
            env_vars = {}
            for line in f.readlines():
                key, value = line.rstrip("\n").split("=", 1)
                env_vars[key] = value
        except Exception:
            logging.critical("[!] Unable to read .env file")
            sys.exit(1)
    try:
        # Fail loudly if either variable is missing or the port is not an int.
        return env_vars["DATABASE_HOST"], int(env_vars["DATABASE_PORT"])
    except (KeyError, ValueError):
        logging.critical("[!] Incorrect environment variables set")
        sys.exit(1)
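# Hypothetical call site for parse_env(). The default port 27017 suggests a
# MongoDB backend, so pymongo is assumed here purely for illustration.
from pymongo import MongoClient

DATABASE_HOST, DATABASE_PORT = parse_env()
client = MongoClient(DATABASE_HOST, DATABASE_PORT)  # e.g. 127.0.0.1:27017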
def remove_sync_file():
    path = check_folder(dist_mode)
    if os.path.exists(os.path.join(path, SYNC_FILE)):
        os.remove(os.path.join(path, SYNC_FILE))
def initiate_processes(start_block, sync_event):
    # Ignore SIGINT while the worker processes are spawned so they inherit
    # the ignored handler; the default handler is restored afterwards.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    path = check_folder(dist_mode)
    with open(os.path.join(path, SYNC_FILE), "w") as f:
        try:
            logging.info("[-] Syncing")
            f.write("TRUE")
        except OSError:
            logging.critical("[!] Unable to write .sync file.")
    exceptions = Queue()
    block_hashes_queue = Queue(maxsize=400)
    blocks_downloaded_queue = Queue(maxsize=25)
    blocks_process_queue = Queue()
    add_addresses_queue = Queue()
    add_blocks_queue = Queue()
    jobs_queue = Queue()
    completed_jobs_process_blocks = Queue()
    completed_jobs_add_addresses = Queue()
    completed_jobs_add_blocks = Queue()
    completed_jobs_queue_map = {
        "process_blocks": completed_jobs_process_blocks,
        "add_addresses": completed_jobs_add_addresses,
        "add_blocks": completed_jobs_add_blocks
    }
    block_process_event = Event()
    if sync_event.is_set():
        initiate_block_hashes_download_process(start_block,
                                               block_hashes_queue, exceptions)
    if sync_event.is_set():
        initiate_block_download_all_process(block_hashes_queue,
                                            blocks_downloaded_queue,
                                            exceptions)
    if sync_event.is_set():
        initiate_process_blocks_process(block_process_event,
                                        blocks_downloaded_queue,
                                        add_addresses_queue, jobs_queue,
                                        completed_jobs_process_blocks,
                                        exceptions)
    if sync_event.is_set():
        initiate_add_addresses_process(add_addresses_queue, add_blocks_queue,
                                       jobs_queue,
                                       completed_jobs_add_addresses,
                                       exceptions)
    if sync_event.is_set():
        initiate_add_blocks_process(block_process_event, add_blocks_queue,
                                    jobs_queue, completed_jobs_add_blocks,
                                    exceptions)
    signal.signal(signal.SIGINT, default_handler)
    while sync_event.is_set():
        try:
            start_time = time.time()
            try:
                # Re-raise any exception propagated from a worker process.
                exception = exceptions.get(block=False)
                print("Exception occurred: ", exception)
                raise exception
            except Empty:
                pass
            finally:
                try:
                    # Run the next queued job and route its result to the
                    # completed-jobs queue of the originating process.
                    process, action, args, unpack = jobs_queue.get(block=False)
                    result = action(*args) if unpack else action(args)
                    if result:
                        completed_jobs_queue_map[process].put(result)
                except Empty:
                    pass
                finally:
                    # Once the pipeline queues have drained, mark the sync
                    # complete in the sync file and exit the loop.
                    if all([
                            block_hashes_queue.empty(),
                            blocks_downloaded_queue.empty(),
                            blocks_process_queue.empty(),
                            time.time() - start_time > 10000
                    ]):
                        log_stats()
                        with open(os.path.join(path, SYNC_FILE), "w") as f:
                            try:
                                logging.info("[-] Sync complete")
                                f.write("FALSE")
                            except OSError:
                                logging.critical(
                                    "[!] Unable to write .sync file.")
                        time.sleep(180)
                        break
        except KeyboardInterrupt:
            empty_queue(jobs_queue)
            raise
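# Illustrative sketch of the (process, action, args, unpack) job protocol the
# loop in initiate_processes consumes. The queue and callables here are
# stand-ins defined only for this demo, not part of the source.
from multiprocessing import Queue as _Queue

demo_jobs_queue = _Queue()

def store_batch(batch, table):  # hypothetical job callable
    print("storing", len(batch), "items in", table)

def store_one(item):  # hypothetical job callable
    print("storing", item)

# unpack=True: the loop calls action(*args), spreading the tuple.
demo_jobs_queue.put(("add_blocks", store_batch, ([1, 2, 3], "blocks"), True))
# unpack=False: the loop calls action(args), passing the value as-is.
demo_jobs_queue.put(("add_blocks", store_one, 42, False))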
def __init__(self,
             host,
             port=None,
             testnet=False,
             logging=False,
             tor=False,
             process_name="main"):
    self.testnet = testnet
    self.logging = logging
    if port is None:
        port = 18333 if testnet else 8333
    if tor:
        self.tor = False
        socks_port = randint(9000, 9999)
        # Prefer a bundled tor binary; fall back to one on the PATH.
        if sys.platform == "darwin" or sys.platform == "linux":
            tor_executable_dir_path = os.path.join(
                str(Path.home()), ".bitcoin-explorer-app", "tor")
        elif sys.platform == "win32":
            tor_executable_dir_path = os.path.join(
                str(Path.home()), "bitcoin-explorer-app", "tor.exe")
        else:
            tor_executable_dir_path = os.path.join(
                str(Path.home()), "bitcoin-explorer-app", "tor")
        if os.path.exists(tor_executable_dir_path):
            tor_cmd = tor_executable_dir_path
        else:
            tor_cmd = "tor"
        # Start from a clean per-process Tor data directory.
        tor_data_dir_path = os.path.join(check_folder(is_dist_mode()),
                                         "tor_" + process_name)
        if os.path.exists(tor_data_dir_path):
            shutil.rmtree(tor_data_dir_path)
        os.mkdir(tor_data_dir_path, 0o744)
        while not self.tor:
            try:
                if logging:
                    # When enabled, `logging` doubles as the callable that
                    # receives Tor's bootstrap messages.
                    tor_process = stem.process.launch_tor_with_config(
                        config={
                            "SocksPort": str(socks_port),
                            "DataDirectory": tor_data_dir_path
                        },
                        tor_cmd=tor_cmd,
                        init_msg_handler=logging,
                        take_ownership=True,
                        timeout=60)
                else:
                    tor_process = stem.process.launch_tor_with_config(
                        config={
                            "SocksPort": str(socks_port),
                            "DataDirectory": tor_data_dir_path
                        },
                        tor_cmd=tor_cmd,
                        take_ownership=True,
                        timeout=60)
                self.tor = tor_process
                self.socks_port = socks_port
            except OSError as os_error:
                if BIND_SOCKS_PORT_ERROR_MSG == str(os_error):
                    # Port already in use: retry with a fresh random port.
                    socks_port = randint(9000, 9999)
                else:
                    raise TorNotAvailable
    else:
        self.tor = False
    # connect to socket
    if tor:
        s = socks.socksocket()
        s.set_proxy(socks.SOCKS5, "localhost", socks_port)
        self.socket = s
    else:
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.socket.settimeout(10)
    try:
        self.socket.connect((host, port))
        # create a stream that we can use with the rest of the library
        self.stream = self.socket.makefile('rb', None)
    except Exception:
        if self.tor:
            self.tor.kill()
        raise
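# Hypothetical usage; only __init__ is shown above, so the class name
# SimpleNode is an assumption. Connects to a public mainnet DNS-seed host
# over Tor.
node = SimpleNode("seed.bitcoin.sipa.be",
                  testnet=False,
                  tor=True,
                  process_name="downloader")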