def __init__(self, ledger):
    self.config = ledger.config
    self.session_pool = SessionPool(network=self, timeout=self.config.get('connect_timeout', 6))
    self.client: Optional[ClientSession] = None
    self._switch_task: Optional[asyncio.Task] = None
    self.running = False
    self.remote_height: int = 0
    self._on_connected_controller = StreamController()
    self.on_connected = self._on_connected_controller.stream
    self._on_header_controller = StreamController(merge_repeated_events=True)
    self.on_header = self._on_header_controller.stream
    self._on_status_controller = StreamController(merge_repeated_events=True)
    self.on_status = self._on_status_controller.stream
    self.subscription_controllers = {
        'blockchain.headers.subscribe': self._on_header_controller,
        'blockchain.address.subscribe': self._on_status_controller,
    }
def __init__(self, config=None):
    self.config = config or {}
    self.db: BaseDatabase = self.config.get('db') or self.database_class(
        os.path.join(self.path, "blockchain.db")
    )
    self.headers: BaseHeaders = self.config.get('headers') or self.headers_class(
        os.path.join(self.path, "headers")
    )
    self.network = self.config.get('network') or self.network_class(self)
    self.network.on_header.listen(self.receive_header)
    self.network.on_status.listen(self.receive_status)
    self.accounts = []
    self.fee_per_byte: int = self.config.get('fee_per_byte', self.default_fee_per_byte)
    self._on_transaction_controller = StreamController()
    self.on_transaction = self._on_transaction_controller.stream
    self.on_transaction.listen(lambda e: log.info(
        '(%s) on_transaction: address=%s, height=%s, is_verified=%s, tx.id=%s',
        self.get_id(), e.address, e.height, e.is_verified, e.tx.id
    ))
    self._on_header_controller = StreamController()
    self.on_header = self._on_header_controller.stream
    self.on_header.listen(lambda change: log.info(
        '%s: added %s header blocks, final height %s',
        self.get_id(), change, self.headers.height
    ))
    self._transaction_processing_locks = {}
    self._utxo_reservation_lock = defer.DeferredLock()
    self._header_processing_lock = defer.DeferredLock()
def __init__(self):
    self.request_id = 0
    self.lookup_table = {}
    self.session = {}
    self.on_disconnected_controller = StreamController()
    self.on_disconnected = self.on_disconnected_controller.stream
def test_unique_events(self):
    events = []
    controller = StreamController(merge_repeated_events=True)
    controller.stream.listen(on_data=events.append)
    controller.add("yo")
    controller.add("yo")
    self.assertEqual(events, ["yo"])
def test_non_unique_events(self):
    events = []
    controller = StreamController()
    controller.stream.listen(on_data=events.append)
    controller.add("yo")
    controller.add("yo")
    self.assertEqual(events, ["yo", "yo"])
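The two tests above pin down the only StreamController behavior they rely on. Here is a minimal sketch of that contract; `MiniStreamController` and its merged controller/stream object are illustrative, not torba's actual classes:

# A minimal sketch (not the real StreamController implementation):
# listeners receive every added event, unless merge_repeated_events=True,
# in which case an event equal to the most recently delivered one is dropped.
class MiniStreamController:
    def __init__(self, merge_repeated_events=False):
        self.merge_repeated_events = merge_repeated_events
        self._last_event = None
        self._listeners = []
        self.stream = self  # the real class exposes a separate Stream object

    def listen(self, on_data):
        self._listeners.append(on_data)

    def add(self, event):
        if self.merge_repeated_events and event == self._last_event:
            return  # duplicate of the previous event: merged away
        self._last_event = event
        for on_data in self._listeners:
            on_data(event)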
def __init__(self, *args, network, server, **kwargs):
    self.network = network
    self.server = server
    super().__init__(*args, **kwargs)
    self._on_disconnect_controller = StreamController()
    self.on_disconnected = self._on_disconnect_controller.stream
    self.bw_limit = self.framer.max_size = self.max_errors = 1 << 32
def __init__(self, *args, network, server, timeout=30, **kwargs):
    self.network = network
    self.server = server
    super().__init__(*args, **kwargs)
    self._on_disconnect_controller = StreamController()
    self.on_disconnected = self._on_disconnect_controller.stream
    self.bw_limit = self.framer.max_size = self.max_errors = 1 << 32
    self.timeout = timeout
    self.max_seconds_idle = timeout * 2
    self.ping_task = None
class ClientSession(BaseClientSession):

    def __init__(self, *args, network, server, timeout=30, **kwargs):
        self.network = network
        self.server = server
        super().__init__(*args, **kwargs)
        self._on_disconnect_controller = StreamController()
        self.on_disconnected = self._on_disconnect_controller.stream
        self.bw_limit = self.framer.max_size = self.max_errors = 1 << 32
        self.timeout = timeout
        self.max_seconds_idle = timeout * 2
        self.ping_task = None

    async def send_request(self, method, args=()):
        try:
            return await asyncio.wait_for(
                super().send_request(method, args), timeout=self.timeout
            )
        except RPCError as e:
            log.warning("Wallet server returned an error. Code: %s Message: %s", *e.args)
            raise e
        except asyncio.TimeoutError:
            self.abort()
            raise

    async def ping_forever(self):
        # TODO: change to 'ping' on newer protocol (above 1.2)
        while not self.is_closing():
            if (time() - self.last_send) > self.max_seconds_idle:
                try:
                    await self.send_request('server.banner')
                except Exception:  # narrowed from a bare `except:`
                    self.abort()
                    raise
            await asyncio.sleep(self.max_seconds_idle // 3)

    async def create_connection(self, timeout=6):
        connector = Connector(lambda: self, *self.server)
        await asyncio.wait_for(connector.create_connection(), timeout=timeout)
        self.ping_task = asyncio.create_task(self.ping_forever())

    async def handle_request(self, request):
        controller = self.network.subscription_controllers[request.method]
        controller.add(request.args)

    def connection_lost(self, exc):
        super().connection_lost(exc)
        self._on_disconnect_controller.add(True)
        if self.ping_task:
            self.ping_task.cancel()
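A hedged usage sketch for the session above. The server address is hypothetical, and `network` is assumed to provide the `subscription_controllers` mapping that `handle_request` consults:

async def example_connect(network):
    # hypothetical SPV server endpoint
    session = ClientSession(network=network, server=('spv.example.com', 50001))
    await session.create_connection(timeout=6)  # also starts ping_forever()
    # times out (and aborts the session) after session.timeout seconds
    banner = await session.send_request('server.banner')
    return banner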
class ClientSession(BaseClientSession):

    def __init__(self, *args, network, **kwargs):
        self.network = network
        super().__init__(*args, **kwargs)
        self._on_disconnect_controller = StreamController()
        self.on_disconnected = self._on_disconnect_controller.stream
        self.bw_limit = self.framer.max_size = self.max_errors = 1 << 32

    async def handle_request(self, request):
        controller = self.network.subscription_controllers[request.method]
        controller.add(request.args)

    def connection_lost(self, exc):
        super().connection_lost(exc)
        self._on_disconnect_controller.add(True)
def __init__(self, config=None):
    self.config = config or {}
    self.db: BaseDatabase = self.config.get('db') or self.database_class(
        os.path.join(self.path, "blockchain.db")
    )
    self.db.ledger = self
    self.headers: BaseHeaders = self.config.get('headers') or self.headers_class(
        os.path.join(self.path, "headers")
    )
    self.network = self.config.get('network') or self.network_class(self)
    self.network.on_header.listen(self.receive_header)
    self.network.on_status.listen(self.process_status_update)
    self.accounts = []
    self.fee_per_byte: int = self.config.get('fee_per_byte', self.default_fee_per_byte)
    self._on_transaction_controller = StreamController()
    self.on_transaction = self._on_transaction_controller.stream
    self.on_transaction.listen(
        lambda e: log.info(
            '(%s) on_transaction: address=%s, height=%s, is_verified=%s, tx.id=%s',
            self.get_id(), e.address, e.tx.height, e.tx.is_verified, e.tx.id
        )
    )
    self._on_address_controller = StreamController()
    self.on_address = self._on_address_controller.stream
    self.on_address.listen(
        lambda e: log.info('(%s) on_address: %s', self.get_id(), e.addresses)
    )
    self._on_header_controller = StreamController()
    self.on_header = self._on_header_controller.stream
    self.on_header.listen(
        lambda change: log.info(
            '%s: added %s header blocks, final height %s',
            self.get_id(), change, self.headers.height
        )
    )
    self._tx_cache = pylru.lrucache(100000)
    self._update_tasks = TaskGroup()
    self._utxo_reservation_lock = asyncio.Lock()
    self._header_processing_lock = asyncio.Lock()
    self._address_update_locks: Dict[str, asyncio.Lock] = {}
    self.coin_selection_strategy = None
def __init__(self, ledger):
    self.config = ledger.config
    self.client: ClientSession = None
    self.running = False
    self._on_connected_controller = StreamController()
    self.on_connected = self._on_connected_controller.stream
    self._on_header_controller = StreamController()
    self.on_header = self._on_header_controller.stream
    self._on_status_controller = StreamController()
    self.on_status = self._on_status_controller.stream
    self.subscription_controllers = {
        'blockchain.headers.subscribe': self._on_header_controller,
        'blockchain.address.subscribe': self._on_status_controller,
    }
def __init__(self, *args, network, server, timeout=30, on_connect_callback=None, **kwargs):
    self.network = network
    self.server = server
    super().__init__(*args, **kwargs)
    self._on_disconnect_controller = StreamController()
    self.on_disconnected = self._on_disconnect_controller.stream
    self.framer.max_size = self.max_errors = 1 << 32
    self.bw_limit = -1
    self.timeout = timeout
    self.max_seconds_idle = timeout * 2
    self.response_time: Optional[float] = None
    self.connection_latency: Optional[float] = None
    self._response_samples = 0
    self.pending_amount = 0
    self._on_connect_cb = on_connect_callback or (lambda: None)
    self.trigger_urgent_reconnect = asyncio.Event()
    # one request per second of timeout, conservative default
    self._semaphore = asyncio.Semaphore(self.timeout * 2)
def __init__(self, config=None, db=None, network=None, headers_class=None):
    self.config = config or {}
    self.db = db or self.database_class(
        os.path.join(self.path, "blockchain.db")
    )  # type: basedatabase.BaseDatabase
    self.network = network or self.network_class(self)
    self.network.on_header.listen(self.process_header)
    self.network.on_status.listen(self.process_status)
    self.accounts = set()
    self.headers = (headers_class or self.headers_class)(self)
    self.fee_per_byte = self.config.get('fee_per_byte', self.default_fee_per_byte)
    self._on_transaction_controller = StreamController()
    self.on_transaction = self._on_transaction_controller.stream
    self.on_transaction.listen(
        lambda e: log.info('({}) on_transaction: address={}, height={}, is_verified={}, tx.id={}'.format(
            self.get_id(), e.address, e.height, e.is_verified, e.tx.hex_id)
        )
    )
    self._on_header_controller = StreamController()
    self.on_header = self._on_header_controller.stream
    self._transaction_processing_locks = {}
class ClientSession(BaseClientSession):

    def __init__(self, *args, network, server, timeout=30, on_connect_callback=None, **kwargs):
        self.network = network
        self.server = server
        super().__init__(*args, **kwargs)
        self._on_disconnect_controller = StreamController()
        self.on_disconnected = self._on_disconnect_controller.stream
        self.framer.max_size = self.max_errors = 1 << 32
        self.bw_limit = -1
        self.timeout = timeout
        self.max_seconds_idle = timeout * 2
        self.response_time: Optional[float] = None
        self.connection_latency: Optional[float] = None
        self._response_samples = 0
        self.pending_amount = 0
        self._on_connect_cb = on_connect_callback or (lambda: None)
        self.trigger_urgent_reconnect = asyncio.Event()
        # one request per second of timeout, conservative default
        self._semaphore = asyncio.Semaphore(self.timeout * 2)

    @property
    def available(self):
        return not self.is_closing() and self.response_time is not None

    @property
    def server_address_and_port(self) -> Optional[Tuple[str, int]]:
        if not self.transport:
            return None
        return self.transport.get_extra_info('peername')

    async def send_timed_server_version_request(self, args=(), timeout=None):
        timeout = timeout or self.timeout
        log.debug("send version request to %s:%i", *self.server)
        start = perf_counter()
        result = await asyncio.wait_for(
            super().send_request('server.version', args), timeout=timeout
        )
        current_response_time = perf_counter() - start
        response_sum = (self.response_time or 0) * self._response_samples + current_response_time
        self.response_time = response_sum / (self._response_samples + 1)
        self._response_samples += 1
        return result

    async def send_request(self, method, args=()):
        self.pending_amount += 1
        async with self._semaphore:
            return await self._send_request(method, args)

    async def _send_request(self, method, args=()):
        log.debug("send %s to %s:%i", method, *self.server)
        try:
            if method == 'server.version':
                reply = await self.send_timed_server_version_request(args, self.timeout)
            else:
                reply = await asyncio.wait_for(
                    super().send_request(method, args), timeout=self.timeout
                )
            log.debug("got reply for %s from %s:%i", method, *self.server)
            return reply
        except (RPCError, ProtocolError) as e:
            # str.find() returns -1 (truthy) when absent and does not interpret regexes,
            # so the original `str(e).find('.*no such .*transaction.*')` matched every
            # error; test for the substrings instead.
            if 'no such' in str(e) and 'transaction' in str(e):
                # shouldn't the server return None instead?
                return None
            log.warning(
                "Wallet server (%s:%i) returned an error. Code: %s Message: %s",
                *self.server, *e.args
            )
            raise e
        except ConnectionError:
            log.warning("connection to %s:%i lost", *self.server)
            self.synchronous_close()
            raise
        except asyncio.TimeoutError:
            log.info("timeout sending %s to %s:%i", method, *self.server)
            raise
        except asyncio.CancelledError:
            log.info("cancelled sending %s to %s:%i", method, *self.server)
            self.synchronous_close()
            raise
        finally:
            self.pending_amount -= 1

    async def ensure_session(self):
        # Handles reconnecting and maintaining a session alive
        # TODO: change to 'ping' on newer protocol (above 1.2)
        retry_delay = default_delay = 1.0
        while True:
            try:
                if self.is_closing():
                    await self.create_connection(self.timeout)
                    await self.ensure_server_version()
                    self._on_connect_cb()
                if (perf_counter() - self.last_send) > self.max_seconds_idle or self.response_time is None:
                    await self.ensure_server_version()
                retry_delay = default_delay
            except (asyncio.TimeoutError, OSError):
                await self.close()
                retry_delay = min(60, retry_delay * 2)
                log.debug("Wallet server timeout (retry in %s seconds): %s:%d", retry_delay, *self.server)
            try:
                await asyncio.wait_for(self.trigger_urgent_reconnect.wait(), timeout=retry_delay)
            except asyncio.TimeoutError:
                pass
            finally:
                self.trigger_urgent_reconnect.clear()

    async def ensure_server_version(self, required='1.2', timeout=3):
        return await asyncio.wait_for(
            self.send_request('server.version', [__version__, required]), timeout=timeout
        )

    async def create_connection(self, timeout=6):
        connector = Connector(lambda: self, *self.server)
        start = perf_counter()
        await asyncio.wait_for(connector.create_connection(), timeout=timeout)
        self.connection_latency = perf_counter() - start

    async def handle_request(self, request):
        controller = self.network.subscription_controllers[request.method]
        controller.add(request.args)

    def connection_lost(self, exc):
        log.debug("Connection lost: %s:%d", *self.server)
        super().connection_lost(exc)
        self.response_time = None
        self.connection_latency = None
        self._response_samples = 0
        self.pending_amount = 0
        self._on_disconnect_controller.add(True)
def __init__(self, ledger):  # type: (baseledger.BaseLedger) -> BaseHeaders
    self.ledger = ledger
    self._size = None
    self._on_change_controller = StreamController()
    self.on_changed = self._on_change_controller.stream
class StratumClientProtocol(LineOnlyReceiver):
    delimiter = b'\n'
    MAX_LENGTH = 100000

    def __init__(self):
        self.request_id = 0
        self.lookup_table = {}
        self.session = {}
        self.on_disconnected_controller = StreamController()
        self.on_disconnected = self.on_disconnected_controller.stream

    def _get_id(self):
        self.request_id += 1
        return self.request_id

    @property
    def _ip(self):
        return self.transport.getPeer().host

    def get_session(self):
        return self.session

    def connectionMade(self):
        try:
            self.transport.setTcpNoDelay(True)
            self.transport.setTcpKeepAlive(True)
            self.transport.socket.setsockopt(
                socket.SOL_TCP, socket.TCP_KEEPIDLE, 120  # Seconds before sending keepalive probes
            )
            self.transport.socket.setsockopt(
                socket.SOL_TCP, socket.TCP_KEEPINTVL, 1  # Interval in seconds between keepalive probes
            )
            self.transport.socket.setsockopt(
                socket.SOL_TCP, socket.TCP_KEEPCNT, 5  # Failed keepalive probes before declaring other end dead
            )
        except Exception as err:
            # Supported only by the socket transport,
            # but there's really no better place in code to trigger this.
            log.warning("Error setting up socket: %s", err)

    def connectionLost(self, reason=None):
        self.on_disconnected_controller.add(True)

    def lineReceived(self, line):
        try:
            # `line` comes in as a byte string but `json.loads` automatically converts everything to
            # unicode. For keys it's not a big deal but for values there is an expectation
            # everywhere else in wallet code that most values are byte strings.
            message = json.loads(line, object_hook=lambda obj: {
                k: unicode2bytes(v) for k, v in obj.items()
            })
        except (ValueError, TypeError):
            raise ValueError("Cannot decode message '{}'".format(line.strip()))
        if message.get('id'):
            try:
                d = self.lookup_table.pop(message['id'])
                if message.get('error'):
                    d.errback(RuntimeError(message['error']))
                else:
                    d.callback(message.get('result'))
            except KeyError:
                raise LookupError(
                    "Lookup for deferred object for message ID '{}' failed.".format(message['id'])
                )
        elif message.get('method') in self.network.subscription_controllers:
            controller = self.network.subscription_controllers[message['method']]
            controller.add(message.get('params'))
        else:
            log.warning("Cannot handle message '%s'", line)

    def rpc(self, method, *args):
        message_id = self._get_id()
        message = json.dumps({
            'id': message_id,
            'method': method,
            'params': [bytes2unicode(arg) for arg in args]
        })
        self.sendLine(message.encode('latin-1'))
        d = self.lookup_table[message_id] = defer.Deferred()
        return d
class BaseNetwork:

    def __init__(self, ledger):
        self.config = ledger.config
        self.client = None
        self.service = None
        self.running = False
        self._on_connected_controller = StreamController()
        self.on_connected = self._on_connected_controller.stream
        self._on_header_controller = StreamController()
        self.on_header = self._on_header_controller.stream
        self._on_status_controller = StreamController()
        self.on_status = self._on_status_controller.stream
        self.subscription_controllers = {
            'blockchain.headers.subscribe': self._on_header_controller,
            'blockchain.address.subscribe': self._on_status_controller,
        }

    @defer.inlineCallbacks
    def start(self):
        for server in cycle(self.config['default_servers']):
            connection_string = 'tcp:{}:{}'.format(*server)
            endpoint = clientFromString(reactor, connection_string)
            log.debug("Attempting connection to SPV wallet server: %s", connection_string)
            self.service = ClientService(endpoint, StratumClientFactory(self))
            self.service.startService()
            try:
                self.client = yield self.service.whenConnected(failAfterFailures=2)
                yield self.ensure_server_version()
                log.info("Successfully connected to SPV wallet server: %s", connection_string)
                self._on_connected_controller.add(True)
                yield self.client.on_disconnected.first
            except CancelledError:
                return
            except Exception:  # pylint: disable=broad-except
                log.exception("Connecting to %s raised an exception:", connection_string)
            finally:
                self.client = None
            if not self.running:
                return

    def stop(self):
        self.running = False
        if self.service is not None:
            self.service.stopService()
        if self.is_connected:
            return self.client.on_disconnected.first
        else:
            return defer.succeed(True)

    @property
    def is_connected(self):
        return self.client is not None and self.client.connected

    def rpc(self, list_or_method, *args):
        if self.is_connected:
            return self.client.rpc(list_or_method, *args)
        else:
            raise ConnectionError("Attempting to send rpc request when connection is not available.")

    def ensure_server_version(self, required='1.2'):
        return self.rpc('server.version', __version__, required)

    def broadcast(self, raw_transaction):
        return self.rpc('blockchain.transaction.broadcast', raw_transaction)

    def get_history(self, address):
        return self.rpc('blockchain.address.get_history', address)

    def get_transaction(self, tx_hash):
        return self.rpc('blockchain.transaction.get', tx_hash)

    def get_merkle(self, tx_hash, height):
        return self.rpc('blockchain.transaction.get_merkle', tx_hash, height)

    def get_headers(self, height, count=10000):
        return self.rpc('blockchain.block.headers', height, count)

    def subscribe_headers(self):
        return self.rpc('blockchain.headers.subscribe', True)

    def subscribe_address(self, address):
        return self.rpc('blockchain.address.subscribe', address)
class BaseNetwork:

    def __init__(self, ledger):
        self.config = ledger.config
        self.client: ClientSession = None
        self.session_pool: SessionPool = None
        self.running = False
        self.remote_height: int = 0
        self._on_connected_controller = StreamController()
        self.on_connected = self._on_connected_controller.stream
        self._on_header_controller = StreamController()
        self.on_header = self._on_header_controller.stream
        self._on_status_controller = StreamController()
        self.on_status = self._on_status_controller.stream
        self.subscription_controllers = {
            'blockchain.headers.subscribe': self._on_header_controller,
            'blockchain.address.subscribe': self._on_status_controller,
        }

    async def start(self):
        self.running = True
        connect_timeout = self.config.get('connect_timeout', 6)
        self.session_pool = SessionPool(network=self, timeout=connect_timeout)
        self.session_pool.start(self.config['default_servers'])
        self.on_header.listen(self._update_remote_height)
        while True:
            try:
                self.client = await self.pick_fastest_session()
                if self.is_connected:
                    await self.ensure_server_version()
                    self._update_remote_height((await self.subscribe_headers(),))
                    log.info("Successfully connected to SPV wallet server: %s:%d", *self.client.server)
                    self._on_connected_controller.add(True)
                    await self.client.on_disconnected.first
            except CancelledError:
                self.running = False
            except asyncio.TimeoutError:
                log.warning("Timed out while trying to find a server!")
            except Exception:  # pylint: disable=broad-except
                log.exception("Exception while trying to find a server!")
            if not self.running:
                return
            elif self.client:
                await self.client.close()
                self.client.connection.cancel_pending_requests()

    async def stop(self):
        self.running = False
        if self.session_pool:
            self.session_pool.stop()
        if self.is_connected:
            disconnected = self.client.on_disconnected.first
            await self.client.close()
            await disconnected

    @property
    def is_connected(self):
        return self.client is not None and not self.client.is_closing()

    def rpc(self, list_or_method, args):
        if self.is_connected:
            return self.client.send_request(list_or_method, args)
        else:
            raise ConnectionError("Attempting to send rpc request when connection is not available.")

    async def pick_fastest_session(self):
        sessions = await self.session_pool.get_online_sessions()
        done, pending = await asyncio.wait([
            self.probe_session(session)
            for session in sessions if not session.is_closing()
        ], return_when='FIRST_COMPLETED')
        for task in pending:
            task.cancel()
        for session in done:
            return await session

    async def probe_session(self, session: ClientSession):
        await session.send_request('server.banner')
        return session

    def _update_remote_height(self, header_args):
        self.remote_height = header_args[0]["height"]

    def ensure_server_version(self, required='1.2'):
        return self.rpc('server.version', [__version__, required])

    def broadcast(self, raw_transaction):
        return self.rpc('blockchain.transaction.broadcast', [raw_transaction])

    def get_history(self, address):
        return self.rpc('blockchain.address.get_history', [address])

    def get_transaction(self, tx_hash):
        return self.rpc('blockchain.transaction.get', [tx_hash])

    def get_transaction_height(self, tx_hash):
        return self.rpc('blockchain.transaction.get_height', [tx_hash])

    def get_merkle(self, tx_hash, height):
        return self.rpc('blockchain.transaction.get_merkle', [tx_hash, height])

    def get_headers(self, height, count=10000):
        return self.rpc('blockchain.block.headers', [height, count])

    def subscribe_headers(self):
        return self.rpc('blockchain.headers.subscribe', [True])

    def subscribe_address(self, address):
        return self.rpc('blockchain.address.subscribe', [address])
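A hedged sketch of how a ledger-side caller might drive this network object; the config shape (`'default_servers'` as a list of `(host, port)` pairs) is taken from the snippets above, and the exact shutdown interaction between `stop()` and the `start()` loop is assumed:

async def example_network_lifecycle(ledger):
    network = BaseNetwork(ledger)  # expects ledger.config['default_servers'] = [(host, port), ...]
    start_task = asyncio.ensure_future(network.start())
    await network.on_connected.first        # resolves once a fastest session has been picked
    headers = await network.get_headers(0, 10)
    print('remote height:', network.remote_height)
    await network.stop()
    start_task.cancel()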
class BaseHeaders:

    header_size = 80
    verify_bits_to_target = True

    def __init__(self, ledger):  # type: (baseledger.BaseLedger) -> BaseHeaders
        self.ledger = ledger
        self._size = None
        self._on_change_controller = StreamController()
        self.on_changed = self._on_change_controller.stream

    @property
    def path(self):
        return os.path.join(self.ledger.path, 'headers')

    def touch(self):
        if not os.path.exists(self.path):
            with open(self.path, 'wb'):
                pass

    @property
    def height(self):
        return len(self)

    def sync_read_length(self):
        return os.path.getsize(self.path) // self.header_size

    def sync_read_header(self, height):
        if 0 <= height < len(self):
            with open(self.path, 'rb') as f:
                f.seek(height * self.header_size)
                return f.read(self.header_size)

    def __len__(self):
        if self._size is None:
            self._size = self.sync_read_length()
        return self._size

    def __getitem__(self, height):
        assert not isinstance(height, slice), \
            "Slicing of header chain has not been implemented yet."
        header = self.sync_read_header(height)
        return self._deserialize(height, header)

    @execute_serially
    @defer.inlineCallbacks
    def connect(self, start, headers):
        yield threads.deferToThread(self._sync_connect, start, headers)

    def _sync_connect(self, start, headers):
        previous_header = None
        for header in self._iterate_headers(start, headers):
            height = header['block_height']
            if previous_header is None and height > 0:
                previous_header = self[height - 1]
            self._verify_header(height, header, previous_header)
            previous_header = header
        with open(self.path, 'r+b') as f:
            f.seek(start * self.header_size)
            f.write(headers)
            f.truncate()
        _old_size = self._size
        self._size = self.sync_read_length()
        change = self._size - _old_size
        log.info('{}: added {} header blocks, final height {}'.format(
            self.ledger.get_id(), change, self.height
        ))
        self._on_change_controller.add(change)

    def _iterate_headers(self, height, headers):
        assert len(headers) % self.header_size == 0
        for idx in range(len(headers) // self.header_size):
            start, end = idx * self.header_size, (idx + 1) * self.header_size
            header = headers[start:end]
            yield self._deserialize(height + idx, header)

    def _verify_header(self, height, header, previous_header):
        previous_hash = self._hash_header(previous_header)
        assert previous_hash == header['prev_block_hash'], \
            "prev hash mismatch: {} vs {}".format(previous_hash, header['prev_block_hash'])

        bits, target = self._calculate_next_work_required(height, previous_header, header)
        assert bits == header['bits'], \
            "bits mismatch: {} vs {} (hash: {})".format(bits, header['bits'], self._hash_header(header))

        # TODO: FIX ME!!!
        #_pow_hash = self._pow_hash_header(header)
        #assert int(b'0x' + _pow_hash, 16) <= target, \
        #    "insufficient proof of work: {} vs target {}".format(
        #        int(b'0x' + _pow_hash, 16), target)

    @staticmethod
    def _serialize(header):
        return b''.join([
            int_to_hex(header['version'], 4),
            rev_hex(header['prev_block_hash']),
            rev_hex(header['merkle_root']),
            int_to_hex(int(header['timestamp']), 4),
            int_to_hex(int(header['bits']), 4),
            int_to_hex(int(header['nonce']), 4)
        ])

    @staticmethod
    def _deserialize(height, header):
        version, = struct.unpack('<I', header[:4])
        timestamp, bits, nonce = struct.unpack('<III', header[68:80])
        return {
            'block_height': height,
            'version': version,
            'prev_block_hash': hash_encode(header[4:36]),
            'merkle_root': hash_encode(header[36:68]),
            'timestamp': timestamp,
            'bits': bits,
            'nonce': nonce,
        }

    def _hash_header(self, header):
        if header is None:
            return b'0' * 64
        return hash_encode(double_sha256(unhexlify(self._serialize(header))))

    def _pow_hash_header(self, header):
        if header is None:
            return b'0' * 64
        return hash_encode(pow_hash(unhexlify(self._serialize(header))))

    def _calculate_next_work_required(self, height, first, last):
        if height == 0:
            return self.ledger.genesis_bits, self.ledger.max_target

        if self.verify_bits_to_target:
            bits = last['bits']
            bitsN = (bits >> 24) & 0xff
            assert 0x03 <= bitsN <= 0x1d, \
                "First part of bits should be in [0x03, 0x1d], but it was {}".format(hex(bitsN))
            bitsBase = bits & 0xffffff
            assert 0x8000 <= bitsBase <= 0x7fffff, \
                "Second part of bits should be in [0x8000, 0x7fffff] but it was {}".format(bitsBase)

        # new target
        retargetTimespan = self.ledger.target_timespan
        nActualTimespan = last['timestamp'] - first['timestamp']
        nModulatedTimespan = retargetTimespan + (nActualTimespan - retargetTimespan) // 8
        nMinTimespan = retargetTimespan - (retargetTimespan // 8)
        nMaxTimespan = retargetTimespan + (retargetTimespan // 2)

        # Limit adjustment step
        if nModulatedTimespan < nMinTimespan:
            nModulatedTimespan = nMinTimespan
        elif nModulatedTimespan > nMaxTimespan:
            nModulatedTimespan = nMaxTimespan

        # Retarget
        bnPowLimit = _ArithUint256(self.ledger.max_target)
        bnNew = _ArithUint256.SetCompact(last['bits'])
        bnNew *= nModulatedTimespan
        # divide by the target timespan; the original `bnNew //= nModulatedTimespan`
        # cancelled the multiplication above and made the retarget a no-op
        bnNew //= retargetTimespan
        if bnNew > bnPowLimit:
            bnNew = bnPowLimit
        return bnNew.GetCompact(), bnNew._value
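The `bits` field checked above uses Bitcoin's compact target encoding: the high byte is an exponent and the low three bytes a mantissa. A quick worked example using the standard encoding, independent of this codebase:

# bits = 0x1b0404cb: exponent 0x1b (27), mantissa 0x0404cb
# target = mantissa * 256 ** (exponent - 3)
bits = 0x1b0404cb
exponent = bits >> 24
mantissa = bits & 0xffffff
target = mantissa * 256 ** (exponent - 3)
assert hex(target) == '0x404cb' + '0' * 48
# both sanity asserts in _calculate_next_work_required hold for this value:
assert 0x03 <= exponent <= 0x1d
assert 0x8000 <= mantissa <= 0x7fffff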
class BaseNetwork:

    def __init__(self, ledger):
        self.config = ledger.config
        self.session_pool = SessionPool(network=self, timeout=self.config.get('connect_timeout', 6))
        self.client: Optional[ClientSession] = None
        self._switch_task: Optional[asyncio.Task] = None
        self.running = False
        self.remote_height: int = 0
        self._on_connected_controller = StreamController()
        self.on_connected = self._on_connected_controller.stream
        self._on_header_controller = StreamController(merge_repeated_events=True)
        self.on_header = self._on_header_controller.stream
        self._on_status_controller = StreamController(merge_repeated_events=True)
        self.on_status = self._on_status_controller.stream
        self.subscription_controllers = {
            'blockchain.headers.subscribe': self._on_header_controller,
            'blockchain.address.subscribe': self._on_status_controller,
        }

    async def switch_forever(self):
        while self.running:
            if self.is_connected:
                await self.client.on_disconnected.first
                self.client = None
                continue
            self.client = await self.session_pool.wait_for_fastest_session()
            log.info("Switching to SPV wallet server: %s:%d", *self.client.server)
            self._on_connected_controller.add(True)
            try:
                self._update_remote_height((await self.subscribe_headers(),))
                log.info("Subscribed to headers: %s:%d", *self.client.server)
            except (asyncio.TimeoutError, ConnectionError):
                log.info("Switching to %s:%d timed out, closing and retrying.", *self.client.server)
                self.client.synchronous_close()
                self.client = None

    async def start(self):
        self.running = True
        self._switch_task = asyncio.ensure_future(self.switch_forever())
        self.session_pool.start(self.config['default_servers'])
        self.on_header.listen(self._update_remote_height)

    async def stop(self):
        if self.running:
            self.running = False
            self._switch_task.cancel()
            self.session_pool.stop()

    @property
    def is_connected(self):
        return self.client and not self.client.is_closing()

    def rpc(self, list_or_method, args, restricted=True):
        session = self.client if restricted else self.session_pool.fastest_session
        if session and not session.is_closing():
            return session.send_request(list_or_method, args)
        else:
            self.session_pool.trigger_nodelay_connect()
            raise ConnectionError("Attempting to send rpc request when connection is not available.")

    async def retriable_call(self, function, *args, **kwargs):
        while self.running:
            if not self.is_connected:
                log.warning("Wallet server unavailable, waiting for it to come back and retry.")
                await self.on_connected.first
            await self.session_pool.wait_for_fastest_session()
            try:
                return await function(*args, **kwargs)
            except asyncio.TimeoutError:
                log.warning("Wallet server call timed out, retrying.")
            except ConnectionError:
                pass
        raise asyncio.CancelledError()  # if we got here, we are shutting down

    def _update_remote_height(self, header_args):
        self.remote_height = header_args[0]["height"]

    def get_transaction(self, tx_hash, known_height=None):
        # use any server if it's old, otherwise restrict to who gave us the history;
        # the original chained comparison `0 > known_height > self.remote_height - 10`
        # could never be true (it reads as AND), the intent is "unknown, unconfirmed, or recent"
        restricted = not known_height or known_height < 0 or known_height > self.remote_height - 10
        return self.rpc('blockchain.transaction.get', [tx_hash], restricted)

    def get_transaction_height(self, tx_hash, known_height=None):
        restricted = not known_height or known_height < 0 or known_height > self.remote_height - 10
        return self.rpc('blockchain.transaction.get_height', [tx_hash], restricted)

    def get_merkle(self, tx_hash, height):
        restricted = height < 0 or height > self.remote_height - 10
        return self.rpc('blockchain.transaction.get_merkle', [tx_hash, height], restricted)

    def get_headers(self, height, count=10000):
        return self.rpc('blockchain.block.headers', [height, count])

    # --- Subscribes, history and broadcasts are always aimed towards the master client directly

    def get_history(self, address):
        return self.rpc('blockchain.address.get_history', [address], True)

    def broadcast(self, raw_transaction):
        return self.rpc('blockchain.transaction.broadcast', [raw_transaction], True)

    def subscribe_headers(self):
        return self.rpc('blockchain.headers.subscribe', [True], True)

    async def subscribe_address(self, address):
        try:
            return await self.rpc('blockchain.address.subscribe', [address], True)
        except asyncio.TimeoutError:
            # abort and cancel, we can't lose a subscription, it will happen again on reconnect
            if self.client:
                self.client.abort()
            raise asyncio.CancelledError()
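A hedged sketch of `retriable_call` in use; the argument shape mirrors the `get_history` signature above:

async def fetch_history_with_retries(network, address):
    # retries transparently across timeouts and server switches;
    # raises CancelledError only once the network is shutting down
    return await network.retriable_call(network.get_history, address)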
class StratumClientProtocol(LineOnlyReceiver):
    delimiter = b'\n'
    MAX_LENGTH = 2000000

    def __init__(self):
        self.request_id = 0
        self.lookup_table = {}
        self.session = {}
        self.network = None
        self.on_disconnected_controller = StreamController()
        self.on_disconnected = self.on_disconnected_controller.stream

    def _get_id(self):
        self.request_id += 1
        return self.request_id

    @property
    def _ip(self):
        return self.transport.getPeer().host

    def get_session(self):
        return self.session

    def connectionMade(self):
        try:
            self.transport.setTcpNoDelay(True)
            self.transport.setTcpKeepAlive(True)
            self.transport.socket.setsockopt(
                socket.SOL_TCP, socket.TCP_KEEPIDLE, 120  # Seconds before sending keepalive probes
            )
            self.transport.socket.setsockopt(
                socket.SOL_TCP, socket.TCP_KEEPINTVL, 1  # Interval in seconds between keepalive probes
            )
            self.transport.socket.setsockopt(
                socket.SOL_TCP, socket.TCP_KEEPCNT, 5  # Failed keepalive probes before declaring other end dead
            )
        except Exception as err:  # pylint: disable=broad-except
            # Supported only by the socket transport,
            # but there's really no better place in code to trigger this.
            log.warning("Error setting up socket: %s", err)

    def connectionLost(self, reason=None):
        self.on_disconnected_controller.add(True)

    def lineReceived(self, line):
        log.debug('received: %s', line)
        try:
            message = json.loads(line)
        except (ValueError, TypeError):
            raise ValueError("Cannot decode message '{}'".format(line.strip()))
        if message.get('id'):
            try:
                d = self.lookup_table.pop(message['id'])
                if message.get('error'):
                    d.errback(RuntimeError(message['error']))
                else:
                    d.callback(message.get('result'))
            except KeyError:
                raise LookupError(
                    "Lookup for deferred object for message ID '{}' failed.".format(message['id'])
                )
        elif message.get('method') in self.network.subscription_controllers:
            controller = self.network.subscription_controllers[message['method']]
            controller.add(message.get('params'))
        else:
            log.warning("Cannot handle message '%s'", line)

    def rpc(self, method, *args):
        message_id = self._get_id()
        message = json.dumps({
            'id': message_id,
            'method': method,
            'params': args
        })
        log.debug('sent: %s', message)
        self.sendLine(message.encode('latin-1'))
        d = self.lookup_table[message_id] = defer.Deferred()
        return d
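For orientation, the wire traffic this protocol produces in `rpc()` and parses in `lineReceived()` is newline-delimited JSON-RPC; the payload values below are illustrative, not captured from a real server:

# request written by protocol.rpc('blockchain.headers.subscribe', True):
#   {"id": 1, "method": "blockchain.headers.subscribe", "params": [true]}
# reply dispatched by lineReceived() to the Deferred stored in lookup_table[1]:
#   {"id": 1, "result": {"height": 400000, "hex": "..."}}
# unsolicited notification routed to network.subscription_controllers[method]:
#   {"method": "blockchain.headers.subscribe", "params": [{"height": 400001, "hex": "..."}]}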
class BaseNetwork:

    def __init__(self, ledger):
        self.config = ledger.config
        self.client: ClientSession = None
        self.running = False
        self._on_connected_controller = StreamController()
        self.on_connected = self._on_connected_controller.stream
        self._on_header_controller = StreamController()
        self.on_header = self._on_header_controller.stream
        self._on_status_controller = StreamController()
        self.on_status = self._on_status_controller.stream
        self.subscription_controllers = {
            'blockchain.headers.subscribe': self._on_header_controller,
            'blockchain.address.subscribe': self._on_status_controller,
        }

    async def pick_fastest_server(self, timeout):
        async def __probe(server):
            client = ClientSession(network=self, server=server)
            try:
                await client.create_connection(timeout)
                await client.send_request('server.banner')
                return client
            except (asyncio.TimeoutError, asyncio.CancelledError):
                if not client.is_closing():
                    client.abort()
                raise
            except Exception:  # pylint: disable=broad-except
                log.exception("Connecting to %s:%d raised an exception:", *server)
        futures = []
        for server in self.config['default_servers']:
            futures.append(__probe(server))
        done, pending = await asyncio.wait(futures, return_when='FIRST_COMPLETED')
        for task in pending:
            task.cancel()
        for client in done:
            return await client

    async def start(self):
        self.running = True
        delay = 0.0
        connect_timeout = self.config.get('connect_timeout', 6)
        while True:
            try:
                self.client = await self.pick_fastest_server(connect_timeout)
                if self.is_connected:
                    await self.ensure_server_version()
                    log.info("Successfully connected to SPV wallet server: %s:%d", *self.client.server)
                    self._on_connected_controller.add(True)
                    delay = 0.0
                    await self.client.on_disconnected.first
            except CancelledError:
                self.running = False
            except asyncio.TimeoutError:
                log.warning("Timed out while trying to find a server!")
            except Exception:  # pylint: disable=broad-except
                log.exception("Exception while trying to find a server!")
            if not self.running:
                return
            elif self.client:
                await self.client.close()
                self.client.connection.cancel_pending_requests()
            await asyncio.sleep(delay)
            delay = min(delay + 1.0, 10.0)

    async def stop(self):
        self.running = False
        if self.is_connected:
            disconnected = self.client.on_disconnected.first
            await self.client.close()
            await disconnected

    @property
    def is_connected(self):
        return self.client is not None and not self.client.is_closing()

    def rpc(self, list_or_method, args):
        if self.is_connected:
            return self.client.send_request(list_or_method, args)
        else:
            raise ConnectionError("Attempting to send rpc request when connection is not available.")

    def ensure_server_version(self, required='1.2'):
        return self.rpc('server.version', [__version__, required])

    def broadcast(self, raw_transaction):
        return self.rpc('blockchain.transaction.broadcast', [raw_transaction])

    def get_history(self, address):
        return self.rpc('blockchain.address.get_history', [address])

    def get_transaction(self, tx_hash):
        return self.rpc('blockchain.transaction.get', [tx_hash])

    def get_transaction_height(self, tx_hash):
        return self.rpc('blockchain.transaction.get_height', [tx_hash])

    def get_merkle(self, tx_hash, height):
        return self.rpc('blockchain.transaction.get_merkle', [tx_hash, height])

    def get_headers(self, height, count=10000):
        return self.rpc('blockchain.block.headers', [height, count])

    def subscribe_headers(self):
        return self.rpc('blockchain.headers.subscribe', [True])

    def subscribe_address(self, address):
        return self.rpc('blockchain.address.subscribe', [address])
class BaseNetwork:

    def __init__(self, ledger):
        self.config = ledger.config
        self.client: ClientSession = None
        self.running = False
        self._on_connected_controller = StreamController()
        self.on_connected = self._on_connected_controller.stream
        self._on_header_controller = StreamController()
        self.on_header = self._on_header_controller.stream
        self._on_status_controller = StreamController()
        self.on_status = self._on_status_controller.stream
        self.subscription_controllers = {
            'blockchain.headers.subscribe': self._on_header_controller,
            'blockchain.address.subscribe': self._on_status_controller,
        }

    async def start(self):
        self.running = True
        delay = 0.0
        for server in cycle(self.config['default_servers']):
            self.client = ClientSession(network=self, server=server)
            connection_string = '{}:{}'.format(*server)
            try:
                await self.client.create_connection()
                await self.ensure_server_version()
                log.info("Successfully connected to SPV wallet server: %s", connection_string)
                self._on_connected_controller.add(True)
                delay = 0.0
                await self.client.on_disconnected.first
            except (Exception, CancelledError):  # pylint: disable=broad-except
                log.exception("Connecting to %s raised an exception:", connection_string)
            if not self.running:
                return
            elif self.client:
                await self.client.close()
                self.client.connection.cancel_pending_requests()
            await asyncio.sleep(delay)
            delay = min(delay + 1.0, 10.0)

    async def stop(self):
        self.running = False
        if self.is_connected:
            disconnected = self.client.on_disconnected.first
            await self.client.close()
            await disconnected

    @property
    def is_connected(self):
        return self.client is not None and not self.client.is_closing()

    def rpc(self, list_or_method, *args):
        if self.is_connected:
            return self.client.send_request(list_or_method, args)
        else:
            raise ConnectionError("Attempting to send rpc request when connection is not available.")

    def ensure_server_version(self, required='1.2'):
        return self.rpc('server.version', __version__, required)

    def broadcast(self, raw_transaction):
        return self.rpc('blockchain.transaction.broadcast', raw_transaction)

    def get_history(self, address):
        return self.rpc('blockchain.address.get_history', address)

    def get_transaction(self, tx_hash):
        return self.rpc('blockchain.transaction.get', tx_hash)

    def get_merkle(self, tx_hash, height):
        return self.rpc('blockchain.transaction.get_merkle', tx_hash, height)

    def get_headers(self, height, count=10000):
        return self.rpc('blockchain.block.headers', height, count)

    def subscribe_headers(self):
        return self.rpc('blockchain.headers.subscribe', True)

    def subscribe_address(self, address):
        return self.rpc('blockchain.address.subscribe', address)
class BaseLedger(metaclass=LedgerRegistry):

    name: str
    symbol: str
    network_name: str

    database_class = BaseDatabase
    account_class = baseaccount.BaseAccount
    network_class = basenetwork.BaseNetwork
    transaction_class = basetransaction.BaseTransaction

    headers_class: Type[BaseHeaders]

    pubkey_address_prefix: bytes
    script_address_prefix: bytes
    extended_public_key_prefix: bytes
    extended_private_key_prefix: bytes

    default_fee_per_byte = 10

    def __init__(self, config=None):
        self.config = config or {}
        self.db: BaseDatabase = self.config.get('db') or self.database_class(
            os.path.join(self.path, "blockchain.db")
        )
        self.headers: BaseHeaders = self.config.get('headers') or self.headers_class(
            os.path.join(self.path, "headers")
        )
        self.network = self.config.get('network') or self.network_class(self)
        self.network.on_header.listen(self.receive_header)
        self.network.on_status.listen(self.receive_status)
        self.accounts = []
        self.fee_per_byte: int = self.config.get('fee_per_byte', self.default_fee_per_byte)
        self._on_transaction_controller = StreamController()
        self.on_transaction = self._on_transaction_controller.stream
        self.on_transaction.listen(lambda e: log.info(
            '(%s) on_transaction: address=%s, height=%s, is_verified=%s, tx.id=%s',
            self.get_id(), e.address, e.height, e.is_verified, e.tx.id
        ))
        self._on_header_controller = StreamController()
        self.on_header = self._on_header_controller.stream
        self.on_header.listen(lambda change: log.info(
            '%s: added %s header blocks, final height %s',
            self.get_id(), change, self.headers.height
        ))
        self._transaction_processing_locks = {}
        self._utxo_reservation_lock = defer.DeferredLock()
        self._header_processing_lock = defer.DeferredLock()

    @classmethod
    def get_id(cls):
        return '{}_{}'.format(cls.symbol.lower(), cls.network_name.lower())

    @classmethod
    def hash160_to_address(cls, h160):
        raw_address = cls.pubkey_address_prefix + h160
        return Base58.encode(bytearray(raw_address + double_sha256(raw_address)[0:4]))

    @staticmethod
    def address_to_hash160(address):
        return Base58.decode(address)[1:21]

    @classmethod
    def public_key_to_address(cls, public_key):
        return cls.hash160_to_address(hash160(public_key))

    @staticmethod
    def private_key_to_wif(private_key):
        return b'\x1c' + private_key + b'\x01'

    @property
    def path(self):
        return os.path.join(self.config['data_path'], self.get_id())

    @defer.inlineCallbacks
    def add_account(self, account: baseaccount.BaseAccount) -> defer.Deferred:
        self.accounts.append(account)
        if self.network.is_connected:
            yield self.update_account(account)

    @defer.inlineCallbacks
    def get_transaction(self, txhash):
        raw, _, _ = yield self.db.get_transaction(txhash)
        if raw is not None:
            defer.returnValue(self.transaction_class(raw))

    @defer.inlineCallbacks
    def get_private_key_for_address(self, address):
        match = yield self.db.get_address(address)
        if match:
            for account in self.accounts:
                if match['account'] == account.public_key.address:
                    defer.returnValue(account.get_private_key(match['chain'], match['position']))

    @defer.inlineCallbacks
    def get_effective_amount_estimators(self, funding_accounts: Iterable[baseaccount.BaseAccount]):
        estimators = []
        for account in funding_accounts:
            utxos = yield account.get_unspent_outputs()
            for utxo in utxos:
                estimators.append(utxo.get_estimator(self))
        defer.returnValue(estimators)

    @defer.inlineCallbacks
    def get_spendable_utxos(self, amount: int, funding_accounts):
        yield self._utxo_reservation_lock.acquire()
        try:
            txos = yield self.get_effective_amount_estimators(funding_accounts)
            selector = CoinSelector(
                txos, amount,
                self.transaction_class.output_class.pay_pubkey_hash(COIN, NULL_HASH32).get_fee(self)
            )
            spendables = selector.select()
            if spendables:
                yield self.reserve_outputs(s.txo for s in spendables)
        except Exception:
            log.exception('Failed to get spendable utxos:')
            raise
        finally:
            self._utxo_reservation_lock.release()
        defer.returnValue(spendables)

    def reserve_outputs(self, txos):
        return self.db.reserve_outputs(txos)

    def release_outputs(self, txos):
        return self.db.release_outputs(txos)

    @defer.inlineCallbacks
    def get_local_status(self, address):
        address_details = yield self.db.get_address(address)
        history = address_details['history'] or ''
        h = sha256(history.encode())
        defer.returnValue(hexlify(h))

    @defer.inlineCallbacks
    def get_local_history(self, address):
        address_details = yield self.db.get_address(address)
        history = address_details['history'] or ''
        parts = history.split(':')[:-1]
        defer.returnValue(list(zip(parts[0::2], map(int, parts[1::2]))))

    @staticmethod
    def get_root_of_merkle_tree(branches, branch_positions, working_branch):
        for i, branch in enumerate(branches):
            other_branch = unhexlify(branch)[::-1]
            other_branch_on_left = bool((branch_positions >> i) & 1)
            if other_branch_on_left:
                combined = other_branch + working_branch
            else:
                combined = working_branch + other_branch
            working_branch = double_sha256(combined)
        return hexlify(working_branch[::-1])

    @defer.inlineCallbacks
    def is_valid_transaction(self, tx, height):
        # bail out if we don't have the header for this height yet (rewritten from
        # the obscure `height <= len(self.headers) or defer.returnValue(False)`)
        if height > len(self.headers):
            defer.returnValue(False)
        merkle = yield self.network.get_merkle(tx.id, height)
        merkle_root = self.get_root_of_merkle_tree(merkle['merkle'], merkle['pos'], tx.hash)
        header = self.headers[height]
        defer.returnValue(merkle_root == header['merkle_root'])

    @defer.inlineCallbacks
    def start(self):
        if not os.path.exists(self.path):
            os.mkdir(self.path)
        yield defer.gatherResults([self.db.open(), self.headers.open()])
        first_connection = self.network.on_connected.first
        self.network.start()
        yield first_connection
        yield self.update_headers()
        yield self.network.subscribe_headers()
        yield self.update_accounts()

    @defer.inlineCallbacks
    def stop(self):
        yield self.network.stop()
        yield self.db.close()
        yield self.headers.close()

    @defer.inlineCallbacks
    def update_headers(self, height=None, headers=None, subscription_update=False):
        rewound = 0
        while True:

            if height is None or height > len(self.headers):
                # sometimes header subscription updates are for a header in the future
                # which can't be connected, so we do a normal header sync instead
                height = len(self.headers)
                headers = None
                subscription_update = False

            if not headers:
                header_response = yield self.network.get_headers(height, 2001)
                headers = header_response['hex']

            if not headers:
                # Nothing to do, network thinks we're already at the latest height.
                return

            added = yield self.headers.connect(height, unhexlify(headers))
            if added > 0:
                height += added
                self._on_header_controller.add(BlockHeightEvent(self.headers.height, added))

                if rewound > 0:
                    # we started rewinding blocks and apparently found
                    # a new chain
                    rewound = 0
                    yield self.db.rewind_blockchain(height)

                if subscription_update:
                    # subscription updates are for latest header already
                    # so we don't need to check if there are newer / more
                    # on another loop of update_headers(), just return instead
                    return

            elif added == 0:
                # we had headers to connect but none got connected, probably a reorganization
                height -= 1
                rewound += 1
                log.warning(
                    "Blockchain Reorganization: attempting rewind to height %s from starting height %s",
                    height, height + rewound
                )

            else:
                raise IndexError("headers.connect() returned negative number ({})".format(added))

            if height < 0:
                raise IndexError(
                    "Blockchain reorganization rewound all the way back to genesis hash. "
                    "Something is very wrong. Maybe you are on the wrong blockchain?"
                )

            if rewound >= 100:
                raise IndexError(
                    "Blockchain reorganization dropped {} headers. This is highly unusual. "
                    "Will not continue to attempt reorganizing. Please, delete the ledger "
                    "synchronization directory inside your wallet directory (folder: '{}') and "
                    "restart the program to synchronize from scratch.".format(rewound, self.get_id())
                )

            headers = None  # ready to download some more headers

            # if we made it this far and this was a subscription_update
            # it means something went wrong and now we're doing a more
            # robust sync, turn off subscription update shortcut
            subscription_update = False

    @defer.inlineCallbacks
    def receive_header(self, response):
        yield self._header_processing_lock.acquire()
        try:
            header = response[0]
            yield self.update_headers(
                height=header['height'], headers=header['hex'], subscription_update=True
            )
        finally:
            self._header_processing_lock.release()

    def update_accounts(self):
        return defer.DeferredList([self.update_account(a) for a in self.accounts])

    @defer.inlineCallbacks
    def update_account(self, account):  # type: (baseaccount.BaseAccount) -> defer.Deferred
        # Before subscribing, download history for any addresses that don't have any,
        # this avoids situation where we're getting status updates to addresses we know
        # need to update anyways. Continue to get history and create more addresses until
        # all missing addresses are created and history for them is fully restored.
        yield account.ensure_address_gap()
        addresses = yield account.get_addresses(max_used_times=0)
        while addresses:
            yield defer.DeferredList([self.update_history(a) for a in addresses])
            addresses = yield account.ensure_address_gap()

        # By this point all of the addresses should be restored and we
        # can now subscribe all of them to receive updates.
        all_addresses = yield account.get_addresses()
        yield defer.DeferredList(list(map(self.subscribe_history, all_addresses)))

    @defer.inlineCallbacks
    def update_history(self, address):
        remote_history = yield self.network.get_history(address)
        local_history = yield self.get_local_history(address)

        synced_history = []
        for i, (hex_id, remote_height) in enumerate(map(itemgetter('tx_hash', 'height'), remote_history)):

            synced_history.append((hex_id, remote_height))

            if i < len(local_history) and local_history[i] == (hex_id, remote_height):
                continue

            lock = self._transaction_processing_locks.setdefault(hex_id, defer.DeferredLock())

            yield lock.acquire()

            try:
                # see if we have a local copy of transaction, otherwise fetch it from server
                raw, _, is_verified = yield self.db.get_transaction(hex_id)
                save_tx = None
                if raw is None:
                    _raw = yield self.network.get_transaction(hex_id)
                    tx = self.transaction_class(unhexlify(_raw))
                    save_tx = 'insert'
                else:
                    tx = self.transaction_class(raw)

                if remote_height > 0 and not is_verified:
                    is_verified = yield self.is_valid_transaction(tx, remote_height)
                    is_verified = 1 if is_verified else 0
                    if save_tx is None:
                        save_tx = 'update'

                yield self.db.save_transaction_io(
                    save_tx, tx, remote_height, is_verified, address,
                    self.address_to_hash160(address),
                    ''.join('{}:{}:'.format(tx_id, tx_height) for tx_id, tx_height in synced_history)
                )

                log.debug(
                    "%s: sync'ed tx %s for address: %s, height: %s, verified: %s",
                    self.get_id(), hex_id, address, remote_height, is_verified
                )

                self._on_transaction_controller.add(TransactionEvent(address, tx, remote_height, is_verified))

            except Exception:
                log.exception('Failed to synchronize transaction:')
                raise

            finally:
                lock.release()
                if not lock.locked and hex_id in self._transaction_processing_locks:
                    del self._transaction_processing_locks[hex_id]

    @defer.inlineCallbacks
    def subscribe_history(self, address):
        remote_status = yield self.network.subscribe_address(address)
        local_status = yield self.get_local_status(address)
        if local_status != remote_status:
            yield self.update_history(address)

    @defer.inlineCallbacks
    def receive_status(self, response):
        address, remote_status = response
        local_status = yield self.get_local_status(address)
        if local_status != remote_status:
            yield self.update_history(address)

    def broadcast(self, tx):
        return self.network.broadcast(hexlify(tx.raw).decode())
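A self-contained check of `get_root_of_merkle_tree` above, using a two-leaf tree built from hypothetical leaf data; `_double_sha256` mirrors the module-level `double_sha256` helper:

import hashlib
from binascii import hexlify

def _double_sha256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

# two hypothetical leaf hashes
left = _double_sha256(b'tx-a')
right = _double_sha256(b'tx-b')
expected_root = hexlify(_double_sha256(left + right)[::-1])

# branch_positions = 0 -> bit 0 unset -> the sibling sits on the right
got = BaseLedger.get_root_of_merkle_tree(
    [hexlify(right[::-1]).decode()],  # branches arrive hex-encoded and byte-reversed
    0,
    left,  # working branch: the leaf hash of the transaction being proven
)
assert got == expected_root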
class BaseLedger(metaclass=LedgerRegistry): name: str symbol: str network_name: str database_class = BaseDatabase account_class = baseaccount.BaseAccount network_class = basenetwork.BaseNetwork transaction_class = basetransaction.BaseTransaction headers_class: Type[BaseHeaders] pubkey_address_prefix: bytes script_address_prefix: bytes extended_public_key_prefix: bytes extended_private_key_prefix: bytes default_fee_per_byte = 10 def __init__(self, config=None): self.config = config or {} self.db: BaseDatabase = self.config.get('db') or self.database_class( os.path.join(self.path, "blockchain.db")) self.db.ledger = self self.headers: BaseHeaders = self.config.get( 'headers') or self.headers_class(os.path.join( self.path, "headers")) self.network = self.config.get('network') or self.network_class(self) self.network.on_header.listen(self.receive_header) self.network.on_status.listen(self.process_status_update) self.accounts = [] self.fee_per_byte: int = self.config.get('fee_per_byte', self.default_fee_per_byte) self._on_transaction_controller = StreamController() self.on_transaction = self._on_transaction_controller.stream self.on_transaction.listen(lambda e: log.info( '(%s) on_transaction: address=%s, height=%s, is_verified=%s, tx.id=%s', self.get_id(), e.address, e.tx.height, e.tx.is_verified, e.tx.id)) self._on_address_controller = StreamController() self.on_address = self._on_address_controller.stream self.on_address.listen(lambda e: log.info('(%s) on_address: %s', self.get_id(), e.addresses)) self._on_header_controller = StreamController() self.on_header = self._on_header_controller.stream self.on_header.listen(lambda change: log.info( '%s: added %s header blocks, final height %s', self.get_id(), change, self.headers.height)) self._tx_cache = pylru.lrucache(100000) self._update_tasks = TaskGroup() self._utxo_reservation_lock = asyncio.Lock() self._header_processing_lock = asyncio.Lock() self._address_update_locks: Dict[str, asyncio.Lock] = {} self.coin_selection_strategy = None self._known_addresses_out_of_sync = set() @classmethod def get_id(cls): return '{}_{}'.format(cls.symbol.lower(), cls.network_name.lower()) @classmethod def hash160_to_address(cls, h160): raw_address = cls.pubkey_address_prefix + h160 return Base58.encode( bytearray(raw_address + double_sha256(raw_address)[0:4])) @staticmethod def address_to_hash160(address): return Base58.decode(address)[1:21] @classmethod def is_valid_address(cls, address): decoded = Base58.decode_check(address) return decoded[0] == cls.pubkey_address_prefix[0] @classmethod def public_key_to_address(cls, public_key): return cls.hash160_to_address(hash160(public_key)) @staticmethod def private_key_to_wif(private_key): return b'\x1c' + private_key + b'\x01' @property def path(self): return os.path.join(self.config['data_path'], self.get_id()) def add_account(self, account: baseaccount.BaseAccount): self.accounts.append(account) async def _get_account_and_address_info_for_address(self, address): match = await self.db.get_address(address=address) if match: for account in self.accounts: if match['account'] == account.public_key.address: return account, match async def get_private_key_for_address(self, address) -> Optional[PrivateKey]: match = await self._get_account_and_address_info_for_address(address) if match: account, address_info = match return account.get_private_key(address_info['chain'], address_info['position']) return None async def get_public_key_for_address(self, address) -> Optional[PubKey]: match = await 
self._get_account_and_address_info_for_address(address) if match: account, address_info = match return account.get_public_key(address_info['chain'], address_info['position']) return None async def get_account_for_address(self, address): match = await self._get_account_and_address_info_for_address(address) if match: return match[0] async def get_effective_amount_estimators( self, funding_accounts: Iterable[baseaccount.BaseAccount]): estimators = [] for account in funding_accounts: utxos = await account.get_utxos() for utxo in utxos: estimators.append(utxo.get_estimator(self)) return estimators async def get_addresses(self, **constraints): self.constraint_account_or_all(constraints) addresses = await self.db.get_addresses(**constraints) for address in addresses: public_key = await self.get_public_key_for_address( address['address']) address['public_key'] = public_key.extended_key_string() return addresses def get_address_count(self, **constraints): self.constraint_account_or_all(constraints) return self.db.get_address_count(**constraints) async def get_spendable_utxos(self, amount: int, funding_accounts): async with self._utxo_reservation_lock: txos = await self.get_effective_amount_estimators(funding_accounts) fee = self.transaction_class.output_class.pay_pubkey_hash( COIN, NULL_HASH32).get_fee(self) selector = CoinSelector(amount, fee) spendables = selector.select(txos, self.coin_selection_strategy) if spendables: await self.reserve_outputs(s.txo for s in spendables) return spendables def reserve_outputs(self, txos): return self.db.reserve_outputs(txos) def release_outputs(self, txos): return self.db.release_outputs(txos) def release_tx(self, tx): return self.release_outputs([txi.txo_ref.txo for txi in tx.inputs]) def constraint_account_or_all(self, constraints): if 'accounts' in constraints: return account = constraints.pop('account', None) if account: constraints['accounts'] = [account] else: constraints['accounts'] = self.accounts def get_utxos(self, **constraints): self.constraint_account_or_all(constraints) return self.db.get_utxos(**constraints) def get_utxo_count(self, **constraints): self.constraint_account_or_all(constraints) return self.db.get_utxo_count(**constraints) def get_transactions(self, **constraints): self.constraint_account_or_all(constraints) return self.db.get_transactions(**constraints) def get_transaction_count(self, **constraints): self.constraint_account_or_all(constraints) return self.db.get_transaction_count(**constraints) async def get_local_status_and_history(self, address, history=None): if not history: address_details = await self.db.get_address(address=address) history = address_details['history'] or '' parts = history.split(':')[:-1] return (hexlify(sha256(history.encode())).decode() if history else None, list(zip(parts[0::2], map(int, parts[1::2])))) @staticmethod def get_root_of_merkle_tree(branches, branch_positions, working_branch): for i, branch in enumerate(branches): other_branch = unhexlify(branch)[::-1] other_branch_on_left = bool((branch_positions >> i) & 1) if other_branch_on_left: combined = other_branch + working_branch else: combined = working_branch + other_branch working_branch = double_sha256(combined) return hexlify(working_branch[::-1]) async def start(self): if not os.path.exists(self.path): os.mkdir(self.path) await asyncio.wait([self.db.open(), self.headers.open()]) first_connection = self.network.on_connected.first asyncio.ensure_future(self.network.start()) await first_connection await self.join_network() 
    async def start(self):
        if not os.path.exists(self.path):
            os.mkdir(self.path)
        await asyncio.wait([self.db.open(), self.headers.open()])
        first_connection = self.network.on_connected.first
        asyncio.ensure_future(self.network.start())
        await first_connection
        await self.join_network()
        self.network.on_connected.listen(self.join_network)

    async def join_network(self, *_):
        log.info("Subscribing and updating accounts.")
        async with self._header_processing_lock:
            await self.update_headers()
        await self.subscribe_accounts()
        await self._update_tasks.done.wait()

    async def stop(self):
        self._update_tasks.cancel()
        await self._update_tasks.done.wait()
        await self.network.stop()
        await self.db.close()
        await self.headers.close()
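    # Header synchronization strategy (summary of the method below):
    # fetch headers in batches of up to 2001 and connect them to the
    # local chain; when headers won't connect, assume a reorganization
    # and rewind one header at a time until a fork point is found, then
    # resync forward, giving up after 100 rewinds or at genesis.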
    async def update_headers(self, height=None, headers=None, subscription_update=False):
        rewound = 0
        while True:

            if height is None or height > len(self.headers):
                # sometimes header subscription updates are for a header in the future
                # which can't be connected, so we do a normal header sync instead
                height = len(self.headers)
                headers = None
                subscription_update = False

            if not headers:
                header_response = await self.network.retriable_call(self.network.get_headers, height, 2001)
                headers = header_response['hex']

            if not headers:
                # nothing to do, network thinks we're already at the latest height
                return

            added = await self.headers.connect(height, unhexlify(headers))
            if added > 0:
                height += added
                self._on_header_controller.add(BlockHeightEvent(self.headers.height, added))

                if rewound > 0:
                    # we started rewinding blocks and apparently found
                    # a new chain
                    rewound = 0
                    await self.db.rewind_blockchain(height)

                if subscription_update:
                    # subscription updates are for latest header already
                    # so we don't need to check if there are newer / more
                    # on another loop of update_headers(), just return instead
                    return

            elif added == 0:
                # we had headers to connect but none got connected, probably a reorganization
                height -= 1
                rewound += 1
                log.warning(
                    "Blockchain Reorganization: attempting rewind to height %s from starting height %s",
                    height, height + rewound
                )

            else:
                raise IndexError("headers.connect() returned negative number ({})".format(added))

            if height < 0:
                raise IndexError(
                    "Blockchain reorganization rewound all the way back to genesis hash. "
                    "Something is very wrong. Maybe you are on the wrong blockchain?"
                )

            if rewound >= 100:
                raise IndexError(
                    "Blockchain reorganization dropped {} headers. This is highly unusual. "
                    "Will not continue to attempt reorganizing. Please, delete the ledger "
                    "synchronization directory inside your wallet directory (folder: '{}') and "
                    "restart the program to synchronize from scratch."
                    .format(rewound, self.get_id())
                )

            headers = None  # ready to download some more headers

            # if we made it this far and this was a subscription_update
            # it means something went wrong and now we're doing a more
            # robust sync, turn off subscription update shortcut
            subscription_update = False

    async def receive_header(self, response):
        async with self._header_processing_lock:
            header = response[0]
            await self.update_headers(
                height=header['height'], headers=header['hex'], subscription_update=True
            )

    async def subscribe_accounts(self):
        if self.network.is_connected and self.accounts:
            await asyncio.wait([self.subscribe_account(a) for a in self.accounts])

    async def subscribe_account(self, account: baseaccount.BaseAccount):
        for address_manager in account.address_managers.values():
            await self.subscribe_addresses(address_manager, await address_manager.get_addresses())
        await account.ensure_address_gap()

    async def announce_addresses(self, address_manager: baseaccount.AddressManager, addresses: List[str]):
        await self.subscribe_addresses(address_manager, addresses)
        await self._on_address_controller.add(AddressesGeneratedEvent(address_manager, addresses))

    async def subscribe_addresses(self, address_manager: baseaccount.AddressManager, addresses: List[str]):
        if self.network.is_connected and addresses:
            await asyncio.wait([
                self.subscribe_address(address_manager, address) for address in addresses
            ])

    async def subscribe_address(self, address_manager: baseaccount.AddressManager, address: str):
        remote_status = await self.network.subscribe_address(address)
        self._update_tasks.add(self.update_history(address, remote_status, address_manager))

    def process_status_update(self, update):
        address, remote_status = update
        self._update_tasks.add(self.update_history(address, remote_status))
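    # update_history() below reconciles one address against the server.
    # Local state is a flat 'txid:height:' string (for example, the
    # hypothetical 'deadbeef:5:cafebabe:7:' encodes two entries) whose
    # sha256 is the status compared against remote_status. Only the
    # transactions missing locally are fetched, and each input's spent
    # txo reference is resolved from the in-memory cache or the database
    # before the batch is saved.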
    async def update_history(self, address, remote_status,
                             address_manager: baseaccount.AddressManager = None):
        async with self._address_update_locks.setdefault(address, asyncio.Lock()):
            self._known_addresses_out_of_sync.discard(address)

            local_status, local_history = await self.get_local_status_and_history(address)

            if local_status == remote_status:
                return True

            remote_history = await self.network.retriable_call(self.network.get_history, address)
            remote_history = list(map(itemgetter('tx_hash', 'height'), remote_history))
            we_need = set(remote_history) - set(local_history)
            if not we_need:
                return True

            cache_tasks: List[asyncio.Future[BaseTransaction]] = []
            synced_history = StringIO()
            for i, (txid, remote_height) in enumerate(remote_history):
                if i < len(local_history) and local_history[i] == (txid, remote_height) and not cache_tasks:
                    synced_history.write(f'{txid}:{remote_height}:')
                else:
                    check_local = (txid, remote_height) not in we_need
                    cache_tasks.append(asyncio.ensure_future(
                        self.cache_transaction(txid, remote_height, check_local=check_local)
                    ))

            synced_txs = []
            for task in cache_tasks:
                tx = await task

                check_db_for_txos = []
                for txi in tx.inputs:
                    if txi.txo_ref.txo is not None:
                        continue
                    cache_item = self._tx_cache.get(txi.txo_ref.tx_ref.id)
                    if cache_item is not None:
                        if cache_item.tx is None:
                            await cache_item.has_tx.wait()
                        assert cache_item.tx is not None
                        txi.txo_ref = cache_item.tx.outputs[txi.txo_ref.position].ref
                    else:
                        check_db_for_txos.append(txi.txo_ref.id)

                referenced_txos = {} if not check_db_for_txos else {
                    txo.id: txo for txo in await self.db.get_txos(txoid__in=check_db_for_txos, no_tx=True)
                }

                for txi in tx.inputs:
                    if txi.txo_ref.txo is not None:
                        continue
                    referenced_txo = referenced_txos.get(txi.txo_ref.id)
                    if referenced_txo is not None:
                        txi.txo_ref = referenced_txo.ref

                synced_history.write(f'{tx.id}:{tx.height}:')
                synced_txs.append(tx)

            await self.db.save_transaction_io_batch(
                synced_txs, address, self.address_to_hash160(address), synced_history.getvalue()
            )
            await asyncio.wait([
                self._on_transaction_controller.add(TransactionEvent(address, tx))
                for tx in synced_txs
            ])

            if address_manager is None:
                address_manager = await self.get_address_manager_for_address(address)

            if address_manager is not None:
                await address_manager.ensure_address_gap()

            local_status, local_history = \
                await self.get_local_status_and_history(address, synced_history.getvalue())

            if local_status != remote_status:
                if local_history == remote_history:
                    return True
                log.warning(
                    "Wallet is out of sync after syncing. Remote: %s with %d items, local: %s with %d items",
                    remote_status, len(remote_history), local_status, len(local_history)
                )
                log.warning("local: %s", local_history)
                log.warning("remote: %s", remote_history)
                self._known_addresses_out_of_sync.add(address)
                return False
            else:
                return True
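    # cache_transaction() below deduplicates fetches through the LRU
    # cache of TransactionCacheItem objects: a per-item lock ensures each
    # transaction is resolved once (local database first, then network)
    # even when many concurrent update_history() tasks request it.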
    async def cache_transaction(self, txid, remote_height, check_local=True):
        cache_item = self._tx_cache.get(txid)
        if cache_item is None:
            cache_item = self._tx_cache[txid] = TransactionCacheItem()
        elif cache_item.tx is not None and \
                cache_item.tx.height >= remote_height and \
                (cache_item.tx.is_verified or remote_height < 1):
            return cache_item.tx  # cached tx is already up-to-date

        async with cache_item.lock:

            tx = cache_item.tx

            if tx is None and check_local:
                # check local db
                tx = cache_item.tx = await self.db.get_transaction(txid=txid)

            if tx is None:
                # fetch from network
                _raw = await self.network.retriable_call(self.network.get_transaction, txid, remote_height)
                if _raw:
                    tx = self.transaction_class(unhexlify(_raw))
                    cache_item.tx = tx  # make sure it's saved before caching it

            if tx is None:
                raise ValueError(f'Transaction {txid} was not in database and not on network.')

            await self.maybe_verify_transaction(tx, remote_height)
            return tx

    async def maybe_verify_transaction(self, tx, remote_height):
        tx.height = remote_height
        if 0 < remote_height < len(self.headers):
            merkle = await self.network.retriable_call(self.network.get_merkle, tx.id, remote_height)
            merkle_root = self.get_root_of_merkle_tree(merkle['merkle'], merkle['pos'], tx.hash)
            header = self.headers[remote_height]
            tx.position = merkle['pos']
            tx.is_verified = merkle_root == header['merkle_root']

    async def get_address_manager_for_address(self, address) -> Optional[baseaccount.AddressManager]:
        details = await self.db.get_address(address=address)
        for account in self.accounts:
            if account.id == details['account']:
                return account.address_managers[details['chain']]
        return None

    def broadcast(self, tx):
        # broadcast can't be a retriable call yet
        return self.network.broadcast(hexlify(tx.raw).decode())

    async def wait(self, tx: basetransaction.BaseTransaction, height=-1, timeout=None):
        addresses = set()
        for txi in tx.inputs:
            if txi.txo_ref.txo is not None:
                addresses.add(
                    self.hash160_to_address(txi.txo_ref.txo.script.values['pubkey_hash'])
                )
        for txo in tx.outputs:
            addresses.add(
                self.hash160_to_address(txo.script.values['pubkey_hash'])
            )
        records = await self.db.get_addresses(cols=('address',), address__in=addresses)
        _, pending = await asyncio.wait([
            self.on_transaction.where(partial(
                lambda a, e: a == e.address and e.tx.height >= height and e.tx.id == tx.id,
                address_record['address']
            )) for address_record in records
        ], timeout=timeout)
        if pending:
            raise asyncio.TimeoutError('Timed out waiting for transaction.')
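# A minimal, self-contained sketch of the merkle verification done by
# maybe_verify_transaction() above. For a hypothetical two-transaction
# block, the proof for the first transaction is just its sibling's hash;
# folding it through get_root_of_merkle_tree() reproduces the block's
# merkle root. The tx hashes are made up, double_sha256 is assumed to be
# sha256 applied twice (bitcoin-style), and hexlify/unhexlify come from
# binascii, as the class code above implies.
def _sketch_merkle_proof_roundtrip():
    import hashlib

    def sha256d(data):
        return hashlib.sha256(hashlib.sha256(data).digest()).digest()

    tx_a = sha256d(b'hypothetical tx a')  # little-endian hash being verified
    tx_b = sha256d(b'hypothetical tx b')  # its sibling in the block
    expected_root = sha256d(tx_a + tx_b)

    # servers report branches as big-endian hex, so reverse before hexlifying;
    # branch_positions is the tx index in the block (0 -> sibling on the right)
    branches = [hexlify(tx_b[::-1]).decode()]
    root = BaseLedger.get_root_of_merkle_tree(branches, 0, tx_a)
    assert root == hexlify(expected_root[::-1])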
class BaseLedger(six.with_metaclass(LedgerRegistry)):

    name = None
    symbol = None
    network_name = None

    account_class = baseaccount.BaseAccount
    database_class = basedatabase.BaseDatabase
    headers_class = baseheader.BaseHeaders
    network_class = basenetwork.BaseNetwork
    transaction_class = basetransaction.BaseTransaction

    secret_prefix = None
    pubkey_address_prefix = None
    script_address_prefix = None
    extended_public_key_prefix = None
    extended_private_key_prefix = None

    default_fee_per_byte = 10

    def __init__(self, config=None, db=None, network=None, headers_class=None):
        self.config = config or {}
        self.db = db or self.database_class(
            os.path.join(self.path, "blockchain.db")
        )  # type: basedatabase.BaseDatabase
        self.network = network or self.network_class(self)
        self.network.on_header.listen(self.process_header)
        self.network.on_status.listen(self.process_status)
        self.accounts = set()
        self.headers = (headers_class or self.headers_class)(self)
        self.fee_per_byte = self.config.get('fee_per_byte', self.default_fee_per_byte)

        self._on_transaction_controller = StreamController()
        self.on_transaction = self._on_transaction_controller.stream
        self.on_transaction.listen(
            lambda e: log.info('({}) on_transaction: address={}, height={}, is_verified={}, tx.id={}'.format(
                self.get_id(), e.address, e.height, e.is_verified, e.tx.hex_id
            ))
        )

        self._on_header_controller = StreamController()
        self.on_header = self._on_header_controller.stream

        self._transaction_processing_locks = {}

    @classmethod
    def get_id(cls):
        return '{}_{}'.format(cls.symbol.lower(), cls.network_name.lower())

    def hash160_to_address(self, h160):
        raw_address = self.pubkey_address_prefix + h160
        return Base58.encode(bytearray(raw_address + double_sha256(raw_address)[0:4]))

    @staticmethod
    def address_to_hash160(address):
        # drop the one byte prefix and four byte checksum,
        # keeping only the 20 byte pubkey hash
        return Base58.decode(address)[1:21]

    def public_key_to_address(self, public_key):
        return self.hash160_to_address(hash160(public_key))

    @staticmethod
    def private_key_to_wif(private_key):
        return b'\x1c' + private_key + b'\x01'

    @property
    def path(self):
        return os.path.join(self.config['wallet_path'], self.get_id())

    def get_input_output_fee(self, io):
        """ Fee based on size of the input / output. """
        return self.fee_per_byte * io.size
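    # For example (hypothetical sizes, default fee_per_byte of 10): a
    # 148-byte input or output yields get_input_output_fee() == 1480,
    # and a transaction with base_size 10 yields a base fee of 100 from
    # get_transaction_base_fee() below.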
""" return self.fee_per_byte * tx.base_size @defer.inlineCallbacks def add_account(self, account): # type: (baseaccount.BaseAccount) -> None self.accounts.add(account) if self.network.is_connected: yield self.update_account(account) @defer.inlineCallbacks def get_private_key_for_address(self, address): match = yield self.db.get_address(address) if match: for account in self.accounts: if bytes(match['account']) == account.public_key.address: defer.returnValue(account.get_private_key(match['chain'], match['position'])) def get_unspent_outputs(self, account): return self.db.get_utxos(account, self.transaction_class.output_class) @defer.inlineCallbacks def get_effective_amount_estimators(self, funding_accounts): # type: (Iterable[baseaccount.BaseAccount]) -> defer.Deferred estimators = [] for account in funding_accounts: utxos = yield self.get_unspent_outputs(account) for utxo in utxos: estimators.append(utxo.get_estimator(self)) defer.returnValue(estimators) @defer.inlineCallbacks def get_local_status(self, address): address_details = yield self.db.get_address(address) history = address_details['history'] or '' hash = hashlib.sha256(history.encode()).digest() defer.returnValue(hexlify(hash)) @defer.inlineCallbacks def get_local_history(self, address): address_details = yield self.db.get_address(address) history = address_details['history'] or '' parts = history.split(':')[:-1] defer.returnValue(list(zip(parts[0::2], map(int, parts[1::2])))) @staticmethod def get_root_of_merkle_tree(branches, branch_positions, working_branch): for i, branch in enumerate(branches): other_branch = unhexlify(branch)[::-1] other_branch_on_left = bool((branch_positions >> i) & 1) if other_branch_on_left: combined = other_branch + working_branch else: combined = working_branch + other_branch working_branch = double_sha256(combined) return hexlify(working_branch[::-1]) @defer.inlineCallbacks def is_valid_transaction(self, tx, height): height <= len(self.headers) or defer.returnValue(False) merkle = yield self.network.get_merkle(tx.hex_id.decode(), height) merkle_root = self.get_root_of_merkle_tree(merkle['merkle'], merkle['pos'], tx.hash) header = self.headers[height] defer.returnValue(merkle_root == header['merkle_root']) @defer.inlineCallbacks def start(self): if not os.path.exists(self.path): os.mkdir(self.path) yield self.db.start() first_connection = self.network.on_connected.first self.network.start() yield first_connection self.headers.touch() yield self.update_headers() yield self.network.subscribe_headers() yield self.update_accounts() @defer.inlineCallbacks def stop(self): yield self.network.stop() yield self.db.stop() @execute_serially @defer.inlineCallbacks def update_headers(self): while True: height_sought = len(self.headers) headers = yield self.network.get_headers(height_sought) if headers['count'] <= 0: break yield self.headers.connect(height_sought, unhexlify(headers['hex'])) self._on_header_controller.add(height_sought) @defer.inlineCallbacks def process_header(self, response): header = response[0] if self.update_headers.is_running: return if header['height'] == len(self.headers): # New header from network directly connects after the last local header. yield self.headers.connect(len(self.headers), unhexlify(header['hex'])) self._on_header_controller.add(len(self.headers)) elif header['height'] > len(self.headers): # New header is several heights ahead of local, do download instead. 
    @defer.inlineCallbacks
    def process_header(self, response):
        header = response[0]
        if self.update_headers.is_running:
            return
        if header['height'] == len(self.headers):
            # New header from network directly connects after the last local header.
            yield self.headers.connect(len(self.headers), unhexlify(header['hex']))
            self._on_header_controller.add(len(self.headers))
        elif header['height'] > len(self.headers):
            # New header is several heights ahead of local, do a full download instead.
            yield self.update_headers()

    @execute_serially
    def update_accounts(self):
        return defer.DeferredList([self.update_account(a) for a in self.accounts])

    @defer.inlineCallbacks
    def update_account(self, account):  # type: (baseaccount.BaseAccount) -> defer.Deferred
        # Before subscribing, download history for any addresses that don't have any,
        # this avoids a situation where we're getting status updates for addresses we
        # already know need updating. Continue to get history and create more addresses
        # until all missing addresses are created and their history is fully restored.
        yield account.ensure_address_gap()
        addresses = yield account.get_unused_addresses()
        while addresses:
            yield defer.DeferredList([self.update_history(a) for a in addresses])
            addresses = yield account.ensure_address_gap()

        # By this point all of the addresses should be restored and we
        # can now subscribe all of them to receive updates.
        all_addresses = yield account.get_addresses()
        yield defer.DeferredList(list(map(self.subscribe_history, all_addresses)))

    @defer.inlineCallbacks
    def update_history(self, address):
        remote_history = yield self.network.get_history(address)
        local_history = yield self.get_local_history(address)

        synced_history = []
        for i, (hex_id, remote_height) in enumerate(map(itemgetter('tx_hash', 'height'), remote_history)):

            synced_history.append((hex_id, remote_height))

            if i < len(local_history) and local_history[i] == (hex_id.decode(), remote_height):
                continue

            lock = self._transaction_processing_locks.setdefault(hex_id, defer.DeferredLock())

            yield lock.acquire()

            try:
                # see if we have a local copy of the transaction, otherwise fetch it from the server
                raw, local_height, is_verified = yield self.db.get_transaction(unhexlify(hex_id)[::-1])
                save_tx = None
                if raw is None:
                    _raw = yield self.network.get_transaction(hex_id)
                    tx = self.transaction_class(unhexlify(_raw))
                    save_tx = 'insert'
                else:
                    tx = self.transaction_class(raw)

                if remote_height > 0 and not is_verified:
                    is_verified = yield self.is_valid_transaction(tx, remote_height)
                    is_verified = 1 if is_verified else 0
                    if save_tx is None:
                        save_tx = 'update'

                yield self.db.save_transaction_io(
                    save_tx, tx, remote_height, is_verified, address, self.address_to_hash160(address),
                    ''.join('{}:{}:'.format(tx_id.decode(), tx_height) for tx_id, tx_height in synced_history)
                )

                self._on_transaction_controller.add(TransactionEvent(address, tx, remote_height, is_verified))

            finally:
                lock.release()
                if not lock.locked:
                    del self._transaction_processing_locks[hex_id]

    @defer.inlineCallbacks
    def subscribe_history(self, address):
        remote_status = yield self.network.subscribe_address(address)
        local_status = yield self.get_local_status(address)
        if local_status != remote_status:
            yield self.update_history(address)

    @defer.inlineCallbacks
    def process_status(self, response):
        address, remote_status = response
        local_status = yield self.get_local_status(address)
        if local_status != remote_status:
            yield self.update_history(address)

    def broadcast(self, tx):
        return self.network.broadcast(hexlify(tx.raw))
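# A hedged sketch (all values hypothetical, borrowed from common
# bitcoin mainnet conventions) of the class attributes a concrete
# subclass of the asyncio BaseLedger defined earlier must provide
# before it can be instantiated; LedgerRegistry picks the subclass up
# automatically through its metaclass.
class ExampleCoinLedger(BaseLedger):
    name = 'ExampleCoin'
    symbol = 'EXC'
    network_name = 'mainnet'
    headers_class = BaseHeaders
    pubkey_address_prefix = bytes((0x00,))
    script_address_prefix = bytes((0x05,))
    extended_public_key_prefix = unhexlify('0488b21e')
    extended_private_key_prefix = unhexlify('0488ade4')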