def _serialize_cache(self, cache):
    """ Serializes a cache into a dict capable of
        being used as an arg to json.dumps().

        Args:
            cache (dict): Cache (possibly nested) whose values are
                converted into JSON-serializable primitives.

        Returns:
            dict: A new dict mirroring `cache`, with every value reduced
            to a JSON-serializable type. The input dict is not mutated.
    """
    newd = {}
    for k, v in cache.items():
        if isinstance(v, dict):
            # Nested caches are serialized recursively.
            newd[k] = self._serialize_cache(v)
        elif isinstance(v, WalletTransaction):
            newd[k] = v._serialize()
        elif isinstance(v, Transaction):
            # Wrap a raw Transaction so it serializes like a wallet txn.
            newd[k] = WalletTransaction.from_transaction(v)._serialize()
        elif isinstance(v, (TransactionInput, TransactionOutput, Hash)):
            # These three branches all stringified identically; a single
            # tuple isinstance check replaces the chained or-checks.
            newd[k] = str(v)
        elif isinstance(v, set):
            # JSON has no set type; degrade to a list.
            newd[k] = list(v)
        else:
            # Assumed to already be JSON-serializable (int, str, None, ...).
            newd[k] = v
    return newd
def _sync_txns(self, max_index=0, check_all=False):
    """ Synchronizes the transaction cache with the data provider.

        Walks both address chains of this account (change in {0, 1}),
        fetching transactions in DISCOVERY_INCREMENT-sized address
        batches until GAP_LIMIT consecutive unused addresses are seen
        (BIP-44 style account discovery). Discovered transactions are
        inserted into the cache manager and the per-chain last-used
        indices are updated.

        Args:
            max_index (int): Unused by this implementation.
            check_all (bool): If True, ignore the cached last-block
                height and re-fetch history for all addresses. Forced
                True when the last full update is older than 20 minutes.
    """
    now = time.time()
    # Force a full re-scan if the last one was more than 20 minutes ago.
    if now - self._last_full_update > 20 * 60:
        check_all = True
    # change == 0: external (receive) chain; change == 1: change chain.
    for change in [0, 1]:
        found_last = False
        current_last = self.last_indices[change]
        addr_range = 0
        while not found_last:
            # Try a 2 * GAP_LIMIT at a go
            end = addr_range + self.DISCOVERY_INCREMENT
            addresses = {i: self.get_address(change, i)
                         for i in range(addr_range, end)}
            if self.data_provider.can_limit_by_height:
                # Only fetch blocks newer than what is already cached,
                # unless a full check was requested.
                min_block = None if check_all else self._cache_manager.last_block
                txns = self.data_provider.get_transactions(
                    list(addresses.values()), limit=10000, min_block=min_block)
            else:
                txns = self.data_provider.get_transactions(
                    list(addresses.values()), limit=10000)
            # Guards against inserting the same transaction twice when it
            # touches more than one of our addresses in this batch.
            inserted_txns = set()
            for i in sorted(addresses.keys()):
                addr = addresses[i]
                self._cache_manager.insert_address(self.index, change, i, addr)
                addr_has_txns = self._cache_manager.address_has_txns(addr)
                if not addr_has_txns or addr not in txns or \
                        not bool(txns[addr]):
                    # Address looks unused: end discovery once the run of
                    # unused addresses past current_last reaches GAP_LIMIT.
                    if i - current_last >= self.GAP_LIMIT:
                        found_last = True
                        break
                # NOTE(review): assumes the provider returns an entry for
                # every queried address — if addr is absent from txns this
                # raises KeyError; confirm against the data_provider
                # get_transactions contract.
                if txns[addr]:
                    current_last = i
                    for t in txns[addr]:
                        txid = str(t['transaction'].hash)
                        if txid not in inserted_txns:
                            wt = WalletTransaction.from_transaction(
                                t['transaction'])
                            wt.block = t['metadata']['block']
                            wt.block_hash = t['metadata']['block_hash']
                            wt.confirmations = t['metadata']['confirmations']
                            # network_time is optional in provider metadata.
                            if 'network_time' in t['metadata']:
                                wt.network_time = t['metadata']['network_time']
                            self._cache_manager.insert_txn(wt)
                            inserted_txns.add(txid)
                # Cache already knew this address was used: advance the
                # last-used marker even if no new txns came back.
                if addr_has_txns:
                    current_last = i
            addr_range += self.DISCOVERY_INCREMENT
        self.last_indices[change] = current_last
    self._last_update = time.time()
    if check_all:
        self._last_full_update = self._last_update
def _sync_txns(self, max_index=0, check_all=False):
    """ Synchronizes the transaction cache with the data provider.

        NOTE(review): this is an exact duplicate of the _sync_txns
        definition immediately above (they differ only in whitespace);
        this second def shadows the first. One of the two copies should
        be removed.

        Walks both address chains of this account (change in {0, 1}),
        fetching transactions in DISCOVERY_INCREMENT-sized address
        batches until GAP_LIMIT consecutive unused addresses are seen
        (BIP-44 style account discovery).

        Args:
            max_index (int): Unused by this implementation.
            check_all (bool): If True, ignore the cached last-block
                height and re-fetch history for all addresses. Forced
                True when the last full update is older than 20 minutes.
    """
    now = time.time()
    # Force a full re-scan if the last one was more than 20 minutes ago.
    if now - self._last_full_update > 20 * 60:
        check_all = True
    # change == 0: external (receive) chain; change == 1: change chain.
    for change in [0, 1]:
        found_last = False
        current_last = self.last_indices[change]
        addr_range = 0
        while not found_last:
            # Try a 2 * GAP_LIMIT at a go
            end = addr_range + self.DISCOVERY_INCREMENT
            addresses = {i:self.get_address(change, i)
                         for i in range(addr_range, end)}
            if self.data_provider.can_limit_by_height:
                # Only fetch blocks newer than what is already cached,
                # unless a full check was requested.
                min_block = None if check_all else self._cache_manager.last_block
                txns = self.data_provider.get_transactions(
                    list(addresses.values()), limit=10000, min_block=min_block)
            else:
                txns = self.data_provider.get_transactions(
                    list(addresses.values()), limit=10000)
            # Guards against inserting the same transaction twice when it
            # touches more than one of our addresses in this batch.
            inserted_txns = set()
            for i in sorted(addresses.keys()):
                addr = addresses[i]
                self._cache_manager.insert_address(self.index, change, i, addr)
                addr_has_txns = self._cache_manager.address_has_txns(addr)
                if not addr_has_txns or addr not in txns or \
                        not bool(txns[addr]):
                    # Address looks unused: end discovery once the run of
                    # unused addresses past current_last reaches GAP_LIMIT.
                    if i - current_last >= self.GAP_LIMIT:
                        found_last = True
                        break
                # NOTE(review): assumes the provider returns an entry for
                # every queried address — if addr is absent from txns this
                # raises KeyError; confirm against the data_provider
                # get_transactions contract.
                if txns[addr]:
                    current_last = i
                    for t in txns[addr]:
                        txid = str(t['transaction'].hash)
                        if txid not in inserted_txns:
                            wt = WalletTransaction.from_transaction(
                                t['transaction'])
                            wt.block = t['metadata']['block']
                            wt.block_hash = t['metadata']['block_hash']
                            wt.confirmations = t['metadata']['confirmations']
                            # network_time is optional in provider metadata.
                            if 'network_time' in t['metadata']:
                                wt.network_time = t['metadata']['network_time']
                            self._cache_manager.insert_txn(wt)
                            inserted_txns.add(txid)
                # Cache already knew this address was used: advance the
                # last-used marker even if no new txns came back.
                if addr_has_txns:
                    current_last = i
            addr_range += self.DISCOVERY_INCREMENT
        self.last_indices[change] = current_last
    self._last_update = time.time()
    if check_all:
        self._last_full_update = self._last_update