def detect_value(self, key_manager):
    """Try to recover this output's value and blinding key with keys held by key_manager.

    Best effort: on any failure (no matching private key, undecryptable message,
    commitment mismatch) value/blinding_key stay unset and False is returned.
    Returns bool: True iff a non-zero value was successfully detected.
    """
    try:
        privkey = key_manager.priv_by_address(self.address)
        # The serialized authorized pedersen commitment doubles as the nonce.
        nonce = self.apc
        decrypted_message = decrypt(privkey, nonce, self.encrypted_message)
        # Payload layout: 32-byte raw blinding key followed by a u64 value.
        raw_blinding_key, self.value = struct.unpack(
            "> 32s Q", decrypted_message)
        self.blinding_key=PrivateKey(raw_blinding_key, raw=True)
        # Cross-check: commitment rebuilt from (value, blinding_key) must match
        # the one carried by the output, otherwise discard what we decrypted.
        if not self._calc_pedersen_wos()==self.unpc:
            self.blinding_key, self.value = None,None
            raise Exception("Incorrect blinding key and value")
    except Exception as e:
        #TODO definetely some logic should be added here to notify about missed info
        pass
    # NOTE: a successfully detected value of 0 is also reported as False.
    return bool(self.value)
def fill(self, address, value, relay_fee = 0, blinding_key=None, generator=default_generator_ser, coinbase=False, lock_height = 0):
    """
    Set the basic parameters of the output.

    Commitments and rangeproofs are NOT created here; call generate()
    afterwards to produce them.

    Parameters
    ----------
    address : Address
      Address of output
    value : int
      Value of output in minimal indivisible units
    [optional] relay_fee : int
      Default: 0. Relay fee of output in minimal indivisible units
    [optional] blinding_key : PrivateKey
      Default: random. The wallet normally does not persist this key:
      it is encrypted into the output with the address private key
    [optional] generator : GeneratorOnCurve
      Default: default_generator. Asset transactions use other generators
    [optional] coinbase : bool
      Default: False. Coinbase outputs have version 0 (common outputs have 1)
    [optional] lock_height : integer
      Default: 0. Minimal height at which the output can be spent.
    """
    self.address = address
    self.generator = generator
    self.value = value
    # Draw a fresh random key when the caller did not supply one;
    # it is intentionally not stored in the wallet.
    self.blinding_key = PrivateKey() if blinding_key is None else blinding_key
    self.relay_fee = relay_fee
    if coinbase:
        self.version = 0
    self.lock_height = lock_height
def mining_canary_hash_part(_bytes):
    """Derive the mining-canary hash fragment for _bytes.

    sha256(_bytes) is interpreted as a raw private key; the result is the
    sha256 of that key's serialized public key with its first byte skipped.
    """
    seed_digest = sha256(_bytes).digest()
    derived_key = PrivateKey(privkey=seed_digest, raw=True)
    outer = sha256()
    outer.update(derived_key.pubkey.serialize()[1:])
    return outer.digest()
def compose_block_transaction(self, rtx, combined_transaction=None):
    """Build the block (miner) transaction around the prepared coinbase output.

    Resets this transaction object, installs the coinbase (and optional
    dev-reward) outputs, creates the matching excess, optionally merges
    combined_transaction in, and finally verifies the result against rtx.
    Requires self.coinbase to be set beforehand; raises otherwise.
    """
    self.serialized = None
    if not self.coinbase:
        raise Exception("coinbase output is required")
    cb = self.coinbase
    dw = self.dev_reward
    # Re-running __init__ clears all transaction state; cb/dw survive via locals.
    self.__init__(txos_storage=self.txos_storage, excesses_storage=self.excesses_storage) #reset self
    self.outputs = [cb]
    if dw:
        self.outputs.append(dw)
    offset_pk = PrivateKey()
    self.mixer_offset = int.from_bytes(offset_pk.private_key, "big")
    # Aggregate excess key: coinbase blinding key + mixer offset (+ dev reward key).
    aepk = cb.blinding_key + offset_pk
    if dw:
        aepk += dw.blinding_key
    # Excess commits to a script referencing the coinbase output's APC (first 33 bytes of index).
    self.additional_excesses = [
        excess_from_private_key(aepk, b"\x01\x00" + cb.serialized_index[:33])
    ]
    if combined_transaction:
        # Merge user transactions into the block transaction and adopt the result.
        new_tx = self.merge(combined_transaction)
        self.inputs = new_tx.inputs
        self.outputs = new_tx.outputs
        self.additional_excesses = new_tx.additional_excesses
        self.updated_excesses = new_tx.updated_excesses
    self.sort_lists()
    self.verify(rtx=rtx)
def get_confirmed_balance_list(self, utxo_index, txos_storage, current_height):
    """Return {address_text: {b64_output_index: value or 'unknown'}} for all
    unspent outputs owned by keys stored in the wallet.

    Output metadata is cached in the wallet db; on a cache miss the utxo is
    fetched from txos_storage, its value detected when possible, and the
    result is written back to the cache.
    """
    ret = {}
    with self.wallet.env.begin(write=False) as txn:
        cursor = txn.cursor(db=self.wallet.main_db)
        # Iterate over every (serialized pubkey -> serialized privkey) pair the wallet holds.
        for ser_pub, priv in cursor.iternext(keys=True, values=True):
            address = address_from_private_key(PrivateKey(priv, raw=True))
            taddress = address.to_text()
            for output_index in utxo_index.get_all_unspent_for_serialized_pubkey(ser_pub):
                try:
                    # Fast path: cached (lock_height, value, serialized_index) triple.
                    lock_height, value, serialized_index = self.wallet.get_output(output_index, txn=txn)
                except KeyError:
                    # Cache miss: pull the utxo and try to detect its value, then cache.
                    utxo = txos_storage.confirmed[output_index]
                    lock_height = utxo.lock_height
                    serialized_index = utxo.serialized_index
                    if utxo.detect_value(self):
                        value = utxo.value
                    else:
                        value = None
                    self.wallet.put_output(output_index, (lock_height, value, serialized_index))#No txn here, write-access is required
                texted_index = base64.b64encode(serialized_index).decode()
                if not taddress in ret:
                    ret[taddress]={}
                # Undetectable values are reported as the string 'unknown'.
                if value:
                    ret[taddress][texted_index]=value
                else:
                    ret[taddress][texted_index]='unknown'
    return ret
def detect_value(self, inputs_info):
    """Try to recover this output's value and blinding key from inputs_info.

    Parameters
    ----------
    inputs_info : dict
      Must contain 'priv_by_pub', a mapping from serialized pubkey to the
      matching private key.

    Raises
    ------
    KeyError
      When inputs_info carries no private key for this output's address
      (malformed inputs_info) — propagated to the caller on purpose.

    Any other failure (undecryptable message, commitment mismatch) is
    swallowed, leaving value/blinding_key unset.
    Returns bool: True iff a non-zero value was successfully detected.
    """
    try:
        privkey = inputs_info['priv_by_pub'][
            self.address.serialized_pubkey]
        # The serialized authorized pedersen commitment doubles as the nonce.
        nonce = self.apc
        decrypted_message = decrypt(privkey, nonce, self.encrypted_message)
        # Payload layout: 32-byte raw blinding key followed by a u64 value.
        raw_blinding_key, self.value = struct.unpack(
            "> 32s Q", decrypted_message)
        self.blinding_key = PrivateKey(raw_blinding_key, raw=True)
        # Cross-check: commitment rebuilt from (value, blinding_key) must match
        # the one carried by the output, otherwise discard what we decrypted.
        if not self._calc_pedersen_wos() == self.unpc:
            self.blinding_key, self.value = None, None
            raise Exception("Incorrect blinding key and value")
    except KeyError:
        raise  # wrong inputs_info; bare raise preserves the original traceback
    except Exception as e:
        #TODO definetely some logic should be added here to notify about missed info
        pass
    return bool(self.value)
def load_from_disc(self):
    """Build our_node from the p2p section of the config."""
    p2p_conf = self.config['p2p']
    # The lightning-static private key is stored as an int; wrap it as a raw key.
    static_key = Key(key=PrivateKey(p2p_conf['lspriv'].to_bytes(32, 'big'), raw=True))
    self.our_node = Node(
        None,
        {
            'network': {'host': p2p_conf['host'], 'port': p2p_conf['port']},
            'static_full_key': static_key,
        },
        self.loop,
        None)
    self.nodes = {}
def get_confirmed_balance_list(self, utxo_index, txos_storage, current_height):
    """Return {address_text: {b64_output_index: value or 'unknown'}} for all
    unspent outputs owned by keys stored in the wallet (no caching)."""
    balances = {}
    with self.wallet.env.begin(write=False) as txn:
        cursor = txn.cursor(db=self.wallet.main_db)
        # Walk every (serialized pubkey -> serialized privkey) pair the wallet holds.
        for serialized_pubkey, serialized_privkey in cursor.iternext(keys=True, values=True):
            owner = address_from_private_key(
                PrivateKey(serialized_privkey, raw=True)).to_text()
            for output_index in utxo_index.get_all_unspent_for_serialized_pubkey(
                    serialized_pubkey):
                utxo = txos_storage.confirmed[output_index]
                texted_index = base64.b64encode(utxo.serialized_index).decode()
                per_address = balances.setdefault(owner, {})
                # Undetectable values are reported as the string 'unknown'.
                per_address[texted_index] = utxo.value if utxo.detect_value(self) else 'unknown'
    return balances
def load_from_disc(self):
    """Build our_node from the p2p config section and log its public key."""
    p2p_conf = self.config['p2p']
    # The lightning-static private key is stored as an int; wrap it as a raw key.
    static_key = Key(key=PrivateKey(p2p_conf['lspriv'].to_bytes(32, 'big'), raw=True))
    network_conf = {
        'host': p2p_conf['host'],
        'port': p2p_conf['port'],
        # Fall back to the bind host/port when no advertised address is configured.
        'advertised_host': p2p_conf.get('advertised_host', p2p_conf['host']),
        'advertised_port': p2p_conf.get('advertised_port', p2p_conf['port']),
    }
    self.our_node = Node(
        None,
        {'network': network_conf, 'static_full_key': static_key},
        self.loop,
        None)
    logger.info("Our node public key %s" % static_key.pubkey())
    self.nodes = {}
def encrypt(pubkey, nonce, plaintext):
    '''
    Encrypt plaintext to a pubkey (only the owner of the matching privkey can decrypt).

    params:
      pubkey: secp256k1_py PublicKey object
      nonce: 12 bytes nonce, should not be reused
      plaintext: bytes object of arbitrary length.

    Inner logic:
      1) draw an ephemeral (one-time) private key
      2) derive shared_secret: ECDH of that key and the receiver's pubkey
      3) symmetrically encrypt plaintext with shared_secret (ChaCha20Poly1305)
      4) prepend the ephemeral public key to the ciphertext
    '''
    full_nonce = top_up_nonce(nonce)
    one_time_key = PrivateKey()
    shared_secret = pubkey.ecdh(one_time_key.private_key)
    cipher = ChaCha20Poly1305(shared_secret, 'python')
    sealed = cipher.seal(full_nonce, plaintext, b'')
    return one_time_key.pubkey.serialize() + sealed
def new_address(self):
    """Push a fresh random key into the wallet pool, then build an address
    from the next pooled key."""
    fresh = PrivateKey()
    self.wallet.add_privkey_to_pool(fresh.pubkey.serialize(), fresh.private_key)
    pooled = PrivateKey(self.wallet.get_privkey_from_pool(), raw=True)
    return address_from_private_key(pooled)
def blindly_generate(self, change_address, input_data, relay_fee_per_kb=0):
    """Generate outputs, fees and excesses for this transaction from raw input data.

    Parameters
    ----------
    change_address : Address
      Change (the remainder after destinations and fees) is sent here.
    input_data : iterable of tuples
      Each element is (index, value, priv_key, blinding_key, ser_apc),
      as destructured in the loop below.
    relay_fee_per_kb : int
      Per-kb relay fee used by calc_relay_fee.

    NOTE(review): the order of operations (blinding-key sums, mixer offset,
    last-output key compensation) is significant and must not be reordered.
    """
    self.serialized = None
    if self.coinbase:
        raise Exception(
            "generate() can be used only for common transaction, to create block transaction as miner use compose_block_transaction"
        )
    if not len(input_data):
        raise Exception("Tx should have at least one input")
    if not len(self._destinations):
        raise Exception("Tx should have at least one destination")
    in_value = sum([ioput[1] for ioput in input_data])
    out_value = sum([destination[1] for destination in self._destinations])
    relay_fee = self.calc_relay_fee(relay_fee_per_kb=relay_fee_per_kb)
    # +1 for destination is for change address
    self.fee = relay_fee + self.calc_new_outputs_fee(
        len(input_data), len(self._destinations) + 1)
    remainder = in_value - out_value - self.fee
    if remainder < 0:
        raise Exception("Not enough money in inputs to cover outputs")
    # TODO We need logic here to cover too low remainders (less than new output fee)
    # Change output always requests a proof (third tuple element True).
    self._destinations.append((change_address, remainder, True))
    privkey_sum = 0  # NOTE(review): unused below
    out_blinding_key_sum = None
    need_proofs = []
    excesses_key_sum = None
    # All destinations except the last get a random blinding key via fill().
    for out_index in range(len(self._destinations) - 1):
        address, value, need_proof = self._destinations[out_index]
        output = IOput()
        output.fill(address, value, generator=default_generator_ser)
        self.outputs.append(output)
        out_blinding_key_sum = (
            out_blinding_key_sum + output.blinding_key
        ) if out_blinding_key_sum else output.blinding_key
        if need_proof:
            need_proofs.append(
                (output, PrivateKey()
                 ))  #excesses will be generated after output generation
    offset_pk = PrivateKey()
    self.mixer_offset = int.from_bytes(offset_pk.private_key, "big")
    # privkey for the last one output isn't arbitrary
    address, value, need_proof = self._destinations[-1]
    if need_proof:
        need_proofs.append(
            (output, PrivateKey()
             ))  #excesses will be generated after output generation
    in_blinding_key_sum = None
    burdens_to_be_covered = []  # NOTE(review): unused below
    for i, v, priv_key, blinding_key, ser_apc in input_data:
        in_blinding_key_sum = (
            in_blinding_key_sum + blinding_key) if in_blinding_key_sum else blinding_key
        in_blinding_key_sum += priv_key
        self.updated_excesses[i] = excess_from_private_key(
            priv_key, b"\x01\x00" + ser_apc)
        self.inputs.append(pseudoinput(
            i, ser_apc))  #For tx serialization and sorts
    if len(need_proofs):
        excesses_key_sum = need_proofs[0][1]
        for i in need_proofs[1:]:
            excesses_key_sum += i[1]
    output = IOput()
    # The final output's key balances the whole transaction:
    # inputs - outputs - mixer offset (+ excess keys if proofs are requested).
    last_blinding_key = in_blinding_key_sum - out_blinding_key_sum - offset_pk
    if excesses_key_sum:
        last_blinding_key += excesses_key_sum
    output.fill(
        address,
        value,
        blinding_key=last_blinding_key,
        relay_fee=relay_fee,
        generator=default_generator_ser
    )  #TODO relay fee should be distributed uniformly, privacy leak
    self.outputs.append(output)
    [output.generate() for output in self.outputs]
    for ae in need_proofs:
        script = generate_proof_script(ae[0])
        e = excess_from_private_key(ae[1], script)
        self.additional_excesses.append(e)
    self.sort_lists()
def priv_by_pub(self, pubkey, r_txn):
    """Return the wallet's PrivateKey matching pubkey.

    Raises KeyError when the wallet holds no key for that pubkey.
    """
    serialized = pubkey.serialize()
    stored = self.wallet.get_privkey(serialized, r_txn)
    if not stored:
        raise KeyError("Private key not in the wallet")
    return PrivateKey(stored, raw=True)
def priv_by_address(self, address, cursor):
    """Look up the raw private key for address by its serialized pubkey and
    wrap it as a PrivateKey object."""
    raw = self._ser_priv_by_ser_pub(address.serialized_pubkey, cursor)
    return PrivateKey(raw, raw=True)
class IOput:
    """Basic class which represents a transaction input or output."""

    # Pedersen commitment should be unique, it is the id of an IOput.

    def __init__(self, json_object=None, binary_object=None):
        """
        Initialize empty ioput

        Parameters
        ----------
        [optional] json_object : dict
          json representation (TODO, not implemented yet)
        [optional] binary_object : bytes
          serialized output.
        """
        # serializable data
        self.version = 1
        self.block_version = 1
        self.lock_height = 0
        self.authorized_pedersen_commitment = None
        self.address = None
        self.rangeproof = None
        self.encrypted_message = None
        self.generator = None
        self.relay_fee = None
        # inner data (never serialized; recovered via detect_value/deserialize)
        self.unauthorized_pedersen_commitment = None
        self.value = None
        self.blinding_key = None
        if json_object:
            self.from_json(json_object)
        if binary_object:
            self.deserialize(binary_object)

    def from_json(self, json_object):
        pass  # TODO

    def to_json(self):
        pass  # TODO

    def serialize(self):
        """Return the binary representation of the output:
        signed part, then length-prefixed rangeproof."""
        ret = self.signed_part()
        ser_range_proof = self.rangeproof.proof
        ret += struct.pack("> H", len(ser_range_proof))
        ret += ser_range_proof
        return ret

    @property
    def hash(self):
        """sha256 of the full serialized output."""
        seed = self.serialize()
        m = hashlib.sha256()
        m.update(seed)
        return m.digest()

    @property
    def index_len(self):
        # len apc + len hash
        return 33 + 32

    @property
    def serialized_index(self):
        """Binary representation of the output index: APC || hash.

        For a transaction to be spent all inputs must be known, so inputs may
        carry only an index. Confirmed outputs have unique authorized pedersen
        commitments (APC), which makes the APC a natural index, but different
        nodes may hold mempool transactions with distinct outputs sharing one
        APC (double spend), and that dis-consensus could be used to disrupt
        connectivity. A pure hash index, on the other hand, hinders basic
        mimblewimble checks (sum of inputs and outputs being zero) without
        ledger access. Concatenating APC and hash is slightly bigger (still
        tiny next to the rangeproof) but allows both the basic checks and
        unambiguous indexing.
        """
        return self.authorized_pedersen_commitment.serialize() + self.hash

    @property
    def commitment_index(self):
        """Special index which is used for building the commitment merkle tree."""
        m = hashlib.sha256()
        m.update(self.authorized_pedersen_commitment.serialize())
        return self.authorized_pedersen_commitment.serialize() + m.digest()

    @property
    def is_coinbase(self):
        # Coinbase outputs are marked by version 0.
        return self.version == 0

    def deserialize(self, serialized_output):
        """Decode output from serialized representation."""
        self.deserialize_raw(serialized_output)

    def deserialize_raw(self, serialized_output):
        # TODO part1, part2, part3 should be substituted with construction
        # `something, serialized = serialized[:x], serialized[x:]` as it is done in other modules
        """Decode output from serialized representation.

        Returns the residue of the data after deserialization.
        Raises Exception on truncated input; NotImplementedError on an
        unknown generator.
        """
        if len(serialized_output) < 145:
            raise Exception("Serialized output doesn't contain enough bytes for constant length parameters")
        part1, part2 = serialized_output[:80], serialized_output[80:]
        (self.version, self.block_version, self.lock_height, self.generator,
         self.relay_fee, self.apc) = struct.unpack("> H H H 33s Q 33s", part1)
        if self.generator in generators:
            self.authorized_pedersen_commitment = PedersenCommitment(
                commitment=self.apc, raw=True,
                blinded_generator=generators[self.generator])
        else:
            # FIX: was `raise NotImplemented` — raising the NotImplemented
            # constant is a TypeError in Python 3.
            raise NotImplementedError
        self.address = Address()
        part2 = self.address.deserialize_raw(part2)
        if len(part2) < 2:
            raise Exception("Serialized output doesn't contain enough bytes for encrypted message length")
        (encrypted_message_len,) = struct.unpack("> H", part2[:2])
        if len(part2) < 2 + encrypted_message_len:
            raise Exception("Serialized output doesn't contain enough bytes for encrypted message")
        self.encrypted_message = part2[2:2 + encrypted_message_len]
        part3 = part2[2 + encrypted_message_len:]
        (range_proof_len,) = struct.unpack("> H", part3[:2])
        if len(part3) < 2 + range_proof_len:
            raise Exception("Serialized output doesn't contain enough bytes for rangeproof")
        self._calc_unauthorized_pedersen()
        self.rangeproof = RangeProof(
            proof=part3[2:2 + range_proof_len],
            pedersen_commitment=self.unauthorized_pedersen_commitment,
            additional_data=self.signed_part())
        info = self.info()
        # A fully-public proof (min == max) reveals the value directly.
        if info['min_value'] == info['max_value']:
            self.value = info['min_value']
        return part3[2 + range_proof_len:]

    def detect_value(self, key_manager):
        """Try to recover this output's value and blinding key with keys held
        by key_manager.

        Best effort: on any failure value/blinding_key stay unset and False is
        returned. Returns bool: True iff a non-zero value was detected.
        """
        try:
            privkey = key_manager.priv_by_address(self.address)
            # The serialized APC doubles as the nonce.
            nonce = self.apc
            decrypted_message = decrypt(privkey, nonce, self.encrypted_message)
            raw_blinding_key, self.value = struct.unpack(
                "> 32s Q", decrypted_message)
            self.blinding_key = PrivateKey(raw_blinding_key, raw=True)
            # Commitment rebuilt from (value, blinding_key) must match.
            if not self._calc_pedersen_wos() == self.unpc:
                self.blinding_key, self.value = None, None
                raise Exception("Incorrect blinding key and value")
        except Exception as e:
            # TODO definitely some logic should be added here to notify about missed info
            pass
        return bool(self.value)

    def signed_part(self):
        """Serialized representation of the elements which must be signed.

        A Flurbo output has a more abundant structure than a classical mw
        output; to be sure all elements were relayed unchanged, they are all
        signed in the rangeproof.
        """
        ret = b''
        ret += struct.pack("> H H H 33s Q 33s",
                           self.version,
                           self.block_version,
                           self.lock_height,
                           self.generator,
                           self.relay_fee,
                           self.authorized_pedersen_commitment.serialize())
        ret += self.address.serialize()
        ret += struct.pack("> H", len(self.encrypted_message))
        ret += self.encrypted_message
        return ret

    # Wallet functionality
    def fill(self, address, value, relay_fee=0, blinding_key=None,
             generator=default_generator_ser, coinbase=False, lock_height=0):
        """
        Fill basic params of ouput.

        Fill params, but doesn't generate commitments and rangeproofs.
        It should be done separately by generate() function

        Parameters
        ----------
        address : Address
          Address of output
        value : int
          Value of output in minimal indivisible units
        [optional] relay_fee : int
          Default: 0. Relay fee of output in minimal indivisible units
        [optional] blinding_key : PrivateKey
          Default: random. Usually wallet should not store this key: it will
          be encrypted in the output with the private key of the address
        [optional] generator : GeneratorOnCurve
          Default: default_generator. Asset transactions will use other generators
        [optional] coinbase : bool
          Default: False. Coinbase outputs have version 0 (common version is 1)
        [optional] lock_height : integer
          Default: 0. Minimal height at which output can be spent.
        """
        self.address = address
        self.generator = generator
        self.value = value
        if blinding_key is None:
            blinding_key = PrivateKey()  # we do not store this key in the wallet
        self.blinding_key = blinding_key
        self.relay_fee = relay_fee
        if coinbase:
            self.version = 0
        self.lock_height = lock_height

    def _calc_pedersen_wos(self):
        """Calc unauthorized pedersen commitment from generators, blinding key
        and value.

        Note, it differs from _calc_unauthorized_pedersen (which calcs UPC
        from APC and address).
        """
        assert (self.generator and self.address and self.blinding_key
                and isinstance(self.value, int))  # self.value can be 0
        if self.generator in generators:
            unpc = PedersenCommitment(blinded_generator=generators[self.generator])
        else:
            # FIX: was `raise NotImplemented` (TypeError in Python 3). TODO
            raise NotImplementedError
        unpc.create(self.value, self.blinding_key.private_key)
        return unpc

    # rename?
    def _calc_pedersen(self):
        self.unauthorized_pedersen_commitment = self._calc_pedersen_wos()

    def _calc_authorized_pedersen(self):
        """Calc authorized pedersen commitment from UPC and address."""
        assert self.unauthorized_pedersen_commitment
        assert self.generator
        self.authorized_pedersen_commitment = \
            (self.unauthorized_pedersen_commitment.to_public_key() +
             self.address.pubkey).to_pedersen_commitment(
                 blinded_generator=generators[self.generator])

    def _calc_unauthorized_pedersen(self):
        """Calc unauthorized pedersen commitment from APC and address."""
        assert self.authorized_pedersen_commitment
        assert self.generator
        if not self.generator in generators:
            # FIX: was `raise NotImplemented` (TypeError in Python 3).
            raise NotImplementedError
        self.unauthorized_pedersen_commitment = \
            (self.authorized_pedersen_commitment.to_public_key() -
             self.address.pubkey).to_pedersen_commitment(
                 blinded_generator=generators[self.generator])

    # TODO default exp should be more wise
    def generate(self, min_value=0, nonce=None, exp=0, concealed_bits=64):
        """
        Generate output

        Calc all necessary params like APC, rangeproofs and so on. After
        generation the output is ready for serialization. Params listed below
        control what should be concealed by the proof.

        Parameters
        ----------
        [optional] min_value : int
          Default: 0. Constructs a proof where the verifier can tell the
          minimum value is at least the specified amount.
        [optional] exp : int
          Default: 0. Base-10 exponent: number of digits which will be made
          public. Allowed range is -1 to 18. 0 means all digits (with respect
          to concealed bits) are private, 1 means the smallest digit is public
          etc; -1 is a special value to make all digits public.
        [optional] nonce : 32bytes
          Default: random. 32-byte secret nonce used to initialize the proof.
        [optional] concealed_bits : int
          Default: 64. Number of bits of the value to keep private
          (0 = auto/minimal, up to 64). For instance the minimal number of
          bits to conceal value 100 is 7: with a 7-bit proof an observer can
          check the value is between 0 and 127. If the minimal number of bits
          were used chainwide, an observer could guess with a certain degree
          of confidence that the value is between 64 and 127.
        """
        self._calc_pedersen()
        self._calc_authorized_pedersen()
        apc = self.authorized_pedersen_commitment.serialize()
        plaintext = struct.pack(
            "> 32s Q", self.blinding_key.private_key, self.value)
        # The APC doubles as the encryption nonce (see detect_value).
        self.encrypted_message = encrypt(self.address.pubkey, apc, plaintext)
        additional_data = self.signed_part()
        self.rangeproof = RangeProof(
            pedersen_commitment=self.unauthorized_pedersen_commitment,
            additional_data=additional_data)
        if self.version == 0:
            # Coinbase: fully public proof (value is consensus-checked anyway).
            self.rangeproof._sign(exp=-1, concealed_bits=0, nonce=nonce)
        elif self.version == 1:
            self.rangeproof._sign(min_value=min_value, nonce=nonce, exp=exp,
                                  concealed_bits=concealed_bits)

    # rename to validate?
    def verify(self):
        """
        Verify IOput.

        Requirements for a valid output:
          0) version is known
          1) address is valid
          2) generator is known
          3) range_proof is valid and signs
             (version, address, asset_type, encrypted_message, relay_fee)
        """
        if self.version == 1 or self.version == 0:
            try:
                assert self.address.verify(), "Bad address"
                assert self.generator in generators, "Bad generator"
                assert self.rangeproof.verify(), "Bad rangeproof"
            except AssertionError as e:
                return False
        else:
            return False
        return self.address.verify()

    def info(self):
        """Return dictionary with params extracted from the proof:
        'exp', 'mantissa' (the same as concealed bits), 'min_value', 'max_value'.
        """
        a, b, c, d = self.rangeproof.info()
        return {'exp': a, 'mantissa': b, 'min_value': c, 'max_value': d}

    def __str__(self):
        s = ""
        s += "Output[ coinbase: %s, Pedersen: 0x%s, Pubkey: 0x%s, RangeProof: %s, Value: %s, Fee: %s, Blinding key: %s...]" % (
            "+" if self.version == 0 else "-",
            self.authorized_pedersen_commitment.serialize()[:8].hex(),
            self.address.pubkey.serialize()[:10].hex(),
            "+" if self.rangeproof else "-",
            str(self.value) if self.value else 'unknown',
            str(self.relay_fee),
            ('0x' + self.blinding_key.serialize()[:8]) if self.blinding_key else 'unknown',
        )
        return s
def fill_pool(self, cursor, keys_number):
    """Add keys_number freshly generated private keys to the pool."""
    for _ in range(keys_number):
        self._add_privkey_to_pool(PrivateKey(), cursor)
def priv_by_address(self, address):
    """Return the wallet's PrivateKey for address.

    Raises KeyError when the wallet holds no key for that address.
    """
    stored = self.wallet.get_privkey(address.pubkey.serialize())
    if not stored:
        raise KeyError("Private key not in the wallet")
    return PrivateKey(stored, raw=True)
def core_loop(syncer, config):
    """Main blockchain event loop.

    Drains the 'Blockchain' queue, dispatching each message by its 'action'
    key (sync, wallet/RPC and maintenance actions). Messages carrying a
    future 'time' are deferred back into the queue. Runs forever.

    FIXES vs previous revision:
      * "take solved block template": removed a `raise e` that made the
        error response to the requester unreachable dead code;
      * "send to address": insufficient funds no longer falls through into
        transaction creation/broadcast;
      * "give synchronization status": bare `except:` narrowed to
        ValueError (max() of an empty sequence).
    """
    message_queue = syncer.queues['Blockchain']
    init_storage_space(config)
    nodes = {}
    set_ask_for_blocks_hook(storage_space.blockchain, message_queue)
    set_ask_for_txouts_hook(storage_space.blocks_storage, message_queue)
    requests = {}
    message_queue.put({"action": "give nodes list reminder"})

    def send_message(destination, message):
        # Ensure every outgoing message has an id and a sender.
        if not 'id' in message:
            message['id'] = uuid4()
        if not 'sender' in message:
            message['sender'] = "Blockchain"
        syncer.queues[destination].put(message)

    def send_to_nm(message):
        send_message("NetworkManager", message)

    logger.debug("Start of core loop")
    while True:
        sleep(0.05)
        put_back_messages = []
        while not message_queue.empty():
            message = message_queue.get()
            if 'time' in message and message['time'] > time():  # delay this message
                put_back_messages.append(message)
                continue
            logger.info("Processing message %s" % message)
            if not 'action' in message:  # it is a response
                if message['id'] in requests:  # response is awaited
                    if requests[message['id']] == "give nodes list":
                        requests.pop(message['id'])
                        message_queue.put({
                            "action": "take nodes list",
                            "nodes": message["result"]
                        })
                else:
                    pass  # Drop
                continue
            try:
                if message["action"] == "take the headers":
                    process_new_headers(message)
                if message["action"] == "take the blocks":
                    initial_tip = storage_space.blockchain.current_tip
                    process_new_blocks(message)
                    after_tip = storage_space.blockchain.current_tip
                    if not after_tip == initial_tip:
                        notify_all_nodes_about_new_tip(nodes, send_to_nm)
                if message["action"] == "take the txos":
                    process_new_txos(message)
                if message["action"] == "give blocks":
                    process_blocks_request(message, send_message)
                if message["action"] == "give next headers":
                    process_next_headers_request(message, send_message)
                if message["action"] == "give txos":
                    process_txos_request(message, send_message)
                if message["action"] == "find common root":
                    process_find_common_root(message, send_message)
                if message["action"] == "find common root response":
                    process_find_common_root_reponse(message,
                                                     nodes[message["node"]],
                                                     send_message)
                if message["action"] == "give TBM transaction":
                    process_tbm_tx_request(message, send_message)
                if message["action"] == "take TBM transaction":
                    process_tbm_tx(message, send_to_nm, nodes)
                if message["action"] == "give tip height":
                    send_message(
                        message["sender"], {
                            "id": message["id"],
                            "result": storage_space.blockchain.current_height
                        })
                if message["action"] == "take tip info":
                    if not message["node"] in nodes:
                        nodes[message["node"]] = {'node': message["node"]}
                    process_tip_info(message,
                                     nodes[message["node"]],
                                     send=send_to_nm)
            except DOSException as e:
                logger.info("DOS Exception %s" % str(e))
                #raise e #TODO send to NM
            except Exception as e:
                raise
            if message["action"] == "give block template":
                block = storage_space.mempool_tx.give_block_template()
                ser_head = block.header.serialize()
                send_message(message["sender"], {
                    "id": message["id"],
                    "result": ser_head
                })
            if message["action"] == "take solved block template":
                try:
                    initial_tip = storage_space.blockchain.current_tip
                    header = Header()
                    header.deserialize(message["solved template"])
                    solved_block = storage_space.mempool_tx.get_block_by_header_solution(
                        header)
                    storage_space.headers_manager.add_header(
                        solved_block.header)
                    storage_space.headers_manager.context_validation(
                        solved_block.header.hash)
                    solved_block.non_context_verify()
                    storage_space.blockchain.add_block(solved_block)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": "Accepted"
                    })
                    after_tip = storage_space.blockchain.current_tip
                    if not after_tip == initial_tip:
                        notify_all_nodes_about_new_tip(nodes, send_to_nm)
                except Exception as e:
                    # Report the failure to the requester instead of crashing
                    # the core loop (previously unreachable due to `raise e`).
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": str(e)
                    })
            if message["action"] == "get confirmed balance stats":
                if storage_space.mempool_tx.key_manager:
                    stats = storage_space.mempool_tx.key_manager.get_confirmed_balance_stats(
                        storage_space.utxo_index, storage_space.txos_storage,
                        storage_space.blockchain.current_height)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": stats
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })
            if message["action"] == "get confirmed balance list":
                if storage_space.mempool_tx.key_manager:
                    _list = storage_space.mempool_tx.key_manager.get_confirmed_balance_list(
                        storage_space.utxo_index, storage_space.txos_storage,
                        storage_space.blockchain.current_height)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": _list
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })
            if message["action"] == "give new address":
                if storage_space.mempool_tx.key_manager:
                    texted_address = storage_space.mempool_tx.key_manager.new_address(
                    ).to_text()
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": texted_address
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })
            if message["action"] == "give private key":
                if storage_space.mempool_tx.key_manager:
                    km = storage_space.mempool_tx.key_manager
                    a = Address()
                    a.from_text(message["address"])
                    serialized_pk = km.priv_by_address(a).serialize()
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": serialized_pk
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })
            if message["action"] == "take private key":
                if storage_space.mempool_tx.key_manager:
                    km = storage_space.mempool_tx.key_manager
                    pk = PrivateKey()
                    pk.deserialize(message['privkey'])
                    km.add_privkey(pk)
                    send_message(message["sender"], {
                        "id": message["id"],
                        "result": "imported"
                    })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })
            if message["action"] == "give synchronization status":
                our_height = storage_space.blockchain.current_height
                best_known_header = storage_space.headers_manager.best_header_height
                try:
                    best_advertised_height = max([
                        nodes[node]["height"] for node in nodes
                        if "height" in nodes[node]
                    ])
                except ValueError:  # no node has advertised a height yet
                    best_advertised_height = None
                send_message(
                    message["sender"], {
                        "id": message["id"],
                        "result": {
                            'height': our_height,
                            'best_known_header': best_known_header,
                            'best_advertised_height': best_advertised_height
                        }
                    })
            if message["action"] == "send to address":
                value = int(message["value"])
                taddress = message["address"]
                a = Address()
                a.from_text(taddress)
                if storage_space.mempool_tx.key_manager:
                    _list = storage_space.mempool_tx.key_manager.get_confirmed_balance_list(
                        storage_space.utxo_index, storage_space.txos_storage,
                        storage_space.blockchain.current_height)
                    # Greedily collect matured utxos until `value` is covered.
                    list_to_spend = []
                    summ = 0
                    for address in _list:
                        for texted_index in _list[address]:
                            if summ > value:
                                continue
                            if isinstance(_list[address][texted_index], int):
                                _index = base64.b64decode(
                                    texted_index.encode())
                                utxo = storage_space.txos_storage.confirmed[
                                    _index]
                                if not utxo.lock_height <= storage_space.blockchain.current_height:
                                    continue  # not yet matured
                                list_to_spend.append(utxo)
                                summ += _list[address][texted_index]
                    if summ < value:
                        send_message(
                            message["sender"], {
                                "id": message["id"],
                                "error": "Not enough matured coins"
                            })
                    else:
                        # Previously the transaction was built and broadcast
                        # even after the insufficient-funds error was sent.
                        tx = Transaction(
                            txos_storage=storage_space.txos_storage,
                            key_manager=storage_space.mempool_tx.key_manager)
                        for utxo in list_to_spend:
                            tx.push_input(utxo)
                        tx.add_destination((a, value))
                        tx.generate()
                        tx.verify()
                        storage_space.mempool_tx.add_tx(tx)
                        tx_skel = TransactionSkeleton(tx=tx)
                        notify_all_nodes_about_tx(tx_skel.serialize(
                            rich_format=True, max_size=40000),
                                                  nodes,
                                                  send_to_nm,
                                                  _except=[],
                                                  mode=1)
                        send_message(message["sender"], {
                            "id": message["id"],
                            "result": "generated"
                        })
                else:
                    send_message(message["sender"], {
                        "id": message["id"],
                        "error": "No registered key manager"
                    })
            #message from core_loop
            if message["action"] == "check txouts download status":
                txos = message["txos_hashes"]
                to_be_downloaded = []
                for txo in txos:
                    if not storage_space.txos_storage.known(txo):
                        to_be_downloaded.append(txo)
                if not to_be_downloaded:
                    continue  #We are good, txouts are already downloaded
                already_asked_nodes = message["already_asked_nodes"]
                asked = False
                for node_params in nodes:
                    node = nodes[node_params]
                    if node in already_asked_nodes:
                        continue
                    already_asked_nodes += [node]
                    send_to_nm({
                        "action": "give txos",
                        "txos_hashes": b"".join(to_be_downloaded),
                        "num": len(to_be_downloaded),
                        "id": str(uuid4()),
                        "node": node_params
                    })
                    new_message = {
                        "action": "check txouts download status",
                        "txos_hashes": to_be_downloaded,
                        "already_asked_nodes": already_asked_nodes,
                        "id": str(uuid4()),
                        "time": int(time() + 300)
                    }
                    asked = True
                    put_back_messages.append(new_message)
                    break
                if not asked:  #We already asked all applicable nodes
                    message["time"] = int(time()) + 3600
                    message["already_asked_nodes"] = []
                    put_back_messages.append(
                        message)  # we will try to ask again in an hour
            #message from core_loop
            if message["action"] == "check blocks download status":
                #TODO download many blocks at once
                block_hashes = message["block_hashes"]
                to_be_downloaded = []
                lowest_height = 1e10
                for block_hash in block_hashes:
                    if block_hash in storage_space.blocks_storage:
                        continue  #We are good, block already downloaded
                    if not block_hash in storage_space.blockchain.awaited_blocks:
                        continue  #For some reason we don't need this block anymore
                    to_be_downloaded.append(block_hash)
                    if storage_space.headers_storage[
                            block_hash].height < lowest_height:
                        lowest_height = storage_space.headers_storage[
                            block_hash].height
                already_asked_nodes = message["already_asked_nodes"]
                asked = False
                for node_params in nodes:
                    node = nodes[node_params]
                    if node in already_asked_nodes:
                        continue
                    # Only ask nodes whose advertised chain reaches the block.
                    if node["height"] < lowest_height:
                        continue
                    already_asked_nodes += [node]
                    send_to_nm({
                        "action": "give blocks",
                        "block_hashes": bytes(b"".join(block_hashes)),
                        'num': len(block_hashes),
                        "id": str(uuid4()),
                        "node": node_params
                    })
                    new_message = {
                        "action": "check blocks download status",
                        "block_hashes": to_be_downloaded,
                        "already_asked_nodes": already_asked_nodes,
                        "id": str(uuid4()),
                        "time": int(time() + 300)
                    }
                    asked = True
                    put_back_messages.append(new_message)
                    break
                if not asked:  #We already asked all applicable nodes
                    message["time"] = int(time()) + 3600
                    message["already_asked_nodes"] = []
                    put_back_messages.append(
                        message)  # we will try to ask again in an hour
            if message["action"] == "take nodes list":
                for node in message["nodes"]:
                    if not node in nodes:  #Do not overwrite
                        nodes[node] = {"node": node}
                disconnected_nodes = []
                for existing_node in nodes:
                    if not existing_node in message["nodes"]:
                        disconnected_nodes.append(existing_node)
                for dn in disconnected_nodes:
                    nodes.pop(dn)
            if message["action"] == "give nodes list reminder":
                _id = str(uuid4())
                send_to_nm({
                    "action": "give nodes list",
                    "sender": "Blockchain",
                    "id": _id
                })
                requests[_id] = "give nodes list"
                put_back_messages.append({
                    "action": "give nodes list reminder",
                    "time": int(time()) + 3
                })
        for _message in put_back_messages:
            message_queue.put(_message)
        try:
            check_sync_status(nodes, send_to_nm)
        except Exception as e:
            logger.error(e)
def fill_pool(self, keys_number=100):
    """Pre-generate `keys_number` fresh private keys and add each one to the wallet's key pool."""
    for _unused in range(keys_number):
        fresh_key = PrivateKey()
        serialized_pub = fresh_key.pubkey.serialize()
        self.wallet.add_privkey_to_pool(serialized_pub, fresh_key.private_key)
def new_address(self, cursor):
    """Return an address for the next pooled private key, replenishing the pool with one fresh key first."""
    # One key in, one key out: keeps the pool size steady.
    self._add_privkey_to_pool(PrivateKey(), cursor)
    pooled_raw_key = self._get_privkey_from_pool(cursor)
    return address_from_private_key(PrivateKey(pooled_raw_key, raw=True))
from leer.core.lubbadubdub.ioput import IOput from leer.core.lubbadubdub.address import address_from_private_key from secp256k1_zkp import PrivateKey keys = [PrivateKey() for i in range(5)] adr1, adr2, adr3, adr4, adr5 = [ address_from_private_key(keys[i]) for i in range(5) ] def test_ioput(): ioput_serialize_deserialize() ioput_proofs_info() ioput_encrypted_messsage() test_ioput_indexes() def ioput_serialize_deserialize(): options = { 'address': [adr1], 'value': [1, 100, int(705e6 * 1e8)], 'relay_fee': [0, 100, int(1e8)], 'generator': [ None ], #b'\x0b\xf8x*\xe9\xdc\xb1\xfd\xe9k\x8eZ\xf9\x8250\xdcrLU`p\xbaD\xf1\xfdh\x93\xd7\x85\xb9\x9e\x07'], #TODO arbitraty value gen 'burden_hash': [None, b"\x44" * 32], 'coinbase': [True, False], 'lock_height': [0, 10, 10000] } all_possible_options = [{}] for opt in options:
def wallet(syncer, config):
    '''
    Wallet is synchronous service which holds private keys and information
    about owned outputs. It provides information for transactions and block
    templates generation.

    Runs forever: drains the 'Wallet' queue in `syncer.queues` and dispatches
    on message['action']. Responses are put onto the queue named by
    message['sender']. All key material lives in a KeyDB opened from
    config['location']['wallet'].

    Parameters
    ----------
    syncer : object
        Holds the `queues` dict of inter-service message queues.
    config : dict-like
        Application config: wallet location/password, logging levels,
        fee policy.
    '''

    def get_height(timeout=2.5):
        # Ask the Notifications service for the current blockchain height and
        # synchronously wait (polling our own queue) for the tagged reply.
        # Messages that are not our reply are put back for later processing.
        # Raises KeyError on timeout and also when the reply is 'error'.
        _id = str(uuid4())
        syncer.queues['Notifications'].put({
            'action': 'get',
            'id': _id,
            'key': 'blockchain height',
            'sender': "Wallet"
        })
        message_queue = syncer.queues['Wallet']
        start_time = time()
        result = None
        while True:
            put_back = [
            ]  #We wait for specific message, all others will wait for being processed
            while not message_queue.empty():
                message = message_queue.get()
                if (not 'id' in message) or (not message['id'] == _id):
                    put_back.append(message)
                    continue
                result = message['result']
                break
            for message in put_back:
                message_queue.put(message)
            if result:
                break
            sleep(0.01)
            if time() - start_time > timeout:
                raise KeyError
        if result == 'error':
            raise KeyError
        return result['value']

    # Last value sent per notification key; notify() uses it to rate-limit.
    notification_cache = {}

    def notify(key, value, timestamp=None):
        # Push a "set key=value" notification, unless the very same value was
        # already sent less than 5 seconds ago.
        if (key in notification_cache) and (
                notification_cache[key]['value'] == value
        ) and (time() - notification_cache[key]['timestamp']) < 5:
            return  #Do not spam notifications with the same values
        message = {}
        message['id'] = uuid4()
        message['sender'] = "Wallet"
        if not timestamp:
            timestamp = time()
        message['time'] = timestamp
        message['action'] = "set"
        message['key'] = key
        message['value'] = value
        syncer.queues["Notifications"].put(message)
        notification_cache[key] = {'value': value, 'timestamp': timestamp}

    #set logging
    default_log_level = logging.INFO  # NOTE(review): assigned but never read below
    if "logging" in config:  #debug, info, warning, error, critical
        loglevels = {
            "debug": logging.DEBUG,
            "info": logging.INFO,
            "warning": logging.WARNING,
            "error": logging.ERROR,
            "critical": logging.CRITICAL
        }
        if "base" in config["logging"] and config["logging"][
                "base"] in loglevels:
            logger.setLevel(loglevels[config["logging"]["base"]])
        # The wallet-specific level, when set, overrides the base level.
        if "wallet" in config["logging"] and config["logging"][
                "wallet"] in loglevels:  #its ok to rewrite
            logger.setLevel(loglevels[config["logging"]["wallet"]])

    message_queue = syncer.queues['Wallet']
    _path = config['location']['wallet']
    try:
        password = config['wallet'].get('password', None)
    except:
        # A missing 'wallet' config section means no encryption password.
        password = None
    km = KeyDB(path=_path, password=password)
    with km.open() as conn:
        cursor = conn.cursor()
        apply_migrations(cursor)
    notify('last wallet update', time())

    # Main service loop: poll the queue and dispatch on message['action'].
    while True:
        sleep(0.01)
        while not message_queue.empty():
            message = message_queue.get()
            if 'action' in message:
                logger.info("Process message `%s`" % message['action'])
                logger.debug("Process message %s" % message)
            else:
                logger.info("Process message %s" % message)
            if not 'action' in message:
                continue

            if message['action'] == "process new block":
                # Scan an accepted block's transaction: mark our spent inputs
                # and record outputs paid to public keys we own.
                tx = Transaction(txos_storage=None, excesses_storage=None)
                tx.deserialize(
                    message['tx'], rtx=None, skip_verification=True
                )  #skip_verification allows us to not provide rtx
                block_height = message['height']
                last_time_updated = None
                with km.open() as conn:
                    cursor = conn.cursor()
                    for index in tx.inputs:
                        #Note it is not check whether output is unspent or not, we check that output is marked as our and unspent in our wallet
                        if km.is_unspent(index, cursor):
                            km.spend_output(index, block_height, cursor)
                            last_time_updated = time()
                    for _o in tx.outputs:
                        if km.is_owned_pubkey(_o.address.pubkey.serialize(),
                                              cursor):
                            km.add_output(_o, block_height, cursor)
                            last_time_updated = time()
                        if km.is_saved(_o, cursor):
                            km.register_processed_output(
                                _o.serialized_index, block_height, cursor)
                if last_time_updated:
                    notify('last wallet update', last_time_updated)

            if message['action'] == "process rollback":
                # Undo wallet state down to the given block height.
                rollback = message['rollback_object']
                block_height = message['block_height']
                with km.open() as conn:
                    cursor = conn.cursor()
                    km.rollback(block_height, cursor)
                    last_time_updated = time()
                    notify('last wallet update', last_time_updated)

            if message[
                    'action'] == "process indexed outputs":  #during private key import correspondent outputs will be processed again
                pass

            if message['action'] == "give new taddress":
                # Fresh address, text-serialized.
                with km.open() as conn:
                    cursor = conn.cursor()
                    address = km.new_address(cursor)
                    response = {"id": message["id"], "result": address.to_text()}
                syncer.queues[message['sender']].put(response)

            if message['action'] == "give new address":
                # Fresh address, binary-serialized.
                with km.open() as conn:
                    cursor = conn.cursor()
                    address = km.new_address(cursor)
                    response = {"id": message["id"], "result": address.serialize()}
                syncer.queues[message['sender']].put(response)

            if message['action'] == "get confirmed balance stats":
                response = {"id": message["id"]}
                try:
                    height = get_height()
                    with km.open() as conn:
                        cursor = conn.cursor()
                        stats = km.get_confirmed_balance_stats(height, cursor)
                    response["result"] = stats
                except KeyError:
                    # get_height raises KeyError on timeout / 'error' reply.
                    response[
                        "result"] = "error: core_loop didn't set height yet"
                except Exception as e:
                    response["result"] = "error: " + str(e)
                syncer.queues[message['sender']].put(response)

            if message['action'] == "get confirmed balance list":
                response = {"id": message["id"]}
                try:
                    height = get_height()
                    with km.open() as conn:
                        cursor = conn.cursor()
                        stats = km.get_confirmed_balance_list(height, cursor)
                    response["result"] = stats
                except KeyError:
                    response[
                        "result"] = "error: core_loop didn't set height yet"
                except Exception as e:
                    response["result"] = "error: " + str(e)
                syncer.queues[message['sender']].put(response)

            if message['action'] == "give private key":
                # Export the raw private key for a text address we control.
                taddress = message["address"]
                a = Address()
                a.from_text(taddress)
                with km.open() as conn:
                    cursor = conn.cursor()
                    priv = km.priv_by_address(a, cursor)
                    response = {"id": message["id"], "result": priv.private_key}
                syncer.queues[message['sender']].put(response)

            if message['action'] == "take private key":
                # Import an externally supplied raw private key.
                response = {"id": message["id"]}
                rescan = bool(message["rescan"])
                ser_privkey = message["privkey"]
                privkey = PrivateKey(ser_privkey, raw=True)
                with km.open() as conn:
                    cursor = conn.cursor()
                    res = km.add_privkey(privkey, cursor, duplicate_safe=True)
                if res and not rescan:
                    response["result"] = "success"
                elif rescan:
                    # Rescanning history for outputs of the imported key is
                    # not supported yet.
                    response["result"] = "failed"
                    response["error"] = "rescan is not implemented"
                else:
                    response["result"] = "failed"
                syncer.queues[message['sender']].put(response)
                continue

            if message['action'] == "give last transactions info":
                response = {"id": message["id"]}
                num = int(message["num"])
                with km.open() as conn:
                    cursor = conn.cursor()
                    response["result"] = km.give_transactions(num, cursor)
                syncer.queues[message['sender']].put(response)
                continue

            if message['action'] == "generate tx":
                # Build, blind-sign and persist a transaction paying `value`
                # (minimal units) to the given text address from confirmed
                # owned outputs.
                response = {"id": message["id"]}
                value = int(message["value"])
                taddress = message["address"]
                a = Address()
                a.from_text(taddress)
                try:
                    current_height = get_height()
                except KeyError:
                    response["result"] = "error"
                    response["error"] = "core_loop didn't set height yet"
                    syncer.queues[message['sender']].put(response)
                    continue
                except Exception as e:
                    response["result"] = "error"
                    response["error"] = str(e)
                    syncer.queues[message['sender']].put(response)
                    continue
                with km.open() as conn:
                    cursor = conn.cursor()
                    _list = km.get_confirmed_balance_list(
                        current_height, cursor)
                    list_to_spend = []  # NOTE(review): never populated below
                    summ = 0
                    utxos = []
                    expected_fee = 0
                    # Greedy coin selection: keep picking owned outputs until
                    # value + the running fee estimate is covered.
                    for address in _list:
                        for texted_index in _list[address]:
                            if summ > value + expected_fee:  #TODO fee here
                                continue
                            if isinstance(_list[address][texted_index], int):
                                _index = base64.b64decode(
                                    texted_index.encode())
                                ser_priv, ser_blinding, apc = km.get_output_private_data(
                                    _index, cursor)
                                priv = PrivateKey(ser_priv, raw=True)
                                blinding = PrivateKey(ser_blinding, raw=True)
                                utxos.append(
                                    (_index, _list[address][texted_index],
                                     priv, blinding, apc))
                                summ += _list[address][texted_index]
                                expected_fee += 60000  #Should be enough
                    if summ < value:
                        response["result"] = "error"
                        response["error"] = "Not enough matured coins"
                        syncer.queues[message['sender']].put(response)
                        continue
                    if summ < value + expected_fee:
                        response["result"] = "error"
                        response[
                            "error"] = "Not enough matured coins for value and fees (%.8f)" % (
                                expected_fee / 1e8)
                        syncer.queues[message['sender']].put(response)
                        continue
                    tx = Transaction(None, None)
                    tx.add_destination((a, value, True))
                    tx.blindly_generate(
                        km.new_address(cursor), utxos,
                        config["fee_policy"].get("generate_fee_per_kb", 3000))
                    km.save_generated_transaction(tx, cursor)
                    response["result"] = tx.serialize()
                syncer.queues[message['sender']].put(response)

            if message['action'] == "stop":
                logger.info("Wallet stops")
                return