def wrap_payload_in_txn_batch(txn_key, payload, header, batch_key):
    """Wrap a serialized RBACPayload in a signed transaction and batch.

    Args:
        txn_key (sawtooth_signing.Signer): The txn signer's key pair.
        payload (bytes): The serialized RBACPayload.
        header (bytes): The serialized TransactionHeader.
        batch_key (sawtooth_signing.Signer): The batch signer's key pair.

    Returns:
        tuple: ``(transaction_ids, batches, batch_signature,
        batch_list_bytes)`` where ``transaction_ids`` is a one-element
        list holding the transaction's header signature, ``batches`` is a
        one-element list holding the Batch, ``batch_signature`` is the
        batch's header signature, and ``batch_list_bytes`` is the
        serialized BatchList.
    """
    transaction = transaction_pb2.Transaction(
        payload=payload,
        header=header,
        header_signature=txn_key.sign(header))
    transaction_ids = [transaction.header_signature]
    # Lazy %-formatting: the message is only built if INFO is enabled.
    logging.info("This is the transaction id %s", transaction_ids)
    # Reuse transaction_ids instead of rebuilding the same list.
    batch_header = batch_pb2.BatchHeader(
        signer_public_key=batch_key.get_public_key().as_hex(),
        transaction_ids=transaction_ids).SerializeToString()
    batch = batch_pb2.Batch(
        header=batch_header,
        header_signature=batch_key.sign(batch_header),
        transactions=[transaction])
    batch_list_bytes = BatchList(batches=[batch]).SerializeToString()
    return transaction_ids, [batch], batch.header_signature, batch_list_bytes
def _make_batch(self, payload, inputs, outputs, signer):
    """Build and sign a single-transaction batch from *payload*.

    The same key signs both the transaction and the batch, so the
    signer's public key also serves as the batcher public key.
    """
    pub_key = signer.get_public_key().as_hex()
    serialized_payload = payload.SerializeToString()

    header_bytes = transaction_pb2.TransactionHeader(
        family_name=self._family_name,
        family_version=self._family_version,
        inputs=inputs,
        outputs=outputs,
        signer_public_key=pub_key,
        batcher_public_key=pub_key,
        payload_sha512=hashlib.sha512(serialized_payload).hexdigest(),
    ).SerializeToString()

    transaction = transaction_pb2.Transaction(
        header=header_bytes,
        header_signature=signer.sign(header_bytes),
        payload=serialized_payload,
    )

    batch_header_bytes = batch_pb2.BatchHeader(
        signer_public_key=pub_key,
        transaction_ids=[transaction.header_signature],
    ).SerializeToString()

    return batch_pb2.Batch(
        header=batch_header_bytes,
        header_signature=signer.sign(batch_header_bytes),
        transactions=[transaction],
    )
def process(rec, database):
    """ Process inbound queue records

    Submits the record's pre-built batch to the validator and, on commit,
    upserts the record's metadata, logs to the changelog table, and removes
    the record from the inbound queue. Records without a batch, or whose
    batch fails to commit, are moved to the sync_errors table instead.

    Args:
        rec: An inbound_queue record (dict-like) holding at least "id";
            optionally "batch" (serialized Batch bytes), "metadata",
            "address", "object_type", "object_id", "provider_id".
        database: Database wrapper exposing get_table() and run_query().
    """
    try:
        # A record with no batch cannot be replayed: drop it from the
        # queue and file it as a sync error.
        if "batch" not in rec or not rec["batch"]:
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            return
        # Rehydrate the serialized batch and submit it, blocking on status.
        batch = batch_pb2.Batch()
        batch.ParseFromString(rec["batch"])
        batch_list = batcher.batch_to_list(batch=batch)
        status = ClientSync().send_batches_get_status(batch_list=batch_list)
        if status[0]["status"] == "COMMITTED":
            if "metadata" in rec and rec["metadata"]:
                data = {
                    "address": rec["address"],
                    "object_type": rec["object_type"],
                    "object_id": rec["object_id"],
                    "provider_id": rec["provider_id"],
                    "created_at": r.now(),
                    "updated_at": r.now(),
                    **rec["metadata"],
                }
                # Upsert: insert the full document if missing, otherwise
                # merge only the metadata and bump updated_at.
                query = (
                    database.get_table("metadata").get(
                        rec["address"]).replace(lambda doc: r.branch(
                            # pylint: disable=singleton-comparison
                            (doc == None),  # noqa
                            r.expr(data),
                            doc.merge({
                                "metadata": rec["metadata"],
                                "updated_at": r.now()
                            }),
                        )))
                result = database.run_query(query)
                # Best-effort: a failed metadata upsert is logged, not fatal.
                if (not result["inserted"]
                        and not result["replaced"]) or result["errors"] > 0:
                    LOGGER.warning("error updating metadata record:\n%s\n%s",
                                   result, query)
            # Committed: record the change and dequeue.
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("changelog").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
        else:
            # Not committed: capture the validator error and park the
            # record in sync_errors; still dequeue it.
            rec["error"] = get_status_error(status)
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
    except Exception as err:  # pylint: disable=broad-except
        # Broad catch is deliberate: one bad record must not kill the
        # queue-processing loop. NOTE(review): the record is left in the
        # queue on unexpected errors — presumably retried later; confirm.
        LOGGER.exception("%s exception processing inbound record:\n%s",
                         type(err).__name__, rec)
        LOGGER.exception(err)
def make_batch(transaction, batcher_keypair=BATCHER_KEY_PAIR):
    """Batch a transaction"""
    header_bytes = batch_pb2.BatchHeader(
        signer_public_key=batcher_keypair.public_key,
        transaction_ids=[transaction.header_signature],
    ).SerializeToString()
    signed_batch = batch_pb2.Batch(
        header=header_bytes,
        header_signature=batcher_keypair.sign(header_bytes),
        transactions=[transaction],
    )
    return signed_batch
def make_batch_from_txns(transactions, signer_keypair):
    """ Given a list of transactions, create a batch to be applied
        on the Sawtooth blockchain.
    """
    txn_ids = [txn.header_signature for txn in transactions]
    header_bytes = batch_pb2.BatchHeader(
        signer_public_key=signer_keypair.public_key,
        transaction_ids=txn_ids,
    ).SerializeToString()
    signed_batch = batch_pb2.Batch(
        header=header_bytes,
        header_signature=signer_keypair.sign(header_bytes),
        transactions=transactions,
    )
    return signed_batch
def create_batch(transactions, private_key, public_key):
    """Assemble and sign a Batch from already-signed transactions.

    Uses the legacy module-level ``signing`` API with an explicit
    private/public key pair.
    """
    txn_ids = [txn.header_signature for txn in transactions]
    header_bytes = batch_pb2.BatchHeader(
        signer_pubkey=public_key,
        transaction_ids=txn_ids,
    ).SerializeToString()
    return batch_pb2.Batch(
        header=header_bytes,
        transactions=transactions,
        header_signature=signing.sign(header_bytes, private_key),
    )
def multi_transactions_batch(transactions, batch_key):
    """Create a signed Batch containing multiple transactions.

    Args:
        transactions (list): Signed Transaction protobufs to include.
        batch_key: Signer whose key pair signs the batch header.

    Returns:
        tuple: ``(batch_header_signature, batch_list_bytes)`` — the
        batch's signature and the serialized BatchList wrapping it.

    Raises:
        CustomError: If *transactions* is not a list.
    """
    # isinstance (not type ==) also accepts list subclasses and is the
    # idiomatic type check.
    if not isinstance(transactions, list):
        raise CustomError("Transactions must be instance of list")
    batch_header = batch_pb2.BatchHeader(
        signer_public_key=batch_key.get_public_key().as_hex(),
        transaction_ids=[
            transaction.header_signature for transaction in transactions
        ]).SerializeToString()
    batch = batch_pb2.Batch(
        header=batch_header,
        header_signature=batch_key.sign(batch_header),
        transactions=transactions)
    batch_list_bytes = BatchList(batches=[batch]).SerializeToString()
    return batch.header_signature, batch_list_bytes
def create_batch(transactions, private_key, public_key):
    """Assemble a Batch, signing its header with an ECDSA key pair.

    Legacy variant: uses ``pybitcointools`` for signing and the older
    ``signer`` / ``transaction_signatures`` / ``signature`` field names.
    """
    sigs = [txn.signature for txn in transactions]
    header_bytes = batch_pb2.BatchHeader(
        signer=public_key,
        transaction_signatures=sigs,
    ).SerializeToString()
    return batch_pb2.Batch(
        header=header_bytes,
        transactions=transactions,
        signature=pybitcointools.ecdsa_sign(header_bytes, private_key),
    )
def create_batch(transactions, signer):
    """Assemble a Batch from signed transactions, signed by *signer*."""
    txn_ids = [txn.header_signature for txn in transactions]
    header_bytes = batch_pb2.BatchHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        transaction_ids=txn_ids,
    ).SerializeToString()
    return batch_pb2.Batch(
        header=header_bytes,
        transactions=transactions,
        header_signature=signer.sign(header_bytes),
    )
def _create_txn_and_batch(self, txn_key, batch_key, inputs, outputs, payload):
    """Create one signed transaction and wrap it in a signed batch.

    Header construction and signing are delegated to the instance's
    _transaction_header and _batch_header helpers.

    Returns:
        tuple: (Batch, batch header signature)
    """
    header_bytes, txn_signature = self._transaction_header(
        txn_key, batch_key, inputs, outputs, payload)
    transaction = transaction_pb2.Transaction(
        header=header_bytes,
        header_signature=txn_signature,
        payload=payload.SerializeToString())
    txns = [transaction]
    batch_header_bytes, batch_signature = self._batch_header(batch_key, txns)
    batch = batch_pb2.Batch(
        header=batch_header_bytes,
        header_signature=batch_signature,
        transactions=txns)
    return batch, batch_signature
def _create_batch(signer, transactions):
    """Creates a batch from a list of transactions and a signer, and signs
    the resulting batch with the given signing key.

    Args:
        signer (:obj:`Signer`): Cryptographic signer to sign the batch
        transactions (list of `Transaction`): The transactions to add
            to the batch.

    Returns:
        `Batch`: The constructed and signed batch.
    """
    header_bytes = batch_pb.BatchHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        transaction_ids=[txn.header_signature for txn in transactions],
    ).SerializeToString()
    signed_batch = batch_pb.Batch(
        header=header_bytes,
        header_signature=signer.sign(header_bytes),
        transactions=transactions,
    )
    return signed_batch
def wrap_payload_in_txn_batch(txn_key, payload, header, batch_key):
    """Takes the serialized RBACPayload and creates a batch_list, batch
    signature tuple.

    Args:
        txn_key (Key): The txn signer's public/private key pair.
        payload (bytes): The serialized RBACPayload.
        header (bytes): The serialized TransactionHeader.
        batch_key (Key): The batch signer's public/private key pair.

    Returns:
        tuple
            The zeroth element is a BatchList, and the first element is
            the batch header_signature.
    """
    # Both signers are derived from hex private keys via one shared factory.
    factory = CryptoFactory(sawtooth_signing.create_context("secp256k1"))

    txn_signer = factory.new_signer(
        Secp256k1PrivateKey.from_hex(txn_key.private_key))
    txn = transaction_pb2.Transaction(
        payload=payload,
        header=header,
        header_signature=txn_signer.sign(header))

    header_bytes = batch_pb2.BatchHeader(
        signer_public_key=batch_key.public_key,
        transaction_ids=[txn.header_signature],
    ).SerializeToString()
    batch_signer = factory.new_signer(
        Secp256k1PrivateKey.from_hex(batch_key.private_key))
    signed_batch = batch_pb2.Batch(
        header=header_bytes,
        header_signature=batch_signer.sign(header_bytes),
        transactions=[txn],
    )

    return batch_pb2.BatchList(batches=[signed_batch]), \
        signed_batch.header_signature
def process(rec, conn):
    """ Process inbound queue records

    Translates role member/owner fields to next_ids, records the
    transaction, submits the record's batch to the validator, and on
    commit upserts metadata, writes to the changelog, and dequeues the
    record. Batchless or failed records go to sync_errors instead.

    Args:
        rec: An inbound_queue record (dict-like) with "id" and "data";
            optionally "batch", "metadata", "data_type", "address",
            "object_type", "object_id", "provider_id".
        conn: An open RethinkDB connection.
    """
    try:
        # Changes members from distinguished name to next_id for roles
        if "members" in rec["data"]:
            rec = translate_field_to_next(rec, "members")
        if "owners" in rec["data"]:
            rec = translate_field_to_next(rec, "owners")
        add_transaction(rec)
        # A record with no batch cannot be replayed: dequeue it and file
        # it as a sync error.
        if "batch" not in rec or not rec["batch"]:
            r.table("inbound_queue").get(rec["id"]).delete().run(conn)
            rec["sync_direction"] = "inbound"
            r.table("sync_errors").insert(rec).run(conn)
            return
        # Rehydrate the serialized batch and submit it to the validator.
        batch = batch_pb2.Batch()
        batch.ParseFromString(rec["batch"])
        batch_list = batch_to_list(batch=batch)
        client = ClientSync()
        status = client.send_batches_get_status(batch_list=batch_list)
        # Poll until the validator resolves the batch beyond PENDING.
        # NOTE(review): no iteration cap — presumably status_recheck
        # blocks/paces itself; confirm this cannot spin forever.
        while status[0]["status"] == "PENDING":
            LOGGER.info("Batch status is %s", status)
            status = client.status_recheck(batch_list)
        if status[0]["status"] == "COMMITTED":
            if rec["data_type"] == "user":
                insert_to_user_mapping(rec)
            if "metadata" in rec and rec["metadata"]:
                data = {
                    "address": rec["address"],
                    "object_type": rec["object_type"],
                    "object_id": rec["object_id"],
                    "provider_id": rec["provider_id"],
                    "created_at": r.now(),
                    "updated_at": r.now(),
                    **rec["metadata"],
                }
                # Upsert: insert the full document if missing, otherwise
                # merge only the metadata and bump updated_at.
                query = (
                    r.table("metadata").get(
                        rec["address"]).replace(lambda doc: r.branch(
                            # pylint: disable=singleton-comparison
                            (doc == None),  # noqa
                            r.expr(data),
                            doc.merge({
                                "metadata": rec["metadata"],
                                "updated_at": r.now()
                            }),
                        )))
                result = query.run(conn)
                # Best-effort: a failed metadata upsert is logged, not fatal.
                if (not result["inserted"]
                        and not result["replaced"]) or result["errors"] > 0:
                    LOGGER.warning("error updating metadata record:\n%s\n%s",
                                   result, query)
            # Committed: record the change and dequeue.
            rec["sync_direction"] = "inbound"
            r.table("changelog").insert(rec).run(conn)
            r.table("inbound_queue").get(rec["id"]).delete().run(conn)
        else:
            # Not committed: capture the validator error, park the record
            # in sync_errors, and dequeue it.
            rec["error"] = get_status_error(status)
            rec["sync_direction"] = "inbound"
            r.table("sync_errors").insert(rec).run(conn)
            r.table("inbound_queue").get(rec["id"]).delete().run(conn)
    except Exception as err:  # pylint: disable=broad-except
        # Broad catch is deliberate: one bad record must not kill the
        # queue-processing loop.
        LOGGER.exception("%s exception processing inbound record:\n%s",
                         type(err).__name__, rec)
        LOGGER.exception(err)