def transaction(self, commands, quorum=1, creator_account=None, created_time=None):
    """
    Creates a protobuf transaction with specified set of entities

    :param commands: list of commands generated via command factory method
    :param quorum: required number of signatures, 1 is default
    :param creator_account: id of transaction creator account;
        falls back to ``self.creator_account`` when omitted
    :param created_time: transaction creation timestamp in milliseconds;
        defaults to ``self.now()`` when omitted
    :return: a proto transaction
    :raises ValueError: if neither ``creator_account`` nor
        ``self.creator_account`` is set
    """
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would silently disable this validation.
    if not (creator_account or self.creator_account):
        raise ValueError("No account name specified as transaction creator id")
    if not created_time:
        created_time = self.now()
    if not creator_account:
        creator_account = self.creator_account
    tx = transaction_pb2.Transaction()
    core_payload = tx.payload.reduced_payload
    # setting transaction contents
    core_payload.quorum = quorum
    core_payload.created_time = created_time
    core_payload.creator_account_id = creator_account
    core_payload.commands.extend(commands)
    return tx
def test_sign(self):
    """A freshly built transaction has no signatures; signing adds exactly one."""
    unsigned = self.unsigned_tx()
    unsigned.payload.reduced_payload.commands.extend(
        [self.valid_add_peer_command()])
    self.assertEqual(len(unsigned.signatures), 0)
    serialized = unsigned.SerializeToString()
    signed_blob = iroha.signTransaction(iroha.Blob(serialized).blob(), self.keys)
    signed = trx.Transaction()
    signed.ParseFromString(bytearray(signed_blob))
    self.assertEqual(len(signed.signatures), 1)
def register_to_abci(self):
    '''Method used when the client makes the first connection to the ABCI'''
    contract_msg = tx.Transaction()
    new_contract = contract_msg.new_contract
    new_contract.uuid = self.uuid.bytes
    new_contract.timestamp = int(time.time())
    new_contract.public_key = self.public_key.to_bytes()
    # The signature covers uuid + big-endian 8-byte timestamp + public key,
    # concatenated in that order.
    signed_fields = (
        self.uuid.bytes
        + new_contract.timestamp.to_bytes(8, byteorder='big')
        + self.public_key.to_bytes()
    )
    new_contract.signature = self.priv_key.sign(signed_fields)
    self.send_request(contract_msg.SerializeToString())
def send_tx(tx, key_pair):
    """Sign *tx* with *key_pair* and submit it to the local Torii endpoint."""
    signed_blob = (
        iroha.ModelProtoTransaction(tx)
        .signAndAddSignature(key_pair)
        .finish()
        .blob()
    )
    # The SWIG blob is a sequence of ints; convert to the str/bytes type
    # that ParseFromString expects on this interpreter version.
    if sys.version_info[0] == 2:
        raw = ''.join(map(chr, signed_blob))
    else:
        raw = bytes(signed_blob)
    proto_tx = transaction_pb2.Transaction()
    proto_tx.ParseFromString(raw)
    channel = grpc.insecure_channel('127.0.0.1:50051')
    endpoint_pb2_grpc.CommandService_v1Stub(channel).Torii(proto_tx)
def make_tx(
    sender,
    to_address,
    data,
    quota,
    chain_id,
    valid_until,
    nonce,
    version=0,
):
    """Assemble and return a protobuf Transaction from the given fields.

    NOTE(review): ``sender`` is accepted but never used in this body —
    presumably the chain derives the sender from the signature; confirm
    before removing it from the signature.
    """
    new_tx = proto.Transaction()
    new_tx.chain_id = chain_id
    new_tx.data = hex2bytes(data)
    new_tx.valid_until_block = valid_until
    new_tx.nonce = nonce
    new_tx.quota = quota
    new_tx.version = version
    new_tx.to = to_address
    return new_tx
def run(self):
    """Main loop of the client, sends data retrieved from the file to the
    ABCI server.

    Registers the contract, then repeatedly aggregates ``min_per_block``
    per-minute meter rows into one usage message, signs it, and sends it.
    On Ctrl-C / EOF, sends a close_contract message and exits.
    """
    # Original code had this string *after* time.sleep, where it was a bare
    # statement, not the method docstring — moved to the top.
    # Optional startup delay so peers are up before traffic starts.
    time.sleep(int(os.environ.get("STARTUP_DELAY", 5)))
    self.register_to_abci()

    # Variables needed for simulating the smart meter signals
    min_per_block = 5
    client_offset = 1440  # minutes in one day
    day_offset = 720      # half a day of rows, used to start at noon
    pow_significance = pow(10, 1)  # scale readings to keep one decimal as int
    consumption_sum = 0
    production_sum = 0
    prev_consumption_sum = 0
    prev_production_sum = 0
    consumption_percentage_to_flexibility = 0.2
    production_percentage_to_flexibility = 0.1

    # make each client start on different day
    for _ in range(0, randint(0, 25)):
        for _ in range(0, client_offset):
            self.data_file.readline()
    # make clients start with noon data
    for _ in range(0, day_offset):
        self.data_file.readline()

    try:
        while True:
            msg = tx.Transaction()
            msg.usage.contract_uuid = self.uuid.bytes
            msg.usage.timestamp = int(time.time())
            # need to sum, as data is only per minute
            for _ in range(0, min_per_block):
                row = self.data_file.readline()
                if row:
                    # Split once per row (was split twice); columns 3/4 are
                    # consumption/production with ',' as decimal separator.
                    fields = row.split(';')
                    consumption_sum += int(
                        float(fields[3].replace(",", ".")) * pow_significance)
                    production_sum += int(
                        float(fields[4].replace(",", ".")) * pow_significance)
                else:
                    # End of data file: wrap around to the beginning.
                    self.data_file.seek(0)
            # Report the previous block's totals; fresh sums feed predictions.
            msg.usage.consumption = prev_consumption_sum
            msg.usage.production = prev_production_sum
            # Consumption prediction for coming blocks
            msg.usage.prediction_consumption['t+1'] = int(
                consumption_sum * (1 - consumption_percentage_to_flexibility))
            # Production prediction for coming blocks
            msg.usage.prediction_production['t+1'] = int(
                production_sum * (1 - production_percentage_to_flexibility))
            # Consumption flexibility options (price -> amount) for coming block
            msg.usage.consumption_flexibility[randint(150, 220) * 100] = int(
                0.2 * (consumption_sum * consumption_percentage_to_flexibility))
            msg.usage.consumption_flexibility[randint(100, 150) * 100] = int(
                0.3 * (consumption_sum * consumption_percentage_to_flexibility))
            msg.usage.consumption_flexibility[randint(50, 100) * 100] = int(
                0.5 * (consumption_sum * consumption_percentage_to_flexibility))
            # Production flexibility options for coming block
            # NOTE(review): these are computed from consumption_sum, not
            # production_sum — looks like a copy/paste slip; confirm intent
            # before changing (behavior preserved here).
            msg.usage.production_flexibility[randint(150, 220) * 100] = int(
                0.5 * (consumption_sum * production_percentage_to_flexibility))
            msg.usage.production_flexibility[randint(100, 150) * 100] = int(
                0.3 * (consumption_sum * production_percentage_to_flexibility))
            msg.usage.production_flexibility[randint(50, 100) * 100] = int(
                0.2 * (consumption_sum * production_percentage_to_flexibility))
            msg.usage.default_consumption_price = 22000
            msg.usage.default_production_price = 500
            msg.usage.signature = self.priv_key.sign(msg.usage.SerializeToString())
            self.send_request(msg.SerializeToString())
            prev_consumption_sum = consumption_sum
            prev_production_sum = production_sum
            consumption_sum = 0
            production_sum = 0
            # In case of no time interval, new transactions are sent by the
            # user manually (pressing Enter).
            if self.time_interval == 0:
                input()
            else:
                time.sleep(self.time_interval)
    except (KeyboardInterrupt, EOFError):
        # Gracefully close the contract before exiting.
        close_msg = tx.Transaction()
        close_msg.close_contract.uuid = self.uuid.bytes
        close_msg.close_contract.timestamp = int(time.time())
        close_msg.close_contract.signature = self.priv_key.sign(
            close_msg.close_contract.SerializeToString())
        self.send_request(close_msg.SerializeToString())
        print('\nExiting\n')
# Kafka consumer script: reads serialized Transaction protobufs from the
# 'trans' topic and extracts account number / amount from each message.
import logging
from datetime import datetime
from pykafka import KafkaClient
import transaction_pb2

# logging — file name embeds the start time so each run gets its own log file
logging.basicConfig(filename=f'consumer_{str(datetime.now())}.log', level=logging.INFO)

# initializing protocol buffer schema class (one instance reused for parsing)
transaction = transaction_pb2.Transaction()

# connect to multiple kafka brokers
client = KafkaClient(hosts="127.0.0.1:9093,127.0.0.1:9094")

# select kafka topic
topic = client.topics['trans']

# create kafka consumer
cons = topic.get_simple_consumer()

# all the account numbers and aggregated balance is stored in a dict
data_dict = {}

# Consume forever: each message value is a serialized Transaction.
for msg in cons:
    if msg is not None:
        # deserialize the message
        parsed = transaction.FromString(msg.value)
        ac_no = parsed.account_number
        amnt = parsed.amount
        # NOTE(review): ac_no/amnt are extracted but never used in this
        # chunk — the aggregation into data_dict presumably continues
        # beyond the visible portion of the file; confirm.
def __init__(self):
    """Seed the in-memory transaction store with one known entry."""
    initial_tx = transaction_pb2.Transaction(id="uuid_1", amount=10)
    self.transactions = {'1': initial_tx}
def unsigned_tx(self):
    """Build a minimal unsigned transaction for the admin test account."""
    unsigned = trx.Transaction()
    payload = unsigned.payload.reduced_payload
    payload.creator_account_id = "admin@test"
    # Creation time is the current epoch time in milliseconds.
    payload.created_time = int(time() * 1000)
    payload.quorum = 1
    return unsigned