def test_web_api_store(self):
    # Test _handlestorerequest
    local_node = self._create_node(8800)
    gossip = Gossip(local_node)
    path = tempfile.mkdtemp()
    ledger = Journal(gossip, data_directory=path, genesis_ledger=True)
    validator = TestValidator(ledger)
    store_page = StorePage(validator)
    request = self._create_get_request("/store", {})
    try:
        # Test no GlobalStore
        ledger.GlobalStore = None
        store_page.do_get(request)
        self.fail("This should throw an error.")
    except:
        self.assertIsNotNone(ledger.GlobalStore)
    kv = KeyValueStore()
    ledger.GlobalStore.TransactionStores["/TestTransaction"] = kv
    ledger.GlobalStore.TransactionStores["/TestTransaction"].set("TestKey", 0)
    # GET /store
    self.assertEquals(store_page.do_get(request), '["/TestTransaction"]')
    # GET /store/TestTransaction
    request = self._create_get_request("/store/TestTransaction", {})
    self.assertEquals(store_page.do_get(request), '["TestKey"]')
    # GET /store/TestTransaction/*
    request = self._create_get_request("/store/TestTransaction/*", {})
    self.assertEquals(store_page.do_get(request), '{"TestKey": 0}')
    # GET /store/TestTransaction/*?delta=1
    request = self._create_get_request("/store/TestTransaction/*",
                                       {"delta": ['1']})
    self.assertEquals(store_page.do_get(request),
                      '{"DeletedKeys": [], "Store": {"TestKey": 0}}')
    # GET /store/TestTransaction/TestKey
    request = self._create_get_request("/store/TestTransaction/TestKey", {})
    self.assertEquals(store_page.do_get(request), "0")
    try:
        blockstore = BlockStore()
        ledger.GlobalStoreMap.commit_block_store("123", blockstore)
        request = self._create_get_request("/store/TestTransaction/*",
                                           {"blockid": ["123"]})
        store_page.do_get(request)
        self.fail("This should throw an error")
    except:
        blockstore = BlockStore()
        blockstore.add_transaction_store("/TestTransaction", kv)
        ledger.GlobalStoreMap.commit_block_store("123", blockstore)
        # GET /store/TestTransaction/*?blockid=123
        request = self._create_get_request("/store/TestTransaction/*",
                                           {"blockid": ["123"]})
        self.assertEquals(store_page.do_get(request), '{"TestKey": 0}')

def test_web_api_store(self):
    # Test _handlestorerequest
    LocalNode = self._create_node(8800)
    path = tempfile.mkdtemp()
    ledger = Journal(LocalNode, DataDirectory=path, GenesisLedger=True)
    validator = TestValidator(ledger)
    root = RootPage(validator)
    request = self._create_get_request("/store", {})
    try:
        # Test no GlobalStore
        ledger.GlobalStore = None
        root.do_get(request)
        self.fail("This should throw an error.")
    except:
        self.assertIsNotNone(ledger.GlobalStore)
    kv = KeyValueStore()
    ledger.GlobalStore.TransactionStores["/TestTransaction"] = kv
    ledger.GlobalStore.TransactionStores["/TestTransaction"].set("TestKey", 0)
    # GET /store
    self.assertEquals(root.do_get(request), '["/TestTransaction"]')
    # GET /store/TestTransaction
    request = self._create_get_request("/store/TestTransaction", {})
    self.assertEquals(root.do_get(request), '["TestKey"]')
    # GET /store/TestTransaction/*
    request = self._create_get_request("/store/TestTransaction/*", {})
    self.assertEquals(root.do_get(request), '{"TestKey": 0}')
    # GET /store/TestTransaction/*?delta=1
    request = self._create_get_request("/store/TestTransaction/*",
                                       {"delta": ['1']})
    self.assertEquals(root.do_get(request),
                      '{"DeletedKeys": [], "Store": {"TestKey": 0}}')
    # GET /store/TestTransaction/TestKey
    request = self._create_get_request("/store/TestTransaction/TestKey", {})
    self.assertEquals(root.do_get(request), "0")
    try:
        blockstore = BlockStore()
        ledger.GlobalStoreMap.commit_block_store("123", blockstore)
        request = self._create_get_request("/store/TestTransaction/*",
                                           {"blockid": ["123"]})
        root.do_get(request)
        self.fail("This should throw an error")
    except:
        blockstore = BlockStore()
        blockstore.add_transaction_store("/TestTransaction", kv)
        ledger.GlobalStoreMap.commit_block_store("123", blockstore)
        # GET /store/TestTransaction/*?blockid=123
        request = self._create_get_request("/store/TestTransaction/*",
                                           {"blockid": ["123"]})
        self.assertEquals(root.do_get(request), '{"TestKey": 0}')

def test_web_api_transaction(self):
    # Test _handletxnrequest
    local_node = self._create_node(8802)
    gossip = Gossip(local_node)
    path = tempfile.mkdtemp()
    # Setup ledger and TransactionPage
    ledger = Journal(gossip, data_directory=path, genesis_ledger=True)
    validator = TestValidator(ledger)
    transaction_page = TransactionPage(validator)
    # Add a TransactionBlock to the ledger
    txns = []
    i = 0
    while i < 10:
        txn = Transaction()
        txn.sign_from_node(local_node)
        txns += [txn.Identifier]
        ledger.TransactionStore[txn.Identifier] = txn
        i += 1
    trans_block = self._create_tblock(local_node, 0, common.NullIdentifier,
                                      txns)
    ledger.BlockStore[trans_block.Identifier] = trans_block
    ledger.handle_advance(trans_block)
    # GET /transaction
    request = self._create_get_request("/transaction/", {})
    r = transaction_page.do_get(request)
    print request.path, r
    r = r[1:-1].replace('"', "")
    r = r.replace(" ", "").split(",")
    self.assertEquals(r, txns)
    # GET /transaction?blockcount=1
    request = self._create_get_request("/transaction", {"blockcount": [1]})
    r = transaction_page.do_get(request)
    r = r[1:-1].replace('"', "")
    r = r.replace(" ", "").split(",")
    self.assertEquals(r, txns)
    # Returns None if testing
    # GET /transaction/{TransactionID}
    request = self._create_get_request("/transaction/" + txns[1], {})
    txn = ledger.TransactionStore[txns[1]]
    tinfo = txn.dump()
    tinfo['Identifier'] = txn.Identifier
    tinfo['Status'] = txn.Status
    if txn.Status == tStatus.committed:
        tinfo['InBlock'] = txn.InBlock
    self.assertEquals(yaml.load(transaction_page.do_get(request)), tinfo)
    # GET /transaction/{TransactionID}/InBlock
    request = self._create_get_request("/transaction/" + txns[1] + "/InBlock",
                                       {})
    self.assertEquals(transaction_page.do_get(request).replace('"', ""),
                      txn.InBlock)

def test_web_api_transaction(self):
    # Test _handletxnrequest
    LocalNode = self._create_node(8802)
    path = tempfile.mkdtemp()
    # Setup ledger and RootPage
    ledger = Journal(LocalNode, DataDirectory=path, GenesisLedger=True)
    validator = TestValidator(ledger)
    root = RootPage(validator)
    # Add a TransactionBlock to the ledger
    txns = []
    i = 0
    while i < 10:
        txn = Transaction()
        txn.sign_from_node(LocalNode)
        txns += [txn.Identifier]
        ledger.TransactionStore[txn.Identifier] = txn
        i += 1
    transBlock = self._create_tblock(LocalNode, 0, common.NullIdentifier, txns)
    ledger.BlockStore[transBlock.Identifier] = transBlock
    ledger.handle_advance(transBlock)
    request = self._create_get_request("/transaction", {})
    # GET /transaction
    request = self._create_get_request("/transaction", {})
    r = root.do_get(request)
    r = r[1:-1].replace('"', "")
    r = r.replace(" ", "").split(",")
    self.assertEquals(r, txns)
    # GET /transaction?blockcount=1
    request = self._create_get_request("/transaction", {"blockcount": [1]})
    r = root.do_get(request)
    r = r[1:-1].replace('"', "")
    r = r.replace(" ", "").split(",")
    self.assertEquals(r, txns)
    # Returns None if testing
    # GET /transaction/{TransactionID}
    request = self._create_get_request("/transaction/" + txns[1], {})
    txn = ledger.TransactionStore[txns[1]]
    tinfo = txn.dump()
    tinfo['Identifier'] = txn.Identifier
    tinfo['Status'] = txn.Status
    if txn.Status == tStatus.committed:
        tinfo['InBlock'] = txn.InBlock
    self.assertEquals(yaml.load(root.do_get(request)), tinfo)
    # GET /transaction/{TransactionID}/InBlock
    request = self._create_get_request("/transaction/" + txns[1] + "/InBlock",
                                       {})
    self.assertEquals(root.do_get(request).replace('"', ""), txn.InBlock)

def test_web_api_forward(self):
    # Test _msgforward
    local_node = self._create_node(8807)
    gossip = Gossip(local_node)
    path = tempfile.mkdtemp()
    ledger = Journal(gossip, data_directory=path, genesis_ledger=True)
    # Create peers for the message to be forwarded to
    node1 = self._create_node(8881)
    node2 = self._create_node(8882)
    node1.is_peer = True
    node2.is_peer = True
    gossip.add_node(node1)
    gossip.add_node(node2)
    validator = TestValidator(ledger)
    forward_page = ForwardPage(validator)
    # Create message to use and the data to forward
    msg = shutdown_message.ShutdownMessage()
    msg.sign_from_node(local_node)
    data = msg.dump()
    # POST /forward
    request = self._create_post_request("forward", data)
    r = yaml.load(forward_page.do_post(request))
    self.assertEquals(r, data)
    self.assertIn(msg.Identifier, node1.MessageQ.Messages)
    self.assertIn(msg.Identifier, node2.MessageQ.Messages)

def test_generate_coupons_coupon(self):
    self._set_clock(2015, 4, 1, 1)
    signingkey = signed_object.generate_signing_key()
    ident = signed_object.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10021))
    node.is_peer = True
    path = tempfile.mkdtemp()
    gossip = Gossip(node)
    journal = Journal(node,
                      gossip,
                      gossip.dispatcher,
                      consensus=DevModeConsensus(),
                      data_directory=path)
    journal.global_store.TransactionStores['/BondTransaction'] = self.store
    # creates a redemption
    updates = Family._generate_coupons(journal)
    self.assertNotEquals(updates, [])
    transaction = BondTransaction(updates[0])
    transaction.sign_object(self.key)
    transaction.check_valid(self.store)
    transaction.apply(self.store)
    org_usd_holding = self.store["34d813716009ca1786222a44347ccff"
                                 "258a4ab6029d936664fde0d13f23992b5"]
    self.assertEquals(org_usd_holding["amount"], 25000.0)

def test_journal_transaction_block_missing_transactions(self):
    # Test missing transactions; should return the list of missing transactions
    minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
    transBlock = TransactionBlock(minfo)
    signingkey = SigObj.generate_signing_key()
    ident = SigObj.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10002))
    path = tempfile.mkdtemp()
    # Create a temporary directory to use as the journal's data directory
    journal = Journal(node, DataDirectory=path)
    transBlock.sign_from_node(node)
    missing = transBlock.missing_transactions(journal)
    # No missing transactions
    self.assertEquals(missing, [])
    minfo = {
        '__SIGNATURE__': 'Test',
        '__NONCE__': time.time(),
        'Dependencies': []
    }
    transaction = Transaction(minfo)
    transaction.sign_from_node(node)
    transBlock.TransactionIDs += [transaction.Identifier]
    missing = transBlock.missing_transactions(journal)
    # One missing transaction
    self.assertEquals(missing, [transaction.Identifier])
    journal.TransactionStore[transaction.Identifier] = transaction
    missing = transBlock.missing_transactions(journal)
    # Back to no missing transactions
    self.assertEquals(missing, [])

def test_web_api_block(self):
    # Test _handleblkrequest
    local_node = self._create_node(8801)
    path = tempfile.mkdtemp()
    # Setup ledger and BlockPage
    ledger = Journal(local_node, DataDirectory=path, GenesisLedger=True)
    validator = TestValidator(ledger)
    block_page = BlockPage(validator)
    # Add TransactionBlocks to the ledger
    trans_block = self._create_tblock(local_node, 0, common.NullIdentifier, [])
    trans_block2 = self._create_tblock(local_node, 1, trans_block.Identifier,
                                       [])
    ledger.BlockStore[trans_block.Identifier] = trans_block
    ledger.BlockStore[trans_block2.Identifier] = trans_block2
    ledger.handle_advance(trans_block)
    ledger.handle_advance(trans_block2)
    # GET /block
    request = self._create_get_request("/block", {})
    string = '["' + str(trans_block2.Identifier) + '", "' + \
        str(trans_block.Identifier) + '"]'
    self.assertEquals(block_page.do_get(request), string)
    # GET /block?blockcount=2
    request = self._create_get_request("/block", {"blockcount": [2]})
    self.assertEquals(block_page.do_get(request), string)
    # GET /block?blockcount=1
    string = '["' + str(trans_block2.Identifier) + '"]'
    request = self._create_get_request("/block", {"blockcount": [1]})
    self.assertEquals(block_page.do_get(request), string)
    # Add identifier to dictionary
    dict_b = trans_block.dump()
    dict_b["Identifier"] = trans_block.Identifier
    # GET /block/{BlockId}
    request = self._create_get_request("/block/" + trans_block.Identifier, {})
    self.assertEquals(yaml.load(block_page.do_get(request)), dict_b)
    # GET /block/{BlockId}/Signature
    request = self._create_get_request("/block/" + trans_block.Identifier +
                                       "/Signature", {})
    self.assertEquals(block_page.do_get(request),
                      '"' + trans_block.Signature + '"')

def check_for_chain(data_dir, node_name, store_type):
    block_store = Journal.get_store_file(node_name, 'block', data_dir,
                                         store_type=store_type)
    if os.path.isfile(block_store):
        msg = 'block store: %s exists; ' % block_store
        msg += 'skipping genesis block creation.'
        raise CliException(msg)

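# Illustrative sketch (not part of the source): the genesis commands further
# below call this guard before building a block. The helper name here is
# hypothetical; `cfg` is assumed to be the validator configuration mapping
# those commands construct, and the config keys mirror their usage.
def _guard_against_existing_chain(cfg):
    # check_for_chain raises CliException if a block store already exists,
    # so callers simply let the exception propagate to abort genesis creation.
    check_for_chain(cfg.get("DataDirectory"),
                    cfg.get("NodeName"),
                    cfg.get("StoreType"))
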
def _create_journal(self, node=None):
    node = node or self._create_node()
    gossip = Gossip(node)
    # Create a temporary directory to use as the journal's data directory
    path = tempfile.mkdtemp()
    journal = Journal(gossip.LocalNode,
                      gossip,
                      gossip.dispatcher,
                      consensus=DevModeConsensus(),
                      data_directory=path)
    return (gossip, journal)

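# Illustrative sketch (not part of the suite): how a test in the same class
# might consume the helper above. The test name and assertions are
# hypothetical; only _create_journal comes from the source.
def test_create_journal_helper(self):
    # The helper wires a fresh Gossip instance to a dev-mode Journal backed
    # by a temporary data directory.
    gossip, journal = self._create_journal()
    self.assertIsNotNone(gossip.LocalNode)
    self.assertIsNotNone(journal)
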
def test_web_api_error_response(self):
    # Test error_response
    LocalNode = self._create_node(8809)
    path = tempfile.mkdtemp()
    # Setup ledger and RootPage
    ledger = Journal(LocalNode, DataDirectory=path, GenesisLedger=True)
    validator = TestValidator(ledger)
    root = RootPage(validator)
    request = self._create_get_request("/stat", {})
    error = root.error_response(request, http.BAD_REQUEST,
                                'error processing http request {0}',
                                request.path)
    self.assertEquals(error, "error processing http request /stat\n")

def test_web_api_msg_echo(self):
    # Test _msgecho
    LocalNode = self._create_node(8805)
    path = tempfile.mkdtemp()
    ledger = Journal(LocalNode, DataDirectory=path, GenesisLedger=True)
    validator = TestValidator(ledger)
    root = RootPage(validator)
    # Create message to use and the data to echo
    msg = shutdown_message.ShutdownMessage({'__SIGNATURE__': "test"})
    msg.sign_from_node(LocalNode)
    data = msg.dump()
    # POST /echo
    request = self._create_post_request("/echo", data)
    self.assertEquals(yaml.load(root.do_post(request)), data)

def test_web_api_stats(self):
    # Test _handlestatrequest
    LocalNode = self._create_node(8803)
    path = tempfile.mkdtemp()
    # Setup ledger and RootPage
    ledger = Journal(LocalNode, DataDirectory=path, GenesisLedger=True)
    validator = TestValidator(ledger)
    root = RootPage(validator)
    request = self._create_get_request("/stat", {})
    try:
        root.do_get(request)
        self.fail("This should cause an error")
    except:
        self.assertIsNotNone(root)
    dic = {}
    dic["ledger"] = ledger.StatDomains["ledger"].get_stats()
    dic["ledgerconfig"] = ledger.StatDomains["ledgerconfig"].get_stats()
    dic["message"] = ledger.StatDomains["message"].get_stats()
    dic["packet"] = ledger.StatDomains["packet"].get_stats()
    # GET /statistics/ledger
    request = self._create_get_request("/statistics/ledger", {})
    self.assertEquals(yaml.load(root.do_get(request)), dic)
    # GET /statistics/node - with no peers
    request = self._create_get_request("/statistics/node", {})
    self.assertEquals(yaml.load(root.do_get(request)), {})
    node = self._create_node(8804)
    ledger.add_node(node)
    dic2 = {}
    dic2[node.Name] = node.Stats.get_stats()
    dic2[node.Name]["IsPeer"] = node.is_peer
    # GET /statistics/node - with one peer
    self.assertEquals(yaml.load(root.do_get(request)), dic2)
    request = self._create_get_request("AnythingElse", {})
    dic3 = root.do_get(request)
    self.assertTrue('404 - No Such Resource' in dic3)

def test_web_api_forward(self):
    # Test _msgforward
    LocalNode = self._create_node(8807)
    path = tempfile.mkdtemp()
    ledger = Journal(LocalNode, DataDirectory=path, GenesisLedger=True)
    # Create peers for the message to be forwarded to
    node1 = self._create_node(8881)
    node2 = self._create_node(8882)
    node1.is_peer = True
    node2.is_peer = True
    ledger.add_node(node1)
    ledger.add_node(node2)
    validator = TestValidator(ledger)
    root = RootPage(validator)
    # Create message to use and the data to forward
    msg = shutdown_message.ShutdownMessage()
    msg.sign_from_node(LocalNode)
    data = msg.dump()
    # POST /forward
    request = self._create_post_request("forward", data)
    r = yaml.load(root.do_post(request))
    self.assertEquals(r, data)
    self.assertIn(msg.Identifier, node1.MessageQ.Messages)
    self.assertIn(msg.Identifier, node2.MessageQ.Messages)

def test_journal_transaction_block_is_valid(self):
    # Test whether or not a transblock is valid
    minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
    transBlock = TransactionBlock(minfo)
    signingkey = SigObj.generate_signing_key()
    ident = SigObj.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10000))
    # Create a temporary directory to use as the journal's data directory
    path = tempfile.mkdtemp()
    journal = Journal(node, DataDirectory=path)
    # Need to sign the TransactionBlock; use sign_from_node from signed_object
    transBlock.sign_from_node(node)
    self.assertTrue(transBlock.is_valid(journal))

def test_matching_nothing_to_match(self):
    signingkey = signed_object.generate_signing_key()
    ident = signed_object.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10002))
    node.is_peer = True
    path = tempfile.mkdtemp()
    gossip = Gossip(node)
    journal = Journal(node,
                      gossip,
                      gossip.dispatcher,
                      consensus=DevModeConsensus(),
                      data_directory=path)
    journal.global_store.TransactionStores['/BondTransaction'] = self.store
    matched_orders = _generate_match_orders(journal)
    self.assertEquals(matched_orders, [])

def test_journal_transaction_block_not_is_valid(self):
    # Test that an invalid Transblock does not get verified as valid
    minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
    transBlock = TransactionBlock(minfo)
    signingkey = SigObj.generate_signing_key()
    ident = SigObj.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10001))
    # Create a temporary directory to use as the journal's data directory
    path = tempfile.mkdtemp()
    journal = Journal(node, DataDirectory=path)
    # The TransactionBlock is never signed, so is_valid should fail
    try:
        transBlock.is_valid(journal)
    except AssertionError, e:
        self.assertIsInstance(e, AssertionError)

def test_journal_transaction_block_update_block_weight(self):
    # Test block weight update
    minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
    transBlock = TransactionBlock(minfo)
    transBlock.Status = tbStatus.valid
    signingkey = SigObj.generate_signing_key()
    ident = SigObj.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10003))
    # Create a temporary directory to use as the journal's data directory
    path = tempfile.mkdtemp()
    journal = Journal(node, DataDirectory=path)
    transBlock.sign_from_node(node)
    transBlock.update_block_weight(journal)
    # No transactions
    self.assertEquals(transBlock.TransactionDepth, 0)
    minfo = {
        '__SIGNATURE__': 'Test',
        '__NONCE__': time.time(),
        'Dependencies': []
    }
    transaction = Transaction(minfo)
    transaction.sign_from_node(node)
    transBlock.TransactionIDs += [transaction.Identifier]
    transBlock.update_block_weight(journal)
    # One transaction
    self.assertEquals(transBlock.TransactionDepth, 1)
    minfo = {
        '__SIGNATURE__': 'Test',
        "BlockNum": 1,
        'PreviousBlockID': transBlock.Identifier
    }
    newTransBlock = TransactionBlock(minfo)
    newTransBlock.Status = tbStatus.valid
    journal.BlockStore[transBlock.Identifier] = transBlock
    newTransBlock.update_block_weight(journal)
    # Get depth from previous block
    self.assertEquals(newTransBlock.TransactionDepth, 1)

def test_web_api_msg_initiate(self):
    # Test _msginitiate
    LocalNode = self._create_node(8806)
    path = tempfile.mkdtemp()
    ledger = Journal(LocalNode, DataDirectory=path, GenesisLedger=True)
    validator = TestValidator(ledger)
    root = RootPage(validator)
    # Create message to use and the data to initiate
    msg = shutdown_message.ShutdownMessage()
    data = msg.dump()
    request = self._create_post_request("/initiate", data)
    r = root.do_post(request)
    self.assertEquals(r, "error processing http request /initiate\n")
    request.client = address.IPv4Address("TCP", '127.0.0.1', 8806)
    # POST /initiate - This should sign the message
    r = yaml.load(root.do_post(request))
    sig = r["__SIGNATURE__"]
    r.pop("__SIGNATURE__", None)
    data.pop("__SIGNATURE__", None)
    self.assertEquals(r, data)
    self.assertIsNotNone(sig)

def test_matching_no_quotes(self):
    signingkey = signed_object.generate_signing_key()
    ident = signed_object.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10003))
    node.is_peer = True
    path = tempfile.mkdtemp()
    gossip = Gossip(node)
    journal = Journal(node,
                      gossip,
                      gossip.dispatcher,
                      consensus=DevModeConsensus(),
                      data_directory=path)
    org2 = self.store.lookup("organization:name", "Second Bank")
    bond = self.store.lookup("bond:cusip", "912828R77")
    transaction = BondTransaction({
        "UpdateType": "CreateOrder",
        'Updates': [{
            "UpdateType": "CreateOrder",
            "Action": "Buy",
            "OrderType": "Market",
            "FirmId": org2["object-id"],
            "Isin": bond["isin"],
            "Quantity": 100000,
            "object_id": "123453716009ca1786222a44347ccff258a4ab6029" +
                         "d936664fde0d13f23992b7"
        }]
    })
    transaction.sign_object(self.key)
    try:
        transaction.check_valid(self.store)
        transaction.apply(self.store)
    except InvalidTransactionError:
        self.fail("This should be valid")
    journal.global_store.TransactionStores['/BondTransaction'] = self.store
    matched_orders = _generate_match_orders(journal)
    self.assertEquals(matched_orders, [])

def test_web_api_block(self):
    # Test _handleblkrequest
    local_node = self._create_node(8801)
    gossip = Gossip(local_node)
    path = tempfile.mkdtemp()
    # Setup ledger and BlockPage
    ledger = Journal(gossip, data_directory=path, genesis_ledger=True)
    validator = TestValidator(ledger)
    block_page = BlockPage(validator)
    # Add TransactionBlocks to the ledger
    trans_block = self._create_tblock(local_node, 0, common.NullIdentifier, [])
    trans_block2 = self._create_tblock(local_node, 1, trans_block.Identifier,
                                       [])
    ledger.BlockStore[trans_block.Identifier] = trans_block
    ledger.BlockStore[trans_block2.Identifier] = trans_block2
    ledger.handle_advance(trans_block)
    ledger.handle_advance(trans_block2)
    # GET /block
    request = self._create_get_request("/block", {})
    string = '["' + str(trans_block2.Identifier) + '", "' + \
        str(trans_block.Identifier) + '"]'
    self.assertEquals(block_page.do_get(request), string)
    # GET /block?blockcount=2
    request = self._create_get_request("/block", {"blockcount": [2]})
    self.assertEquals(block_page.do_get(request), string)
    # GET /block?blockcount=1
    string = '["' + str(trans_block2.Identifier) + '"]'
    request = self._create_get_request("/block", {"blockcount": [1]})
    self.assertEquals(block_page.do_get(request), string)
    # Add identifier to dictionary
    dict_b = trans_block.dump()
    dict_b["Identifier"] = trans_block.Identifier
    # GET /block/{BlockId}
    request = self._create_get_request("/block/" + trans_block.Identifier, {})
    self.assertEquals(yaml.load(block_page.do_get(request)), dict_b)
    # GET /block/{BlockId}/Signature
    request = self._create_get_request("/block/" + trans_block.Identifier +
                                       "/Signature", {})
    self.assertEquals(block_page.do_get(request),
                      '"' + trans_block.Signature + '"')

def test_matching_no_order(self):
    signingkey = signed_object.generate_signing_key()
    ident = signed_object.generate_identifier(signingkey)
    node = Node(identifier=ident, signingkey=signingkey,
                address=("localhost", 10004))
    node.is_peer = True
    path = tempfile.mkdtemp()
    gossip = Gossip(node)
    journal = Journal(node,
                      gossip,
                      gossip.dispatcher,
                      consensus=DevModeConsensus(),
                      data_directory=path)
    transaction = BondTransaction({
        "UpdateType": "CreateQuote",
        'Updates': [{
            "UpdateType": "CreateQuote",
            "Firm": "ABCD",
            "Isin": "US912828R770",
            "BidPrice": "101",
            "BidQty": 250000,
            "AskPrice": "101",
            "AskQty": 250000,
            "object_id": "555553716009ca1786222a44347ccff258a4ab6029" +
                         "d936664fde0d13f23992b7"
        }]
    })
    transaction.sign_object(self.key)
    try:
        transaction.check_valid(self.store)
        transaction.apply(self.store)
    except InvalidTransactionError:
        self.fail("This should be valid")
    journal.global_store.TransactionStores['/BondTransaction'] = self.store
    matched_orders = _generate_match_orders(journal)
    self.assertEquals(matched_orders, [])

def test_web_api_stats(self):
    # Test _handlestatrequest
    local_node = self._create_node(8803)
    gossip = Gossip(local_node)
    path = tempfile.mkdtemp()
    # Setup ledger and StatisticsPage
    ledger = Journal(gossip, data_directory=path, genesis_ledger=True)
    validator = TestValidator(ledger)
    statistics_page = StatisticsPage(validator)
    request = self._create_get_request("/stat", {})
    try:
        statistics_page.do_get(request)
        self.fail("This should cause an error")
    except:
        self.assertIsNotNone(statistics_page)
    dic = {}
    dic["ledger"] = gossip.StatDomains["ledger"].get_stats()
    dic["ledgerconfig"] = gossip.StatDomains["ledgerconfig"].get_stats()
    dic["message"] = gossip.StatDomains["message"].get_stats()
    dic["packet"] = gossip.StatDomains["packet"].get_stats()
    # GET /statistics/ledger
    request = self._create_get_request("/statistics/ledger", {})
    self.assertEquals(yaml.load(statistics_page.do_get(request)), dic)
    # GET /statistics/node - with no peers
    request = self._create_get_request("/statistics/node", {})
    self.assertEquals(yaml.load(statistics_page.do_get(request)), {})
    node = self._create_node(8804)
    gossip.add_node(node)
    dic2 = {}
    dic2[node.Name] = node.Stats.get_stats()
    dic2[node.Name]["IsPeer"] = node.is_peer
    # GET /statistics/node - with one peer
    self.assertEquals(yaml.load(statistics_page.do_get(request)), dic2)
    request = self._create_get_request("AnythingElse", {})
    dic3 = statistics_page.do_get(request)
    self.assertTrue('Invalid page name' in dic3)

def do_poet0_genesis(args):
    # Get ledger config:
    # ...set the default value of config because argparse 'default' in
    # ...combination with action='append' does the wrong thing.
    if args.config is None:
        args.config = ['txnvalidator.js']
    # ...convert any comma-delimited argument strings to list elements
    for arglist in [args.config]:
        if arglist is not None:
            for arg in arglist:
                if ',' in arg:
                    loc = arglist.index(arg)
                    arglist.pop(loc)
                    for element in reversed(arg.split(',')):
                        arglist.insert(loc, element)
    options_config = ArgparseOptionsConfig(
        [
            ('conf_dir', 'ConfigDirectory'),
            ('data_dir', 'DataDirectory'),
            ('type', 'LedgerType'),
            ('log_config', 'LogConfigFile'),
            ('keyfile', 'KeyFile'),
            ('node', 'NodeName'),
            ('verbose', 'Verbose'),
            ('family', 'TransactionFamilies')
        ],
        args)
    cfg = get_validator_configuration(args.config, options_config)

    # Obtain Journal object:
    # ...set WaitTimer globals
    target_wait_time = cfg.get("TargetWaitTime")
    initial_wait_time = cfg.get("InitialWaitTime")
    certificate_sample_length = cfg.get('CertificateSampleLength')
    fixed_duration_blocks = cfg.get("FixedDurationBlocks")
    from journal.consensus.poet0.wait_timer import set_wait_timer_globals
    set_wait_timer_globals(target_wait_time,
                           initial_wait_time,
                           certificate_sample_length,
                           fixed_duration_blocks,
                           )
    # ...build Gossip dependency
    (nd, _) = parse_networking_info(cfg)
    minimum_retries = cfg.get("MinimumRetries")
    retry_interval = cfg.get("RetryInterval")
    gossiper = Gossip(nd, minimum_retries, retry_interval)
    # ...build Journal
    min_txn_per_block = cfg.get("MinimumTransactionsPerBlock")
    max_txn_per_block = cfg.get("MaxTransactionsPerBlock")
    max_txn_age = cfg.get("MaxTxnAge")
    genesis_ledger = cfg.get("GenesisLedger")
    data_directory = cfg.get("DataDirectory")
    store_type = cfg.get("StoreType")
    stat_domains = {}
    from journal.consensus.poet0.poet_consensus import PoetConsensus
    consensus_obj = PoetConsensus(cfg)
    journal = Journal(gossiper.LocalNode,
                      gossiper,
                      gossiper.dispatcher,
                      consensus_obj,
                      stat_domains,
                      minimum_transactions_per_block=min_txn_per_block,
                      max_transactions_per_block=max_txn_per_block,
                      max_txn_age=max_txn_age,
                      genesis_ledger=genesis_ledger,
                      data_directory=data_directory,
                      store_type=store_type,
                      )
    # ...add 'built in' txn families
    default_transaction_families = [endpoint_registry]
    for txn_family in default_transaction_families:
        txn_family.register_transaction_types(journal)
    # ...add auxiliary transaction families
    for txn_family_module_name in cfg.get("TransactionFamilies", []):
        txn_family = importlib.import_module(txn_family_module_name)
        txn_family.register_transaction_types(journal)

    # Make genesis block:
    # ...make sure there is no current chain here, or fail
    # ...create block g_block
    g_block = journal.build_block(genesis=True)
    journal.claim_block(g_block)
    # ...simulate receiving the genesis block msg from reactor to force commit
    g_block_msg = gossiper.IncomingMessageQueue.pop()
    journal.dispatcher.dispatch(g_block_msg)
    journal.initialization_complete()
    head = journal.most_recent_committed_block_id
    chain_len = len(journal.committed_block_ids())

    # Run shutdown:
    # ...persist new state
    journal.shutdown()
    # ...release gossip obj's UDP port
    gossiper.Listener.loseConnection()
    gossiper.Listener.connectionLost(reason=None)

    # Log genesis data, then write it out to ease dissemination
    genesis_data = {
        'GenesisId': head,
        'ChainLength': chain_len,
    }
    gblock_fname = genesis_info_file_name(cfg['DataDirectory'])
    LOGGER.info('genesis data: %s', genesis_data)
    LOGGER.info('writing genesis data to %s', gblock_fname)
    with open(gblock_fname, 'w') as f:
        f.write(json.dumps(genesis_data))

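# Illustrative sketch (not part of the source): reading back the genesis data
# file written by do_poet0_genesis above. The key names mirror the json.dumps
# call; the helper name is hypothetical, and json / genesis_info_file_name are
# assumed to be available from the module's existing imports.
def _read_genesis_data(data_directory):
    gblock_fname = genesis_info_file_name(data_directory)
    with open(gblock_fname) as f:
        genesis_data = json.load(f)
    # Expected keys, as written above: 'GenesisId' and 'ChainLength'
    return genesis_data
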
def do_poet1_genesis(args):
    # Get journal config:
    cfg = mirror_validator_parsing(args)

    # Check for existing block store
    node_name = cfg.get("NodeName")
    data_directory = cfg.get("DataDirectory")
    store_type = cfg.get("StoreType")
    check_for_chain(data_directory, node_name, store_type)

    # Obtain Journal object:
    # ...set WaitTimer globals
    target_wait_time = cfg.get("TargetWaitTime")
    initial_wait_time = cfg.get("InitialWaitTime")
    certificate_sample_length = cfg.get('CertificateSampleLength')
    fixed_duration_blocks = cfg.get("FixedDurationBlocks")
    set_wait_timer_globals(target_wait_time,
                           initial_wait_time,
                           certificate_sample_length,
                           fixed_duration_blocks,
                           )
    # ...build Gossip dependency
    (nd, _) = parse_networking_info(cfg)
    minimum_retries = cfg.get("MinimumRetries")
    retry_interval = cfg.get("RetryInterval")
    gossiper = Gossip(nd, minimum_retries, retry_interval)
    # ...build Journal
    min_txn_per_block = cfg.get("MinimumTransactionsPerBlock")
    max_txn_per_block = cfg.get("MaxTransactionsPerBlock")
    max_txn_age = cfg.get("MaxTxnAge")
    stat_domains = {}
    consensus_obj = PoetConsensus(cfg)
    journal = Journal(gossiper.LocalNode,
                      gossiper,
                      gossiper.dispatcher,
                      consensus_obj,
                      stat_domains,
                      minimum_transactions_per_block=min_txn_per_block,
                      max_transactions_per_block=max_txn_per_block,
                      max_txn_age=max_txn_age,
                      data_directory=data_directory,
                      store_type=store_type,
                      )
    # ...add 'built in' txn families
    default_transaction_families = [
        endpoint_registry,
        validator_registry,
    ]
    for txn_family in default_transaction_families:
        txn_family.register_transaction_types(journal)
    # ...add auxiliary transaction families
    for txn_family_module_name in cfg.get("TransactionFamilies", []):
        txn_family = importlib.import_module(txn_family_module_name)
        txn_family.register_transaction_types(journal)

    # Make genesis block:
    consensus_obj.register_signup_information(journal=journal)
    # ...make sure there is no current chain here, or fail
    # ...pop VR seed (we'll presently defer resolving VR seed issues)
    vr_seed = gossiper.IncomingMessageQueue.pop()
    journal.initial_transactions.append(vr_seed.Transaction)
    # ...create block g_block (including VR seed txn just popped)
    journal.on_genesis_block.fire(journal)
    journal.initializing = False
    for txn in journal.initial_transactions:
        journal.add_pending_transaction(txn, build_block=False)
    g_block = journal.build_block(genesis=True)  # seed later...
    journal.claim_block(g_block)
    # ...simulate receiving the genesis block msg from reactor to force commit
    g_block_msg = gossiper.IncomingMessageQueue.pop()
    poet_public_key = g_block.poet_public_key
    journal.dispatcher.dispatch(g_block_msg)
    journal.initialization_complete()
    head = journal.most_recent_committed_block_id
    chain_len = len(journal.committed_block_ids())

    # Run shutdown:
    # ...persist new state
    journal.shutdown()
    # ...release gossip obj's UDP port
    gossiper.Listener.loseConnection()
    gossiper.Listener.connectionLost(reason=None)

    # Log genesis data, then write it out to ease dissemination
    genesis_data = {
        'GenesisId': head,
        'ChainLength': chain_len,
        'PoetPublicKey': poet_public_key,
    }
    gblock_fname = genesis_info_file_name(cfg['DataDirectory'])
    LOGGER.info('genesis data: %s', genesis_data)
    LOGGER.info('writing genesis data to %s', gblock_fname)
    with open(gblock_fname, 'w') as f:
        f.write(json.dumps(genesis_data, indent=4))

def do_dev_mode_genesis(args):
    # Get journal config:
    cfg = mirror_validator_parsing(args)

    # Check for existing block store
    node_name = cfg.get("NodeName")
    data_directory = cfg.get("DataDirectory")
    store_type = cfg.get("StoreType")
    check_for_chain(data_directory, node_name, store_type)

    # Obtain Journal object:
    # ...build Gossip dependency
    (nd, _) = parse_networking_info(cfg)
    minimum_retries = cfg.get("MinimumRetries")
    retry_interval = cfg.get("RetryInterval")
    gossiper = Gossip(nd, minimum_retries, retry_interval)
    # ...build Journal
    min_txn_per_block = cfg.get("MinimumTransactionsPerBlock")
    max_txn_per_block = cfg.get("MaxTransactionsPerBlock")
    max_txn_age = cfg.get("MaxTxnAge")
    stat_domains = {}
    consensus_obj = DevModeConsensus(
        block_publisher=True,
        block_wait_time=cfg.get('BlockWaitTime'))
    journal = Journal(gossiper.LocalNode,
                      gossiper,
                      gossiper.dispatcher,
                      consensus_obj,
                      stat_domains,
                      minimum_transactions_per_block=min_txn_per_block,
                      max_transactions_per_block=max_txn_per_block,
                      max_txn_age=max_txn_age,
                      data_directory=data_directory,
                      store_type=store_type,
                      )
    # ...add 'built in' txn families
    default_transaction_families = [
        endpoint_registry
    ]
    for txn_family in default_transaction_families:
        txn_family.register_transaction_types(journal)
    # ...add auxiliary transaction families
    for txn_family_module_name in cfg.get("TransactionFamilies", []):
        txn_family = importlib.import_module(txn_family_module_name)
        txn_family.register_transaction_types(journal)

    # Make genesis block:
    # ...make sure there is no current chain here, or fail
    # ...create block g_block
    journal.on_genesis_block.fire(journal)
    journal.initializing = False
    for txn in journal.initial_transactions:
        journal.add_pending_transaction(txn, build_block=False)
    g_block = journal.build_block(genesis=True)
    journal.claim_block(g_block)
    # ...simulate receiving the genesis block msg from reactor to force commit
    g_block_msg = gossiper.IncomingMessageQueue.pop()
    journal.dispatcher.dispatch(g_block_msg)
    journal.initialization_complete()
    head = journal.most_recent_committed_block_id
    chain_len = len(journal.committed_block_ids())

    # Run shutdown:
    # ...persist new state
    journal.shutdown()
    # ...release gossip obj's UDP port
    gossiper.Listener.loseConnection()
    gossiper.Listener.connectionLost(reason=None)

    # Log genesis data, then write it out to ease dissemination
    genesis_data = {
        'GenesisId': head,
        'ChainLength': chain_len,
    }
    gblock_fname = genesis_info_file_name(cfg['DataDirectory'])
    LOGGER.info('genesis data: %s', genesis_data)
    LOGGER.info('writing genesis data to %s', gblock_fname)
    with open(gblock_fname, 'w') as f:
        f.write(json.dumps(genesis_data))

def local_main(config, windows_service=False, daemonized=False):
    """
    Implement the actual application logic for starting the txnvalidator
    """

    # If this process has been daemonized, then we want to make
    # sure to print out an information message as quickly as possible
    # to the logger for debugging purposes.
    if daemonized:
        logger.info('validator has been daemonized')

    # These imports are delayed because of poor interactions between
    # epoll and fork. Unfortunately, these import statements set up
    # epoll and we need that to happen after the forking done with
    # Daemonize(). This is a side-effect of importing twisted.
    from twisted.internet import reactor
    from txnserver.validator import parse_networking_info
    from txnserver.validator import Validator
    from txnserver import web_api
    from gossip.gossip_core import GossipException
    from gossip.gossip_core import Gossip

    logger.warn('validator pid is %s', os.getpid())

    consensus_type = config.get('LedgerType', 'poet0')
    stat_domains = {}

    try:
        (node, http_port) = parse_networking_info(config)
        # to construct a validator, we pass it a consensus specific journal
        validator = None
        journal = None
        # Gossip parameters
        minimum_retries = config.get("MinimumRetries")
        retry_interval = config.get("RetryInterval")
        gossip = Gossip(node, minimum_retries, retry_interval, stat_domains)
        # WaitTimer globals
        target_wait_time = config.get("TargetWaitTime")
        initial_wait_time = config.get("InitialWaitTime")
        certificate_sample_length = config.get('CertificateSampleLength')
        fixed_duration_blocks = config.get("FixedDurationBlocks")
        minimum_wait_time = config.get("MinimumWaitTime")
        # Journal parameters
        min_txn_per_block = config.get("MinimumTransactionsPerBlock")
        max_txn_per_block = config.get("MaxTransactionsPerBlock")
        max_txn_age = config.get("MaxTxnAge")
        data_directory = config.get("DataDirectory")
        store_type = config.get("StoreType")

        if consensus_type == 'poet0':
            from sawtooth_validator.consensus.poet0 import poet_consensus
            from sawtooth_validator.consensus.poet0.wait_timer \
                import set_wait_timer_globals
            set_wait_timer_globals(target_wait_time,
                                   initial_wait_time,
                                   certificate_sample_length,
                                   fixed_duration_blocks)
            # Continue to pass config to PoetConsensus for possible other
            # enclave implementations - poet_enclave.initialize
            consensus = poet_consensus.PoetConsensus(config)
        elif consensus_type == 'poet1':
            from sawtooth_validator.consensus.poet1 import poet_consensus
            from sawtooth_validator.consensus.poet1.wait_timer \
                import set_wait_timer_globals
            set_wait_timer_globals(target_wait_time,
                                   initial_wait_time,
                                   certificate_sample_length,
                                   fixed_duration_blocks,
                                   minimum_wait_time)
            # Continue to pass config to PoetConsensus for possible other
            # enclave implementations - poet_enclave.initialize
            consensus = poet_consensus.PoetConsensus(config)
        elif consensus_type == 'quorum':
            quorum = config.get("Quorum")
            nodes = config.get("Nodes")
            vote_time_interval = config.get("VoteTimeInterval")
            ballot_time_interval = config.get("BallotTimeInterval")
            voting_quorum_target_size = config.get("VotingQuorumTargetSize")
            from sawtooth_validator.consensus.quorum import quorum_consensus
            consensus = quorum_consensus.QuorumConsensus(
                vote_time_interval,
                ballot_time_interval,
                voting_quorum_target_size,
                quorum,
                nodes)
        elif consensus_type == 'dev_mode':
            block_publisher = config.get("DevModePublisher", False)
            block_wait_time = config.get("BlockWaitTime")
            from sawtooth_validator.consensus.dev_mode \
                import dev_mode_consensus
            consensus = dev_mode_consensus.DevModeConsensus(
                block_publisher,
                block_wait_time)
        else:
            warnings.warn('Unknown consensus type %s' % consensus_type)
            sys.exit(1)

        permissioned_validators = \
            config.get("WhitelistOfPermissionedValidators")

        journal = Journal(gossip.LocalNode,
                          gossip,
                          gossip.dispatcher,
                          consensus,
                          permissioned_validators,
                          stat_domains,
                          min_txn_per_block,
                          max_txn_per_block,
                          max_txn_age,
                          data_directory,
                          store_type)

        validator = Validator(
            gossip,
            journal,
            stat_domains,
            config,
            windows_service=windows_service,
            http_port=http_port,
        )
    except GossipException as e:
        print >> sys.stderr, str(e)
        sys.exit(1)

    listen_info = config.get("Listen", None)
    web_api.initialize_web_server(listen_info, validator)

    # go through the list of transaction families that should be initialized
    # in this validator. the endpoint registry is always included
    if consensus_type == 'poet1':
        from sawtooth_validator.consensus.poet1 import validator_registry
        validator_registry.register_transaction_types(journal)
    for txnfamily in config.get('TransactionFamilies'):
        logger.info("adding transaction family: %s", txnfamily)
        try:
            validator.add_transaction_family(
                importlib.import_module(txnfamily))
        except ImportError:
            warnings.warn("transaction family not found: {}".format(txnfamily))
            sys.exit(1)

    # attempt to restore journal state from persistence
    try:
        validator.journal.restore()
    except KeyError as e:
        logger.error(
            "Config is not compatible with data files"
            " found on restore. Keyerror on %s", e)
        sys.exit(1)

    try:
        validator.pre_start()
        reactor.run(installSignalHandlers=False)
    except KeyboardInterrupt:
        pass
    except SystemExit as e:
        raise e
    except:
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)

def do_poet0_genesis(args):
    # Get ledger config:
    # set the default value of config because argparse 'default' in
    # combination with action='append' does the wrong thing.
    if args.config is None:
        args.config = ['txnvalidator.js']
    # convert any comma-delimited argument strings to list elements
    for arglist in [args.config]:
        if arglist is not None:
            for arg in arglist:
                if ',' in arg:
                    loc = arglist.index(arg)
                    arglist.pop(loc)
                    for element in reversed(arg.split(',')):
                        arglist.insert(loc, element)
    options_config = ArgparseOptionsConfig(
        [
            ('conf_dir', 'ConfigDirectory'),
            ('data_dir', 'DataDirectory'),
            ('type', 'LedgerType'),
            ('log_config', 'LogConfigFile'),
            ('keyfile', 'KeyFile'),
            ('node', 'NodeName'),
            ('verbose', 'Verbose'),
            ('family', 'TransactionFamilies')
        ],
        args)
    cfg = get_validator_configuration(args.config, options_config)

    # Perform requisite overrides and validation:
    cfg['GenesisLedger'] = True
    # should check that signing key exists...

    # debug report
    for key, value in cfg.iteritems():
        LOGGER.debug("CONFIG: %s = %s", key, value)

    # set WaitTimer globals
    target_wait_time = cfg.get("TargetWaitTime")
    initial_wait_time = cfg.get("InitialWaitTime")
    certificate_sample_length = cfg.get('CertificateSampleLength')
    fixed_duration_blocks = cfg.get("FixedDurationBlocks")
    from journal.consensus.poet0.wait_timer \
        import set_wait_timer_globals
    set_wait_timer_globals(target_wait_time,
                           initial_wait_time,
                           certificate_sample_length,
                           fixed_duration_blocks,
                           )

    # build gossiper
    (nd, _) = parse_networking_info(cfg)
    minimum_retries = cfg.get("MinimumRetries")
    retry_interval = cfg.get("RetryInterval")
    gossiper = Gossip(nd, minimum_retries, retry_interval)

    # build journal
    min_txn_per_block = cfg.get("MinimumTransactionsPerBlock")
    max_txn_per_block = cfg.get("MaxTransactionsPerBlock")
    max_txn_age = cfg.get("MaxTxnAge")
    genesis_ledger = cfg.get("GenesisLedger")
    data_directory = cfg.get("DataDirectory")
    store_type = cfg.get("StoreType")
    stat_domains = {}
    # in future, dynamically select consensus obj based on ConsensusType
    journal = Journal(gossiper.LocalNode,
                      gossiper,
                      gossiper.dispatcher,
                      PoetConsensus(cfg),
                      stat_domains,
                      minimum_transactions_per_block=min_txn_per_block,
                      max_transactions_per_block=max_txn_per_block,
                      max_txn_age=max_txn_age,
                      genesis_ledger=genesis_ledger,
                      data_directory=data_directory,
                      store_type=store_type,
                      )

    # may need to add transaction family objects ad hoc from cfg
    dfl_txn_families = [endpoint_registry, integer_key]
    for txnfamily in dfl_txn_families:
        txnfamily.register_transaction_types(journal)
    # ...skipping onNodeDisconnect handler (using ledger, not validator...)

    # Create genesis block:
    # we should make sure there is no current chain here, or fail
    # calling initialization_complete will create the genesis block
    journal.initialization_complete()
    # simulate receiving the genesis block msg from reactor to force commit
    msg = journal.gossip.IncomingMessageQueue.pop()
    (_, msg_handler) = journal.dispatcher.message_handler_map[msg.MessageType]
    msg_handler(msg, journal)

    # Gather data, then shutdown to save state:
    head = journal.most_recent_committed_block_id
    # ...not sure why n_blocks is experimentally 0 and not 1
    # ...if we only make the genesis, it would be good to check n_blks = 1
    n_blks = journal.committed_block_count
    journal.shutdown()

    # log genesis data, then write it out to ease dissemination
    genesis_data = {
        'GenesisId': head,
        'ChainLength': n_blks,
    }
    gblock_fname = get_genesis_block_id_file_name(cfg['DataDirectory'])
    LOGGER.info('genesis data: %s', genesis_data)
    LOGGER.info('writing genesis data to %s', gblock_fname)
    with open(gblock_fname, 'w') as f:
        f.write(json.dumps(genesis_data))
