def __init__(self):
    """Initialize the Block voter.

    Resolves the consensus rules (the configured plugin, or the default
    ``BaseConsensusRules``) and prepares the state shared with the
    voting subprocesses.
    """
    # Since we cannot share a connection to RethinkDB using
    # multiprocessing, a temporary Bigchain instance is created here
    # only to query RethinkDB.
    plugin_name = bigchaindb.config.get('consensus_plugin')
    self.consensus = (config_utils.load_consensus_plugin(plugin_name)
                      if plugin_name else BaseConsensusRules)
    # This is the Bigchain instance that will be "shared" (aka: copied)
    # by all the subprocesses.
    self.bigchain = Bigchain()
    self.last_voted_id = Bigchain().get_last_voted_block().id
    self.counters = Counter()
    self.validity = {}
    me = self.bigchain.me
    self.invalid_dummy_tx = Transaction.create([me], [([me], 1)])
def get():
    """Demo: build and sign a CREATE transaction, create a block from it,
    then print the block voters and the election status of a fixed block id."""
    # cryptographic identities
    alice, bob = generate_keypair(), generate_keypair()
    print(" ")
    # digital asset definition (e.g. bicycle)
    asset = Asset(data={
        "bicycle": {
            "manufacturer": "bkfab",
            "serial_number": "abcd1234"
        }
    })
    # metadata definition
    metadata = {'planet': 'earth'}
    # build the transaction
    # TODO: owners_before might be node_pubkey in v0.8.0
    # NOTE(review): the second argument is a bare pubkey list here, while
    # sibling functions pass [([pk], amount)] — confirm which API version
    # this targets.
    tx = Transaction.create([alice.public_key], [alice.public_key],
                            metadata=metadata, asset=asset)
    # sign with alice's private key
    signed = tx.sign([alice.private_key])
    bigchain = Bigchain()
    block = bigchain.create_block([signed])
    voters = block.to_dict()['block']['voters']
    print(voters)
    tx_id = '2cb004cad29c0b79872646558f8c867a4c0aecbc4997f0917'
    signed = bigchain.get_transaction(tx_id)
    status = bigchain.block_election_status(
        '2257384a0cee8cf98bd82c3142dd38eee5c268c124b5b357b773b8c6c1fa1221',
        voters)
    print("block status : ", status)
def __init__(self, host=None, port=None, dbname=None, pub_key=None,
             priv_key=None, keyring=None, consensus_plugin=None):
    """Initialize a connection wrapper around RethinkDB and Bigchain.

    Args:
        host: database host.
        port: database port.
        dbname: database name.
        pub_key: node public key forwarded to ``Bigchain``.
        priv_key: node private key forwarded to ``Bigchain``.
        keyring: list of federation public keys (default: empty list).
        consensus_plugin: optional consensus plugin name.
    """
    # BUG FIX: the original signature used a mutable default (keyring=[]),
    # which is shared across every instance of the class.
    if keyring is None:
        keyring = []
    self.host = host
    self.port = port
    self.dbname = dbname
    self.conn = r.connect(host=host, port=port, db=dbname)
    self.bigchain = Bigchain(host=host, port=port, dbname=dbname,
                             public_key=pub_key, private_key=priv_key,
                             keyring=keyring,
                             consensus_plugin=consensus_plugin)
def test_check_for_quorum_invalid(b, user_pk):
    """A block whose votes split valid/invalid should reach invalid quorum
    and be passed on by the election pipeline."""
    from bigchaindb.models import Transaction

    e = election.Election()

    # create a block with one transaction
    tx1 = Transaction.create([b.me], [([user_pk], 1)])
    # BUG FIX: Transaction.sign returns the signed transaction; the
    # original code discarded the return value, so the block was built
    # from an unsigned transaction.
    tx1 = tx1.sign([b.me_private])
    test_block = b.create_block([tx1])

    # simulate a federation with four voters
    key_pairs = [crypto.generate_key_pair() for _ in range(4)]
    test_federation = [
        Bigchain(public_key=key_pair[1], private_key=key_pair[0])
        for key_pair in key_pairs
    ]

    # add voters to block and write
    test_block.voters = [key_pair[1] for key_pair in key_pairs]
    test_block = test_block.sign(b.me_private)
    b.write_block(test_block)

    # split vote: two members vote valid, two vote invalid
    votes = [member.vote(test_block.id, 'a' * 64, True)
             for member in test_federation[:2]] + \
            [member.vote(test_block.id, 'b' * 64, False)
             for member in test_federation[2:]]

    # cast votes
    for vote in votes:
        b.write_vote(vote)

    # since this block is now invalid, should pass to the next process
    assert e.check_for_quorum(votes[-1]) == test_block
def validate_transactions(self):
    """Validate incoming transactions, forwarding valid ones downstream.

    Consumes from ``q_tx_to_validate``; every transaction id is queued
    for backlog deletion, and valid transactions are queued for block
    creation. A ``'stop'`` poison pill is propagated to both queues.
    """
    # each process needs its own Bigchain instance (own DB connection)
    bigchain = Bigchain()
    while True:
        statsd_rate = bigchaindb.config['statsd']['rate']
        self.monitor.gauge('tx_queue_gauge',
                           self.q_tx_to_validate.qsize(),
                           rate=statsd_rate)
        tx = self.q_tx_to_validate.get()
        # poison pill: propagate and exit
        if tx == 'stop':
            self.q_tx_delete.put('stop')
            self.q_tx_validated.put('stop')
            return
        self.q_tx_delete.put(tx['_id'])
        with self.monitor.timer('validate_transaction', rate=statsd_rate):
            validated = bigchain.is_valid_transaction(tx)
        if validated:
            self.q_tx_validated.put(tx)
def create():
    """Demo: create, sign and write a CREATE transaction to the backlog,
    then fetch it back by id and return it as a dict."""
    # cryptographic identities
    alice, bob = generate_keypair(), generate_keypair()
    # digital asset definition (e.g. bicycle)
    asset = Asset(data={
        "bicycle": {
            "manufacturer": "bkfab",
            "serial_number": "abcd1234"
        }
    })
    # metadata definition
    metadata = {'planet': 'earth'}
    # build the CREATE transaction
    # TODO: owners_before might be node_pubkey in v0.8.0
    tx = Transaction.create([alice.public_key], [([alice.public_key], 1)],
                            metadata=metadata, asset=asset)
    # sign with alice's private key
    signed = tx.sign([alice.private_key])
    txid = signed.to_dict()['id']
    # write to backlog
    bigchain = Bigchain()
    bigchain.write_transaction(signed)
    # give the pipeline a couple of seconds to process it
    sleep(2)
    # fetch the transaction back by id
    return bigchain.get_transaction(txid).to_dict()
def create_duplicate_tx():
    """Demo: create and write a transaction, then write the very same
    (already stored) transaction a second time, printing the database
    response of each step.

    NOTE(review): relies on module-level names ``generate_keypair``,
    ``Bigchain``, ``sleep`` and the global ``delay`` — confirm they are
    defined/imported at module scope.
    """
    ##################################################### 1.CREATE
    # Cryptographic Identities Generation
    alice, bob = generate_keypair(), generate_keypair()
    # Digital Asset Definition (e.g. bicycle)
    asset = Asset(data={"bicycle": {"manufacturer": "bkfab",
                                    "serial_number": "abcd1234"}})
    # Metadata Definition
    metadata = {'planet': 'earth1'}
    # create trnsaction TODO : owners_before might be node_pubkey in v0.8.0
    tx = Transaction.create([alice.public_key], [([alice.public_key], 1)],
                            metadata=metadata, asset=asset)
    print(" ")
    print("1.tx_create asset id : alice-----bicycle(",
          tx.to_dict()['transaction']['asset']['id'], ")----->alice")
    print("1.tx_create tx id : ", tx.to_dict()['id'])
    # sign with alice's private key
    tx = tx.sign([alice.private_key])
    tx_id = tx.to_dict()['id']
    # write to backlog
    b = Bigchain()
    print("1.tx_create db response : ", b.write_transaction(tx))
    # wait for the pipeline to pick the transaction up
    sleep(delay)
    # get tx by id
    tx = b.get_transaction(tx_id)
    print("1.tx_create query : ", tx)
    print(" ")
    #####################################################
    # writing the same transaction again: the response shows how the
    # backlog reacts to a duplicate write
    print("2.write dulpicate tx: ", b.write_transaction(tx))
def test_get_owned_ids_calls_get_outputs_filtered():
    """get_owned_ids must delegate to get_outputs_filtered(spent=False)."""
    from bigchaindb.core import Bigchain
    with patch('bigchaindb.core.Bigchain.get_outputs_filtered') as mocked:
        bigchain = Bigchain()
        owned = bigchain.get_owned_ids('abc')
    mocked.assert_called_once_with('abc', spent=False)
    assert owned == mocked()
def create_blocks(self):
    """Batch validated transactions (up to 1000 at a time) into blocks.

    Reads from ``q_tx_validated`` and writes created blocks to
    ``q_block``; a ``'stop'`` poison pill is forwarded downstream.
    """
    # each process needs its own Bigchain instance (own DB connection)
    bigchain = Bigchain()
    poisoned = False
    while True:
        # collect at most 1000 transactions for the next block
        batch = []
        for _ in range(1000):
            try:
                tx = self.q_tx_validated.get(timeout=5)
            except queue.Empty:
                break
            if tx == 'stop':
                poisoned = True
                break
            batch.append(tx)
        # skip block creation when no transactions arrived
        if batch:
            self.q_block.put(bigchain.create_block(batch))
        if poisoned:
            self.q_block.put('stop')
            return
def test_check_for_quorum_invalid_prev_node(b, user_pk):
    """When voters disagree on the previous block, the block is invalid
    and the election pipeline passes it on."""
    from bigchaindb.models import Transaction

    e = election.Election()

    # create a block with one transaction
    tx1 = Transaction.create([b.me], [([user_pk], 1)])
    test_block = b.create_block([tx1])

    # simulate a federation with four voters
    key_pairs = [crypto.generate_key_pair() for _ in range(4)]
    test_federation = [
        Bigchain(public_key=pub, private_key=priv)
        for priv, pub in key_pairs
    ]

    # add voters to block and write
    test_block.voters = [pub for _, pub in key_pairs]
    test_block = test_block.sign(b.me_private)
    b.write_block(test_block)

    # all votes are "valid", but the federation splits on the previous block
    votes = [member.vote(test_block.id, 'a' * 64, True)
             for member in test_federation[:2]]
    votes += [member.vote(test_block.id, 'b' * 64, True)
              for member in test_federation[2:]]

    # cast votes
    for vote in votes:
        b.write_vote(vote)

    # since nodes cannot agree on prev block, the block is invalid
    assert e.check_for_quorum(votes[-1]) == test_block
def test_check_for_quorum_valid(b, user_pk):
    """A unanimously valid block reaches valid quorum, so the election
    pipeline does nothing (returns None)."""
    from bigchaindb.models import Transaction

    # simulate a federation with four voters
    key_pairs = [crypto.generate_key_pair() for _ in range(4)]
    test_federation = [
        Bigchain(public_key=pub, private_key=priv)
        for priv, pub in key_pairs
    ]

    b.nodes_except_me = [pub for _, pub in key_pairs]

    # create a block with one transaction
    tx1 = Transaction.create([b.me], [([user_pk], 1)])
    test_block = b.create_block([tx1])

    # sign the block and write it
    test_block = test_block.sign(b.me_private)
    b.write_block(test_block)

    # every federation member votes the block valid
    votes = [member.vote(test_block.id, 'a' * 64, True)
             for member in test_federation]

    # cast votes
    for vote in votes:
        b.write_vote(vote)

    e = election.Election()
    e.bigchain = b

    # since this block is valid, should go nowhere
    assert e.check_for_quorum(votes[-1]) is None
def validate(self):
    """Check whether incoming blocks are valid.

    Consumes blocks from ``q_blocks_to_validate`` and emits
    ``(block, previous_block_id, block_number, validity)`` tuples on
    ``q_validated_block``, then advances the shared previous-block
    id/number state. A ``'stop'`` poison pill is forwarded downstream.
    """
    # each process should create its own Bigchain instance so that it
    # has its own connection to the database
    bigchain = Bigchain()
    logger.info('voter waiting for new blocks')
    while True:
        incoming = self.q_blocks_to_validate.get()
        # poison pill: propagate and exit
        if incoming == 'stop':
            self.q_validated_block.put('stop')
            return
        logger.info('new_block arrived to voter')
        next_number = self.v_previous_block_number.value + 1
        is_valid = bigchain.is_valid_block(incoming)
        self.q_validated_block.put((incoming,
                                    self.v_previous_block_id.value.decode(),
                                    next_number,
                                    is_valid))
        # advance the shared chain position for the next iteration
        self.v_previous_block_id.value = incoming['id'].encode()
        self.v_previous_block_number.value = next_number
def delete_transactions(self):
    """Delete processed transactions from the MongoDB backlog.

    Drains ``q_tx_delete`` in batches of up to 1000 ids to reduce I/O,
    removing each from the ``bigchain.backlog`` collection; exits on the
    ``'stop'`` poison pill.
    """
    # each process keeps its own Bigchain instance / DB clients
    b = Bigchain()
    client = pymongo.MongoClient()
    backlog = client.bigchain.backlog
    poisoned = False
    while True:
        # batch ids to delete, reducing per-document round trips
        batch = []
        for _ in range(1000):
            try:
                tx = self.q_tx_delete.get(timeout=5)
            except queue.Empty:
                break
            if tx == 'stop':
                poisoned = True
                break
            batch.append(tx)
        if batch:
            for tx_id in batch:
                backlog.remove({'_id': tx_id})
        if poisoned:
            return
def bootstrap(self):
    """Recover transactions assigned to this node while it was offline.

    Queries the backlog for transactions assigned to this node (ordered
    by assignment timestamp), loads them into a fresh queue followed by
    one ``'stop'`` pill per CPU, and returns the queue.
    """
    # each process needs its own Bigchain instance (own DB connection)
    bigchain = Bigchain()
    # queue holding the initial backlog contents
    q_initial = mp.Queue()
    # fetch transactions assigned to this node, oldest first
    pending = r.table('backlog')\
        .between([bigchain.me, r.minval], [bigchain.me, r.maxval],
                 index='assignee__transaction_timestamp')\
        .order_by(index=r.asc('assignee__transaction_timestamp'))\
        .run(bigchain.conn)
    for tx in pending:
        q_initial.put(tx)
    # one poison pill per consumer process
    for _ in range(mp.cpu_count()):
        q_initial.put('stop')
    return q_initial
def get_changefeed():
    """Create and return an ordered changefeed of blocks starting from
    the last voted block."""
    bigchain = Bigchain()
    start_block_id = bigchain.get_last_voted_block().id
    feed = backend.query.get_new_blocks_feed(bigchain.connection,
                                             start_block_id)
    return Node(feed.__next__, name='changefeed')
def delete_transactions(self):
    """Delete processed transactions from the RethinkDB backlog.

    Drains ``q_tx_delete`` in batches of up to 1000 ids and issues one
    soft-durability delete per batch to reduce I/O; exits on the
    ``'stop'`` poison pill.
    """
    # each process needs its own Bigchain instance (own DB connection)
    bigchain = Bigchain()
    poisoned = False
    while True:
        # accumulate ids so deletes can be batched
        batch = []
        for _ in range(1000):
            try:
                tx = self.q_tx_delete.get(timeout=5)
            except queue.Empty:
                break
            if tx == 'stop':
                poisoned = True
                break
            batch.append(tx)
        if batch:
            r.table('backlog').get_all(*batch).delete(
                durability='soft').run(bigchain.conn)
        if poisoned:
            return
def validate(self):
    """Check whether incoming blocks are valid.

    Consumes blocks from ``q_blocks_to_validate``; blocks that already
    carry this node's vote are skipped, the rest are emitted as
    ``(block, previous_block_id, validity)`` tuples. Signals readiness
    via ``self.initialized`` and forwards the ``'stop'`` poison pill.
    """
    # each process should create its own Bigchain instance so that it
    # has its own connection to the database
    bigchain = Bigchain()
    logger.info('voter waiting for new blocks')
    # signal initialization complete
    self.initialized.set()
    while True:
        incoming = self.q_blocks_to_validate.get()
        # poison pill: propagate and exit
        if incoming == 'stop':
            self.q_validated_block.put('stop')
            return
        logger.info('new_block arrived to voter')
        with self.monitor.timer('validate_block'):
            # FIXME: the following check is done also in `is_valid_block`,
            # but validity can be true even if the block has already
            # a vote.
            if bigchain.has_previous_vote(incoming):
                continue
            is_valid = bigchain.is_valid_block(incoming)
        self.q_validated_block.put((incoming,
                                    self.v_previous_block_id.value.decode(),
                                    is_valid))
        self.v_previous_block_id.value = incoming['id'].encode()
def __init__(self, operation=1, prefeed=None, bigchain=None):
    """Initialize the changefeed node.

    Args:
        operation (int): changefeed operation selector (default 1).
            NOTE(review): semantics of the numeric codes are not visible
            here — confirm against the consumer of ``self.operation``.
        prefeed: optional iterable of items to emit before the live feed;
            ``None`` (or any falsy value) becomes an empty list.
        bigchain: optional ``Bigchain`` instance; a fresh one is created
            when omitted.
    """
    super().__init__(name='changefeed')
    self.prefeed = prefeed if prefeed else []
    self.operation = operation
    self.bigchain = bigchain or Bigchain()
    # Hard-coded sample GENESIS transaction, presumably used as template
    # or test data by this node — TODO confirm how txStr is consumed.
    # NOTE(review): the key 'relaction' looks like a typo for 'relation',
    # but it is runtime data and is therefore left untouched.
    self.txStr = {
        "id": "d875fbdadf6693d121b443cfa01a9a9256ae18e84bbc93dde8850881067d3e40",
        "transaction": {
            "asset": {
                "data": None,
                "divisible": False,
                "id": "ee9277a1-d8c7-41a3-86bc-8078ec9351ca",
                "refillable": False,
                "updatable": False
            },
            "conditions": [
                {
                    "amount": 1,
                    "cid": 0,
                    "condition": {
                        "details": {
                            "bitmask": 32,
                            "public_key": "5mVrPtqUzXwKYL2JeZo4cQq2spt8qfGVx3qE2V7NqgyU",
                            "signature": None,
                            "type": "fulfillment",
                            "type_id": 4
                        },
                        "uri": "cc:4:20:RtTtCxNf1Bq7MFeIToEosMAa3v_jKtZUtqiWAXyFz1c:96"
                    },
                    "owners_after": [
                        "5mVrPtqUzXwKYL2JeZo4cQq2spt8qfGVx3qE2V7NqgyU"
                    ]
                }
            ],
            "contracts": None,
            "fulfillments": [
                {
                    "fid": 0,
                    "fulfillment": "cf:4:RtTtCxNf1Bq7MFeIToEosMAa3v_jKtZUtqiWAXyFz1d7uHEPYdeANttkdzF5sfzsOpPAa4HeQvZ9xPl61ObH1xJUOgm3Q93iVX7HRwzuz10GW0d3Ef1KCZ0bClBqugcI",
                    "input": None,
                    "owners_before": [
                        "5mVrPtqUzXwKYL2JeZo4cQq2spt8qfGVx3qE2V7NqgyU"
                    ]
                }
            ],
            "metadata": {
                "data": {
                    "message": "Hello World from the BigchainDB"
                },
                "id": "8214065d-db18-4a44-a0ff-7b02bacb2785"
            },
            "operation": "GENESIS",
            "relaction": None,
            "timestamp": "1504784119013"
        },
        "version": 1
    }
def create_invalid_tx():
    """Create and return an invalid transaction.

    The transaction is invalid because it's missing the signature.
    """
    bigchain = Bigchain()
    return bigchain.create_transaction(bigchain.me, bigchain.me,
                                       None, 'CREATE')
def bft():
    """Demo: tamper with a block after creation and observe that the
    federation rejects it.

    Creates and signs a transaction, builds a block, alters the block's
    timestamp (invalidating its signature), writes the tampered block
    plus a 'valid' vote, and prints the election status before and after
    a 20 second wait.
    """
    # Cryptographic Identities Generation
    alice, bob = generate_keypair(), generate_keypair()
    print(" ")
    # Digital Asset Definition (e.g. bicycle)
    asset = Asset(data={
        "bicycle": {
            "manufacturer": "bkfab",
            "serial_number": "abcd1234"
        }
    })
    # Metadata Definition
    metadata = {'planet': 'earth'}
    # create trnsaction TODO : owners_before might be node_pubkey in v0.8.0
    tx = Transaction.create([alice.public_key], [([alice.public_key], 1)],
                            metadata=metadata, asset=asset)
    # sign with private key
    tx = tx.sign([alice.private_key])
    tx_id = tx.to_dict()['id']
    print("tx_id : ", tx_id)
    # create block
    b = Bigchain()
    block = b.create_block([tx])
    print("valid block timestamp : ", block.to_dict()['block']['timestamp'])
    # tamper block: changing the timestamp breaks the block signature
    block.timestamp = '1'
    print("tamper block.timestamp to 1 : ")
    block_id = block.to_dict()['id']
    block_voters = block.to_dict()['block']['voters']
    print("invalid block timestamp : ", block.to_dict()['block']['timestamp'])
    print("tamper_block_id : ", block_id)
    print("db response of block : ", b.write_block(block))
    sleep(0)
    # vote the tampered block valid from this node
    last_voted_id = b.get_last_voted_block().id
    vote = b.vote(block_id, last_voted_id, True)
    print("crate vote 'True' : ", vote)
    print("db response of vote : ", b.write_vote(vote))
    # election status immediately after writing the vote
    print("tamper_block status : ",
          b.block_election_status(block_id, block_voters))
    print("blocks_status_containing_tx : ",
          b.get_blocks_status_containing_tx(tx_id))
    print("wait for 20 sec : ")
    sleep(20)
    # status again after the rest of the federation had time to vote
    print("blocks_status_containing_tx : ",
          b.get_blocks_status_containing_tx(tx_id))
    print(" ")
def _print_unspent(bigchain, owner_pub, include_spent):
    """Print the owner's unspent outputs (details stripped) as JSON."""
    outputs = bigchain.get_outputs_filtered_not_include_freeze(
        owner_pub, include_spent)
    for output in outputs:
        output.pop('details')
    print('userA unspent asset:')
    print(json.dumps(outputs, indent=4))


def merge_utxo(alicepub, alicepriv, include_spent):
    """Merge all of alice's unspent outputs into a single output.

    Prints the unspent set, and when there is more than one output with
    positive total balance, issues a TRANSFER transaction back to alice
    combining them, waits for the block/vote, then prints the result.
    """
    asset = Asset(data={
        "bicycle": {
            "manufacturer": "bkfab",
            "serial_number": "abcd1234"
        }
    }, data_id="334cd061-846b-4213-bd25-588e951def5f")
    metadata = {'planet': 'earth'}
    b = Bigchain()
    # show the current unspent outputs
    _print_unspent(b, alicepub, include_spent)
    # re-fetch (the printed copies had their details removed) and build
    # one fulfillment per unspent output
    inputs = []
    balance = 0
    utxo = b.get_outputs_filtered_not_include_freeze(alicepub, include_spent)
    for unspent in utxo:
        inputs.append(Fulfillment.from_dict({
            'fulfillment': unspent['details'],
            'input': {
                'cid': unspent['cid'],
                'txid': unspent['txid'],
            },
            'owners_before': [alicepub],
        }))
        balance += unspent['amount']
    if balance <= 0:
        print('No need to merge, because of lack of balance')
    elif len(utxo) <= 1:
        print('No need to merge, because utxo len = 1')
    else:
        tx = Transaction.transfer(inputs, [([alicepub], balance)],
                                  metadata=metadata, asset=asset)
        tx = tx.sign([alicepriv])
        tx_id = tx.to_dict()['id']
        # write to backlog
        b.write_transaction(tx)
        print("========userA merge multi-asset========")
        print("========wait for block and vote...========")
        sleep(5)
        # fetch the merge transaction back by id
        tx = b.get_transaction(tx_id)
        print("merge txid:" + tx.to_dict()['id'])
        # show the unspent outputs after the merge
        _print_unspent(b, alicepub, include_spent)
def __init__(self):
    """Initialize the Block voter."""
    # A throwaway Bigchain instance queries RethinkDB here, because a
    # connection cannot be shared across processes with multiprocessing.
    last_voted = Bigchain().get_last_voted_block()
    self.last_voted_id = last_voted['id']
    self.consensus = config_utils.load_consensus_plugin()
    # This Bigchain instance is the one "shared" (i.e. copied) by all
    # the subprocesses.
    self.bigchain = Bigchain()
    self.counters = Counter()
    self.validity = {}
    self.invalid_dummy_tx = create_invalid_tx()
def __init__(self):
    """Initialize the Block voter."""
    # A connection to RethinkDB cannot be shared with multiprocessing,
    # so a temporary Bigchain instance performs the initial query while
    # self.bigchain is the instance "shared" (aka: copied) by all the
    # subprocesses.
    self.bigchain = Bigchain()
    self.last_voted_id = Bigchain().get_last_voted_block().id
    self.counters = Counter()
    self.validity = {}
    me = self.bigchain.me
    self.invalid_dummy_tx = Transaction.create([me], [([me], 1)])
def delete_databases(dbnames=None):
    """Drop each of the given RethinkDB databases, logging failures.

    Args:
        dbnames: iterable of database names to drop (default: none).
    """
    # BUG FIX: the original signature used a mutable default (dbnames=[]),
    # a shared-state pitfall; None is the safe sentinel.
    if dbnames is None:
        dbnames = []
    b = Bigchain()
    for dbname in dbnames:
        logger.info('Dropping database: {}'.format(dbname))
        try:
            r.db_drop(dbname).run(b.conn)
        # a missing database is only worth an informational log line
        except r.ReqlOpFailedError as e:
            logger.info(e.message)
def initial():
    """Return old transactions from the backlog."""
    bigchain = Bigchain()
    # transactions assigned to this node, ordered by assignment timestamp
    query = r.table('backlog').between(
        [bigchain.me, r.minval],
        [bigchain.me, r.maxval],
        index='assignee__transaction_timestamp')
    query = query.order_by(index=r.asc('assignee__transaction_timestamp'))
    return bigchain.connection.run(query)
def __init__(self, timeout=5, backlog_reassign_delay=None):
    """Initialize the StaleTransaction monitor.

    Args:
        timeout: how often to check for stale tx (in sec).
        backlog_reassign_delay: how stale a transaction should be before
            reassignment (in sec); when supplied, overrides the Bigchain
            default value.
    """
    self.timeout = timeout
    self.bigchain = Bigchain(
        backlog_reassign_delay=backlog_reassign_delay)
def create_app(debug=False):
    """Return an instance of the Flask application.

    Args:
        debug (bool): a flag to activate the debug mode for the app
            (default: False).
    """
    application = Flask(__name__)
    application.debug = debug
    # expose a Bigchain instance to the request handlers
    application.config['bigchain'] = Bigchain()
    application.register_blueprint(views.basic_views, url_prefix='/api/v1')
    return application
def get_connector_accounts(db='interledger'):
    """Return (account, account) tuples for every name that appears on
    exactly two accounts in the given database."""
    bigchain = Bigchain()
    by_name = lambda record: record['name']
    pairs = []
    records = sorted(retrieve_accounts(bigchain, db), key=by_name)
    for _, grouped in groupby(records, key=by_name):
        grouped = list(grouped)
        # only names backed by exactly two accounts form a connector pair
        if len(grouped) == 2:
            pairs.append(tuple(grouped))
    return pairs
def initial():
    """Return old transactions from the backlog."""
    bigchain = Bigchain()
    # transactions assigned to this node, ordered by assignment timestamp
    return r.table('backlog')\
        .between([bigchain.me, r.minval], [bigchain.me, r.maxval],
                 index='assignee__transaction_timestamp')\
        .order_by(index=r.asc('assignee__transaction_timestamp'))\
        .run(bigchain.conn)
def bootstrap(self):
    """Seed the voter's previous-block pointer before consuming the feed.

    Unvoted blocks added while the process was down are handled by the
    changefeed; here we only initialize ``v_previous_block_id`` to the
    last voted block's id.
    """
    bigchain = Bigchain()
    previous_id = bigchain.get_last_voted_block()['id']
    self.v_previous_block_id.value = previous_id.encode()