def test_full_pipeline(b, user_pk):
    import random
    from bigchaindb.backend import query
    from bigchaindb.models import Transaction

    outpipe = Pipe()

    # write two blocks
    txs = []
    for i in range(100):
        tx = Transaction.create([b.me], [([user_pk], 1)],
                                {'msg': random.random()})
        tx = tx.sign([b.me_private])
        txs.append(tx)

    valid_block = b.create_block(txs)
    b.write_block(valid_block)

    txs = []
    for i in range(100):
        tx = Transaction.create([b.me], [([user_pk], 1)],
                                {'msg': random.random()})
        tx = tx.sign([b.me_private])
        txs.append(tx)

    invalid_block = b.create_block(txs)
    b.write_block(invalid_block)

    pipeline = election.create_pipeline()
    pipeline.setup(indata=election.get_changefeed(), outdata=outpipe)
    pipeline.start()
    time.sleep(1)

    # vote one block valid, one invalid
    vote_valid = b.vote(valid_block.id, 'b' * 64, True)
    vote_invalid = b.vote(invalid_block.id, 'c' * 64, False)

    b.write_vote(vote_valid)
    b.write_vote(vote_invalid)

    outpipe.get()
    pipeline.terminate()

    # only transactions from the invalid block should be returned to
    # the backlog
    assert query.count_backlog(b.connection) == 100
    # NOTE: I'm still, I'm still tx from the block.
    tx_from_block = set([tx.id for tx in invalid_block.transactions])
    tx_from_backlog = set([
        tx['id'] for tx in list(query.get_stale_transactions(b.connection, 0))
    ])
    assert tx_from_block == tx_from_backlog

def test_changefeed_delete(mock_cursor_next, mock_changefeed_data):
    from bigchaindb.backend import get_changefeed, connect
    from bigchaindb.backend.changefeed import ChangeFeed

    conn = connect()
    mock_cursor_next.side_effect = [mock.DEFAULT] + mock_changefeed_data

    outpipe = Pipe()
    changefeed = get_changefeed(conn, 'backlog', ChangeFeed.DELETE)
    changefeed.outqueue = outpipe
    changefeed.run_forever()

    assert outpipe.get()['msg'] == 'seems like we have a delete here'
    assert outpipe.qsize() == 0

def test_changefeed_prefeed(mock_cursor_next, mock_changefeed_data):
    from bigchaindb.backend import get_changefeed, connect
    from bigchaindb.backend.changefeed import ChangeFeed

    conn = connect()
    mock_cursor_next.side_effect = [mock.DEFAULT] + mock_changefeed_data

    outpipe = Pipe()
    changefeed = get_changefeed(conn, 'backlog', ChangeFeed.INSERT,
                                prefeed=[1, 2, 3])
    changefeed.outqueue = outpipe
    changefeed.run_forever()

    assert outpipe.qsize() == 4

def test_changefeed_insert(mock_cursor_next, mock_changefeed_data):
    from bigchaindb.backend import get_changefeed, connect
    from bigchaindb.backend.changefeed import ChangeFeed

    # setup connection and mocks
    conn = connect()
    # mock the `next` method of the cursor to return the mocked data
    mock_cursor_next.side_effect = [mock.DEFAULT] + mock_changefeed_data

    outpipe = Pipe()
    changefeed = get_changefeed(conn, 'backlog', ChangeFeed.INSERT)
    changefeed.outqueue = outpipe
    changefeed.run_forever()

    assert outpipe.get()['msg'] == 'seems like we have an insert here'
    assert outpipe.qsize() == 0

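# The changefeed tests in this file take `mock_cursor_next`, `mock_cursor_find_one`
# and `mock_changefeed_data` as fixtures defined elsewhere (e.g. in a conftest).
# Below is a minimal, hypothetical sketch of what `mock_changefeed_data` could look
# like, inferred from the assertions above: MongoDB-oplog-style records with an op
# code ('i' insert, 'd' delete, 'u' update), the affected document in 'o', and an
# update selector in 'o2'. The real fixture may carry extra fields (e.g. 'ts').
import pytest


@pytest.fixture
def mock_changefeed_data():
    return [
        {'op': 'i',
         'o': {'_id': '', 'msg': 'seems like we have an insert here'}},
        {'op': 'd',
         'o': {'msg': 'seems like we have a delete here'}},
        {'op': 'u',
         'o': {'msg': 'seems like we have an update here'},
         'o2': {'_id': 'some-id'}},
    ]
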
def create_pipeline():
    testPipline = TestPipline()

    pipeline = Pipeline([
        Pipe(maxsize=5000),
        Node(testPipline.filter_tx, fraction_of_cores=5),
        Node(testPipline.create),
    ])

    return pipeline

def test_changefeed_multiple_operations(mock_cursor_next, mock_cursor_find_one,
                                        mock_changefeed_data):
    from bigchaindb.backend import get_changefeed, connect
    from bigchaindb.backend.changefeed import ChangeFeed

    conn = connect()
    mock_cursor_next.side_effect = [mock.DEFAULT] + mock_changefeed_data
    mock_cursor_find_one.return_value = mock_changefeed_data[2]['o']

    outpipe = Pipe()
    changefeed = get_changefeed(conn, 'backlog',
                                ChangeFeed.INSERT | ChangeFeed.UPDATE)
    changefeed.outqueue = outpipe
    changefeed.run_forever()

    assert outpipe.get()['msg'] == 'seems like we have an insert here'
    assert outpipe.get()['msg'] == 'seems like we have an update here'
    assert outpipe.qsize() == 0

def test_changefeed_update(mock_cursor_next, mock_cursor_find_one,
                           mock_changefeed_data):
    from bigchaindb.backend import get_changefeed, connect
    from bigchaindb.backend.changefeed import ChangeFeed

    conn = connect()
    mock_cursor_next.side_effect = [mock.DEFAULT] + mock_changefeed_data
    mock_cursor_find_one.return_value = mock_changefeed_data[2]['o']

    outpipe = Pipe()
    changefeed = get_changefeed(conn, 'backlog', ChangeFeed.UPDATE)
    changefeed.outqueue = outpipe
    changefeed.run_forever()

    assert outpipe.get()['msg'] == 'seems like we have an update here'
    assert outpipe.qsize() == 0
    mock_cursor_find_one.assert_called_once_with(
        {'_id': mock_changefeed_data[2]['o']}, {'_id': False})

def test_full_pipeline(monkeypatch, user_pk):
    from bigchaindb.backend import query
    from bigchaindb.models import Transaction

    CONFIG = {
        'keyring': ['aaa', 'bbb'],
        'backlog_reassign_delay': 0.01
    }
    config_utils.update_config(CONFIG)
    b = Bigchain()

    original_txs = {}
    original_txc = []

    monkeypatch.setattr('time.time', lambda: 1)

    for i in range(100):
        tx = Transaction.create([b.me], [([user_pk], 1)],
                                metadata={'msg': random.random()})
        tx = tx.sign([b.me_private])
        original_txc.append(tx.to_dict())
        b.write_transaction(tx)

    original_txs = list(query.get_stale_transactions(b.connection, 0))
    original_txs = {tx['id']: tx for tx in original_txs}
    assert len(original_txs) == 100

    monkeypatch.undo()

    inpipe = Pipe()
    # Each time the StaleTransactionMonitor pipeline runs, it reassigns
    # all eligible transactions. Passing this inpipe prevents that from
    # taking place more than once.
    inpipe.put(())
    outpipe = Pipe()

    pipeline = stale.create_pipeline(backlog_reassign_delay=1, timeout=1)
    pipeline.setup(indata=inpipe, outdata=outpipe)
    pipeline.start()

    # to terminate
    for _ in range(100):
        outpipe.get()

    pipeline.terminate()

    assert len(list(query.get_stale_transactions(b.connection, 0))) == 100
    reassigned_txs = list(query.get_stale_transactions(b.connection, 0))

    # check that every assignment timestamp has increased, and every tx
    # has a new assignee
    for reassigned_tx in reassigned_txs:
        assert reassigned_tx['assignment_timestamp'] > \
            original_txs[reassigned_tx['id']]['assignment_timestamp']
        assert reassigned_tx['assignee'] != \
            original_txs[reassigned_tx['id']]['assignee']

def indata_pipeline_outdata():
    from multipipes import Pipeline, Pipe, Node

    indata = Pipe()
    outdata = Pipe()

    def divide(a, b):
        return a / b

    def inc(n):
        return n + 1

    p = Pipeline([
        Node(divide),
        Node(inc, fraction_of_cores=1),
    ])

    p.setup(indata=indata, outdata=outdata)
    return (indata, p, outdata)

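# A minimal usage sketch for the helper above (the function name and the values
# fed in are illustrative only). It assumes multipipes unpacks tuple items from
# the input pipe into positional arguments, the same behaviour the stale-transaction
# test above relies on when it does `inpipe.put(())`.
def demo_indata_pipeline_outdata():
    indata, pipeline, outdata = indata_pipeline_outdata()
    pipeline.start()

    # (4, 2) is unpacked into divide(4, 2) -> 2.0, then inc(2.0) -> 3.0
    indata.put((4, 2))
    assert outdata.get() == 3.0

    pipeline.terminate()
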
def test_full_pipeline(b, user_pk):
    from bigchaindb.models import Block, Transaction
    from bigchaindb.pipelines.block import create_pipeline

    outpipe = Pipe()
    pipeline = create_pipeline()
    pipeline.setup(outdata=outpipe)
    inpipe = pipeline.items[0]

    # include myself here, so that some tx are actually assigned to me
    b.nodes_except_me = [b.me, 'aaa', 'bbb', 'ccc']
    number_assigned_to_others = 0
    for i in range(100):
        tx = Transaction.create([b.me], [([user_pk], 1)],
                                metadata={'msg': random.random()})
        tx = tx.sign([b.me_private])
        tx = tx.to_dict()

        # simulate write_transaction
        tx['assignee'] = random.choice(b.nodes_except_me)
        if tx['assignee'] != b.me:
            number_assigned_to_others += 1
        tx['assignment_timestamp'] = time.time()
        inpipe.put(tx)

    assert inpipe.qsize() == 100

    pipeline.start()
    time.sleep(2)
    pipeline.terminate()

    block_doc = outpipe.get()
    chained_block = b.get_block(block_doc.id)
    chained_block = Block.from_dict(chained_block)

    block_len = len(block_doc.transactions)
    assert chained_block == block_doc
    assert number_assigned_to_others == 100 - block_len

def create_pipeline():
    """Create and return the pipeline of operations to be distributed
    on different processes."""

    block_pipeline = BlockPipeline()

    pipeline = Pipeline([
        Pipe(maxsize=1000),
        Node(block_pipeline.filter_tx),
        Node(block_pipeline.validate_tx, fraction_of_cores=1),
        Node(block_pipeline.create, timeout=1),
        Node(block_pipeline.write),
        Node(block_pipeline.delete_tx),
    ])

    return pipeline

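# A hedged sketch of how a pipeline like the one above can be attached to the
# backlog changefeed, following the same pattern as the changefeed tests and the
# election test in this file. The function name and the INSERT | UPDATE flag
# combination are illustrative; the real bigchaindb.pipelines.block module may
# wire things up differently.
def start_block_pipeline_sketch():
    from bigchaindb.backend import connect, get_changefeed
    from bigchaindb.backend.changefeed import ChangeFeed

    conn = connect()
    changefeed = get_changefeed(conn, 'backlog',
                                ChangeFeed.INSERT | ChangeFeed.UPDATE)

    pipeline = create_pipeline()
    pipeline.setup(indata=changefeed)
    pipeline.start()
    return pipeline
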
def test_valid_block_voting_with_create_transaction(b, genesis_block,
                                                    monkeypatch):
    from bigchaindb.backend import query
    from bigchaindb.common import crypto, utils
    from bigchaindb.models import Transaction
    from bigchaindb.pipelines import vote

    # create a `CREATE` transaction
    test_user_priv, test_user_pub = crypto.generate_key_pair()
    tx = Transaction.create([b.me], [([test_user_pub], 1)])
    tx = tx.sign([b.me_private])

    monkeypatch.setattr('time.time', lambda: 1111111111)
    block = b.create_block([tx])
    block_dict = decouple_assets(b, block)

    inpipe = Pipe()
    outpipe = Pipe()

    vote_pipeline = vote.create_pipeline()
    vote_pipeline.setup(indata=inpipe, outdata=outpipe)

    inpipe.put(block_dict)
    vote_pipeline.start()
    vote_out = outpipe.get()
    vote_pipeline.terminate()

    vote_rs = query.get_votes_by_block_id_and_voter(b.connection,
                                                    block.id, b.me)
    vote_doc = vote_rs.next()

    assert vote_out['vote'] == vote_doc['vote']
    assert vote_doc['vote'] == {'voting_for_block': block.id,
                                'previous_block': genesis_block.id,
                                'is_block_valid': True,
                                'invalid_reason': None,
                                'timestamp': '1111111111'}

    serialized_vote = utils.serialize(vote_doc['vote']).encode()
    assert vote_doc['node_pubkey'] == b.me
    assert crypto.PublicKey(b.me).verify(serialized_vote,
                                         vote_doc['signature']) is True

def test_invalid_content_in_tx_in_block_voting(monkeypatch, b,
                                               user_pk, genesis_block):
    from bigchaindb.backend import query
    from bigchaindb.common import crypto, utils
    from bigchaindb.models import Transaction
    from bigchaindb.pipelines import vote

    inpipe = Pipe()
    outpipe = Pipe()

    monkeypatch.setattr('time.time', lambda: 1111111111)

    vote_pipeline = vote.create_pipeline()
    vote_pipeline.setup(indata=inpipe, outdata=outpipe)

    # NOTE: `tx` is invalid, because its content does not correspond to its id
    tx = Transaction.create([b.me], [([b.me], 1)])
    tx = tx.sign([b.me_private])
    block = b.create_block([tx]).to_dict()
    block['block']['transactions'][0]['id'] = 'an invalid tx id'

    inpipe.put(block)
    vote_pipeline.start()
    vote_out = outpipe.get()
    vote_pipeline.terminate()

    vote_rs = query.get_votes_by_block_id_and_voter(b.connection,
                                                    block['id'], b.me)
    vote_doc = vote_rs.next()

    assert vote_out['vote'] == vote_doc['vote']
    assert vote_doc['vote'] == {'voting_for_block': block['id'],
                                'previous_block': genesis_block.id,
                                'is_block_valid': False,
                                'invalid_reason': None,
                                'timestamp': '1111111111'}

    serialized_vote = utils.serialize(vote_doc['vote']).encode()
    assert vote_doc['node_pubkey'] == b.me
    assert crypto.PublicKey(b.me).verify(serialized_vote,
                                         vote_doc['signature']) is True

def test_valid_block_voting_multiprocessing(b, genesis_block, monkeypatch):
    from bigchaindb.backend import query
    from bigchaindb.common import crypto, utils
    from bigchaindb.pipelines import vote

    inpipe = Pipe()
    outpipe = Pipe()

    monkeypatch.setattr('time.time', lambda: 1111111111)

    vote_pipeline = vote.create_pipeline()
    vote_pipeline.setup(indata=inpipe, outdata=outpipe)

    block = dummy_block(b)
    block_dict = decouple_assets(b, block)

    inpipe.put(block_dict)
    vote_pipeline.start()
    vote_out = outpipe.get()
    vote_pipeline.terminate()

    vote_rs = query.get_votes_by_block_id_and_voter(b.connection,
                                                    block.id, b.me)
    vote_doc = vote_rs.next()

    assert vote_out['vote'] == vote_doc['vote']
    assert vote_doc['vote'] == {'voting_for_block': block.id,
                                'previous_block': genesis_block.id,
                                'is_block_valid': True,
                                'invalid_reason': None,
                                'timestamp': '1111111111'}

    serialized_vote = utils.serialize(vote_doc['vote']).encode()
    assert vote_doc['node_pubkey'] == b.me
    assert crypto.PublicKey(b.me).verify(serialized_vote,
                                         vote_doc['signature']) is True

def test_invalid_block_voting(monkeypatch, b, user_pk, genesis_block):
    from bigchaindb.backend import query
    from bigchaindb.common import crypto, utils
    from bigchaindb.pipelines import vote

    inpipe = Pipe()
    outpipe = Pipe()

    monkeypatch.setattr('time.time', lambda: 1111111111)

    vote_pipeline = vote.create_pipeline()
    vote_pipeline.setup(indata=inpipe, outdata=outpipe)

    block = dummy_block(b).to_dict()
    block['block']['id'] = 'this-is-not-a-valid-hash'

    inpipe.put(block)
    vote_pipeline.start()
    vote_out = outpipe.get()
    vote_pipeline.terminate()

    vote_rs = query.get_votes_by_block_id_and_voter(b.connection,
                                                    block['id'], b.me)
    vote_doc = vote_rs.next()

    assert vote_out['vote'] == vote_doc['vote']
    assert vote_doc['vote'] == {'voting_for_block': block['id'],
                                'previous_block': genesis_block.id,
                                'is_block_valid': False,
                                'invalid_reason': None,
                                'timestamp': '1111111111'}

    serialized_vote = utils.serialize(vote_doc['vote']).encode()
    assert vote_doc['node_pubkey'] == b.me
    assert crypto.PublicKey(b.me).verify(serialized_vote,
                                         vote_doc['signature']) is True

def test_valid_block_voting_with_transfer_transactions(monkeypatch, b,
                                                        genesis_block):
    from bigchaindb.backend import query
    from bigchaindb.common import crypto, utils
    from bigchaindb.models import Transaction
    from bigchaindb.pipelines import vote

    # create a `CREATE` transaction
    test_user_priv, test_user_pub = crypto.generate_key_pair()
    tx = Transaction.create([b.me], [([test_user_pub], 1)])
    tx = tx.sign([b.me_private])

    monkeypatch.setattr('time.time', lambda: 1000000000)
    block = b.create_block([tx])
    b.write_block(block)

    # create a `TRANSFER` transaction
    test_user2_priv, test_user2_pub = crypto.generate_key_pair()
    tx2 = Transaction.transfer(tx.to_inputs(), [([test_user2_pub], 1)],
                               asset_id=tx.id)
    tx2 = tx2.sign([test_user_priv])

    monkeypatch.setattr('time.time', lambda: 2000000000)
    block2 = b.create_block([tx2])
    b.write_block(block2)

    inpipe = Pipe()
    outpipe = Pipe()

    vote_pipeline = vote.create_pipeline()
    vote_pipeline.setup(indata=inpipe, outdata=outpipe)

    vote_pipeline.start()
    inpipe.put(block.to_dict())
    time.sleep(1)
    inpipe.put(block2.to_dict())
    vote_out = outpipe.get()
    vote2_out = outpipe.get()
    vote_pipeline.terminate()

    vote_rs = query.get_votes_by_block_id_and_voter(b.connection,
                                                    block.id, b.me)
    vote_doc = vote_rs.next()
    assert vote_out['vote'] == vote_doc['vote']
    assert vote_doc['vote'] == {'voting_for_block': block.id,
                                'previous_block': genesis_block.id,
                                'is_block_valid': True,
                                'invalid_reason': None,
                                'timestamp': '2000000000'}

    serialized_vote = utils.serialize(vote_doc['vote']).encode()
    assert vote_doc['node_pubkey'] == b.me
    assert crypto.PublicKey(b.me).verify(serialized_vote,
                                         vote_doc['signature']) is True

    vote2_rs = query.get_votes_by_block_id_and_voter(b.connection,
                                                     block2.id, b.me)
    vote2_doc = vote2_rs.next()
    assert vote2_out['vote'] == vote2_doc['vote']
    assert vote2_doc['vote'] == {'voting_for_block': block2.id,
                                 'previous_block': block.id,
                                 'is_block_valid': True,
                                 'invalid_reason': None,
                                 'timestamp': '2000000000'}

    serialized_vote2 = utils.serialize(vote2_doc['vote']).encode()
    assert vote2_doc['node_pubkey'] == b.me
    assert crypto.PublicKey(b.me).verify(serialized_vote2,
                                         vote2_doc['signature']) is True

def test_voter_checks_for_previous_vote(monkeypatch, b):
    from bigchaindb.backend import query
    from bigchaindb.pipelines import vote

    inpipe = Pipe()
    outpipe = Pipe()

    monkeypatch.setattr('time.time', lambda: 1000000000)
    monkeypatch.setattr('time.time', lambda: 1000000020)
    block_1 = dummy_block(b)
    inpipe.put(block_1.to_dict())
    assert len(list(query.get_votes_by_block_id(b.connection, block_1.id))) == 0

    vote_pipeline = vote.create_pipeline()
    vote_pipeline.setup(indata=inpipe, outdata=outpipe)
    vote_pipeline.start()

    # wait for the result
    outpipe.get()

    # queue block for voting AGAIN
    monkeypatch.setattr('time.time', lambda: 1000000030)
    inpipe.put(block_1.to_dict())

    # queue another block
    monkeypatch.setattr('time.time', lambda: 1000000040)
    block_2 = dummy_block(b)
    inpipe.put(block_2.to_dict())

    # wait for the result of the new block
    outpipe.get()

    vote_pipeline.terminate()

    assert len(list(query.get_votes_by_block_id(b.connection, block_1.id))) == 1
    assert len(list(query.get_votes_by_block_id(b.connection, block_2.id))) == 1