def __init__(self, blocksize, orientation):
    """Build the 2x2 square piece; it is rotation-symmetric, so *orientation* is unused."""
    Shape.__init__(self, blocksize)
    # Four '1' blocks arranged in a square at columns 7-8, rows 0-1.
    for col, row in ((7, 0), (8, 0), (7, 1), (8, 1)):
        self.add(Block(blocksize, '1', (blocksize * col, blocksize * row)))
def receive(obj):
    """Handle an incoming wire object: a transaction, a query message, or a block.

    The decoded RLP item count distinguishes the kinds (8 = transaction,
    2 = [command, args] message, 3 = block).  Invalid objects are dropped
    silently; query messages return their result or None.
    """
    d = rlp.decode(obj)
    # Is transaction
    if len(d) == 8:
        tx = Transaction(obj)
        # Drop transactions the sender cannot afford or with a stale nonce.
        if mainblk.get_balance(tx.sender) < tx.value + tx.fee:
            return
        if mainblk.get_nonce(tx.sender) != tx.nonce:
            return
        # BUGFIX: was keyed/broadcast with the undefined name `blk`; the
        # serialized transaction `obj` is what gets pooled and relayed.
        txpool[bin_sha256(obj)] = obj
        broadcast(obj)
    # Is message
    elif len(d) == 2:
        if d[0] == 'getobj':
            # Look in the object db first, then the state db.
            try:
                return db.Get(d[1][0])
            except:
                try:
                    return mainblk.state.db.get(d[1][0])
                except:
                    return None
        elif d[0] == 'getbalance':
            try:
                return mainblk.state.get_balance(d[1][0])
            except:
                return None
        elif d[0] == 'getcontractroot':
            try:
                return mainblk.state.get_contract(d[1][0]).root
            except:
                return None
        elif d[0] == 'getcontractsize':
            try:
                return mainblk.state.get_contract(d[1][0]).get_size()
            except:
                return None
        elif d[0] == 'getcontractstate':
            try:
                return mainblk.state.get_contract(d[1][0]).get(d[1][1])
            except:
                return None
    # Is block
    elif len(d) == 3:
        blk = Block(obj)
        # BUGFIX: the original read `block.prevhash` / `block.uncles`;
        # `block` is undefined here -- the decoded block is `blk`.
        p = blk.prevhash
        try:
            parent = Block(db.Get(p))
        except:
            return
        uncles = blk.uncles
        # Every referenced uncle must already be known locally.
        for s in uncles:
            try:
                sib = db.Get(s)
            except:
                return
        processblock.eval(parent, blk.transactions, blk.timestamp, blk.coinbase)
        # Reject blocks whose claimed post-state/difficulty/number disagree
        # with what we computed from the parent.
        if parent.state.root != blk.state.root:
            return
        if parent.difficulty != blk.difficulty:
            return
        if parent.number != blk.number:
            return
        db.Put(blk.hash(), blk.serialize())
def test():
    """Exercise sharded_fork_choice() on a small hand-built block tree."""
    g0, g1, g2 = Block(0), Block(1), Block(2)
    a = Block(1, g1)
    b = Block(1, g1)
    c = Block(1, b)
    d = Block(1, c)
    e = Block(1, c)
    f = Block(1, e)
    # d carries the dominant weight, so it should win shard 1's fork choice.
    weighted_blocks = {g0: 1, b: 1, g2: 1, f: 1, d: 100}
    starting_blocks = {0: g0, 1: g1, 2: g2}
    blocks = [g0, g1, g2, a, b, c, d, e, f]
    result = sharded_fork_choice(starting_blocks, blocks, weighted_blocks)
    for shard in (0, 1, 2):
        print(str(shard), result[shard])
    print("height f", f.height)
    return result
def __init__(self, blocksize, orientation):
    """Construct the T-shaped piece, then rotate it into *orientation*."""
    Shape.__init__(self, blocksize)
    unit = blocksize
    # Centre block at column 7, row 1 is the rotation axis.
    self.axis = Block(unit, '6', (unit * 7, unit))
    left = Block(unit, '6', (unit * 6, unit))
    right = Block(unit, '6', (unit * 8, unit))
    below = Block(unit, '6', (unit * 7, unit * 2))
    for piece in (left, self.axis, right, below):
        self.add(piece)
    # Rotate step-by-step until the requested orientation is reached.
    while self.orientation != orientation:
        self._do_rotate(self)
def __init__(self, blocksize, orientation):
    """Construct the four-in-a-row piece (horizontal), rotating once for odd
    (vertical) orientations."""
    Shape.__init__(self, blocksize)
    unit = blocksize
    # Row 1, columns 6..9; the block at column 7 is the rotation axis.
    self.axis = Block(unit, '0', (unit * 7, unit))
    for col in (6, 7, 8, 9):
        piece = self.axis if col == 7 else Block(unit, '0', (unit * col, unit))
        self.add(piece)
    # Odd orientations are vertical: a single rotation gets there.
    if orientation % 2:
        self._do_rotate(self)
def __init__(self, blocksize, orientation):
    """Construct the piece in its initial 'L' layout (orientation 3) and
    rotate until *orientation* is reached."""
    Shape.__init__(self, blocksize)
    self.orientation = 3  # the coordinates below correspond to orientation 3
    unit = blocksize
    self.axis = Block(unit, '4', (unit * 7, unit))
    top = Block(unit, '4', (unit * 7, 0))
    bottom = Block(unit, '4', (unit * 7, unit * 2))
    foot = Block(unit, '4', (unit * 8, unit * 2))
    for piece in (top, self.axis, bottom, foot):
        self.add(piece)
    while self.orientation != orientation:
        self._do_rotate(self)
def create_block(self):
    """SimPy process: after a 1-tick delay, pack pooled transactions into a
    new Block (up to the gas limit), record it locally, and broadcast it."""
    yield env.timeout(1)
    print("starting block formation")
    if len(self.txpool) != 0:
        # Move transactions from txpool into pendingpool until the
        # accumulated gas would exceed the block gas limit.
        for each_tx in self.txpool:
            self.current_gas += each_tx.gas
            self.current_size += each_tx.size
            if self.current_gas < self.block_gas_limit:
                self.pendingpool.append(self.txpool.pop(0))
            else:
                break
    global BLOCKID
    BLOCKID += 1
    self.prev_block += 1
    block = Block(self.current_size, self.prev_block, self.pendingpool,
                  self.nodeID, self.prev_hash)
    self.prev_hash = block.hash
    print('%d, %d, Created, block, %d,%d' % (env.now, self.nodeID, block.id, block.size))
    logger.debug('%d, %d, Created, block,%d,%d' % (env.now, self.nodeID, block.id, block.size))
    print("hash of block is %s" % block.hash)
    self.block_list.insert(0, block)
    block_stability_logger.info("%s,%d,%d,created,%d" % (env.now, self.nodeID, block.id, block.size))
    logger.info("No of blocks in node %d is %d" % (self.nodeID, len(self.block_list)))
    self.known_blocks.append(block.id)
    # BUGFIX: removed leftover debugger breakpoint
    # (`import ipdb; ipdb.set_trace()`) that froze the simulation here.
    self.broadcaster(block, self.nodeID, 1, 0)
    # Reset accumulators for the next block.
    self.current_gas = 0
    self.current_size = 0
    self.pendingpool = []
def parse(s, parent_block):
    """Parse an nginx-style config string *s*, filling *parent_block* with
    comments, key/value options, and nested blocks (recursing for nesting).

    Raises Exception on an unterminated option or an unbalanced bracket.
    """
    config = copy.copy(s)
    # pos: scan cursor; brackets_level: current {...} nesting depth;
    # param_start: offset where the statement being scanned begins.
    pos, brackets_level, param_start = 0, 0, 0
    while pos < len(config):
        # '#' at nesting level 0 starts a line comment.
        if config[pos] == '#' and brackets_level == 0:
            re_sharp_comment = re.search(
                '(?P<offset>[\s\n]*)#(?P<comment>.*)$', config, re.M)
            sharp_comment = re_sharp_comment.groupdict()
            parent_block.add_comment(
                Comment(sharp_comment['offset'], sharp_comment['comment']))
            # Drop the consumed text and restart scanning from the remainder.
            config = config[re_sharp_comment.end():]
            pos, param_start = 0, 0
            continue
        # ';' at level 0 terminates a simple "name value;" option.
        if config[pos] == ';' and brackets_level == 0:
            re_option = re.search(
                '\s*(?P<param_name>\w+)\s*(?P<param_options>.*?);',
                config[param_start:], re.S)
            if not re_option:
                raise Exception('Wrong option')
            option = re_option.groupdict()
            # Collapse internal whitespace/newlines in the option value.
            parent_block[option['param_name']] = KeyValueOption(
                re.sub('[ \n]+', ' ', option['param_options']))
            config = config[re_option.end():]
            pos, param_start = 0, 0
            continue
        if config[pos] == '{':
            brackets_level += 1
        elif config[pos] == '}':
            brackets_level -= 1
            # Closing the outermost bracket: a complete "name {...}" block.
            if brackets_level == 0 and param_start is not None:
                re_block = re.search(
                    '(?P<param_name>\w+)\s*(?P<param_options>.*)\s*{(\n){0,1}(?P<block>(.|\n)*)}',
                    config[param_start:pos + 1],
                )
                block = re_block.groupdict()
                # "location" blocks get a dedicated container type.
                if block['param_name'].lower() == 'location':
                    new_block = Location(block['param_options'])
                    parent_block.add_location(new_block)
                else:
                    new_block = Block()
                    parent_block[block['param_name']] = new_block
                # Recurse into the block body, if any.
                if block['block']:
                    parse(block['block'], new_block)
                config = config[re_block.end():]
                pos, param_start = 0, 0
                continue
        pos += 1
    if brackets_level != 0:
        raise Exception('Not closed bracket')
def build(self):
    """Load the level layout bitmap, spawn a block sprite per coloured pixel,
    and announce the finished level via a LevelBuildEvent."""
    layout = load_image('assets/level1layout.png')
    level = Background((0, 0), 'assets/level1.png', 1, 1)
    background = Background((0, 0), 'assets/background1.png', 15, 15)
    # Alternative level assets kept for manual switching during development:
    #layout = load_image('henesyslayout.png')
    #level = Background((0,0), 'henesys.png', 1,1)
    #layout = load_image('citylevellayout.png')
    #level = Background((0,0), 'citylevel.png',1, 1)
    #layout = load_image('level3layout.png')
    #level = Background((0, 0), 'level3.png', 1, 1)
    #background = Background((0, 0), 'background3.png', 20, 20)
    self.backgrounds = [background, level]
    width, height = layout.get_size()
    self.blocks = []
    from blocks import Block, Platform, Step
    # Each layout pixel maps to an 8x8 world cell; the pixel colour selects
    # the sprite type: black -> Block, blue -> Platform, red -> Step.
    for y in xrange(height):
        for x in xrange(width):
            pos = x * 8, y * 8
            if layout.get_at((x, y)) == (0, 0, 0, 255):
                self.blocks.append(Block(pos))
            elif layout.get_at((x, y)) == (0, 0, 255, 255):
                self.blocks.append(Platform(pos))
            elif layout.get_at((x, y)) == (255, 0, 0, 255):
                self.blocks.append(Step(pos))
    event = LevelBuildEvent(layout, self.backgrounds)
    self.evManager.Post(event)
def create_blocks(sets, screen, blocks, bird):
    """Create the column (pipe) group: 15 top/bottom block pairs spaced out
    to the right of the bird, with a 150px gap between each pair."""
    # Probe one block just to learn the sprite width.
    probe = Block(sets, screen)
    block_width = probe.rect.width
    for pair_index in range(15):
        upper = Block(sets, screen)
        lower = Block(sets, screen)
        upper.x = 4 * bird.rect.x + 2.5 * block_width * pair_index
        lower.x = 4 * bird.rect.x + 2.5 * block_width * pair_index
        # Fixed gap height; random.randint(150, 350) was an alternative.
        upper.y = 200
        upper.rect.x = upper.x
        lower.rect.x = lower.x
        upper.rect.bottom = upper.y
        # Leave a 150-pixel corridor for the bird to fly through.
        lower.rect.top = upper.rect.bottom + 150
        blocks.add(upper)
        blocks.add(lower)
def test_kv_block(self):
    """ Tests key-value option in block """
    block = Block()
    block.kv = KeyValueOption('value')
    rendered = block.render('kbv_name')
    self.assertEqual(rendered, '\nkbv_name {\n kv value;\n}')
def create_blocks():
    """Spawn five blocks with random colours/values, starting above the
    screen and stepping diagonally by the configured increments."""
    global blocks_group
    colors = rand.get_random_color(5)
    values = rand.get_random_block_numbers(time)
    pos_x, pos_y = 0, -80  # first block starts off-screen at the top-left
    for idx in range(5):
        new_block = Block(colors[idx], block_width, block_height,
                          pos_x, pos_y, values[idx])
        pos_x += block_x_increment
        pos_y += block_y_increment
        blocks_group.add(new_block)
def __init__(self):
    """Set up the player sprite: image, position, movement state, and a
    temporary set of test blocks."""
    # Call the parent's constructor
    pygame.sprite.Sprite.__init__(self)
    # get image
    self.image = pygame.image.load('pics/bro_jump3.gif').convert()
    self.rect = self.image.get_rect()
    # Scale the sprite up by MARIO_MULTIPLIER, then refresh the rect so it
    # matches the scaled image.
    self.image = pygame.transform.scale(
        self.image,
        (int(self.rect.width * constants.MARIO_MULTIPLIER),
         int(self.rect.height * constants.MARIO_MULTIPLIER)))
    self.rect = self.image.get_rect()
    self.rect.x = constants.PLAYER_START_POS
    self.rect.y = constants.SCREEN_HEIGHT
    # speed vector
    self.change_x = 0
    self.change_y = 0
    # state of player
    self.state = 's'  # stop - right - left - jump
    self.pos_state = 's'  # steady - jump - fall
    # prev coordinates
    self.prev_rect_x = 0
    # viewport
    self.end_of_viewport = 1
    # create blocks , remove after testing
    self.block_sprite_list = pygame.sprite.Group()
    self.block1 = Block(250, 490)
    self.block_sprite_list.add(self.block1)
    self.block2 = Block(800, 500)
    self.block_sprite_list.add(self.block2)
    self.block3 = Block(1300, constants.SCREEN_HEIGHT - 90)
    self.block_sprite_list.add(self.block3)
    # for screen stopping
    self.sstop = 0
def new_block(ledgerfilename):
    """Generate the next block in the chain and record it in the named ledger."""
    # Ledger instance used only for filename bookkeeping.
    ledger = writer.Ledger(ledgerfilename)
    # Parsing populates the hash lists inside module chain.py.
    writer.ledger_parse(ledger.filename)
    # Index of the last existing block (length - 1).
    last = bc_l() - 1
    newblock = Block(chain.c_hash[last], chain.n_hash[last])
    chain.blockchain.append(newblock)
    nblocklist = [
        newblock.previous_hash,
        newblock.current_hash,
        newblock.next_hash,
    ]
    #TODO: concensus check to eliminate duplicate blocks
    # Persist the new block's hashes to the ledger.
    writer.ledger_constructor(ledger.filename, nblocklist)
def get_tile_list(self, layer):
    '''given a map & specified layer, returns a Group of Block sprites
    on each tile in that layer
    '''
    sprites = pygame.sprite.Group()
    # get block layer
    tile_layer = self.gamemap.get_layer_by_name(layer)
    # One Block per tile that actually has an image (gid may map to nothing).
    for col, row, gid in tile_layer:
        image = self.gamemap.get_tile_image_by_gid(gid)
        if not image:
            continue
        world_x = col * self.gamemap.tilewidth
        world_y = row * self.gamemap.tileheight
        sprites.add(Block(image, world_x, world_y))
    return sprites
def reset(self):
    """Restore the game to its initial state: clear the screen, zero the
    counters, rebuild the bordered board, and deal a fresh block."""
    self.surface.fill(backgroundcolor)
    self.point = 0
    self.level = 1
    self.count = 0
    self.time = 500
    # Visible board: one SingleBlock sprite per cell.
    self.board_show = {}
    for col in range(NCOLS):
        self.board_show[col] = [SingleBlock(self.surface, BLACK, WHITE)
                                for _ in range(NROWS)]
    # Logical board with sentinel columns (-1 and NCOLS) and a sentinel
    # bottom row, all marked occupied (1) to act as walls and floor.
    self.board = {col: [0] * (NROWS + 1) for col in range(-1, NCOLS + 1)}
    self.board[-1] = [1] * (NROWS + 1)
    self.board[NCOLS] = [1] * (NROWS + 1)
    for col in range(-1, NCOLS + 1):
        self.board[col][NROWS] = 1
    self.block = Block(self.board, self.board_show, random.choice(Blocks))
    self.block_next = random.choice(Blocks)
    self.draw()
def make_obstacles(self):
    """Adds some arbitrarily placed obstacles to a sprite.Group."""
    # Screen-edge walls: floor, left wall, right wall.
    wall_rects = [(0, 980, 1000, 20), (0, 0, 20, 1000), (980, 0, 20, 1000)]
    walls = [Block(pg.Color("chocolate"), rect) for rect in wall_rects]
    # Fixed platforms scattered around the level.
    static_rects = [(250, 780, 200, 100), (600, 880, 200, 100),
                    (20, 360, 880, 40), (950, 400, 30, 20),
                    (20, 630, 50, 20), (80, 530, 50, 20),
                    (130, 470, 200, 215), (20, 760, 30, 20),
                    (400, 740, 30, 40)]
    static = [Block(pg.Color("darkgreen"), rect) for rect in static_rects]
    # Moving platforms: (rect, travel end, axis, extra kwargs).
    moving = [MovingBlock(pg.Color("olivedrab"), (20, 740, 75, 20), 325, 0),
              MovingBlock(pg.Color("olivedrab"), (600, 500, 100, 20), 880, 0),
              MovingBlock(pg.Color("olivedrab"), (420, 430, 100, 20), 550, 1,
                          speed=3, delay=200),
              MovingBlock(pg.Color("olivedrab"), (450, 700, 50, 20), 930, 1,
                          start=930),
              MovingBlock(pg.Color("olivedrab"), (500, 700, 50, 20), 730, 0,
                          start=730),
              MovingBlock(pg.Color("olivedrab"), (780, 700, 50, 20), 895, 0,
                          speed=-1)]
    return pg.sprite.Group(walls, static, moving)
# --- Test fixtures: varnodes, instructions, and small CFGs built from them ---
insn_len = 4
reg_size = 4
# Three registers at consecutive offsets plus one 'unique' (temporary) varnode.
r1 = Varnode('register', 0, reg_size)
r2 = Varnode('register', reg_size, reg_size)
r3 = Varnode('register', reg_size * 2, reg_size)
u0 = Varnode('unique', 0, reg_size)
# insn1: u0 = r1 + r2;  insn2: r3 = u0;  insn3: r1 = u0
# (insn_addr is defined earlier in this file.)
insn1 = Instruction(insn_addr, insn_len,
                    [PcodeOp(insn_addr, 'INT_ADD', [r1, r2], u0)])
insn2 = Instruction(insn_addr, insn_len,
                    [PcodeOp(insn_addr, 'COPY', [u0], r3)])
insn3 = Instruction(insn_addr, insn_len,
                    [PcodeOp(insn_addr, 'COPY', [u0], r1)])
# Simple test CFG: block1 branches to block2 and block3; block4 follows block3.
s_block1 = Block([insn1.copy()], name='s_block_1')
s_block2 = Block([insn1.copy()], predecessor=s_block1, name='s_block_2')
s_block3 = Block([insn1.copy()], predecessor=s_block1, name='s_block_3')
s_block4 = Block([insn1.copy()], predecessor=s_block3, name='s_block_4')
# Note: deliberately listed out of creation order.
simple_cfg_blocks = [s_block2, s_block3, s_block4, s_block1]
# Complex test CFG 1: a diamond -- block4 has both block2 and block3 as
# predecessors.
c1_block1 = Block([insn1.copy(), insn2], name='c1_block_1')
c1_block2 = Block([insn1.copy()], predecessor=c1_block1, name='c1_block_2')
c1_block3 = Block([insn1.copy()], predecessor=c1_block1, name='c1_block_3')
c1_block4 = Block([insn1.copy()], predecessor=c1_block3, name='c1_block_4')
c1_block4.add_predecessor(c1_block2)
complex1_cfg_blocks = [c1_block2, c1_block3, c1_block4, c1_block1]
# Complex test CFG 2 (continues below this chunk)
c2_block1 = Block([insn1.copy(), insn2], name='c2_block_1')
def miner(self):
    '''Block creation method:
    1. For each transaction, add the gas of the transaction to the current gas.
    2. If the current gas is less than block_gas_limit, add more transactions.
    3. Else, hold that transaction and create a new block.
    4. For the new block, store its hash as previous hash, add the block to the
       known list and broadcast it to the other nodes.

    Interrupt after receiving a block from other nodes:
    If a new block is received, the mining process is interrupted. After the
    interrupt, check whether the node's previous block hash matches the
    previous hash of the received block.

    TODO: What to do if prev hash and block id do not match
    '''
    while True:
        try:
            yield env.timeout(0)
            if self.miner_flag == 1:
                # Simulated proof-of-authority mining delay.
                yield env.timeout(config["POA"]["mining_time"])
                if len(self.txpool) != 0:
                    # Fill pendingpool from txpool until the gas limit is hit.
                    for each_tx in self.txpool:
                        self.current_gas += each_tx.gas
                        self.current_size += each_tx.size
                        # Checked: done
                        if self.current_gas < self.block_gas_limit:
                            self.pendingpool.append(self.txpool.pop(0))
                        else:
                            break
                else:
                    pass  # could this pass for pending pool be pass by reference ?
                global BLOCKID
                BLOCKID += 1
                self.prev_block += 1
                block = Block(self.current_size, self.prev_block,
                              self.pendingpool, self.nodeID, self.prev_hash)
                # Chain forward: this block's hash seeds the next block.
                self.prev_hash = block.hash
                print('%d, %d, Created, block, %d,%d' %
                      (env.now, self.nodeID, block.id, block.size))
                logger.debug('%d, %d, Created, block,%d,%d' %
                             (env.now, self.nodeID, block.id, block.size))
                print("hash of block is %s" % block.hash)
                self.block_list.insert(0, block)
                block_stability_logger.info(
                    "%s,%d,%d,created,%d" %
                    (env.now, self.nodeID, block.id, block.size))
                network_stability_calc(env, 'c')
                logger.info("No of blocks in node %d is %d" %
                            (self.nodeID, len(self.block_list)))
                self.known_blocks.append(block.id)
                self.broadcaster(block, self.nodeID, 1, 0)
                # Reset accumulators for the next block.
                self.current_gas = 0
                self.current_size = 0
                self.pendingpool = []
            else:
                # Not the current miner: idle briefly before re-checking.
                yield env.timeout(0.1)
        except simpy.Interrupt:
            # A block arrived from another node while mining.
            logger.debug("%d,%d, interrupted, block, %d " %
                         (env.now, self.nodeID, self.intr_data.id))
            # Accept the block only if it extends our current chain tip.
            if self.prev_hash == self.intr_data.prev_hash:
                # Compare the block's transactions with our pending set.
                block_set = set(self.intr_data.transactions)
                node_set = set(self.pendingpool)
                yield env.timeout(config['block_verify_time'])
                if block_set != node_set:
                    block_extra = block_set - node_set
                    node_extra = node_set - block_set
                    # add item to known tx and transaction pool
                    # Todo : tx id could be repeated in the known tx.
                    #        Use set for known_tx
                    self.known_tx.extend(list(block_extra))
                    # move mismatched tx from pendingpool to the txpool
                    self.temp_trans = [
                        each for each in self.pendingpool
                        if each.id in node_extra
                    ]
                    self.txpool.extend(self.temp_trans)
                self.block_list.insert(0, self.intr_data)
                self.prev_hash = self.intr_data.hash
                # Random settling delay before acknowledging receipt.
                wait = random.randint(0, 45)
                yield self.env.timeout(wait)
                block_stability_logger.info(
                    "%s,%d,%d,received" %
                    (env.now, self.nodeID, self.intr_data.id))
                network_stability_calc(env, 'r')
                logger.info("No of blocks in node %d is %d" %
                            (self.nodeID, len(self.block_list)))
                self.pendingpool = []
                self.intr_data = None
                self.current_gas = 0
            else:
                # Out of sync: adopt the received block's hash as the new tip.
                self.prev_hash = self.intr_data.hash
                block_stability_logger.info(
                    "%s,%d,%d,outofsync" %
                    (env.now, self.nodeID, self.intr_data.id))
def make_block(self, shard_ID, mempools, drain_amount, genesis_blocks,
               TTL=TTL_CONSTANT):
    """Build and return the next Block for *shard_ID*.

    Drains up to *drain_amount* transactions from the shard's mempool,
    pulls sources/received messages from neighbour fork choices, handles
    shard-tree switch messages (become-a-parent / change-parent), applies
    the EVM state transition, and routes outgoing payloads.
    NOTE(review): the *genesis_blocks* parameter is immediately shadowed by
    self.genesis_blocks() below -- the argument value is never used.
    """
    genesis_blocks = self.genesis_blocks()
    # RUN FORK CHOICE RULE ON SELF
    # will only have fork choices for parent and children
    my_fork_choice = self.make_fork_choice(shard_ID)
    # --------------------------------------------------------------------#
    # GET PREVBLOCK POINTER FROM FORK CHOICE
    prevblock = my_fork_choice
    # --------------------------------------------------------------------#
    # EXTEND THE TRANSACTION LOG FROM THE MEMPOOL
    prev_txn_log = prevblock.txn_log
    new_txn_log = copy.copy(prev_txn_log)
    data = []
    num_prev_txs = len(prev_txn_log)
    # Neighbours in the shard tree: our parent (if any) plus all children.
    neighbor_shard_IDs = []
    if my_fork_choice.parent_ID is not None:
        neighbor_shard_IDs.append(my_fork_choice.parent_ID)
    for IDs in my_fork_choice.child_IDs:
        neighbor_shard_IDs.append(IDs)
    # BUILD SOURCES
    # Default every shard's source to its genesis block, then override
    # neighbours with their current fork choice.
    sources = {ID: genesis_blocks[ID] for ID in SHARD_IDS}
    for ID in neighbor_shard_IDs:
        if ID == shard_ID:
            continue
        #if len(prevblock.received_log.log[ID]):
        #    assert prevblock.received_log.log[ID][-1].base.shard_ID == ID
        #    sources[ID] = prevblock.received_log.log[ID][-1].base
        neighbor_fork_choice = self.make_fork_choice(ID)
        # SOURCES = FORK CHOICE (except for self)
        sources[ID] = neighbor_fork_choice
        # Sources must be monotonic: the new source must extend the old one.
        assert sources[ID].is_in_chain(
            prevblock.sources[ID]
        ), "Sources inconsistent, new block shard id: %s, source from shard: %s, heights: %s over %s, new_source_parent_ID: %s, old_source_parent_ID: %s, first_children: %s, three_parent: %s" % (
            shard_ID, ID, sources[ID].height, prevblock.sources[ID].height,
            sources[ID].parent_ID, prevblock.sources[ID].parent_ID,
            self.make_fork_choice(1).child_IDs,
            self.make_fork_choice(3).parent_ID)
    #for ID in SHARD_IDS:
    #    if ID not in neighbor_shard_IDs and ID != shard_ID:
    #        sources[ID] = prevblock.sources[ID]
    # --------------------------------------------------------------------#
    # Next mempool entry is an opcode => this block performs a tree switch
    # instead of processing transactions.
    if num_prev_txs < len(mempools[shard_ID]) and 'opcode' in mempools[
            shard_ID][num_prev_txs]:
        child_to_become_parent = mempools[shard_ID][num_prev_txs][
            'child_to_become_parent']
        child_to_move_down = mempools[shard_ID][num_prev_txs][
            'child_to_move_down']
        # TODO: for swapping with root only one message will be needed
        sources = copy.copy(prevblock.sources)
        msg1 = SwitchMessage_BecomeAParent(sources[child_to_become_parent], 1,
                                           child_to_become_parent,
                                           child_to_move_down,
                                           sources[child_to_move_down])
        msg2 = SwitchMessage_ChangeParent(sources[child_to_move_down], 1,
                                          child_to_move_down,
                                          child_to_become_parent,
                                          sources[child_to_become_parent])
        # Remove the consumed opcode entry from the mempool.
        mempools[shard_ID] = mempools[shard_ID][:num_prev_txs] + mempools[
            shard_ID][num_prev_txs + 1:]
        sent_log = copy.copy(prevblock.sent_log)
        received_log = copy.copy(prevblock.received_log)
        sent_log.add_message(msg1.target_shard_ID, msg1)
        sent_log.add_message(msg2.target_shard_ID, msg2)
        ret = Block(shard_ID, prevblock, new_txn_log, sent_log, received_log,
                    sources, prevblock.vm_state)
        # The moved-down child is no longer ours; route to it via its new parent.
        ret.child_IDs.remove(child_to_move_down)
        ret.routing_table[child_to_move_down] = child_to_become_parent
        print("IMPORTANT: %s" % ret.routing_table)
        return ret
    # Drain plain transactions from the mempool, stopping early at an opcode.
    for i in range(drain_amount):
        if num_prev_txs + i < len(mempools[shard_ID]):
            new_tx = mempools[shard_ID][num_prev_txs + i]
            if 'opcode' in new_tx:
                # this is a switch message, stop processing messages
                break
            new_txn_log.append(new_tx)
            data.append(new_tx)
    # --------------------------------------------------------------------#
    # BUILD RECEIVED LOG WITH:
    received_log = MessagesLog()
    for ID in SHARD_IDS:
        if ID == shard_ID:
            continue
        if ID in neighbor_shard_IDs:
            neighbor_fork_choice = self.make_fork_choice(ID)
            # RECEIVED = SENT MESSAGES FROM FORK CHOICE
            received_log.log[ID] = copy.copy(
                neighbor_fork_choice.sent_log.log[shard_ID])
        else:
            # Non-neighbours: carry the previous block's received log forward.
            received_log.log[ID] = copy.copy(
                prevblock.received_log.log[ID])
    # --------------------------------------------------------------------#
    # PREP NEWLY RECEIVED MESSAGES IN A RECEIVEDLOG FOR EVM:
    # (new messages = suffix beyond what prevblock had already received)
    newly_received_messages = {}
    new_sent_messages = MessagesLog()
    for ID in SHARD_IDS:
        previous_received_log_size = len(prevblock.received_log.log[ID])
        newly_received_messages[ID] = received_log.log[ID][
            previous_received_log_size:]
    become_a_parent_of = None
    change_parent_to = None
    newly_received_payloads = MessagesLog()
    # First pass: detect switch messages addressed to us.
    for ID in SHARD_IDS:
        for m in newly_received_messages[ID]:
            if m.target_shard_ID == shard_ID:
                if isinstance(m, SwitchMessage_BecomeAParent):
                    become_a_parent_of = (m.new_child_ID, m.new_child_source)
                elif isinstance(m, SwitchMessage_ChangeParent):
                    change_parent_to = (m.new_parent_ID, m.new_parent_source)
    routing_table = copy.copy(prevblock.routing_table)
    new_parent_ID = prevblock.parent_ID
    # Adopt a new child: refresh its source and received/new message slices.
    if become_a_parent_of is not None:
        routing_table[become_a_parent_of[0]] = become_a_parent_of[0]
        neighbor_fork_choice = self.make_fork_choice(become_a_parent_of[0])
        sources[become_a_parent_of[0]] = neighbor_fork_choice
        ID = become_a_parent_of[0]
        received_log.log[ID] = copy.copy(
            neighbor_fork_choice.sent_log.log[shard_ID])
        previous_received_log_size = len(prevblock.received_log.log[ID])
        newly_received_messages[ID] = received_log.log[ID][
            previous_received_log_size:]
    # Switch to a new parent: same refresh for the new parent shard.
    if change_parent_to is not None:
        new_parent_ID = change_parent_to[0]
        neighbor_fork_choice = self.make_fork_choice(change_parent_to[0])
        sources[change_parent_to[0]] = neighbor_fork_choice
        ID = change_parent_to[0]
        received_log.log[ID] = copy.copy(
            neighbor_fork_choice.sent_log.log[shard_ID])
        previous_received_log_size = len(prevblock.received_log.log[ID])
        newly_received_messages[ID] = received_log.log[ID][
            previous_received_log_size:]
    # Second pass: collect payloads addressed to us; forward the rest one hop.
    for ID in SHARD_IDS:
        for m in newly_received_messages[ID]:
            if m.target_shard_ID == shard_ID:
                if isinstance(m, SwitchMessage_BecomeAParent):
                    become_a_parent_of = (m.new_child_ID, m.new_child_source)
                elif isinstance(m, SwitchMessage_ChangeParent):
                    change_parent_to = (m.new_parent_ID, m.new_parent_source)
                else:
                    newly_received_payloads.add_message(ID, m)
            else:
                # Not for us: route towards the target (fall back to parent).
                next_hop_ID = self.next_hop(routing_table, m.target_shard_ID)
                if next_hop_ID is not None:
                    assert next_hop_ID in prevblock.child_IDs, "shard_ID: %s, destination: %s, next_hop: %s, children: %s" % (
                        shard_ID, ID, next_hop_ID, prevblock.child_IDs)
                else:
                    next_hop_ID = new_parent_ID
                assert next_hop_ID is not None
                new_sent_messages.log[next_hop_ID].append(
                    Message(sources[next_hop_ID], m.TTL, m.target_shard_ID,
                            m.payload))
    # --------------------------------------------------------------------#
    # KEY EVM INTEGRATION HERE
    # this is where we have this function that produces the new vm state and
    # the new outgoing payloads
    new_vm_state, new_outgoing_payloads = apply_to_state(
        prevblock.vm_state, data, newly_received_payloads, genesis_blocks)
    # --------------------------------------------------------------------#
    # BUILD SENT LOG FROM NEW OUTGOING PAYLOADS
    # by this time new_sent_messages might already have some messages from
    # rerouting above
    for ID in SHARD_IDS:
        if ID != shard_ID:
            for m in new_outgoing_payloads.log[ID]:
                # if TTL == 0, then we'd make an invalid block: one that sends
                # a message that must be included by a base which already
                # exists and therefore cannot include this message
                if TTL > 0:
                    first_hop_ID = self.next_hop(routing_table, ID)
                    if first_hop_ID is not None:
                        assert first_hop_ID in prevblock.child_IDs, "shard_ID: %s, target: %s, first_hop_ID: %s, parent: %s, children: %s, rtable: %s" % (
                            shard_ID, ID, first_hop_ID, prevblock.parent_ID,
                            prevblock.child_IDs, prevblock.routing_table)
                    else:
                        first_hop_ID = new_parent_ID
                    assert first_hop_ID is not None
                    new_sent_messages.log[first_hop_ID].append(
                        Message(sources[first_hop_ID], TTL, ID, m.payload))
                else:
                    print("Warning: Not sending message because TTL == 0")
    sent_log = prevblock.sent_log.append_MessagesLog(new_sent_messages)
    # --------------------------------------------------------------------#
    ret = Block(shard_ID, prevblock, new_txn_log, sent_log, received_log,
                sources, new_vm_state, postpone_validation=True)
    # Apply the tree changes detected above to the new block.
    if become_a_parent_of is not None:
        assert become_a_parent_of[
            0] not in ret.child_IDs, "shard_ID: %s, become_of: %s, child_IDs: %s" % (
                shard_ID, become_a_parent_of[0], ret.child_IDs)
        ret.child_IDs.append(become_a_parent_of[0])
        ret.routing_table[become_a_parent_of[0]] = become_a_parent_of[0]
    if change_parent_to is not None:
        assert change_parent_to[0] not in ret.child_IDs
        assert change_parent_to[0] != ret.parent_ID
        ret.parent_ID = change_parent_to[0]
    check = ret.is_valid()
    assert check[0], check[1]
    return ret
def __init__(self):
    """Register every supported block id with its renderer object.

    DataValues wraps per-data-value variants; specialised classes
    (Grass, Plant, Log, Leaves, Slab, Stairs, ...) select the render style.
    """
    self.addBlock(
        1,
        DataValues(1, [
            Block(1, "Stone", "stone"),  # BUGFIX: id was 3 (copy-paste)
            Block(1, "Granite", "stone_granite"),
            Block(1, "Polished Granite", "stone_granite_smooth"),
            Block(1, "Diorite", "stone_diorite"),
            Block(1, "Polished Diorite", "stone_diorite_smooth"),
            Block(1, "Andesite", "stone_andesite"),
            Block(1, "Polished Andesite", "stone_andesite_smooth")
        ]))
    self.addBlock(
        2,
        Grass(2, "Grass Block", "dirt", "grass_top", "grass_side",
              "grass_side_overlay"))
    self.addBlock(
        3,
        DataValues(3, [
            Block(3, "Dirt", "dirt"),
            Block(3, "Coarse Dirt", "coarse_dirt"),
            Multitextured(3, "Podzol", "dirt", "dirt_podzol_top",
                          "dirt_podzol_side")
        ]))
    self.addBlock(4, Block(4, "Cobblestone", "cobblestone"))
    self.addBlock(
        5,
        DataValues(5, [
            Block(5, "Oak Wood Planks", "planks_oak"),
            Block(5, "Spruce Wood Planks", "planks_spruce"),
            Block(5, "Birch Wood Planks", "planks_birch"),
            Block(5, "Jungle Wood Planks", "planks_jungle")
        ]))
    self.addBlock(
        6,
        DataValues(6, [
            Plant(6, "Oak Sapling", "sapling_oak"),
            Plant(6, "Spruce Sapling", "sapling_spruce"),
            Plant(6, "Birch Sapling", "sapling_birch"),
            Plant(6, "Jungle Sapling", "sapling_jungle")
        ] * 4))
    # BUGFIX: display name was "Block".
    self.addBlock(7, Block(7, "Bedrock", "bedrock"))
    self.addBlock(
        12,
        DataValues(
            12,
            [Block(12, "Sand", "sand"),
             Block(12, "Red Sand", "red_sand")]))
    self.addBlock(13, Block(13, "Gravel", "gravel"))
    self.addBlock(14, Block(14, "Gold Ore", "oreGold"))
    self.addBlock(15, Block(15, "Iron Ore", "oreIron"))
    self.addBlock(16, Block(16, "Coal Ore", "oreCoal"))
    self.addBlock(
        17,
        DataValues(17, [
            Log(17, "Oak Log", "tree_top", "tree_side"),
            Log(17, "Spruce Wood", "tree_top", "tree_spruce"), None, None
        ] * 4))
    self.addBlock(
        18,
        DataValues(18, [
            Leaves(18, "Oak Leaves", "leaves_oak"),
            Leaves(18, "Spruce Leaves", "leaves_spruce"),
            Leaves(18, "Birch Leaves", "leaves_birch"),
            Leaves(18, "Jungle Leaves", "leaves_jungle")
        ] * 4))
    self.addBlock(
        19,
        DataValues(19, [
            Block(19, "Sponge", "sponge"),
            Block(19, "Wet Sponge", "sponge_wet")
        ]))
    self.addBlock(20, Transparent(20, "Glass", "glass"))
    self.addBlock(21, Block(21, "Lapis Lazuli Ore", "oreLapis"))
    self.addBlock(22, Block(22, "Lapis Lazuli Block", "blockLapis"))
    self.addBlock(
        24,
        DataValues(24, [
            Multitextured(24, "Sandstone", "sandstone_bottom",
                          "sandstone_top", "sandstone_normal"),
            Multitextured(24, "Chiseled Sandstone",
                          textureTop="sandstone_top",
                          textureFront="sandstone_carved"),
            Multitextured(24, "Smooth Sandstone",
                          textureTop="sandstone_top",
                          textureFront="sandstone_smooth")
        ]))
    self.addBlock(25, Block(25, "Note Block", "musicBlock"))
    self.addBlock(30, Plant(30, "Cobweb", "web"))
    self.addBlock(
        31,
        DataValues(31, [
            Plant(31, "Shrub", "deadbush"),
            Tallgrass(31, "Tallgrass", "tallgrass"),
            Tallgrass(31, "Fern", "fern")
        ]))
    self.addBlock(
        35,
        DataValues(35, [
            Block(35, "White Wool", "cloth_0"),
            Block(35, "Orange Wool", "cloth_1"),
            Block(35, "Magenta Wool", "cloth_2"),
            Block(35, "Light Blue Wool", "cloth_3"),
            Block(35, "Yellow Wool", "cloth_4"),
            Block(35, "Lime Wool", "cloth_5"),
            Block(35, "Pink Wool", "cloth_6"),
            Block(35, "Gray Wool", "cloth_7"),
            Block(35, "Light Gray Wool", "cloth_8"),
            Block(35, "Cyan Wool", "cloth_9"),
            Block(35, "Purple Wool", "cloth_10"),
            Block(35, "Blue Wool", "cloth_11"),
            Block(35, "Brown Wool", "cloth_12"),
            Block(35, "Green Wool", "cloth_13"),
            Block(35, "Red Wool", "cloth_14"),
            Block(35, "Black Wool", "cloth_15")
        ]))
    self.addBlock(39, Plant(39, "Brown Mushroom", "mushroom_brown"))
    self.addBlock(40, Plant(40, "Red Mushroom", "mushroom_red"))
    self.addBlock(41, Block(41, "Gold Block", "blockGold"))
    self.addBlock(42, Block(42, "Iron Block", "iron_block"))
    self.addBlock(
        43,
        DataValues(43, [
            Multitextured(43, "Double Stone Slab",
                          textureTop="stoneslab_top",
                          textureFront="stoneslab_side"),
            Multitextured(43, "Double Sandstone Slab", "sandstone_bottom",
                          "sandstone_top", "sandstone_side"),
            Block(43, "Double Wooden Slab (Stone)", "planks_oak"),
            Block(43, "Double Cobblestone Slab", "cobblestone"),
            Block(43, "Double Brick Slab", "brick"),
            Block(43, "Double Stone Brick Slab", "stonebrick"),
            Block(43, "Double Nether Brick Slab", "netherBrick"),
            Multitextured(43, "Double Quartz Slab", "quartzblock_bottom",
                          "quartzblock_top", "quartzblock_side"),
            Block(43, "Double Smooth Stone Slab", "stoneslab_top"),
            Block(43, "Double Smooth Sandstone Slab", "sandstone_top")
        ]))
    self.addBlock(
        44,
        DataValues(44, [
            MultitexturedSlab(44, "Stone Slab",
                              textureTop="stoneslab_top",
                              textureFront="stoneslab_side"),
            MultitexturedSlab(44, "Sandstone Slab", "sandstone_bottom",
                              "sandstone_top", "sandstone_side"),
            Slab(44, "Wooden Slab (Stone)", "planks_oak"),
            Slab(44, "Cobblestone Slab", "cobblestone"),
            Slab(44, "Brick Slab", "brick"),
            Slab(44, "Stone Brick Slab", "stonebrick"),
            Slab(44, "Nether Brick Slab", "netherBrick"),
            MultitexturedSlab(44, "Quartz Slab", "quartzblock_bottom",
                              "quartzblock_top", "quartzblock_side")
        ] * 2))
    self.addBlock(45, Block(45, "Brick Block", "brick"))
    self.addBlock(
        46, Multitextured(46, "TNT", "tnt_bottom", "tnt_top", "tnt_side"))
    self.addBlock(
        47,
        Multitextured(47, "Bookshelf",
                      textureTop="planks_oak", textureFront="bookshelf"))
    self.addBlock(48, Block(48, "Moss Stone", "stoneMoss"))
    self.addBlock(49, Block(49, "Obsidian", "obsidian"))
    self.addBlock(51, Fire(51, "Fire", "fire_layer_0"))
    self.addBlock(53, Stairs(53, "Oak Wood Stairs", "planks_oak"))
    self.addBlock(56, Block(56, "Diamond Ore", "oreDiamond"))
    self.addBlock(57, Block(57, "Diamond Block", "blockDiamond"))
    self.addBlock(
        58,
        Multitextured(58, "Crafting Table", "planks_oak", "workbench_top",
                      "workbench_front", "workbench_front", "workbench_side",
                      "workbench_side"))
    self.addBlock(
        60,
        DataValues(60, [
            Farmland(60, "Dry Farmland", "dirt", "farmland_dry", "dirt"),
            # BUGFIX: one entry used the texture name "farmland_Wet"
            # (wrong case) -- all wet variants share "farmland_wet".
            Farmland(60, "Wet Farmland", "dirt", "farmland_wet", "dirt"),
            Farmland(60, "Wet Farmland", "dirt", "farmland_wet", "dirt"),
            Farmland(60, "Wet Farmland", "dirt", "farmland_wet", "dirt"),
            Farmland(60, "Wet Farmland", "dirt", "farmland_wet", "dirt"),
            Farmland(60, "Wet Farmland", "dirt", "farmland_wet", "dirt"),
            Farmland(60, "Wet Farmland", "dirt", "farmland_wet", "dirt"),
            Farmland(60, "Wet Farmland", "dirt", "farmland_wet", "dirt")
        ]))
    self.addBlock(67, Stairs(67, "Cobblestone Stairs", "cobblestone"))
    self.addBlock(73, Block(73, "Redstone Ore", "oreRedstone"))
    self.addBlock(74, Block(74, "Glowing Redstone Ore", "oreRedstone"))
    self.addBlock(79, Block(79, "Ice", "ice"))
    self.addBlock(80, Block(80, "Snow Block", "blockSnow"))
    self.addBlock(
        81, Cactus(81, "Cactus", "cactus_bottom", "cactus_top",
                   "cactus_side"))
    self.addBlock(82, Block(82, "Clay Block", "clay"))
    self.addBlock(
        84, Multitextured(84, "Jukebox", "musicBlock", "jukebox_top",
                          "musicBlock"))
    self.addBlock(
        86,
        DataValues(86, [
            Multitextured(86, "Pumpkin (0)", "pumpkin_top", "pumpkin_top",
                          "pumpkin_side", "pumpkin_side", "pumpkin_face",
                          "pumpkin_side"),
            Multitextured(86, "Pumpkin (1)", "pumpkin_top", "pumpkin_top",
                          "pumpkin_side", "pumpkin_face", "pumpkin_side",
                          "pumpkin_side"),
            Multitextured(86, "Pumpkin (2)", "pumpkin_top", "pumpkin_top",
                          "pumpkin_face", "pumpkin_side", "pumpkin_side",
                          "pumpkin_side"),
            Multitextured(86, "Pumpkin (3)", "pumpkin_top", "pumpkin_top",
                          "pumpkin_side", "pumpkin_side", "pumpkin_side",
                          "pumpkin_face"),
            # BUGFIX: textureFront was the nonexistent name "texture_side".
            Multitextured(86, "Pumpkin (4)",
                          textureTop="pumpkin_top",
                          textureFront="pumpkin_side")
        ]))
    self.addBlock(87, Block(87, "Netherrack", "netherrack"))
    self.addBlock(88, Block(88, "Soul Sand", "hellsand"))
    self.addBlock(89, Block(89, "Glowstone Block", "lightgem"))
    self.addBlock(
        91,
        # BUGFIX: the whole Jack O'Lantern entry was built with id 86
        # (copy-paste from the pumpkin entry); it is registered as 91.
        DataValues(91, [
            Multitextured(91, "Jack O'Lantern (0)", "pumpkin_top",
                          "pumpkin_top", "pumpkin_side", "pumpkin_side",
                          "pumpkin_jack", "pumpkin_side"),
            Multitextured(91, "Jack O'Lantern (1)", "pumpkin_top",
                          "pumpkin_top", "pumpkin_side", "pumpkin_jack",
                          "pumpkin_side", "pumpkin_side"),
            Multitextured(91, "Jack O'Lantern (2)", "pumpkin_top",
                          "pumpkin_top", "pumpkin_jack", "pumpkin_side",
                          "pumpkin_side", "pumpkin_side"),
            Multitextured(91, "Jack O'Lantern (3)", "pumpkin_top",
                          "pumpkin_top", "pumpkin_side", "pumpkin_side",
                          "pumpkin_side", "pumpkin_jack"),
            # BUGFIX: textureFront was the nonexistent name "texture_side".
            Multitextured(91, "Jack O'Lantern (4)",
                          textureTop="pumpkin_top",
                          textureFront="pumpkin_side")
        ]))
    self.addBlock(
        95,
        DataValues(95, [
            Transparent(95, "White Stained Glass", "glass_white"),
            Transparent(95, "Orange Stained Glass", "glass_orange"),
            Transparent(95, "Magenta Stained Glass", "glass_magenta"),
            Transparent(95, "Light Blue Stained Glass", "glass_light_blue"),
            Transparent(95, "Yellow Stained Glass", "glass_yellow"),
            Transparent(95, "Lime Stained Glass", "glass_lime"),
            Transparent(95, "Pink Stained Glass", "glass_pink"),
            Transparent(95, "Gray Stained Glass", "glass_gray"),
            Transparent(95, "Light Gray Stained Glass", "glass_silver"),
            Transparent(95, "Cyan Stained Glass", "glass_cyan"),
            Transparent(95, "Purple Stained Glass", "glass_purple"),
            Transparent(95, "Blue Stained Glass", "glass_blue"),
            Transparent(95, "Brown Stained Glass", "glass_brown"),
            Transparent(95, "Green Stained Glass", "glass_green"),
            Transparent(95, "Red Stained Glass", "glass_red"),
            Transparent(95, "Black Stained Glass", "glass_black")
        ]))
    self.addBlock(97, Block(97, "Silverfish Stone", "stone"))
    self.addBlock(98, Block(98, "Stone Brick", "stonebrick"))
    self.addBlock(
        103,
        Multitextured(103, "Melon",
                      textureTop="melon_top", textureFront="melon_side"))
    self.addBlock(108, Stairs(108, "Brick Stairs", "brick"))
    self.addBlock(109, Stairs(109, "Stone Brick Stairs", "stonebrick"))
    self.addBlock(
        110, Multitextured(110, "Mycelium", "dirt", "mycel_top",
                           "mycel_side"))
    self.addBlock(112, Block(112, "Nether Brick", "netherBrick"))
    self.addBlock(114, Stairs(114, "Nether Brick Stairs", "netherBrick"))
    self.addBlock(121, Block(121, "End Stone", "whiteStone"))
    self.addBlock(123, Block(123, "Redstone Lamp (Inactive)",
                             "redstoneLight"))
    self.addBlock(
        124, Block(124, "Redstone Lamp (Active)", "redstoneLight_lit"))
    self.addBlock(
        125,
        DataValues(125, [
            Block(125, "Double Oak Wood Slab", "planks_oak"),
            Block(125, "Double Spruce Wood Slab", "planks_spruce"),
            Block(125, "Double Birch Wood Slab", "planks_birch"),
            Block(125, "Double Jungle Wood Slab", "planks_jungle")
        ]))
    self.addBlock(
        126,
        DataValues(126, [
            Slab(126, "Oak Wood Slab", "planks_oak"),
            Slab(126, "Spruce Wood Slab", "planks_spruce"),
            Slab(126, "Birch Wood Slab", "planks_birch"),
            Slab(126, "Jungle Wood Slab", "planks_jungle")
        ] * 2))
    self.addBlock(
        128,
        MultitexturedStairs(128, "Sandstone Stairs", "sandstone_bottom",
                            "sandstone_top", "sandstone_side"))
    self.addBlock(129, Block(129, "Emerald Ore", "oreEmerald"))
    self.addBlock(133, Block(133, "Block of Emerald", "blockEmerald"))
    self.addBlock(134, Stairs(134, "Spruce Wood Stairs", "planks_spruce"))
    self.addBlock(135, Stairs(135, "Birch Wood Stairs", "planks_birch"))
    self.addBlock(136, Stairs(136, "Jungle Wood Stairs", "planks_jungle"))
    self.addBlock(137, Block(137, "Command Block", "commandBlock"))
    self.addBlock(152, Block(152, "Block of Redstone", "blockRedstone"))
    self.addBlock(153, Block(153, "Nether Quartz Ore", "netherquartz"))
    self.addBlock(
        155,
        DataValues(155, [
            Multitextured(155, "Block of Quartz", "quartzblock_bottom",
                          "quartzblock_top", "quartzblock_side"),
            Multitextured(155, "Chiseled Quartz Block",
                          textureTop="quartzblock_chiseled_top",
                          textureFront="quartzblock_chiseled")
        ]))
    self.addBlock(
        156,
        MultitexturedStairs(156, "Quartz Stairs", "quartzblock_bottom",
                            "quartzblock_top", "quartzblock_side"))
    self.addBlock(174, Block(174, "Packed Ice", "ice_packed"))
    self.addBlock(
        179,
        DataValues(179, [
            Multitextured(179, "Red Sandstone", "red_sandstone_bottom",
                          "red_sandstone_top", "red_sandstone_normal"),
            Multitextured(179, "Chiseled Red Sandstone",
                          textureTop="red_sandstone_top",
                          textureFront="red_sandstone_carved"),
            Multitextured(179, "Smooth Red Sandstone",
                          textureTop="red_sandstone_top",
                          textureFront="red_sandstone_smooth")
        ]))
def make_block(self, shard_ID, mempools, drain_amount, genesis_blocks, TTL=TTL_CONSTANT): genesis_blocks = self.genesis_blocks() # RUN FORK CHOICE RULE # will only have fork choices for parent and children fork_choice = self.fork_choice(shard_ID) # --------------------------------------------------------------------# # GET PREVBLOCK POINTER FROM FORK CHOICE prevblock = fork_choice[shard_ID] # --------------------------------------------------------------------# # EXTEND THE TRANSACTION LOG FROM THE MEMPOOL prev_txn_log = prevblock.txn_log new_txn_log = copy.copy(prev_txn_log) data = [] num_prev_txs = len(prev_txn_log) for i in range(drain_amount): if num_prev_txs + i < len(mempools[shard_ID]): new_tx = mempools[shard_ID][num_prev_txs + i] new_txn_log.append(new_tx) data.append(new_tx) # --------------------------------------------------------------------# # BUILD RECEIVED LOG WITH: received_log = MessagesLog() sources = {ID: genesis_blocks[ID] for ID in SHARD_IDS} for ID in fork_choice.keys(): if ID == shard_ID: continue # SOURCES = FORK CHOICE (except for self) sources[ID] = fork_choice[ID] # RECEIVED = SENT MESSAGES FROM FORK CHOICE received_log.log[ID] = fork_choice[ID].sent_log.log[shard_ID] # --------------------------------------------------------------------# # PREP NEWLY RECEIVED PMESSAGES IN A RECEIVEDLOG FOR EVM: newly_received_messages = {} new_sent_messages = MessagesLog() for ID in fork_choice.keys(): previous_received_log_size = len(prevblock.received_log.log[ID]) current_received_log_size = len(received_log.log[ID]) newly_received_messages[ID] = received_log.log[ID][ previous_received_log_size:] newly_received_payloads = MessagesLog() for ID in fork_choice.keys(): for m in newly_received_messages[ID]: if m.target_shard_ID == shard_ID: newly_received_payloads.add_message(ID, m) else: next_hop_ID = self.next_hop(prevblock, m.target_shard_ID) if next_hop_ID is not None: assert next_hop_ID in prevblock.child_IDs else: next_hop_ID = prevblock.parent_ID 
new_sent_messages.log[next_hop_ID].append( Message(fork_choice[next_hop_ID], m.TTL, m.target_shard_ID, m.payload)) # --------------------------------------------------------------------# # KEY EVM INTEGRATION HERE # this is where we have this function that produces the new vm state and the new outgoing payloads # new_vm_state, new_outgoing_payloads = apply_to_state(prevblock.vm_state, data, newly_received_payloads) new_vm_state, new_outgoing_payloads = apply_to_state( prevblock.vm_state, data, newly_received_payloads, genesis_blocks) # --------------------------------------------------------------------# # BUILD SENT LOG FROM NEW OUTGOING PAYLOADS # by this time new_sent_messages might already have some messages from rerouting above for ID in SHARD_IDS: if ID != shard_ID: for m in new_outgoing_payloads.log[ID]: # if TTL == 0, then we'll make an invalid block # one that sends a message that must be included by the base # which already exists and therefore cannot include this message if TTL > 0: first_hop_ID = self.next_hop(prevblock, ID) if first_hop_ID is not None: assert first_hop_ID in [prevblock.parent_ID ] + prevblock.child_IDs else: first_hop_ID = prevblock.parent_ID new_sent_messages.log[first_hop_ID].append( Message(fork_choice[first_hop_ID], TTL, ID, m.payload)) else: print("Warning: Not sending message because TTL == 0") sent_log = prevblock.sent_log.append_MessagesLog(new_sent_messages) # --------------------------------------------------------------------# return Block(shard_ID, prevblock, new_txn_log, sent_log, received_log, sources, new_vm_state)
def make_block(self, shard_ID, mempools, drain_amount, TTL=TTL_CONSTANT):
    """Assemble and return a new Block for `shard_ID` (two-shard variant).

    Unlike the routed variant, this version runs the fork choice over all
    shards, receives every message a neighbor's tip has sent to us, and
    contains a hard-coded payload swap for shard 1 (see below).

    Parameters:
        shard_ID: shard to build on.
        mempools: dict of shard ID -> ordered list of pending transactions.
        drain_amount: max number of new transactions to include.
        TTL: time-to-live stamped on newly sent messages; 0 suppresses sends.
    """
    # RUN FORK CHOICE RULE
    fork_choice = self.fork_choice()

    # --------------------------------------------------------------------#
    # GET PREVBLOCK POINTER FROM FORK CHOICE
    prevblock = fork_choice[shard_ID]

    # --------------------------------------------------------------------#
    # EXTEND THE TRANSACTION LOG FROM THE MEMPOOL
    prev_txn_log = prevblock.txn_log
    new_txn_log = copy.copy(prev_txn_log)
    data = []
    num_prev_txs = len(prev_txn_log)
    # Drain up to `drain_amount` transactions beyond those already included.
    for i in range(drain_amount):
        if num_prev_txs + i < len(mempools[shard_ID]):
            new_tx = mempools[shard_ID][num_prev_txs + i]
            new_txn_log.append(new_tx)
            data.append(new_tx)

    # --------------------------------------------------------------------#
    # BUILD RECEIVED LOG WITH:
    received_log = ReceivedLog()
    for ID in SHARD_IDS:
        if ID == shard_ID:
            continue
        # SOURCES = FORK CHOICE (except for self)
        received_log.sources[ID] = fork_choice[ID]
        # RECEIVED = SENT MESSAGES FROM FORK CHOICE
        received_log.log[ID] = fork_choice[ID].sent_log.log[shard_ID]

    # --------------------------------------------------------------------#
    # PREP NEWLY RECEIVED PMESSAGES IN A RECEIVEDLOG FOR EVM:
    # "Newly received" = suffix beyond what the previous block had received.
    newly_received_messages = {}
    for ID in SHARD_IDS:
        previous_received_log_size = len(prevblock.received_log.log[ID])
        # NOTE(review): current_received_log_size is computed but never used.
        current_received_log_size = len(received_log.log[ID])
        newly_received_messages[ID] = received_log.log[ID][
            previous_received_log_size:]

    newly_received_payloads = ReceivedLog()
    for ID in SHARD_IDS:
        for m in newly_received_messages[ID]:
            newly_received_payloads.add_received_message(ID, m)

    # --------------------------------------------------------------------#
    # KEY EVM INTEGRATION HERE
    # this is where we have this function that produces the new vm state and the new outgoing payloads
    # new_vm_state, new_outgoing_payloads = apply_to_state(prevblock.vm_state, data, newly_received_payloads)
    new_vm_state, new_outgoing_payloads = apply_to_state(
        prevblock.vm_state, data, newly_received_payloads)

    # NOTE(review): hard-coded hack — when building on shard 1, the outgoing
    # payload queues for shards 0 and 1 are swapped. Presumably this redirects
    # shard 1's self-addressed payloads to shard 0 in a two-shard setup;
    # confirm intent before generalizing to more shards.
    if shard_ID == 1:
        new_outgoing_payloads.log[0], new_outgoing_payloads.log[
            1] = new_outgoing_payloads.log[1], new_outgoing_payloads.log[0]

    # --------------------------------------------------------------------#
    # BUILD SENT LOG FROM NEW OUTGOING PAYLOADS
    new_sent_messages = SentLog()
    for ID in SHARD_IDS:
        if ID != shard_ID:
            for m in new_outgoing_payloads.log[ID]:
                # if TTL == 0, then we'll make an invalid block
                # one that sends a message that must be included by the base
                # which already exists and therefore cannot include this message
                if TTL > 0:
                    new_sent_messages.log[ID].append(
                        Message(fork_choice[ID], TTL, m.payload))
                else:
                    print("Warning: Not sending message because TTL == 0")

    sent_log = prevblock.sent_log.append_SentLog(new_sent_messages)

    # --------------------------------------------------------------------#
    return Block(shard_ID, prevblock, new_txn_log, sent_log, received_log,
                 new_vm_state)
from config import VALIDATOR_NAMES
import generate_transactions

# Experiment parameters
NUM_PROPOSALS = 100
NUM_RECEIPTS_PER_PROPOSAL = 30
MEMPOOL_DRAIN_RATE = 1
REPORT_INTERVAL = 10
PAUSE_LENGTH = 0.01

# Setup: one genesis block per shard, plus the consensus message that
# delivers it. The watcher is the sender of the genesis blocks.
GENESIS_BLOCKS = {}
GENESIS_MESSAGES = []
for shard_id in SHARD_IDS:
    genesis = Block(shard_id)
    GENESIS_BLOCKS[shard_id] = genesis
    GENESIS_MESSAGES.append(ConsensusMessage(genesis, 0, []))

# One validator per configured name.
validators = {name: Validator(name) for name in VALIDATOR_NAMES}

# Watcher lives at validator name 0 and receives all the messages.
watcher = validators[0]

# Seed every validator with every genesis consensus message.
for name in VALIDATOR_NAMES:
    for msg in GENESIS_MESSAGES:
        validators[name].receive_consensus_message(msg)
def main():
    """Train a k_max-deep unrolled CT-reconstruction network block by block.

    Parses CLI arguments (optionally overridden by a JSON config file),
    builds the ODL forward operator / FBP pseudoinverse for a parallel-beam
    geometry, constructs the dataset, then greedily trains one architecture
    block per iteration, reporting train/validation reconstruction stats and
    optionally saving/loading each block's weights.

    Side effects: creates a results directory tree, writes model checkpoints
    and figures, prints progress to stdout.
    """
    parser = argparse.ArgumentParser()
    # general & dataset & training settings
    parser.add_argument('--k_max', type=int, default=5,
                        help='Max reconstruction iterations')
    parser.add_argument('--save_figs', type=lambda x: bool(strtobool(x)),
                        default=True, help='save pics in reconstruction')
    parser.add_argument('--img_mode', type=str, default='SimpleCT',
                        help=' image-modality reconstruction: SimpleCT')
    parser.add_argument('--train_size', type=int, default=4000,
                        help='dataset size')
    parser.add_argument('--dataset_type', type=str, default='GenEllipsesSamples',
                        help='GenEllipsesSamples or GenFoamSamples')
    parser.add_argument('--pseudo_inverse_init', type=lambda x: bool(strtobool(x)),
                        default=True, help='initialise with pseudoinverse')
    parser.add_argument('--epochs', type=int, default=150,
                        help='number of epochs to train')
    parser.add_argument('--batch_size', type=int, default=128,
                        help='input batch size for training')
    parser.add_argument('--initial_lr', type=float, default=1e-3,
                        help='initial_lr')
    parser.add_argument('--val_batch_size', type=int, default=128,
                        help='input batch size for valing')
    parser.add_argument('--arch_args', type=json.loads, default=dict(),
                        help='load architecture dictionary')
    parser.add_argument('--block_type', type=str, default='bayesian_homo',
                        help='deterministic, bayesian_homo, bayesian_hetero')
    parser.add_argument('--save', type=lambda x: bool(strtobool(x)),
                        default=True, help='save model')
    # BUG FIX: help text previously said 'save model' (copy-paste from --save).
    parser.add_argument('--load', type=lambda x: bool(strtobool(x)),
                        default=False, help='load model')
    # forward models setting
    parser.add_argument('--size', type=int, default=128, help='image size')
    parser.add_argument('--beam_num_angle', type=int, default=30,
                        help='number of angles / projections')
    parser.add_argument('--limited_view', type=lambda x: bool(strtobool(x)),
                        default=False,
                        help='limited view geometry instead of sparse view geometry')
    # options
    parser.add_argument('--no_cuda', type=lambda x: bool(strtobool(x)),
                        default=False, help='disables CUDA training')
    parser.add_argument('--seed', type=int, default=222, help='random seed')
    parser.add_argument('--config', default='configs/bayesian_arch_config.json',
                        help='config file path')
    args = parser.parse_args()

    # JSON config (if given) overrides the CLI defaults in-place.
    if args.config is not None:
        with open(args.config) as handle:
            config = json.load(handle)
        vars(args).update(config)

    block_utils.set_gpu_mode(True)
    # Seed both torch and numpy for reproducibility.
    torch.manual_seed(args.seed)
    np.random.seed(args.seed)
    use_cuda = not args.no_cuda and torch.cuda.is_available()
    device = torch.device('cuda' if use_cuda else 'cpu')

    if args.img_mode == SimpleCT.__name__:
        img_mode = SimpleCT()
        half_size = args.size / 2
        # Reconstruction space: square image of args.size^2 float32 pixels.
        space = odl.uniform_discr([-half_size, -half_size],
                                  [half_size, half_size],
                                  [args.size, args.size], dtype='float32')
        img_mode.space = space
        # Sparse-view vs limited-view parallel-beam geometry.
        # (args.limited_view is a bool, so a plain if/else suffices — the
        # original if/elif/else had an unreachable else branch.)
        if not args.limited_view:
            geometry = odl.tomo.parallel_beam_geometry(
                space, num_angles=args.beam_num_angle)
        else:
            geometry = limited_view_parallel_beam_geometry(
                space, beam_num_angle=args.beam_num_angle)
        img_mode.geometry = geometry
        operator = odl.tomo.RayTransform(space, geometry)
        # Normalize the operator by its spectral norm; the pseudoinverse is
        # scaled back by opnorm so pseudoinverse(operator(x)) ~ x.
        opnorm = odl.power_method_opnorm(operator)
        img_mode.operator = odl_torch.OperatorModule((1 / opnorm) * operator)
        img_mode.adjoint = odl_torch.OperatorModule((1 / opnorm) * operator.adjoint)
        pseudoinverse = odl.tomo.fbp_op(operator)
        pseudoinverse = odl_torch.OperatorModule(pseudoinverse * opnorm)
        img_mode.pseudoinverse = pseudoinverse

        geometry_specs = 'full_view_sparse_' + str(args.beam_num_angle) \
            if not args.limited_view else 'limited_view_' + str(args.beam_num_angle)
        dataset_name = 'dataset' + '_' + args.img_mode + '_' + str(args.size) \
            + '_' + str(args.train_size) + '_' + geometry_specs + '_' + args.dataset_type
        data_constructor = DatasetConstructor(img_mode,
                                              train_size=args.train_size,
                                              dataset_name=dataset_name)
        data = data_constructor.data()
    else:
        raise NotImplementedError

    dataset = DataSet(data, img_mode, args.pseudo_inverse_init)
    optim_parms = {'epochs': args.epochs,
                   'initial_lr': args.initial_lr,
                   'batch_size': args.batch_size}

    # Select the block architecture implementation.
    if args.block_type == 'deterministic':
        from blocks import DeterministicBlock as Block
    elif args.block_type == 'bayesian_homo':
        from blocks import BlockHomo as Block
    elif args.block_type == 'bayesian_hetero':
        from blocks import BlockHetero as Block
    else:
        raise NotImplementedError

    # results directory
    path = os.path.dirname(__file__)
    dir_path = os.path.join(path, 'results', args.img_mode, args.block_type,
                            args.dataset_type, str(args.train_size),
                            geometry_specs, str(args.size), str(args.seed))
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)

    # all config
    print('===========================\n', flush=True)
    for key, val in vars(args).items():
        print('{}: {}'.format(key, val), flush=True)
    print('===========================\n', flush=True)

    blocks_history = {'block': [], 'optimizer': []}
    # savings training procedures
    filename = 'train_phase'
    filepath = os.path.join(dir_path, filename)
    vis = TrainVisualiser(filepath)
    start_time = time.time()

    # looping through architecture-blocks: train (or load) one block per step
    for idx in range(1, args.k_max + 1):
        print('============== training block number: {} ============= \n'.format(idx),
              flush=True)
        train_tensor = dataset.construct(flag='train')
        val_tensor = dataset.construct(flag='validation')
        train_loader = DataLoader(train_tensor, batch_size=args.batch_size,
                                  shuffle=True)
        # NOTE(review): val_loader is constructed but never consumed below —
        # validation goes through next_step_update instead; confirm it can go.
        val_loader = DataLoader(val_tensor, batch_size=args.val_batch_size,
                                shuffle=True)
        block = Block(args.arch_args)
        block = block.to(device)

        path_block = os.path.join(dir_path, str(idx) + '.pt')
        if args.load and os.path.exists(path_block):
            block.load_state_dict(torch.load(path_block))
            loaded = True
            print('============= loaded idx: {} ============='.format(idx),
                  flush=True)
        else:
            block.optimise(train_loader, **optim_parms)
            loaded = False

        start = time.time()
        info = next_step_update(dataset, train_tensor, block, device, flag='train')
        end = time.time()
        print('============= {} {:.4f} ============= \n'.format(
            'training reconstruction', end - start), flush=True)
        for key in info.keys():
            print('{}: {} \n'.format(key, info[key]), flush=True)

        start = time.time()
        info = next_step_update(dataset, val_tensor, block, device, flag='validation')
        end = time.time()
        print('============= {} {:.4f} ============= \n'.format(
            'validation reconstruction', end - start), flush=True)
        for key in info.keys():
            print('{}: {} \n'.format(key, info[key]), flush=True)

        vis.update(dataset, flag='validation')
        blocks_history['block'].append(block)

        # reconstruction (typo fixed: was `resonstruction_dir_path`)
        reconstruction_dir_path = os.path.join(dir_path, str(idx))
        if not loaded:
            if not os.path.isdir(reconstruction_dir_path):
                os.makedirs(reconstruction_dir_path)
            get_stats(dataset, blocks_history, device, reconstruction_dir_path)

        if args.save and not loaded:
            torch.save(block.state_dict(), path_block)

    print('--- training time: %s seconds ---' % (time.time() - start_time),
          flush=True)
    vis.generate()
def make_block(self, shard_ID, data, TTL=TTL_CONSTANT):
    """Assemble and return a new Block for `shard_ID` (early prototype).

    Runs the fork choice, receives every message the fork-choice tips have
    sent to this shard, extracts the newly received payloads, applies the
    state transition, and packages the resulting outgoing payloads as new
    sent messages.

    Parameters:
        shard_ID: shard to build on.
        data: transaction data passed straight into `apply_to_state` and
            stored on the returned Block.
        TTL: time-to-live stamped on every newly sent message.
    """
    # first we execute the fork choice rule
    fork_choice = self.fork_choice()

    prevblock = fork_choice[shard_ID]

    # then put together the new received log
    received_log = ReceivedLog()
    for ID in SHARD_IDS:
        if ID == shard_ID:
            continue
        # we're just going to receive every send that we see from the fork choice (which filtered blocks who don't recieve things before their TTLs)
        received_log.sources[ID] = fork_choice[ID]
        received_log.log[ID] = fork_choice[ID].sent_log.log[shard_ID]

    # which has the following newly received messages:
    # ("newly received" = suffix beyond what prevblock had already received)
    newly_received_messages = {}
    for ID in SHARD_IDS:
        if ID == shard_ID:
            continue
        previous_received_log_size = len(prevblock.received_log.log[ID])
        current_received_log_size = len(received_log.log[ID])
        assert current_received_log_size >= previous_received_log_size, "did not expect log size to shrink"
        newly_received_messages[ID] = received_log.log[ID][
            previous_received_log_size:]

    # which have the following newly received payloads:
    newly_received_payloads = {}
    for ID in SHARD_IDS:
        if ID == shard_ID:
            continue
        newly_received_payloads[ID] = [
            m.message_payload for m in newly_received_messages[ID]
        ]

    ''' KEY INTEGRATION HERE '''
    # this is where we have this function that produces the new vm state and the new outgoing payloads
    # new_vm_state, new_outgoing_payloads = INTEGRATE_HERE(prevblock.vm_state, data, newly_received_payloads)
    print(prevblock.vm_state)
    new_vm_state, new_outgoing_payloads = apply_to_state(
        prevblock.vm_state, data, newly_received_payloads)
    # need new_outgoing_payloads is a dict of shard id to new payloads
    print(new_outgoing_payloads)
    # new_vm_state = prevblock.vm_state
    # new_outgoing_payloads = {}
    # for ID in SHARD_IDS:
    #     new_outgoing_payloads[ID] = []

    # we now package the sent_log with new messages that deliver these payloads
    # NOTE(review): unlike the loops above, this one does NOT skip shard_ID —
    # it assumes apply_to_state returns an entry for every shard, including
    # our own; confirm against apply_to_state's contract.
    new_sent_messages = []
    shard_IDs = []
    for ID in SHARD_IDS:
        for payload in new_outgoing_payloads[ID]:
            new_sent_messages.append(Message(fork_choice[ID], TTL, payload))
            shard_IDs.append(ID)
    sent_log = prevblock.sent_log.add_sent_messages(
        shard_IDs, new_sent_messages)

    return Block(shard_ID, prevblock, data, sent_log, received_log,
                 new_vm_state)
###################################################
# this code implements a basic blockchain program #
# creation of chain                               #
# author: and_bac 2019                            #
###################################################
from blocks import Block
import datetime as date

# Number of blocks the user wants in the chain (the genesis block counts
# toward this total: the loop below appends blocks 1 .. add_blocks-1).
add_blocks = int(input("please enter the number of blocks to create the blockhain: "))

# The chain starts with the project's genesis block.
bac_chain = [Block.big_ben_block()]

# BUG FIX: the original applied `%` to a string containing no conversion
# specifier ("... timestamp: " % bac_chain[0].hash), which raises
# "TypeError: not all arguments converted during string formatting".
# The hash and timestamp are now formatted explicitly.
print("The genesis block or block 0 is created with the following Hash and timestamp: %s %s"
      % (bac_chain[0].hash, bac_chain[0].timestamp))

for i in range(1, add_blocks):
    # Each new block links to the previous block's hash and is stamped now.
    bac_chain.append(Block(bac_chain[i-1].hash, "Block number %d" % i,
                           date.datetime.now()))
    print("Block #%d created" % i)
    print("Hash: %s" % bac_chain[-1].hash)
    print("timestamp: %s" % bac_chain[-1].timestamp + "\n")
# NOTE(review): the `return` below is the tail of a function whose `def` line
# lies outside this excerpt; it cannot be documented or verified from here.
    return res.set()


def power_set(variables):
    """Build the set of all monomials over `variables` (the power set of
    the variable set, viewed as monomials).

    Duplicates are removed and variables are sorted by `top_index`
    (descending). Presumably PolyBoRi-style: `if_then_else(v, res, res)`
    extends the monomial set with and without `v` — confirm against the
    if_then_else helper.
    """
    if not variables:
        # Empty input: the constant-1 monomial set.
        return BooleConstant(1)
    variables = sorted(set(variables), reverse=True, key=top_index)
    res = Polynomial(1, variables[0].ring()).set()
    for v in variables:
        res = if_then_else(v, res, res)
    return res


if __name__ == '__main__':
    # Smoke-test / demo (Python 2 print statements).
    from blocks import declare_ring, Block
    r = declare_ring([Block("x", 10000)], globals())
    print list(all_monomials_of_degree_d(0, [Variable(i) for i in range(100)]))
    print list(all_monomials_of_degree_d(1, [Variable(i) for i in range(10)]))
    print list(all_monomials_of_degree_d(2, [Variable(i) for i in range(4)]))
    print list(all_monomials_of_degree_d(3, [Variable(i) for i in range(4)]))
    print list(all_monomials_of_degree_d(4, [Variable(i) for i in range(4)]))
    print list(all_monomials_of_degree_d(0, []))
    print list(all_monomials_of_degree_d(1, []))
    print list(power_set([Variable(i) for i in range(2)]))
    print list(power_set([Variable(i) for i in range(4)]))
    print list(power_set([]))
    #every monomial in the first 8 var, which is at most linear in the first 5
    print list(
        mod_mon_set(
            power_set([Variable(i) for i in range(8)]),
            all_monomials_of_degree_d(2, [Variable(i) for i in range(5)])))
from validator import ConsensusMessage
from validator import UnresolvedDeps
from generate_transactions import gen_alice_and_bob_tx
from config import *


def add_switch_message(parent_shard, child_to_become_parent, child_to_move_down, position):
    """Insert a 'switch' opcode into `parent_shard`'s mempool at `position`.

    The opcode instructs the shard topology to promote
    `child_to_become_parent` and demote `child_to_move_down`.
    `mempools` is expected to come from `config` (via the star import above)
    — confirm.
    """
    global mempools
    mempools[parent_shard].insert(position, {'opcode': 'switch',
                                             'child_to_become_parent': child_to_become_parent,
                                             'child_to_move_down': child_to_move_down})


# Setup
GENESIS_BLOCKS = {}
GENESIS_MESSAGES = []
for ID in SHARD_IDS:
    # temporarily set sources to {}, since genesis blocks are not known yet
    GENESIS_BLOCKS[ID] = Block(ID, sources={})
    # The watcher is the sender of the genesis blocks
    GENESIS_MESSAGES.append(ConsensusMessage(GENESIS_BLOCKS[ID], 0, []))

# Second pass: now that every genesis block exists, wire up sources and the
# parent/child topology from INITIAL_TOPOLOGY.
for ID in SHARD_IDS:
    GENESIS_BLOCKS[ID].sources = {ID: GENESIS_BLOCKS[ID] for ID in SHARD_IDS}
    GENESIS_BLOCKS[ID].parent_ID = None
    # A shard's parent is whichever shard lists it among its children.
    for _ in SHARD_IDS:
        if ID in INITIAL_TOPOLOGY[_]:
            GENESIS_BLOCKS[ID].parent_ID = _
    GENESIS_BLOCKS[ID].child_IDs = INITIAL_TOPOLOGY[ID]

for ID in SHARD_IDS:
    GENESIS_BLOCKS[ID].compute_routing_table()

validators = {}
for name in VALIDATOR_NAMES:
    # NOTE(review): the body of this loop lies beyond this excerpt
    # (truncated); it cannot be documented or verified from here.
def calculate():
    """Run the colliding-blocks simulation on the global tkinter canvas and
    display the running collision count.

    Two blocks collide elastically; block2's mass is 100**(nrDigits-1), so
    the final collision count approximates the first `nrDigits` digits of pi
    (realNrCollisions = int(math.pi * 10**(nrDigits-1))). Relies on the
    module globals: canvas, root, width, height, digits, fr_control, Block,
    ALL, math.

    NOTE(review): the `while(True)` loop below has no break, so the trailing
    `block1 = None` / `block2 = None` lines appear unreachable; the
    reconstructed nesting of this collapsed source is a best-effort guess.
    """
    global canvas, startTime
    fr_control.start()
    const = 1 / (width)
    k = 0.051
    collided = False
    canvas.delete(ALL)
    nrCollisions = 0
    # On-canvas counter text, updated every frame.
    text = canvas.create_text(width/2, 72)
    nrDigits = int(digits.get())
    # Number of physics sub-steps per rendered frame (adapted below).
    TimeSteps = 1
    block1 = Block(10, height, 1, 30, 50)
    block2 = Block(500, height, 100 ** (nrDigits-1), 30, 100, (-100/TimeSteps))
    # k is reassigned here; the 0.051 above is immediately discarded —
    # presumably leftover tuning. numerator tracks block2's distance term.
    k = - 100
    block1.show(canvas)
    block2.show(canvas)
    numerator = (width - block2.x + k)
    oldNumerator = numerator
    firstTime = True
    #secondTime = False
    # Expected total collisions: first nrDigits digits of pi.
    realNrCollisions = int(math.pi * 10 ** (nrDigits-1))
    previousNrCollisions = 0
    while(True):
        block1.setOld()
        block2.setOld()
        # Physics sub-steps between two canvas redraws.
        for i in range(0, TimeSteps):
            numerator = (width - block2.x + k)
            if (block1.x <= 0):
                # Elastic bounce off the left wall.
                block1.vx *= -1
                #block1.x = block1.x * -1
                collided = True
            elif (block1.collide(block2)):
                # 1D elastic collision: exchange momentum.
                collided = True
                sumOfMasses = block1.mass + block2.mass
                newV1 = (block1.mass - block2.mass) * block1.vx
                newV1 += (2 * block2.mass) * block2.vx
                newV1 /= sumOfMasses
                newV2 = (block2.mass - block1.mass) * block2.vx
                newV2 += (2 * block1.mass) * block1.vx
                newV2 /= sumOfMasses
                #block1.x -= block1.size - (x2 - x1) / 2
                #block2.x += block1.size - (x2 - x1) / 2
                block1.vx = newV1
                block2.vx = newV2
            if(nrCollisions == realNrCollisions and firstTime):
                # All expected collisions seen: restore real-time stepping.
                firstTime = False
                block2.vx *= TimeSteps
                TimeSteps = 1
                break
            elif(numerator - oldNumerator > 100):
                # Block2 moved far enough right: speed up the simulation by
                # shrinking the timestep and running more sub-steps.
                #firstTime = False
                oldNumerator = numerator
                nrSteps = math.ceil(nrDigits * numerator * const)
                block2.vx *= TimeSteps
                block1.vx *= TimeSteps
                TimeSteps = nrSteps ** (nrSteps - 1)
                block2.vx /= TimeSteps
                block1.vx /= TimeSteps
            if(collided):
                nrCollisions += 1
                collided = False
            block1.update()
            block2.update()
        #print(block2.vx - block1.vx)
        # Redraw the counter and both blocks, then yield to tkinter.
        canvas.itemconfig(text, text=str(nrCollisions))
        canvas.itemconfig(text, font=("Courier", 72))
        block1.move(canvas)
        block2.move(canvas)
        root.update()
        fr_control.delay()
    block1 = None
    block2 = None