def __init__(self, display: Surface):
    """Create the score display, the bullet pool, and a centred block."""
    self.display = display
    self.score = Score(display)
    # one bullet object per slot; the count comes from the class-level num_bullets
    self.bullets = [Bullet(display) for _ in range(self.num_bullets)]
    # place the block's top-left corner so the block sits in the display centre
    centre = (display.get_width() // 2 - Block.WIDTH // 2,
              display.get_height() // 2 - Block.HEIGHT // 2)
    self.block = Block(display, centre, self.bullets)
def __init__(self, parent):
    """Build the level: a 6x3 grid with the main (red) block and nine obstacles."""
    self.baseLvl = LevelConstructor(parent, 6, 3)
    self.canvas = self.baseLvl.getCanvas()
    self.blockWidth = self.baseLvl.getBlockWidth()
    self.blockBaseSettings = [self.canvas, self.blockWidth]
    # the player-controlled block is the only one flagged as main
    self.block_0 = Block(self.blockBaseSettings, size=2, orientation="H",
                         position=[0, 2], color="red", isMain=True)
    # remaining blocks: (attribute name, size, orientation, position, color);
    # note there is intentionally no block_1 (the main block takes its place)
    obstacle_specs = [
        ("block_2", 3, "H", [3, 5], "green"),
        ("block_3", 2, "H", [4, 3], "lightBlue"),
        ("block_4", 3, "V", [0, 3], "orange"),
        ("block_5", 3, "V", [5, 0], "black"),
        ("block_6", 2, "V", [3, 2], "blue"),
        ("block_7", 3, "V", [2, 0], "purple"),
        ("block_8", 3, "H", [3, 4], "maroon1"),
        ("block_9", 2, "H", [0, 0], "navy"),
        ("block_10", 2, "H", [3, 1], "grey"),
    ]
    for attr_name, size, orientation, position, color in obstacle_specs:
        setattr(self, attr_name,
                Block(self.blockBaseSettings, size=size, orientation=orientation,
                      position=position, color=color))
def test_create_block(self):
    """A fresh Block accepts a transaction and serialises its header to JSON."""
    blk = Block(0, 0, 0)
    self.assertIsInstance(blk, Block)
    txn = Transaction(0, 1, 2)
    self.assertTrue(blk.add_transaction(txn))
    # the header must round-trip through JSON into a dict with a string tree root
    header = json.loads(blk.get_header())
    self.assertIsInstance(header, dict)
    self.assertIsInstance(header["trans_tree"], str)
def test_block_serialize(self):
    """Round-trip a block through JSON and check the hash is preserved."""
    # BUG FIX: datetime.utcnow() is deprecated, and utcnow().timestamp()
    # misinterprets the naive UTC result as local time; an aware UTC
    # datetime yields the correct epoch timestamp.
    created = datetime.datetime.now(datetime.timezone.utc).timestamp()
    a = Block(0, 1, created)
    a.add_transaction(Transaction(0, 1, 2))
    a.add_transaction(Transaction(0, 1, 2))
    b = Block.from_json(a.to_json())
    self.assertEqual(a.get_hash(), b.get_hash())
def __init__(self, pmax, nets_size, nodes_block_id):
    """Initialise the two-way partition state.

    pmax            -- capacity bound passed to each Block partition
    nets_size       -- number of nets to track distributions for
    nodes_block_id  -- initial block assignment, one entry per node
    """
    self.iteration = 1
    self.__blocks = [Block(pmax), Block(pmax)]
    # BUG FIX: [[0, 0]] * nets_size created nets_size references to ONE
    # shared inner list, so updating one net's distribution mutated all of
    # them; each net needs its own [left, right] counter pair.
    self.__nets_distribution = [[0, 0] for _ in range(nets_size)]
    n_nodes = len(nodes_block_id)
    self.__nodes_locked = [False] * n_nodes
    self.__nodes_gain = [0] * n_nodes
    self.__nodes_block_id = nodes_block_id
    self.__best_partition = None
    self.cutsize = None
    self.prev_mincut = None
    self.mincut = None
def test_walk_transactions(self):
    """get_transactions() yields every added transaction, and only Transactions."""
    blk = Block(0, 0, 0)
    # after each add, the walk must report the new total
    for expected_count in (1, 2):
        blk.add_transaction(Transaction(0, 1, 2))
        self.assertEqual(len(list(blk.get_transactions())), expected_count)
    for txn in blk.get_transactions():
        self.assertIsInstance(txn, Transaction)
def __init__(self):
    """Load every persisted block from the database into self.blocks."""
    super().__init__()
    # BUG FIX: self.blocks was re-initialised to [] INSIDE the loop, so
    # only the last database record survived; initialise it once up front.
    self.blocks = []
    for rec in self.db:
        self.blocks.append(Block(rec['index'], rec['timestamp'], rec['hash'],
                                 rec['previousHash'], rec['data']))
def create_block():
    """Add and broadcast the given block.

    Returns HTTP 400 if the block is considered invalid.
    """
    try:
        # retrieve block from request body
        jso = request.get_data(as_text=True)
        b = Block.from_json(jso)
        # add block to local blockchain
        success = app.p2p.bc.discard_block(b)
        if success:
            # broadcast block to p2p network
            app.p2p.broadcast_block(b)
            logger.debug(f"block {b.index} added")
            return Response(b.to_json(), status=HTTP_CREATED)
        logger.debug("failed to add block (discard)")
        raise BadRequest()
    except BadRequest:
        raise
    # BUG FIX: was `except BaseException`, which also swallows
    # KeyboardInterrupt/SystemExit; Exception is the correct boundary catch.
    # Chaining with `from e` preserves the original traceback for debugging.
    except Exception as e:
        logger.debug(e)
        raise BadRequest() from e
def output(base_key_full_path):
    """Performs the clean up of AltFS applicable values from the Registry"""
    # NOTE(review): despite the docstring, the visible body only *prints*
    # the AltFS-applicable values and their decoded blocks; no deletion
    # happens here — confirm the intended behaviour against callers.
    # (Python 2 code: print statements, _winreg module.)
    hive, base_key_path = split_key_path_to_hive_and_path(base_key_full_path)
    with _winreg.OpenKey(HIVES[hive], base_key_path,
                         _winreg.KEY_SET_VALUE) as base_key:
        buckets_names = get_sub_keys(base_key)
        for bucket_name in buckets_names:
            print "[key] %s" % bucket_name
            altfs_applicable_values = []
            # first pass: collect the value names that belong to AltFS
            with get_bucket_key(HIVES[hive],
                                "%s\\%s" % (base_key_path, bucket_name)) as key:
                for value_name in get_sub_values(key):
                    if is_value_name_applicable(buckets_names, value_name):
                        altfs_applicable_values.append(value_name)
            # second pass: reopen with query access and dump each packed block
            with get_bucket_key(HIVES[hive],
                                "%s\\%s" % (base_key_path, bucket_name),
                                desired_access=_winreg.KEY_QUERY_VALUE) as key:
                for value_name in altfs_applicable_values:
                    print "\t[val] %s:" % value_name
                    data, _type = _winreg.QueryValueEx(key, value_name)
                    block = Block.generate_block_from_packed_str(data)
                    # print pprint.pformat(block.__dict__, indent=4)
                    # indent every line of the JSON dump to align under the value
                    print "\t " + \
                        json.dumps(
                            block.__dict__,
                            indent=4,
                            sort_keys=True
                        ).replace("\n", "\n\t ")
def test_post_transaction(self):
    """POST /transaction: unsigned is rejected (400), signed is accepted (201)."""
    self.prepare_app()
    # prepare a valid transaction
    wallet = Wallet()
    wallet.create_keys()
    coinbase = Transaction("0", wallet.get_address(), 1)
    wallet.sign_transaction(coinbase)
    client = app.test_client()
    response = client.post("/block/new", data=coinbase.to_json())
    self.assertEqual(response.status_code, 201)
    # mine the returned candidate block and submit it
    blk = Block.from_json(response.get_data())
    mine(blk)
    response = client.post("/block", data=blk.to_json())
    self.assertEqual(response.status_code, 201)
    spend = Transaction(wallet.get_address(), 1, 0.5)
    # test without signature
    response = client.post("/transaction", data=spend.to_json())
    self.assertEqual(response.status_code, 400)
    # test with signature
    wallet.sign_transaction(spend)
    response = client.post("/transaction", data=spend.to_json())
    self.assertEqual(response.status_code, 201)
def create_chain_from_dump(chain_dump):
    """Rebuild a Blockchain from a list of serialised block dicts.

    Raises Exception when a non-genesis block fails verification.
    """
    bc = Blockchain()
    for position, payload in enumerate(chain_dump):
        block = Block(transactions=payload["transactions"],
                      timestamp=payload["timestamp"],
                      previous_hash=payload["previous_hash"])
        block.block_hash = payload["block_hash"]
        if position == 0:
            # the block is a genesis block, no verification needed
            bc.chain.append(block)  # TODO are you sure this makes sense?
        else:
            # the dumped hash doubles as the proof-of-work for verification
            proof = payload["block_hash"]
            if not bc.add_block(block, proof):
                raise Exception("The chain dump is tampered!")
    return bc
def new_block(self, proof, previous_hash=None):
    """Create a block from the pending transactions, append it, and return it.

    When previous_hash is falsy, the hash of the last block on the chain
    is used instead.
    """
    prev = previous_hash or hash_block(self.last_block)
    block = Block(index=len(self.block_chain) + 1,
                  proof=proof,
                  previous_hash=prev,
                  transactions=self.current_transactions)
    self.block_chain.append(block)
    # the pending pool has been consumed by this block
    self.current_transactions = []
    return block
def get_next_block(previous_block):
    """Build the successor of *previous_block* with a fresh timestamp."""
    successor = Block()
    successor.index = previous_block.index + 1
    successor.timestamp = datetime.now()
    successor.data = "Block Number: " + str(successor.index)
    # NOTE(review): the new block's own hash is taken from the *previous*
    # block's generate_hash(), not computed from the new block's contents —
    # confirm this chaining rule is intentional.
    successor.hash = previous_block.generate_hash()
    successor.previous_hash = previous_block.hash
    return successor
def add_block():
    """Flask endpoint: append a new block built from the ?data= query arg."""
    data = request.args.get("data")
    latest = block_chain.getlatestBlock()
    index = latest.index + 1
    time_stamp = str(datetime.datetime.now())
    previous_hash = latest.hash
    block_hash = calculateHash(index, previous_hash, time_stamp, data)
    block_chain.addBlock(Block(index, time_stamp, block_hash,
                               previous_hash, data))
    return jsonify({"status": "Success"})
def do_block_raw(self, arg):
    """Send raw block from JSON"""
    raw = input("enter block JSON : ")
    try:
        block = Block.from_json(raw)
    except ValueError:
        print("Invalid block JSON")
        return
    # report the push result to the operator
    print("OK" if self.api.push_block(block) else "KO")
def verify_and_add_block():
    """Endpoint to add a block mined by someone else to the node's chain.

    The node first verifies the block and then adds it to the chain.
    """
    # BUG FIX: print("msg", sys.stdout) passed the stream object as a second
    # *value* to print (it appeared in the output); the stream must be passed
    # as the keyword argument file=sys.stdout.
    print("New block received from network...", file=sys.stdout)
    block_data = request.get_json(force=True)
    print(block_data, file=sys.stdout)
    block = Block(transactions=block_data["transactions"],
                  timestamp=block_data["timestamp"],
                  previous_hash=block_data["previous_hash"])
    # TODO why are we generating the block hash once and then redefining it? Stupid.
    block.block_hash = block_data["block_hash"]
    proof = block_data['block_hash']
    added = blockchain.add_block(block, proof)
    if not added:
        print("block discarded by node", file=sys.stdout)
        return "The block was discarded by the node", 400
    print("block added to chain", file=sys.stdout)
    return "Block added to the chain", 201
def do_block_raw(args):
    """Send raw block from JSON input"""
    # the raw block JSON arrives on standard input
    try:
        block = Block.from_json(sys.stdin.read())
    except ValueError:
        print("Invalid block JSON")
        return
    if args.api.push_block(block):
        print("OK")
    else:
        print("KO")
def _generate_descriptor_block(self):
    """
    Returns a Block instance of type TYPE_DESCRIPTOR. The current
    descriptor object is saved to it.
    Note: The next block ID field is redundant so it's given a constant 1.
    """
    serialized = self._descriptor.serialize()
    return Block(block_id=0,
                 block_type=Block.TYPE_DESCRIPTOR,
                 data_length=len(serialized),
                 next_block_id=1,
                 data=self._descriptor.__dict__)
def _generate_data_termination_block(self, data="", block_id=None):
    """
    Returns a Block instance to be used as the last data block of a file.
    It closes the chain of data blocks by pointing to the superblock as
    next block (next_block_id=0).
    """
    if block_id is None:
        # no explicit id supplied — allocate the next free one
        block_id = self._get_next_available_block_id()
    return Block(block_id=block_id,
                 block_type=Block.TYPE_DATA,
                 data_length=len(data),
                 next_block_id=0,
                 data=data)
def __init__(self, vocab_size, emb_dim, pos_dim, n_blocks, rnn, rnn_dim,
             n_rnn_layers, rnn_dropout, h_dim, n_heads, n_sub_layers_first,
             n_sub_layers_second, device, dropout=0.1, emb_dropout=0.5,
             self_loop=True, emb_matrix=None, topn=5):
    """Encoder stack: embeddings, optional BiLSTM, then n_blocks Block layers."""
    super().__init__()
    self.embedding = Embeddings(vocab_size, emb_dim, pos_dim, device,
                                emb_dropout, emb_matrix, topn)
    self.in_dim = emb_dim + pos_dim
    self.rnn_dim = rnn_dim
    self.n_rnn_layers = n_rnn_layers
    self.rnn_dropout = nn.Dropout(rnn_dropout)
    self.device = device
    self.rnn = rnn
    self.n_blocks = n_blocks
    if rnn:
        self.fc_rnn = nn.Linear(self.in_dim, rnn_dim)
        # NOTE(review): attribute name "rnn_eoncder" is misspelled
        # ("encoder"), but other methods may reference it, so it is
        # preserved exactly as-is.
        self.rnn_eoncder = nn.LSTM(rnn_dim, rnn_dim, n_rnn_layers,
                                   batch_first=True, dropout=rnn_dropout,
                                   bidirectional=True)
        # bidirectional LSTM doubles the feature width
        self.in_dim = self.rnn_dim * 2
    self.fc = nn.Linear(self.in_dim, h_dim)
    self.dropout = nn.Dropout(dropout)
    self.blocks = nn.ModuleList()
    self.attention_guide_layer = MultiHeadAttentionLayer(h_dim, n_heads,
                                                         dropout, device)
    # the first block runs without a guide; the rest share one attention layer
    for idx in range(self.n_blocks):
        guide = None if idx == 0 else self.attention_guide_layer
        self.blocks.append(Block(h_dim, n_heads, n_sub_layers_first,
                                 n_sub_layers_second, device, guide,
                                 dropout, self_loop))
    self.aggregate_W = nn.Linear(2 * self.n_blocks * h_dim, h_dim)
def deserialize(self, data):
    """Populate self.objects from a decoded inventory message dict.

    Raises DeserializationError when the declared count does not match
    the number of entries.
    """
    declared = data[self.NB]
    entries = data[self.OBJECTS]
    if declared != len(entries):
        logger.error("bad number of objects")
        raise DeserializationError("bad number of objects")
    self.objects = []
    for entry in entries:
        otype, payload = entry[0], entry[1]
        # re-serialise the raw dict so the typed from_json parsers can consume it
        if otype == MsgInv.TYPE_BLOCK:
            payload = Block.from_json(json.dumps(payload))
        elif otype == MsgInv.TYPE_TX:
            payload = Transaction.from_json(json.dumps(payload))
        self.objects.append((otype, payload))
def test_post_new_block(self):
    """POST /block/new with a signed transaction returns 201 and a Block."""
    self.prepare_app()
    # prepare a valid transaction
    w = Wallet()
    w.create_keys()
    tx = Transaction(0, w.get_address(), 1)
    w.sign_transaction(tx)
    client = app.test_client()
    response = client.post("/block/new", data=tx.to_json())
    self.assertEqual(response.status_code, 201)
    b = Block.from_json(response.get_data())
    # idiom fix: assertIsInstance replaces assertTrue(type(b) == Block) —
    # it reports the actual type on failure and follows unittest convention.
    self.assertIsInstance(b, Block)
def verify_and_add_block():
    """Endpoint to add a block mined by someone else to the node's chain.

    The node first verifies the block and then adds it to the chain.
    """
    payload = request.get_json(force=True)
    candidate = Block(transactions=payload["transactions"],
                      timestamp=payload["timestamp"],
                      previous_hash=payload["previous_hash"])
    # the transmitted hash doubles as the proof for verification
    if not blockchain.add_block(candidate, payload['hash']):
        return "The block was discarded by the node", 400
    return "Block added to the chain", 201
def _get_block(self, bucket_id, value_id):
    """
    Loads the block data from the desired value and returns it as a Block
    instance. Raises InternalStorageOperationException if the provider has
    failed to read.
    """
    try:
        packed = self._storage_provider.get_block(bucket_id, value_id)
        block = Block.generate_block_from_packed_str(packed)
    except Exception as e:
        logger.error("reading of block at (%s:%s) has failed: %s"
                     % (bucket_id, value_id, str(e)))
        raise InternalStorageOperationException(
            InternalStorageOperationException.OPERATION_READ, str(e))
    logger.debug("a block was read at (%s:%s):%s"
                 % (bucket_id, value_id, block.__dict__))
    return block
def _load_descriptor(self):
    """
    Loads the descriptor instance from the superblock. Creates an empty
    descriptor if such block does not exist, and writes it to storage.
    """
    self._descriptor = Descriptor()
    try:
        # try load the existing descriptor from superblock (value id 0)
        packed = self._storage_provider.get_block(self._first_bucket_id, 0)
        superblock = Block.generate_block_from_packed_str(packed)
        self._descriptor.__dict__ = superblock.data
    except BucketValueMissingException:
        # superblock does not exist
        logger.error("superblock does not exist. Creating a new empty one")
        # create an empty descriptor and write it as a superblock (id=0)
        self._write_block(self._first_bucket_id, 0,
                          self._generate_descriptor_block())
def test_network(network):
    """Run one visualised game with *network* driving the block; prints the score."""
    pygame.init()
    pygame.display.set_caption('Test')
    clock = pygame.time.Clock()
    surface = pygame.display.set_mode((surface_width, surface_height))
    bullets = [Bullet(surface) for _ in range(3)]
    score = 0
    font = pygame.font.SysFont("Arial", 30)
    block = Block(surface, (surface_width // 2, surface_height // 2),
                  bullets, network=network)
    while True:
        surface.fill((0, 0, 0))
        # allow the window to be closed at any time
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                sys.exit(0)
        block.predict()
        if block.is_alive:
            block.update()
            block.draw()
            score += 0.1
        for bullet in bullets:
            bullet.update()
            bullet.draw()
        if not block.is_alive:
            # game over: report the final score and stop the loop
            print(round(score))
            break
        label = font.render("Score: " + str(round(score)), True, (255, 255, 0))
        label_rect = label.get_rect()
        label_rect.center = (surface_width / 2, 100)
        surface.blit(label, label_rect)
        pygame.display.flip()
        clock.tick(200)
def initialize_database():
    """Create (or recreate) the block-chain database seeded with a genesis block."""
    print("Initializing database!")
    db = Base('block_chain.pdl')
    if db.exists():
        print("Delete block chain database")
        # BUG FIX: `db.delete` only referenced the method without calling it,
        # so the old database was never actually deleted.
        db.delete()
    db.create('index', 'timestamp', 'hash', 'previousHash', 'data',
              mode="override")
    genesis_data = "It all begins here!"
    timestamp = datetime.now()
    index = 0
    # renamed from `hash` to avoid shadowing the builtin
    genesis_hash = calculateHash(index, "", timestamp, genesis_data)
    genesis_block = Block(index, str(timestamp), genesis_hash, None,
                          genesis_data)
    db.insert(genesis_block.index, genesis_block.timestamp,
              genesis_block.hash, genesis_block.previousHash,
              genesis_block.data)
    db.commit()
def _create_data_blocks(self, data, terminating_at=None):
    """
    Writes a chain of data blocks to hold the given data.
    Optional terminating_at parameter defines the next_block_id of the
    last data block in the chain. If omitted, the chain ends at the
    superblock.

    Returns the list of block ids that were written (empty for empty data).
    """
    if len(data) == 0:
        return []
    # split the payload into max_block_size chunks, one per data block
    chunks = list(split_string_by_max_size(data, self.max_block_size))
    new_block_ids = self._get_next_available_block_id(count=len(chunks))
    # the id allocator returns a bare int when count == 1; normalise to a list
    if isinstance(new_block_ids, int):
        new_block_ids = [new_block_ids]
    # NOTE(review): truthiness test — a terminating_at of 0 (the superblock
    # id) is treated the same as None here; confirm that is intended.
    if terminating_at:
        new_block_ids.append(terminating_at)
    else:
        # allocate one extra id for the termination block, excluding the
        # ids already claimed for the data chunks
        new_block_ids.append(
            self._get_next_available_block_id(count=len(chunks),
                                              blacklist=new_block_ids))
    chunk = ""
    # each block points at the id allocated for the NEXT chunk in the chain
    for chunk_id, chunk in zip(range(len(chunks)), chunks):
        new_block = Block(block_id=new_block_ids[chunk_id],
                          block_type=Block.TYPE_DATA,
                          data_length=len(chunk),
                          next_block_id=new_block_ids[chunk_id + 1],
                          data=chunk)
        # bucket choice is derived from the chunk's bit sum
        bucket_id = calculate_bits_sum(chunk) % self._buckets_count
        self._write_block(bucket_id, None, new_block)
    if not terminating_at:
        # close the chain with a termination block under the last allocated id
        new_block = self._generate_data_termination_block(
            block_id=new_block_ids[-1])
        bucket_id = calculate_bits_sum(chunk) % self._buckets_count
        self._write_block(bucket_id, None, new_block)
    return new_block_ids
def test_import_transactions(self):
    """import_transactions accepts signed transactions and rejects unsigned ones."""
    blk = Block(0, 0)
    wallet = Wallet()
    wallet.create_keys()
    signed = Transaction(wallet.get_address(), 1, 1)
    wallet.sign_transaction(signed)
    blk.add_transaction(signed)
    pool = TransactionPool()
    self.assertTrue(pool.import_transactions(blk))
    self.assertEqual(len(pool), 1)
    pulled = pool.pull_transaction()
    self.assertEqual(signed.get_hash(), pulled.get_hash())
    # Importing unsigned transactions returns False
    pool = TransactionPool()
    unsigned = Transaction(0, 1, 1)
    blk.add_transaction(unsigned)
    self.assertFalse(pool.import_transactions(blk))
def initialize_genesis_block():
    """Return the chain's first block: index 0, fixed payload, previous hash "0"."""
    # NOTE(review): "Genesis Bloc" looks like a typo for "Genesis Block",
    # but the string is block data (it feeds the genesis hash), so it is
    # preserved byte-for-byte.
    genesis = Block(0, datetime.now(), "Genesis Bloc", "0")
    return genesis