def get_by_addr(self, address):
    """
    Lookup a set of notifications by address

    Args:
        address (UInt160 or str): hash of address for notifications

    Returns:
        list: a list of notifications
    """
    script_hash = address
    # a 34-character string is a base58 address; convert it to a script hash
    if isinstance(address, str) and len(address) == 34:
        script_hash = Helper.AddrStrToScriptHash(address)

    if not isinstance(script_hash, UInt160):
        raise Exception("Incorrect address format")

    snapshot = self.db.getPrefixedDB(
        NotificationPrefix.PREFIX_ADDR).createSnapshot()

    events = []
    props = DBProperties(prefix=bytes(script_hash.Data), include_key=False)
    with snapshot.db.openIter(props) as iterator:
        for raw in iterator:
            # anything of 4 bytes or less cannot be a serialized event
            if len(raw) <= 4:
                continue
            try:
                events.append(SmartContractEvent.FromByteArray(raw))
            except Exception as e:
                logger.error("could not parse event: %s %s" % (e, raw))
    return events
def get_by_contract(self, contract_hash):
    """
    Look up a set of notifications by the contract they are associated with

    Args:
        contract_hash (UInt160 or str): hash of contract for notifications to be retreived

    Returns:
        list: a list of notifications
    """
    script_hash = contract_hash
    # a 40-character string is a hex-encoded script hash; parse it
    if isinstance(contract_hash, str) and len(contract_hash) == 40:
        script_hash = UInt160.ParseString(contract_hash)

    if not isinstance(script_hash, UInt160):
        raise Exception("Incorrect address format")

    snapshot = self.db.getPrefixedDB(
        NotificationPrefix.PREFIX_CONTRACT).createSnapshot()

    events = []
    props = DBProperties(prefix=bytes(script_hash.Data), include_key=False)
    with snapshot.db.openIter(props) as iterator:
        for raw in iterator:
            # anything of 4 bytes or less cannot be a serialized event
            if len(raw) <= 4:
                continue
            try:
                events.append(SmartContractEvent.FromByteArray(raw))
            except Exception as e:
                logger.error("could not parse event: %s %s" % (e, raw))
    return events
def SearchAssetState(self, query):
    """
    Search stored asset states by name, issuer address or admin address.

    Args:
        query (str): search term; the aliases "neo", "gas" and "neogas"
            are translated to the registered on-chain asset names
            "AntShare" / "AntCoin"

    Returns:
        list: matching asset state objects
    """
    res = []
    snapshot = self._db.createSnapshot()
    keys = []
    # collect all asset keys first so the db iterator is closed before we
    # start resolving entries through the snapshot
    with self._db.openIter(
            DBProperties(DBPrefix.ST_Asset, include_value=False)) as it:
        for key in it:
            keys.append(key[1:])  # remove prefix byte

    # map the common aliases to the actual registered asset names
    if query.lower() == "neo":
        query = "AntShare"

    if query.lower() in {"gas", "neogas"}:
        query = "AntCoin"

    for item in keys:
        asset = snapshot.Assets.TryGet(item)
        if asset is None:
            # TryGet can miss (key seen by the raw iterator but not
            # resolvable in the snapshot); skip it instead of crashing
            # the whole search with an AttributeError
            continue
        if query in asset.Name.decode('utf-8'):
            res.append(asset)
        elif query in Crypto.ToAddress(asset.Issuer):
            res.append(asset)
        elif query in Crypto.ToAddress(asset.Admin):
            res.append(asset)
    return res
def SearchContracts(self, query):
    """
    Search stored contract states whose name, author, description or
    email contains the query string (case-insensitive).

    Args:
        query (str): search term

    Returns:
        list: matching contract state objects
    """
    matches = []
    snapshot = self._db.createSnapshot()

    contract_keys = []
    with self._db.openIter(
            DBProperties(DBPrefix.ST_Contract, include_value=False)) as it:
        for raw_key in it:
            contract_keys.append(raw_key[1:])  # remove prefix byte

    needle = query.casefold()
    for raw_key in contract_keys:
        contract = snapshot.Contracts.TryGet(raw_key)
        try:
            # short-circuits: later fields are only decoded if needed
            searchable = (contract.Name, contract.Author,
                          contract.Description, contract.Email)
            if any(needle in field.decode('utf-8').casefold()
                   for field in searchable):
                matches.append(contract)
        except Exception as e:
            logger.info("Could not query contract: %s " % e)
    return matches
def ShowAllAssets(self):
    """Return the raw storage keys of all stored assets (prefix byte stripped)."""
    with self._db.openIter(
            DBProperties(DBPrefix.ST_Asset, include_value=False)) as it:
        # strip the 1-byte storage prefix from every key
        return [raw_key[1:] for raw_key in it]
def ShowAllContracts(self):
    """Return the raw storage keys of all stored contracts (prefix byte stripped)."""
    with self._db.openIter(
            DBProperties(DBPrefix.ST_Contract, include_value=False)) as it:
        # strip the 1-byte storage prefix from every key
        return [raw_key[1:] for raw_key in it]
def get_tokens(self):
    """
    Looks up all tokens

    Returns:
        list: A list of smart contract events with contracts that are NEP5 Tokens
    """
    snapshot = self.db.getPrefixedDB(
        NotificationPrefix.PREFIX_TOKEN).createSnapshot()
    with snapshot.db.openIter(DBProperties(include_key=False)) as iterator:
        return [SmartContractEvent.FromByteArray(raw) for raw in iterator]
def FindInternal(self, key_prefix):
    """
    Yield (key, storage item) pairs whose keys match the given prefix.

    Args:
        key_prefix (bytes or str): hex-encoded prefix when possible,
            otherwise used as raw bytes

    Yields:
        tuple: (key without the 1-byte storage prefix, deserialized item)
    """
    try:
        suffix = bytearray(binascii.unhexlify(key_prefix))
    except binascii.Error:
        # not valid hex: treat the prefix as raw bytes
        suffix = bytearray(key_prefix)
    full_prefix = self.prefix + suffix

    found = {}
    with self.db.openIter(DBProperties(full_prefix, include_value=True)) as it:
        for raw_key, raw_value in it:
            # we want the storage item, not the raw bytes; also skip the
            # 1 byte storage prefix on the key
            found[raw_key[1:]] = self.ClassRef.DeserializeFromDB(
                binascii.unhexlify(raw_value))

    # yield only after the with-block so the db iterator is guaranteed closed
    yield from found.items()
def get_by_block(self, block_number):
    """
    Look up notifications for a block

    Args:
        block_number (int): height of block to search for notifications

    Returns:
        list: a list of notifications
    """
    snapshot = self.db.getPrefixedDB(
        NotificationPrefix.PREFIX_BLOCK).createSnapshot()
    # block heights are stored as 4-byte little-endian key prefixes
    height_key = block_number.to_bytes(4, 'little')
    with snapshot.db.openIter(
            DBProperties(prefix=height_key, include_key=False)) as iterator:
        return [SmartContractEvent.FromByteArray(raw) for raw in iterator]
def test_iterator(self):
    """Exercise openIter with every combination of prefix/include_key/include_value."""
    # seed two key groups: 00001.* (3 entries) and 00002.* (4 entries)
    self._db.write(b'00001.x', b'x')
    self._db.write(b'00001.y', b'y')
    self._db.write(b'00001.z', b'z')
    self._db.write(b'00002.w', b'w')
    self._db.write(b'00002.x', b'x')
    self._db.write(b'00002.y', b'y')
    self._db.write(b'00002.z', b'z')

    from neo.Storage.Interface.DBProperties import DBProperties

    '''
    Has to be converted as leveldb returns a custom iterator object,
    rocksdb just uses lists/dicts. Should not matter, still tests the same.
    '''
    def make_compatible(obj, to):
        # normalize the backend-specific iterator into `to`
        # (list, or items view when `to` is dict)
        if not isinstance(obj, to):
            new_obj = to(obj)
            if isinstance(new_obj, dict):
                return new_obj.items()
            return new_obj
        return obj

    # values only, restricted to the 00001 prefix
    with self._db.openIter(
            DBProperties(prefix=b'00001', include_value=True,
                         include_key=False)) as iterator:
        iterator = make_compatible(iterator, list)
        self.assertEqual(iterator[0], b'x')
        self.assertEqual(iterator[1], b'y')
        self.assertEqual(iterator[2], b'z')
        # index past the end must raise
        with self.assertRaises(Exception) as context:
            self.assertEqual(iterator[3], b'z')
        self.assertTrue(
            'list index out of range' in str(context.exception))
        self.assertEqual(len(iterator), 3)
        self.assertIsInstance(iterator, list)

    # keys only, restricted to the 00002 prefix
    with self._db.openIter(
            DBProperties(prefix=b'00002', include_value=False,
                         include_key=True)) as iterator:
        iterator = make_compatible(iterator, list)
        self.assertEqual(iterator[0], b'00002.w')
        self.assertEqual(iterator[1], b'00002.x')
        self.assertEqual(iterator[2], b'00002.y')
        self.assertEqual(iterator[3], b'00002.z')
        # index past the end must raise
        with self.assertRaises(Exception) as context:
            self.assertEqual(iterator[4], b'XXX')
        self.assertTrue(
            'list index out of range' in str(context.exception))
        self.assertEqual(len(iterator), 4)
        self.assertIsInstance(iterator, list)

    # key/value pairs, restricted to the 00002 prefix
    with self._db.openIter(
            DBProperties(prefix=b'00002', include_value=True,
                         include_key=True)) as iterator:
        iterator = make_compatible(iterator, dict)
        self.assertEqual(dict(iterator).get(b'00002.w'), b'w')
        self.assertEqual(dict(iterator).get(b'00002.x'), b'x')
        self.assertEqual(dict(iterator).get(b'00002.y'), b'y')
        self.assertEqual(dict(iterator).get(b'00002.z'), b'z')
        # missing key returns None
        self.assertEqual(dict(iterator).get(b'00002.A'), None)
        self.assertEqual(len(iterator), 4)
        self.assertIsInstance(iterator, abc.ItemsView)

    # no prefix: all key/value pairs in the database
    with self._db.openIter(
            DBProperties(prefix=None, include_value=True,
                         include_key=True)) as iterator:
        iterator = make_compatible(iterator, dict)
        self.assertEqual(dict(iterator).get(b'00001.x'), b'x')
        self.assertEqual(dict(iterator).get(b'00001.y'), b'y')
        self.assertEqual(dict(iterator).get(b'00001.z'), b'z')
        self.assertEqual(dict(iterator).get(b'00002.w'), b'w')
        self.assertEqual(dict(iterator).get(b'00002.x'), b'x')
        self.assertEqual(dict(iterator).get(b'00002.y'), b'y')
        self.assertEqual(dict(iterator).get(b'00002.z'), b'z')
        self.assertEqual(dict(iterator).get(b'00002.A'), None)
        self.assertEqual(len(iterator), 7)
        self.assertIsInstance(iterator, abc.ItemsView)

    # no prefix: all keys
    with self._db.openIter(
            DBProperties(prefix=None, include_value=False,
                         include_key=True)) as iterator:
        iterator = make_compatible(iterator, list)
        self.assertEqual(iterator[0], b'00001.x')
        self.assertEqual(iterator[1], b'00001.y')
        self.assertEqual(iterator[2], b'00001.z')
        self.assertEqual(iterator[3], b'00002.w')
        self.assertEqual(iterator[4], b'00002.x')
        self.assertEqual(iterator[5], b'00002.y')
        self.assertEqual(iterator[6], b'00002.z')
        self.assertEqual(len(iterator), 7)
        self.assertIsInstance(iterator, list)

    # no prefix: all values
    with self._db.openIter(
            DBProperties(prefix=None, include_value=True,
                         include_key=False)) as iterator:
        iterator = make_compatible(iterator, list)
        self.assertEqual(iterator[0], b'x')
        self.assertEqual(iterator[1], b'y')
        self.assertEqual(iterator[2], b'z')
        self.assertEqual(iterator[3], b'w')
        self.assertEqual(iterator[4], b'x')
        self.assertEqual(iterator[5], b'y')
        self.assertEqual(iterator[6], b'z')
        self.assertEqual(len(iterator), 7)
        self.assertIsInstance(iterator, list)

    # requesting neither keys nor values must raise
    with self.assertRaises(Exception) as context:
        with self._db.openIter(
                DBProperties(prefix=None, include_value=False,
                             include_key=False)) as iterator:
            pass
    self.assertTrue(
        'Either key or value have to be true' in str(context.exception))
def cloneDatabaseStorage(self, clone_storage):
    """
    Copy every contract-storage entry from a snapshot of this database
    into clone_storage.

    Args:
        clone_storage: destination storage interface

    Returns:
        the populated clone_storage
    """
    snapshot = self.createSnapshot()
    props = DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)
    with snapshot.db.openIter(props) as iterator:
        for storage_key, storage_value in iterator:
            clone_storage.write(storage_key, storage_value)
    return clone_storage
def reset(self):
    """
    Delete all contract-storage entries from the database.

    Keys are collected first and deleted only after the iterator is
    closed: deleting from the backend while one of its iterators is still
    open is backend-dependent behavior (leveldb vs rocksdb) and can skip
    or invalidate entries.
    """
    with self._db.openIter(
            DBProperties(prefix=DBPrefix.ST_Storage,
                         include_value=False)) as it:
        keys_to_delete = list(it)

    for key in keys_to_delete:
        self._db.delete(key)
def __init__(self, db, skip_version_check=False, skip_header_check=False):
    """
    Initialize the blockchain over a storage backend.

    Args:
        db: storage interface used for all reads/writes
        skip_version_check (bool): when True, stamp the current schema
            version into the db instead of validating the stored one
        skip_header_check (bool): when True, skip rebuilding the in-memory
            header index from stored header/block data

    Raises:
        Exception: when the stored schema version differs from the current
            one and the user declines to erase and resync.
    """
    self._db = db
    # header index position 0 is always the genesis block hash
    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header.Hash.ToBytes())
    self.TXProcessed = 0

    version = self._db.get(DBPrefix.SYS_Version)

    if skip_version_check:
        # force the stored version to the current one and carry on
        self._db.write(DBPrefix.SYS_Version, self._sysversion)
        version = self._sysversion

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # the last 4 bytes of the stored value are the little-endian height
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')

        if not skip_header_check:
            ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
            current_header_height = int.from_bytes(ba[-4:], 'little')
            # the first 64 bytes hold the current header hash
            current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

            hashes = []
            try:
                with self._db.openIter(
                        DBProperties(DBPrefix.IX_HeaderHashList)) as it:
                    for key, value in it:
                        # each value is a serialized batch of header hashes
                        ms = StreamManager.GetStream(value)
                        reader = BinaryReader(ms)
                        hlist = reader.Read2000256List()
                        # key suffix is the little-endian batch start index
                        key = int.from_bytes(key[-4:], 'little')
                        hashes.append({'k': key, 'v': hlist})
                        StreamManager.ReleaseStream(ms)
            except Exception as e:
                logger.info("Could not get stored header hash list: %s " % e)

            if len(hashes):
                # replay the stored hash batches in index order; the genesis
                # hash is already at index 0 so it is not appended again
                hashes.sort(key=lambda x: x['k'])
                genstr = Blockchain.GenesisBlock().Hash.ToBytes()
                for hlist in hashes:
                    for hash in hlist['v']:
                        if hash != genstr:
                            self._header_index.append(hash)
                        self._stored_header_count += 1

            if self._stored_header_count == 0:
                logger.info(
                    "Current stored headers empty, re-creating from stored blocks..."
                )
                headers = []
                logger.info('Recreate headers')
                with self._db.openIter(DBProperties(
                        DBPrefix.DATA_Block)) as it:
                    for key, value in it:
                        # skip the 8 bytes stored ahead of the block data
                        dbhash = bytearray(value)[8:]
                        headers.append(
                            Header.FromTrimmedData(
                                binascii.unhexlify(dbhash), 0))

                headers.sort(key=lambda h: h.Index)
                for h in headers:
                    if h.Index > 0:
                        self._header_index.append(h.Hash.ToBytes())
                if len(headers):
                    self.OnAddHeader(headers[-1])

            elif current_header_height > self._stored_header_count:
                # walk back from the current header hash to the last indexed
                # header, then add the missing headers in forward order
                try:
                    hash = current_header_hash
                    targethash = self._header_index[-1]
                    newhashes = []
                    while hash != targethash:
                        header = self.GetHeader(hash)
                        newhashes.insert(0, header)
                        hash = header.PrevHash.ToBytes()
                    self.AddHeaders(newhashes)
                except Exception as e:
                    pass

    elif version is None:
        # fresh database: persist genesis and stamp the schema version
        wait_for(self.Persist(Blockchain.GenesisBlock()))
        self._db.write(DBPrefix.SYS_Version, self._sysversion)
    else:
        # schema mismatch: offer to wipe and resync, otherwise abort
        logger.error("\n\n")
        logger.warning("Database schema has changed from %s to %s.\n" % (version, self._sysversion))
        logger.warning(
            "You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain."
        )
        res = prompt(
            "Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> "
        )
        if res == 'continue':
            # erase every key in a single batch, then start from genesis
            with self._db.getBatch() as wb:
                with self._db.openIter(
                        DBProperties(include_value=False)) as it:
                    for key in it:
                        wb.delete(key)

            wait_for(self.Persist(Blockchain.GenesisBlock()))
            self._db.write(DBPrefix.SYS_Version, self._sysversion)
        else:
            raise Exception("Database schema changed")