def verify(self, chunkname):
    """Return True iff the chunk stored under *chunkname* hashes back to that name.

    A missing chunk counts as a verification failure rather than an error.
    """
    try:
        data = self.get(chunkname)
    except FileNotFoundError:
        # chunk is not present at all -> cannot be valid
        return False
    return get_pynode_digest_int(data) == chunkname
def store_chunk_in_storage(self, chunkid, data):
    """Persist *data* in permanent storage under *chunkid*.

    :param chunkid: integer
    :param data: bytes
    :raises ValueError: when the digest of *data* is not *chunkid*
    """
    actual_digest = get_pynode_digest_int(data)
    if actual_digest != chunkid:
        raise ValueError("data does not match chunkid!")
    self.__storage.put(chunkid, data)
def setUp(self):
    # Deterministic 1 MiB pseudo-random payload (fixed seed keeps the
    # fixture reproducible between test runs).
    CHUNK_SIZE = 1024 * 1024
    random.seed(236823823)
    raw_bits = random.getrandbits(CHUNK_SIZE * 8)
    self.chunk_data = raw_bits.to_bytes(CHUNK_SIZE, byteorder="big")
    self.chunk_digest = get_pynode_digest_int(self.chunk_data)
    # fresh throw-away directory backing the storage under test
    self.test_dir = tempfile.mkdtemp()
    self.cs = ChunkStorage(self.test_dir, mode=0o0700)
def calculate_xor_distance(pastelid, chunkid):
    """Single point of truth for the XOR distance between a pastelid and a chunkid.

    PastelID arrives as a string.  There are several ways to map a string to
    an integer; what matters is picking one mapping and using it everywhere,
    which is why this helper exists.  `chunkid` is expected to be an integer.
    """
    pastelid_as_int = get_pynode_digest_int(pastelid.encode())
    return pastelid_as_int ^ chunkid
def __init__(self, nodenum, nodeid, mn_manager):
    self.__logger = initlogging('', __name__)
    self.__nodeid = nodeid
    self.__mn_manager = mn_manager

    # Helper lookup table used for alias generation and other nodes:
    # one digest per replication slot, derived from the slot index plus
    # the shared alias seed.
    self.__alias_digests = tuple(
        get_pynode_digest_int(
            slot.to_bytes(1, byteorder='big') + NetWorkSettings.ALIAS_SEED)
        for slot in range(NetWorkSettings.REPLICATION_FACTOR)
    )
async def fetch_single_chunk_via_rpc(chunkid):
    """Ask each known owner of *chunkid* for the chunk over RPC.

    Returns the chunk bytes from the first owner that supplies data whose
    digest matches *chunkid*; returns None (after logging) when no owner
    can provide a valid copy.
    """
    for owner in get_chunk_owners(chunkid):
        if owner.pastel_id == get_blockchain_connection().pastelid:
            # don't attempt to connect ourselves
            continue

        mn = owner.get_rpc_client()
        try:
            data = await mn.send_rpc_fetchchunk(chunkid)
        except RPCException as exc:
            tasks_logger.exception(
                "FETCHCHUNK RPC FAILED for node %s with exception %s" % (mn.server_ip, exc))
            continue
        except (ClientConnectorError, ServerTimeoutError) as clien_ex:
            tasks_logger.error('{} for {}'.format(str(clien_ex), mn.server_ip))
            continue
        except Exception as ex:
            tasks_logger.exception(
                "FETCHCHUNK RPC FAILED for node %s with exception %s" % (mn.server_ip, ex))
            continue

        if data is None:
            # chunk was not found on this owner
            tasks_logger.debug("MN %s returned None for fetchchunk %s" % (mn.server_ip, chunkid))
            continue

        # chunk received: verify that its digest matches the requested id
        # (note: here chunkid is a string, so compare against str(digest))
        digest = get_pynode_digest_int(data)
        if chunkid != str(digest):
            tasks_logger.info(
                "MN %s returned bad chunk for fetchchunk %s, mismatched digest: %s" % (
                    mn.server_ip, chunkid, digest))
            continue

        return data

    # nobody has this chunk
    tasks_logger.error("Unable to fetch chunk %s" % chunkid_to_hex(int(chunkid)))
async def fetch_single_chunk_via_rpc(chunkid):
    """Fetch *chunkid* from another owner and store it locally.

    Iterates over the other owners of the chunk, asks each one over RPC,
    verifies the digest of whatever comes back, and stores the first valid
    copy via the chunk manager.  If no owner supplies a valid chunk, the
    chunk manager is notified that the fetch failed.
    """
    # NOTE(review): this coroutine reads `self` but does not declare it as a
    # parameter — presumably it is (or was) a method; confirm the enclosing
    # binding before calling it standalone.
    found = False
    for owner in self.__aliasmanager.find_other_owners_for_chunk(chunkid):
        mn = self.__mn_manager.get(owner)
        try:
            data = await mn.send_rpc_fetchchunk(chunkid)
        except RPCException as exc:
            self.__logger.info(
                "FETCHCHUNK RPC FAILED for node %s with exception %s" % (owner, exc))
            continue

        if data is None:
            # chunk was not found on this owner
            self.__logger.info("MN %s returned None for fetchchunk %s" % (owner, chunkid))
            continue

        # verify that digest matches
        digest = get_pynode_digest_int(data)
        if chunkid != digest:
            self.__logger.info(
                "MN %s returned bad chunk for fetchchunk %s, mismatched digest: %s" % (
                    owner, chunkid, digest))
            continue

        # we have the chunk, store it!
        self.__chunkmanager.store_missing_chunk(chunkid, data)
        # BUGFIX: mark success before breaking — previously `found` was never
        # set, so the failure path below ran even after a successful store.
        found = True
        break

    if not found:
        # nobody has this chunk
        # TODO: fall back to reconstruct it from luby blocks
        self.__logger.error(
            "Unable to fetch chunk %s, luby reconstruction is not yet implemented!" % chunkid_to_hex(chunkid))
        self.__chunkmanager.failed_to_fetch_chunk(chunkid)
def store_chunk_in_temp_storage(self, chunkid, data):
    """Write *data* to temporary storage under *chunkid*.

    :raises ValueError: when the digest of *data* is not *chunkid*
    """
    if get_pynode_digest_int(data) != chunkid:
        raise ValueError("data does not match chunkid!")
    self.__tmpstorage.put(chunkid, data)