def get_tree_hash(self):
    """
    Returns the tree hash of the archive

    .. note:: only works with a local archive
    """
    # This process takes some time for bigger files, please be patient
    # "The progress bar is moving but the remaining time is going up!"
    #   - CollegeHumor, Matrix runs on WinXP
    chunk_size = 1024 * 1024  # 1 MB chunks
    chunk_count = int(math.ceil(float(self.size) / float(chunk_size)))
    chunk_hashes = []
    self.file.seek(0)
    for _ in range(chunk_count):
        # Read and immediately hash the chunk portion, we just need
        # 32 bytes stored for each 1 MB chunk. Now that's reducing
        # the memory footprint!
        chunk_hashes.append(utils.sha256_digest(
            self.file.read(chunk_size)))
    return utils.get_tree_hash(chunk_hashes)
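
# The two `utils` helpers used above are defined elsewhere. For reference,
# the sketch below shows what they are assumed to do, following the standard
# SHA-256 tree-hash scheme (1 MB leaf digests combined pairwise, level by
# level, into a single root digest). The bodies are an illustrative
# assumption, not the actual `utils` module.

import hashlib


def sha256_digest(data):
    # Raw 32-byte SHA-256 digest of a byte string.
    return hashlib.sha256(data).digest()


def get_tree_hash(chunk_hashes):
    # Combine the 1 MB chunk digests pairwise until one root digest remains.
    if not chunk_hashes:
        # An empty archive still has a defined hash: SHA-256 of no bytes.
        return hashlib.sha256(b'').digest()
    hashes = list(chunk_hashes)
    while len(hashes) > 1:
        paired = []
        for i in range(0, len(hashes) - 1, 2):
            paired.append(hashlib.sha256(hashes[i] + hashes[i + 1]).digest())
        if len(hashes) % 2 == 1:
            # An odd trailing digest is carried up to the next level as-is.
            paired.append(hashes[-1])
        hashes = paired
    return hashes[0]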
def calculate_tree_hash(self, part=None):
    """
    Returns the tree hash of the archive: the entire file if no part
    number is given, or just that part if one is.

    .. note:: only works with a local archive

    :param part: The part number if hashing just one part
    :type part: int
    """
    # This process takes some time for bigger files, please be patient
    # "The progress bar is moving but the remaining time is going up!"
    #   - CollegeHumor, Matrix runs on WinXP
    chunk_hashes = []
    chunk_size = 1024 * 1024  # 1 MB chunks
    if part is not None:
        # Hash a single part: self.part_size(part) is the byte count of
        # that part, and self.partsize is assumed to be the uniform part
        # size, so part N starts at offset partsize * N.
        chunk_count = int(
            math.ceil(float(self.part_size(part)) / float(chunk_size)))
        self.file.seek(self.partsize * part)
    else:
        # Hash the whole archive from the beginning.
        chunk_count = int(math.ceil(float(self.size) / float(chunk_size)))
        self.file.seek(0)
    for _ in range(chunk_count):
        # Read and immediately hash the chunk portion, we just need
        # 32 bytes stored for each 1 MB chunk. Now that's reducing
        # the memory footprint!
        chunk_hashes.append(utils.sha256_digest(
            self.file.read(chunk_size)))
    return utils.get_tree_hash(chunk_hashes)
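
# calculate_tree_hash() relies on two part-related members defined elsewhere
# on the class: `self.partsize`, assumed to be the uniform size of each
# upload part, and `self.part_size(part)`, assumed to return the byte count
# of one specific part (only the final part may be shorter). A hypothetical
# helper matching that contract might look like this:


def part_size(self, part):
    # Hypothetical sketch, not the actual implementation: size in bytes of
    # the given zero-based part, assuming self.partsize is the uniform part
    # size and self.size is the total archive size.
    start = self.partsize * part
    if start >= self.size:
        raise ValueError("part %d is past the end of the archive" % part)
    return min(self.partsize, self.size - start)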