def _checksum_file(full_path):
    """Compute the checksum of a file's contents.

    @param full_path: Pathname of the file to checksum.

    @returns The sha1 hex checksum of the file's contents.
    """
    # Hash in fixed-size chunks rather than slurping the whole file into
    # memory at once, so arbitrarily large packages do not exhaust RAM.
    hasher = utils.hash('sha1')
    inputfile = open(full_path, 'rb')
    try:
        while True:
            data = inputfile.read(65536)
            if not data:
                break
            hasher.update(data)
    finally:
        inputfile.close()
    return hasher.hexdigest()
def compute_hash(base, hashes):
    """Compute a hash given the base string and hashes for each patch.

    @param base: A string representing the kernel base.
    @param hashes: A list of hashes, where each hash is associated with a
            patch of this kernel.

    @return A string representing the computed hash.
    """
    # The key is the base followed by each patch hash, comma-separated.
    components = [base]
    components.extend(hashes)
    return utils.hash('md5', ','.join(components)).hexdigest()
def test_get_kernel_ignores_out_of_sequence_patches(self):
    """Tests that get_kernel skips patches whose numbers are not in sequence."""
    line = version_1.status_line(0, "GOOD", "subdir", "testname",
                                 "reason text",
                                 {"kernel": "2.6.24-rc40",
                                  "patch0": "first_patch 0 0",
                                  "patch2": "another_patch 0 0"})
    kern = line.get_kernel()
    # patch2 has no preceding patch1, so only patch0 should be kept and
    # the hash should cover the base plus the single in-sequence patch.
    kernel_hash = utils.hash("md5", "2.6.24-rc40,0").hexdigest()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(kern.base, "2.6.24-rc40")
    self.assertEqual(kern.patches[0].spec, "first_patch")
    self.assertEqual(len(kern.patches), 1)
    self.assertEqual(kern.kernel_hash, kernel_hash)
def test_get_kernel_ignores_out_of_sequence_patches(self):
    """Tests that get_kernel drops patches that are out of sequence."""
    line = version_1.status_line(
        0, "GOOD", "subdir", "testname", "reason text", {
            "kernel": "2.6.24-rc40",
            "patch0": "first_patch 0 0",
            "patch2": "another_patch 0 0"
        })
    kern = line.get_kernel()
    # Only patch0 is in sequence (patch1 is missing), so the expected
    # hash covers the base and a single patch checksum.
    kernel_hash = utils.hash("md5", "2.6.24-rc40,0").hexdigest()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(kern.base, "2.6.24-rc40")
    self.assertEqual(kern.patches[0].spec, "first_patch")
    self.assertEqual(len(kern.patches), 1)
    self.assertEqual(kern.kernel_hash, kernel_hash)
def test_get_kernel_ignores_out_of_sequence_patches(self):
    """Tests that get_kernel ignores patches that are out of sequence."""
    line = version_1.status_line(0, 'GOOD', 'subdir', 'testname',
                                 'reason text',
                                 {'kernel': '2.6.24-rc40',
                                  'patch0': 'first_patch 0 0',
                                  'patch2': 'another_patch 0 0'})
    kern = line.get_kernel()
    # patch2 is skipped (patch1 missing), so the hash covers only
    # base + patch0's checksum.
    kernel_hash = utils.hash('md5', '2.6.24-rc40,0').hexdigest()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(kern.base, '2.6.24-rc40')
    self.assertEqual(kern.patches[0].spec, 'first_patch')
    self.assertEqual(len(kern.patches), 1)
    self.assertEqual(kern.kernel_hash, kernel_hash)
def test_get_kernel_returns_kernel_plus_patches(self):
    """Tests that get_kernel returns the appropriate info."""
    line = version_1.status_line(0, 'GOOD', 'subdir', 'testname',
                                 'reason text',
                                 {'kernel': '2.6.24-rc40',
                                  'patch0': 'first_patch 0 0',
                                  'patch1': 'another_patch 0 0'})
    kern = line.get_kernel()
    # Both patches are in sequence, so the hash covers the base plus
    # both patch checksums.
    kernel_hash = utils.hash('md5', '2.6.24-rc40,0,0').hexdigest()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(kern.base, '2.6.24-rc40')
    self.assertEqual(kern.patches[0].spec, 'first_patch')
    self.assertEqual(kern.patches[1].spec, 'another_patch')
    self.assertEqual(len(kern.patches), 2)
    self.assertEqual(kern.kernel_hash, kernel_hash)
def test_get_kernel_returns_kernel_plus_patches(self):
    """Tests that get_kernel returns the base kernel and all its patches."""
    line = version_1.status_line(
        0, "GOOD", "subdir", "testname", "reason text",
        {"kernel": "2.6.24-rc40",
         "patch0": "first_patch 0 0",
         "patch1": "another_patch 0 0"},
    )
    kern = line.get_kernel()
    # Both patches are consecutive, so the expected hash covers the
    # base plus both patch checksums.
    kernel_hash = utils.hash("md5", "2.6.24-rc40,0,0").hexdigest()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(kern.base, "2.6.24-rc40")
    self.assertEqual(kern.patches[0].spec, "first_patch")
    self.assertEqual(kern.patches[1].spec, "another_patch")
    self.assertEqual(len(kern.patches), 2)
    self.assertEqual(kern.kernel_hash, kernel_hash)
def hash_file(filename, size=None, method="md5"):
    """
    Calculate the hash of filename.

    If size is not None, limit to first size bytes.
    Throw exception if something is wrong with filename.
    Can be also implemented with bash one-liner (assuming size%1024==0):
    dd if=filename bs=1024 count=size/1024 | sha1sum -

    @param filename: Path of the file that will have its hash calculated.
    @param size: If not None, hash only the first size bytes of the file.
    @param method: Method used to calculate the hash. Supported methods:
            * md5
            * sha1
    @returns: Hash of the file; if something goes wrong, return None.
    """
    chunksize = 4096
    fsize = os.path.getsize(filename)
    if not size or size > fsize:
        size = fsize

    try:
        # Named hash_obj to avoid shadowing the hash() builtin.
        hash_obj = utils.hash(method)
    except ValueError:
        logging.error("Unknown hash type %s, returning None", method)
        # The original fell through here and crashed with NameError on the
        # unbound local; honor the documented contract and return None.
        return None

    f = open(filename, 'rb')
    try:
        while size > 0:
            if chunksize > size:
                chunksize = size
            data = f.read(chunksize)
            if len(data) == 0:
                logging.debug("Nothing left to read but size=%d", size)
                break
            hash_obj.update(data)
            size -= len(data)
    finally:
        # Always close, even if read() raises; the original leaked the
        # file descriptor on errors.
        f.close()
    return hash_obj.hexdigest()
def _compute_hash(cls, **kwargs):
    """Return the sha1 of the key's sha1 digest concatenated with the value."""
    inner = utils.hash('sha1', kwargs['key']).hexdigest()
    outer = utils.hash('sha1', inner + kwargs['value'])
    return outer.hexdigest()
def _compute_hash(cls, **kwargs):
    """Return the sha1 hex digest of the 'contents' keyword argument."""
    contents = kwargs['contents']
    return utils.hash('sha1', contents).hexdigest()
def compute_hash(base, hashes):
    """Hash a kernel base string together with its patch hashes.

    @param base: String naming the kernel base.
    @param hashes: List of hash strings, one per patch of this kernel.

    @return The md5 hex digest of the comma-joined base and hashes.
    """
    return utils.hash('md5', ','.join([base] + hashes)).hexdigest()
def fetch(self, dest_dir):
    """
    Fetch the package from one of its URLs and save it in dest_dir.

    If the package already exists in dest_dir and the checksum
    matches, this code will not fetch it again.

    Sets the 'verified_package' attribute with the destination pathname.

    @param dest_dir - The destination directory to save the local file.
        If it does not exist it will be created.

    @returns A boolean indicating if the package is now in dest_dir.
    @raises FetchError - When something unexpected happens.
    """
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    local_path = os.path.join(dest_dir, self.local_filename)

    # If the package exists, verify its checksum and be happy if it is good.
    if os.path.exists(local_path):
        actual_hex_sum = _checksum_file(local_path)
        if self.hex_sum == actual_hex_sum:
            logging.info('Good checksum for existing %s package.',
                         self.name)
            self.verified_package = local_path
            return True
        logging.warning('Bad checksum for existing %s package. '
                        'Re-downloading', self.name)
        # Keep the bad file around for post-mortem rather than deleting it.
        os.rename(local_path, local_path + '.wrong-checksum')

    # Download the package from one of its urls, rejecting any if the
    # checksum does not match.
    for url in self.urls:
        logging.info('Fetching %s', url)
        try:
            url_file = urllib2.urlopen(url)
        except (urllib2.URLError, EnvironmentError):
            logging.warning('Could not fetch %s package from %s.',
                            self.name, url)
            continue
        try:
            data_length = int(url_file.info().get('Content-Length',
                                                  _MAX_PACKAGE_SIZE))
            if data_length <= 0 or data_length > _MAX_PACKAGE_SIZE:
                raise FetchError('%s from %s fails Content-Length %d '
                                 'sanity check.'
                                 % (self.name, url, data_length))
            checksum = utils.hash('sha1')
            total_read = 0
            output = open(local_path, 'wb')
            try:
                while total_read < data_length:
                    data = url_file.read(_READ_SIZE)
                    if not data:
                        break
                    output.write(data)
                    checksum.update(data)
                    total_read += len(data)
            finally:
                output.close()
        finally:
            # The original never closed the connection, leaking it for
            # every URL tried; always release it before moving on.
            url_file.close()
        if self.hex_sum != checksum.hexdigest():
            logging.warning('Bad checksum for %s fetched from %s.',
                            self.name, url)
            logging.warning('Got %s', checksum.hexdigest())
            logging.warning('Expected %s', self.hex_sum)
            # Discard the corrupt download and try the next URL.
            os.unlink(local_path)
            continue
        logging.info('Good checksum.')
        self.verified_package = local_path
        return True
    else:
        # Every URL failed or produced a bad checksum.
        return False
def fetch(self, dest_dir):
    """
    Fetch the package from one of its URLs and save it in dest_dir.

    If the package already exists in dest_dir and the checksum
    matches, this code will not fetch it again.

    Sets the 'verified_package' attribute with the destination pathname.

    @param dest_dir - The destination directory to save the local file.
        If it does not exist it will be created.

    @returns A boolean indicating if the package is now in dest_dir.
    @raises FetchError - When something unexpected happens.
    """
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    local_path = os.path.join(dest_dir, self.local_filename)
    # If the package exists, verify its checksum and be happy if it is good.
    if os.path.exists(local_path):
        actual_hex_sum = _checksum_file(local_path)
        if self.hex_sum == actual_hex_sum:
            logging.info('Good checksum for existing %s package.', self.name)
            self.verified_package = local_path
            return True
        # Keep the corrupt file for post-mortem instead of deleting it.
        logging.warning(
            'Bad checksum for existing %s package. '
            'Re-downloading', self.name)
        os.rename(local_path, local_path + '.wrong-checksum')
    # Download the package from one of its urls, rejecting any if the
    # checksum does not match.
    for url in self.urls:
        logging.info('Fetching %s', url)
        try:
            url_file = urllib2.urlopen(url)
        except (urllib2.URLError, EnvironmentError):
            # This URL is unreachable; fall through to the next one.
            logging.warning('Could not fetch %s package from %s.',
                            self.name, url)
            continue
        # NOTE(review): url_file is never closed on any path below, which
        # leaks the connection — consider wrapping in try/finally.
        data_length = int(url_file.info().get('Content-Length',
                                              _MAX_PACKAGE_SIZE))
        # Reject absurd sizes before downloading anything.
        if data_length <= 0 or data_length > _MAX_PACKAGE_SIZE:
            raise FetchError('%s from %s fails Content-Length %d '
                             'sanity check.' % (self.name, url, data_length))
        checksum = utils.hash('sha1')
        total_read = 0
        output = open(local_path, 'wb')
        try:
            # Stream to disk while hashing, up to the advertised length.
            while total_read < data_length:
                data = url_file.read(_READ_SIZE)
                if not data:
                    break
                output.write(data)
                checksum.update(data)
                total_read += len(data)
        finally:
            output.close()
        if self.hex_sum != checksum.hexdigest():
            logging.warning('Bad checksum for %s fetched from %s.',
                            self.name, url)
            logging.warning('Got %s', checksum.hexdigest())
            logging.warning('Expected %s', self.hex_sum)
            # Discard the corrupt download and try the next URL.
            os.unlink(local_path)
            continue
        logging.info('Good checksum.')
        self.verified_package = local_path
        return True
    else:
        # Loop exhausted without a successful, verified download.
        return False