def _local_checksum(self, filename):
    'Return the sha256 checksum of filename, uncompressing it first when the cache stores compressed files.'
    if self.compressed:
        # Cached files are stored compressed; checksum the uncompressed
        # payload so it is comparable to the caller's expected checksum.
        tmp_uncompressed_file = temp_file.make_temp_file()
        try:
            compressed_file.uncompress(filename, tmp_uncompressed_file)
            result = file_util.checksum('sha256', tmp_uncompressed_file)
        finally:
            # ROBUSTNESS: remove the temp file even if uncompress/checksum
            # raises; the original leaked it on error.
            file_util.remove(tmp_uncompressed_file)
    else:
        result = file_util.checksum('sha256', filename)
    # BUG FIX: the original computed result but never returned it, so
    # callers (e.g. get_url's cache validation) always compared against None.
    return result
def test_file_first_time(self):
    'A fresh db computes a file checksum once, then serves it from cache.'
    tmp_dir = self.make_temp_dir()
    db = file_checksum_db(tmp_dir)
    tmp_file = temp_file.make_temp_file(suffix = '.txt', content = 'this is foo\n')
    expected = file_util.checksum('sha256', tmp_file)
    self.assertEqual(0, db.count)
    # First lookup is a cache miss: the db computes and records it.
    self.assertEqual(expected, db.checksum('sha256', tmp_file))
    self.assertEqual(1, db.count)
    # Second lookup is a cache hit: the compute count is unchanged.
    self.assertEqual(expected, db.checksum('sha256', tmp_file))
    self.assertEqual(1, db.count)
def _save(self):
    'Write the properties to self.filename; return True if the file content changed.'
    old_checksum = None
    if path.exists(self.filename):
        old_checksum = file_util.checksum('sha256', self.filename)
    # Render to a temp file first so we can compare checksums before
    # touching the real file.
    tmp_file = temp_file.make_temp_file()
    self._properties.save(tmp_file, self._formatter)
    new_checksum = file_util.checksum('sha256', tmp_file)
    if new_checksum == old_checksum:
        # Content is identical; leave the existing file untouched.
        return False
    if self._backup and not file_util.is_empty(self.filename):
        file_util.backup(self.filename)
    file_util.copy(tmp_file, self.filename)
    return True
def _downlod_url(clazz, url, debug=False):
    '''Download the python.org package at url to a temp dir and verify its checksum.

    Returns the local path to the downloaded package file.
    Raises python_installer_error if the url does not exist, the expected
    checksum cannot be fetched, or the downloaded file's checksum mismatches.
    When debug is True the temp download dir is kept for inspection.
    '''
    if not url_util.exists(url):
        raise python_installer_error('No python.org package found: "{}"'.format(url))
    # Keep the temp dir around under --debug so a bad download can be inspected.
    tmp_dir = temp_file.make_temp_dir(suffix = '-python-download', delete = not debug)
    basename = path.basename(url)
    tmp_package = path.join(tmp_dir, basename)
    url_util.download_to_file(url, tmp_package)
    if debug:
        print('tmp python package download: {}'.format(tmp_package))
    expected_checksum = clazz._fetch_checksum(url)
    if not expected_checksum:
        raise python_installer_error('Failed to determine checksum for: {}'.format(url))
    # md5 because that is the digest python.org publishes for its packages
    # (see _fetch_checksum) — not used for security here, only integrity.
    actual_checksum = file_util.checksum('md5', tmp_package)
    if expected_checksum != actual_checksum:
        # BUG FIX: the format string hardcoded "filename=(unknown)" while the
        # filename kwarg below was passed but never used; restore the
        # {filename} placeholder so the message shows the actual file.
        msg = '''
CHECKSUM MISMATCH: url={url}
CHECKSUM MISMATCH: expected={expected}
CHECKSUM MISMATCH: actual={actual}
CHECKSUM MISMATCH: filename={filename}
CHECKSUM MISMATCH: run with --debug to keep and debug the download
'''.format(url = url, expected = expected_checksum, actual = actual_checksum, filename = tmp_package)
        raise python_installer_error(msg)
    return tmp_package
def filename(self):
    'Full path of the artifact tarball for this build.'
    # Key the artifact directory by a truncated checksum of the build script
    # so artifacts built by different scripts never collide.
    script_digest = file_util.checksum('sha256', self.build_script)[0:32]
    basename = '{}-{}-{}-{}.tar.gz'.format(self.name,
                                           self.version,
                                           self.system_name,
                                           self.system_version)
    return path.join(self.build_dir, 'artifacts', self.name, script_digest, basename)
def test_file_changed(self):
    'Appending to a file (changing its mtime) invalidates the cached checksum.'
    tmp_dir = self.make_temp_dir()
    db = file_checksum_db(tmp_dir)
    tmp_file = temp_file.make_temp_file(suffix = '.txt', content = 'this is foo\n')
    self.assertEqual(0, db.count)
    self.assertEqual(file_util.checksum('sha256', tmp_file), db.checksum('sha256', tmp_file))
    self.assertEqual(1, db.count)
    # Sleep so the append lands on a different mtime — presumably what the
    # db uses to detect staleness.
    time.sleep(0.100)
    with open(tmp_file, 'a') as fout:
        fout.write('changed')
        fout.flush()
    # The modification forces a recompute...
    self.assertEqual(file_util.checksum('sha256', tmp_file), db.checksum('sha256', tmp_file))
    self.assertEqual(2, db.count)
    # ...after which the new value is served from cache again.
    self.assertEqual(file_util.checksum('sha256', tmp_file), db.checksum('sha256', tmp_file))
    self.assertEqual(2, db.count)
def test_split_file_basic(self):
    '''Splitting an archive into chunks and unsplitting reproduces it exactly.

    Exercises chunk sizes from the whole file down to one fifth of it.
    '''
    NUM_ITEMS = 10
    CONTENT_SIZE = 1024 * 100
    items = []
    for i in range(0, NUM_ITEMS):
        arcname = 'item{}.txt'.format(i)
        items.append(temp_archive.item(arcname, content = self._make_content(CONTENT_SIZE)))
    tmp_archive = temp_archive.make_temp_archive(items, 'zip')
    expected_checksum = file_util.checksum('sha256', tmp_archive)
    # DRY FIX: the original repeated this stanza five times verbatim,
    # differing only in the divisor; loop over the divisors instead.
    for denominator in range(1, 6):
        chunk_size = int(math.floor(file_util.size(tmp_archive) / denominator))
        files = file_split.split_file(tmp_archive, chunk_size)
        unsplit_tmp_archive = self.make_temp_file()
        file_split.unsplit_files(unsplit_tmp_archive, files)
        self.assertEqual(expected_checksum,
                         file_util.checksum('sha256', unsplit_tmp_archive))
        file_util.remove(files)
def member_checksums(clazz, archive, members, debug=False):
    'Return a dict of checksums for the given members in archive.'
    members = object_util.listify(members)
    # Extract everything once; the temp dir is auto-deleted unless debugging.
    tmp_dir = archiver.extract_all_temp_dir(archive, delete = not debug)
    if debug:
        print('tmp_dir: {}'.format(tmp_dir))
    result = {}
    for member in members:
        assert member not in result
        member_path = path.join(tmp_dir, member)
        if not path.exists(member_path):
            raise IOError('member not found: {}'.format(member))
        if not path.isfile(member_path):
            raise IOError('member is not a file: {}'.format(member))
        result[member] = file_util.checksum('sha256', member_path)
    return result
def _install_one_requirements_file(self, requirements_file):
    'Install packages from a requirements file'
    new_checksum = file_util.checksum('sha256', requirements_file)
    checksum_file = self._requirements_checksum_file(requirements_file)
    if path.exists(checksum_file):
        # Skip the install entirely when the requirements file has not
        # changed since the last successful run.
        old_checksum = file_util.read(checksum_file, codec = 'utf-8').strip()
        if old_checksum == new_checksum:
            self._log.log_d(f'{requirements_file}: Old and new checksum are the same')
            return
        self._log.log_d(f'{requirements_file}: Checksum changed')
    rv = self.call_pip([ 'install', '-r', requirements_file ], raise_error = False)
    if rv.exit_code != 0:
        msg = 'Failed to install requirements: "{}"\n{}\n'.format(requirements_file, rv.stdout)
        self._log.log_w('install: {}'.format(msg))
        raise pip_error(msg)
    # Record the checksum only after a successful install, so a failed run
    # is retried next time.
    self._log.log_d(f'{requirements_file}: Saving new checksum {new_checksum} to {checksum_file}')
    file_util.save(checksum_file, content = new_checksum)
def __init__(self, archive, member):
    'Create a cache item for one member of an archive.'
    super(archive_member_cache_item, self).__init__()
    self._member = member
    # Canonicalize the archive path so equivalent spellings of the same
    # path share a cache entry.
    self._archive = path.abspath(path.normpath(archive))
    # Checksum of the whole archive, used to detect when it changes on disk.
    self._checksum = file_util.checksum('sha256', self._archive)
def file_checksum(self, filename):
    'Return the sha256 checksum of filename resolved via file_path().'
    resolved = self.file_path(filename)
    return file_util.checksum('sha256', resolved)
def get_url(self, url, checksum=None, cookies=None, debug=False, auth=None, uncompress=True):
    'Return the local filesystem path to the tarball with address and revision.'
    # Cache-or-download flow:
    #   1. If a checksum is given, a cached copy is only used when it matches;
    #      a mismatching cached copy is deleted and re-downloaded.
    #   2. With no checksum, any cached copy is trusted as-is.
    #   3. Downloads are verified (when a checksum is given) before being
    #      moved into the cache. Returns None on download or checksum failure.
    # The numbered "N result=" log lines tag each of the eight exit points.
    self.log.log_d('get_url: url=%s; checksum=%s; cookies=%s' % (url, checksum, cookies))
    local_cached_path = self._local_path_for_url(url)
    local_cached_path_rel = path.relpath(local_cached_path)
    self.log.log_d('get_url: local_cached_path=%s' % (local_cached_path_rel))
    if checksum:
        if path.exists(local_cached_path):
            if self._local_checksum(local_cached_path) == checksum:
                # Cached copy verified — serve it (uncompressing if requested).
                self.log.log_d('get_url: found in cache with good checksum. using: %s' % (local_cached_path_rel))
                result = self._uncompress_if_needed(local_cached_path, uncompress)
                self.log.log_d('get_url: 1 result={}'.format(result))
                return result
            else:
                # Cached copy is corrupt/stale — drop it and fall through to
                # a fresh download.
                self.log.log_w('get_url: found in cache with BAD checksum. removing: %s' % (local_cached_path_rel))
                file_util.remove(local_cached_path)
    else:
        if path.exists(local_cached_path):
            # No checksum to verify against — trust the cached copy.
            self.log.log_d('get_url: found in cache. using: %s' % (local_cached_path_rel))
            result = self._uncompress_if_needed(local_cached_path, uncompress)
            self.log.log_d('get_url: 2 result={}'.format(result))
            return result
    tmp = self._download_to_tmp_file(url, cookies=cookies, debug=debug, auth=auth)
    # NOTE(review): download_count is incremented even when tmp is falsy —
    # presumably it counts attempts, not successes; confirm.
    self.download_count += 1
    self.log.log_d('get_url: downloaded url to %s' % (tmp))
    if not tmp:
        self.log.log_d('get_url: failed to download: %s' % (url))
        self.log.log_d('get_url: 3 result={}'.format(None))
        return None
    if not checksum:
        # Nothing to verify: install the download into the cache and return.
        if self.compressed:
            # Cache stores a compressed copy; return the uncompressed tmp
            # file when the caller asked for uncompressed content.
            compressed_file.compress(tmp, local_cached_path)
            if uncompress:
                result = tmp
            else:
                result = local_cached_path
            self.log.log_d('get_url: 4 result={}'.format(result))
            return result
        else:
            file_util.rename(tmp, local_cached_path)
            self.log.log_d('get_url: 5 result={}'.format(local_cached_path))
            return local_cached_path
    actual_checksum = file_util.checksum('sha256', tmp)
    if actual_checksum == checksum:
        self.log.log_d('get_url: download succesful and checksum is good. using: %s' % (local_cached_path_rel))
        if self.compressed:
            # Same compressed-cache handling as the no-checksum path above.
            compressed_file.compress(tmp, local_cached_path)
            if uncompress:
                result = tmp
            else:
                result = local_cached_path
            self.log.log_d('get_url: 6 result={}'.format(result))
            return result
        else:
            file_util.rename(tmp, local_cached_path)
            self.log.log_d('get_url: 7 result={}'.format(local_cached_path))
            return local_cached_path
    else:
        # Download completed but the content is wrong — report and give up.
        # NOTE(review): the bad tmp file is not removed here; confirm whether
        # _download_to_tmp_file arranges its own cleanup.
        self.log.log_e('get_url: download worked but checksum was WRONG: {}'.format(url))
        self.log.log_e('get_url: cookies: %s' % (cookies))
        self.log.log_e('get_url: expected: %s' % (checksum))
        self.log.log_e('get_url: actual: %s' % (actual_checksum))
        #self.log.log_e('content:\n{}\n'.format(file_util.read(tmp, codec = 'utf8')))
        self.log.log_d('get_url: 8 result={}'.format(None))
        return None
def member_checksum(clazz, archive, member):
    'Return the sha256 checksum of a single member extracted from archive.'
    tmp_file = clazz.extract_member_to_temp_file(archive, member)
    try:
        return file_util.checksum('sha256', tmp_file)
    finally:
        # ROBUSTNESS: remove the extracted temp file even if checksum()
        # raises; the original leaked it on error.
        file_util.remove(tmp_file)