def test1(self):
    """Strict per-channel lookups return the highest release of each channel."""
    version_data = EasyAccessDict(self.version_data)
    expected = {
        "alpha": "4.4.2.0.5",
        "beta": "4.4.1.1.0",
        "stable": "4.4.3.2.0",
    }
    for channel, version in expected.items():
        assert get_highest_version("Acme", "mac", channel, version_data,
                                   strict=True) == version
def test1(self):
    """get_highest_version honours strict channel filtering."""
    lookup = EasyAccessDict(self.version_data)
    cases = [
        ('alpha', '4.4.2.0.5'),
        ('beta', '4.4.1.1.0'),
        ('stable', '4.4.3.2.0'),
    ]
    for channel, expected in cases:
        result = get_highest_version('Acme', 'mac', channel, lookup, strict=True)
        assert result == expected
def __init__(self, **kwargs):
    """Collect patch configuration from keyword arguments.

    Stores pass-through options, applies defaults, derives the manifest
    view and current Version, and fills in the installed archive's
    filename/hash from the manifest when not supplied by the caller.
    """
    # Plain pass-through options (None when absent).
    for option in ("name", "channel", "json_data", "latest_version",
                   "update_folder", "max_download_retries", "headers",
                   "downloader", "http_timeout", "current_filename",
                   "current_file_hash"):
        setattr(self, option, kwargs.get(option))

    # Options with non-None defaults.
    self.update_urls = kwargs.get("update_urls", [])
    self.verify = kwargs.get("verify", True)
    # Progress hooks to be called
    self.progress_hooks = kwargs.get("progress_hooks", [])
    # Used for testing.
    self.platform = kwargs.get("platform", _PLATFORM)

    # Derived state.
    self.star_access_update_data = EasyAccessDict(self.json_data)
    self.current_version = Version(kwargs.get("current_version"))

    # Accumulators used while patching.
    self.patch_data = []          # dicts with urls, filename & hash per patch
    self.patch_binary_data = []   # binary blobs of downloaded patch data
    self.og_binary = None         # original archive read into memory

    # Fall back to the version manifest for the installed archive's
    # filename/hash when the caller did not supply them.
    file_info = self._get_info(self.name, self.current_version, option="file")
    if self.current_filename is None:
        self.current_filename = file_info["filename"]
    if self.current_file_hash is None:
        self.current_file_hash = file_info["file_hash"]
def __init__(self, **kwargs):
    """Initialize patcher state from keyword arguments.

    Options default to None unless listed with another default below;
    the installed archive's filename/hash are looked up in the version
    manifest when the caller does not provide them.
    """
    # Option name -> default value (the dict is rebuilt per call, so the
    # mutable defaults are fresh each time).
    option_defaults = {
        'name': None,
        'json_data': None,
        'latest_version': None,
        'update_folder': None,
        'update_urls': [],
        'verify': True,
        'max_download_retries': None,
        'urllib3_headers': None,
        # Progress hooks to be called
        'progress_hooks': [],
        # Used for testing.
        'platform': _PLATFORM,
        'current_filename': None,
        'current_file_hash': None,
    }
    for option, default in option_defaults.items():
        setattr(self, option, kwargs.get(option, default))

    # Star-key view over the manifest plus the parsed installed version.
    self.star_access_update_data = EasyAccessDict(self.json_data)
    self.current_version = Version(kwargs.get('current_version'))

    # List of dicts with urls, filename & hash of each patch
    self.patch_data = []
    # List of binary blobs of patch data
    self.patch_binary_data = []
    # binary blob of original archive to patch
    self.og_binary = None

    # Manifest fallback for the installed archive's identity.
    file_info = self._get_info(self.name, self.current_version, option='file')
    if self.current_filename is None:
        self.current_filename = file_info['filename']
    if self.current_file_hash is None:
        self.current_file_hash = file_info['file_hash']
def _update_version_file(self, json_data, package_manifest):
    """Merge metadata for every scanned package into the version manifest.

    Each package is added under the updates section (name -> version ->
    platform) and its version is recorded in the 'latest' section keyed
    by name, release channel and platform.  Returns the mutated
    ``json_data``.
    """
    log.info('Adding package meta-data to version manifest')
    easy_dict = EasyAccessDict(json_data)
    for pkg in package_manifest:
        compat_info = self._manifest_to_version_file_compat(pkg)

        version_key = '{key}*{name}*{version}'.format(
            key=settings.UPDATES_KEY, name=pkg.name, version=pkg.version)
        known_version = easy_dict.get(version_key)
        log.debug('Package Info: %s', known_version)

        if known_version is None:
            # First time this name/version pair appears in the manifest.
            log.debug('Adding new version to file')
            json_data[settings.UPDATES_KEY][pkg.name][pkg.version] = {}
            platform_key = '{key}*{name}*{version}*{platform}'.format(
                key=settings.UPDATES_KEY, name=pkg.name,
                version=pkg.version, platform='platform')
            if easy_dict.get(platform_key) is None:
                by_version = json_data[settings.UPDATES_KEY][pkg.name]
                by_version[pkg.version][pkg.platform] = compat_info
        else:
            # Version already present; attach this platform's info to it.
            log.debug('Appending info data to version file')
            by_name = json_data[settings.UPDATES_KEY]
            by_name[pkg.name][pkg.version][pkg.platform] = compat_info

        # Record the newest version per release channel.
        json_data['latest'][pkg.name][pkg.channel][pkg.platform] = pkg.version
    return json_data
def _update_version_file(json_data, package_manifest):
    """Merge metadata for each scanned package into the version manifest.

    Packages are filed under the updates section (name -> version ->
    platform); the 'latest' section maps name/channel/platform to the
    package's version.  Returns the mutated ``json_data``.
    """
    log.info('Adding package meta-data to version manifest')
    easy_dict = EasyAccessDict(json_data)
    for pkg in package_manifest:
        info = PackageHandler._manifest_to_version_file_compat(pkg)

        version = easy_dict.get('{0}*{1}*{2}'.format(settings.UPDATES_KEY,
                                                     pkg.name, pkg.version))
        log.debug('Package Info: %s', version)

        if version is not None:
            # package already present, adding another version to it
            log.debug('Appending info data to version file')
            json_data[settings.UPDATES_KEY][pkg.name][pkg.version][pkg.platform] = info
        else:
            # First version carrying this package name.
            log.debug('Adding new version to file')
            json_data[settings.UPDATES_KEY][pkg.name][pkg.version] = {}
            platform = easy_dict.get('{0}*{1}*{2}*{3}'.format(
                settings.UPDATES_KEY, pkg.name, pkg.version, 'platform'))
            if platform is None:
                json_data[settings.UPDATES_KEY][pkg.name][pkg.version][pkg.platform] = info

        # Add each package to latest section separated by release channel
        json_data['latest'][pkg.name][pkg.channel][pkg.platform] = pkg.version
    return json_data
def __init__(self, **kwargs):
    """Gather patch configuration from keyword arguments.

    Missing filename/hash of the installed archive are resolved from the
    version manifest at the end.
    """
    opt = kwargs.get  # local alias; every option is optional

    # Caller-supplied configuration.
    self.name = opt('name')
    self.json_data = opt('json_data')
    self.latest_version = opt('latest_version')
    self.update_folder = opt('update_folder')
    self.update_urls = opt('update_urls', [])
    self.verify = opt('verify', True)
    self.max_download_retries = opt('max_download_retries')
    self.urllib3_headers = opt('urllib3_headers')
    # Progress hooks to be called
    self.progress_hooks = opt('progress_hooks', [])
    # Used for testing.
    self.platform = opt('platform', _PLATFORM)
    self.current_filename = opt('current_filename')
    self.current_file_hash = opt('current_file_hash')

    # Derived lookups.
    self.star_access_update_data = EasyAccessDict(self.json_data)
    self.current_version = Version(opt('current_version'))

    # Working state filled in while patching:
    # dicts with urls, filename & hash of each patch ...
    self.patch_data = []
    # ... binary blobs of downloaded patch data ...
    self.patch_binary_data = []
    # ... and the original archive read into memory.
    self.og_binary = None

    # Resolve installed-archive identity from the manifest if needed.
    file_info = self._get_info(self.name, self.current_version, option='file')
    if self.current_filename is None:
        self.current_filename = file_info['filename']
    if self.current_file_hash is None:
        self.current_file_hash = file_info['file_hash']
class Patcher(object):
    """Downloads, verifies, and patches binaries

    Downloads every patch between the installed version and the latest
    version, applies them in memory to the installed archive with
    bsdiff4, then writes the patched archive to the update folder.

    Kwargs:

        name (str): Name of binary to patch

        json_data (dict): Info dict with all package meta data

        current_version (str): Version number of currently installed binary

        latest_version (str): Newest version available

        update_folder (str): Path to update folder to place updated binary in

        update_urls (list): List of urls to use for file download

        verify (bool):

            True: Verify https connection

            False: Don't verify https connection

        max_download_retries (int): Number of times to retry a download

        urllib3_headers (dict): Headers to be used with http request
    """

    def __init__(self, **kwargs):
        """Collect configuration from keyword args; see class docstring."""
        self.name = kwargs.get('name')
        self.json_data = kwargs.get('json_data')
        # Star-delimited ("a*b*c") lookup view over the version manifest.
        self.star_access_update_data = EasyAccessDict(self.json_data)
        self.current_version = Version(kwargs.get('current_version'))
        self.latest_version = kwargs.get('latest_version')
        self.update_folder = kwargs.get('update_folder')
        self.update_urls = kwargs.get('update_urls', [])
        self.verify = kwargs.get('verify', True)
        self.max_download_retries = kwargs.get('max_download_retries')
        self.urllib3_headers = kwargs.get('urllib3_headers')
        # Progress hooks to be called
        self.progress_hooks = kwargs.get('progress_hooks', [])
        # List of dicts with urls, filename & hash of each patch
        self.patch_data = []
        # List of binary blobs of patch data
        self.patch_binary_data = []
        # binary blob of original archive to patch
        self.og_binary = None
        # Used for testing.
        self.platform = kwargs.get('platform', _PLATFORM)
        self.current_filename = kwargs.get('current_filename')
        self.current_file_hash = kwargs.get('current_file_hash')

        # Fall back to the version manifest for the installed archive's
        # filename/hash when the caller did not supply them.
        file_info = self._get_info(self.name, self.current_version,
                                   option='file')
        if self.current_filename is None:
            self.current_filename = file_info['filename']
        if self.current_file_hash is None:
            self.current_file_hash = file_info['file_hash']

    def start(self):
        """Starts patching process

        Returns True when the patched archive was written and verified;
        False on any failure (callers can then fall back to a full
        download).
        """
        log.debug('Starting patch updater...')
        # Check hash on installed binary to begin patching
        binary_check = self._verify_installed_binary()
        if not binary_check:
            log.debug('Binary check failed...')
            return False
        # Getting all required patch meta-data
        all_patches = self._get_patch_info()
        if all_patches is False:
            log.debug('Cannot find all patches...')
            return False
        # Download and verify patches in 1 go
        download_check = self._download_verify_patches()
        if download_check is False:
            log.debug('Patch check failed...')
            return False
        try:
            self._apply_patches_in_memory()
        except PatcherError:
            log.debug('Failed to apply patches in memory')
            return False
        else:
            try:
                self._write_update_to_disk()
            except PatcherError as err:
                log.debug(err, exc_info=True)
                return False
        # Looks like all is well
        return True

    def _verify_installed_binary(self):
        """Verify the installed archive's hash and read it into memory.

        On success the archive bytes are stored in ``self.og_binary``.
        Returns True on success, False otherwise.
        """
        # Verifies latest downloaded archive against known hash
        log.debug('Checking for current installed binary to patch')
        status = True
        with ChDir(self.update_folder):
            if not os.path.exists(self.current_filename):
                log.debug('Cannot find archive to patch')
                status = False
            else:
                installed_file_hash = get_package_hashes(self.current_filename)
                if self.current_file_hash != installed_file_hash:
                    log.debug('Binary hash mismatch')
                    status = False
                else:
                    # Read binary into memory to begin patching
                    try:
                        with open(self.current_filename, 'rb') as f:
                            self.og_binary = f.read()
                    except FileNotFoundError:
                        # Race: file vanished between the exists() check
                        # above and the open() call.
                        status = False
                        log.debug("Current archive missing")
                    except Exception as err:
                        status = False
                        log.debug(err, exc_info=True)
        if status:
            log.debug('Binary found and verified')
        return status

    # We will take all versions. Then append any version
    # that is greater then the current version to the list
    # of needed patches.
    def _get_patch_info(self):
        """Collect name/url/hash metadata for every required patch.

        Populates ``self.patch_data``.  Returns False when no patches are
        needed or metadata is missing; otherwise True (or the size
        comparison when all sizes are known).
        """
        # Taking the list of needed patches and extracting the
        # patch data from it. If any loop fails, will return False
        # and start full binary update.
        log.debug('Getting patch meta-data')
        required_patches = self._get_required_patches(self.name)

        if len(required_patches) == 0:
            log.debug('No patches to process')
            return False

        # If we can't get the file size for all patches & the latest
        # full update we fall back to the old patch update limit of 4
        # We will only patch update if the total size of all needed
        # patches are less than the size of a full update
        fall_back = False
        total_patch_size = 0

        # Loop through all required patches and get file name, hash
        # and file size.
        for p in required_patches:
            info = {}
            platform_key = '{}*{}*{}*{}'.format(settings.UPDATES_KEY,
                                                self.name, str(p),
                                                self.platform)
            platform_info = self.star_access_update_data.get(platform_key)

            try:
                info['patch_name'] = platform_info['patch_name']
                info['patch_urls'] = self.update_urls
                info['patch_hash'] = platform_info['patch_hash']
                patch_size = platform_info.get('patch_size')
                if patch_size is None:
                    # Since we are missing the patch size we cannot
                    # compare the total size of all patches to the size
                    # of a full update. Used for backwards compatibility
                    # before we added patch size to version manifest.
                    fall_back = True
                else:
                    try:
                        total_patch_size += int(patch_size)
                    except Exception as err:
                        log.debug(err, exc_info=True)
                        fall_back = True
                self.patch_data.append(info)
            except Exception as err:  # pragma: no cover
                # Missing some required patch data
                log.debug(err, exc_info=True)
                return False

        latest_info = self._get_info(self.name, self.latest_version,
                                     option='file')
        latest_file_size = latest_info.get('file_size')
        if latest_file_size is None:
            # Since we are missing the full update size we cannot
            # compare the total size of all patches to the full update.
            fall_back = True

        if fall_back is True:
            # Legacy heuristic: patch only when 4 or fewer are needed.
            if len(required_patches) > 4:
                return False
            else:
                return True
        else:
            return Patcher._calc_diff(total_patch_size, latest_file_size)

    @staticmethod
    def _calc_diff(patch_size, file_size):
        # True when downloading all patches is smaller than a full update.
        if patch_size < file_size:
            return True
        else:
            return False

    def _get_required_patches(self, name):
        """Return sorted Version objects newer than the installed version."""
        # Gathers patch name, hash & URL
        needed_patches = []
        try:
            # Get list of Version objects initialized with keys
            # from update manifest
            version_key = '{}*{}'.format(settings.UPDATES_KEY, name)
            # NOTE(review): this *calls* the EasyAccessDict instance while
            # every other lookup uses .get(...). If EasyAccessDict is not
            # callable this raises TypeError, which the KeyError handler
            # below would not catch — confirm EasyAccessDict's API.
            version_info = self.star_access_update_data(version_key)
            versions = map(Version, version_info.keys())
        except KeyError:  # pragma: no cover
            log.debug('No updates found in updates dict')
            # Will cause error to be thrown in _get_patch_info
            # which will cause patch update to return False
            versions = [1]

        # Only stable packages have patch info
        versions = [v for v in versions if v.channel == 'stable']

        log.debug('Getting required patches')
        for i in versions:
            if i > self.current_version:
                needed_patches.append(i)

        # Used to guarantee patches are only added once
        needed_patches = list(set(needed_patches))

        # Ensuring we apply patches in correct order
        return sorted(needed_patches)

    def _download_verify_patches(self):
        """Download and hash-verify every patch, reporting progress.

        Returns True when all patches downloaded and verified; False as
        soon as one fails, since patches must apply sequentially.
        """
        # Downloads & verifies all patches
        log.debug('Downloading patches')
        downloaded = 0
        percent = 0
        total = len(self.patch_data)
        temp_dir = tempfile.gettempdir()
        for p in self.patch_data:
            # Don't write temp files to cwd
            with ChDir(temp_dir):
                # NOTE(review): keyword 'urllb3_headers' is spelled without
                # the second "i" while the attribute is 'urllib3_headers' —
                # confirm this matches FileDownloader's signature.
                fd = FileDownloader(p['patch_name'], p['patch_urls'],
                                    hexdigest=p['patch_hash'],
                                    verify=self.verify,
                                    max_download_retries=self.max_download_retries,
                                    urllb3_headers=self.urllib3_headers)
                # Attempt to download resource
                data = fd.download_verify_return()

            percent = int((float(downloaded + 1) / float(total)) * 100)
            percent = '{0:.1f}'.format(percent)
            if data is not None:
                self.patch_binary_data.append(data)
                downloaded += 1
                status = {'total': total,
                          'downloaded': downloaded,
                          'percent_complete': percent,
                          'status': 'downloading'}
                self._call_progress_hooks(status)
            else:
                # Since patches are applied sequentially
                # we cannot continue successfully
                status = {'total': total,
                          'downloaded': downloaded,
                          'percent_complete': percent,
                          'status': 'failed to download all patches'}
                self._call_progress_hooks(status)
                return False

        status = {'total': total,
                  'downloaded': downloaded,
                  'percent_complete': percent,
                  'status': 'finished'}
        self._call_progress_hooks(status)
        return True

    def _call_progress_hooks(self, data):
        """Pass *data* to every registered hook; hook errors are logged."""
        for ph in self.progress_hooks:
            try:
                ph(data)
            except Exception as err:
                log.debug('Exception in callback: %s', ph.__name__)
                log.debug(err, exc_info=True)

    def _apply_patches_in_memory(self):
        """Apply each downloaded patch to ``self.og_binary`` in order.

        Raises:
            PatcherError: when any patch fails to apply.
        """
        # Applies a sequence of patches in memory
        log.debug('Applying patches')
        for i in self.patch_binary_data:
            try:
                self.og_binary = bsdiff4.patch(self.og_binary, i)
                log.debug('Applied patch successfully')
            except Exception as err:
                log.debug(err, exc_info=True)
                raise PatcherError('Patch failed to apply')

    def _write_update_to_disk(self):  # pragma: no cover
        """Write the patched binary to the update folder and verify its hash.

        Raises:
            PatcherError: on a missing manifest filename, a write
                failure, or a hash mismatch of the written file.
        """
        # Writes updated binary to disk
        log.debug('Writing update to disk')
        filename_key = '{}*{}*{}*{}*{}'.format(settings.UPDATES_KEY,
                                               self.name,
                                               self.latest_version,
                                               self.platform,
                                               'filename')
        filename = self.star_access_update_data.get(filename_key)
        if filename is None:
            raise PatcherError('Filename missing in version file')

        with ChDir(self.update_folder):
            try:
                with open(filename, 'wb') as f:
                    f.write(self.og_binary)
                log.debug('Wrote update file')
            except IOError:
                # Removes file if it got created
                if os.path.exists(filename):
                    remove_any(filename)
                log.debug('Failed to open file for writing')
                raise PatcherError('Failed to open file for writing')
            else:
                file_info = self._get_info(self.name, self.latest_version,
                                           option='file')
                new_file_hash = file_info['file_hash']
                log.debug('checking file hash match')
                if new_file_hash != get_package_hashes(filename):
                    log.debug('Version file hash: %s', new_file_hash)
                    log.debug('Actual file hash: %s',
                              get_package_hashes(filename))
                    log.debug('File hash does not match')
                    remove_any(filename)
                    raise PatcherError('Bad hash on patched file',
                                       expected=True)

    def _get_info(self, name, version, option='file'):
        """Return filename/hash/size info for *name* at *version*.

        Args:
            name (str): package name
            version: version to look up in the manifest
            option (str): 'file' reads the full-archive keys, anything
                else reads the patch keys

        Returns:
            dict: empty when no platform entry exists; otherwise has
            'filename', 'file_hash' and 'file_size' keys (values come
            from the archive or patch fields depending on *option*).
        """
        if option == 'file':
            _name = 'filename'
            _hash = 'file_hash'
            _size = 'file_size'
        else:
            _name = 'patch_name'
            _hash = 'patch_hash'
            _size = 'patch_size'

        # Returns filename and hash for given name and version
        platform_key = '{}*{}*{}*{}'.format(settings.UPDATES_KEY, name,
                                            version, self.platform)
        platform_info = self.star_access_update_data.get(platform_key)

        info = {}
        if platform_info is not None:
            filename = platform_info.get(_name)
            log.debug('Current Info - Filename: %s', filename)

            file_hash = platform_info.get(_hash, '')
            log.debug('Current Info - File hash: %s', file_hash)

            file_size = platform_info.get(_size)
            log.debug('Current Info - File size: %s', file_size)

            _info = dict(filename=filename, file_hash=file_hash,
                         file_size=file_size)
            info.update(_info)

        return info
def test1(self):
    """Strict lookup yields None when the channel has no release."""
    lookup = EasyAccessDict(self.version_data)
    result = get_highest_version('Acme', 'mac', 'stable', lookup, strict=True)
    assert result is None
def test1(self):
    """Non-strict lookup ignores the channel and returns the overall highest."""
    lookup = EasyAccessDict(self.version_data)
    assert '4.4.3.2.0' == get_highest_version('Acme', 'mac', 'alpha',
                                              lookup, strict=False)
class Patcher(object):
    """Downloads, verifies, and patches binaries

    Downloads every patch between the installed version and the latest
    version, applies them in memory to the installed archive with
    bsdiff4, then writes the patched archive to the update folder.

    Kwargs:

        name (str): Name of binary to patch

        json_data (dict): Info dict with all package meta data

        current_version (str): Version number of currently installed binary

        latest_version (str): Newest version available

        update_folder (str): Path to update folder to place updated binary in

        update_urls (list): List of urls to use for file download

        verify (bool):

            True: Verify https connection

            False: Don't verify https connection

        max_download_retries (int): Number of times to retry a download

        urllib3_headers (dict): Headers to be used with http request
    """

    def __init__(self, **kwargs):
        """Collect configuration from keyword args; see class docstring."""
        self.name = kwargs.get('name')
        self.json_data = kwargs.get('json_data')
        # Star-delimited ("a*b*c") lookup view over the version manifest.
        self.star_access_update_data = EasyAccessDict(self.json_data)
        self.current_version = Version(kwargs.get('current_version'))
        self.latest_version = kwargs.get('latest_version')
        self.update_folder = kwargs.get('update_folder')
        self.update_urls = kwargs.get('update_urls', [])
        self.verify = kwargs.get('verify', True)
        self.max_download_retries = kwargs.get('max_download_retries')
        self.urllib3_headers = kwargs.get('urllib3_headers')
        # Progress hooks to be called
        self.progress_hooks = kwargs.get('progress_hooks', [])
        # List of dicts with urls, filename & hash of each patch
        self.patch_data = []
        # List of binary blobs of patch data
        self.patch_binary_data = []
        # binary blob of original archive to patch
        self.og_binary = None
        # ToDo: Update tests with linux archives.
        # Used for testing.
        self.platform = kwargs.get('platform', _PLATFORM)
        self.current_filename = kwargs.get('current_filename')
        self.current_file_hash = kwargs.get('current_file_hash')

        # Fall back to the version manifest for the installed archive's
        # filename/hash when the caller did not supply them.
        file_info = self._get_info(self.name, self.current_version,
                                   option='file')
        if self.current_filename is None:
            self.current_filename = file_info['filename']
        if self.current_file_hash is None:
            self.current_file_hash = file_info['file_hash']

    def start(self):
        """Starts patching process

        Returns True when the patched archive was written and verified;
        False on any failure (callers can then fall back to a full
        download).
        """
        log.debug('Starting patch updater...')
        # Check hash on installed binary to begin patching
        binary_check = self._verify_installed_binary()
        if not binary_check:
            log.debug('Binary check failed...')
            return False
        # Getting all required patch meta-data
        all_patches = self._get_patch_info()
        if all_patches is False:
            log.debug('Cannot find all patches...')
            return False
        # Download and verify patches in 1 go
        download_check = self._download_verify_patches()
        if download_check is False:
            log.debug('Patch check failed...')
            return False
        try:
            self._apply_patches_in_memory()
        except PatcherError:
            log.debug('Failed to apply patches in memory')
            return False
        else:
            try:
                self._write_update_to_disk()
            except PatcherError as err:
                log.debug(err, exc_info=True)
                return False
        # Looks like all is well
        return True

    def _verify_installed_binary(self):
        """Verify the installed archive's hash and read it into memory.

        On success the archive bytes are stored in ``self.og_binary``.
        Returns True on success, False otherwise.
        """
        # Verifies latest downloaded archive against known hash
        log.debug('Checking for current installed binary to patch')
        status = True
        with ChDir(self.update_folder):
            if not os.path.exists(self.current_filename):
                log.debug('Cannot find archive to patch')
                status = False
            else:
                installed_file_hash = get_package_hashes(self.current_filename)
                if self.current_file_hash != installed_file_hash:
                    log.debug('Binary hash mismatch')
                    status = False
                else:
                    # Read binary into memory to begin patching
                    try:
                        with open(self.current_filename, 'rb') as f:
                            self.og_binary = f.read()
                    except FileNotFoundError:
                        # Race: file vanished between the exists() check
                        # above and the open() call.
                        status = False
                        log.debug("Current archive missing")
                    except Exception as err:
                        status = False
                        log.debug(err, exc_info=True)
        if status:
            log.debug('Binary found and verified')
        return status

    # We will take all versions. Then append any version
    # that is greater then the current version to the list
    # of needed patches.
    def _get_patch_info(self):
        """Collect name/url/hash metadata for every required patch.

        Populates ``self.patch_data``.  Returns False when no patches are
        needed or metadata is missing; otherwise True (or the size
        comparison when all sizes are known).
        """
        # Taking the list of needed patches and extracting the
        # patch data from it. If any loop fails, will return False
        # and start full binary update.
        log.debug('Getting patch meta-data')
        required_patches = self._get_required_patches(self.name)

        if len(required_patches) == 0:
            log.debug('No patches to process')
            return False

        # If we can't get the file size for all patches & the latest
        # full update we fall back to the old patch update limit of 4
        # We will only patch update if the total size of all needed
        # patches are less than the size of a full update
        fall_back = False
        total_patch_size = 0

        # Loop through all required patches and get file name, hash
        # and file size.
        for p in required_patches:
            info = {}
            platform_key = '{}*{}*{}*{}'.format(settings.UPDATES_KEY,
                                                self.name, str(p),
                                                self.platform)
            platform_info = self.star_access_update_data.get(platform_key)

            try:
                info['patch_name'] = platform_info['patch_name']
                info['patch_urls'] = self.update_urls
                info['patch_hash'] = platform_info['patch_hash']
                patch_size = platform_info.get('patch_size')
                if patch_size is None:
                    # Since we are missing the patch size we cannot
                    # compare the total size of all patches to the size
                    # of a full update. Used for backwards compatibility
                    # before we added patch size to version manifest.
                    fall_back = True
                else:
                    try:
                        total_patch_size += int(patch_size)
                    except Exception as err:
                        log.debug(err, exc_info=True)
                        fall_back = True
                self.patch_data.append(info)
            except Exception as err:  # pragma: no cover
                # Missing some required patch data
                log.debug(err, exc_info=True)
                return False

        latest_info = self._get_info(self.name, self.latest_version,
                                     option='file')
        latest_file_size = latest_info.get('file_size')
        if latest_file_size is None:
            # Since we are missing the full update size we cannot
            # compare the total size of all patches to the full update.
            fall_back = True

        if fall_back is True:
            # Legacy heuristic: patch only when 4 or fewer are needed.
            if len(required_patches) > 4:
                return False
            else:
                return True
        else:
            return Patcher._calc_diff(total_patch_size, latest_file_size)

    @staticmethod
    def _calc_diff(patch_size, file_size):
        # True when downloading all patches is smaller than a full update.
        if patch_size < file_size:
            return True
        else:
            return False

    def _get_required_patches(self, name):
        """Return sorted Version objects newer than the installed version."""
        # Gathers patch name, hash & URL
        needed_patches = []
        try:
            # Get list of Version objects initialized with keys
            # from update manifest
            version_key = '{}*{}'.format(settings.UPDATES_KEY, name)
            # NOTE(review): this *calls* the EasyAccessDict instance while
            # every other lookup uses .get(...). If EasyAccessDict is not
            # callable this raises TypeError, which the KeyError handler
            # below would not catch — confirm EasyAccessDict's API.
            version_info = self.star_access_update_data(version_key)
            versions = map(Version, version_info.keys())
        except KeyError:  # pragma: no cover
            log.debug('No updates found in updates dict')
            # Will cause error to be thrown in _get_patch_info
            # which will cause patch update to return False
            versions = [1]

        # Only stable packages have patch info
        versions = [v for v in versions if v.channel == 'stable']

        log.debug('Getting required patches')
        for i in versions:
            if i > self.current_version:
                needed_patches.append(i)

        # Used to guarantee patches are only added once
        needed_patches = list(set(needed_patches))

        # Ensuring we apply patches in correct order
        return sorted(needed_patches)

    def _download_verify_patches(self):
        """Download and hash-verify every patch, reporting progress.

        Returns True when all patches downloaded and verified; False as
        soon as one fails, since patches must apply sequentially.
        """
        # Downloads & verifies all patches
        log.debug('Downloading patches')
        downloaded = 0
        percent = 0
        total = len(self.patch_data)
        temp_dir = tempfile.gettempdir()
        for p in self.patch_data:
            # Don't write temp files to cwd
            with ChDir(temp_dir):
                # NOTE(review): keyword 'urllb3_headers' is spelled without
                # the second "i" while the attribute is 'urllib3_headers' —
                # confirm this matches FileDownloader's signature.
                fd = FileDownloader(
                    p['patch_name'],
                    p['patch_urls'],
                    hexdigest=p['patch_hash'],
                    verify=self.verify,
                    max_download_retries=self.max_download_retries,
                    urllb3_headers=self.urllib3_headers)
                # Attempt to download resource
                data = fd.download_verify_return()

            percent = int((float(downloaded + 1) / float(total)) * 100)
            percent = '{0:.1f}'.format(percent)
            if data is not None:
                self.patch_binary_data.append(data)
                downloaded += 1
                status = {
                    'total': total,
                    'downloaded': downloaded,
                    'percent_complete': percent,
                    'status': 'downloading'
                }
                self._call_progress_hooks(status)
            else:
                # Since patches are applied sequentially
                # we cannot continue successfully
                status = {
                    'total': total,
                    'downloaded': downloaded,
                    'percent_complete': percent,
                    'status': 'failed to download all patches'
                }
                self._call_progress_hooks(status)
                return False

        status = {
            'total': total,
            'downloaded': downloaded,
            'percent_complete': percent,
            'status': 'finished'
        }
        self._call_progress_hooks(status)
        return True

    def _call_progress_hooks(self, data):
        """Pass *data* to every registered hook; hook errors are logged."""
        for ph in self.progress_hooks:
            try:
                ph(data)
            except Exception as err:
                log.debug('Exception in callback: %s', ph.__name__)
                log.debug(err, exc_info=True)

    def _apply_patches_in_memory(self):
        """Apply each downloaded patch to ``self.og_binary`` in order.

        Raises:
            PatcherError: when any patch fails to apply.
        """
        # Applies a sequence of patches in memory
        log.debug('Applying patches')
        for i in self.patch_binary_data:
            try:
                self.og_binary = bsdiff4.patch(self.og_binary, i)
                log.debug('Applied patch successfully')
            except Exception as err:
                log.debug(err, exc_info=True)
                raise PatcherError('Patch failed to apply')

    def _write_update_to_disk(self):  # pragma: no cover
        """Write the patched binary to the update folder and verify its hash.

        Raises:
            PatcherError: on a missing manifest filename, a write
                failure, or a hash mismatch of the written file.
        """
        # Writes updated binary to disk
        log.debug('Writing update to disk')
        filename_key = '{}*{}*{}*{}*{}'.format(settings.UPDATES_KEY,
                                               self.name,
                                               self.latest_version,
                                               self.platform,
                                               'filename')
        filename = self.star_access_update_data.get(filename_key)
        if filename is None:
            raise PatcherError('Filename missing in version file')

        with ChDir(self.update_folder):
            try:
                with open(filename, 'wb') as f:
                    f.write(self.og_binary)
                log.debug('Wrote update file')
            except IOError:
                # Removes file if it got created
                if os.path.exists(filename):
                    remove_any(filename)
                log.debug('Failed to open file for writing')
                raise PatcherError('Failed to open file for writing')
            else:
                file_info = self._get_info(self.name, self.latest_version,
                                           option='file')
                new_file_hash = file_info['file_hash']
                log.debug('checking file hash match')
                if new_file_hash != get_package_hashes(filename):
                    log.debug('Version file hash: %s', new_file_hash)
                    log.debug('Actual file hash: %s',
                              get_package_hashes(filename))
                    log.debug('File hash does not match')
                    remove_any(filename)
                    raise PatcherError('Bad hash on patched file',
                                       expected=True)

    def _get_info(self, name, version, option='file'):
        """Return filename/hash/size info for *name* at *version*.

        Args:
            name (str): package name
            version: version to look up in the manifest
            option (str): 'file' reads the full-archive keys, anything
                else reads the patch keys

        Returns:
            dict: empty when no platform entry exists; otherwise has
            'filename', 'file_hash' and 'file_size' keys (values come
            from the archive or patch fields depending on *option*).
        """
        if option == 'file':
            _name = 'filename'
            _hash = 'file_hash'
            _size = 'file_size'
        else:
            _name = 'patch_name'
            _hash = 'patch_hash'
            _size = 'patch_size'

        # Returns filename and hash for given name and version
        platform_key = '{}*{}*{}*{}'.format(settings.UPDATES_KEY, name,
                                            version, self.platform)
        platform_info = self.star_access_update_data.get(platform_key)

        info = {}
        if platform_info is not None:
            filename = platform_info.get(_name)
            log.debug('Current Info - Filename: %s', filename)

            file_hash = platform_info.get(_hash, '')
            log.debug('Current Info - File hash: %s', file_hash)

            file_size = platform_info.get(_size)
            log.debug('Current Info - File size: %s', file_size)

            _info = dict(filename=filename, file_hash=file_hash,
                         file_size=file_size)
            info.update(_info)

        return info
def test_easy_access(self):
    """Star-delimited keys traverse nested dictionaries."""
    nested = {'carson': {'da': {'park': 'mills'}}}
    wrapped = EasyAccessDict(nested)
    assert wrapped.get('carson*da*park') == 'mills'
class Patcher(object):
    """Downloads, verifies, and patches binaries

    Kwargs:

        name (str): Name of binary to patch

        json_data (dict): Info dict with all package meta data

        current_version (str): Version number of currently installed binary

        latest_version (str): Newest version available

        update_folder (str): Path to update folder to place updated binary in

        update_urls (list): List of urls to use for file download

        verify (bool): True: Verify https connection
                       False: Don't verify https connection

        max_download_retries (int): Number of times to retry a download

        headers (dict): Headers to be used with http request.  Accepts
                        urllib3 and generic headers.

        http_timeout (int): HTTP timeout or None
    """

    def __init__(self, **kwargs):
        self.name = kwargs.get("name")
        self.channel = kwargs.get("channel")
        self.json_data = kwargs.get("json_data")
        self.star_access_update_data = EasyAccessDict(self.json_data)
        self.current_version = Version(kwargs.get("current_version"))
        self.latest_version = kwargs.get("latest_version")
        self.update_folder = kwargs.get("update_folder")
        self.update_urls = kwargs.get("update_urls", [])
        self.verify = kwargs.get("verify", True)
        self.max_download_retries = kwargs.get("max_download_retries")
        self.headers = kwargs.get("headers")
        self.downloader = kwargs.get("downloader")
        self.http_timeout = kwargs.get("http_timeout")

        # Progress hooks to be called
        self.progress_hooks = kwargs.get("progress_hooks", [])

        # List of dicts with urls, filename & hash of each patch
        self.patch_data = []

        # List of binary blobs of patch data
        self.patch_binary_data = []

        # binary blob of original archive to patch
        self.og_binary = None

        # Used for testing.
        self.platform = kwargs.get("platform", _PLATFORM)
        self.current_filename = kwargs.get("current_filename")
        self.current_file_hash = kwargs.get("current_file_hash")

        # Fall back to the version manifest for the installed archive's
        # name/hash when the caller did not supply them.
        file_info = self._get_info(self.name, self.current_version,
                                   option="file")
        if self.current_filename is None:
            self.current_filename = file_info["filename"]

        if self.current_file_hash is None:
            self.current_file_hash = file_info["file_hash"]

    def start(self):
        """Starts patching process

        Returns (bool): True on success, False on any failure — callers
        fall back to a full binary update when this returns False.
        """
        log.debug("Starting patch updater...")
        # Check hash on installed binary to begin patching
        if self._verify_installed_binary() is False:
            log.debug("Binary check failed...")
            return False

        # Getting all required patch meta-data
        if self._get_patch_info() is False:
            log.debug("Cannot find all patches...")
            return False

        # Download and verify patches in 1 go
        if self._download_verify_patches() is False:
            log.debug("Patch check failed...")
            return False

        try:
            self._apply_patches_in_memory()
        except PatcherError:
            log.debug("Failed to apply patches in memory")
            return False
        else:
            try:
                self._write_update_to_disk()
            except PatcherError as err:
                log.debug(err, exc_info=True)
                return False

        # Looks like all is well
        return True

    def _verify_installed_binary(self):
        # Verifies latest downloaded archive against known hash and, on
        # success, reads it into memory (self.og_binary) for patching.
        log.debug("Checking for current installed binary to patch")
        status = True
        with ChDir(self.update_folder):
            if not os.path.exists(self.current_filename):
                log.debug("Cannot find archive to patch")
                status = False
            else:
                installed_file_hash = get_package_hashes(
                    self.current_filename)
                if self.current_file_hash != installed_file_hash:
                    log.debug("Binary hash mismatch")
                    status = False
                else:
                    # Read binary into memory to begin patching
                    try:
                        file_path = os.path.join(self.update_folder,
                                                 self.current_filename)
                        with open(file_path, "rb") as f:
                            self.og_binary = f.read()
                    except FileNotFoundError:
                        status = False
                        log.debug("Current archive missing: %s",
                                  self.current_filename)
                        log.debug("%s", ' '.join(os.listdir(os.getcwd())))
                    except Exception as err:
                        status = False
                        log.debug(err, exc_info=True)

        if status:
            log.debug("Binary found and verified")
        return status

    # We will take all versions. Then append any version
    # that is greater then the current version to the list
    # of needed patches.
    def _get_patch_info(self):
        # Taking the list of needed patches and extracting the
        # patch data from it. If any loop fails, will return False
        # and start full binary update.
        log.debug("Getting patch meta-data")
        required_patches = self._get_required_patches(self.name)

        if len(required_patches) == 0:
            log.debug("No patches to process")
            return False

        # If we can't get the file size for all patches & the latest
        # full update we fall back to the old patch update limit of 4
        #
        # We will only patch update if the total size of all needed
        # patches are less than the size of a full update
        fall_back = False
        total_patch_size = 0

        # Loop through all required patches and get file name, hash
        # and file size.
        for p in required_patches:
            info = {}
            platform_key = "{}*{}*{}*{}".format(settings.UPDATES_KEY,
                                                self.name, str(p),
                                                self.platform)
            platform_info = self.star_access_update_data.get(platform_key)

            try:
                info["patch_name"] = platform_info["patch_name"]
                info["patch_urls"] = self.update_urls
                info["patch_hash"] = platform_info["patch_hash"]

                patch_size = platform_info.get("patch_size")
                if patch_size is None:
                    # Since we are missing the patch size we cannot
                    # compare the total size of all patches to the size
                    # of a full update. Used for backwards compatibility
                    # before we added patch size to version manifest.
                    fall_back = True
                else:
                    try:
                        total_patch_size += int(patch_size)
                    except Exception as err:
                        log.debug(err, exc_info=True)
                        fall_back = True

                self.patch_data.append(info)
            except Exception as err:  # pragma: no cover
                # Missing some required patch data
                log.debug(err, exc_info=True)
                return False

        latest_info = self._get_info(self.name, self.latest_version,
                                     option="file")
        latest_file_size = latest_info.get("file_size")
        if latest_file_size is None:
            # Since we are missing the full update size we cannot
            # compare the total size of all patches to the full update.
            fall_back = True

        if fall_back is True:
            if len(required_patches) > 4:
                return False
            else:
                return True
        else:
            return Patcher._calc_diff(total_patch_size, latest_file_size)

    @staticmethod
    def _calc_diff(patch_size, file_size):
        # Patch update only makes sense while all patches together are
        # smaller than one full download.
        return patch_size < file_size

    def _get_required_patches(self, name):
        # Gathers every manifest version newer than the currently
        # installed one, restricted to the configured channel, sorted
        # in apply order.
        needed_patches = []
        try:
            # Get list of Version objects initialized with keys
            # from update manifest
            version_key = "{}*{}".format(settings.UPDATES_KEY, name)
            # NOTE(review): the __call__ form (vs .get) appears
            # intentional — the except below relies on it raising
            # KeyError when the key is absent; .get would return None.
            version_info = self.star_access_update_data(version_key)
            versions = map(Version, version_info.keys())
        except KeyError:  # pragma: no cover
            log.debug("No updates found in updates dict")
            # Bug fix: the old sentinel ``versions = [1]`` raised
            # AttributeError on the ``v.channel`` filter below. An empty
            # list makes _get_patch_info return False cleanly, which
            # makes the patch update fall back to a full update — the
            # originally documented intent.
            versions = []

        # We only care about the current channel
        versions = [v for v in versions if v.channel == self.channel]
        log.debug("Getting required patches")
        for i in versions:
            if i > self.current_version:
                needed_patches.append(i)

        # Used to guarantee patches are only added once
        needed_patches = list(set(needed_patches))

        # Ensuring we apply patches in correct order
        return sorted(needed_patches)

    def _download_verify_patches(self):
        # Downloads & verifies all patches, reporting progress to hooks.
        # Returns False as soon as one patch cannot be downloaded.
        log.debug("Downloading patches")
        downloaded = 0
        percent = 0
        total = len(self.patch_data)
        temp_dir = tempfile.gettempdir()
        for p in self.patch_data:
            # Don't write temp files to cwd
            with ChDir(temp_dir):
                if self.downloader:
                    fd = self.downloader(p["patch_name"], p["patch_urls"],
                                         hexdigest=p["patch_hash"])
                else:
                    fd = FileDownloader(
                        p["patch_name"], p["patch_urls"],
                        hexdigest=p["patch_hash"],
                        verify=self.verify,
                        max_download_retries=self.max_download_retries,
                        headers=self.headers,
                        http_timeout=self.http_timeout)

                # Attempt to download resource
                data = fd.download_verify_return()

            percent = int((float(downloaded + 1) / float(total)) * 100)
            percent = "{0:.1f}".format(percent)
            if data is not None:
                self.patch_binary_data.append(data)
                downloaded += 1
                status = {
                    "total": total,
                    "downloaded": downloaded,
                    "percent_complete": percent,
                    "status": "downloading",
                }
                self._call_progress_hooks(status)
            else:
                # Since patches are applied sequentially
                # we cannot continue successfully
                status = {
                    "total": total,
                    "downloaded": downloaded,
                    "percent_complete": percent,
                    "status": "failed to download all patches",
                }
                self._call_progress_hooks(status)
                return False

        status = {
            "total": total,
            "downloaded": downloaded,
            "percent_complete": percent,
            "status": "finished",
        }
        self._call_progress_hooks(status)
        return True

    def _call_progress_hooks(self, data):
        # Fan *data* out to every registered hook; a failing hook must
        # never abort the patch process, so exceptions are only logged.
        for ph in self.progress_hooks:
            try:
                ph(data)
            except Exception as err:
                # getattr guard: hooks may be callables (partials,
                # instances) without a __name__ attribute.
                log.debug("Exception in callback: %s",
                          getattr(ph, "__name__", repr(ph)))
                log.debug(err, exc_info=True)

    def _apply_patches_in_memory(self):
        # Applies a sequence of patches in memory, in order. Raises
        # PatcherError on the first patch that fails to apply.
        log.debug("Applying patches")
        for i in self.patch_binary_data:
            try:
                self.og_binary = bsdiff4.patch(self.og_binary, i)
                log.debug("Applied patch successfully")
            except Exception as err:
                log.debug(err, exc_info=True)
                raise PatcherError("Patch failed to apply")

    def _write_update_to_disk(self):  # pragma: no cover
        # Writes updated binary to disk, verifying its hash against the
        # version manifest before accepting it.
        log.debug("Writing update to disk")
        filename_key = "{}*{}*{}*{}*{}".format(
            settings.UPDATES_KEY,
            self.name,
            self.latest_version,
            self.platform,
            "filename",
        )
        filename = self.star_access_update_data.get(filename_key)
        if filename is None:
            raise PatcherError("Filename missing in version file")

        with ChDir(self.update_folder):
            try:
                with open(filename, "wb") as f:
                    f.write(self.og_binary)
                log.debug("Wrote update file")
            except IOError:
                # Removes file if it got created
                if os.path.exists(filename):
                    remove_any(filename)
                log.debug("Failed to open file for writing")
                raise PatcherError("Failed to open file for writing")
            else:
                file_info = self._get_info(self.name,
                                           self.latest_version,
                                           option="file")

                new_file_hash = file_info["file_hash"]
                log.debug("checking file hash match")
                if new_file_hash != get_package_hashes(filename):
                    log.debug("Version file hash: %s", new_file_hash)
                    log.debug("Actual file hash: %s",
                              get_package_hashes(filename))
                    log.debug("File hash does not match")
                    remove_any(filename)
                    raise PatcherError("Bad hash on patched file",
                                       expected=True)

    def _get_info(self, name, version, option="file"):
        # Returns a dict with filename, hash & size for *name* at
        # *version* on this platform; *option* selects full-file vs
        # patch metadata keys. Empty dict if the platform key is absent.
        if option == "file":
            _name = "filename"
            _hash = "file_hash"
            _size = "file_size"
        else:
            _name = "patch_name"
            _hash = "patch_hash"
            _size = "patch_size"

        # Returns filename and hash for given name and version
        platform_key = "{}*{}*{}*{}".format(settings.UPDATES_KEY, name,
                                            version, self.platform)
        platform_info = self.star_access_update_data.get(platform_key)

        info = {}
        if platform_info is not None:
            filename = platform_info.get(_name)
            log.debug("Current Info - Filename: %s", filename)

            file_hash = platform_info.get(_hash, "")
            log.debug("Current Info - File hash: %s", file_hash)

            file_size = platform_info.get(_size)
            log.debug("Current Info - File size: %s", file_size)
            _info = dict(filename=filename, file_hash=file_hash,
                         file_size=file_size)
            info.update(_info)

        return info
def test1(self):
    """Strict lookup on a channel with no matching release yields None."""
    version_data = EasyAccessDict(self.version_data)
    result = get_highest_version("Acme", "mac", "stable", version_data,
                                 strict=True)
    assert result is None
def test1(self):
    """Non-strict lookup ignores the channel and returns the overall
    highest version."""
    lookup = EasyAccessDict(self.version_data)
    highest = get_highest_version("Acme", "mac", "alpha", lookup,
                                  strict=False)
    assert highest == "4.4.3.2.0"