def test_execution_one_file_restart(
    self,
    cleandir,
    shared_datadir,
    simpleserver,
    pyu,
    custom_dir,
    port,
    windowed,
    split_version,
):
    data_dir = shared_datadir / "update_repo_restart"
    pyu.setup()

    # We are moving all of the files from the deploy directory to the
    # cwd. We will start a simple http server to use for updates
    with ChDir(data_dir):
        simpleserver.start(port)

        with open("pyu.log", "w") as f:
            f.write("")

        cmd = "python build_onefile_restart.py %s %s %s %s" % (
            custom_dir,
            port,
            windowed,
            split_version,
        )
        os.system(cmd)

        # Moving all files from the deploy directory to the cwd
        # since that is where we will start the simple server
        deploy_dir = os.path.join("pyu-data", "deploy")
        assert os.path.exists(deploy_dir)
        test_cwd = os.getcwd()
        with ChDir(deploy_dir):
            files = os.listdir(os.getcwd())
            for f in files:
                if f == ".DS_Store":
                    continue
                shutil.move(f, test_cwd)

        app_name = "Acme"
        if sys.platform == "win32":
            app_name += ".exe"

        app_run_command = app_name
        if sys.platform != "win32":
            app_run_command = "./{}".format(app_name)

        if sys.platform == "darwin" and windowed:
            app_run_command = "./{}.app/Contents/MacOS/{}".format(
                app_name, app_name
            )
            app_name = "{}.app".format(app_name)

        if custom_dir:
            # update with custom_dir is multiprocessing-safe
            lock_path = "pyu.lock"
        else:
            if not os.path.exists(appdirs.user_data_dir(APP_NAME)):
                os.makedirs(appdirs.user_data_dir(APP_NAME))
            lock_path = os.path.join(
                appdirs.user_data_dir(APP_NAME), "pyu.lock"
            )

        update_lock = filelock.FileLock(lock_path, LOCK_TIMEOUT)

        version_file = "version2.txt"
        with update_lock.acquire(LOCK_TIMEOUT, 5):
            count = 0
            while count < 5:
                # Call the binary to self update
                subprocess.call(app_run_command, shell=True)
                if os.path.exists(version_file):
                    break
                count += 1
                print("Retrying app launch!")
                # Allow enough time for update process to complete.
                time.sleep(AUTO_UPDATE_PAUSE)

        simpleserver.stop()
        # Detect if it was an overwrite error
        assert os.path.exists(app_name)
        assert os.path.exists(version_file)
        with open(version_file, "r") as f:
            output = f.read().strip()
        assert output == "4.2"

        if os.path.exists(app_name):
            remove_any(app_name)

        if os.path.exists(version_file):
            remove_any(version_file)
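# The test above serializes concurrent update runs with a file lock so that
# parallel test workers do not clobber the same user-data directory. Below is
# a minimal sketch of that pattern, assuming only the filelock package; the
# "pyu.lock" path, the 30-second timeout, and the _run_update callable are
# illustrative and not part of the real test constants.
def _example_update_lock_pattern(_run_update):
    import filelock

    lock = filelock.FileLock("pyu.lock", 30)
    try:
        # acquire(timeout, poll interval) blocks until the lock is free or
        # the timeout expires, raising filelock.Timeout on failure.
        with lock.acquire(30, 5):
            _run_update()
    except filelock.Timeout:
        print("Could not obtain the update lock in time")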
def _is_downloaded(self):
    # Comparing file hashes to ensure security
    with ChDir(self.update_folder):
        verified = self._verify_file_hash()

    return verified
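# _verify_file_hash is expected to compare the downloaded archive against the
# hexdigest published in the version manifest. A standalone sketch of that
# kind of check is below, assuming SHA-256; the helper name and parameters
# are hypothetical and not PyUpdater's actual implementation.
def _example_sha256_matches(path, expected_hexdigest, chunk_size=1024 * 1024):
    import hashlib

    # Stream the file in chunks so large archives are not read into memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_hexdigest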
def remove_previous_versions(directory, filename):
    """Removes previous version of named file"""
    if filename is None:
        log.debug('Cleanup Failed - Filename is None')
        return
    log.debug('Filename: %s', filename)

    if directory is None:
        log.debug('Cleanup Failed - Directory is None')
        return
    log.debug('Directory: %s', directory)

    try:
        current_version = Version(filename)
    except (UtilsError, VersionError):  # pragma: no cover
        log.debug('Cleanup Failed: %s - Cannot parse version info.',
                  filename)
        return

    try:
        # We set the full path here because Package() checks if filename exists
        package_info = Package(os.path.join(directory, filename))
    except (UtilsError, VersionError):
        log.debug('Cleanup Failed: %s - Cannot parse package info.',
                  filename)
        return

    if package_info.info['status'] is False:
        log.debug('Not an archive format: %s', package_info.name)
        return

    log.debug('Current version: %s', str(current_version))
    assert package_info.name is not None
    log.debug('Name to search for: %s', package_info.name)
    with ChDir(directory):
        temp = os.listdir(os.getcwd())
        for t in temp:
            log.debug('Checking: %s', t)
            # Only attempt to remove old files of the one we
            # are updating
            if package_info.name not in t:
                log.debug('File does not match name of current binary')
                continue
            else:
                log.debug('Found possible match')
                log.debug('Latest name: %s', package_info.name)
                log.debug('Old name: %s', t)

            try:
                old_version = Version(t)
            except (UtilsError, VersionError):  # pragma: no cover
                log.warning('Cannot parse version info')
                # Skip file since we can't parse
                continue
            log.debug('Found version: %s', str(old_version))

            if old_version < current_version:
                old_path = os.path.join(directory, t)
                log.debug('Removing old update: %s', old_path)
                remove_any(old_path)
            else:
                log.debug('Old version: %s', old_version)
                log.debug('Current version: %s', current_version)
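# Hedged usage sketch for the cleanup helper above: after an update has been
# applied, point it at the update folder together with the filename of the
# archive that was just installed; anything with the same package name but a
# lower parsed version is removed. The appdirs-based folder path and the
# archive filename below are illustrative only.
def _example_cleanup_after_update():
    update_folder = os.path.join(appdirs.user_data_dir(APP_NAME), "update")
    remove_previous_versions(update_folder, "Acme-mac-4.2.tar.gz")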
def _download_verify_patches(self):
    # Downloads & verifies all patches
    log.debug("Downloading patches")
    downloaded = 0
    percent = 0
    total = len(self.patch_data)
    temp_dir = tempfile.gettempdir()
    for p in self.patch_data:
        # Don't write temp files to cwd
        with ChDir(temp_dir):
            if self.downloader:
                fd = self.downloader(
                    p["patch_name"], p["patch_urls"], hexdigest=p["patch_hash"]
                )
            else:
                fd = FileDownloader(
                    p["patch_name"],
                    p["patch_urls"],
                    hexdigest=p["patch_hash"],
                    verify=self.verify,
                    max_download_retries=self.max_download_retries,
                    urllib3_headers=self.urllib3_headers,
                )

            # Attempt to download resource
            data = fd.download_verify_return()

        percent = int((float(downloaded + 1) / float(total)) * 100)
        percent = "{0:.1f}".format(percent)
        if data is not None:
            self.patch_binary_data.append(data)
            downloaded += 1
            status = {
                "total": total,
                "downloaded": downloaded,
                "percent_complete": percent,
                "status": "downloading",
            }
            self._call_progress_hooks(status)
        else:
            # Since patches are applied sequentially
            # we cannot continue successfully
            status = {
                "total": total,
                "downloaded": downloaded,
                "percent_complete": percent,
                "status": "failed to download all patches",
            }
            self._call_progress_hooks(status)
            return False

    status = {
        "total": total,
        "downloaded": downloaded,
        "percent_complete": percent,
        "status": "finished",
    }
    self._call_progress_hooks(status)
    return True
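# The status dicts built above are handed to any registered progress hooks.
# A minimal example of a hook that consumes them is below; how a hook gets
# attached depends on the client object, so the registration call is only
# shown as a hypothetical comment.
def _example_patch_progress_hook(status):
    # status carries: total, downloaded, percent_complete (a string), status.
    msg = "{downloaded}/{total} patches ({percent_complete}%) - {status}"
    print(msg.format(**status))

# client.add_progress_hook(_example_patch_progress_hook)  # hypothetical registration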
def _cmd_collect_debug_info(*args):  # pragma: no cover
    log.info('Starting log export')

    # A helper function that adds the filename & data to the
    # payload in preparation for upload.
    def _add_file(payload, filename):
        with io.open(filename, 'r', encoding='utf-8') as f:
            data = f.read()
        payload['files'][filename] = {'content': data}

    # A helper function that uploads the data to a private gist.
    def _upload(data):
        api = 'https://api.github.com/'
        gist_url = api + 'gists'
        http = get_http_pool()
        headers = {
            "Accept": "application/vnd.github.v3+json",
            "User-Agent": "PyUpdater",
        }
        r = http.urlopen('POST', gist_url, headers=headers,
                         body=json.dumps(data))
        try:
            data = json.loads(r.data)
            url = data['html_url']
        except Exception as err:
            log.debug(err, exc_info=True)
            log.debug(json.dumps(r.data, indent=2))
            url = None
        return url

    # Payload skeleton per GitHub's specification.
    # https://developer.github.com/v3/gists/
    upload_data = {
        'files': {},
        'description': 'PyUpdater debug logs',
        'public': False,
    }

    if LOG_DIR is None:
        log.error('LOG_DIR is not set')
        log.debug('Something is mis-configured. '
                  'Might be running in a test or by hand.')
        return

    with ChDir(LOG_DIR):
        # Get a list of all files in the log directory.
        temp_files = os.listdir(os.getcwd())

        # Exit quickly if no files are present.
        if len(temp_files) == 0:
            log.info('No log files to collect')
            return

        log.info('Collecting logs')
        for t in temp_files:
            # If the file matches the base name, add it to the payload
            if t.startswith(settings.LOG_FILENAME_DEBUG):
                log.debug('Adding %s to log', t)
                _add_file(upload_data, t)

    log.info('Found all logs')
    # Attempt upload of debug files.
    url = _upload(upload_data)
    if url is None:
        log.error('Could not upload debug info to github')
    else:
        log.info('Log export complete')
        log.info('Logs uploaded to %s', url)
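# For reference, the upload_data payload above ends up shaped roughly like
# the example below before being POSTed to the Gists endpoint. The filename
# and log contents are placeholders, not real values.
EXAMPLE_GIST_PAYLOAD = {
    'description': 'PyUpdater debug logs',
    'public': False,
    'files': {
        'example-debug.log': {'content': '...collected log text...'},
    },
}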
def test_execution_one_file_extract(self, cleandir, datadir, simpleserver,
                                    pyu, custom_dir, port, windowed):
    data_dir = datadir['update_repo_extract']
    pyu.setup()

    # We are moving all of the files from the deploy directory to the
    # cwd. We will start a simple http server to use for updates
    with ChDir(data_dir):
        simpleserver.start(port)

        cmd = 'python build_onefile_extract.py %s %s %s' % (custom_dir,
                                                            port, windowed)
        os.system(cmd)

        # Moving all files from the deploy directory to the cwd
        # since that is where we will start the simple server
        deploy_dir = os.path.join('pyu-data', 'deploy')
        assert os.path.exists(deploy_dir)
        test_cwd = os.getcwd()
        with ChDir(deploy_dir):
            files = os.listdir(os.getcwd())
            for f in files:
                if f == '.DS_Store':
                    continue
                shutil.move(f, test_cwd)

        app_name = 'Acme'
        if sys.platform == 'win32':
            app_name += '.exe'

        with open('pyu.log', 'w') as f:
            f.write('')

        app_run_command = app_name
        if sys.platform != 'win32':
            app_run_command = './{}'.format(app_name)

        if sys.platform == 'darwin' and windowed:
            app_run_command = './{}.app/Contents/MacOS/{}'.format(app_name,
                                                                  app_name)
            app_name = '{}.app'.format(app_name)

        if custom_dir:
            # update with custom_dir is multiprocessing-safe
            lock_path = 'pyu.lock'
        else:
            if not os.path.exists(appdirs.user_data_dir(APP_NAME)):
                os.makedirs(appdirs.user_data_dir(APP_NAME))
            lock_path = os.path.join(appdirs.user_data_dir(APP_NAME),
                                     'pyu.lock')

        update_lock = filelock.FileLock(lock_path, LOCK_TIMEOUT)

        output_file = 'version1.txt'
        with update_lock.acquire(LOCK_TIMEOUT, 5):
            count = 0
            while count < 5:
                # Call the binary to self update
                subprocess.call(app_run_command, shell=True)
                if os.path.exists(output_file):
                    break
                count += 1
                print("Retrying app launch")
                # Allow enough time for update process to complete.
                time.sleep(AUTO_UPDATE_PAUSE)

            # Call the binary to ensure it's
            # the updated binary
            subprocess.call(app_run_command, shell=True)

        simpleserver.stop()
        # Detect if it was an overwrite error
        assert os.path.exists(app_name)
        assert os.path.exists(output_file)
        with open(output_file, 'r') as f:
            output = f.read().strip()
        assert output == '4.2'

        if os.path.exists(app_name):
            if os.path.isdir(app_name):
                shutil.rmtree(app_name)
            else:
                os.remove(app_name)

        if os.path.exists(output_file):
            os.remove(output_file)