def test_folder_layout():
    """The pyi-data folder and its sub-folders exist after setup."""
    with ChDir(u'tests'):
        # Fixed: dropped the redundant `is True` comparisons --
        # os.path.exists already returns a bool.
        assert os.path.exists(u'pyi-data')
    with ChDir(u'tests/pyi-data'):
        assert os.path.exists(u'new')
        assert os.path.exists(u'deploy')
        assert os.path.exists(u'files')
def _move_packages(self, package_manifest):
    """Move processed packages (and patches) to their destinations.

    Each package is copied to the deploy folder and then moved to its
    version folder. Since we are copying files to the deploy folder
    then moving the updates to the files folder, we can safely delete
    files in the deploy folder after uploading.

    Args:
        package_manifest: iterable of package objects exposing
            ``filename``, ``version_path`` and ``patch_info``.
    """
    log.debug(u'Moving packages to deploy folder')
    for p in package_manifest:
        patch = p.patch_info.get(u'patch_name', None)
        version_path = p.version_path
        with ChDir(self.new_dir):
            if patch:
                shutil.copy(patch, self.deploy_dir)
                log.debug(u'Copying {} to {}'.format(
                          patch, self.deploy_dir))
                shutil.move(patch, version_path)
                log.debug(u'Moving {} to {}'.format(patch,
                          version_path))
            shutil.copy(p.filename, self.deploy_dir)
            log.debug(u'Copying {} to {}'.format(p.filename,
                      self.deploy_dir))
            # Fixed: destination path was built twice (once for the
            # exists() check, once for remove()); compute it once.
            dest = os.path.join(version_path, p.filename)
            if os.path.exists(dest):
                os.remove(dest)
            shutil.move(p.filename, version_path)
            log.debug(u'Moving {} to {}'.format(p.filename,
                      version_path))
def _write_update_to_disk(self):
    """Write the patched binary held in memory to the update folder.

    Looks up the target filename in the version data, writes
    ``self.new_binary`` to that file inside ``self.update_folder``,
    then verifies the written file's hash against the expected hash
    from the version data.

    Raises:
        PatcherError: if the filename is missing from the version
            file, the file cannot be opened for writing, or the
            written file fails the hash check.
    """
    # Writes updated binary to disk
    log.debug('Writing update to disk')
    # Key format: updates*<name>*<version>*<platform>*filename
    filename_key = '{}*{}*{}*{}*{}'.format(u'updates', self.name,
                                           self.highest_version,
                                           self.plat, u'filename')
    filename = self.star_access_update_data.get(filename_key)
    if filename is None:
        raise PatcherError('Filename missing in version file')
    with ChDir(self.update_folder):
        try:
            with open(filename, u'wb') as f:
                f.write(self.new_binary)
        except IOError:
            # Removes file if it somehow got created
            if os.path.exists(filename):
                os.remove(filename)
            log.error(u'Failed to open file for writing')
            raise PatcherError(u'Failed to open file for writing')
        else:
            # Write succeeded -- verify checksum before declaring
            # the update good.
            file_info = self._current_file_info(self.name,
                                                self.highest_version)
            new_file_hash = file_info['file_hash']
            log.debug(u'checking file hash match')
            if new_file_hash != get_package_hashes(filename):
                log.error(u'File hash does not match')
                os.remove(filename)
                raise PatcherError(u'Patched file hash bad checksum')
            log.debug('Wrote update file')
def test_make_spec(self):
    """Generate a spec file, then build from it and check the output."""
    t_config = TConfig()
    t_config.DATA_DIR = os.getcwd()
    pyu = PyUpdater(t_config)
    pyu.setup()
    spec_file_name = get_system() + '.spec'
    spec_cmd = ['make-spec', 'app.py', '-F', '--app-name', 'MyApp',
                '--app-version', '0.1.0']
    build_cmd = [str(arg) for arg in
                 ['build', '--app-name', 'MyApp',
                  '--app-version', '0.1.0', spec_file_name]]
    parser = get_parser()
    with open('app.py', 'w') as f:
        f.write('print "Hello, World!"')
    # First pass: only create the spec file.
    args, pyu_args = parser.parse_known_args(spec_cmd)
    Builder(args, pyu_args).make_spec()
    assert os.path.exists(spec_file_name)
    # Second pass: build the app from the generated spec.
    args, pyu_args = parser.parse_known_args(build_cmd)
    Builder(args, pyu_args).build()
    with ChDir(new_folder):
        assert len(os.listdir(os.getcwd())) == 1
def test_execution(self, pyu, db):
    """End-to-end: build a package, process it and sign the update."""
    parser = get_parser()
    archive_name = 'myapp-{}-0.1.0{}'.format(get_system(), ext)
    data_dir = pyu.config['DATA_DIR']
    pyu_data_dir = os.path.join(data_dir, 'pyu-data')
    pyu.setup()
    pyu.make_keys(3)
    with ChDir(data_dir):
        loader = Loader(db)
        loader.save_config(pyu.config.copy())
        with open('app.py', 'w') as f:
            f.write('print "Hello World"')
        args, pyu_args = parser.parse_known_args(create_build_cmd(0))
        builder = Builder(args, pyu_args)
        builder.build()
        # Build lands in new/, processing moves it to deploy/ and
        # files/, signing writes the compressed version manifest.
        assert os.path.exists(
            os.path.join(pyu_data_dir, 'new', archive_name))
        pyu.process_packages()
        assert os.path.exists(
            os.path.join(pyu_data_dir, 'deploy', archive_name))
        assert os.path.exists(
            os.path.join(pyu_data_dir, 'files', archive_name))
        pyu.sign_update()
        assert os.path.exists(
            os.path.join(pyu_data_dir, 'deploy', 'versions.gz'))
def _full_update(self, name):
    """Update the binary by downloading the full update archive.

    Args:
        name (str): Name of file to update

    Returns:
        (bool) True - Update Successful; False - Update Failed
    """
    log.debug(u'Starting full update')
    latest = self._get_highest_version(name)
    filename = self._get_filename(name, latest)
    hash_key = u'{}*{}*{}*{}*{}'.format(self.updates_key, name,
                                        latest, self.platform,
                                        u'file_hash')
    expected_hash = self.star_access_update_data.get(hash_key)
    with ChDir(self.update_folder):
        log.debug(u'Downloading update...')
        downloader = FileDownloader(filename, self.update_urls,
                                    expected_hash, self.verify)
        if downloader.download_verify_write():
            log.debug(u'Update Complete')
            return True
        log.error(u'Failed To Updated To Latest Version')
        return False
def _remove_old_updates(self):
    """Delete cached update archives older than the installed version.

    Patch updates start from the currently installed version, so
    anything older in the cache is never needed again.
    """
    # ToDo: Better filename comparison
    #       Please chime in if this is sufficient
    #       Will remove todo if so...
    cached = os.listdir(self.update_folder)
    try:
        filename = self._get_filename(self.name, self.version)
    except KeyError:
        filename = u'0.0.0'
    try:
        installed_str = get_version_number(filename)
    except UtilsError:
        log.debug(u'Cannot parse version info')
        installed_str = u'0.0.0'
    installed = version_string_to_tuple(installed_str)
    with ChDir(self.update_folder):
        for entry in cached:
            try:
                candidate_str = get_version_number(entry)
            except UtilsError:
                log.debug(u'Cannot parse version info')
                candidate_str = u'0.0.0'
            candidate = version_string_to_tuple(candidate_str)
            # Only touch files belonging to this app that are
            # strictly older than the installed version.
            if self.name in entry and candidate < installed:
                log.debug(u'Removing old update: {}'.format(entry))
                os.remove(entry)
def cleanup_old_archives(filename=None, directory=None):
    """Remove archives in *directory* older than *filename*'s version.

    Only entries whose name contains the parsed package name of
    *filename* are considered; each parseable match with a strictly
    lower version is removed.

    Args:
        filename (str): archive filename of the current version.
        directory (str): folder containing cached archives.
    """
    if filename is None:
        log.debug('Cleanup Failed - Filename is None')
        return
    log.debug('Filename: %s', filename)
    if directory is None:
        log.debug('Cleanup Failed - Directory is None')
        return
    log.debug('Directory: %s', directory)
    try:
        current_version = Version(filename)
    except (UtilsError, VersionError):  # pragma: no cover
        log.warning('Cleanup Failed - Cannot parse version info.')
        return
    try:
        # We set the full path here because Package() checks if
        # filename exists
        package_info = Package(os.path.join(directory, filename))
    except (UtilsError, VersionError):
        log.warning('Cleanup Failed - Cannot parse package info.')
        return
    if package_info.info['status'] is False:
        log.debug('Not an archive format: %s', package_info.name)
        return
    log.debug('Current version: %s', str(current_version))
    assert package_info.name is not None
    log.debug('Name to search for: %s', package_info.name)
    with ChDir(directory):
        temp = os.listdir(os.getcwd())
        for t in temp:
            log.debug('Checking: %s', t)
            # Only attempt to remove old files of the one we
            # are updating
            if package_info.name not in t:
                log.debug('File does not match name of current binary')
                continue
            else:
                log.debug('Found possible match')
                log.debug('Latest name: %s', package_info.name)
                log.debug('Old name: %s', t)
                try:
                    old_version = Version(t)
                except (UtilsError, VersionError):  # pragma: no cover
                    log.warning('Cannot parse version info')
                    # Skip file since we can't parse
                    continue
                log.debug('Found version: %s', str(old_version))
                if old_version < current_version:
                    old_path = os.path.join(directory, t)
                    log.info('Removing old update: %s', old_path)
                    remove_any(old_path)
                else:
                    log.debug('Old version: %s', old_version)
                    log.debug('Current version: %s', current_version)
def test_bad_content_length():
    """A response with empty headers falls back to the default length."""
    with ChDir(u'tests'):

        class FakeHeaders(object):
            # Simulates a response carrying no usable headers.
            headers = {}

        fd = FileDownloader(FILENAME, URL, FILE_HASH)
        assert fd._get_content_length(FakeHeaders()) == 100000
def test_move_to_deploy():
    """All signed archives plus version.json end up in deploy."""
    deploy_dir = os.path.join(PYI_DATA, u'deploy')
    expected = [
        u'version.json',
        u'Not So TUF-arm-0.5.0.zip',
        u'Not So TUF-mac-0.5.0.zip',
        u'Not So TUF-arm-0.5.3.zip',
        u'Not So TUF-mac-0.5.3.zip',
    ]
    with ChDir(deploy_dir):
        files = os.listdir(os.getcwd())
        for name in expected:
            assert name in files
def test_extract_no_file(self, client):
    """Extraction fails when the downloaded archive has been deleted."""
    update = client.update_check(client.app_name, '0.0.1')
    assert update is not None
    assert update.download() is True
    with ChDir(update.update_folder):
        # Wipe everything the download produced.
        for entry in os.listdir(os.getcwd()):
            remove_any(entry)
    if get_system() != 'win':
        assert update.extract() is False
def test_package_1(self):
    """A well-formed mac zip parses into name/version/platform."""
    filename = 'jms-mac-0.0.1.zip'
    with ChDir(TEST_DATA_DIR):
        pkg = Package(filename)
    assert pkg.name == 'jms'
    assert pkg.version == '0.0.1.2.0'
    assert pkg.filename == filename
    assert pkg.platform == 'mac'
    assert pkg.info['status'] is True
def test_package_bad_extension(self):
    """Unsupported archive extensions are rejected with a reason."""
    filename = 'pyu-win-0.0.2.bzip2'
    with ChDir(TEST_DATA_DIR):
        pkg = Package(filename)
    assert pkg.filename == filename
    assert pkg.name is None
    assert pkg.version is None
    assert pkg.info['status'] is False
    assert pkg.info['reason'] == ('Not a supported archive format: '
                                  '{}'.format(filename))
def test_package_name_with_spaces(self):
    """Package names containing spaces parse correctly."""
    filename = 'with spaces-nix-0.0.1b1.zip'
    with ChDir(TEST_DATA_DIR):
        pkg = Package(filename)
    assert pkg.name == 'with spaces'
    assert pkg.version == '0.0.1.1.1'
    assert pkg.filename == filename
    assert pkg.platform == 'nix'
    assert pkg.channel == 'beta'
    assert pkg.info['status'] is True
def setup_func():
    """Prepare keys and seed the new-packages dir with v5.0 test data."""
    ph.setup()
    kh.make_keys()
    src = os.path.abspath(
        os.path.join(u'tests', u'test data', u'5.0'))
    with ChDir(src):
        for f in remove_dot_files(os.listdir(os.getcwd())):
            shutil.copy(f, ph.new_dir)
        ph.process_packages()
        kh.sign_update()
def test_package_alpha(self):
    """Alpha-channel versions parse with channel == 'alpha'."""
    filename = 'with spaces-win-0.0.1a2.zip'
    with ChDir(TEST_DATA_DIR):
        pkg = Package(filename)
    assert pkg.name == 'with spaces'
    assert pkg.version == '0.0.1.0.2'
    assert pkg.filename == filename
    assert pkg.platform == 'win'
    assert pkg.channel == 'alpha'
    assert pkg.info['status'] is True
def test_package_1(self):
    """tar.gz archives parse; bare '4.1' expands to '4.1.0.2.0'."""
    filename = 'Acme-mac-4.1.tar.gz'
    with ChDir(TEST_DATA_DIR):
        pkg = Package(filename)
    assert pkg.name == 'Acme'
    assert pkg.version == '4.1.0.2.0'
    assert pkg.filename == filename
    assert pkg.platform == 'mac'
    assert pkg.channel == 'stable'
    assert pkg.info['status'] is True
def _write_update_data(self):
    """Write the signed version file to disk and copy it to deploy.

    Raises:
        KeyHandlerError: if there is no update data, i.e. the update
            has not been signed yet.
    """
    if self.update_data:
        with open(self.version_file, u'w') as f:
            f.write(json.dumps(self.update_data, indent=2,
                               sort_keys=True))
        with ChDir(self.data_dir):
            shutil.copy(u'version.json', self.deploy_dir)
        # Fixed: this was logged before (and regardless of) the
        # write; now it is only logged once the data is on disk.
        log.debug(u'Wrote version data')
    else:
        msg = u'You must sign update data first'
        raise KeyHandlerError(msg, expected=True)
def main():
    """Mirror HTML_DIR into DEST_DIR, clearing old content first.

    Hidden entries and the 'Procfile'/'Staticfile' entries in
    DEST_DIR are preserved; everything else is deleted before the
    copy.
    """
    keep = (u'Procfile', u'Staticfile')
    with ChDir(DEST_DIR):
        for entry in os.listdir(os.getcwd()):
            if entry.startswith(u'.') or entry in keep:
                continue
            if os.path.isfile(entry):
                os.remove(entry)
            elif os.path.isdir(entry):
                shutil.rmtree(entry, ignore_errors=True)
    with ChDir(HTML_DIR):
        for entry in os.listdir(os.getcwd()):
            if entry.startswith(u'.'):
                continue
            dest = os.path.join(DEST_DIR, entry)
            if os.path.isfile(entry):
                shutil.copy(entry, dest)
            elif os.path.isdir(entry):
                # Fixed: was `DEST_DIR + os.sep + entry`;
                # os.path.join is the portable spelling.
                shutil.copytree(entry, dest)
def test_patch_creation():
    """Processing a second version of packages produces patch files."""
    test_data_dir = os.path.abspath(
        os.path.join(u'tests', u'test data', u'5.3'))
    with ChDir(test_data_dir):
        files = remove_dot_files(os.listdir(os.getcwd()))
        for f in files:
            shutil.copy(f, ph.new_dir)
        ph.process_packages()
        kh.sign_update()
    # Fixed: dropped redundant `is True` comparisons --
    # os.path.exists already returns a bool.
    assert os.path.exists(
        os.path.join(PYI_DATA, u'deploy', u'Not So TUF-arm-1'))
    assert os.path.exists(
        os.path.join(PYI_DATA, u'deploy', u'Not So TUF-mac-1'))
def test_execution():
    """Create a zip archive in the new dir, then process and sign it.

    Fixed: removed the needless ``global test_data_dir`` -- the
    module-level name is only read here, never assigned, so the
    declaration had no effect.
    """
    config = TConfig()
    updater = PyiUpdater(config)
    ph = PackageHandler(updater)
    kh = KeyHandler(updater)
    ph.setup()
    kh.test = True
    kh.make_keys()
    # Make zipfile
    with ChDir(test_data_dir):
        os.mkdir(u'test-app')
        with ChDir(u'test-app'):
            with open(u'app.txt', u'w') as f:
                f.write(u'I am so happy' * 1000)
        shutil.make_archive(u'Test App-mac-0.2.0', u'zip',
                            u'test-app')
        shutil.move(u'Test App-mac-0.2.0.zip', u'new')
        ph.process_packages()
        kh.sign_update()
def upload_debug_info():  # pragma: no cover
    """Collect debug log files from LOG_DIR and upload them as a gist.

    Builds a GitHub gist payload from every file in LOG_DIR whose
    name starts with ``settings.LOG_FILENAME_DEBUG`` and POSTs it to
    the gists API, logging the resulting URL.
    """
    log.info('Starting log export')

    def _add_file(payload, filename):
        # Adds one file's text content to the gist payload.
        with io.open(filename, 'r', encoding='utf-8') as f:
            data = f.read()
            payload['files'][filename] = {'content': data}

    def _upload(data):
        # POSTs the payload to the GitHub gist API and returns the
        # gist's html_url, or None on any failure.
        api = 'https://api.github.com/'
        gist_url = api + 'gists'
        http = get_http_pool()
        headers = {"Accept": "application/vnd.github.v3+json"}
        r = http.request('POST', gist_url, headers=headers,
                         data=json.dumps(data))
        try:
            r_string = r.read()
            data = json.loads(r_string)
            url = data['html_url']
        except Exception as err:
            log.debug(err, exc_info=True)
            # NOTE(review): r appears to be a urllib3-style response
            # (it was .read() above); it may not expose .json(), so
            # this debug line could itself raise inside the handler
            # -- confirm.
            log.debug(json.dumps(r.json(), indent=2))
            url = None
        return url

    upload_data = {'files': {}}
    with ChDir(LOG_DIR):
        temp_files = os.listdir(os.getcwd())
        if len(temp_files) == 0:
            log.info('No log files to collect')
            return
        log.info('Collecting logs')
        for t in temp_files:
            if t.startswith(settings.LOG_FILENAME_DEBUG):
                log.debug('Adding %s to log', t)
                _add_file(upload_data, t)
    log.info('Found all logs')
    url = _upload(upload_data)
    if url is None:
        log.error('Could not upload debug info to github')
    else:
        log.info('Log export complete')
        log.info('Logs uploaded to %s', url)
def test_build(self):
    """Building app.py directly produces one artifact in new_folder."""
    t_config = TConfig()
    t_config.DATA_DIR = os.getcwd()
    pyu = PyUpdater(t_config)
    pyu.setup()
    cmd = [str(arg) for arg in
           ['build', '--app-name', 'MyApp',
            '--app-version', '0.1.0', 'app.py']]
    parser = get_parser()
    with open('app.py', 'w') as f:
        f.write('print "Hello, World!"')
    args, pyu_args = parser.parse_known_args(cmd)
    Builder(args, pyu_args).build()
    with ChDir(new_folder):
        assert len(os.listdir(os.getcwd())) == 1
def archive(args):
    """Archive an external lib/target from the new-packages folder.

    Args:
        args: parsed CLI namespace with ``name``, ``target_name``,
            ``version`` and ``keep`` attributes.
    """
    if check_repo() is False:
        _repo_error()
    new_dir = os.path.join(os.getcwd(), settings.USER_DATA_FOLDER,
                           'new')
    name = args.name
    target_name = args.target_name
    version = args.version
    with ChDir(new_dir):
        if not os.path.exists(target_name):
            # Fixed grammar in the log message ("exists" -> "exist").
            log.error('%s does not exist', target_name)
            return
        ex_lib = ExternalLib(name, target_name, version)
        ex_lib.archive()
        if args.keep is False:
            remove_any(target_name)
            log.info('Removed: %s', target_name)
def _verify_installed_binary(self):
    """Load and verify the currently installed binary for patching.

    Reads the installed file into ``self.og_binary`` after checking
    its hash against ``self.current_file_hash``, then deletes the
    file.

    Returns:
        (bool) True on success, False if the binary is missing or
        fails verification.
    """
    log.debug(u'Checking for current installed binary to patch')
    # I just really like using this ChDir context
    # manager. Even sent the developer a cup of coffee
    with ChDir(self.update_folder):
        if not os.path.exists(self.current_filename):
            log.debug(u'Cannot find binary to patch')
            return False
        actual_hash = get_package_hashes(self.current_filename)
        if self.current_file_hash != actual_hash:
            log.debug(u'Binary hash mismatch')
            return False
        with open(self.current_filename, u'rb') as f:
            self.og_binary = f.read()
        os.remove(self.current_filename)
    log.debug(u'Binary found and verified')
    return True
def upload_debug_info(args):  # pragma: no cover
    """Collect debug log files from LOG_DIR and upload them as a gist.

    Args:
        args: CLI namespace; unused here but kept for the command
            dispatch signature.
    """
    log.info('Starting log export')

    def _add_file(payload, filename):
        # Adds one file's text content to the gist payload.
        with open(filename, 'r') as f:
            data = f.read()
            payload['files'][filename] = {'content': data}

    def _upload(data):
        # POSTs the payload to the GitHub gist API and returns the
        # gist's html_url, or None on any failure.
        log.debug(json.dumps(data, indent=2))
        api = 'https://api.github.com/'
        gist_url = api + 'gists'
        headers = {"Accept": "application/vnd.github.v3+json"}
        r = requests.post(gist_url, headers=headers,
                          data=json.dumps(data))
        try:
            url = r.json()['html_url']
        except Exception as err:
            log.debug(str(err), exc_info=True)
            # NOTE(review): if r.json() raised above, calling it
            # again here will raise inside the handler -- confirm.
            log.debug(json.dumps(r.json(), indent=2))
            url = None
        return url

    upload_data = {'files': {}}
    with ChDir(LOG_DIR):
        temp_files = os.listdir(os.getcwd())
        if len(temp_files) == 0:
            log.info('No log files to collect')
            return
        log.info('Collecting logs')
        for t in temp_files:
            if t.startswith(settings.LOG_FILENAME_DEBUG):
                log.debug('Adding {} to log'.format(t))
                _add_file(upload_data, t)
    log.info('Found all logs')
    url = _upload(upload_data)
    if url is None:
        log.error('Could not upload debug info to github')
    else:
        log.info('Log export complete')
        log.info(url)
def _archive_installed_binary(self):
    """Archive the running app and cache it for future patch updates."""
    archive_name = self._get_filename(self.name, self.version)
    archive_path = os.path.join(self.update_folder, archive_name)
    if not os.path.exists(archive_path):
        log.debug(u'Adding base binary v{} to updates '
                  u'folder'.format(self.version))
        # Changing in to directory of currently running exe
        with ChDir(os.path.dirname(sys.argv[0])):
            name = self.name
            if get_system() == u'win':
                name += u'.exe'
            if get_system() == u'mac':
                # If not found must be a mac gui app
                if not os.path.exists(name):
                    name += u'.app'
            ext = os.path.splitext(archive_name)[1]
            archive_format = u'gztar' if u'gz' in ext else u'zip'
            try:
                plat = get_system()
                # NOTE: 'platfrom' is the (misspelled) keyword the
                # project's make_archive actually accepts; do not
                # "fix" it here without changing the callee.
                filename = make_archive(self.name, self.version,
                                        name, archive_format,
                                        platfrom=plat)
            except Exception as err:
                filename = None
                log.error(str(err), exc_info=True)
            if filename is not None:
                shutil.move(filename, self.update_folder)
def _extract_update(self):
    """Extract the downloaded update archive into the update folder.

    Supports gzip tarballs (``.gz``) and ``.zip`` archives; contents
    are extracted into the current directory (the update folder).

    Raises:
        ClientError: if the archive is missing, unreadable, or of an
            unknown type.
    """
    with ChDir(self.update_folder):
        # NOTE(review): platform_name is computed (and .exe-suffixed
        # on Windows) but never used below -- verify whether it is
        # dead code or something downstream was meant to use it.
        platform_name = self.name
        if sys.platform == u'win32' and self.name == self.app_name:
            # We only add .exe to app executable. Not libs or dll
            log.debug(u'Adding .exe to filename for windows main '
                      'app udpate.')
            platform_name += u'.exe'
        latest = self._get_highest_version(self.name)
        filename = self._get_filename(self.name, latest)
        if not os.path.exists(filename):
            raise ClientError(u'File does not exists')
        log.debug(u'Extracting Update')
        archive_ext = os.path.splitext(filename)[1].lower()
        if archive_ext == u'.gz':
            try:
                with tarfile.open(filename, u'r:gz') as tfile:
                    # Extract file update to current
                    # directory.
                    # NOTE(review): extractall() on a hostile
                    # archive can write outside the target dir
                    # (path traversal) -- confirm the archive is
                    # hash-verified before reaching this point.
                    tfile.extractall()
            except Exception as err:
                log.error(err, exc_info=True)
                raise ClientError(u'Error reading gzip file')
        elif archive_ext == u'.zip':
            try:
                with ZipFile(filename, u'r') as zfile:
                    # Extract update file to current
                    # directory.
                    zfile.extractall()
            except Exception as err:
                log.error(err, exc_info=True)
                raise ClientError(u'Error reading zip file')
        else:
            raise ClientError(u'Unknown filetype')
def test_good_conent_length():
    """download_verify_write records the server's content length."""
    with ChDir(u'tests'):
        downloader = FileDownloader(FILENAME, URL, FILE_HASH)
        downloader.download_verify_write()
        assert downloader.content_length == 60000
def test_bad_url():
    """Downloading from an invalid URL yields no data."""
    with ChDir('tests'):
        downloader = FileDownloader(FILENAME, u'bad url', u'bad hash')
        assert downloader.download_verify_return() is None