def backup(self, module: str, path: Path) -> Optional[Path]:
    """
    Back up ``path`` if it exists and was not created by Astrality.

    :param module: Module requesting file to be backed up.
    :param path: Path to file to back up.
    :return: Optional path to backup file.
    """
    # Files tracked by Astrality need no backup, and missing files
    # cannot be backed up at all.
    if path in self or not path.exists():
        return None

    # A short digest of the absolute path keeps equally named files from
    # different directories apart in the backup folder.
    digest = hashlib.md5(str(path).encode('utf-8')).hexdigest()[:7]
    backup = XDG().data(f'backups/{module}/{path.name}-{digest}')

    utils.move(source=path, destination=backup, follow_symlinks=False)

    # Record the backup location so cleanup() can restore it later.
    module_section = self.creations.setdefault(module, {})
    module_section[str(path)] = {  # type: ignore
        'backup': str(backup),
    }
    utils.dump_yaml(data=self.creations, path=self.path)
    return backup
def test_missing_global_configuration_file(self, monkeypatch, tmpdir):
    """Missing astrality.yml should result in default values."""
    # Use a temporary directory as the astrality config directory.
    config_home = Path(tmpdir)
    monkeypatch.setattr(
        os,
        'environ',
        {'ASTRALITY_CONFIG_HOME': str(config_home)},
    )

    # Sanity check: the directory starts out empty.
    assert not list(config_home.iterdir())

    # Create modules and context files, but *not* astrality.yml.
    modules = {'A': {'enabled': False}}
    dump_yaml(path=config_home / 'modules.yml', data=modules)

    context = {'section': {'key': 'value'}}
    dump_yaml(path=config_home / 'context.yml', data=context)

    result = user_configuration()
    global_config, global_modules, global_context, inferred_path = result

    # Global settings fall back to defaults, while modules/context are
    # still read from their respective files.
    assert global_config == ASTRALITY_DEFAULT_GLOBAL_SETTINGS
    assert global_modules == modules
    assert global_context == context
    assert inferred_path == config_home
def cleanup(self, module: str, dry_run: bool = False) -> None:
    """
    Delete files created by module.

    :param module: Name of module, file creation of which will be deleted.
    :param dry_run: If True, no files will be deleted, only logging will
        occur.
    """
    logger = logging.getLogger(__name__)
    module_creations = self.creations.get(module, {})

    # This dictionary will be populated with all directories which we can't
    # delete. Those should still be tracked after cleaning up the module.
    dangling_directories: Dict[str, CreationInfo] = {}

    for creation, info in sorted(
        module_creations.items(),
        # Sort deepest paths first so that a directory's contents are
        # handled before the directory itself.
        key=lambda item: -len(Path(item[0]).parts),  # depth-first order
    ):
        creation_method = info['method']
        content = info['content']
        backup = info['backup']
        log_msg = (
            f'[Cleanup] Deleting "{creation}" '
            f'({creation_method} content from "{content}"). '
            f'Backup replacement: {backup}.'
        )
        if dry_run:
            # Log what *would* have happened, then leave the file alone.
            logger.info('SKIPPED: ' + log_msg)
            continue

        creation_path = Path(creation)
        if creation_path.is_dir():
            try:
                logger.info(log_msg)
                # rmdir() only removes *empty* directories, which protects
                # any non-module files placed there since creation.
                creation_path.rmdir()
            except OSError:
                logger.warning(
                    f'Failed to remove created directory "{creation}", '
                    'as it contains new non-module files since creation! '
                    'Try to delete files manually and then cleanup again.',
                )
                # Keep tracking this directory for a future cleanup.
                dangling_directories[creation] = info
        elif creation_path.exists():
            logger.info(log_msg)
            creation_path.unlink()
        else:
            logger.info(log_msg + ' [No longer exists!]')

        # Restore the pre-existing file that was backed up before this
        # module overwrote it, if any.
        if backup and Path(backup).exists():
            utils.move(
                source=info['backup'],
                destination=creation,
                follow_symlinks=False,
            )

    if not dry_run:
        # Forget the module's creations, but re-track directories which
        # could not be removed above.
        self.creations.pop(module, None)
        if dangling_directories:
            self.creations[module] = dangling_directories
        utils.dump_yaml(data=self.creations, path=self.path)
def path(self) -> Path:
    """Return path to file which stores files created by modules."""
    try:
        # Already resolved during an earlier invocation.
        return self._path
    except AttributeError:
        pass

    self._path = XDG('astrality').data(resource='created_files.yml')

    # An empty file is not a valid YAML document; seed it with an
    # empty mapping so later loads succeed.
    if os.stat(self._path).st_size == 0:
        self._path.touch()
        utils.dump_yaml(data={}, path=self._path)

    return self._path
def test_trying_to_kill_process_no_longer_running(self):
    """No longer running processes should be handled gracefully."""
    finished = psutil.Popen(['echo', 'Done!'])
    # Capture the identity *before* the process exits, exactly as
    # Astrality records its own identity on startup.
    identity = finished.as_dict(attrs=['pid', 'create_time', 'username'])
    finished.wait()

    utils.dump_yaml(data=identity, path=XDG().data('astrality.pid'))

    # Must not raise, even though the recorded process is gone.
    kill_old_astrality_processes()
def insert( self, module: str, creation_method: CreationMethod, contents: Iterable[Optional[Path]], targets: Iterable[Path], ) -> None: """ Insert files created by a module. :param module: Name of module which has created the files. :param creation_method: Type of action which has created the file. :param contents: The source files used in creating the files. None if a created directory. :param targets: The files that have be created. """ # We do not want to insert empty sections, to reduce reduntant clutter if not contents: return modified = False module_section = self.creations.setdefault(module, {}) for content, target in zip(contents, targets): # Do not insert files that actually do not exist if not target.exists(): continue creation = module_section.setdefault( str(target), {}, # type: ignore ) if creation.get('content') != str(content): modified = True creation['content'] = str(content) creation['method'] = creation_method.value creation.setdefault('backup', None) try: creation['hash'] = hashlib.md5( target.read_bytes(), ).hexdigest() except (PermissionError, IsADirectoryError): # Currently, we do not hash directories or content with # lacking permissions to read. creation['hash'] = None if modified: utils.dump_yaml(data=self.creations, path=self.path)
def test_killing_old_running_process(self):
    """The same running process should be killed."""
    sleeper = psutil.Popen([
        'python',
        '-c',
        '"from time import sleep; sleep(9999999999999)"',
    ])

    # Persist the process identity exactly as Astrality would on startup.
    utils.dump_yaml(
        data=sleeper.as_dict(attrs=['pid', 'create_time', 'username']),
        path=XDG().data('astrality.pid'),
    )

    kill_old_astrality_processes()

    # Termination is asynchronous, so poll until the process is gone.
    assert Retry()(lambda: not sleeper.is_running())
def write(self) -> None:
    """Persist all actions that have been checked in object lifetime."""
    # Nothing new to persist.
    if not self.new_actions:
        return

    file_data = utils.load_yaml(path=self.path)
    module_actions = file_data.setdefault(self.module, {})

    # Append the newly executed actions to whatever is already saved.
    for action_type, action_options in self.new_actions.items():
        module_actions.setdefault(action_type, []).extend(action_options)

    utils.dump_yaml(path=self.path, data=file_data)
def reset(self) -> None:
    """Delete all executed module actions."""
    file_data = utils.load_yaml(path=self.path)
    reset_actions = file_data.pop(self.module, None)

    logger = logging.getLogger(__name__)
    if reset_actions:
        logger.info(
            f'Reset the following actions for module "{self.module}":\n'
            + utils.yaml_str({self.module: reset_actions}),
        )
    else:
        logger.error(
            'No saved executed on_setup actions for module '
            f'"{self.module}"!',
        )

    # Persist the file with this module's section removed.
    utils.dump_yaml(path=self.path, data=file_data)
    self.old_actions = {}
def test_not_killing_new_procces_with_same_pid(self):
    """The process should not be killed when it is not the original saved"""
    sleeper = psutil.Popen([
        'python',
        '-c',
        '"from time import sleep; sleep(9999999999999)"',
    ])
    identity = sleeper.as_dict(attrs=['pid', 'create_time', 'username'])
    # Fake a *different* process that happens to reuse the same pid by
    # shifting the recorded creation time.
    identity['create_time'] += 1

    utils.dump_yaml(data=identity, path=XDG().data('astrality.pid'))
    kill_old_astrality_processes()

    # The mismatching identity must leave the real process untouched.
    assert Retry()(lambda: sleeper.is_running())
    sleeper.kill()
def kill_old_astrality_processes() -> None:
    """
    Kill any previous Astrality process instance.

    This process kills the last process which invoked this function.
    If the process is no longer running, it is owned by another user, or
    has a new create_time, it will *not* be killed.
    """
    # The current process
    new_process = psutil.Process()

    # Fetch info of possible previous process instance
    pidfile = XDG().data('astrality.pid')
    old_process_info = utils.load_yaml(path=pidfile)

    # Replace the pidfile content with *this* process' identity, so the
    # next invocation can find and kill us in turn.
    utils.dump_yaml(
        data=new_process.as_dict(attrs=['pid', 'create_time', 'username']),
        path=pidfile,
    )

    if not old_process_info or not psutil.pid_exists(old_process_info['pid']):
        return

    try:
        old_process = psutil.Process(pid=old_process_info['pid'])
    except psutil.Error:
        # The process disappeared between the pid_exists() check and now.
        # Narrowed from `except BaseException`, which also swallowed
        # KeyboardInterrupt and SystemExit.
        return

    # Only kill when pid, create_time, and username all match: a recycled
    # pid must never cause us to kill an unrelated process.
    if not old_process.as_dict(
        attrs=['pid', 'create_time', 'username'],
    ) == old_process_info:
        return

    try:
        logger.info(
            'Killing duplicate Astrality process with pid: '
            f'{old_process.pid}.',
        )
        old_process.terminate()
        old_process.wait()
    except psutil.Error:
        # Best effort only: e.g. AccessDenied, or the process exited on
        # its own before terminate()/wait() completed.
        logger.error(
            f'Could not kill old instance of astrality with pid: '
            f'{old_process.pid}. Continuing anyway...',
        )
def cleanup(self, module: str, dry_run: bool = False) -> None:
    """
    Delete files created by module.

    :param module: Name of module, file creation of which will be deleted.
    :param dry_run: If True, no files will be deleted, only logging will
        occur.
    """
    logger = logging.getLogger(__name__)

    for creation, info in self.creations.get(module, {}).items():
        creation_method = info['method']
        content = info['content']
        backup = info['backup']
        log_msg = (
            f'[Cleanup] Deleting "{creation}" '
            f'({creation_method} content from "{content}"). '
            f'Backup replacement: {backup}.'
        )

        if dry_run:
            # Only report what would have been done.
            logger.info('SKIPPED: ' + log_msg)
            continue

        target = Path(creation)
        if not target.exists():
            logger.info(log_msg + ' [No longer exists!]')
        else:
            logger.info(log_msg)
            target.unlink()

        # Restore the original file that was backed up before the module
        # overwrote it, if such a backup exists.
        if backup and Path(backup).exists():
            utils.move(  # type: ignore
                source=info['backup'],
                destination=creation,
                follow_symlinks=False,
            )

    if not dry_run:
        self.creations.pop(module, None)
        utils.dump_yaml(data=self.creations, path=self.path)