def _list_upgradable_apps():
    """Yield a summary dict (id, label, current_version, new_version) for
    every installed app that app_info() reports as upgradable."""
    for app_id in os.listdir(APPS_SETTING_PATH):
        infos = app_info(app_id, full=True)
        if infos["upgradable"] != "yes":
            continue

        # FIXME : would make more sense for these infos to be computed
        # directly in app_info and used to check the upgradability of
        # the app...
        settings = infos.get("settings", {})
        catalog = infos.get("from_catalog", {})

        current_version = infos.get("manifest", {}).get("version", "?")
        current_commit = settings.get("current_revision", "?")[:7]
        new_version = catalog.get("manifest", {}).get("version", "?")
        new_commit = catalog.get("git", {}).get("revision", "?")[:7]

        # Same version string on both sides: disambiguate with the short
        # commit hashes so the user can still tell them apart
        if current_version == new_version:
            current_version = current_version + " (" + current_commit + ")"
            new_version = new_version + " (" + new_commit + ")"

        yield {
            "id": app_id,
            "label": infos["label"],
            "current_version": current_version,
            "new_version": new_version,
        }
def tools_update(ignore_apps=False, ignore_packages=False):
    """
    Update apps & package cache, then display changelog

    Keyword arguments:
        ignore_apps -- Ignore app list update and changelog
        ignore_packages -- Ignore apt cache update and changelog

    """
    packages = []
    if not ignore_packages:
        cache = apt.Cache()

        # Update APT cache
        logger.info(m18n.n('updating_apt_cache'))
        if not cache.update():
            raise MoulinetteError(errno.EPERM, m18n.n('update_cache_failed'))
        logger.info(m18n.n('done'))

        # Simulate the full upgrade to collect which packages would change
        cache.open(None)
        cache.upgrade(True)

        # Add changelogs to the result
        for pkg in cache.get_changes():
            packages.append({
                'name': pkg.name,
                'fullname': pkg.fullname,
                'changelog': pkg.get_changelog()
            })

    apps = []
    if not ignore_apps:
        # Best effort: failing to refresh the app list (e.g. network issue)
        # should not prevent reporting what is already known to be upgradable
        try:
            app_fetchlist()
        except MoulinetteError:
            pass
        app_list = os.listdir(apps_setting_path)
        if len(app_list) > 0:
            for app_id in app_list:
                # Multi-instance apps are named '<app>__<n>'; strip the suffix
                # to look up the original app in the catalog
                if '__' in app_id:
                    original_app_id = app_id[:app_id.index('__')]
                else:
                    original_app_id = app_id

                current_app_dict = app_info(app_id, raw=True)
                new_app_dict = app_info(original_app_id, raw=True)

                # Custom app: not in the catalog (new_app_dict presumably None
                # or missing catalog metadata) -- nothing to compare against
                if new_app_dict is None or 'lastUpdate' not in new_app_dict or 'git' not in new_app_dict:
                    continue

                # Upgradable if the catalog entry is newer than the app's last
                # update time -- or its install time if it was never updated
                if (new_app_dict['lastUpdate'] > current_app_dict['lastUpdate']) \
                      or ('update_time' not in current_app_dict['settings'] \
                           and (new_app_dict['lastUpdate'] > current_app_dict['settings']['install_time'])) \
                      or ('update_time' in current_app_dict['settings'] \
                           and (new_app_dict['lastUpdate'] > current_app_dict['settings']['update_time'])):
                    apps.append({
                        'id': app_id,
                        'label': current_app_dict['settings']['label']
                    })

    if len(apps) == 0 and len(packages) == 0:
        logger.info(m18n.n('packages_no_upgrade'))

    return {'packages': packages, 'apps': apps}
def domain_remove(operation_logger, domain, remove_apps=False, force=False):
    """
    Delete domains

    Keyword argument:
        domain -- Domain to delete
        remove_apps -- Remove applications installed on the domain
        force -- Force the domain removal and don't not ask confirmation to
                 remove apps if remove_apps is specified

    """
    from yunohost.hook import hook_callback
    from yunohost.app import app_ssowatconf, app_info, app_remove
    from yunohost.utils.ldap import _get_ldap_interface

    # the 'force' here is related to the exception happening in domain_add ...
    # we don't want to check the domain exists because the ldap add may have
    # failed
    if not force and domain not in domain_list()['domains']:
        raise YunohostError('domain_name_unknown', domain=domain)

    # Check domain is not the main domain
    if domain == _get_maindomain():
        other_domains = domain_list()["domains"]
        other_domains.remove(domain)

        if other_domains:
            raise YunohostError(
                "domain_cannot_remove_main",
                domain=domain,
                other_domains="\n * " + ("\n * ".join(other_domains)),
            )
        else:
            raise YunohostError("domain_cannot_remove_main_add_new_one", domain=domain)

    # Check if apps are installed on the domain
    apps_on_that_domain = []

    for app in _installed_apps():
        settings = _get_app_settings(app)
        label = app_info(app)["name"]
        if settings.get("domain") == domain:
            # Keep (app_id, human-readable description) pairs: the id is used
            # for removal, the description for the confirmation prompt
            apps_on_that_domain.append(
                (app,
                 " - %s \"%s\" on https://%s%s" % (app, label, domain, settings["path"])
                 if "path" in settings
                 else app)
            )

    if apps_on_that_domain:
        if remove_apps:
            # In CLI, ask confirmation before wiping the apps (unless forced)
            if msettings.get('interface') == "cli" and not force:
                answer = msignals.prompt(m18n.n(
                    'domain_remove_confirm_apps_removal',
                    apps="\n".join([x[1] for x in apps_on_that_domain]),
                    answers='y/N'), color="yellow")
                if answer.upper() != "Y":
                    raise YunohostError("aborting")

            for app, _ in apps_on_that_domain:
                app_remove(app)
        else:
            raise YunohostError('domain_uninstall_app_first',
                                apps="\n".join([x[1] for x in apps_on_that_domain]))

    operation_logger.start()

    ldap = _get_ldap_interface()
    try:
        ldap.remove("virtualdomain=" + domain + ",ou=domains")
    except Exception as e:
        raise YunohostError("domain_deletion_failed", domain=domain, error=e)

    os.system("rm -rf /etc/yunohost/certs/%s" % domain)

    # Sometime we have weird issues with the regenconf where some files
    # appears as manually modified even though they weren't touched ...
    # There are a few ideas why this happens (like backup/restore nginx
    # conf ... which we shouldnt do ...). This in turns creates funky
    # situation where the regenconf may refuse to re-create the conf
    # (when re-creating a domain..)
    #
    # So here we force-clear the has out of the regenconf if it exists.
    # This is a pretty ad hoc solution and only applied to nginx
    # because it's one of the major service, but in the long term we
    # should identify the root of this bug...
    _force_clear_hashes(["/etc/nginx/conf.d/%s.conf" % domain])
    # And in addition we even force-delete the file Otherwise, if the file was
    # manually modified, it may not get removed by the regenconf which leads to
    # catastrophic consequences of nginx breaking because it can't load the
    # cert file which disappeared etc..
    if os.path.exists("/etc/nginx/conf.d/%s.conf" % domain):
        _process_regen_conf("/etc/nginx/conf.d/%s.conf" % domain,
                            new_conf=None, save=True)

    regen_conf(names=["nginx", "metronome", "dnsmasq", "postfix"])
    app_ssowatconf()

    hook_callback("post_domain_remove", args=[domain])

    logger.success(m18n.n("domain_deleted"))
def backup_create(name=None, description=None, output_directory=None,
                  no_compress=False, ignore_hooks=False, hooks=None,
                  ignore_apps=False, apps=None):
    """
    Create a backup local archive

    Keyword arguments:
        name -- Name of the backup archive
        description -- Short description of the backup
        output_directory -- Output directory for the backup
        no_compress -- Do not create an archive file
        hooks -- List of backup hooks names to execute
        ignore_hooks -- Do not execute backup hooks
        apps -- List of application names to backup
        ignore_apps -- Do not backup apps

    """
    # TODO: Add a 'clean' argument to clean output directory

    # Fix: avoid mutable default arguments ([] is shared across calls);
    # None is normalized here, which is backward-compatible with callers
    # passing explicit lists
    hooks = [] if hooks is None else hooks
    apps = [] if apps is None else apps

    tmp_dir = None
    env_var = {}

    # Validate what to backup
    if ignore_hooks and ignore_apps:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_action_required'))

    # Validate and define backup name
    timestamp = int(time.time())
    if not name:
        name = time.strftime('%Y%m%d-%H%M%S')
    if name in backup_list()['archives']:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_archive_name_exists'))

    # Validate additional arguments
    if no_compress and not output_directory:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_output_directory_required'))
    if output_directory:
        output_directory = os.path.abspath(output_directory)

        # Check for forbidden folders
        if output_directory.startswith(archives_path) or \
           re.match(r'^/(|(bin|boot|dev|etc|lib|root|run|sbin|sys|usr|var)(|/.*))$',
                    output_directory):
            raise MoulinetteError(errno.EINVAL,
                                  m18n.n('backup_output_directory_forbidden'))

        # Create the output directory
        if not os.path.isdir(output_directory):
            logger.debug("creating output directory '%s'", output_directory)
            os.makedirs(output_directory, 0o750)
        # Check that output directory is empty
        elif no_compress and os.listdir(output_directory):
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_output_directory_not_empty'))

        # Do not compress, so set temporary directory to output one and
        # disable bind mounting to prevent data loss in case of a rm
        # See: https://dev.yunohost.org/issues/298
        if no_compress:
            logger.debug('bind mounting will be disabled')
            tmp_dir = output_directory
            env_var['CAN_BIND'] = 0
    else:
        output_directory = archives_path

        # Create archives directory if it does not exists
        if not os.path.isdir(archives_path):
            os.mkdir(archives_path, 0o750)

    def _clean_tmp_dir(retcode=0):
        # Give hooks a chance to clean up (e.g. unmount binds) before rm'ing;
        # returns whether the cleanup actually happened
        ret = hook_callback('post_backup_create', args=[tmp_dir, retcode])
        if not ret['failed']:
            filesystem.rm(tmp_dir, True, True)
            return True
        else:
            logger.warning(m18n.n('backup_cleaning_failed'))
            return False

    # Create temporary directory
    if not tmp_dir:
        tmp_dir = "%s/tmp/%s" % (backup_path, name)
        if os.path.isdir(tmp_dir):
            logger.debug("temporary directory for backup '%s' already exists",
                         tmp_dir)
            if not _clean_tmp_dir():
                raise MoulinetteError(
                    errno.EIO, m18n.n('backup_output_directory_not_empty'))
        filesystem.mkdir(tmp_dir, 0o750, parents=True, uid='admin')

    # Initialize backup info
    info = {
        'description': description or '',
        'created_at': timestamp,
        'apps': {},
        'hooks': {},
    }

    # Run system hooks
    if not ignore_hooks:
        # Check hooks availibility
        hooks_filtered = set()
        if hooks:
            for hook in hooks:
                try:
                    hook_info('backup', hook)
                # Fix: narrowed from bare 'except:' which also swallowed
                # SystemExit/KeyboardInterrupt
                except Exception:
                    logger.error(m18n.n('backup_hook_unknown', hook=hook))
                else:
                    hooks_filtered.add(hook)

        # Empty hooks_filtered with no explicit request means "run them all"
        if not hooks or hooks_filtered:
            logger.info(m18n.n('backup_running_hooks'))
            ret = hook_callback('backup', hooks_filtered, args=[tmp_dir],
                                env=env_var)
            if ret['succeed']:
                info['hooks'] = ret['succeed']

                # Save relevant restoration hooks
                tmp_hooks_dir = tmp_dir + '/hooks/restore'
                filesystem.mkdir(tmp_hooks_dir, 0o750, True, uid='admin')
                for h in ret['succeed'].keys():
                    try:
                        i = hook_info('restore', h)
                    except Exception:
                        logger.warning(m18n.n('restore_hook_unavailable',
                                              hook=h), exc_info=1)
                    else:
                        for f in i['hooks']:
                            shutil.copy(f['path'], tmp_hooks_dir)

    # Backup apps
    if not ignore_apps:
        # Filter applications to backup
        apps_list = set(os.listdir('/etc/yunohost/apps'))
        apps_filtered = set()
        if apps:
            for a in apps:
                if a not in apps_list:
                    logger.warning(m18n.n('unbackup_app', app=a))
                else:
                    apps_filtered.add(a)
        else:
            apps_filtered = apps_list

        # Run apps backup scripts
        tmp_script = '/tmp/backup_' + str(timestamp)
        for app_instance_name in apps_filtered:
            app_setting_path = '/etc/yunohost/apps/' + app_instance_name

            # Check if the app has a backup and restore script
            app_script = app_setting_path + '/scripts/backup'
            app_restore_script = app_setting_path + '/scripts/restore'
            if not os.path.isfile(app_script):
                logger.warning(m18n.n('unbackup_app', app=app_instance_name))
                continue
            elif not os.path.isfile(app_restore_script):
                # Backup anyway, but warn that it won't be restorable
                logger.warning(m18n.n('unrestore_app', app=app_instance_name))

            tmp_app_dir = '{:s}/apps/{:s}'.format(tmp_dir, app_instance_name)
            tmp_app_bkp_dir = tmp_app_dir + '/backup'
            logger.info(m18n.n('backup_running_app_script',
                               app=app_instance_name))
            try:
                # Prepare backup directory for the app
                filesystem.mkdir(tmp_app_bkp_dir, 0o750, True, uid='admin')
                shutil.copytree(app_setting_path, tmp_app_dir + '/settings')

                # Copy app backup script in a temporary folder and execute it
                subprocess.call(['install', '-Dm555', app_script, tmp_script])

                # Prepare env. var. to pass to script
                app_id, app_instance_nb = _parse_app_instance_name(
                    app_instance_name)
                env_dict = env_var.copy()
                env_dict["YNH_APP_ID"] = app_id
                env_dict["YNH_APP_INSTANCE_NAME"] = app_instance_name
                env_dict["YNH_APP_INSTANCE_NUMBER"] = str(app_instance_nb)
                env_dict["YNH_APP_BACKUP_DIR"] = tmp_app_bkp_dir

                hook_exec(tmp_script,
                          args=[tmp_app_bkp_dir, app_instance_name],
                          raise_on_error=True, chdir=tmp_app_bkp_dir,
                          env=env_dict)
            except Exception:
                # One failing app must not abort the whole backup
                logger.exception(m18n.n('backup_app_failed',
                                        app=app_instance_name))
                # Cleaning app backup directory
                shutil.rmtree(tmp_app_dir, ignore_errors=True)
            else:
                # Add app info
                i = app_info(app_instance_name)
                info['apps'][app_instance_name] = {
                    'version': i['version'],
                    'name': i['name'],
                    'description': i['description'],
                }
            finally:
                filesystem.rm(tmp_script, force=True)

    # Check if something has been saved
    if not info['hooks'] and not info['apps']:
        _clean_tmp_dir(1)
        raise MoulinetteError(errno.EINVAL, m18n.n('backup_nothings_done'))

    # Calculate total size
    backup_size = int(
        subprocess.check_output(['du', '-sb', tmp_dir])
        .split()[0].decode('utf-8'))
    info['size'] = backup_size

    # Create backup info file
    with open("%s/info.json" % tmp_dir, 'w') as f:
        f.write(json.dumps(info))

    # Create the archive
    if not no_compress:
        logger.info(m18n.n('backup_creating_archive'))

        # Check free space in output directory at first
        avail_output = subprocess.check_output(
            ['df', '--block-size=1', '--output=avail', tmp_dir]).split()
        if len(avail_output) < 2 or int(avail_output[1]) < backup_size:
            logger.debug('not enough space at %s (free: %s / needed: %d)',
                         output_directory, avail_output[1], backup_size)
            _clean_tmp_dir(3)
            raise MoulinetteError(errno.EIO, m18n.n(
                'not_enough_disk_space', path=output_directory))

        # Open archive file for writing
        archive_file = "%s/%s.tar.gz" % (output_directory, name)
        try:
            tar = tarfile.open(archive_file, "w:gz")
        except Exception:
            logger.debug("unable to open '%s' for writing",
                         archive_file, exc_info=1)
            _clean_tmp_dir(2)
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_archive_open_failed'))

        # Add files to the archive
        try:
            tar.add(tmp_dir, arcname='')
            tar.close()
        except IOError as e:
            logger.error(m18n.n('backup_archive_writing_error'), exc_info=1)
            _clean_tmp_dir(3)
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_creation_failed'))

        # FIXME : it looks weird that the "move info file" is not enabled if
        # user activated "no_compress" ... or does it really means
        # "dont_keep_track_of_this_backup_in_history" ?

        # Move info file
        shutil.move(tmp_dir + '/info.json',
                    '{:s}/{:s}.info.json'.format(archives_path, name))

        # If backuped to a non-default location, keep a symlink of the archive
        # to that location
        if output_directory != archives_path:
            link = "%s/%s.tar.gz" % (archives_path, name)
            os.symlink(archive_file, link)

    # Clean temporary directory
    if tmp_dir != output_directory:
        _clean_tmp_dir()

    logger.success(m18n.n('backup_created'))

    # Return backup info
    info['name'] = name
    return {'archive': info}
def backup_create(name=None, description=None, output_directory=None,
                  no_compress=False, ignore_hooks=False, hooks=[],
                  ignore_apps=False, apps=[]):
    """
    Create a backup local archive

    Keyword arguments:
        name -- Name of the backup archive
        description -- Short description of the backup
        output_directory -- Output directory for the backup
        no_compress -- Do not create an archive file
        hooks -- List of backup hooks names to execute
        ignore_hooks -- Do not execute backup hooks
        apps -- List of application names to backup
        ignore_apps -- Do not backup apps

    """
    # TODO: Add a 'clean' argument to clean output directory
    tmp_dir = None

    # Validate what to backup
    if ignore_hooks and ignore_apps:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_action_required'))

    # Validate and define backup name
    timestamp = int(time.time())
    if not name:
        name = time.strftime('%Y%m%d-%H%M%S')
    if name in backup_list()['archives']:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_archive_name_exists'))

    # Validate additional arguments
    if no_compress and not output_directory:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_output_directory_required'))
    if output_directory:
        output_directory = os.path.abspath(output_directory)

        # Check for forbidden folders
        if output_directory.startswith(archives_path) or \
           re.match(r'^/(|(bin|boot|dev|etc|lib|root|run|sbin|sys|usr|var)(|/.*))$',
                    output_directory):
            raise MoulinetteError(errno.EINVAL,
                                  m18n.n('backup_output_directory_forbidden'))

        # Create the output directory
        if not os.path.isdir(output_directory):
            logger.debug("creating output directory '%s'", output_directory)
            os.makedirs(output_directory, 0750)
        # Check that output directory is empty
        elif no_compress and os.listdir(output_directory):
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_output_directory_not_empty'))

    # Define temporary directory
    if no_compress:
        # no_compress implies output_directory (validated above): write the
        # backup tree directly there instead of a tmp dir
        tmp_dir = output_directory
    else:
        output_directory = archives_path

    # Create temporary directory
    if not tmp_dir:
        tmp_dir = "%s/tmp/%s" % (backup_path, name)
        if os.path.isdir(tmp_dir):
            logger.debug("temporary directory for backup '%s' already exists",
                         tmp_dir)
            # Leftover from a previous failed run: wipe and recreate
            filesystem.rm(tmp_dir, recursive=True)
        filesystem.mkdir(tmp_dir, 0750, parents=True, uid='admin')

    def _clean_tmp_dir(retcode=0):
        # Run post-backup hooks first so they can clean up; only remove the
        # tmp dir if none of them failed
        ret = hook_callback('post_backup_create', args=[tmp_dir, retcode])
        if not ret['failed']:
            filesystem.rm(tmp_dir, True, True)
        else:
            logger.warning(m18n.n('backup_cleaning_failed'))

    # Initialize backup info
    info = {
        'description': description or '',
        'created_at': timestamp,
        'apps': {},
        'hooks': {},
    }

    # Run system hooks
    if not ignore_hooks:
        # Check hooks availibility
        hooks_filtered = set()
        if hooks:
            for hook in hooks:
                try:
                    hook_info('backup', hook)
                except:
                    logger.error(m18n.n('backup_hook_unknown', hook=hook))
                else:
                    hooks_filtered.add(hook)

        # An empty hooks_filtered with no explicit request means "run all"
        if not hooks or hooks_filtered:
            logger.info(m18n.n('backup_running_hooks'))
            ret = hook_callback('backup', hooks_filtered, args=[tmp_dir])
            if ret['succeed']:
                info['hooks'] = ret['succeed']

                # Save relevant restoration hooks
                tmp_hooks_dir = tmp_dir + '/hooks/restore'
                filesystem.mkdir(tmp_hooks_dir, 0750, True, uid='admin')
                for h in ret['succeed'].keys():
                    try:
                        i = hook_info('restore', h)
                    except:
                        logger.warning(m18n.n('restore_hook_unavailable',
                                              hook=h), exc_info=1)
                    else:
                        for f in i['hooks']:
                            shutil.copy(f['path'], tmp_hooks_dir)

    # Backup apps
    if not ignore_apps:
        # Filter applications to backup
        apps_list = set(os.listdir('/etc/yunohost/apps'))
        apps_filtered = set()
        if apps:
            for a in apps:
                if a not in apps_list:
                    logger.warning(m18n.n('unbackup_app', app=a))
                else:
                    apps_filtered.add(a)
        else:
            apps_filtered = apps_list

        # Run apps backup scripts
        tmp_script = '/tmp/backup_' + str(timestamp)
        for app_instance_name in apps_filtered:
            app_setting_path = '/etc/yunohost/apps/' + app_instance_name

            # Check if the app has a backup and restore script
            app_script = app_setting_path + '/scripts/backup'
            app_restore_script = app_setting_path + '/scripts/restore'
            if not os.path.isfile(app_script):
                logger.warning(m18n.n('unbackup_app', app=app_instance_name))
                continue
            elif not os.path.isfile(app_restore_script):
                # Backed up anyway, but warn it won't be restorable
                logger.warning(m18n.n('unrestore_app', app=app_instance_name))

            tmp_app_dir = '{:s}/apps/{:s}'.format(tmp_dir, app_instance_name)
            tmp_app_bkp_dir = tmp_app_dir + '/backup'
            logger.info(m18n.n('backup_running_app_script',
                               app=app_instance_name))
            try:
                # Prepare backup directory for the app
                filesystem.mkdir(tmp_app_bkp_dir, 0750, True, uid='admin')
                shutil.copytree(app_setting_path, tmp_app_dir + '/settings')

                # Copy app backup script in a temporary folder and execute it
                subprocess.call(['install', '-Dm555', app_script, tmp_script])

                # Prepare env. var. to pass to script
                env_dict = {}
                app_id, app_instance_nb = _parse_app_instance_name(
                    app_instance_name)
                env_dict["YNH_APP_ID"] = app_id
                env_dict["YNH_APP_INSTANCE_NAME"] = app_instance_name
                env_dict["YNH_APP_INSTANCE_NUMBER"] = str(app_instance_nb)
                env_dict["YNH_APP_BACKUP_DIR"] = tmp_app_bkp_dir

                hook_exec(tmp_script,
                          args=[tmp_app_bkp_dir, app_instance_name],
                          raise_on_error=True, chdir=tmp_app_bkp_dir,
                          env=env_dict)
            except:
                # One failing app does not abort the whole backup
                logger.exception(m18n.n('backup_app_failed',
                                        app=app_instance_name))
                # Cleaning app backup directory
                shutil.rmtree(tmp_app_dir, ignore_errors=True)
            else:
                # Add app info
                i = app_info(app_instance_name)
                info['apps'][app_instance_name] = {
                    'version': i['version'],
                    'name': i['name'],
                    'description': i['description'],
                }
            finally:
                filesystem.rm(tmp_script, force=True)

    # Check if something has been saved
    if not info['hooks'] and not info['apps']:
        _clean_tmp_dir(1)
        raise MoulinetteError(errno.EINVAL, m18n.n('backup_nothings_done'))

    # Calculate total size
    size = subprocess.check_output(
        ['du','-sb', tmp_dir]).split()[0].decode('utf-8')
    info['size'] = int(size)

    # Create backup info file
    with open("%s/info.json" % tmp_dir, 'w') as f:
        f.write(json.dumps(info))

    # Create the archive
    if not no_compress:
        logger.info(m18n.n('backup_creating_archive'))
        archive_file = "%s/%s.tar.gz" % (output_directory, name)
        try:
            tar = tarfile.open(archive_file, "w:gz")
        except:
            tar = None

            # Create the archives directory and retry
            if not os.path.isdir(archives_path):
                os.mkdir(archives_path, 0750)
                try:
                    tar = tarfile.open(archive_file, "w:gz")
                except:
                    logger.debug("unable to open '%s' for writing",
                                 archive_file, exc_info=1)
                    tar = None
            else:
                logger.debug("unable to open '%s' for writing",
                             archive_file, exc_info=1)
        if tar is None:
            _clean_tmp_dir(2)
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_archive_open_failed'))
        tar.add(tmp_dir, arcname='')
        tar.close()

        # Move info file
        os.rename(tmp_dir + '/info.json',
                  '{:s}/{:s}.info.json'.format(archives_path, name))

    # Clean temporary directory
    if tmp_dir != output_directory:
        _clean_tmp_dir()

    logger.success(m18n.n('backup_complete'))

    # Return backup info
    info['name'] = name
    return { 'archive': info }
def tools_update(ignore_apps=False, ignore_packages=False):
    """
    Update apps & package cache, then display changelog

    Keyword arguments:
        ignore_apps -- Ignore app list update and changelog
        ignore_packages -- Ignore apt cache update and changelog

    """
    from yunohost.app import app_fetchlist, app_info

    packages = []
    if not ignore_packages:
        cache = apt.Cache()

        # Update APT cache
        msignals.display(m18n.n('updating_apt_cache'))
        if not cache.update():
            raise MoulinetteError(errno.EPERM, m18n.n('update_cache_failed'))
        msignals.display(m18n.n('done'))

        # Simulate the full upgrade to collect which packages would change
        cache.open(None)
        cache.upgrade(True)

        # Add changelogs to the result
        for pkg in cache.get_changes():
            packages.append({
                'name': pkg.name,
                'fullname': pkg.fullname,
                'changelog': pkg.get_changelog()
            })

    apps = []
    if not ignore_apps:
        # Fix: best effort -- a failure to refresh the app list (e.g. network
        # issue) must not abort the whole update
        try:
            app_fetchlist()
        except MoulinetteError:
            pass

        app_list = os.listdir(apps_setting_path)
        if len(app_list) > 0:
            for app_id in app_list:
                # Multi-instance apps are named '<app>__<n>'; strip the
                # suffix to look up the original app in the catalog
                if '__' in app_id:
                    original_app_id = app_id[:app_id.index('__')]
                else:
                    original_app_id = app_id

                current_app_dict = app_info(app_id, raw=True)
                new_app_dict = app_info(original_app_id, raw=True)

                # Custom app: not in the catalog -- nothing to compare
                # against. Fix: also guard against new_app_dict being None,
                # which previously crashed the 'in' checks below.
                if new_app_dict is None or 'lastUpdate' not in new_app_dict or 'git' not in new_app_dict:
                    continue

                # Upgradable if the catalog entry is newer than the app's
                # last update time -- or its install time if never updated
                if (new_app_dict['lastUpdate'] > current_app_dict['lastUpdate']) \
                      or ('update_time' not in current_app_dict['settings'] \
                           and (new_app_dict['lastUpdate'] > current_app_dict['settings']['install_time'])) \
                      or ('update_time' in current_app_dict['settings'] \
                           and (new_app_dict['lastUpdate'] > current_app_dict['settings']['update_time'])):
                    apps.append({
                        'id': app_id,
                        'label': current_app_dict['settings']['label']
                    })

    if len(apps) == 0 and len(packages) == 0:
        msignals.display(m18n.n('packages_no_upgrade'))

    return { 'packages': packages, 'apps': apps }
def backup_create(name=None, description=None, output_directory=None,
                  no_compress=False, ignore_apps=False):
    """
    Create a backup local archive

    Keyword arguments:
        name -- Name of the backup archive
        description -- Short description of the backup
        output_directory -- Output directory for the backup
        no_compress -- Do not create an archive file
        ignore_apps -- Do not backup apps

    """
    # TODO: Add a 'clean' argument to clean output directory
    from yunohost.hook import hook_add
    from yunohost.hook import hook_callback

    tmp_dir = None

    # Validate and define backup name
    timestamp = int(time.time())
    if not name:
        name = str(timestamp)
    if name in backup_list()['archives']:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_archive_name_exists'))

    # Validate additional arguments
    if no_compress and not output_directory:
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('backup_output_directory_required'))
    if output_directory:
        output_directory = os.path.abspath(output_directory)

        # Check for forbidden folders
        if output_directory.startswith(archives_path) or \
           re.match(r'^/(|(bin|boot|dev|etc|lib|root|run|sbin|sys|usr|var)(|/.*))$',
                    output_directory):
            # Fix: format specifier was '%' (invalid), which broke this log
            # call instead of printing the directory
            logger.error("forbidden output directory '%s'", output_directory)
            raise MoulinetteError(errno.EINVAL,
                                  m18n.n('backup_output_directory_forbidden'))

        # Create the output directory
        if not os.path.isdir(output_directory):
            logger.info("creating output directory '%s'", output_directory)
            os.makedirs(output_directory, 0o750)
        # Check that output directory is empty
        elif no_compress and os.listdir(output_directory):
            # Fix: same invalid '%' specifier as above
            logger.error("not empty output directory '%s'", output_directory)
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_output_directory_not_empty'))

    # Define temporary directory
    if no_compress:
        # no_compress implies output_directory (validated above): write the
        # backup tree directly there
        tmp_dir = output_directory
    else:
        output_directory = archives_path

    # Create temporary directory
    if not tmp_dir:
        tmp_dir = "%s/tmp/%s" % (backup_path, name)
        if os.path.isdir(tmp_dir):
            # Leftover from a previous failed run: wipe it
            logger.warning("temporary directory for backup '%s' already exists",
                           tmp_dir)
            os.system('rm -rf %s' % tmp_dir)
        try:
            os.mkdir(tmp_dir, 0o750)
        except OSError:
            # Create temporary directory recursively
            os.makedirs(tmp_dir, 0o750)
            os.system('chown -hR admin: %s' % backup_path)
        else:
            os.system('chown -hR admin: %s' % tmp_dir)

    # Initialize backup info
    info = {
        'description': description or '',
        'created_at': timestamp,
        'apps': {},
    }

    # Add apps backup hook
    if not ignore_apps:
        from yunohost.app import app_info
        try:
            for app_id in os.listdir('/etc/yunohost/apps'):
                hook = '/etc/yunohost/apps/%s/scripts/backup' % app_id
                if os.path.isfile(hook):
                    hook_add(app_id, hook)

                    # Add app info
                    i = app_info(app_id)
                    info['apps'][app_id] = {
                        'version': i['version'],
                    }
                else:
                    logger.warning("unable to find app's backup hook '%s'",
                                   hook)
                    msignals.display(m18n.n('unbackup_app', app_id),
                                     'warning')
        except IOError as e:
            logger.info("unable to add apps backup hook: %s", str(e))

    # Run hooks
    msignals.display(m18n.n('backup_running_hooks'))
    hook_callback('backup', [tmp_dir])

    # Create backup info file
    with open("%s/info.json" % tmp_dir, 'w') as f:
        f.write(json.dumps(info))

    # Create the archive
    if not no_compress:
        msignals.display(m18n.n('backup_creating_archive'))
        archive_file = "%s/%s.tar.gz" % (output_directory, name)
        try:
            tar = tarfile.open(archive_file, "w:gz")
        # Fix: narrowed from bare 'except:' which also swallowed
        # SystemExit/KeyboardInterrupt
        except Exception:
            tar = None

            # Create the archives directory and retry
            if not os.path.isdir(archives_path):
                os.mkdir(archives_path, 0o750)
                try:
                    tar = tarfile.open(archive_file, "w:gz")
                except Exception:
                    logger.exception("unable to open the archive '%s' for writing "
                                     "after creating directory '%s'",
                                     archive_file, archives_path)
                    tar = None
            else:
                logger.exception("unable to open the archive '%s' for writing",
                                 archive_file)
        if tar is None:
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_archive_open_failed'))
        tar.add(tmp_dir, arcname='')
        tar.close()

    # Copy info file
    os.system('mv %s/info.json %s/%s.info.json' %
              (tmp_dir, archives_path, name))

    # Clean temporary directory
    if tmp_dir != output_directory:
        os.system('rm -rf %s' % tmp_dir)

    msignals.display(m18n.n('backup_complete'), 'success')
def domain_remove(operation_logger, domain, force=False):
    """
    Delete domains

    Keyword argument:
        domain -- Domain to delete
        force -- Force the domain removal

    """
    from yunohost.hook import hook_callback
    from yunohost.app import app_ssowatconf, app_info
    from yunohost.utils.ldap import _get_ldap_interface

    # 'force' skips the existence check -- presumably so a half-failed
    # domain_add (ldap add failed) can still be cleaned up; TODO confirm
    if not force and domain not in domain_list()['domains']:
        raise YunohostError('domain_name_unknown', domain=domain)

    # Check domain is not the main domain
    if domain == _get_maindomain():
        other_domains = domain_list()["domains"]
        other_domains.remove(domain)

        if other_domains:
            raise YunohostError('domain_cannot_remove_main',
                                domain=domain,
                                other_domains="\n * " + ("\n * ".join(other_domains)))
        else:
            raise YunohostError('domain_cannot_remove_main_add_new_one',
                                domain=domain)

    # Check if apps are installed on the domain
    apps_on_that_domain = []

    for app in _installed_apps():
        settings = _get_app_settings(app)
        label = app_info(app)["name"]
        if settings.get("domain") == domain:
            # Human-readable description for the error message below
            apps_on_that_domain.append(" - %s \"%s\" on https://%s%s" % (app, label, domain, settings["path"]) if "path" in settings else app)

    # Refuse removal while apps still live on the domain
    if apps_on_that_domain:
        raise YunohostError('domain_uninstall_app_first',
                            apps="\n".join(apps_on_that_domain))

    operation_logger.start()

    ldap = _get_ldap_interface()
    try:
        ldap.remove('virtualdomain=' + domain + ',ou=domains')
    except Exception as e:
        raise YunohostError('domain_deletion_failed', domain=domain, error=e)

    os.system('rm -rf /etc/yunohost/certs/%s' % domain)

    # Sometime we have weird issues with the regenconf where some files
    # appears as manually modified even though they weren't touched ...
    # There are a few ideas why this happens (like backup/restore nginx
    # conf ... which we shouldnt do ...). This in turns creates funky
    # situation where the regenconf may refuse to re-create the conf
    # (when re-creating a domain..)
    #
    # So here we force-clear the has out of the regenconf if it exists.
    # This is a pretty ad hoc solution and only applied to nginx
    # because it's one of the major service, but in the long term we
    # should identify the root of this bug...
    _force_clear_hashes(["/etc/nginx/conf.d/%s.conf" % domain])
    # And in addition we even force-delete the file Otherwise, if the file was
    # manually modified, it may not get removed by the regenconf which leads to
    # catastrophic consequences of nginx breaking because it can't load the
    # cert file which disappeared etc..
    if os.path.exists("/etc/nginx/conf.d/%s.conf" % domain):
        _process_regen_conf("/etc/nginx/conf.d/%s.conf" % domain,
                            new_conf=None, save=True)

    # Regenerate configs of the services that reference domains
    regen_conf(names=['nginx', 'metronome', 'dnsmasq', 'postfix'])
    app_ssowatconf()

    hook_callback('post_domain_remove', args=[domain])

    logger.success(m18n.n('domain_deleted'))