Beispiel #1
0
    async def load(self):
        """Discover and register alert sources and alert services.

        Scans the main middlewared tree and every overlay directory for
        alert source/service modules and registers what it finds in the
        module-level ``ALERT_SOURCES`` and ``ALERT_SERVICES_FACTORIES``
        registries.

        Raises:
            RuntimeError: if two alert sources share the same name.
        """
        # Alert sources: scan the main directory first, then every overlay.
        main_sources_dir = os.path.join(get_middlewared_dir(), "alert",
                                        "source")
        sources_dirs = [
            os.path.join(overlay_dir, "alert", "source")
            for overlay_dir in self.middleware.overlay_dirs
        ]
        sources_dirs.insert(0, main_sources_dir)
        for sources_dir in sources_dirs:
            for module in load_modules(sources_dir):
                for cls in load_classes(
                        module, AlertSource,
                    (FilePresenceAlertSource, ThreadedAlertSource)):
                    source = cls(self.middleware)
                    # Fail fast instead of silently overwriting an
                    # already-registered source with the same name.
                    if source.name in ALERT_SOURCES:
                        raise RuntimeError(
                            f"Alert source {source.name} is already registered"
                        )
                    ALERT_SOURCES[source.name] = source

        # Alert services live in alert/service next to this package.
        main_services_dir = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), os.path.pardir,
            "alert", "service")
        services_dirs = [
            os.path.join(overlay_dir, "alert", "service")
            for overlay_dir in self.middleware.overlay_dirs
        ]
        services_dirs.insert(0, main_services_dir)
        for services_dir in services_dirs:
            for module in load_modules(services_dir):
                for cls in load_classes(
                        module, _AlertService,
                    (ThreadedAlertService, ProThreadedAlertService)):
                    ALERT_SERVICES_FACTORIES[cls.name()] = cls
Beispiel #2
0
    async def load(self):
        """Load alert source and alert service classes into the registries.

        The main middlewared tree is scanned first, then each overlay
        directory, so the registration order matches the directory order.

        Raises:
            RuntimeError: when two alert sources share the same name.
        """
        # Alert sources: <middlewared>/alert/source plus the matching
        # subdirectory of every overlay.
        source_dirs = [os.path.join(get_middlewared_dir(), "alert", "source")]
        source_dirs += [
            os.path.join(overlay_dir, "alert", "source")
            for overlay_dir in self.middleware.overlay_dirs
        ]
        for directory in source_dirs:
            for mod in load_modules(directory):
                for source_cls in load_classes(
                        mod, AlertSource,
                        (FilePresenceAlertSource, ThreadedAlertSource)):
                    instance = source_cls(self.middleware)
                    # Duplicate names indicate a packaging/overlay bug.
                    if instance.name in ALERT_SOURCES:
                        raise RuntimeError(
                            f"Alert source {instance.name} is already registered"
                        )
                    ALERT_SOURCES[instance.name] = instance

        # Alert services: the "alert/service" directory that sits next to
        # this package, plus every overlay's counterpart.
        base_dir = os.path.dirname(os.path.realpath(__file__))
        service_dirs = [
            os.path.join(base_dir, os.path.pardir, "alert", "service")
        ]
        service_dirs += [
            os.path.join(overlay_dir, "alert", "service")
            for overlay_dir in self.middleware.overlay_dirs
        ]
        for directory in service_dirs:
            for mod in load_modules(directory):
                for service_cls in load_classes(
                        mod, _AlertService,
                        (ThreadedAlertService, ProThreadedAlertService)):
                    ALERT_SERVICES_FACTORIES[service_cls.name()] = service_cls
Beispiel #3
0
def load_migrations(middleware):
    """Return all migration modules, sorted by module name.

    Modules are collected from the main middlewared "migration" directory
    and from the "migration" subdirectory of each overlay directory.
    """
    migration_dirs = [os.path.join(get_middlewared_dir(), "migration")]
    migration_dirs += [
        os.path.join(overlay_dir, "migration")
        for overlay_dir in middleware.overlay_dirs
    ]

    modules = [
        module
        for migration_dir in migration_dirs
        for module in load_modules(migration_dir)
    ]
    return sorted(modules, key=lambda module: module.__name__)
Beispiel #4
0
        if provider.fast_list:
            schema.append(
                Bool("fast_list",
                     default=False,
                     title="Use --fast-list",
                     description=textwrap.dedent("""\
                Use fewer transactions in exchange for more RAM. This may also speed up or slow down your
                transfer. See [rclone documentation](https://rclone.org/docs/#fast-list) for more details.
            """).rstrip()))

        return schema


# Register every rclone remote implementation and graft each remote's
# extra methods onto CloudSyncService under a "<remote>_<method>" name.
remote_classes = []
remote_dir = os.path.join(get_middlewared_dir(), "rclone", "remote")
for remote_module in load_modules(remote_dir):
    for remote_cls in load_classes(remote_module, BaseRcloneRemote, []):
        remote_classes.append(remote_cls)
        prefix = remote_cls.name.lower()
        for extra in remote_cls.extra_methods:
            setattr(CloudSyncService, f"{prefix}_{extra}",
                    getattr(remote_cls, extra))


class CloudSyncFSAttachmentDelegate(LockableFSAttachmentDelegate):
    """Attachment delegate tying cloud sync tasks to their 'path' resource."""

    # Identifier and human-readable title used by the attachment framework.
    name = 'cloudsync'
    title = 'CloudSync Task'
    # Service whose records are matched against attached paths.
    service_class = CloudSyncService
    # Field of the task record that holds the filesystem path.
    resource_name = 'path'

    async def restart_reload_services(self, attachments):
        # Restart cron so changed task definitions are re-read --
        # presumably cron drives cloud sync scheduling (confirm).
        await self.middleware.call('service.restart', 'cron')
Beispiel #5
0
    def __upload(self, config_file_name):
        """Validate an uploaded config database and stage it for migration.

        The upload may be a bare freenas-v1.db SQLite database or a tar
        bundle containing it (plus optional extras such as pwenc_secret).
        After validation the database is moved to UPLOADED_DB_PATH, any
        bundled config files are installed, and a sentinel is written so
        the migration runs later.  On Linux (SCALE) systemd units are
        enabled/disabled to match the uploaded services table.

        Raises:
            CallError: if the file is invalid or its alembic revision is
                newer than the installed tree.
        """
        try:
            """
            First we try to open the file as a tar file.
            We expect the tar file to contain at least the freenas-v1.db.
            It can also contain the pwenc_secret file.
            If we cannot open it as a tar, we try to proceed as it was the
            raw database file.
            """
            try:
                with tarfile.open(config_file_name) as tar:
                    bundle = True
                    tmpdir = tempfile.mkdtemp(dir='/var/tmp/firmware')
                    # NOTE(review): extractall() on an untrusted archive can
                    # write outside tmpdir via crafted member paths (path
                    # traversal) -- consider validating member names first.
                    tar.extractall(path=tmpdir)
                    # From here on, operate on the database inside the bundle.
                    config_file_name = os.path.join(tmpdir, 'freenas-v1.db')
            except tarfile.ReadError:
                bundle = False
            # Currently we compare only the number of migrations for south and django
            # of new and current installed database.
            # This is not bullet proof as we can eventually have more migrations in a stable
            # release compared to a older nightly and still be considered a downgrade, however
            # this is simple enough and works in most cases.
            alembic_version = None
            conn = sqlite3.connect(config_file_name)
            try:
                cur = conn.cursor()
                try:
                    cur.execute("SELECT version_num FROM alembic_version")
                    alembic_version = cur.fetchone()[0]
                except sqlite3.OperationalError as e:
                    if e.args[0] == "no such table: alembic_version":
                        # FN/TN < 12
                        # Let's just ensure it's not a random SQLite file
                        cur.execute("SELECT 1 FROM django_migrations")
                    else:
                        raise
                finally:
                    cur.close()
            finally:
                conn.close()
            if alembic_version is not None:
                # Accept the upload only if its alembic revision exists
                # somewhere in the installed alembic/versions tree.
                for root, dirs, files in os.walk(
                        os.path.join(get_middlewared_dir(), "alembic",
                                     "versions")):
                    found = False
                    for name in files:
                        if name.endswith(".py"):
                            with open(os.path.join(root, name)) as f:
                                if any(line.strip() ==
                                       f"Revision ID: {alembic_version}"
                                       for line in f.read().splitlines()):
                                    found = True
                                    break
                    if found:
                        break
                else:
                    # for/else: no walked directory contained the revision,
                    # so the database must come from a newer release.
                    raise CallError(
                        'Failed to upload config, version newer than the '
                        'current installed.')
        except Exception as e:
            # Remove the rejected upload; re-raise CallErrors as-is and
            # wrap anything else so callers get a uniform error type.
            os.unlink(config_file_name)
            if isinstance(e, CallError):
                raise
            else:
                raise CallError(f'The uploaded file is not valid: {e}')

        shutil.move(config_file_name, UPLOADED_DB_PATH)
        if bundle:
            # Install any extra config files shipped in the tar bundle.
            for filename, destination in CONFIG_FILES.items():
                file_path = os.path.join(tmpdir, filename)
                if os.path.exists(file_path):
                    if filename == 'geli':
                        # Let's only copy the geli keys and not overwrite the entire directory
                        os.makedirs(CONFIG_FILES['geli'], exist_ok=True)
                        for key_path in os.listdir(file_path):
                            shutil.move(os.path.join(file_path, key_path),
                                        os.path.join(destination, key_path))
                    elif filename == 'pwenc_secret':
                        shutil.move(file_path, '/data/pwenc_secret_uploaded')
                    else:
                        shutil.move(file_path, destination)

        # Now we must run the migrate operation in the case the db is older
        open(NEED_UPDATE_SENTINEL, 'w+').close()

        if osc.IS_LINUX:
            # For SCALE, we have to enable/disable services based on the uploaded database
            enable_disable_units = {'enable': [], 'disable': []}
            conn = sqlite3.connect(UPLOADED_DB_PATH)
            try:
                cursor = conn.cursor()
                for service, enabled in cursor.execute(
                        "SELECT srv_service, srv_enable FROM services_services"
                ).fetchall():
                    try:
                        units = self.middleware.call_sync(
                            'service.systemd_units', service)
                    except KeyError:
                        # An old service which we don't have currently
                        continue

                    if enabled:
                        enable_disable_units['enable'].extend(units)
                    else:
                        enable_disable_units['disable'].extend(units)
            finally:
                conn.close()

            # Only invoke systemctl for actions that actually have units.
            for action in filter(lambda k: enable_disable_units[k],
                                 enable_disable_units):
                cp = subprocess.Popen(['systemctl', action] +
                                      enable_disable_units[action],
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
                err = cp.communicate()[1]
                if cp.returncode:
                    # Log and continue; a unit failure should not abort
                    # the whole upload at this point.
                    self.middleware.logger.error(
                        'Failed to %s %r systemctl units: %s', action,
                        ', '.join(enable_disable_units[action]), err.decode())
Beispiel #6
0
        sys.path.append('/usr/local/lib')

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# Metadata for 'autogenerate' support: middlewared models hang off the
# shared declarative base's metadata.
target_metadata = Model.metadata
# These calls import every plugin module (depth=1) for side effects only;
# the resulting lists are discarded.  Presumably importing the plugins
# registers their model definitions on Model.metadata before
# autogenerate runs -- TODO(review): confirm.
list(load_modules(os.path.join(get_middlewared_dir(), "plugins"), depth=1))
list(load_modules("/usr/local/lib/middlewared_truenas/plugins", depth=1))

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
# FREENAS_DATABASE is the fallback path when the env variable is unset.
DATABASE_URL = f"sqlite:///{os.environ.get('FREENAS_DATABASE', FREENAS_DATABASE)}"


@Operations.register_operation("drop_references")
@BatchOperations.register_operation("drop_references", "batch_drop_references")
class DropReferencesOp(ops.MigrateOperation):
    def __init__(
        self,
        field_name,
Beispiel #7
0
    @private
    def common_task_schema(self, provider):
        """Return schema fields shared by cloud sync tasks for *provider*.

        Currently only adds a "fast_list" boolean for providers that
        support rclone's --fast-list optimization; otherwise the schema
        is empty.
        """
        schema = []

        # Guard clause: nothing to add for providers without fast_list.
        if not provider.fast_list:
            return schema

        fast_list_description = textwrap.dedent("""\
                Use fewer transactions in exchange for more RAM. This may also speed up or slow down your
                transfer. See [rclone documentation](https://rclone.org/docs/#fast-list) for more details.
            """).rstrip()
        schema.append(
            Bool("fast_list",
                 default=False,
                 title="Use --fast-list",
                 description=fast_list_description))

        return schema


# Discover rclone remote classes; each remote's extra methods are exposed
# on CloudSyncService as "<remotename>_<methodname>".
remote_classes = []
for mod in load_modules(os.path.join(get_middlewared_dir(), "rclone", "remote")):
    for remote in load_classes(mod, BaseRcloneRemote, []):
        remote_classes.append(remote)
        for extra_name in remote.extra_methods:
            attr_name = f"{remote.name.lower()}_{extra_name}"
            setattr(CloudSyncService, attr_name, getattr(remote, extra_name))


class CloudSyncFSAttachmentDelegate(LockableFSAttachmentDelegate):
    """Attachment delegate tying cloud sync tasks to their 'path' resource."""

    # Identifier and human-readable title used by the attachment framework.
    name = 'cloudsync'
    title = 'CloudSync Task'
    # Service whose records are matched against attached paths.
    service_class = CloudSyncService
    # Field of the task record that holds the filesystem path.
    resource_name = 'path'

    async def restart_reload_services(self, attachments):
        # Restart cron so changed task definitions are re-read --
        # presumably cron drives cloud sync scheduling (confirm).
        await self.middleware.call('service.restart', 'cron')
Beispiel #8
0
    def __upload(self, config_file_name):
        """Validate an uploaded config database and stage it for migration.

        The upload may be a bare freenas-v1.db SQLite database or a tar
        bundle containing it (plus optional extras such as pwenc_secret).
        After validation, the database and extras are moved into place,
        a migration sentinel is written, the 'config.on_upload' hook runs,
        and -- on licensed failover systems -- the staged files are synced
        to the remote node, which is then rebooted.

        Raises:
            CallError: if the file is invalid, its alembic revision is
                newer than the installed tree, or the remote failover
                node fails to apply the upload.
        """
        # Remember the tar failure so a later sqlite failure can report both.
        tar_error = None
        try:
            """
            First we try to open the file as a tar file.
            We expect the tar file to contain at least the freenas-v1.db.
            It can also contain the pwenc_secret file.
            If we cannot open it as a tar, we try to proceed as it was the
            raw database file.
            """
            try:
                with tarfile.open(config_file_name) as tar:
                    bundle = True
                    tmpdir = tempfile.mkdtemp(dir='/var/tmp/firmware')
                    # NOTE(review): extractall() on an untrusted archive can
                    # write outside tmpdir via crafted member paths (path
                    # traversal) -- consider validating member names first.
                    tar.extractall(path=tmpdir)
                    # From here on, operate on the database inside the bundle.
                    config_file_name = os.path.join(tmpdir, 'freenas-v1.db')
            except tarfile.ReadError as e:
                tar_error = str(e)
                bundle = False
            # Currently we compare only the number of migrations for south and django
            # of new and current installed database.
            # This is not bullet proof as we can eventually have more migrations in a stable
            # release compared to a older nightly and still be considered a downgrade, however
            # this is simple enough and works in most cases.
            alembic_version = None
            conn = sqlite3.connect(config_file_name)
            try:
                cur = conn.cursor()
                try:
                    cur.execute("SELECT version_num FROM alembic_version")
                    alembic_version = cur.fetchone()[0]
                except sqlite3.OperationalError as e:
                    if e.args[0] == "no such table: alembic_version":
                        # FN/TN < 12
                        # Let's just ensure it's not a random SQLite file
                        cur.execute("SELECT 1 FROM django_migrations")
                    else:
                        raise
                finally:
                    cur.close()
            except sqlite3.OperationalError as e:
                # Not a usable database; mention the earlier tar failure
                # too when the file was not a valid tar either.
                if tar_error:
                    raise CallError(
                        f"Uploaded file is neither a valid .tar file ({tar_error}) nor valid FreeNAS/TrueNAS database "
                        f"file ({e}).")
                else:
                    raise CallError(
                        f"Uploaded file is not a valid FreeNAS/TrueNAS database file ({e})."
                    )
            finally:
                conn.close()
            if alembic_version is not None:
                # Accept the upload only if its alembic revision exists
                # somewhere in the installed alembic/versions tree.
                for root, dirs, files in os.walk(
                        os.path.join(get_middlewared_dir(), "alembic",
                                     "versions")):
                    found = False
                    for name in files:
                        if name.endswith(".py"):
                            with open(os.path.join(root, name)) as f:
                                if any(line.strip() ==
                                       f"Revision ID: {alembic_version}"
                                       for line in f.read().splitlines()):
                                    found = True
                                    break
                    if found:
                        break
                else:
                    # for/else: no walked directory contained the revision,
                    # so the database must come from a newer release.
                    raise CallError(
                        'Failed to upload config, version newer than the '
                        'current installed.')
        except Exception as e:
            # Remove the rejected upload; re-raise CallErrors as-is and
            # wrap anything else so callers get a uniform error type.
            os.unlink(config_file_name)
            if isinstance(e, CallError):
                raise
            else:
                raise CallError(f'The uploaded file is not valid: {e}')

        # Track every staged path so it can be pushed to the failover peer.
        upload = []

        def move(src, dst):
            # shutil.move plus bookkeeping of the destination path.
            shutil.move(src, dst)
            upload.append(dst)

        move(config_file_name, UPLOADED_DB_PATH)
        if bundle:
            # Install any extra config files shipped in the tar bundle.
            for filename, destination in CONFIG_FILES.items():
                file_path = os.path.join(tmpdir, filename)
                if os.path.exists(file_path):
                    if filename == 'geli':
                        # Let's only copy the geli keys and not overwrite the entire directory
                        os.makedirs(CONFIG_FILES['geli'], exist_ok=True)
                        for key_path in os.listdir(file_path):
                            move(os.path.join(file_path, key_path),
                                 os.path.join(destination, key_path))
                    elif filename == 'pwenc_secret':
                        move(file_path, '/data/pwenc_secret_uploaded')
                    else:
                        move(file_path, destination)

        # Now we must run the migrate operation in the case the db is older
        open(NEED_UPDATE_SENTINEL, 'w+').close()
        upload.append(NEED_UPDATE_SENTINEL)

        self.middleware.call_hook_sync('config.on_upload', UPLOADED_DB_PATH)

        if self.middleware.call_sync('failover.licensed'):
            # Mirror every staged file to the standby node, run the same
            # upload hook there, then reboot it (without waiting).
            try:
                for path in upload:
                    self.middleware.call_sync('failover.send_small_file', path)

                self.middleware.call_sync(
                    'failover.call_remote',
                    'core.call_hook',
                    ['config.on_upload', [UPLOADED_DB_PATH]],
                )

                self.middleware.run_coroutine(
                    self.middleware.call('failover.call_remote',
                                         'system.reboot'),
                    wait=False,
                )
            except Exception as e:
                raise CallError(
                    f'Config uploaded successfully, but remote node responded with error: {e}. '
                    f'Please use Sync to Peer on the System/Failover page to perform a manual sync after reboot.',
                    CallError.EREMOTENODEERROR,
                )