async def initialize(self):
    if not await self.middleware.call("system.is_freenas"):
        if await self.middleware.call("notifier.failover_node") == "B":
            self.node = "B"

    for alert in await self.middleware.call("datastore.query", "system.alert"):
        del alert["id"]
        alert["level"] = AlertLevel(alert["level"])
        alert = Alert(**alert)
        self.alerts[alert.node][alert.source][alert.key] = alert

    for policy in self.policies.values():
        policy.receive_alerts(datetime.utcnow(), self.alerts)

    main_sources_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "source")
    sources_dirs = [os.path.join(overlay_dir, "alert", "source") for overlay_dir in self.middleware.overlay_dirs]
    sources_dirs.insert(0, main_sources_dir)
    for sources_dir in sources_dirs:
        for module in load_modules(sources_dir):
            for cls in load_classes(module, AlertSource,
                                    (FilePresenceAlertSource, ThreadedAlertSource, OneShotAlertSource)):
                source = cls(self.middleware)
                ALERT_SOURCES[source.name] = source

    main_services_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "service")
    services_dirs = [os.path.join(overlay_dir, "alert", "service") for overlay_dir in self.middleware.overlay_dirs]
    services_dirs.insert(0, main_services_dir)
    for services_dir in services_dirs:
        for module in load_modules(services_dir):
            for cls in load_classes(module, _AlertService, (ThreadedAlertService, ProThreadedAlertService)):
                ALERT_SERVICES_FACTORIES[cls.name()] = cls
async def load(self):
    is_freenas = await self.middleware.call("system.is_freenas")

    main_sources_dir = os.path.join(get_middlewared_dir(), "alert", "source")
    sources_dirs = [
        os.path.join(overlay_dir, "alert", "source")
        for overlay_dir in self.middleware.overlay_dirs
    ]
    sources_dirs.insert(0, main_sources_dir)
    for sources_dir in sources_dirs:
        for module in load_modules(sources_dir):
            for cls in load_classes(
                    module, AlertSource, (FilePresenceAlertSource, ThreadedAlertSource)):
                source = cls(self.middleware)
                ALERT_SOURCES[source.name] = source

    main_services_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "service")
    services_dirs = [
        os.path.join(overlay_dir, "alert", "service")
        for overlay_dir in self.middleware.overlay_dirs
    ]
    services_dirs.insert(0, main_services_dir)
    for services_dir in services_dirs:
        for module in load_modules(services_dir):
            for cls in load_classes(
                    module, _AlertService, (ThreadedAlertService, ProThreadedAlertService)):
                ALERT_SERVICES_FACTORIES[cls.name()] = cls
async def initialize(self):
    if not await self.middleware.call("system.is_freenas"):
        if await self.middleware.call("notifier.failover_node") == "B":
            self.node = "B"

    for alert in await self.middleware.call("datastore.query", "system.alert"):
        del alert["id"]
        alert["level"] = AlertLevel(alert["level"])
        alert = Alert(**alert)
        self.alerts[alert.node][alert.source][alert.key] = alert

    for policy in self.policies.values():
        policy.receive_alerts(datetime.utcnow(), self.alerts)

    main_sources_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "source")
    sources_dirs = [os.path.join(overlay_dir, "alert", "source") for overlay_dir in self.middleware.overlay_dirs]
    sources_dirs.insert(0, main_sources_dir)
    for sources_dir in sources_dirs:
        for module in load_modules(sources_dir):
            for cls in load_classes(module, AlertSource, (FilePresenceAlertSource, ThreadedAlertSource)):
                source = cls(self.middleware)
                ALERT_SOURCES[source.name] = source

    main_services_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "service")
    services_dirs = [os.path.join(overlay_dir, "alert", "service") for overlay_dir in self.middleware.overlay_dirs]
    services_dirs.insert(0, main_services_dir)
    for services_dir in services_dirs:
        for module in load_modules(services_dir):
            for cls in load_classes(module, _AlertService, (ThreadedAlertService, ProThreadedAlertService)):
                ALERT_SERVICES_FACTORIES[cls.name()] = cls
async def initialize(self):
    if not await self.middleware.call("system.is_freenas"):
        if await self.middleware.call("failover.node") == "B":
            self.node = "B"

    main_sources_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "source")
    sources_dirs = [
        os.path.join(overlay_dir, "alert", "source")
        for overlay_dir in self.middleware.overlay_dirs
    ]
    sources_dirs.insert(0, main_sources_dir)
    for sources_dir in sources_dirs:
        for module in load_modules(sources_dir):
            for cls in load_classes(
                    module, AlertSource, (FilePresenceAlertSource, ThreadedAlertSource)):
                source = cls(self.middleware)
                ALERT_SOURCES[source.name] = source

    main_services_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "service")
    services_dirs = [
        os.path.join(overlay_dir, "alert", "service")
        for overlay_dir in self.middleware.overlay_dirs
    ]
    services_dirs.insert(0, main_services_dir)
    for services_dir in services_dirs:
        for module in load_modules(services_dir):
            for cls in load_classes(
                    module, _AlertService, (ThreadedAlertService, ProThreadedAlertService)):
                ALERT_SERVICES_FACTORIES[cls.name()] = cls

    for alert in await self.middleware.call("datastore.query", "system.alert"):
        del alert["id"]

        try:
            alert["klass"] = AlertClass.class_by_name[alert["klass"]]
        except KeyError:
            self.logger.info("Alert class %r is no longer present", alert["klass"])
            continue

        alert["_uuid"] = alert.pop("uuid")
        alert["_source"] = alert.pop("source")
        alert["_key"] = alert.pop("key")
        alert["_text"] = alert.pop("text")

        alert = Alert(**alert)

        self.alerts.append(alert)

    for policy in self.policies.values():
        policy.receive_alerts(datetime.utcnow(), self.alerts)
async def initialize(self):
    is_freenas = await self.middleware.call("system.is_freenas")

    self.node = "A"
    if not is_freenas:
        if await self.middleware.call("failover.node") == "B":
            self.node = "B"

    main_sources_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "source")
    sources_dirs = [os.path.join(overlay_dir, "alert", "source") for overlay_dir in self.middleware.overlay_dirs]
    sources_dirs.insert(0, main_sources_dir)
    for sources_dir in sources_dirs:
        for module in load_modules(sources_dir):
            for cls in load_classes(module, AlertSource, (FilePresenceAlertSource, ThreadedAlertSource)):
                if not is_freenas and cls.freenas_only:
                    continue

                source = cls(self.middleware)
                ALERT_SOURCES[source.name] = source

    main_services_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "service")
    services_dirs = [os.path.join(overlay_dir, "alert", "service") for overlay_dir in self.middleware.overlay_dirs]
    services_dirs.insert(0, main_services_dir)
    for services_dir in services_dirs:
        for module in load_modules(services_dir):
            for cls in load_classes(module, _AlertService, (ThreadedAlertService, ProThreadedAlertService)):
                ALERT_SERVICES_FACTORIES[cls.name()] = cls

    self.alerts = []
    for alert in await self.middleware.call("datastore.query", "system.alert"):
        del alert["id"]

        try:
            alert["klass"] = AlertClass.class_by_name[alert["klass"]]
        except KeyError:
            self.logger.info("Alert class %r is no longer present", alert["klass"])
            continue

        alert["_uuid"] = alert.pop("uuid")
        alert["_source"] = alert.pop("source")
        alert["_key"] = alert.pop("key")
        alert["_text"] = alert.pop("text")

        alert = Alert(**alert)

        self.alerts.append(alert)

    self.alert_source_last_run = defaultdict(lambda: datetime.min)

    self.policies = {
        "IMMEDIATELY": AlertPolicy(),
        "HOURLY": AlertPolicy(lambda d: (d.date(), d.hour)),
        "DAILY": AlertPolicy(lambda d: (d.date())),
        "NEVER": AlertPolicy(lambda d: None),
    }
    for policy in self.policies.values():
        policy.receive_alerts(datetime.utcnow(), self.alerts)
async def setup(middleware):
    for module in load_modules(
            os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "rclone", "remote")):
        for cls in load_classes(module, BaseRcloneRemote, []):
            remote = cls(middleware)
            REMOTES[remote.name] = remote
def load_migrations(middleware):
    main_sources_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "migration")
    sources_dirs = [os.path.join(overlay_dir, "migration") for overlay_dir in middleware.overlay_dirs]
    sources_dirs.insert(0, main_sources_dir)

    modules = []
    for sources_dir in sources_dirs:
        modules.extend(load_modules(sources_dir))

    return sorted(modules, key=lambda module: module.__name__)
def load_migrations(middleware):
    main_sources_dir = os.path.join(get_middlewared_dir(), "migration")
    sources_dirs = [
        os.path.join(overlay_dir, "migration")
        for overlay_dir in middleware.overlay_dirs
    ]
    sources_dirs.insert(0, main_sources_dir)

    modules = []
    for sources_dir in sources_dirs:
        modules.extend(load_modules(sources_dir))

    return sorted(modules, key=lambda module: module.__name__)
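# Every loader above leans on the load_modules()/load_classes() helpers, whose
# definitions are not included in these snippets. For orientation, here is a
# minimal, self-contained sketch of how such helpers could be written with the
# standard library alone; the real middlewared helpers may behave differently
# (package-aware imports, the depth argument used further below, etc.), so
# treat this as an assumption, not the actual implementation.
import importlib.util
import inspect
import os


def load_modules(directory):
    """Import every top-level .py file in `directory` and yield the modules."""
    if not os.path.isdir(directory):
        return
    for filename in sorted(os.listdir(directory)):
        if not filename.endswith(".py") or filename.startswith("_"):
            continue
        path = os.path.join(directory, filename)
        spec = importlib.util.spec_from_file_location(filename[:-3], path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        yield module


def load_classes(module, base, blacklist):
    """Yield subclasses of `base` defined in `module`, skipping `base` itself and `blacklist`."""
    for _, obj in inspect.getmembers(module, inspect.isclass):
        if issubclass(obj, base) and obj is not base and obj not in blacklist:
            yield obj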
async def setup(middleware):
    for module in load_modules(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir,
                                            "rclone", "remote")):
        for cls in load_classes(module, BaseRcloneRemote, []):
            remote = cls(middleware)
            REMOTES[remote.name] = remote
                for provider in REMOTES.values()
            ],
            key=lambda provider: provider["title"].lower()
        )

    def common_task_schema(self, provider):
        schema = []

        if provider.fast_list:
            schema.append(Bool("fast_list", default=False, title="Use --fast-list",
                               description=textwrap.dedent("""\
                                   Use fewer transactions in exchange for more RAM. This may also speed up or slow down your
                                   transfer. See [rclone documentation](https://rclone.org/docs/#fast-list) for more details.
                               """).rstrip()))

        return schema


remote_classes = []
for module in load_modules(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir,
                                        "rclone", "remote")):
    for cls in load_classes(module, BaseRcloneRemote, []):
        remote_classes.append(cls)

        for method_name in cls.extra_methods:
            setattr(CloudSyncService, f"{cls.name.lower()}_{method_name}", getattr(cls, method_name))


async def setup(middleware):
    for cls in remote_classes:
        remote = cls(middleware)
        REMOTES[remote.name] = remote
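# The remote_classes loop above grafts each remote's extra_methods onto
# CloudSyncService at import time with setattr(). The pattern itself is plain
# Python; the following stand-alone illustration uses hypothetical class and
# method names (Service, Remote, get_buckets) that do not exist in middlewared:
class Service:
    pass


class Remote:
    name = "S3"
    extra_methods = ["get_buckets"]

    def get_buckets(self):
        # Once grafted on, `self` is the Service instance, not the Remote.
        return ["bucket-a", "bucket-b"]


for method_name in Remote.extra_methods:
    setattr(Service, f"{Remote.name.lower()}_{method_name}", getattr(Remote, method_name))

svc = Service()
print(svc.s3_get_buckets())  # ['bucket-a', 'bucket-b']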
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Model.metadata
list(load_modules(os.path.join(os.path.dirname(middlewared.__file__), "plugins")))
list(load_modules("/usr/local/lib/middlewared_truenas/plugins"))

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

DATABASE_URL = f"sqlite:///{os.environ.get('FREENAS_DATABASE', FREENAS_DATABASE)}"


@Operations.register_operation("drop_references")
@BatchOperations.register_operation("drop_references", "batch_drop_references")
class DropReferencesOp(ops.MigrateOperation):
    def __init__(
        self,
        field_name,
async def initialize(self, load=True):
    is_freenas = await self.middleware.call("system.is_freenas")

    self.node = "A"
    if not is_freenas:
        if await self.middleware.call("failover.node") == "B":
            self.node = "B"

    main_sources_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "source")
    sources_dirs = [
        os.path.join(overlay_dir, "alert", "source")
        for overlay_dir in self.middleware.overlay_dirs
    ]
    sources_dirs.insert(0, main_sources_dir)
    for sources_dir in sources_dirs:
        for module in load_modules(sources_dir):
            for cls in load_classes(
                    module, AlertSource, (FilePresenceAlertSource, ThreadedAlertSource)):
                if not is_freenas and cls.freenas_only:
                    continue

                source = cls(self.middleware)
                ALERT_SOURCES[source.name] = source

    main_services_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), os.path.pardir, "alert", "service")
    services_dirs = [
        os.path.join(overlay_dir, "alert", "service")
        for overlay_dir in self.middleware.overlay_dirs
    ]
    services_dirs.insert(0, main_services_dir)
    for services_dir in services_dirs:
        for module in load_modules(services_dir):
            for cls in load_classes(
                    module, _AlertService, (ThreadedAlertService, ProThreadedAlertService)):
                ALERT_SERVICES_FACTORIES[cls.name()] = cls

    self.alerts = []
    if load:
        for alert in await self.middleware.call("datastore.query", "system.alert"):
            del alert["id"]

            try:
                alert["klass"] = AlertClass.class_by_name[alert["klass"]]
            except KeyError:
                self.logger.info("Alert class %r is no longer present", alert["klass"])
                continue

            alert["_uuid"] = alert.pop("uuid")
            alert["_source"] = alert.pop("source")
            alert["_key"] = alert.pop("key")
            alert["_text"] = alert.pop("text")

            alert = Alert(**alert)

            self.alerts.append(alert)

    self.alert_source_last_run = defaultdict(lambda: datetime.min)

    self.policies = {
        "IMMEDIATELY": AlertPolicy(),
        "HOURLY": AlertPolicy(lambda d: (d.date(), d.hour)),
        "DAILY": AlertPolicy(lambda d: (d.date())),
        "NEVER": AlertPolicy(lambda d: None),
    }
    for policy in self.policies.values():
        policy.receive_alerts(datetime.utcnow(), self.alerts)
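# The four entries in self.policies differ only in the key function handed to
# AlertPolicy. AlertPolicy itself is not shown in these snippets, so the
# interpretation here is an assumption: two check runs fall into the same
# notification window exactly when the key yields the same value. The lambdas
# below are copied from the policies dict above to demonstrate that grouping:
from datetime import datetime

hourly = lambda d: (d.date(), d.hour)
daily = lambda d: d.date()
never = lambda d: None

a = datetime(2019, 6, 1, 10, 5)
b = datetime(2019, 6, 1, 10, 55)
c = datetime(2019, 6, 1, 23, 0)

assert hourly(a) == hourly(b)        # same hour -> one HOURLY window
assert hourly(a) != hourly(c)        # hour changed -> new HOURLY window
assert daily(a) == daily(c)          # same day -> one DAILY window
assert never(a) is never(c) is None  # key never changes -> NEVER re-notifies
# "IMMEDIATELY" constructs AlertPolicy() with no key function, presumably
# giving every run its own window so new alerts are sent right away.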
        if provider.fast_list:
            schema.append(
                Bool("fast_list", default=False, title="Use --fast-list",
                     description=textwrap.dedent("""\
                         Use fewer transactions in exchange for more RAM. This may also speed up or slow down your
                         transfer. See [rclone documentation](https://rclone.org/docs/#fast-list) for more details.
                     """).rstrip()))

        return schema


remote_classes = []
for module in load_modules(
        os.path.join(get_middlewared_dir(), "rclone", "remote")):
    for cls in load_classes(module, BaseRcloneRemote, []):
        remote_classes.append(cls)

        for method_name in cls.extra_methods:
            setattr(CloudSyncService, f"{cls.name.lower()}_{method_name}", getattr(cls, method_name))


class CloudSyncFSAttachmentDelegate(LockableFSAttachmentDelegate):
    name = 'cloudsync'
    title = 'CloudSync Task'
    service_class = CloudSyncService
    resource_name = 'path'

    async def restart_reload_services(self, attachments):
        await self.middleware.call('service.restart', 'cron')
sys.path.append('/usr/local/lib')

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Model.metadata
list(load_modules(os.path.join(get_middlewared_dir(), "plugins"), depth=1))
list(load_modules("/usr/local/lib/middlewared_truenas/plugins", depth=1))

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

DATABASE_URL = f"sqlite:///{os.environ.get('FREENAS_DATABASE', FREENAS_DATABASE)}"


@Operations.register_operation("drop_references")
@BatchOperations.register_operation("drop_references", "batch_drop_references")
class DropReferencesOp(ops.MigrateOperation):
    def __init__(
        self,
        field_name,