def _init_host(self):
        """Initialize Worker host

        Init db connection, load and validate processing
        hooks, runs periodic tasks.

        :returns None
        """
        db.init()

        try:
            hooks = plugins_base.validate_processing_hooks()
        except Exception as exc:
            LOG.critical(str(exc))
            sys.exit(1)

        LOG.info('Enabled processing hooks: %s', [h.name for h in hooks])

        if CONF.firewall.manage_firewall:
            firewall.init()

        periodic_update_ = periodics.periodic(
            spacing=CONF.firewall.firewall_update_period,
            enabled=CONF.firewall.manage_firewall)(periodic_update)
        periodic_clean_up_ = periodics.periodic(
            spacing=CONF.clean_up_period)(periodic_clean_up)

        self._periodics_worker = periodics.PeriodicWorker(
            callables=[(periodic_update_, None, None),
                       (periodic_clean_up_, None, None)],
            executor_factory=periodics.ExistingExecutor(utils.executor()))
        utils.executor().submit(self._periodics_worker.start)
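Every init_host variant on this page hands its periodic tasks to the futurist periodics machinery. The following is a minimal, self-contained sketch of that pattern, assuming only the futurist library; the task body, spacing and pool size are illustrative, not values from the project.

import time

import futurist
from futurist import periodics


@periodics.periodic(spacing=5)   # run roughly every 5 seconds
def clean_up():
    print('cleaning up stale state')


executor = futurist.ThreadPoolExecutor(max_workers=4)
worker = periodics.PeriodicWorker(
    callables=[(clean_up, None, None)],
    executor_factory=periodics.ExistingExecutor(executor))

# PeriodicWorker.start() blocks, so hand it to the shared executor, just as
# the examples do with utils.executor().submit(...).
executor.submit(worker.start)

time.sleep(12)     # let the task fire a couple of times
worker.stop()
executor.shutdown()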
    def _init_host(self):
        """Initialize Worker host

        Init db connection, load and validate processing
        hooks, runs periodic tasks.

        :returns None
        """
        db.init()

        try:
            hooks = plugins_base.validate_processing_hooks()
        except Exception as exc:
            LOG.critical(str(exc))
            sys.exit(1)

        LOG.info('Enabled processing hooks: %s', [h.name for h in hooks])

        driver = pxe_filter.driver()
        driver.init_filter()

        periodic_clean_up_ = periodics.periodic(
            spacing=CONF.clean_up_period)(periodic_clean_up)

        self._periodics_worker = periodics.PeriodicWorker(
            callables=[(driver.get_periodic_sync_task(), None, None),
                       (periodic_clean_up_, None, None)],
            executor_factory=periodics.ExistingExecutor(utils.executor()),
            on_failure=self._periodics_watchdog)
        utils.executor().submit(self._periodics_worker.start)
Example #3
    def init_host(self):
        """Initialize Worker host

        Init db connection, load and validate processing
        hooks, runs periodic tasks.

        :returns None
        """
        if CONF.processing.store_data == 'none':
            LOG.warning('Introspection data will not be stored. Change '
                        '"[processing] store_data" option if this is not '
                        'the desired behavior')
        else:
            LOG.info('Introspection data will be stored in the %s backend',
                     CONF.processing.store_data)

        db.init()

        try:
            hooks = plugins_base.validate_processing_hooks()
        except Exception as exc:
            LOG.critical(str(exc))
            sys.exit(1)
        LOG.info('Enabled processing hooks: %s', [h.name for h in hooks])

        driver = pxe_filter.driver()
        driver.init_filter()

        periodic_clean_up_ = periodics.periodic(
            spacing=CONF.clean_up_period)(periodic_clean_up)

        self._periodics_worker = periodics.PeriodicWorker(
            callables=[(driver.get_periodic_sync_task(), None, None),
                       (periodic_clean_up_, None, None)],
            executor_factory=periodics.ExistingExecutor(utils.executor()),
            on_failure=self._periodics_watchdog)
        utils.executor().submit(self._periodics_worker.start)

        if CONF.enable_mdns:
            endpoint = keystone.get_endpoint('service_catalog')
            self._zeroconf = mdns.Zeroconf()
            self._zeroconf.register_service('baremetal-introspection',
                                            endpoint)

        if not CONF.standalone:
            try:
                coordinator = coordination.get_coordinator(prefix='conductor')
                coordinator.start(heartbeat=True)
                coordinator.join_group()
            except tooz.ToozError:
                with excutils.save_and_reraise_exception():
                    LOG.critical('Failed when connecting to coordination '
                                 'backend.')
                    self.del_host()
            else:
                LOG.info('Successfully connected to coordination backend.')
Example #4
    def test_ok(self, mock_mgr):
        mock_mgr.return_value = [
            fake_ext(name='1', obj=mock.Mock(dependencies=[])),
            fake_ext(name='2', obj=mock.Mock(dependencies=['1'])),
            fake_ext(name='3', obj=mock.Mock(dependencies=['2', '1'])),
        ]

        hooks = base.validate_processing_hooks()
        self.assertEqual(mock_mgr.return_value, hooks)
        mock_mgr.assert_called_once_with()
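The test above depends on two names defined outside the snippet: fake_ext and the mock_mgr argument injected by a patch decorator. A plausible reconstruction, offered as an assumption rather than the verbatim source, is:

import collections

# Stand-in for a loaded stevedore extension: just a hook name plus the
# plugin object (this shape is inferred from how the test uses it).
fake_ext = collections.namedtuple('fake_ext', ['name', 'obj'])

# mock_mgr would be injected by patching the hook-manager lookup that
# validate_processing_hooks() calls, e.g. (exact patch target assumed):
#
#     @mock.patch.object(base, 'processing_hooks_manager', autospec=True)
#     def test_ok(self, mock_mgr): ...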
    def init_host(self):
        """Initialize Worker host

        Init db connection, load and validate processing
        hooks, runs periodic tasks.

        :returns None
        """
        if CONF.processing.store_data == 'none':
            LOG.warning('Introspection data will not be stored. Change '
                        '"[processing] store_data" option if this is not '
                        'the desired behavior')
        elif CONF.processing.store_data == 'swift':
            LOG.info(
                'Introspection data will be stored in Swift in the '
                'container %s', CONF.swift.container)

        db.init()

        try:
            hooks = plugins_base.validate_processing_hooks()
        except Exception as exc:
            LOG.critical(str(exc))
            sys.exit(1)
        LOG.info('Enabled processing hooks: %s', [h.name for h in hooks])

        driver = pxe_filter.driver()
        driver.init_filter()

        periodic_clean_up_ = periodics.periodic(
            spacing=CONF.clean_up_period)(periodic_clean_up)

        self._periodics_worker = periodics.PeriodicWorker(
            callables=[(driver.get_periodic_sync_task(), None, None),
                       (periodic_clean_up_, None, None)],
            executor_factory=periodics.ExistingExecutor(utils.executor()),
            on_failure=self._periodics_watchdog)
        utils.executor().submit(self._periodics_worker.start)

        if CONF.enable_mdns:
            endpoint = keystone.get_endpoint('service_catalog')
            self._zeroconf = mdns.Zeroconf()
            self._zeroconf.register_service('baremetal-introspection',
                                            endpoint)
    def init_host(self):
        """Initialize Worker host

        Init db connection, load and validate processing
        hooks, runs periodic tasks.

        :returns None
        """
        if CONF.processing.store_data == 'none':
            LOG.warning('Introspection data will not be stored. Change '
                        '"[processing] store_data" option if this is not '
                        'the desired behavior')
        elif CONF.processing.store_data == 'swift':
            LOG.info('Introspection data will be stored in Swift in the '
                     'container %s', CONF.swift.container)

        db.init()

        try:
            hooks = plugins_base.validate_processing_hooks()
        except Exception as exc:
            LOG.critical(str(exc))
            sys.exit(1)
        LOG.info('Enabled processing hooks: %s', [h.name for h in hooks])

        driver = pxe_filter.driver()
        driver.init_filter()

        periodic_clean_up_ = periodics.periodic(
            spacing=CONF.clean_up_period
        )(periodic_clean_up)

        self._periodics_worker = periodics.PeriodicWorker(
            callables=[(driver.get_periodic_sync_task(), None, None),
                       (periodic_clean_up_, None, None)],
            executor_factory=periodics.ExistingExecutor(utils.executor()),
            on_failure=self._periodics_watchdog)
        utils.executor().submit(self._periodics_worker.start)
Example #8
    def init_host(self):
        """Initialize Worker host

        Init db connection, load and validate processing
        hooks, runs periodic tasks.

        :returns None
        """
        if CONF.processing.store_data == 'none':
            LOG.warning('Introspection data will not be stored. Change '
                        '"[processing] store_data" option if this is not '
                        'the desired behavior')
        else:
            LOG.info('Introspection data will be stored in the %s backend',
                     CONF.processing.store_data)

        db.init()

        self.coordinator = None
        try:
            self.coordinator = coordination.get_coordinator(prefix='conductor')
            self.coordinator.start(heartbeat=True)
            self.coordinator.join_group()
        except Exception as exc:
            if CONF.standalone:
                LOG.info(
                    'Coordination backend cannot be started, assuming '
                    'no other instances are running. Error: %s', exc)
                self.coordinator = None
            else:
                with excutils.save_and_reraise_exception():
                    LOG.critical(
                        'Failure when connecting to coordination '
                        'backend',
                        exc_info=True)
                    self.del_host()
        else:
            LOG.info('Successfully connected to coordination backend.')

        try:
            hooks = plugins_base.validate_processing_hooks()
        except Exception as exc:
            LOG.critical(str(exc))
            sys.exit(1)
        LOG.info('Enabled processing hooks: %s', [h.name for h in hooks])

        driver = pxe_filter.driver()
        driver.init_filter()

        periodic_clean_up_ = periodics.periodic(
            spacing=CONF.clean_up_period,
            enabled=(CONF.clean_up_period != 0))(periodic_clean_up)

        sync_with_ironic_ = periodics.periodic(
            spacing=CONF.clean_up_period,
            enabled=(CONF.clean_up_period != 0))(sync_with_ironic)

        callables = [(periodic_clean_up_, None, None),
                     (sync_with_ironic_, (self, ), None)]

        driver_task = driver.get_periodic_sync_task()
        if driver_task is not None:
            callables.append((driver_task, None, None))

        # run elections periodically if we have a coordinator
        # that we were able to start
        if (self.coordinator and self.coordinator.started):
            periodic_leader_election_ = periodics.periodic(
                spacing=CONF.leader_election_interval)(
                    periodic_leader_election)
            callables.append((periodic_leader_election_, (self, ), None))

        self._periodics_worker = periodics.PeriodicWorker(
            callables=callables,
            executor_factory=periodics.ExistingExecutor(utils.executor()),
            on_failure=self._periodics_watchdog)

        utils.executor().submit(self._periodics_worker.start)

        if CONF.enable_mdns:
            endpoint = keystone.get_endpoint('service_catalog')
            self._zeroconf = mdns.Zeroconf()
            self._zeroconf.register_service('baremetal-introspection',
                                            endpoint)
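coordination.get_coordinator(prefix='conductor') in the examples above is a project-level wrapper; the except tooz.ToozError clause shows that the tooz library sits underneath it. A minimal sketch of the raw tooz calls such a wrapper presumably builds on, with a placeholder backend URL, member id and group name:

from tooz import coordination

# Backend URL, member id and group name are placeholders, not project values.
coordinator = coordination.get_coordinator(
    'memcached://127.0.0.1:11211', b'introspection-worker-1')
coordinator.start(start_heart=True)

group = b'conductor'
try:
    coordinator.create_group(group).get()
except coordination.GroupAlreadyExist:
    pass
coordinator.join_group(group).get()

# ... the heartbeat started above keeps the membership alive while the
# worker runs its periodic tasks ...

coordinator.leave_group(group).get()
coordinator.stop()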