def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data
    processing.

    Runs every enabled pre-processing hook (collecting failures instead of
    aborting), looks the node up, records any collected failures on it, and
    finally delegates the real work to _process_node.

    :param introspection_data: dictionary with data from the ramdisk
    :returns: result of _process_node for the matched node
    :raises: utils.Error on hook failures, unknown node, or node missing
        from Ironic
    """
    hooks = plugins_base.processing_hooks_manager()
    failures = []
    for hook_ext in hooks:
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook_ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            # Expected hook failure: log without traceback.
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      {'hook': hook_ext.name, 'error': exc})
            failures.append('Preprocessing hook %(hook)s: %(error)s' %
                            {'hook': hook_ext.name, 'error': exc})
        except Exception as exc:
            # Unexpected failure: log with the full traceback.
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': hook_ext.name, 'error': exc})
            failures.append(_('Unexpected exception during preprocessing '
                              'in hook %s') % hook_ext.name)
    # Lookup happens even after hook failures so the node (if any) can be
    # marked as finished with an error.
    node_info = _find_node_info(introspection_data, failures)
    if failures and node_info:
        msg = _('The following failures happened during running '
                'pre-processing hooks for node %(uuid)s:\n%(failures)s') % {
            'uuid': node_info.uuid,
            'failures': '\n'.join(failures)
        }
        node_info.finished(error=_('Data pre-processing failed'))
        raise utils.Error(msg)
    elif not node_info:
        # No node matched; report the failures without touching any record.
        msg = _('The following failures happened during running '
                'pre-processing hooks for unknown node:\n%(failures)s') % {
            'failures': '\n'.join(failures)
        }
        raise utils.Error(msg)
    try:
        node = node_info.node()
    except exceptions.NotFound:
        # Cache and Ironic disagree: finish the record with a 404 error.
        msg = (_('Node UUID %s was found in cache, but is not found in '
                 'Ironic') % node_info.uuid)
        node_info.finished(error=msg)
        raise utils.Error(msg, code=404)
    try:
        return _process_node(node, introspection_data, node_info)
    except utils.Error as exc:
        # Known error: record it on the node and re-raise unchanged.
        node_info.finished(error=str(exc))
        raise
    except Exception as exc:
        msg = _('Unexpected exception during processing')
        LOG.exception(msg)
        node_info.finished(error=msg)
        raise utils.Error(msg)
def test_loadable_by_name(self):
    """Both entry-point names must load the RAID device detection hook."""
    hook_names = ('raid_device', 'root_device_hint')
    base.CONF.set_override('processing_hooks', ','.join(hook_names),
                           'processing')
    for hook_name in hook_names:
        extension = base.processing_hooks_manager()[hook_name]
        self.assertIsInstance(extension.obj,
                              raid_device.RaidDeviceDetection)
def _run_post_hooks(node_info, introspection_data):
    """Invoke before_update on every enabled processing hook."""
    for extension in plugins_base.processing_hooks_manager():
        LOG.debug('Running post-processing hook %s', extension.name,
                  node_info=node_info, data=introspection_data)
        extension.obj.before_update(introspection_data, node_info)
def _run_pre_hooks(introspection_data, failures):
    """Run before_processing on all hooks, collecting errors in *failures*.

    Hook exceptions are not propagated: they are logged and appended to
    ``failures`` so that the node can still be looked up and its
    introspection status updated afterwards.
    """
    for hook in plugins_base.processing_hooks_manager():
        LOG.debug('Running pre-processing hook %s', hook.name,
                  data=introspection_data)
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook.obj.before_processing(introspection_data)
        except utils.Error as exc:
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      {'hook': hook.name, 'error': exc},
                      data=introspection_data)
            failures.append('Preprocessing hook %(hook)s: %(error)s' %
                            {'hook': hook.name, 'error': exc})
        except Exception as exc:
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': hook.name, 'error': exc},
                          data=introspection_data)
            failures.append(_('Unexpected exception %(exc_class)s during '
                              'preprocessing in hook %(hook)s: %(error)s') %
                            {'hook': hook.name,
                             'exc_class': exc.__class__.__name__,
                             'error': exc})
def test_hook(self, mock_post, mock_pre):
    """The manager dispatches both phases to the example hook."""
    CONF.set_override('processing_hooks', 'example', 'processing')
    manager = plugins_base.processing_hooks_manager()
    # Pre-processing phase fans out to before_processing.
    manager.map_method('before_processing', 'introspection_data')
    mock_pre.assert_called_once_with(mock.ANY, 'introspection_data')
    # Post-processing phase fans out to before_update.
    manager.map_method('before_update', 'node_info', {})
    mock_post.assert_called_once_with(mock.ANY, 'node_info', {})
def test_hook_unexpected_exceptions(self, cli, pop_mock, process_mock):
    """Unexpected hook failures surface as utils.Error after node lookup."""
    for ext in plugins_base.processing_hooks_manager():
        patcher = mock.patch.object(ext.obj, 'before_processing',
                                    side_effect=RuntimeError('boom'))
        patcher.start()
        # Bind the patcher as a default argument so each cleanup stops its
        # own patcher instead of the last one created by the loop.
        self.addCleanup(lambda p=patcher: p.stop())
    # assertRaisesRegexp is a deprecated Python 2 alias (removed in
    # Python 3.12); use assertRaisesRegex, matching the other tests here.
    self.assertRaisesRegex(utils.Error, 'Unexpected exception',
                           process.process, self.data)
    pop_mock.return_value.finished.assert_called_once_with(
        error='Data pre-processing failed')
def test_hook_unexpected_exceptions_no_node(self):
    # Check that error from hooks is raised, not "not found"
    self.find_mock.side_effect = utils.Error('not found')
    for extension in plugins_base.processing_hooks_manager():
        hook_patcher = mock.patch.object(extension.obj,
                                         'before_processing',
                                         side_effect=RuntimeError('boom'))
        hook_patcher.start()
        # Default-argument binding: each cleanup stops its own patcher.
        self.addCleanup(lambda p=hook_patcher: p.stop())
    self.assertRaisesRegex(utils.Error, 'Unexpected exception',
                           process.process, self.data)
    # Without a matched node there is no record to mark as finished.
    self.assertFalse(self.node_info.finished.called)
def test_hook_unexpected_exceptions(self):
    """A RuntimeError inside a hook is recorded and re-raised as Error."""
    for extension in plugins_base.processing_hooks_manager():
        hook_patcher = mock.patch.object(extension.obj,
                                         'before_processing',
                                         side_effect=RuntimeError('boom'))
        hook_patcher.start()
        # Default-argument binding: each cleanup stops its own patcher.
        self.addCleanup(lambda p=hook_patcher: p.stop())
    self.assertRaisesRegex(utils.Error, 'Unexpected exception',
                           process.process, self.data)
    self.node_info.finished.assert_called_once_with(istate.Events.error,
                                                    error=mock.ANY)
    # The recorded error must name the exception class and its message.
    error_message = self.node_info.finished.call_args[1]['error']
    self.assertIn('RuntimeError', error_message)
    self.assertIn('boom', error_message)
def test_hook_unexpected_exceptions(self):
    """A RuntimeError inside a hook is recorded and re-raised as Error."""
    for extension in plugins_base.processing_hooks_manager():
        hook_patcher = mock.patch.object(extension.obj,
                                         'before_processing',
                                         side_effect=RuntimeError('boom'))
        hook_patcher.start()
        # Default-argument binding: each cleanup stops its own patcher.
        self.addCleanup(lambda p=hook_patcher: p.stop())
    self.assertRaisesRegex(utils.Error, 'Unexpected exception',
                           process.process, self.data)
    self.node_info.finished.assert_called_once_with(error=mock.ANY)
    # The recorded error must name the exception class and its message.
    error_message = self.node_info.finished.call_args[1]['error']
    self.assertIn('RuntimeError', error_message)
    self.assertIn('boom', error_message)
def init(self):
    """Initialize the service: middleware, database, hooks and periodics.

    Exits the process with status 1 if any configured processing hook
    fails to load.
    """
    # Auth middleware is skipped only for the explicit 'noauth' strategy.
    if CONF.auth_strategy != 'noauth':
        utils.add_auth_middleware(app)
    else:
        LOG.warning('Starting unauthenticated, please check'
                    ' configuration')
    if CONF.processing.store_data == 'none':
        LOG.warning('Introspection data will not be stored. Change '
                    '"[processing] store_data" option if this is not '
                    'the desired behavior')
    elif CONF.processing.store_data == 'swift':
        LOG.info(
            'Introspection data will be stored in Swift in the '
            'container %s', CONF.swift.container)
    utils.add_cors_middleware(app)
    db.init()
    # Instantiate the hook manager eagerly so misconfigured hooks fail
    # at startup rather than on the first introspection request.
    try:
        hooks = [
            ext.name for ext in plugins_base.processing_hooks_manager()
        ]
    except KeyError as exc:
        # callback function raises MissingHookError derived from KeyError
        # on missing hook
        LOG.critical('Hook(s) %s failed to load or was not found',
                     str(exc))
        sys.exit(1)
    LOG.info('Enabled processing hooks: %s', hooks)
    if CONF.firewall.manage_firewall:
        firewall.init()
    # Wrap the periodic task callables with their configured spacing; the
    # firewall update task is disabled when the firewall is unmanaged.
    periodic_update_ = periodics.periodic(
        spacing=CONF.firewall.firewall_update_period,
        enabled=CONF.firewall.manage_firewall)(periodic_update)
    periodic_clean_up_ = periodics.periodic(
        spacing=CONF.clean_up_period)(periodic_clean_up)
    self._periodics_worker = periodics.PeriodicWorker(
        callables=[(periodic_update_, None, None),
                   (periodic_clean_up_, None, None)],
        executor_factory=periodics.ExistingExecutor(utils.executor()))
    # Run the periodic worker on the shared executor in the background.
    utils.executor().submit(self._periodics_worker.start)
def init(self):
    """Initialize the service: middleware, database, hooks and periodics.

    Exits the process with status 1 if any configured processing hook
    fails to load.
    """
    # Auth middleware is skipped only for the explicit 'noauth' strategy.
    if utils.get_auth_strategy() != 'noauth':
        utils.add_auth_middleware(app)
    else:
        LOG.warning(_LW('Starting unauthenticated, please check'
                        ' configuration'))
    if CONF.processing.store_data == 'none':
        LOG.warning(_LW('Introspection data will not be stored. Change '
                        '"[processing] store_data" option if this is not '
                        'the desired behavior'))
    elif CONF.processing.store_data == 'swift':
        LOG.info(_LI('Introspection data will be stored in Swift in the '
                     'container %s'), CONF.swift.container)
    utils.add_cors_middleware(app)
    db.init()
    # Instantiate the hook manager eagerly so misconfigured hooks fail
    # at startup rather than on the first introspection request.
    try:
        hooks = [ext.name
                 for ext in plugins_base.processing_hooks_manager()]
    except KeyError as exc:
        # callback function raises MissingHookError derived from KeyError
        # on missing hook
        LOG.critical(_LC('Hook(s) %s failed to load or was not found'),
                     str(exc))
        sys.exit(1)
    LOG.info(_LI('Enabled processing hooks: %s'), hooks)
    if CONF.firewall.manage_firewall:
        firewall.init()
    # Wrap the periodic task callables with their configured spacing; the
    # firewall update task is disabled when the firewall is unmanaged.
    periodic_update_ = periodics.periodic(
        spacing=CONF.firewall.firewall_update_period,
        enabled=CONF.firewall.manage_firewall
    )(periodic_update)
    periodic_clean_up_ = periodics.periodic(
        spacing=CONF.clean_up_period
    )(periodic_clean_up)
    self._periodics_worker = periodics.PeriodicWorker(
        callables=[(periodic_update_, None, None),
                   (periodic_clean_up_, None, None)],
        executor_factory=periodics.ExistingExecutor(utils.executor()))
    # Run the periodic worker on the shared executor in the background.
    utils.executor().submit(self._periodics_worker.start)
def _run_post_hooks(node_info, introspection_data):
    """Run before_update on all hooks, applying legacy patch outputs.

    Hooks may still populate the node_patches/ports_patches output lists;
    both mechanisms are deprecated, so a warning is logged whenever one
    of them is actually used.
    """
    for extension in plugins_base.processing_hooks_manager():
        patches_for_node = []
        patches_for_ports = {}
        extension.obj.before_update(introspection_data, node_info,
                                    node_patches=patches_for_node,
                                    ports_patches=patches_for_ports)
        if patches_for_node:
            LOG.warning(_LW('Using node_patches is deprecated'))
            node_info.patch(patches_for_node)
        if patches_for_ports:
            LOG.warning(_LW('Using ports_patches is deprecated'))
            for mac, port_patches in patches_for_ports.items():
                node_info.patch_port(mac, port_patches)
def init(self):
    """Initialize the service: middleware, database, hooks and periodics.

    Exits the process with status 1 if any configured processing hook
    fails to load.
    """
    # Auth middleware is skipped only for the explicit 'noauth' strategy.
    if utils.get_auth_strategy() != 'noauth':
        utils.add_auth_middleware(app)
    else:
        LOG.warning(
            _LW('Starting unauthenticated, please check'
                ' configuration'))
    if CONF.processing.store_data == 'none':
        LOG.warning(
            _LW('Introspection data will not be stored. Change '
                '"[processing] store_data" option if this is not '
                'the desired behavior'))
    elif CONF.processing.store_data == 'swift':
        LOG.info(
            _LI('Introspection data will be stored in Swift in the '
                'container %s'), CONF.swift.container)
    utils.add_cors_middleware(app)
    db.init()
    # Instantiate the hook manager eagerly so misconfigured hooks fail
    # at startup rather than on the first introspection request.
    try:
        hooks = [
            ext.name for ext in plugins_base.processing_hooks_manager()
        ]
    except KeyError as exc:
        # stevedore raises KeyError on missing hook
        LOG.critical(_LC('Hook %s failed to load or was not found'),
                     str(exc))
        sys.exit(1)
    LOG.info(_LI('Enabled processing hooks: %s'), hooks)
    if CONF.firewall.manage_firewall:
        firewall.init()
    # Run both periodic tasks on the shared executor in the background.
    self._periodics_worker = periodics.PeriodicWorker(
        callables=[(periodic_update, None, None),
                   (periodic_clean_up, None, None)],
        executor_factory=periodics.ExistingExecutor(utils.executor()))
    utils.executor().submit(self._periodics_worker.start)
def init():
    """Initialize the service: middleware, database, hooks and periodics.

    Exits the process with status 1 if any configured processing hook
    fails to load.
    """
    # Auth middleware is skipped only for the explicit 'noauth' strategy.
    if utils.get_auth_strategy() != 'noauth':
        utils.add_auth_middleware(app)
    else:
        LOG.warning(
            _LW('Starting unauthenticated, please check'
                ' configuration'))
    if CONF.processing.store_data == 'none':
        LOG.warning(
            _LW('Introspection data will not be stored. Change '
                '"[processing] store_data" option if this is not the '
                'desired behavior'))
    elif CONF.processing.store_data == 'swift':
        LOG.info(
            _LI('Introspection data will be stored in Swift in the '
                'container %s'), CONF.swift.container)
    db.init()
    # Instantiate the hook manager eagerly so misconfigured hooks fail
    # at startup rather than on the first introspection request.
    try:
        hooks = [ext.name
                 for ext in plugins_base.processing_hooks_manager()]
    except KeyError as exc:
        # stevedore raises KeyError on missing hook
        LOG.critical(_LC('Hook %s failed to load or was not found'),
                     str(exc))
        sys.exit(1)
    LOG.info(_LI('Enabled processing hooks: %s'), hooks)
    if CONF.firewall.manage_firewall:
        firewall.init()
    # Spawn the firewall updater as a green thread with its own period.
    period = CONF.firewall.firewall_update_period
    utils.spawn_n(periodic_update, period)
    # The clean-up task is only useful when a timeout is configured.
    if CONF.timeout > 0:
        period = CONF.clean_up_period
        utils.spawn_n(periodic_clean_up, period)
    else:
        LOG.warning(_LW('Timeout is disabled in configuration'))
def _run_pre_hooks(introspection_data, failures):
    """Run before_processing on all hooks, collecting errors in *failures*.

    Hook exceptions are not propagated: they are logged and appended to
    ``failures`` so that the node can still be looked up and its
    introspection status updated afterwards.
    """
    for hook in plugins_base.processing_hooks_manager():
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook.obj.before_processing(introspection_data)
        except utils.Error as exc:
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      {'hook': hook.name, 'error': exc},
                      data=introspection_data)
            failures.append('Preprocessing hook %(hook)s: %(error)s' %
                            {'hook': hook.name, 'error': exc})
        except Exception as exc:
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': hook.name, 'error': exc},
                          data=introspection_data)
            failures.append(_('Unexpected exception %(exc_class)s during '
                              'preprocessing in hook %(hook)s: %(error)s') %
                            {'hook': hook.name,
                             'exc_class': exc.__class__.__name__,
                             'error': exc})
def init():
    """Initialize the service: middleware, database, hooks and periodics.

    Exits the process with status 1 if any configured processing hook
    fails to load.
    """
    # Auth middleware is skipped only for the explicit 'noauth' strategy.
    if utils.get_auth_strategy() != 'noauth':
        utils.add_auth_middleware(app)
    else:
        LOG.warning(_LW('Starting unauthenticated, please check'
                        ' configuration'))
    if CONF.processing.store_data == 'none':
        LOG.warning(_LW('Introspection data will not be stored. Change '
                        '"[processing] store_data" option if this is not the '
                        'desired behavior'))
    elif CONF.processing.store_data == 'swift':
        LOG.info(_LI('Introspection data will be stored in Swift in the '
                     'container %s'), CONF.swift.container)
    db.init()
    # Instantiate the hook manager eagerly so misconfigured hooks fail
    # at startup rather than on the first introspection request.
    try:
        hooks = [ext.name
                 for ext in plugins_base.processing_hooks_manager()]
    except KeyError as exc:
        # stevedore raises KeyError on missing hook
        LOG.critical(_LC('Hook %s failed to load or was not found'),
                     str(exc))
        sys.exit(1)
    LOG.info(_LI('Enabled processing hooks: %s'), hooks)
    if CONF.firewall.manage_firewall:
        firewall.init()
    # Spawn the firewall updater as a green thread with its own period.
    period = CONF.firewall.firewall_update_period
    utils.spawn_n(periodic_update, period)
    # The clean-up task is only useful when a timeout is configured.
    if CONF.timeout > 0:
        period = CONF.clean_up_period
        utils.spawn_n(periodic_clean_up, period)
    else:
        LOG.warning(_LW('Timeout is disabled in configuration'))
def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data
    processing.

    Runs every enabled pre-processing hook (collecting failures instead of
    aborting), looks the node up, records any collected failures on it, and
    finally delegates the real work to _process_node.

    :param introspection_data: dictionary with data from the ramdisk
    :returns: result of _process_node for the matched node
    :raises: utils.Error on hook failures, unknown node, or node missing
        from Ironic
    """
    hooks = plugins_base.processing_hooks_manager()
    failures = []
    for hook_ext in hooks:
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook_ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            # Expected hook failure: log without traceback.
            LOG.error(
                _LE('Hook %(hook)s failed, delaying error report '
                    'until node look up: %(error)s'), {
                        'hook': hook_ext.name,
                        'error': exc
                    })
            failures.append('Preprocessing hook %(hook)s: %(error)s' % {
                'hook': hook_ext.name,
                'error': exc
            })
        except Exception as exc:
            # Unexpected failure: log with the full traceback.
            LOG.exception(
                _LE('Hook %(hook)s failed, delaying error report '
                    'until node look up: %(error)s'), {
                        'hook': hook_ext.name,
                        'error': exc
                    })
            failures.append(
                _('Unexpected exception during preprocessing '
                  'in hook %s') % hook_ext.name)
    # Lookup happens even after hook failures so the node (if any) can be
    # marked as finished with an error.
    node_info = _find_node_info(introspection_data, failures)
    if failures and node_info:
        msg = _('The following failures happened during running '
                'pre-processing hooks for node %(uuid)s:\n%(failures)s') % {
            'uuid': node_info.uuid,
            'failures': '\n'.join(failures)
        }
        node_info.finished(error=_('Data pre-processing failed'))
        raise utils.Error(msg)
    elif not node_info:
        # No node matched; report the failures without touching any record.
        msg = _('The following failures happened during running '
                'pre-processing hooks for unknown node:\n%(failures)s') % {
            'failures': '\n'.join(failures)
        }
        raise utils.Error(msg)
    try:
        node = node_info.node()
    except exceptions.NotFound:
        # Cache and Ironic disagree: finish the record with a 404 error.
        msg = (
            _('Node UUID %s was found in cache, but is not found in Ironic')
            % node_info.uuid)
        node_info.finished(error=msg)
        raise utils.Error(msg, code=404)
    try:
        return _process_node(node, introspection_data, node_info)
    except utils.Error as exc:
        # Known error: record it on the node and re-raise unchanged.
        node_info.finished(error=str(exc))
        raise
    except Exception as exc:
        msg = _('Unexpected exception during processing')
        LOG.exception(msg)
        node_info.finished(error=msg)
        raise utils.Error(msg)
def _run_post_hooks(node_info, introspection_data):
    """Invoke before_update on every enabled processing hook."""
    for extension in plugins_base.processing_hooks_manager():
        extension.obj.before_update(introspection_data, node_info)
def test_loadable_by_name(self, mock_caps):
    """The 'capabilities' entry point loads the capabilities hook."""
    base.CONF.set_override('processing_hooks', 'capabilities',
                           'processing')
    extension = base.processing_hooks_manager()['capabilities']
    self.assertIsInstance(extension.obj, capabilities.CapabilitiesHook)
def test_loadable_by_name(self):
    """The 'raid_device' entry point loads the RAID detection hook."""
    base.CONF.set_override('processing_hooks', 'raid_device',
                           'processing')
    extension = base.processing_hooks_manager()['raid_device']
    self.assertIsInstance(extension.obj, raid_device.RaidDeviceDetection)
def test_hook_loadable_by_name(self):
    """The 'validate_interfaces' entry point loads the standard hook."""
    CONF.set_override('processing_hooks', 'validate_interfaces',
                      'processing')
    extension = base.processing_hooks_manager()['validate_interfaces']
    self.assertIsInstance(extension.obj,
                          std_plugins.ValidateInterfacesHook)
def test_hook_loadable_by_name(self):
    """The 'scheduler' entry point loads the standard scheduler hook."""
    CONF.set_override('processing_hooks', 'scheduler', 'processing')
    extension = base.processing_hooks_manager()['scheduler']
    self.assertIsInstance(extension.obj, std_plugins.SchedulerHook)
def test_manager_is_cached(self):
    """Repeated calls must yield the identical manager object."""
    first = plugins_base.processing_hooks_manager()
    second = plugins_base.processing_hooks_manager()
    # assertIs checks identity, not mere equality.
    self.assertIs(first, second)
def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data
    processing.

    Runs every enabled pre-processing hook (collecting failures instead of
    aborting), looks the node up, locks it, records any collected failures,
    and finally delegates the real work to _process_node.

    :param introspection_data: dictionary with data from the ramdisk
    :returns: result of _process_node for the matched node
    :raises: utils.Error on hook failures, unknown node, already-finished
        introspection, or node missing from Ironic
    """
    hooks = plugins_base.processing_hooks_manager()
    failures = []
    for hook_ext in hooks:
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook_ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            # Expected hook failure: log without traceback.
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      {'hook': hook_ext.name, 'error': exc},
                      data=introspection_data)
            failures.append('Preprocessing hook %(hook)s: %(error)s' %
                            {'hook': hook_ext.name, 'error': exc})
        except Exception as exc:
            # Unexpected failure: log with traceback and record the
            # exception class and message for the final report.
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': hook_ext.name, 'error': exc},
                          data=introspection_data)
            failures.append(_('Unexpected exception %(exc_class)s during '
                              'preprocessing in hook %(hook)s: %(error)s') %
                            {'hook': hook_ext.name,
                             'exc_class': exc.__class__.__name__,
                             'error': exc})
    # Lookup happens even after hook failures so the node (if any) can be
    # marked as finished with an error.
    node_info = _find_node_info(introspection_data, failures)
    if node_info:
        # Locking is already done in find_node() but may be not done in a
        # node_not_found hook
        node_info.acquire_lock()
    if failures or node_info is None:
        msg = _('The following failures happened during running '
                'pre-processing hooks:\n%s') % '\n'.join(failures)
        if node_info is not None:
            node_info.finished(error='\n'.join(failures))
        raise utils.Error(msg, node_info=node_info,
                          data=introspection_data)
    LOG.info(_LI('Matching node is %s'), node_info.uuid,
             node_info=node_info, data=introspection_data)
    if node_info.finished_at is not None:
        # race condition or introspection canceled
        raise utils.Error(_('Node processing already finished with '
                            'error: %s') % node_info.error,
                          node_info=node_info, code=400)
    try:
        node = node_info.node()
    except exceptions.NotFound:
        # Cache and Ironic disagree: finish the record with a 404 error.
        msg = _('Node was found in cache, but is not found in Ironic')
        node_info.finished(error=msg)
        raise utils.Error(msg, code=404, node_info=node_info,
                          data=introspection_data)
    try:
        return _process_node(node, introspection_data, node_info)
    except utils.Error as exc:
        # Known error: record it on the node and re-raise unchanged.
        node_info.finished(error=str(exc))
        raise
    except Exception as exc:
        LOG.exception(_LE('Unexpected exception during processing'))
        msg = _('Unexpected exception %(exc_class)s during processing: '
                '%(error)s') % {'exc_class': exc.__class__.__name__,
                                'error': exc}
        node_info.finished(error=msg)
        raise utils.Error(msg, node_info=node_info,
                          data=introspection_data)