コード例 #1
0
ファイル: process.py プロジェクト: mandre/ironic-inspector
def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data processing.

    :param introspection_data: dict with data sent by the ramdisk
    :raises: utils.Error if any pre-processing hook failed, if no node
        matched the data, if the node is gone from Ironic, or if node
        processing itself failed
    :returns: result of _process_node for the matched node
    """
    hooks = plugins_base.processing_hooks_manager()
    failures = []
    for hook_ext in hooks:
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook_ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      {'hook': hook_ext.name, 'error': exc})
            failures.append('Preprocessing hook %(hook)s: %(error)s' %
                            {'hook': hook_ext.name, 'error': exc})
        except Exception as exc:
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': hook_ext.name, 'error': exc})
            failures.append(_('Unexpected exception during preprocessing '
                              'in hook %s') % hook_ext.name)

    # Look up the node even if hooks failed so its status can be updated.
    node_info = _find_node_info(introspection_data, failures)

    if failures and node_info:
        msg = _('The following failures happened during running '
                'pre-processing hooks for node %(uuid)s:\n%(failures)s') % {
            'uuid': node_info.uuid,
            'failures': '\n'.join(failures)
        }
        node_info.finished(error=_('Data pre-processing failed'))
        raise utils.Error(msg)
    elif not node_info:
        # No matching node: report failures without a cached status record.
        msg = _('The following failures happened during running '
                'pre-processing hooks for unknown node:\n%(failures)s') % {
            'failures': '\n'.join(failures)
        }
        raise utils.Error(msg)

    try:
        node = node_info.node()
    except exceptions.NotFound:
        # Cache can be stale if the node was deleted from Ironic meanwhile.
        msg = (_('Node UUID %s was found in cache, but is not found in Ironic')
               % node_info.uuid)
        node_info.finished(error=msg)
        raise utils.Error(msg, code=404)

    try:
        return _process_node(node, introspection_data, node_info)
    except utils.Error as exc:
        # Expected errors: record on the node and propagate unchanged.
        node_info.finished(error=str(exc))
        raise
    except Exception as exc:
        msg = _('Unexpected exception during processing')
        LOG.exception(msg)
        node_info.finished(error=msg)
        raise utils.Error(msg)
コード例 #2
0
def _run_pre_hooks(introspection_data, failures):
    """Run every pre-processing hook on the introspection data.

    Exceptions raised by hooks are not propagated: they are appended to
    ``failures`` so that node status can still be updated after look up.

    :param introspection_data: dict with data sent by the ramdisk
    :param failures: mutable list collecting failure descriptions
    """
    for ext in plugins_base.processing_hooks_manager():
        LOG.debug('Running pre-processing hook %s', ext.name,
                  data=introspection_data)
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            details = {'hook': ext.name, 'error': exc}
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      details, data=introspection_data)
            failures.append('Preprocessing hook %(hook)s: %(error)s' %
                            details)
        except Exception as exc:
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': ext.name, 'error': exc},
                          data=introspection_data)
            failures.append(_('Unexpected exception %(exc_class)s during '
                              'preprocessing in hook %(hook)s: %(error)s') %
                            {'hook': ext.name,
                             'exc_class': type(exc).__name__,
                             'error': exc})
コード例 #3
0
def _parse_pci_alias_entry():
    """Parse the JSON entries of the CONF.pci_devices.alias option.

    Malformed entries (invalid JSON, or wrong set of keys) are logged
    and skipped rather than failing the whole parse.

    :returns: dict mapping (vendor_id, product_id) tuples to device names
    """
    parsed_pci_devices = []
    for pci_alias_entry in CONF.pci_devices.alias:
        try:
            parsed_entry = json.loads(pci_alias_entry)
            # Require exactly these three keys -- no more, no less.
            if set(parsed_entry) != {'vendor_id', 'product_id', 'name'}:
                raise KeyError(_LE("The 'alias' entry should contain "
                                   "exactly 'vendor_id', 'product_id' and "
                                   "'name' keys"))
            parsed_pci_devices.append(parsed_entry)
        except (ValueError, KeyError) as ex:
            # ValueError: invalid JSON; KeyError: raised just above.
            LOG.error(_LE("Error parsing 'alias' option: %s"), ex)
    return {(dev['vendor_id'], dev['product_id']): dev['name']
            for dev in parsed_pci_devices}
コード例 #4
0
def _parse_pci_alias_entry():
    """Parse the JSON entries of the CONF.pci_devices.alias option.

    Each entry must be a JSON object containing exactly the keys
    'vendor_id', 'product_id' and 'name'.  Malformed entries are logged
    and skipped rather than failing the whole parse.

    :returns: dict mapping (vendor_id, product_id) tuples to device names
    """
    parsed_pci_devices = []
    for pci_alias_entry in CONF.pci_devices.alias:
        try:
            parsed_entry = json.loads(pci_alias_entry)
            # json.loads may legitimately return a non-dict (a bare number,
            # string or list); without the isinstance check such entries
            # would crash set()/the key lookup below instead of being
            # reported as configuration errors.
            if (not isinstance(parsed_entry, dict) or
                    set(parsed_entry) != {'vendor_id', 'product_id', 'name'}):
                raise KeyError(
                    _LE("The 'alias' entry should contain "
                        "exactly 'vendor_id', 'product_id' and "
                        "'name' keys"))
            parsed_pci_devices.append(parsed_entry)
        except (ValueError, KeyError) as ex:
            # ValueError: invalid JSON; KeyError: raised just above.
            LOG.error(_LE("Error parsing 'alias' option: %s"), ex)
    return {(dev['vendor_id'], dev['product_id']): dev['name']
            for dev in parsed_pci_devices}
コード例 #5
0
    def add_attribute(self, name, value, session=None):
        """Store look up attribute for a node in the database.

        :param name: attribute name
        :param value: attribute value or list of possible values
        :param session: optional existing database session
        :raises: Error if attributes values are already in database
        """
        # Normalize a scalar value to a one-element list.
        if not isinstance(value, list):
            value = [value]

        with db.ensure_transaction(session) as session:
            try:
                for v in value:
                    db.Attribute(name=name, value=v, uuid=self.uuid).save(
                        session)
            except db_exc.DBDuplicateEntry as exc:
                # A duplicate (name, value) pair means another node is
                # already being introspected with this attribute.
                LOG.error(_LE('Database integrity error %s during '
                              'adding attributes'), exc, node_info=self)
                raise utils.Error(_(
                    'Some or all of %(name)s\'s %(value)s are already '
                    'on introspection') % {'name': name, 'value': value},
                    node_info=self)
            # Invalidate attributes so they're loaded on next usage
            self._attributes = None
コード例 #6
0
    def _get_interfaces(self, data=None):
        """Convert inventory to a dict with interfaces.

        :return: dict interface name -> dict with keys 'mac' and 'ip'
        """
        inventory = data.get('inventory', {})

        if not inventory:
            # Old bash ramdisk: the data itself carries 'interfaces'.
            LOG.warning(_LW('No inventory provided: using old bash ramdisk '
                            'is deprecated, please switch to '
                            'ironic-python-agent'), data=data)
            return data.get('interfaces')

        result = {}
        for iface in inventory.get('interfaces', ()):
            name = iface.get('name')
            if not name:
                LOG.error(_LE('Malformed interface record: %s'),
                          iface, data=data)
                continue

            mac = iface.get('mac_address')
            ip = iface.get('ipv4_address')
            LOG.debug('Found interface %(name)s with MAC "%(mac)s" and '
                      'IP address "%(ip)s"',
                      {'name': name, 'mac': mac, 'ip': ip}, data=data)
            result[name] = {'ip': ip, 'mac': mac}

        return result
コード例 #7
0
def _reapply(node_info):
    """Re-run introspection processing using stored unprocessed data.

    Runs in a background thread: all failures are recorded on the node
    via node_info.finished() instead of being raised to the caller.

    :param node_info: NodeInfo instance for the node being reapplied
    """
    # runs in background
    try:
        introspection_data = _get_unprocessed_data(node_info.uuid)
    except Exception as exc:
        LOG.exception(_LE('Encountered exception while fetching '
                          'stored introspection data'),
                      node_info=node_info)
        msg = (_('Unexpected exception %(exc_class)s while fetching '
                 'unprocessed introspection data from Swift: %(error)s') %
               {'exc_class': exc.__class__.__name__, 'error': exc})
        node_info.finished(error=msg)
        return

    try:
        ironic = ir_utils.get_client()
    except Exception as exc:
        msg = _('Encountered an exception while getting the Ironic client: '
                '%s') % exc
        LOG.error(msg, node_info=node_info, data=introspection_data)
        # Move the state machine into the error state before finishing.
        node_info.fsm_event(istate.Events.error)
        node_info.finished(error=msg)
        return

    try:
        _reapply_with_data(node_info, introspection_data)
    except Exception as exc:
        node_info.finished(error=str(exc))
        return

    # Only reached when reapplying succeeded; do not power off the node.
    _finish(node_info, ironic, introspection_data,
            power_off=False)

    LOG.info(_LI('Successfully reapplied introspection on stored '
                 'data'), node_info=node_info, data=introspection_data)
コード例 #8
0
ファイル: node_cache.py プロジェクト: Dexus/openstack-helm
    def add_attribute(self, name, value, session=None):
        """Store look up attribute for a node in the database.

        :param name: attribute name
        :param value: attribute value or list of possible values
        :param session: optional existing database session
        :raises: Error if attributes values are already in database
        """
        values = value if isinstance(value, list) else [value]

        with db.ensure_transaction(session) as session:
            try:
                for item in values:
                    attr = db.Attribute(name=name, value=item,
                                        uuid=self.uuid)
                    attr.save(session)
            except db_exc.DBDuplicateEntry as exc:
                # Duplicate (name, value) rows mean these attributes are
                # already claimed by an ongoing introspection.
                LOG.error(_LE('Database integrity error %s during '
                              'adding attributes'), exc, node_info=self)
                raise utils.Error(
                    _('Some or all of %(name)s\'s %(value)s are already '
                      'on introspection') % {'name': name, 'value': values},
                    node_info=self)
            # Force a reload of the attributes on the next access.
            self._attributes = None
コード例 #9
0
def periodic_clean_up():  # pragma: no cover
    """Run one round of node-cache clean up and Ironic synchronization.

    Any failure is logged and swallowed so the periodic task keeps running.
    """
    try:
        timed_out = node_cache.clean_up()
        if timed_out:
            # Some nodes timed out: their firewall rules must be refreshed.
            firewall.update_filters()
        sync_with_ironic()
    except Exception:
        LOG.exception(_LE('Periodic clean up of node cache failed'))
コード例 #10
0
 def inner(node_info, *args, **kwargs):
     """Call the wrapped function, mapping exceptions onto FSM events.

     ``func``, ``errors`` and ``no_errors`` come from the enclosing
     decorator factory (not visible here).  Exceptions matching
     ``no_errors`` are logged at debug level and swallowed; exceptions
     matching ``errors`` fire the ``error`` FSM event and are re-raised.

     :returns: the wrapped function's result, or None if it raised.
     """
     ret = None
     try:
         ret = func(node_info, *args, **kwargs)
     except no_errors as exc:
         LOG.debug(
             'Not processing error event for the '
             'exception: %(exc)s raised by %(func)s', {
                 'exc': exc,
                 'func': reflection.get_callable_name(func)
             },
             node_info=node_info)
     except errors as exc:
         # Log and fire the FSM event, then let the exception propagate.
         with excutils.save_and_reraise_exception():
             LOG.error(_LE('Processing the error event because of an '
                           'exception %(exc_type)s: %(exc)s raised by '
                           '%(func)s'),
                       {
                           'exc_type': type(exc),
                           'exc': exc,
                           'func': reflection.get_callable_name(func)
                       },
                       node_info=node_info)
             # an error event should be possible from all states
             node_info.fsm_event(istate.Events.error)
     return ret
コード例 #11
0
ファイル: main.py プロジェクト: keedya/ironic-inspector
def periodic_clean_up():  # pragma: no cover
    """One periodic pass: purge the node cache, then sync with Ironic.

    Errors are logged and suppressed to keep the periodic job alive.
    """
    try:
        if node_cache.clean_up():
            # Timed-out nodes were removed: refresh firewall filters too.
            firewall.update_filters()
        sync_with_ironic()
    except Exception:
        LOG.exception(_LE('Periodic clean up of node cache failed'))
コード例 #12
0
    def _migrate_up(self, engine, config, version, with_data=False):
        """migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.

        :param engine: SQLAlchemy engine under test
        :param config: migration config (not used by this method itself)
        :param version: target migration version
        :param with_data: if True, run the optional _pre_upgrade_<version>
            and _check_<version> hooks around the upgrade
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        try:
            if with_data:
                data = None
                pre_upgrade = getattr(self, "_pre_upgrade_%s" % version, None)
                if pre_upgrade:
                    data = pre_upgrade(engine)

            self.migration_ext.upgrade(version)
            self.assertEqual(version, self.migration_ext.version())
            if with_data:
                check = getattr(self, "_check_%s" % version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error(
                _LE("Failed to migrate to version %(version)s on engine "
                    "%(engine)s"), {
                        'version': version,
                        'engine': engine
                    })
            # Re-raise so the test still fails after logging context.
            raise
コード例 #13
0
    def _migrate_up(self, engine, config, version, with_data=False):
        """migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        try:
            data = None
            if with_data:
                hook = getattr(self, "_pre_upgrade_%s" % version, None)
                if hook is not None:
                    data = hook(engine)

            self.migration_ext.upgrade(version)
            self.assertEqual(version, self.migration_ext.version())

            if with_data:
                hook = getattr(self, "_check_%s" % version, None)
                if hook is not None:
                    hook(engine, data)
        except Exception:
            LOG.error(_LE("Failed to migrate to version %(version)s on engine "
                          "%(engine)s"),
                      {'version': version, 'engine': engine})
            raise
コード例 #14
0
def periodic_update(period):  # pragma: no cover
    """Refresh iptables filters forever, sleeping ``period`` seconds
    between runs.  Failures are logged and do not stop the loop.
    """
    def _tick():
        # A single refresh attempt; never raises.
        LOG.debug('Running periodic update of filters')
        try:
            firewall.update_filters()
        except Exception:
            LOG.exception(_LE('Periodic update failed'))

    while True:
        _tick()
        eventlet.greenthread.sleep(period)
コード例 #15
0
ファイル: main.py プロジェクト: mandre/ironic-inspector
def periodic_update(period):  # pragma: no cover
    """Run firewall.update_filters() every ``period`` seconds, forever.

    Each failed update is logged and swallowed so the loop survives.
    """
    def _update_once():
        LOG.debug('Running periodic update of filters')
        try:
            firewall.update_filters()
        except Exception:
            LOG.exception(_LE('Periodic update failed'))

    while True:
        _update_once()
        eventlet.greenthread.sleep(period)
コード例 #16
0
ファイル: process.py プロジェクト: ericxiett/arobot_contrib
def _reapply(node_info):
    """Re-run the full processing pipeline on stored introspection data.

    Runs in a background thread; every failure is recorded on the node
    via node_info.finished() rather than raised.

    :param node_info: NodeInfo instance for the node being reapplied
    """
    # runs in background
    try:
        introspection_data = _get_unprocessed_data(node_info.uuid)
    except Exception as exc:
        LOG.exception(_LE('Encountered exception while fetching '
                          'stored introspection data'),
                      node_info=node_info)
        msg = (_('Unexpected exception %(exc_class)s while fetching '
                 'unprocessed introspection data from Swift: %(error)s') % {
                     'exc_class': exc.__class__.__name__,
                     'error': exc
                 })
        node_info.finished(error=msg)
        return

    # Pre-processing hooks collect failures instead of raising.
    failures = []
    _run_pre_hooks(introspection_data, failures)
    if failures:
        LOG.error(_LE('Pre-processing failures detected reapplying '
                      'introspection on stored data:\n%s'),
                  '\n'.join(failures),
                  node_info=node_info)
        node_info.finished(error='\n'.join(failures))
        return

    try:
        ironic = ir_utils.get_client()
        node_info.create_ports(introspection_data.get('macs') or ())
        _run_post_hooks(node_info, introspection_data)
        _store_data(node_info, introspection_data)
        node_info.invalidate_cache()
        rules.apply(node_info, introspection_data)
        # Reapplying must never power off the node.
        _finish(ironic, node_info, introspection_data, power_off=False)
    except Exception as exc:
        LOG.exception(_LE('Encountered exception reapplying '
                          'introspection on stored data'),
                      node_info=node_info,
                      data=introspection_data)
        node_info.finished(error=str(exc))
    else:
        LOG.info(_LI('Successfully reapplied introspection on stored '
                     'data'),
                 node_info=node_info,
                 data=introspection_data)
コード例 #17
0
ファイル: process.py プロジェクト: keedya/ironic-inspector
def _store_unprocessed_data(node_info, data):
    """Save the raw (pre-hook) introspection data in the background.

    Errors are logged and suppressed: losing the unprocessed copy must
    not fail the introspection itself.
    """
    try:
        _store_data(node_info, data,
                    suffix=_UNPROCESSED_DATA_STORE_SUFFIX)
    except Exception:
        LOG.exception(_LE('Encountered exception saving unprocessed '
                          'introspection data'),
                      node_info=node_info, data=data)
コード例 #18
0
def _store_unprocessed_data(node_info, data):
    """Persist the unmodified ramdisk data under the 'unprocessed' suffix.

    Runs as a background task; any storage failure is only logged so it
    cannot break the main processing flow.
    """
    try:
        _store_data(node_info, data,
                    suffix=_UNPROCESSED_DATA_STORE_SUFFIX)
    except Exception:
        LOG.exception(_LE('Encountered exception saving unprocessed '
                          'introspection data'),
                      node_info=node_info,
                      data=data)
コード例 #19
0
ファイル: process.py プロジェクト: keedya/ironic-inspector
def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data processing.

    :param introspection_data: dict with data sent by the ramdisk
    :raises: utils.Error on pre-processing failures, unknown node,
        already-finished node, node missing from Ironic, or processing
        failure
    :returns: result of _process_node for the matched node
    """
    # Keep a pristine copy before hooks mutate the data in place.
    unprocessed_data = copy.deepcopy(introspection_data)
    failures = []
    _run_pre_hooks(introspection_data, failures)
    node_info = _find_node_info(introspection_data, failures)
    if node_info:
        # Locking is already done in find_node() but may be not done in a
        # node_not_found hook
        node_info.acquire_lock()

    if failures or node_info is None:
        msg = _('The following failures happened during running '
                'pre-processing hooks:\n%s') % '\n'.join(failures)
        if node_info is not None:
            node_info.finished(error='\n'.join(failures))
        raise utils.Error(msg, node_info=node_info, data=introspection_data)

    LOG.info(_LI('Matching node is %s'), node_info.uuid,
             node_info=node_info, data=introspection_data)

    if node_info.finished_at is not None:
        # race condition or introspection canceled
        raise utils.Error(_('Node processing already finished with '
                            'error: %s') % node_info.error,
                          node_info=node_info, code=400)

    # Note(mkovacik): store data now when we're sure that a background
    # thread won't race with other process() or introspect.abort()
    # call
    utils.executor().submit(_store_unprocessed_data, node_info,
                            unprocessed_data)

    try:
        node = node_info.node()
    except exceptions.NotFound:
        # Cache can be stale if the node was deleted from Ironic meanwhile.
        msg = _('Node was found in cache, but is not found in Ironic')
        node_info.finished(error=msg)
        raise utils.Error(msg, code=404, node_info=node_info,
                          data=introspection_data)

    try:
        return _process_node(node, introspection_data, node_info)
    except utils.Error as exc:
        # Expected errors: record on the node and propagate unchanged.
        node_info.finished(error=str(exc))
        raise
    except Exception as exc:
        LOG.exception(_LE('Unexpected exception during processing'))
        msg = _('Unexpected exception %(exc_class)s during processing: '
                '%(error)s') % {'exc_class': exc.__class__.__name__,
                                'error': exc}
        node_info.finished(error=msg)
        raise utils.Error(msg, node_info=node_info, data=introspection_data)
コード例 #20
0
def periodic_clean_up(period):  # pragma: no cover
    """Clean up the node cache and sync with Ironic every ``period``
    seconds, forever.  Failures are logged and do not stop the loop.
    """
    while True:
        LOG.debug('Running periodic clean up of node cache')
        try:
            timed_out = node_cache.clean_up()
            if timed_out:
                # Timed-out nodes were finished: refresh firewall filters.
                firewall.update_filters()
            sync_with_ironic()
        except Exception:
            LOG.exception(_LE('Periodic clean up of node cache failed'))
        eventlet.greenthread.sleep(period)
コード例 #21
0
ファイル: main.py プロジェクト: mandre/ironic-inspector
def periodic_clean_up(period):  # pragma: no cover
    """Endless loop: purge the node cache, sync with Ironic, sleep.

    Each iteration's errors are logged and suppressed so the periodic
    task keeps running.
    """
    def _clean_once():
        LOG.debug('Running periodic clean up of node cache')
        try:
            if node_cache.clean_up():
                firewall.update_filters()
            sync_with_ironic()
        except Exception:
            LOG.exception(_LE('Periodic clean up of node cache failed'))

    while True:
        _clean_once()
        eventlet.greenthread.sleep(period)
コード例 #22
0
    def _get_interfaces(self, data=None):
        """Convert inventory to a dict with interfaces.

        :param data: introspection data.  NOTE(review): despite the None
            default, the body calls data.get() immediately, so callers
            must always pass a dict -- confirm before relying on the
            default.
        :return: dict interface name -> dict with keys 'mac' and 'ip'
        """
        result = {}
        inventory = data.get('inventory', {})

        if inventory:
            for iface in inventory.get('interfaces', ()):
                name = iface.get('name')
                mac = iface.get('mac_address')
                ip = iface.get('ipv4_address')

                if not name:
                    LOG.error(_LE('Malformed interface record: %s'),
                              iface,
                              data=data)
                    continue

                # Interfaces without a MAC cannot be matched to ports.
                if not mac:
                    LOG.debug('Skipping interface %s without link information',
                              name,
                              data=data)
                    continue

                if not utils.is_valid_mac(mac):
                    LOG.warning(_LW('MAC %(mac)s for interface %(name)s is '
                                    'not valid, skipping'), {
                                        'mac': mac,
                                        'name': name
                                    },
                                data=data)
                    continue

                # Normalize MAC to lower case for consistent lookups.
                mac = mac.lower()

                LOG.debug(
                    'Found interface %(name)s with MAC "%(mac)s" and '
                    'IP address "%(ip)s"', {
                        'name': name,
                        'mac': mac,
                        'ip': ip
                    },
                    data=data)
                result[name] = {'ip': ip, 'mac': mac}
        else:
            # Old bash ramdisk fallback: data itself carries 'interfaces'.
            LOG.warning(_LW('No inventory provided: using old bash ramdisk '
                            'is deprecated, please switch to '
                            'ironic-python-agent'),
                        data=data)
            result = data.get('interfaces')

        return result
コード例 #23
0
 def wrapper(*args, **kwargs):
     """Call the wrapped view, translating exceptions to error responses.

     - utils.Error carries its own HTTP status code;
     - werkzeug HTTPException uses its own code, falling back to 400;
     - anything else becomes a generic internal error, with details
       appended only when debug mode is on (to avoid leaking internals).
     """
     try:
         return func(*args, **kwargs)
     except utils.Error as exc:
         return error_response(exc, exc.http_code)
     except werkzeug.exceptions.HTTPException as exc:
         return error_response(exc, exc.code or 400)
     except Exception as exc:
         LOG.exception(_LE('Internal server error'))
         msg = _('Internal server error')
         if CONF.debug:
             msg += ' (%s): %s' % (exc.__class__.__name__, exc)
         return error_response(msg)
コード例 #24
0
ファイル: main.py プロジェクト: mandre/ironic-inspector
 def wrapper(*args, **kwargs):
     """Invoke the wrapped endpoint and convert exceptions to responses.

     Application errors (utils.Error) and werkzeug HTTPExceptions keep
     their status codes; unexpected exceptions are logged and returned
     as a generic internal error, with class/message details exposed
     only when CONF.debug is enabled.
     """
     try:
         return func(*args, **kwargs)
     except utils.Error as exc:
         return error_response(exc, exc.http_code)
     except werkzeug.exceptions.HTTPException as exc:
         return error_response(exc, exc.code or 400)
     except Exception as exc:
         LOG.exception(_LE('Internal server error'))
         msg = _('Internal server error')
         if CONF.debug:
             msg += ' (%s): %s' % (exc.__class__.__name__, exc)
         return error_response(msg)
コード例 #25
0
    def before_update(self, introspection_data, node_info, **kwargs):
        """Process LLDP data and patch Ironic port local link connection

        For each inventory interface that was accepted during processing,
        translate its LLDP TLVs into local_link_connection patches and
        apply them to the matching Ironic port.

        :param introspection_data: introspection data dict
        :param node_info: NodeInfo wrapper for the node being updated
        """
        inventory = utils.get_inventory(introspection_data)

        ironic_ports = node_info.ports()

        for iface in inventory['interfaces']:
            # Only consider interfaces that survived earlier processing.
            if iface['name'] not in introspection_data['all_interfaces']:
                continue

            mac_address = iface['mac_address']
            port = ironic_ports.get(mac_address)
            if not port:
                LOG.debug(
                    "Skipping LLC processing for interface %s, matching "
                    "port not found in Ironic.",
                    mac_address,
                    node_info=node_info,
                    data=introspection_data)
                continue

            lldp_data = iface.get('lldp')
            if lldp_data is None:
                LOG.warning(_LW("No LLDP Data found for interface %s"),
                            mac_address,
                            node_info=node_info,
                            data=introspection_data)
                continue

            # Build one JSON patch per recognized TLV.
            patches = []
            for tlv_type, tlv_value in lldp_data:
                patch = self._get_local_link_patch(tlv_type, tlv_value, port)
                if patch is not None:
                    patches.append(patch)

            try:
                # NOTE(sambetts) We need a newer version of Ironic API for this
                # transaction, so create a new ironic client and explicitly
                # pass it into the function.
                cli = ironic.get_client(api_version=REQUIRED_IRONIC_VERSION)
                node_info.patch_port(port, patches, ironic=cli)
            except client_exc.NotAcceptable:
                LOG.error(_LE("Unable to set Ironic port local link "
                              "connection information because Ironic does not "
                              "support the required version"),
                          node_info=node_info,
                          data=introspection_data)
                # NOTE(sambetts) May as well break out out of the loop here
                # because Ironic version is not going to change for the other
                # interfaces.
                break
コード例 #26
0
ファイル: process.py プロジェクト: keedya/ironic-inspector
def _reapply(node_info):
    """Re-run the processing pipeline on stored introspection data.

    Runs in a background thread.  On data-fetch failure only the node
    lock is released; all later failures are recorded on the node via
    node_info.finished().

    :param node_info: NodeInfo instance for the node being reapplied
    """
    # runs in background
    try:
        introspection_data = _get_unprocessed_data(node_info.uuid)
    except Exception:
        LOG.exception(_LE('Encountered exception while fetching '
                          'stored introspection data'),
                      node_info=node_info)
        # No status update possible without data; just free the lock.
        node_info.release_lock()
        return

    # Pre-processing hooks collect failures instead of raising.
    failures = []
    _run_pre_hooks(introspection_data, failures)
    if failures:
        LOG.error(_LE('Pre-processing failures detected reapplying '
                      'introspection on stored data:\n%s'),
                  '\n'.join(failures), node_info=node_info)
        node_info.finished(error='\n'.join(failures))
        return

    try:
        ironic = ir_utils.get_client()
        node_info.create_ports(introspection_data.get('macs') or ())
        _run_post_hooks(node_info, introspection_data)
        _store_data(node_info, introspection_data)
        node_info.invalidate_cache()
        rules.apply(node_info, introspection_data)
        # Reapplying must never power off the node.
        _finish(ironic, node_info, introspection_data,
                power_off=False)
    except Exception as exc:
        LOG.exception(_LE('Encountered exception reapplying '
                          'introspection on stored data'),
                      node_info=node_info,
                      data=introspection_data)
        node_info.finished(error=str(exc))
    else:
        LOG.info(_LI('Successfully reapplied introspection on stored '
                     'data'), node_info=node_info, data=introspection_data)
コード例 #27
0
ファイル: node_cache.py プロジェクト: Dexus/openstack-helm
def clean_up():
    """Clean up the cache.

    * Finish introspection for timed out nodes.
    * Drop outdated node status information.

    :return: list of timed out node UUID's
    """
    # Finished records older than this are deleted outright.
    status_keep_threshold = (time.time() - CONF.node_status_keep_time)

    with db.ensure_transaction() as session:
        db.model_query(db.Node, session=session).filter(
            db.Node.finished_at.isnot(None),
            db.Node.finished_at < status_keep_threshold).delete()

        timeout = CONF.timeout
        # Non-positive timeout disables the timed-out-node handling.
        if timeout <= 0:
            return []
        threshold = time.time() - timeout
        uuids = [
            row.uuid
            for row in db.model_query(db.Node.uuid, session=session).filter(
                db.Node.started_at < threshold, db.Node.finished_at.is_(
                    None)).all()
        ]
        if not uuids:
            return []

        LOG.error(_LE('Introspection for nodes %s has timed out'), uuids)
        for u in uuids:
            node_info = get_node(u, locked=True)
            try:
                # Re-check under the lock: the node may have finished (or
                # restarted) between the query above and acquiring the lock.
                if node_info.finished_at or node_info.started_at > threshold:
                    continue

                db.model_query(db.Node,
                               session=session).filter_by(uuid=u).update({
                                   'finished_at':
                                   time.time(),
                                   'error':
                                   'Introspection timeout'
                               })
                db.model_query(db.Attribute,
                               session=session).filter_by(uuid=u).delete()
                db.model_query(db.Option,
                               session=session).filter_by(uuid=u).delete()
            finally:
                node_info.release_lock()

    return uuids
コード例 #28
0
def check_auth(request):
    """Check authentication on request.

    :param request: Flask request
    :raises: utils.Error if access is denied
    """
    if get_auth_strategy() == 'noauth':
        return
    # The X-Identity-Status header may be absent entirely (e.g. the auth
    # middleware did not run); default to '' instead of crashing on
    # None.lower().  Requests without keystone headers are still denied
    # below because they carry no 'admin' role.
    if request.headers.get('X-Identity-Status', '').lower() == 'invalid':
        raise Error(_('Authentication required'), code=401)
    roles = (request.headers.get('X-Roles') or '').split(',')
    if 'admin' not in roles:
        LOG.error(_LE('Role "admin" not in user role list %s'), roles)
        raise Error(_('Access denied'), code=403)
コード例 #29
0
ファイル: utils.py プロジェクト: keedya/ironic-inspector
def check_auth(request):
    """Check authentication on request.

    :param request: Flask request
    :raises: utils.Error if access is denied
    """
    if get_auth_strategy() == 'noauth':
        return
    # Guard against a missing X-Identity-Status header: headers.get()
    # returns None without a default, and None.lower() would raise
    # AttributeError (a 500) instead of a clean denial.  Requests with
    # no keystone headers are still rejected by the role check below.
    if request.headers.get('X-Identity-Status', '').lower() == 'invalid':
        raise Error(_('Authentication required'), code=401)
    roles = (request.headers.get('X-Roles') or '').split(',')
    if 'admin' not in roles:
        LOG.error(_LE('Role "admin" not in user role list %s'), roles)
        raise Error(_('Access denied'), code=403)
コード例 #30
0
    def _get_interfaces(self, data=None):
        """Convert inventory to a dict with interfaces.

        :param data: introspection data passed through to
            utils.get_inventory() and log calls.  NOTE(review): the None
            default looks unused -- utils.get_inventory(None) behavior is
            not visible here; confirm callers always pass a dict.
        :return: dict interface name -> dict with keys 'mac' and 'ip'
        """
        result = {}
        inventory = utils.get_inventory(data)

        for iface in inventory['interfaces']:
            name = iface.get('name')
            mac = iface.get('mac_address')
            ip = iface.get('ipv4_address')
            client_id = iface.get('client_id')

            if not name:
                LOG.error(_LE('Malformed interface record: %s'),
                          iface,
                          data=data)
                continue

            # Interfaces without a MAC cannot be matched to ports.
            if not mac:
                LOG.debug('Skipping interface %s without link information',
                          name,
                          data=data)
                continue

            if not netutils.is_valid_mac(mac):
                LOG.warning(_LW('MAC %(mac)s for interface %(name)s is '
                                'not valid, skipping'), {
                                    'mac': mac,
                                    'name': name
                                },
                            data=data)
                continue

            # Normalize MAC to lower case for consistent lookups.
            mac = mac.lower()

            LOG.debug(
                'Found interface %(name)s with MAC "%(mac)s", '
                'IP address "%(ip)s" and client_id "%(client_id)s"', {
                    'name': name,
                    'mac': mac,
                    'ip': ip,
                    'client_id': client_id
                },
                data=data)
            result[name] = {'ip': ip, 'mac': mac, 'client_id': client_id}

        return result
コード例 #31
0
ファイル: process.py プロジェクト: keedya/ironic-inspector
def _run_pre_hooks(introspection_data, failures):
    """Run the before_processing hook chain over the ramdisk data.

    Exceptions are caught and appended to *failures* rather than raised,
    so that the node introspection status can still be updated after the
    node look up (see NOTE below).

    :param introspection_data: introspection data to pre-process in place
    :param failures: mutable list collecting failure descriptions
    """
    # NOTE(dtantsur): catch exceptions, so that we have changes to update
    # node introspection status after look up
    for hook_ext in plugins_base.processing_hooks_manager():
        try:
            hook_ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            details = {'hook': hook_ext.name, 'error': exc}
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      details, data=introspection_data)
            failures.append('Preprocessing hook %(hook)s: %(error)s'
                            % details)
        except Exception as exc:
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': hook_ext.name, 'error': exc},
                          data=introspection_data)
            failures.append(_('Unexpected exception %(exc_class)s during '
                              'preprocessing in hook %(hook)s: %(error)s') %
                            {'hook': hook_ext.name,
                             'exc_class': exc.__class__.__name__,
                             'error': exc})
コード例 #32
0
ファイル: firewall.py プロジェクト: mandre/ironic-inspector
def _iptables(*args, **kwargs):
    """Run one iptables invocation, capturing its output.

    :param args: arguments appended to BASE_COMMAND
    :param kwargs: passed through to subprocess.check_output; the special
        keyword 'ignore' (default False) downgrades failures to a debug
        message instead of re-raising them.
    :raises: subprocess.CalledProcessError when the command fails and
        'ignore' is not set.
    """
    # NOTE(dtantsur): -w flag makes it wait for xtables lock
    cmd = BASE_COMMAND + args
    ignore = kwargs.pop('ignore', False)
    LOG.debug('Running iptables %s', args)
    kwargs['stderr'] = subprocess.STDOUT
    try:
        subprocess.check_output(cmd, **kwargs)
    except subprocess.CalledProcessError as exc:
        if ignore:
            LOG.debug('ignoring failed iptables %s:\n%s', args, exc.output)
        else:
            # Pass arguments lazily instead of eagerly %-formatting the
            # message: rendering only happens if the record is emitted.
            LOG.error(_LE('iptables %(iptables)s failed:\n%(exc)s'),
                      {'iptables': args, 'exc': exc.output})
            raise
コード例 #33
0
ファイル: firewall.py プロジェクト: ericxiett/arobot_contrib
def _iptables(*args, **kwargs):
    """Run one iptables invocation, capturing its output.

    :param args: arguments appended to BASE_COMMAND
    :param kwargs: passed through to subprocess.check_output; the special
        keyword 'ignore' (default False) downgrades failures to a debug
        message instead of re-raising them.
    :raises: subprocess.CalledProcessError when the command fails and
        'ignore' is not set.
    """
    # NOTE(dtantsur): -w flag makes it wait for xtables lock
    cmd = BASE_COMMAND + args
    ignore = kwargs.pop('ignore', False)
    LOG.debug('Running iptables %s', args)
    kwargs['stderr'] = subprocess.STDOUT
    try:
        subprocess.check_output(cmd, **kwargs)
    except subprocess.CalledProcessError as exc:
        # Collapse multi-line command output into one log line.
        output = exc.output.replace('\n', '. ')
        if ignore:
            LOG.debug('Ignoring failed iptables %(args)s: %(output)s',
                      {'args': args, 'output': output})
        else:
            # Pass arguments lazily instead of eagerly %-formatting the
            # message: rendering only happens if the record is emitted.
            LOG.error(_LE('iptables %(iptables)s failed: %(exc)s'),
                      {'iptables': args, 'exc': output})
            raise
コード例 #34
0
    def before_update(self, introspection_data, node_info, **kwargs):
        """Process LLDP data and patch Ironic port local link connection"""
        inventory = utils.get_inventory(introspection_data)
        known_ports = node_info.ports()

        for iface in inventory['interfaces']:
            if iface['name'] not in introspection_data['all_interfaces']:
                continue

            mac = iface['mac_address']
            port = known_ports.get(mac)
            if not port:
                LOG.debug("Skipping LLC processing for interface %s, matching "
                          "port not found in Ironic.", mac,
                          node_info=node_info, data=introspection_data)
                continue

            lldp_data = iface.get('lldp')
            if lldp_data is None:
                LOG.warning(_LW("No LLDP Data found for interface %s"),
                            mac, node_info=node_info,
                            data=introspection_data)
                continue

            # Collect one patch per recognized TLV.
            patches = [patch for patch in
                       (self._get_local_link_patch(tlv_type, tlv_value, port)
                        for tlv_type, tlv_value in lldp_data)
                       if patch is not None]

            try:
                # NOTE(sambetts) We need a newer version of Ironic API for
                # this transaction, so create a new ironic client and
                # explicitly pass it into the function.
                cli = ironic.get_client(api_version=REQUIRED_IRONIC_VERSION)
                node_info.patch_port(port, patches, ironic=cli)
            except client_exc.NotAcceptable:
                LOG.error(_LE("Unable to set Ironic port local link "
                              "connection information because Ironic does not "
                              "support the required version"),
                          node_info=node_info, data=introspection_data)
                # NOTE(sambetts) The Ironic version will not change for the
                # remaining interfaces, so stop processing here.
                break
コード例 #35
0
ファイル: node_cache.py プロジェクト: KanM/ironic-inspector
def clean_up():
    """Clean up the cache.

    * Finish introspection for timed out nodes.
    * Drop outdated node status information.

    :return: list of timed out node UUID's
    """
    status_keep_threshold = time.time() - CONF.node_status_keep_time

    with db.ensure_transaction() as session:
        # Drop records for nodes whose introspection finished long ago.
        db.model_query(db.Node, session=session).filter(
            db.Node.finished_at.isnot(None),
            db.Node.finished_at < status_keep_threshold).delete()

        timeout = CONF.timeout
        if timeout <= 0:
            # Timeout handling is disabled.
            return []

        started_before = time.time() - timeout
        rows = db.model_query(db.Node.uuid, session=session).filter(
            db.Node.started_at < started_before,
            db.Node.finished_at.is_(None)).all()
        uuids = [row.uuid for row in rows]
        if not uuids:
            return []

        LOG.error(_LE('Introspection for nodes %s has timed out'), uuids)
        for uuid in uuids:
            node_info = get_node(uuid, locked=True)
            try:
                # Re-check under the lock: the node may have finished or
                # been restarted since the query above.
                if (node_info.finished_at
                        or node_info.started_at > started_before):
                    continue

                db.model_query(db.Node, session=session).filter_by(
                    uuid=uuid).update({'finished_at': time.time(),
                                       'error': 'Introspection timeout'})
                db.model_query(db.Attribute, session=session).filter_by(
                    uuid=uuid).delete()
                db.model_query(db.Option, session=session).filter_by(
                    uuid=uuid).delete()
            finally:
                node_info.release_lock()

    return uuids
コード例 #36
0
ファイル: process.py プロジェクト: ericxiett/arobot_contrib
def _store_logs(introspection_data, node_info):
    """Store base64-encoded ramdisk logs on disk, if configured.

    Missing logs or an unset ramdisk_logs_dir option only produce a
    warning; storage errors are logged but never raised.

    :param introspection_data: introspection data with an optional 'logs'
        base64-encoded entry
    :param node_info: cached node information, may be None for an
        unidentified node
    """
    logs = introspection_data.get('logs')
    if not logs:
        LOG.warning(_LW('No logs were passed by the ramdisk'),
                    data=introspection_data, node_info=node_info)
        return

    if not CONF.processing.ramdisk_logs_dir:
        LOG.warning(_LW('Failed to store logs received from the ramdisk '
                        'because ramdisk_logs_dir configuration option '
                        'is not set'),
                    data=introspection_data, node_info=node_info)
        return

    uuid = node_info.uuid if node_info is not None else 'unknown'
    mac = (utils.get_pxe_mac(introspection_data) or 'unknown').replace(':', '')
    bmc = utils.get_ipmi_address_from_data(introspection_data) or 'unknown'
    file_name = CONF.processing.ramdisk_logs_filename_format.format(
        uuid=uuid, mac=mac, dt=datetime.datetime.utcnow(), bmc=bmc)

    try:
        if not os.path.exists(CONF.processing.ramdisk_logs_dir):
            os.makedirs(CONF.processing.ramdisk_logs_dir)
        log_path = os.path.join(CONF.processing.ramdisk_logs_dir, file_name)
        with open(log_path, 'wb') as fp:
            fp.write(base64.b64decode(logs))
    except EnvironmentError:
        LOG.exception(_LE('Could not store the ramdisk logs'),
                      data=introspection_data, node_info=node_info)
    else:
        LOG.info(_LI('Ramdisk logs were stored in file %s'), file_name,
                 data=introspection_data, node_info=node_info)
コード例 #37
0
def clean_up():
    """Clean up the cache.

    * Finish introspection for timed out nodes.
    * Drop outdated node status information.

    :return: list of timed out node UUID's
    """
    drop_before = (timeutils.utcnow() -
                   datetime.timedelta(seconds=CONF.node_status_keep_time))

    with db.ensure_transaction() as session:
        # Drop records for nodes whose introspection finished long ago.
        db.model_query(db.Node, session=session).filter(
            db.Node.finished_at.isnot(None),
            db.Node.finished_at < drop_before).delete()

        timeout = CONF.timeout
        if timeout <= 0:
            # Timeout handling is disabled.
            return []

        threshold = timeutils.utcnow() - datetime.timedelta(seconds=timeout)
        rows = db.model_query(db.Node.uuid, session=session).filter(
            db.Node.started_at < threshold,
            db.Node.finished_at.is_(None)).all()
        uuids = [row.uuid for row in rows]

    if not uuids:
        return []

    LOG.error(_LE('Introspection for nodes %s has timed out'), uuids)
    for uuid in uuids:
        node_info = get_node(uuid, locked=True)
        try:
            # Re-check under the lock: the node may have finished or been
            # restarted since the query above.
            if node_info.finished_at or node_info.started_at > threshold:
                continue
            node_info.fsm_event(istate.Events.timeout)
            node_info.finished(error='Introspection timeout')
        finally:
            node_info.release_lock()

    return uuids
コード例 #38
0
def _ib_mac_to_rmac_mapping(blacklist_macs, ports_active):
    """Mapping between host InfiniBand MAC to EthernetOverInfiniBand MAC

    On InfiniBand deployment we need to map between the baremetal host
    InfiniBand MAC to the EoIB MAC. The EoIB MAC addresses are learned
    automatically by the EoIB interfaces and those MACs are recorded
    to the /sys/class/net/<ethoib_interface>/eth/neighs file.
    The InfiniBand GUID is taken from the ironic port client-id extra
    attribute. The InfiniBand GUID is the last 8 bytes of the client-id.
    The file format allows to map the GUID to EoIB MAC. The firewall
    rules based on those MACs get applied to the dnsmasq_interface by the
    update_filters function.

    :param blacklist_macs: List of InfiniBand baremetal hosts macs to
                           blacklist.
    :param ports_active: list of active ironic ports
    :return baremetal InfiniBand to remote mac on ironic node mapping
    """
    ib_mac_to_remote_mac = {}
    for interface in CONF.firewall.ethoib_interfaces:
        neighs_file = os.path.join('/sys/class/net', interface, 'eth/neighs')
        try:
            with open(neighs_file, 'r') as fd:
                neighs = fd.read()
        except IOError:
            LOG.error(
                _LE('Interface %s is not Ethernet Over InfiniBand; '
                    'Skipping ...'), interface)
            continue

        for port in ports_active:
            if port.address not in blacklist_macs:
                continue
            client_id = port.extra.get('client-id')
            if not client_id:
                continue
            # Note(moshele): The last 8 bytes in the client-id is
            # the baremetal node InfiniBand GUID
            guid = client_id[-23:]
            match = re.search(EMAC_REGEX + guid, neighs)
            if match:
                ib_mac_to_remote_mac[port.address] = match.group(1)
    return ib_mac_to_remote_mac
コード例 #39
0
ファイル: node_cache.py プロジェクト: Dexus/openstack-helm
def create_node(driver, ironic=None, **attributes):
    """Create ironic node and cache it.

    * Create new node in ironic.
    * Cache it in inspector.

    :param driver: driver for Ironic node.
    :param ironic: Ironic client instance; a new one is created when not
                   provided.
    :param attributes: dict, additional keyword arguments to pass
                             to the ironic client on node creation.
    :return: NodeInfo, or None in case error happened.
    """
    if ironic is None:
        ironic = ir_utils.get_client()
    try:
        node = ironic.node.create(driver=driver, **attributes)
    except exceptions.InvalidAttribute as e:
        # Swallow the error deliberately: the caller receives None
        # instead of an exception when creation fails.
        LOG.error(_LE('Failed to create new node: %s'), e)
    else:
        LOG.info(_LI('Node %s was created successfully'), node.uuid)
        return add_node(node.uuid, ironic=ironic)
コード例 #40
0
def create_node(driver, ironic=None, **attributes):
    """Register a new node in Ironic and add it to the inspector cache.

    :param driver: driver for the Ironic node.
    :param ironic: Ironic client instance; a new one is created when not
                   provided.
    :param attributes: additional keyword arguments passed to the Ironic
                       client on node creation.
    :return: NodeInfo, or None in case error happened.
    """
    if ironic is None:
        ironic = ir_utils.get_client()
    try:
        node = ironic.node.create(driver=driver, **attributes)
    except exceptions.InvalidAttribute as e:
        # Creation failure is reported to the caller as None.
        LOG.error(_LE('Failed to create new node: %s'), e)
        return None
    LOG.info(_LI('Node %s was created successfully'), node.uuid)
    return add_node(node.uuid, ironic=ironic)
コード例 #41
0
    def _get_interfaces(self, data=None):
        """Convert inventory to a dict with interfaces.

        Interfaces without a name are logged and dropped; interfaces
        without a (valid) MAC address are skipped.

        :return: dict interface name -> dict with keys 'mac' and 'ip'
        """
        inventory = utils.get_inventory(data)
        interfaces = {}

        for nic in inventory['interfaces']:
            name = nic.get('name')
            mac = nic.get('mac_address')
            ip = nic.get('ipv4_address')

            if not name:
                LOG.error(_LE('Malformed interface record: %s'),
                          nic, data=data)
                continue
            if not mac:
                LOG.debug('Skipping interface %s without link information',
                          name, data=data)
                continue
            if not netutils.is_valid_mac(mac):
                LOG.warning(_LW('MAC %(mac)s for interface %(name)s is '
                                'not valid, skipping'),
                            {'mac': mac, 'name': name},
                            data=data)
                continue

            # Normalize for case-insensitive comparison with Ironic ports.
            mac = mac.lower()
            LOG.debug('Found interface %(name)s with MAC "%(mac)s" and '
                      'IP address "%(ip)s"',
                      {'name': name, 'mac': mac, 'ip': ip}, data=data)
            interfaces[name] = {'ip': ip, 'mac': mac}

        return interfaces
コード例 #42
0
def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data processing.

    :param introspection_data: introspection data sent by the ramdisk
    :returns: result of _process_node() for the looked-up node
    :raises: utils.Error on pre-processing failure, unknown node,
        already-finished introspection or processing failure
    """
    hooks = plugins_base.processing_hooks_manager()
    failures = []
    for hook_ext in hooks:
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook_ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            # Expected error type: log without a traceback.
            LOG.error(_LE('Hook %(hook)s failed, delaying error report '
                          'until node look up: %(error)s'),
                      {'hook': hook_ext.name, 'error': exc},
                      data=introspection_data)
            failures.append('Preprocessing hook %(hook)s: %(error)s' %
                            {'hook': hook_ext.name, 'error': exc})
        except Exception as exc:
            # Unexpected error: log with a traceback and record the
            # exception class in the failure message.
            LOG.exception(_LE('Hook %(hook)s failed, delaying error report '
                              'until node look up: %(error)s'),
                          {'hook': hook_ext.name, 'error': exc},
                          data=introspection_data)
            failures.append(_('Unexpected exception %(exc_class)s during '
                              'preprocessing in hook %(hook)s: %(error)s') %
                            {'hook': hook_ext.name,
                             'exc_class': exc.__class__.__name__,
                             'error': exc})

    node_info = _find_node_info(introspection_data, failures)
    if node_info:
        # Locking is already done in find_node() but may be not done in a
        # node_not_found hook
        node_info.acquire_lock()

    if failures or node_info is None:
        msg = _('The following failures happened during running '
                'pre-processing hooks:\n%s') % '\n'.join(failures)
        if node_info is not None:
            # Record the failures on the node before raising.
            node_info.finished(error='\n'.join(failures))
        raise utils.Error(msg, node_info=node_info, data=introspection_data)

    LOG.info(_LI('Matching node is %s'), node_info.uuid,
             node_info=node_info, data=introspection_data)

    if node_info.finished_at is not None:
        # race condition or introspection canceled
        raise utils.Error(_('Node processing already finished with '
                            'error: %s') % node_info.error,
                          node_info=node_info, code=400)

    try:
        node = node_info.node()
    except exceptions.NotFound:
        # Stale cache entry: the node no longer exists in Ironic.
        msg = _('Node was found in cache, but is not found in Ironic')
        node_info.finished(error=msg)
        raise utils.Error(msg, code=404, node_info=node_info,
                          data=introspection_data)

    try:
        return _process_node(node, introspection_data, node_info)
    except utils.Error as exc:
        # Known error type: the message is already user-readable.
        node_info.finished(error=str(exc))
        raise
    except Exception as exc:
        LOG.exception(_LE('Unexpected exception during processing'))
        msg = _('Unexpected exception %(exc_class)s during processing: '
                '%(error)s') % {'exc_class': exc.__class__.__name__,
                                'error': exc}
        node_info.finished(error=msg)
        raise utils.Error(msg, node_info=node_info, data=introspection_data)
コード例 #43
0
def periodic_update():  # pragma: no cover
    """Periodic task: refresh the firewall rules.

    Errors are logged and swallowed so that the periodic task keeps
    running.
    """
    try:
        firewall.update_filters()
    except Exception:
        # Broad catch is deliberate for a periodic task.
        LOG.exception(_LE('Periodic update of firewall rules failed'))
コード例 #44
0
ファイル: main.py プロジェクト: keedya/ironic-inspector
def periodic_update():  # pragma: no cover
    """Periodic task: refresh the firewall rules.

    Errors are logged and swallowed so that the periodic task keeps
    running.
    """
    try:
        firewall.update_filters()
    except Exception:
        # Broad catch is deliberate for a periodic task.
        LOG.exception(_LE('Periodic update of firewall rules failed'))
コード例 #45
0
def _validate_rule_schemas(conditions_json, actions_json):
    """Check rule conditions and actions against their JSON schemas.

    :raises: utils.Error if either list fails schema validation
    """
    try:
        jsonschema.validate(conditions_json, conditions_schema())
    except jsonschema.ValidationError as exc:
        raise utils.Error(_('Validation failed for conditions: %s') % exc)

    try:
        jsonschema.validate(actions_json, actions_schema())
    except jsonschema.ValidationError as exc:
        raise utils.Error(_('Validation failed for actions: %s') % exc)


def _parse_conditions(conditions_json, cond_mgr):
    """Validate conditions via their plugins and flatten them to tuples.

    :param conditions_json: list of condition dicts (already schema-checked)
    :param cond_mgr: rule conditions plugin manager
    :returns: list of (field, op, multiple, params) tuples
    :raises: utils.Error on a bad JSON path or invalid plugin parameters
    """
    conditions = []
    for cond_json in conditions_json:
        field = cond_json['field']
        try:
            jsonpath.parse(field)
        except Exception as exc:
            raise utils.Error(_('Unable to parse field JSON path %(field)s: '
                                '%(error)s') % {'field': field, 'error': exc})

        plugin = cond_mgr[cond_json['op']].obj
        # Everything except the reserved keys is passed to the plugin.
        params = {k: v for k, v in cond_json.items()
                  if k not in ('op', 'field', 'multiple')}
        try:
            plugin.validate(params)
        except ValueError as exc:
            raise utils.Error(_('Invalid parameters for operator %(op)s: '
                                '%(error)s') %
                              {'op': cond_json['op'], 'error': exc})

        conditions.append((cond_json['field'], cond_json['op'],
                           cond_json.get('multiple', 'any'), params))
    return conditions


def _parse_actions(actions_json, act_mgr):
    """Validate actions via their plugins and flatten them to tuples.

    :param actions_json: list of action dicts (already schema-checked)
    :param act_mgr: rule actions plugin manager
    :returns: list of (action, params) tuples
    :raises: utils.Error on invalid plugin parameters
    """
    actions = []
    for action_json in actions_json:
        plugin = act_mgr[action_json['action']].obj
        # Everything except 'action' is passed to the plugin.
        params = {k: v for k, v in action_json.items() if k != 'action'}
        try:
            plugin.validate(params)
        except ValueError as exc:
            raise utils.Error(_('Invalid parameters for action %(act)s: '
                                '%(error)s') %
                              {'act': action_json['action'], 'error': exc})

        actions.append((action_json['action'], params))
    return actions


def create(conditions_json, actions_json, uuid=None,
           description=None):
    """Create a new rule in database.

    :param conditions_json: list of dicts with the following keys:
                            * op - operator
                            * field - JSON path to field to compare
                            Other keys are stored as is.
    :param actions_json: list of dicts with the following keys:
                         * action - action type
                         Other keys are stored as is.
    :param uuid: rule UUID, will be generated if empty
    :param description: human-readable rule description
    :returns: new IntrospectionRule object
    :raises: utils.Error on failure
    """
    uuid = uuid or uuidutils.generate_uuid()
    LOG.debug('Creating rule %(uuid)s with description "%(descr)s", '
              'conditions %(conditions)s and actions %(actions)s',
              {'uuid': uuid, 'descr': description,
               'conditions': conditions_json, 'actions': actions_json})

    _validate_rule_schemas(conditions_json, actions_json)

    # Fetch both plugin managers up front (as the original code did), so
    # plugin loading problems surface before per-rule validation.
    cond_mgr = plugins_base.rule_conditions_manager()
    act_mgr = plugins_base.rule_actions_manager()
    conditions = _parse_conditions(conditions_json, cond_mgr)
    actions = _parse_actions(actions_json, act_mgr)

    try:
        with db.ensure_transaction() as session:
            rule = db.Rule(uuid=uuid, description=description,
                           disabled=False, created_at=timeutils.utcnow())

            for field, op, multiple, params in conditions:
                rule.conditions.append(db.RuleCondition(op=op, field=field,
                                                        multiple=multiple,
                                                        params=params))

            for action, params in actions:
                rule.actions.append(db.RuleAction(action=action,
                                                  params=params))

            rule.save(session)
    except db_exc.DBDuplicateEntry as exc:
        # Translate the DB uniqueness violation into an HTTP 409 for the
        # API layer.
        LOG.error(_LE('Database integrity error %s when '
                      'creating a rule'), exc)
        raise utils.Error(_('Rule with UUID %s already exists') % uuid,
                          code=409)

    LOG.info(_LI('Created rule %(uuid)s with description "%(descr)s"'),
             {'uuid': uuid, 'descr': description})
    return IntrospectionRule(uuid=uuid,
                             conditions=rule.conditions,
                             actions=rule.actions,
                             description=description)
コード例 #46
0
ファイル: process.py プロジェクト: mandre/ironic-inspector
def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data processing.

    :param introspection_data: introspection data sent by the ramdisk
    :returns: result of _process_node() for the looked-up node
    :raises: utils.Error on pre-processing failure, unknown node,
        or processing failure
    """
    hooks = plugins_base.processing_hooks_manager()
    failures = []
    for hook_ext in hooks:
        # NOTE(dtantsur): catch exceptions, so that we have changes to update
        # node introspection status after look up
        try:
            hook_ext.obj.before_processing(introspection_data)
        except utils.Error as exc:
            # Expected error type: log without a traceback.
            LOG.error(
                _LE('Hook %(hook)s failed, delaying error report '
                    'until node look up: %(error)s'), {
                        'hook': hook_ext.name,
                        'error': exc
                    })
            failures.append('Preprocessing hook %(hook)s: %(error)s' % {
                'hook': hook_ext.name,
                'error': exc
            })
        except Exception as exc:
            # Unexpected error: log with a traceback.
            LOG.exception(
                _LE('Hook %(hook)s failed, delaying error report '
                    'until node look up: %(error)s'), {
                        'hook': hook_ext.name,
                        'error': exc
                    })
            failures.append(
                _('Unexpected exception during preprocessing '
                  'in hook %s') % hook_ext.name)

    node_info = _find_node_info(introspection_data, failures)

    if failures and node_info:
        # Node is known: record the failure on it before raising.
        msg = _('The following failures happened during running '
                'pre-processing hooks for node %(uuid)s:\n%(failures)s') % {
                    'uuid': node_info.uuid,
                    'failures': '\n'.join(failures)
                }
        node_info.finished(error=_('Data pre-processing failed'))
        raise utils.Error(msg)
    elif not node_info:
        # Node is unknown: nothing to update, just report the failures.
        msg = _('The following failures happened during running '
                'pre-processing hooks for unknown node:\n%(failures)s') % {
                    'failures': '\n'.join(failures)
                }
        raise utils.Error(msg)

    try:
        node = node_info.node()
    except exceptions.NotFound:
        # Stale cache entry: the node no longer exists in Ironic.
        msg = (
            _('Node UUID %s was found in cache, but is not found in Ironic') %
            node_info.uuid)
        node_info.finished(error=msg)
        raise utils.Error(msg, code=404)

    try:
        return _process_node(node, introspection_data, node_info)
    except utils.Error as exc:
        # Known error type: the message is already user-readable.
        node_info.finished(error=str(exc))
        raise
    except Exception as exc:
        msg = _('Unexpected exception during processing')
        LOG.exception(msg)
        node_info.finished(error=msg)
        raise utils.Error(msg)
コード例 #47
0
def process(introspection_data):
    """Process data from the ramdisk.

    This function heavily relies on the hooks to do the actual data processing.

    :param introspection_data: introspection data sent by the ramdisk
    :returns: result of _process_node() for the looked-up node
    :raises: utils.Error on pre-processing failure, unknown node,
        already-finished introspection or processing failure
    """
    # Snapshot the payload before hooks get a chance to modify it; the
    # copy is stored later as the "unprocessed" data.
    unprocessed_data = copy.deepcopy(introspection_data)
    failures = []
    _run_pre_hooks(introspection_data, failures)
    node_info = _find_node_info(introspection_data, failures)
    if node_info:
        # Locking is already done in find_node() but may be not done in a
        # node_not_found hook
        node_info.acquire_lock()

    if failures or node_info is None:
        msg = _('The following failures happened during running '
                'pre-processing hooks:\n%s') % '\n'.join(failures)
        if node_info is not None:
            # Record the failures on the node before raising.
            node_info.finished(error='\n'.join(failures))
        _store_logs(introspection_data, node_info)
        raise utils.Error(msg, node_info=node_info, data=introspection_data)

    LOG.info(_LI('Matching node is %s'), node_info.uuid,
             node_info=node_info, data=introspection_data)

    if node_info.finished_at is not None:
        # race condition or introspection canceled
        raise utils.Error(_('Node processing already finished with '
                            'error: %s') % node_info.error,
                          node_info=node_info, code=400)

    # Note(mkovacik): store data now when we're sure that a background
    # thread won't race with other process() or introspect.abort()
    # call
    utils.executor().submit(_store_unprocessed_data, node_info,
                            unprocessed_data)

    try:
        node = node_info.node()
    except ir_utils.NotFound as exc:
        # Stale cache entry: record the error, keep the logs and re-raise
        # the original exception unchanged.
        with excutils.save_and_reraise_exception():
            node_info.finished(error=str(exc))
            _store_logs(introspection_data, node_info)

    try:
        result = _process_node(node_info, node, introspection_data)
    except utils.Error as exc:
        # Known error type: the message is already user-readable.
        node_info.finished(error=str(exc))
        with excutils.save_and_reraise_exception():
            _store_logs(introspection_data, node_info)
    except Exception as exc:
        LOG.exception(_LE('Unexpected exception during processing'))
        msg = _('Unexpected exception %(exc_class)s during processing: '
                '%(error)s') % {'exc_class': exc.__class__.__name__,
                                'error': exc}
        node_info.finished(error=msg)
        _store_logs(introspection_data, node_info)
        raise utils.Error(msg, node_info=node_info, data=introspection_data,
                          code=500)

    if CONF.processing.always_store_ramdisk_logs:
        _store_logs(introspection_data, node_info)
    return result