def _write_back(self):
    keys = []
    values = []
    expire_time_mss = []
    for name, (data, expire_time_ms) in self.cache.iteritems(prefix=[], shallow=True):
        keys.append(self._get_name_bytes_wo_tl(name))
        values.append(data)
        expire_time_mss.append(expire_time_ms)
    if len(keys) > 0:
        self._put_batch(keys, values, expire_time_mss)
        logging.info(f'Cache write back {len(keys)} items')
    self.cache = NameTrie()
Example #2
def __init__(self, face=None, keychain=None):
    config = read_client_conf() if not face or not keychain else {}
    if face is not None:
        self.face = face
    else:
        self.face = default_face(config['transport'])
    self.face.callback = self._receive
    if keychain is not None:
        self.keychain = keychain
    else:
        self.keychain = default_keychain(config['pib'], config['tpm'])
    self._int_tree = NameTrie()
    self._prefix_tree = NameTrie()
    self.data_validator = sha256_digest_checker
    self.int_validator = sha256_digest_checker
    self._autoreg_routes = []
    self._prefix_register_semaphore = aio.Semaphore(1)
    self.pub_key = None
Example #3
    def __init__(self,
                 app: NDNApp,
                 prefix: NonStrictName = None,
                 forwarding_hint: NonStrictName = None):
        """
        Initialize a ``PubSub`` instance with identity ``prefix`` and can be reached at \
            ``forwarding_hint``.
        TODO: support msg larger than MTU

        :param app: NDNApp.
        :param prefix: NonStrictName. The identity of this ``PubSub`` instance. ``PubSub`` sends\
            Data packets under the hood to make pub-sub work, so it needs an identify under which\
            can publish data. Note that you cannot initialize two ``PubSub`` instances on the same\
            node, which will cause double registration error.
        :param forwarding_hint: NonStrictName. When working as publisher, if ``prefix`` is not\
            reachable, the subscriber can use ``forwarding_hint`` to reach the publisher.
        """
        self.app = app
        self.prefix = prefix
        self.forwarding_hint = forwarding_hint
        self.published_data = NameTrie()  # name -> packet
        self.topic_to_cb = NameTrie()
        self.nonce_processed = set(
        )  # used by subscriber to de-duplicate notify interests
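
A minimal usage sketch for the constructor above. Only __init__ is shown in this excerpt; the subscribe()/publish() calls below are assumed from the surrounding pub-sub implementation and may differ in your version, and the names are illustrative.

from ndn.app import NDNApp

app = NDNApp()
pubsub = PubSub(app,
                prefix='/local-node/pubsub',         # identity used to publish Data under the hood
                forwarding_hint='/local-node/hint')  # hint so subscribers can reach this node

def on_msg(msg: bytes):
    print('notified:', bytes(msg))

async def main():
    pubsub.subscribe('/example/topic', on_msg)        # assumed API, not shown in this excerpt
    await pubsub.publish('/example/topic', b'hello')  # assumed API, not shown in this excerpt

if __name__ == '__main__':
    app.run_forever(after_start=main())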
Example #4
class SVSyncStorage:
    cache = NameTrie()
    def time_ms(self) -> int:
        return int(time() * 1000)
    def put_data_packet(self, name:NonStrictName, data_packet:bytes) -> None:
        _, meta_info, _, _ = parse_data(data_packet)
        expire_time_ms = self.time_ms()
        if meta_info.freshness_period:
            expire_time_ms += meta_info.freshness_period
        name = Name.normalize(name)
        self.cache[name] = (data_packet, expire_time_ms)
        logging.info(f'SVSyncStorage: cache save {Name.to_str(name)}')
    def get_data_packet(self, name:NonStrictName, can_be_prefix:bool=False, must_be_fresh:bool=False) -> Optional[bytes]:
        name = Name.normalize(name)
        try:
            if not can_be_prefix:
                data, expire_time_ms = self.cache[name]
                if not must_be_fresh or expire_time_ms > self.time_ms():
                    logging.info('SVSyncStorage: get from cache')
                    return data
            else:
                it = self.cache.itervalues(prefix=name, shallow=True)
                while True:
                    data, expire_time_ms = next(it)
                    if not must_be_fresh or expire_time_ms > self.time_ms():
                        logging.info('SVSyncStorage: get from cache')
                        return data
        except (KeyError, StopIteration):
            return None
    def remove_data_packet(self, name:NonStrictName) -> bool:
        removed = False
        name = Name.normalize(name)
        try:
            del self.cache[name]
            removed = True
        except KeyError:
            pass
        return removed
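
A minimal round-trip sketch for SVSyncStorage, assuming python-ndn is available. The DigestSha256Signer import path is an assumption (any Signer accepted by make_data() works), and the name and payload are illustrative.

from ndn.encoding import MetaInfo, make_data
from ndn.security import DigestSha256Signer  # assumed export; verify against your python-ndn version

storage = SVSyncStorage()
name = '/svs/node-1/data-5'
wire = make_data(name, MetaInfo(freshness_period=2000), b'hello world',
                 signer=DigestSha256Signer())
storage.put_data_packet(name, wire)
assert storage.get_data_packet(name, must_be_fresh=True) == wire                # exact match, still fresh
assert storage.get_data_packet('/svs/node-1', can_be_prefix=True) is not None   # prefix match
assert storage.remove_data_packet(name) is True
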
class Storage:
    cache = NameTrie()

    def __init__(self):
        """
        Interface for a unified key-value storage API.
        """
        aio.get_event_loop().create_task(self._periodic_write_back())

    def _put(self, key: bytes, data: bytes, expire_time_ms: int=None):
        raise NotImplementedError

    def _put_batch(self, keys: List[bytes], values: List[bytes], expire_time_mss:List[Optional[int]]):
        raise NotImplementedError

    def _get(self, key: bytes, can_be_prefix: bool=False, must_be_fresh: bool=False) -> bytes:
        raise NotImplementedError

    def _remove(self, key: bytes) -> bool:
        raise NotImplementedError


    # Wrappers around the underlying key-value store
    async def _periodic_write_back(self):
        self._write_back()
        await aio.sleep(10)
        aio.get_event_loop().create_task(self._periodic_write_back())

    @staticmethod
    def _get_name_bytes_wo_tl(name: NonStrictName) -> bytes:
        # remove name's TL as key to support efficient prefix search
        name = Name.to_bytes(name)
        offset = 0
        offset += parse_tl_num(name, offset)[1]
        offset += parse_tl_num(name, offset)[1]
        return name[offset:]
    
    @staticmethod
    def _time_ms():
        return int(time.time() * 1000)

    def _write_back(self):
        keys = []
        values = []
        expire_time_mss = []
        for name, (data, expire_time_ms) in self.cache.iteritems(prefix=[], shallow=True):
            keys.append(self._get_name_bytes_wo_tl(name))
            values.append(data)
            expire_time_mss.append(expire_time_ms)
        if len(keys) > 0:
            self._put_batch(keys, values, expire_time_mss)
            logging.info(f'Cache write back {len(keys)} items')
        self.cache = NameTrie()

    def put_data_packet(self, name: NonStrictName, data: bytes):
        """
        Insert a data packet named ``name`` with value ``data``.
        This method will parse ``data`` to get its freshnessPeriod, and compute its expiration time\
            by adding the freshnessPeriod to the current time.
        
        :param name: NonStrictName. The name of the data packet.
        :param data: bytes. The value of the data packet.
        """
        _, meta_info, _, _ = parse_data(data)
        expire_time_ms = self._time_ms()
        if meta_info.freshness_period:
            expire_time_ms += meta_info.freshness_period

        # write data packet and freshness_period to cache
        name = Name.normalize(name)
        self.cache[name] = (data, expire_time_ms)
        logging.info(f'Cache save: {Name.to_str(name)}')

    def get_data_packet(self, name: NonStrictName, can_be_prefix: bool=False,
                        must_be_fresh: bool=False) -> Optional[bytes]:
        """
        Get a data packet named ``name``.

        :param name: NonStrictName. The name of the data packet. 
        :param can_be_prefix: bool. If true, use prefix match instead of exact match.
        :param must_be_fresh: bool. If true, ignore expired data.
        :return: The value of the data packet.
        """
        name = Name.normalize(name)
        # cache lookup
        try:
            if not can_be_prefix:
                data, expire_time_ms = self.cache[name]
                if not must_be_fresh or expire_time_ms > self._time_ms():
                    logging.info('get from cache')
                    return data
            else:
                it = self.cache.itervalues(prefix=name, shallow=True)
                while True:
                    data, expire_time_ms = next(it)
                    if not must_be_fresh or expire_time_ms > self._time_ms():
                        logging.info('get from cache')
                        return data
        # not in cache, lookup in storage
        except (KeyError, StopIteration):
            key = self._get_name_bytes_wo_tl(name)
            return self._get(key, can_be_prefix, must_be_fresh)

    def remove_data_packet(self, name: NonStrictName) -> bool:
        """
        Remove a data packet named ``name``.

        :param name: NonStrictName. The name of the data packet. 
        :return: True if a data packet was removed.
        """
        removed = False
        name = Name.normalize(name)
        try:
            del self.cache[name]
            removed = True
        except KeyError:
            pass
        if self._remove(self._get_name_bytes_wo_tl(name)):
            removed = True
        return removed
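
The four backend hooks (_put, _put_batch, _get and _remove) are all a concrete backend has to provide. Below is a minimal in-memory sketch under that assumption; the MemoryStorage name and dict layout are illustrative, not part of the library, and the byte-prefix match is a simplification of real component-wise prefix matching.

import asyncio as aio
from typing import Dict, List, Optional, Tuple

class MemoryStorage(Storage):
    """Illustrative dict-backed Storage; keys are name bytes without the TL header."""
    def __init__(self):
        self._db: Dict[bytes, Tuple[bytes, Optional[int]]] = {}
        super().__init__()  # starts the periodic write-back task

    def _put(self, key: bytes, data: bytes, expire_time_ms: int=None):
        self._db[key] = (data, expire_time_ms)

    def _put_batch(self, keys: List[bytes], values: List[bytes],
                   expire_time_mss: List[Optional[int]]):
        for key, value, expire_time_ms in zip(keys, values, expire_time_mss):
            self._db[key] = (value, expire_time_ms)

    def _get(self, key: bytes, can_be_prefix: bool=False, must_be_fresh: bool=False) -> bytes:
        now = self._time_ms()
        for stored_key, (data, expire_time_ms) in self._db.items():
            match = stored_key == key or (can_be_prefix and stored_key.startswith(key))
            fresh = not must_be_fresh or expire_time_ms is None or expire_time_ms > now
            if match and fresh:
                return data
        return None

    def _remove(self, key: bytes) -> bool:
        return self._db.pop(key, None) is not None

async def demo():
    storage = MemoryStorage()          # the write-back task needs a running event loop
    storage._put(b'/a/b', b'payload')  # backend keys are plain bytes in this sketch
    assert storage._get(b'/a', can_be_prefix=True) == b'payload'
    assert storage._remove(b'/a/b') is True

aio.run(demo())
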
Example #6
class NDNApp:
    """
    An NDN application.

    :ivar face: the Face used to connect to an NFD node.
    :ivar keychain: the Keychain to store Identities and Keys, providing Signers.
    :ivar int_validator: the default validator for Interest packets.
    :ivar data_validator: the default validator for Data packets.
    """
    face: Face = None
    keychain: Keychain = None
    _int_tree: NameTrie = None
    _prefix_tree: NameTrie = None
    int_validator: Validator = None
    data_validator: Validator = None
    _autoreg_routes: List[Tuple[FormalName, Route, Optional[Validator], bool,
                                bool]]
    _prefix_register_semaphore: aio.Semaphore = None
    pub_key = None

    def __init__(self, face=None, keychain=None):
        config = read_client_conf() if not face or not keychain else {}
        if face is not None:
            self.face = face
        else:
            self.face = default_face(config['transport'])
        self.face.callback = self._receive
        if keychain is not None:
            self.keychain = keychain
        else:
            self.keychain = default_keychain(config['pib'], config['tpm'])
        self._int_tree = NameTrie()
        self._prefix_tree = NameTrie()
        self.data_validator = sha256_digest_checker
        self.int_validator = sha256_digest_checker
        self._autoreg_routes = []
        self._prefix_register_semaphore = aio.Semaphore(1)
        self.pub_key = None

    async def _receive(self, typ: int, data: BinaryStr):
        """
        Pipeline when a packet is received.

        :param typ: the Type.
        :param data: the Value of the packet with TL.
        """
        logging.debug('Packet received %s, %s' % (typ, bytes(data)))
        if typ == LpTypeNumber.LP_PACKET:
            try:
                nack_reason, fragment = parse_lp_packet(data, with_tl=True)
            except (DecodeError, TypeError, ValueError, struct.error):
                logging.warning('Unable to decode received packet')
                return
            data = fragment
            typ, _ = parse_tl_num(data)
        else:
            nack_reason = None

        if nack_reason is not None:
            try:
                name, _, _, _ = parse_interest(data, with_tl=True)
            except (DecodeError, TypeError, ValueError, struct.error):
                logging.warning('Unable to decode the fragment of LpPacket')
                return
            logging.debug('NetworkNack received %s, reason=%s' %
                          (Name.to_str(name), nack_reason))
            self._on_nack(name, nack_reason)
        else:
            if typ == TypeNumber.INTEREST:
                try:
                    name, param, app_param, sig = parse_interest(data,
                                                                 with_tl=True)
                except (DecodeError, TypeError, ValueError, struct.error):
                    logging.warning('Unable to decode received packet')
                    return
                logging.debug('Interest received %s' % Name.to_str(name))
                await self._on_interest(name,
                                        param,
                                        app_param,
                                        sig,
                                        raw_packet=data)
            elif typ == TypeNumber.DATA:
                try:
                    name, meta_info, content, sig = parse_data(data,
                                                               with_tl=True)
                except (DecodeError, TypeError, ValueError, struct.error):
                    logging.warning('Unable to decode received packet')
                    return
                logging.debug('Data received %s' % Name.to_str(name))
                await self._on_data(name,
                                    meta_info,
                                    content,
                                    sig,
                                    raw_packet=data)
            else:
                logging.warning('Unable to decode received packet')

    def put_raw_packet(self, data: BinaryStr):
        r"""
        Send a raw Data packet.

        :param data: TLV encoded Data packet.
        :type data: :any:`BinaryStr`
        :raises NetworkError: the face to NFD is down.
        """
        if not self.face.running:
            raise NetworkError('cannot send packet before connected')
        self.face.send(data)

    def prepare_data(self,
                     name: NonStrictName,
                     content: Optional[BinaryStr] = None,
                     **kwargs):
        r"""
        Prepare a Data packet by generating, encoding and signing it.

        :param name: the Name.
        :type name: :any:`NonStrictName`
        :param content: the Content.
        :type content: Optional[:any:`BinaryStr`]
        :param kwargs: :ref:`label-keyword-arguments`.
        :return: TLV encoded Data packet.
        """
        if 'signer' in kwargs:
            signer = kwargs['signer']
        else:
            signer = self.keychain.get_signer(kwargs)
        if 'meta_info' in kwargs:
            meta_info = kwargs['meta_info']
        else:
            meta_info = MetaInfo.from_dict(kwargs)
        return make_data(name, meta_info, content, signer=signer)

    def put_data(self,
                 name: NonStrictName,
                 content: Optional[BinaryStr] = None,
                 **kwargs):
        r"""
        Publish a Data packet.

        :param name: the Name.
        :type name: :any:`NonStrictName`
        :param content: the Content.
        :type content: Optional[:any:`BinaryStr`]
        :param kwargs: :ref:`label-keyword-arguments`.
        :return: TLV encoded Data packet.
        """
        self.put_raw_packet(self.prepare_data(name, content, **kwargs))

    def express_interest(
        self,
        name: NonStrictName,
        app_param: Optional[BinaryStr] = None,
        validator: Optional[Validator] = None,
        need_raw_packet: bool = False,
        **kwargs
    ) -> Coroutine[Any, None, Tuple[FormalName, MetaInfo,
                                    Optional[BinaryStr]]]:
        r"""
        Express an Interest packet.

        The Interest packet is sent immediately and a coroutine used to get the result is returned.
        Awaiting on what is returned will block until the Data is received and return that Data.
        An exception is raised if unable to receive the Data.

        :param name: the Name.
        :type name: :any:`NonStrictName`
        :param app_param: the ApplicationParameters.
        :type app_param: Optional[:any:`BinaryStr`]
        :param validator: the Validator used to verify the Data received.
        :type validator: Optional[:any:`Validator`]
        :param need_raw_packet: if True, return the raw Data packet with TL.
        :type need_raw_packet: bool
        :param kwargs: :ref:`label-keyword-arguments`.
        :return: A tuple of (Name, MetaInfo, Content) after ``await``.
            If need_raw_packet is True, return a tuple (Name, MetaInfo, Content, RawPacket).
        :rtype: Coroutine[Any, None, Tuple[:any:`FormalName`, :any:`MetaInfo`, Optional[:any:`BinaryStr`]]]

        The following exception is raised by ``express_interest``:

        :raises NetworkError: the face to NFD is down before sending this Interest.

        The following exceptions are raised by the coroutine returned:

        :raises InterestNack: an NetworkNack is received.
        :raises InterestTimeout: time out.
        :raises ValidationFailure: unable to validate the Data packet.
        :raises InterestCanceled: the face to NFD is shut down after sending this Interest.
        """
        if not self.face.running:
            raise NetworkError('cannot send packet before connected')
        if 'signer' in kwargs:
            signer = kwargs['signer']
        elif app_param is not None:
            signer = self.keychain.get_signer(kwargs)
        else:
            signer = None
        if 'interest_param' in kwargs:
            interest_param = kwargs['interest_param']
        else:
            if 'nonce' not in kwargs:
                kwargs['nonce'] = gen_nonce()
            interest_param = InterestParam.from_dict(kwargs)
        interest, final_name = make_interest(name,
                                             interest_param,
                                             app_param,
                                             signer=signer,
                                             need_final_name=True)
        future = aio.get_event_loop().create_future()
        node = self._int_tree.setdefault(final_name, InterestTreeNode())
        node.append_interest(future, interest_param)
        self.face.send(interest)
        return self._wait_for_data(future, interest_param.lifetime, final_name,
                                   node, validator, need_raw_packet)

    async def _wait_for_data(self, future: aio.Future, lifetime: int,
                             name: FormalName, node: InterestTreeNode,
                             validator: Validator, need_raw_packet: bool):
        lifetime = 100 if lifetime is None else lifetime
        try:
            name, meta_info, content, sig, raw_packet = await aio.wait_for(
                future, timeout=lifetime / 1000.0)
        except aio.TimeoutError:
            if node.timeout(future):
                del self._int_tree[name]
            raise InterestTimeout()
        except aio.CancelledError:
            raise InterestCanceled()
        if validator is None:
            validator = self.data_validator
        if self.pub_key is None:
            if await validator(name, sig):
                if need_raw_packet:
                    return name, meta_info, content, raw_packet
                else:
                    return name, meta_info, content
            else:
                raise ValidationFailure(name, meta_info, content)
        else:
            if await validator(name, sig, self.pub_key):
                if need_raw_packet:
                    return name, meta_info, content, raw_packet
                else:
                    return name, meta_info, content
            else:
                raise ValidationFailure(name, meta_info, content)

    async def main_loop(self, after_start: Awaitable = None):
        """
        The main loop of NDNApp.

        :param after_start: the coroutine to start after connection to NFD is established.
        """
        async def starting_task():
            for name, route, validator, need_raw_packet, need_sig_ptrs in self._autoreg_routes:
                await self.register(name, route, validator, need_raw_packet,
                                    need_sig_ptrs)
            if after_start:
                try:
                    await after_start
                except Exception:
                    self.face.shutdown()
                    raise

        try:
            await self.face.open()
        except (FileNotFoundError, ConnectionError, OSError, PermissionError):
            if after_start:
                if isinstance(after_start, Coroutine):
                    after_start.close()
                elif isinstance(after_start, aio.Task) or isinstance(
                        after_start, aio.Future):
                    after_start.cancel()
            raise
        task = aio.ensure_future(starting_task())
        logging.debug('Connected to NFD node, start running...')
        await self.face.run()
        self.face.shutdown()
        self._clean_up()
        await task

    def _clean_up(self):
        for node in self._int_tree.itervalues():
            node.cancel()
        self._prefix_tree.clear()
        self._int_tree.clear()

    def shutdown(self):
        """
        Manually shutdown the face to NFD.
        """
        logging.info('Manually shutdown')
        self.face.shutdown()

    def run_forever(self, after_start: Awaitable = None) -> bool:
        """
        A non-async wrapper of :meth:`main_loop`.

        :param after_start: the coroutine to start after connection to NFD is established.

        :examples:
            .. code-block:: python3

                app = NDNApp()

                if __name__ == '__main__':
                    app.run_forever(after_start=main())
        """
        task = self.main_loop(after_start)
        try:
            aio.get_event_loop().run_until_complete(task)
            ret = True
        except KeyboardInterrupt:
            logging.info('Receiving Ctrl+C, shutdown')
            ret = False
        finally:
            self.face.shutdown()
        logging.debug('Face is down now')
        return ret

    def route(self,
              name: NonStrictName,
              validator: Optional[Validator] = None,
              need_raw_packet: bool = False,
              need_sig_ptrs: bool = False):
        """
        A decorator used to register a permanent route for a specific prefix.

        This function is non-blocking and can be called at any time.
        If it is called before connecting to NFD, NDNApp will remember this route and
        automatically register it every time a connection is established.
        Failure to register this route with NFD will be ignored.

        The decorated function should accept 3 arguments: Name, Interest parameters and ApplicationParameters.

        :param name: the Name prefix for this route.
        :type name: :any:`NonStrictName`
        :param validator: the Validator used to validate incoming Interests.
            An Interest without ApplicationParameters and SignatureInfo will be considered valid without
            calling the validator.
            Interests with a malformed ParametersSha256DigestComponent will be dropped before reaching the validator.
            Otherwise, NDNApp will try to validate the Interest with the validator.
            Interests that fail validation will be dropped without raising any exception.
        :type validator: Optional[:any:`Validator`]
        :param need_raw_packet: if True, pass the raw Interest packet to the callback as a keyword argument
            ``raw_packet``.
        :type need_raw_packet: bool
        :param need_sig_ptrs: if True, pass the Signature pointers to the callback as a keyword argument
            ``sig_ptrs``.
        :type need_sig_ptrs: bool

        :examples:
            .. code-block:: python3

                app = NDNApp()

                @app.route('/example/rpc')
                def on_interest(name: FormalName, param: InterestParam, app_param):
                    pass
        """
        name = Name.normalize(name)

        def decorator(func: Route):
            self._autoreg_routes.append(
                (name, func, validator, need_raw_packet, need_sig_ptrs))
            if self.face.running:
                aio.ensure_future(
                    self.register(name, func, validator, need_raw_packet,
                                  need_sig_ptrs))
            return func

        return decorator

    async def register(self,
                       name: NonStrictName,
                       func: Optional[Route],
                       validator: Optional[Validator] = None,
                       need_raw_packet: bool = False,
                       need_sig_ptrs: bool = False) -> bool:
        """
        Register a route for a specific prefix dynamically.

        :param name: the Name prefix for this route.
        :type name: :any:`NonStrictName`
        :param func: the onInterest function for the specified route.
            If ``None``, the NDNApp will only send the register command to the forwarder,
            without setting any callback function.
        :type func: Optional[Callable[[:any:`FormalName`, :any:`InterestParam`, Optional[:any:`BinaryStr`]], ``None``]]
        :param validator: the Validator used to validate incoming Interests.
        :type validator: Optional[:any:`Validator`]
        :param need_raw_packet: if True, pass the raw Interest packet to the callback as a keyword argument
            ``raw_packet``.
        :type need_raw_packet: bool
        :param need_sig_ptrs: if True, pass the Signature pointers to the callback as a keyword argument
            ``sig_ptrs``.
        :type need_sig_ptrs: bool
        :return: ``True`` if the registration succeeded.

        :raises ValueError: the prefix is already registered.
        :raises NetworkError: the face to NFD is down now.
        """
        name = Name.normalize(name)
        if func is not None:
            node = self._prefix_tree.setdefault(name, PrefixTreeNode())
            if node.callback:
                raise ValueError(
                    f'Duplicated registration: {Name.to_str(name)}')
            node.callback = func
            node.extra_param = {
                'raw_packet': need_raw_packet,
                'sig_ptrs': need_sig_ptrs
            }
            if validator:
                node.validator = validator

        # Work around NFD allowing only one command packet signed by a given key for each timestamp value
        async with self._prefix_register_semaphore:
            try:
                _, _, reply = await self.express_interest(make_command(
                    'rib', 'register', name=name),
                                                          lifetime=1000)
                ret = parse_response(reply)
                if ret['status_code'] != 200:
                    logging.error(
                        f'Registration for {Name.to_str(name)} failed: '
                        f'{ret["status_code"]} {bytes(ret["status_text"]).decode()}'
                    )
                    return False
                else:
                    logging.debug(
                        f'Registration for {Name.to_str(name)} succeeded: '
                        f'{ret["status_code"]} {bytes(ret["status_text"]).decode()}'
                    )
                    return True
            except (InterestNack, InterestTimeout, InterestCanceled,
                    ValidationFailure) as e:
                logging.error(
                    f'Registration for {Name.to_str(name)} failed: {e.__class__.__name__}'
                )
                return False

    async def unregister(self, name: NonStrictName) -> bool:
        """
        Unregister a route for a specific prefix.

        :param name: the Name prefix.
        :type name: :any:`NonStrictName`
        :return: ``True`` if the unregistration succeeded.
        """
        name = Name.normalize(name)
        del self._prefix_tree[name]
        try:
            await self.express_interest(make_command('rib',
                                                     'unregister',
                                                     name=name),
                                        lifetime=1000)
            return True
        except (InterestNack, InterestTimeout, InterestCanceled,
                ValidationFailure):
            return False

    def set_interest_filter(self,
                            name: NonStrictName,
                            func: Route,
                            validator: Optional[Validator] = None,
                            need_raw_packet: bool = False,
                            need_sig_ptrs: bool = False):
        """
        Set the callback function for an Interest prefix without sending a register command to the forwarder.

        .. note::
            All callbacks registered by ``set_interest_filter`` are removed when disconnected from
            the forwarder, and will not be added back after reconnection.
            This behaviour is the same as ``register``.
            Therefore, it is strongly recommended to use ``route`` for static routes.
        """
        name = Name.normalize(name)
        node = self._prefix_tree.setdefault(name, PrefixTreeNode())
        if node.callback:
            raise ValueError(f'Duplicated registration: {Name.to_str(name)}')
        node.callback = func
        node.extra_param = {
            'raw_packet': need_raw_packet,
            'sig_ptrs': need_sig_ptrs
        }
        if validator:
            node.validator = validator

    def unset_interest_filter(self, name: NonStrictName):
        """
        Remove the callback function for an Interest prefix without sending an unregister command.

        .. note::
            ``unregister`` will only remove the callback if the callback's name matches exactly
            the route's name.
            This is because there may be one route whose name is the prefix of another.
            To avoid cancelling unexpected routes, neither ``unregister`` nor ``unset_interest_filter``
            behaves in a cascading manner.
            Please remove callbacks manually.
        """
        name = Name.normalize(name)
        del self._prefix_tree[name]

    def _on_nack(self, name: FormalName, nack_reason: int):
        node = self._int_tree[name]
        if node:
            if node.nack_interest(nack_reason):
                del self._int_tree[name]

    async def _on_data(self, name: FormalName, meta_info: MetaInfo,
                       content: Optional[BinaryStr], sig: SignaturePtrs,
                       raw_packet):
        clean_list = []
        for prefix, node in self._int_tree.prefixes(name):
            if node.satisfy((name, meta_info, content, sig, raw_packet),
                            prefix != name):
                clean_list.append(prefix)
        for prefix in clean_list:
            del self._int_tree[prefix]

    async def _on_interest(self, name: FormalName, param: InterestParam,
                           app_param: Optional[BinaryStr], sig: SignaturePtrs,
                           raw_packet: BinaryStr):
        trie_step = self._prefix_tree.longest_prefix(name)
        if not trie_step:
            logging.warning('No route: %s' % name)
            return
        node = trie_step.value
        if node.callback is None:
            logging.warning('No callback: %s' % name)
            return
        if app_param is not None or sig.signature_info is not None:
            if not await params_sha256_checker(name, sig):
                logging.warning('Drop malformed Interest: %s' % name)
                return
        if sig.signature_info is not None:
            validator = node.validator if node.validator else self.int_validator
            valid = await validator(name, sig)
        else:
            valid = True
        if not valid:
            logging.warning('Drop unvalidated Interest: %s' % name)
            return
        if node.extra_param:
            kwargs = {}
            if node.extra_param.get('raw_packet', False):
                kwargs['raw_packet'] = raw_packet
            if node.extra_param.get('sig_ptrs', False):
                kwargs['sig_ptrs'] = sig
            node.callback(name, param, app_param, **kwargs)
        else:
            node.callback(name, param, app_param)
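
An end-to-end usage sketch for NDNApp, assuming a local NFD is running and the default keychain has an identity to sign with; the route name and payload are illustrative. It registers a route, answers Interests with put_data, and fetches the result with express_interest, handling the exceptions documented above.

from typing import Optional
from ndn.app import NDNApp
from ndn.encoding import Name, InterestParam, FormalName, BinaryStr
from ndn.types import InterestNack, InterestTimeout, InterestCanceled, ValidationFailure

app = NDNApp()

@app.route('/example/test')
def on_interest(name: FormalName, param: InterestParam, app_param: Optional[BinaryStr]):
    # Answer every Interest under /example/test with a short, fresh Data packet.
    app.put_data(name, content=b'hello world', freshness_period=1000)

async def main():
    try:
        data_name, meta_info, content = await app.express_interest(
            '/example/test/ping', must_be_fresh=True, can_be_prefix=False, lifetime=4000)
        print(Name.to_str(data_name), bytes(content) if content else None)
    except (InterestNack, InterestTimeout, InterestCanceled, ValidationFailure) as e:
        print('Interest failed:', e.__class__.__name__)

if __name__ == '__main__':
    app.run_forever(after_start=main())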