Esempio n. 1
0
    def test_normalize():
        """Name.normalize must yield the same FormalName (a list of
        TLV-encoded components) for TLV bytes, URI strings, and mixed lists."""
        # Input as a fully TLV-encoded Name (type 0x07, length 0x0c).
        name = b'\x07\x0c\x08\x01a\x08\x01b\x08\x01c\x08\x01\x1d'
        assert Name.normalize(name) == [b'\x08\x01a', b'\x08\x01b', b'\x08\x01c', b'\x08\x01\x1d']

        # Input as a URI string mixing an explicit type prefix (8=), plain
        # text, and percent-encoding (%63 == 'c').
        name = '/8=a/b/%63/\x1d'
        assert Name.normalize(name) == [b'\x08\x01a', b'\x08\x01b', b'\x08\x01c', b'\x08\x01\x1d']

        # Input as a list mixing strings and an already-encoded component.
        name = ['8=a', 'b', b'\x08\x01c', '\x1d']
        assert Name.normalize(name) == [b'\x08\x01a', b'\x08\x01b', b'\x08\x01c', b'\x08\x01\x1d']
Esempio n. 2
0
 def __init__(self, app: NDNApp, command_client: CommandClient,
              node_name: NonStrictName, repo_prefix: NonStrictName):
     """
     Store the app/client handles and precompute this node's prefixes.

     :param app: the NDNApp instance used for networking.
     :param command_client: client used to issue repo commands.
     :param node_name: this node's name, appended to the repo prefix.
     :param repo_prefix: the repo's name prefix.
     """
     self.app = app
     self.command_client = command_client
     self.node_name = node_name
     self.repo_prefix = repo_prefix
     # node_prefix = <repo_prefix> + <node_name>
     node_name_name = Name.normalize(self.node_name)
     self.node_prefix = Name.normalize(self.repo_prefix)
     self.node_prefix.extend(node_name_name)
     # Bug fix: the original assigned node_recall_prefix = node_prefix and
     # then appended 'recall', which mutated node_prefix too (both names
     # referenced the same list). Build a fresh list instead.
     self.node_recall_prefix = self.node_prefix + [Component.from_str('recall')]
Esempio n. 3
0
    def set_interest_filter(self,
                            name: NonStrictName,
                            func: Route,
                            validator: Optional[Validator] = None,
                            need_raw_packet: bool = False,
                            need_sig_ptrs: bool = False):
        """
        Attach ``func`` as the callback for an Interest prefix without sending
        a register command to the forwarder.

        .. note::
            Callbacks installed via ``set_interest_filter`` are dropped when
            the connection to the forwarder is lost, and are not restored
            after reconnection (the same behaviour as ``register``).
            Prefer ``route`` for static routes.

        :raises ValueError: if a callback is already registered for ``name``.
        """
        formal_name = Name.normalize(name)
        node = self._prefix_tree.setdefault(formal_name, PrefixTreeNode())
        if node.callback:
            raise ValueError(f'Duplicated registration: {Name.to_str(formal_name)}')
        node.callback = func
        node.extra_param = {'raw_packet': need_raw_packet, 'sig_ptrs': need_sig_ptrs}
        if validator:
            node.validator = validator
Esempio n. 4
0
def main():
    """Serve a local file as segmented, pre-signed NDN Data packets under
    ``<name>/<version>``."""
    if len(sys.argv) <= 2:
        print(f'Usage: {sys.argv[0]} <name> <file>')
        exit(0)
    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        style='{')

    app = NDNApp()
    name = Name.normalize(sys.argv[1])
    # Append a version component derived from the current timestamp.
    name.append(Component.from_version(timestamp()))

    with open(sys.argv[2], 'rb') as f:
        data = f.read()
        # Ceiling division: number of SEGMENT_SIZE-byte chunks.
        seg_cnt = (len(data) + SEGMENT_SIZE - 1) // SEGMENT_SIZE
        # Pre-sign one Data packet per segment up front, so Interests can be
        # answered with put_raw_packet without re-signing on the hot path.
        packets = [app.prepare_data(name + [Component.from_segment(i)],
                                    data[i*SEGMENT_SIZE:(i+1)*SEGMENT_SIZE],
                                    freshness_period=10000,
                                    final_block_id=Component.from_segment(seg_cnt - 1))
                   for i in range(seg_cnt)]
    print(f'Created {seg_cnt} chunks under name {Name.to_str(name)}')

    @app.route(name)
    def on_interest(int_name, _int_param, _app_param):
        # Interests without a trailing segment component fetch segment 0.
        if Component.get_type(int_name[-1]) == Component.TYPE_SEGMENT:
            seg_no = Component.to_number(int_name[-1])
        else:
            seg_no = 0
        if seg_no < seg_cnt:
            app.put_raw_packet(packets[seg_no])

    app.run_forever()
Esempio n. 5
0
    def update_prefixes_in_storage(storage: Storage, prefix) -> bool:
        """
        Record ``prefix`` in the database unless it, or a shorter registered
        prefix of it, is already present.

        :param storage: Storage backend holding the serialized prefix list.
        :param prefix: NonStrictName to register.
        :return: True if the prefix was already covered by a registration,
            False if it was newly added.
        """
        registered = PrefixesInStorage()
        serialized = storage.get('prefixes')
        if serialized:
            registered = PrefixesInStorage.parse(serialized)

        prefix_str = Name.to_str(prefix)
        # Covered if an identical, or strictly shorter, prefix is registered.
        for entry in registered.prefixes:
            entry_str = Name.to_str(entry)
            if prefix_str == entry_str or prefix_str.startswith(entry_str):
                return True

        registered.prefixes.append(Name.normalize(prefix))
        storage.put('prefixes', bytes(registered.encode()))
        logging.info(f'Added new prefix into the database: {prefix_str}')
        return False
    def get_data_packet(self, name: NonStrictName, can_be_prefix: bool=False,
                        must_be_fresh: bool=False) -> Optional[bytes]:
        """
        Get a data packet named ``name``.

        :param name: NonStrictName. The name of the data packet.
        :param can_be_prefix: bool. If true, use prefix match instead of exact match.
        :param must_be_fresh: bool. If true, ignore expired data.
        :return: The value of the data packet, or None if not found.
        """
        name = Name.normalize(name)
        # cache lookup
        try:
            if not can_be_prefix:
                data, expire_time_ms = self.cache[name]
                if not must_be_fresh or expire_time_ms > self._time_ms():
                    logging.info('get from cache')
                    return data
            else:
                # Iterate over cached packets under the prefix, returning the
                # first one that satisfies the freshness constraint; when the
                # iterator is exhausted, StopIteration falls through to the
                # storage lookup below.
                it = self.cache.itervalues(prefix=name, shallow=True)
                while True:
                    data, expire_time_ms = next(it)
                    if not must_be_fresh or expire_time_ms > self._time_ms():
                        logging.info('get from cache')
                        return data
        # not in cache, lookup in storage
        except (KeyError, StopIteration):
            key = self._get_name_bytes_wo_tl(name)
            return self._get(key, can_be_prefix, must_be_fresh)
 async def _subscribe_helper(self, topic: NonStrictName, cb: callable):
     """
     Async helper for ``subscribe()``: record the callback for ``topic`` and
     register a route for ``<topic>/notify`` Interests.
     """
     logging.info(f'subscribing to topic: {Name.to_str(topic)}')
     topic = Name.normalize(topic)
     # NOTE(review): Name.normalize returns a FormalName (a Python list),
     # which is not hashable — this assumes self.topic_to_cb supports list
     # keys (e.g. a name trie) rather than a plain dict. TODO confirm.
     self.topic_to_cb[topic] = cb
     self.app.route(topic + ['notify'])(self._on_notify_interest)
Esempio n. 8
0
 def put_data_packet(self, name:NonStrictName, data_packet:bytes) -> None:
     """Cache ``data_packet`` under ``name``, with an expiry time derived
     from the packet's FreshnessPeriod (if any)."""
     _, meta_info, _, _ = parse_data(data_packet)
     expiry = self.time_ms()
     if meta_info.freshness_period:
         expiry += meta_info.freshness_period
     key = Name.normalize(name)
     self.cache[key] = (data_packet, expiry)
     logging.info(f'SVSyncStorage: cache save {Name.to_str(key)}')
    def unsubscribe(self, topic: NonStrictName):
        """
        Unsubscribe from ``topic``.

        :param topic: NonStrictName. The topic to unsubscribe from.
        """
        logging.info(f'unsubscribing topic: {Name.to_str(topic)}')
        topic = Name.normalize(topic)
        # NOTE(review): the normalized name is a list (unhashable); this
        # assumes self.topic_to_cb supports list keys — confirm against the
        # subscribe path. The deletion is unguarded, so an unknown topic
        # presumably raises KeyError.
        del self.topic_to_cb[topic]
Esempio n. 10
0
    async def publish(self, topic: NonStrictName, msg: bytes):
        """
        Publish ``msg`` to ``topic``. Make several attempts until the subscriber returns a\
            response.

        :param topic: NonStrictName. The topic to publish ``msg`` to.
        :param msg: bytes. The message to publish. The pub-sub API does not make any assumptions on\
            the format of this message.
        :return: Return true if received response from a subscriber.
        """
        logging.info(f'publishing a message to topic: {Name.to_str(topic)}')
        # generate a nonce for each message. Nonce is a random sequence of bytes
        nonce = os.urandom(4)
        # wrap msg in a data packet named /<publisher_prefix>/msg/<topic>/nonce
        data_name = Name.normalize(self.prefix + ['msg'] + topic +
                                   [Component.from_bytes(nonce)])
        self.published_data[data_name] = self.app.prepare_data(data_name, msg)

        # prepare notify interest
        int_name = topic + ['notify']
        app_param = NotifyAppParam()
        app_param.publisher_prefix = self.prefix
        app_param.notify_nonce = nonce
        if self.forwarding_hint:
            app_param.publisher_fwd_hint = ForwardingHint()
            app_param.publisher_fwd_hint.name = self.forwarding_hint

        # Schedule cleanup of the prepared data even if no subscriber ever
        # responds (5 — presumably seconds; TODO confirm the unit).
        aio.ensure_future(self._erase_publisher_state_after(data_name, 5))

        # express notify interest
        n_retries = 3
        is_success = False
        while n_retries > 0:
            try:
                logging.debug(
                    f'sending notify interest: {Name.to_str(int_name)}')
                # Only the reply's arrival matters; its contents are ignored.
                _, _, _ = await self.app.express_interest(int_name,
                                                          app_param.encode(),
                                                          must_be_fresh=False,
                                                          can_be_prefix=False)
                is_success = True
                break
            except InterestNack as e:
                # Nack: back off for a second before retrying.
                logging.debug(f'Nacked with reason: {e.reason}')
                await aio.sleep(1)
                n_retries -= 1
            except InterestTimeout:
                logging.debug(f'Timeout')
                n_retries -= 1

        # if receiving notify response, the subscriber has finished fetching msg
        if is_success:
            logging.debug(f'received notify response for: {data_name}')
        else:
            logging.debug(f'did not receive notify response for: {data_name}')
        # Erase publisher state immediately now that the exchange is over.
        await self._erase_publisher_state_after(data_name, 0)
        return is_success
Esempio n. 11
0
 def remove_data_packet(self, name:NonStrictName) -> bool:
     """Delete the cached packet for ``name``.

     :return: True if an entry was removed, False if none existed."""
     key = Name.normalize(name)
     try:
         del self.cache[key]
     except KeyError:
         return False
     return True
 def decode_sql(name, catalog_prefix):
     """Extract the list of SQL strings embedded in ``name`` just after
     ``catalog_prefix``.

     :param name: FormalName carrying an SQL TLV component.
     :param catalog_prefix: NonStrictName prefix preceding the SQL component.
     :return: list of decoded SQL strings.
     """
     # The SQL payload lives in the component immediately after the prefix.
     sql_component = name[len(Name.normalize(catalog_prefix))]
     parsed = SqlsTlvModel.parse(Component.get_value(sql_component).tobytes())
     # Decode every bytearray entry back into a str.
     return [entry.tobytes().decode() for entry in parsed.sqls]
Esempio n. 13
0
    def route(self,
              name: NonStrictName,
              validator: Optional[Validator] = None,
              need_raw_packet: bool = False,
              need_sig_ptrs: bool = False):
        """
        A decorator used to register a permanent route for a specific prefix.

        This function is non-blocking and can be called at any time.
        If it is called before connecting to NFD, NDNApp will remember this route and
        automatically register it every time when a connection is established.
        Failure in registering this route to NFD will be ignored.

        The decorated function should accept 3 arguments: Name, Interest parameters and ApplicationParameters.

        :param name: the Name prefix for this route.
        :type name: :any:`NonStrictName`
        :param validator: the Validator used to validate coming Interests.
            An Interest without ApplicationParameters and SignatureInfo will be considered valid without
            calling validator.
            Interests with malformed ParametersSha256DigestComponent will be dropped before going into the validator.
            Otherwise NDNApp will try to validate the Interest with the validator.
            Interests which fail to be validated will be dropped without raising any exception.
        :type validator: Optional[:any:`Validator`]
        :param need_raw_packet: if True, pass the raw Interest packet to the callback as a keyword argument
            ``raw_packet``.
        :type need_raw_packet: bool
        :param need_sig_ptrs: if True, pass the Signature pointers to the callback as a keyword argument
            ``sig_ptrs``.
        :type need_sig_ptrs: bool

        :examples:
            .. code-block:: python3

                app = NDNApp()

                @app.route('/example/rpc')
                def on_interest(name: FormalName, param: InterestParam, app_param):
                    pass
        """
        # Normalize once here so the closure below captures the FormalName.
        name = Name.normalize(name)

        def decorator(func: Route):
            # Remember the route so it is (re-)registered on every connection.
            self._autoreg_routes.append(
                (name, func, validator, need_raw_packet, need_sig_ptrs))
            # If already connected, also register immediately in the background.
            if self.face.running:
                aio.ensure_future(
                    self.register(name, func, validator, need_raw_packet,
                                  need_sig_ptrs))
            return func

        return decorator
 def remove_name_from_set_in_storage(set_name: str, storage: Storage, name: NonStrictName) -> bool:
     """
     Remove ``name`` from set ``set_name`` in the storage.

     :param set_name: str. Key under which the serialized set is stored.
     :param storage: Storage backend.
     :param name: NonStrictName to remove.
     :return: Returns true if ``name`` exists in set ``set_name`` and is being successfully\
         removed.
     """
     names_msg = RepeatedNames()
     ret = storage._get(set_name.encode('utf-8'))
     if ret:
         names_msg = RepeatedNames.parse(ret)

     name = Name.normalize(name)
     if name in names_msg.names:
         # ``name`` was normalized above; the original re-normalized it here,
         # which was redundant work producing an equal value.
         names_msg.names.remove(name)
         names_msg_bytes = names_msg.encode()
         storage._put(set_name.encode('utf-8'), bytes(names_msg_bytes))
         return True
     else:
         return False
Esempio n. 15
0
 def on_sd_adv_interest(name: FormalName, param: InterestParam, app_param: Optional[BinaryStr]):
     """
     Handle a service-discovery advertisement Interest: parse the advertised
     services from app_param and refresh their expiry times.

     NOTE(review): references ``self`` without taking it as a parameter, so
     this must be a closure defined inside a method — confirm in context.
     """
     # prefix = /<home-prefix>/<SD=1>/<ADV=0>
     locator = name[3:-1]
     # First 4 bytes of app_param: big-endian uint32 freshness period
     # (presumably milliseconds, matching get_time_now_ms — TODO confirm).
     fresh_period = struct.unpack("!I", app_param[:4])[0]
     # Remaining bytes: one service ID per byte.
     service_ids = [sid for sid in app_param[4:]]
     logging.debug("ON ADV: %s %s %s", locator, fresh_period, service_ids)
     cur_time = self.get_time_now_ms()
     for sid in service_ids:
         # /<home-prefix>/<SD=1>/<service>/<locator>
         sname = [self.system_prefix, bytearray(b'\x08\x011'), sid, locator]
         sname = Name.normalize(sname)
         logging.debug("SNAME: %s", sname)
         # Record/refresh the service's absolute expiration time.
         self.real_service_list[sname] = cur_time + fresh_period
    def remove_prefixes_in_storage(storage: Storage, prefix):
        """
        Remove a prefix from the database.

        :param storage: Storage backend holding the serialized prefix list.
        :param prefix: NonStrictName to remove.
        :return: True if the prefix was present and has been removed.
        """
        prefixes_msg = PrefixesInStorage()
        ret = storage._get(b'prefixes')
        if ret:
            prefixes_msg = PrefixesInStorage.parse(ret)

        prefix = Name.normalize(prefix)
        if prefix in prefixes_msg.prefixes:
            # ``prefix`` was normalized above; the original re-normalized it
            # here, which was redundant work producing an equal value.
            prefixes_msg.prefixes.remove(prefix)
            prefixes_msg_bytes = prefixes_msg.encode()
            storage._put(b'prefixes', bytes(prefixes_msg_bytes))
            logging.info(
                f'Removed existing prefix from the database: {Name.to_str(prefix)}'
            )
            return True
        else:
            return False
Esempio n. 17
0
    def unset_interest_filter(self, name: NonStrictName):
        """
        Drop the callback registered for an Interest prefix without sending an
        unregister command to the forwarder.

        .. note::
            Removal is exact-match only: a callback is deleted only when its
            name equals the route's name. Because one route's name may be the
            prefix of another's, neither ``unregister`` nor
            ``unset_interest_filter`` behaves in a cascading manner — remove
            callbacks manually where needed.
        """
        formal_name = Name.normalize(name)
        del self._prefix_tree[formal_name]
Esempio n. 18
0
async def main():
    """Demonstrate a schema-tree application: serve a file under /file/<name>
    when a path is supplied, otherwise fetch and print it."""
    if len(sys.argv) <= 1:
        print(f'Usage: {sys.argv[0]} <name> [<file-path>]')
        exit(0)

    # Make schema tree
    root = Node()
    root['/<IDName>/KEY/<KeyID>/self/<CertID>'] = Node()
    root['/file/<FileName>'] = RDRNode()

    # Set policies
    # NOTE(review): ``app`` is not defined in this function — presumably a
    # module-level NDNApp instance; confirm in the surrounding file.
    id_name = Name.Component.get_value(app.keychain.default_identity().name[0])
    cache = MemoryCache()
    root.set_policy(policy.Cache, MemoryCachePolicy(cache))
    # Only accept /file data signed by a key whose identity matches ours.
    root['/file/<FileName>'].set_policy(
        policy.DataValidator,
        SignedBy(root['/<IDName>/KEY/<KeyID>'],
                 subject_to=lambda _, vars: vars['IDName'] == id_name))

    # Store the certificate
    cert = app.keychain.default_identity().default_key().default_cert()
    await cache.save(Name.normalize(cert.name), cert.data)

    # Attach the tree to the face
    await root.attach(app, '/')

    filename = sys.argv[1]
    if len(sys.argv) > 2:
        # If it's the producer
        filepath = sys.argv[2]
        print(f'Read (unknown) from file {filepath}...')
        # Provider with file
        with open(filepath, 'rb') as f:
            data = f.read()
            await root.match('/file/' + filename).provide(
                data, freshness_period=60000)
        # Wait for it to be cached
        await aio.sleep(0.1)
    else:
        # If it's the consumer
        print(f'Try to fetch (unknown)...')

    # The file is ready!
    data, metadata = await root.match('/file/' + filename).need()
    print(f'Content size: {len(data)}')
    print(f'Content: {data[:70]} ...')
    print(f'Number of segments: {metadata["block_count"]}')
    print(f'Serving (unknown)')
    def remove_data_packet(self, name: NonStrictName) -> bool:
        """
        Remove a data packet named ``name`` from both the in-memory cache and
        the underlying storage.

        :param name: NonStrictName. The name of the data packet.
        :return: True if a data packet was removed from either layer.
        """
        key = Name.normalize(name)
        removed = False
        try:
            del self.cache[key]
            removed = True
        except KeyError:
            pass
        # Always attempt the persistent erase too (keyed without the TL header).
        return self._remove(self._get_name_bytes_wo_tl(key)) or removed
Esempio n. 20
0
 def get_data_packet(self, name:NonStrictName, can_be_prefix:bool=False, must_be_fresh:bool=False) -> Optional[bytes]:
     """
     Return the cached data packet for ``name``, or None if absent/expired.

     :param name: NonStrictName. The name of the data packet.
     :param can_be_prefix: bool. If true, use prefix match instead of exact match.
     :param must_be_fresh: bool. If true, skip entries past their expiry time.
     """
     name = Name.normalize(name)
     try:
         if not can_be_prefix:
             data, expire_time_ms = self.cache[name]
             if not must_be_fresh or expire_time_ms > self.time_ms():
                 logging.info('SVSyncStorage: get from cache')
                 return data
         else:
             # Scan all cached entries under the prefix and return the first
             # that satisfies the freshness constraint.
             it = self.cache.itervalues(prefix=name, shallow=True)
             while True:
                 data, expire_time_ms = next(it)
                 if not must_be_fresh or expire_time_ms > self.time_ms():
                     logging.info('SVSyncStorage: get from cache')
                     return data
     except (KeyError, StopIteration):
         return None
Esempio n. 21
0
 async def remove_device(request):
     """
     HTTP handler: remove a device identity from the keychain and purge its
     entries from the controller's device and service lists.

     :param request: aiohttp request whose JSON body carries
         ``deviceIdentityName``.
     :return: JSON response with ``st_code`` 200.
     """
     data = await request.json()
     # delete from keychain
     try:
         # TODO bring this line back when the identity delete bug is fixed
         # controller.app.keychain.del_identity(data['deviceIdentityName'])
         # Security fix: the original used os.system() with string
         # concatenation of request-supplied input, which allows shell
         # command injection. Run the command as an argument vector with
         # shell disabled instead.
         import subprocess
         subprocess.run(['ndnsec-delete', data['deviceIdentityName']], check=False)
     except KeyError:
         pass  # great, the key has already been removed
     # delete from database
     controller.device_list.devices = [device for device in controller.device_list.devices
                                       if Name.to_str(device.device_identity_name) != data['deviceIdentityName']]
     # delete service info: drop services whose name components match the identity
     temp_name = Name.from_str(data['deviceIdentityName'])
     controller.service_list.services = [service for service in controller.service_list.services
                                         if Name.normalize(service.service_name)[2:4] != temp_name[1:3]]
     return web.json_response({"st_code": 200})
    def put_data_packet(self, name: NonStrictName, data: bytes):
        """
        Insert a data packet named ``name`` with value ``data``.

        The packet is parsed to obtain its FreshnessPeriod; the stored
        expiration time is the current time plus that period.

        :param name: NonStrictName. The name of the data packet.
        :param data: bytes. The value of the data packet.
        """
        _, meta_info, _, _ = parse_data(data)
        expire_time_ms = self._time_ms()
        if meta_info.freshness_period:
            expire_time_ms += meta_info.freshness_period

        # write data packet and freshness_period to cache
        key = Name.normalize(name)
        self.cache[key] = (data, expire_time_ms)
        logging.info(f'Cache save: {Name.to_str(key)}')
Esempio n. 23
0
    async def unregister(self, name: NonStrictName) -> bool:
        """
        Unregister a route for a specific prefix.

        :param name: the Name prefix.
        :type name: :any:`NonStrictName`
        :return: ``True`` if the forwarder answered the unregister command.
        """
        formal_name = Name.normalize(name)
        # Drop the local callback first; then tell the forwarder.
        del self._prefix_tree[formal_name]
        command = make_command('rib', 'unregister', name=formal_name)
        try:
            await self.express_interest(command, lifetime=1000)
        except (InterestNack, InterestTimeout, InterestCanceled,
                ValidationFailure):
            return False
        return True
Esempio n. 24
0
def main():
    """Serve a local file as NDN segments, replying to Interests that carry
    a JSON ApplicationParameters payload."""
    if len(sys.argv) <= 2:
        print(f'Usage: {sys.argv[0]} <name> <file>')
        exit(0)
    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        style='{')

    app = NDNApp()
    name = Name.normalize(sys.argv[1])

    with open(sys.argv[2], 'rb') as f:
        data = f.read()
        # Ceiling division: number of SEGMENT_SIZE-byte chunks.
        seg_cnt = (len(data) + SEGMENT_SIZE - 1) // SEGMENT_SIZE
        packets = [
            data[i * SEGMENT_SIZE:(i + 1) * SEGMENT_SIZE]
            for i in range(seg_cnt)
        ]
    print(f'Created {seg_cnt} chunks under name {Name.to_str(name)}')

    @app.route(name)
    def on_interest(inst_name: FormalName, inst_param: InterestParam,
                    app_param: BinaryStr):
        # NOTE(review): enc_session_key and nonce are decoded from the
        # Interest's JSON payload but never used below — presumably intended
        # for encrypting the reply; confirm whether this is dead code.
        d = json.loads(app_param.tobytes().decode())
        enc_session_key = base64.b64decode(d['enc_session_key'])
        nonce = base64.b64decode(d['nonce'])

        # Interests without a trailing segment component fetch segment 0.
        if Component.get_type(inst_name[-1]) == Component.TYPE_SEGMENT:
            seg_no = Component.to_number(inst_name[-1])
        else:
            seg_no = 0

        if seg_no < seg_cnt:
            app.put_data(inst_name,
                         packets[seg_no],
                         final_block_id=Component.from_segment(seg_cnt - 1),
                         freshness_period=10000)

    app.run_forever()
    def add_prefixes_in_storage(storage: Storage, prefix) -> bool:
        """
        Add a new prefix into the database.

        :param storage: Storage backend holding the serialized prefix list.
        :param prefix: NonStrictName to add.
        :return: True if the prefix had been registered before, False if it
            was newly added.
        """
        stored = PrefixesInStorage()
        raw = storage._get(b'prefixes')
        if raw:
            stored = PrefixesInStorage.parse(raw)

        prefix = Name.normalize(prefix)
        if prefix in stored.prefixes:
            return True
        stored.prefixes.append(prefix)
        storage._put(b'prefixes', bytes(stored.encode()))
        logging.info(
            f'Added new prefix into the database: {Name.to_str(prefix)}')
        return False
Esempio n. 26
0
 def encode_file_into_packets(self, file_info):
     """
     Split the file described by ``file_info`` into NDN segments and return a
     list of EncodedPacket objects, one per segment.
     """
     num_segs = file_info.num_segs
     logging.info("Encoding {} into {} packets".format(file_info.filename, file_info.num_segs))
     encoded_packets = []
     with open(file_info.relative_path, 'rb') as infile:
         # Split the URI-like name into components, dropping empty strings
         # produced by leading or duplicate slashes.
         name_components = [ comp for comp in file_info.get_ndn_name().split("/") if comp ]
         name = Name.normalize(name_components)

         logging.info("Segmenting file under {}".format(Name.to_str(name)))

         data = infile.read()
         # Every packet carries the same FinalBlockId: the last segment number.
         final_block_id = Component.from_segment(num_segs-1)

         for i in range(num_segs):
             full_name = name + [Component.from_segment(i)]
             encoded_packet = EncodedPacket(Name.to_str(full_name), full_name,
                                             data[i*file_info.block_size:(i+1)*file_info.block_size],
                                             self.freshness_period,
                                             final_block_id,
                                             self.app)
             logging.info("Encoded packet: {}".format(encoded_packet.original_name))
             encoded_packets.append(encoded_packet)
     return encoded_packets
    def add_name_to_set_in_storage(set_name: str, storage: Storage, name: NonStrictName) -> bool:
        """
        Add ``name`` to set ``set_name`` in the storage.

        Implements a set of Names over the key-value storage interface: the
        set name is the key, and the serialized elements are the value.

        :param set_name: str
        :param storage: Storage
        :param name: NonStrictName
        :return: True if ``name`` was already in set ``set_name``.
        """
        key = set_name.encode('utf-8')
        stored = RepeatedNames()
        raw = storage._get(key)
        if raw:
            stored = RepeatedNames.parse(raw)

        formal = Name.normalize(name)
        if formal in stored.names:
            return True
        stored.names.append(formal)
        storage._put(key, bytes(stored.encode()))
        return False
 async def send_repo_command(self, node_prefix : NonStrictName, verb : str , datainfo : DatainfoTlvModel):
     """
     Send a repo command Interest named <node_prefix>/<verb>/<datainfo> and
     wait for the reply.

     :param node_prefix: prefix of the target repo node.
     :param verb: command verb appended to the prefix (e.g. 'insert').
     :param datainfo: TLV model encoded into the final name component.
     :return: 1 if a Data reply was received, 0 on Nack or timeout.
     """
     # "/a/b" -> list of bytearrays
     name = Name.normalize(node_prefix)
     # "/a/b" -> "/a/b/insert"
     name.append(Component.from_str(verb))
     # Pack the encoded datainfo model into one name component.
     datainfo_name_component = Component.from_bytes(datainfo.encode())
     name.append(datainfo_name_component)
     logging.info('Interest Sent: {}'.format(Name.to_str(name)))
     try:
         data_name, meta_info, content = await self.app.express_interest(name, must_be_fresh=True, can_be_prefix=False, nonce=gen_nonce(), lifetime=1000)
         logging.info('Data Received: {}\n'.format(Name.to_str(data_name)))
         # print(meta_info)
         # print(bytes(content) if content else None)
     except InterestNack as e:
         # A NACK is received
         logging.warning(f'Interest Nacked with reason={e.reason}\n')
         return 0
     except InterestTimeout:
         # Interest times out
         logging.warning(f'Interest Timeout\n')
         return 0
     # results = self.parse_results(content)
     # logging.info(results)
     return 1
Esempio n. 29
0
    async def register(self,
                       name: NonStrictName,
                       func: Optional[Route],
                       validator: Optional[Validator] = None,
                       need_raw_packet: bool = False,
                       need_sig_ptrs: bool = False) -> bool:
        """
        Register a route for a specific prefix dynamically.

        :param name: the Name prefix for this route.
        :type name: :any:`NonStrictName`
        :param func: the onInterest function for the specified route.
            If ``None``, the NDNApp will only send the register command to forwarder,
            without setting any callback function.
        :type func: Optional[Callable[[:any:`FormalName`, :any:`InterestParam`, Optional[:any:`BinaryStr`]], ``None``]]
        :param validator: the Validator used to validate coming Interests.
        :type validator: Optional[:any:`Validator`]
        :return: ``True`` if the registration succeeded.
        :param need_raw_packet: if True, pass the raw Interest packet to the callback as a keyword argument
            ``raw_packet``.
        :type need_raw_packet: bool
        :param need_sig_ptrs: if True, pass the Signature pointers to the callback as a keyword argument
            ``sig_ptrs``.
        :type need_sig_ptrs: bool

        :raises ValueError: the prefix is already registered.
        :raises NetworkError: the face to NFD is down now.
        """
        name = Name.normalize(name)
        # Install the local callback (if any) before contacting NFD, so
        # Interests arriving right after registration are dispatched.
        if func is not None:
            node = self._prefix_tree.setdefault(name, PrefixTreeNode())
            if node.callback:
                raise ValueError(
                    f'Duplicated registration: {Name.to_str(name)}')
            node.callback = func
            node.extra_param = {
                'raw_packet': need_raw_packet,
                'sig_ptrs': need_sig_ptrs
            }
            if validator:
                node.validator = validator

        # Fix the issue that NFD only allows one packet signed by a specific key for a timestamp number
        async with self._prefix_register_semaphore:
            try:
                # Send the RIB register command and check NFD's status code.
                _, _, reply = await self.express_interest(make_command(
                    'rib', 'register', name=name),
                                                          lifetime=1000)
                ret = parse_response(reply)
                if ret['status_code'] != 200:
                    logging.error(
                        f'Registration for {Name.to_str(name)} failed: '
                        f'{ret["status_code"]} {bytes(ret["status_text"]).decode()}'
                    )
                    return False
                else:
                    logging.debug(
                        f'Registration for {Name.to_str(name)} succeeded: '
                        f'{ret["status_code"]} {bytes(ret["status_text"]).decode()}'
                    )
                    return True
            except (InterestNack, InterestTimeout, InterestCanceled,
                    ValidationFailure) as e:
                logging.error(
                    f'Registration for {Name.to_str(name)} failed: {e.__class__.__name__}'
                )
                return False
Esempio n. 30
0
async def segment_fetcher(app: NDNApp,
                          name: NonStrictName,
                          app_param: BinaryStr,
                          timeout=4000,
                          retry_times=3,
                          validator=None,
                          must_be_fresh=True):
    """
    An async-generator to fetch a segmented object. Interests are issued one by one.

    :param app: NDN Application
    :param name: Name prefix of Data
    :param app_param: ApplicationParameters attached to every issued Interest
    :param timeout: Timeout value, in milliseconds
    :param retry_times: Times for retry
    :param validator: Validator
    :param must_be_fresh: MustBeFresh field of Interest
    :return: Data segments in order.
    """
    async def retry(first):
        # Express the Interest up to retry_times times; the first Interest
        # uses CanBePrefix to discover the full Data name.
        nonlocal name
        trial_times = 0
        while True:
            future = app.express_interest(name,
                                          app_param=app_param,
                                          validator=validator,
                                          can_be_prefix=first,
                                          must_be_fresh=must_be_fresh,
                                          lifetime=timeout,
                                          signer=None)
            try:
                return await future
            except InterestTimeout:
                trial_times += 1
                if trial_times >= retry_times:
                    raise

    name = Name.normalize(name)

    # First Interest
    name, meta, content = await retry(True)
    # NOTE(review): unconditionally appending a segment-0 component here
    # appears to make the "not segmented" check below always false (the last
    # component is then always the appended segment) — compare against
    # python-ndn's upstream segment_fetcher and confirm this is intended.
    name += [Component.from_segment(0)]

    # If it's not segmented
    if Component.get_type(name[-1]) != Component.TYPE_SEGMENT:
        yield content
        return

    # If it's segmented
    if Component.to_number(name[-1]) == 0:
        yield content
        if meta.final_block_id == name[-1]:
            return
        seg_no = 1
    else:
        # If it's not segment 0, starting from 0
        seg_no = 0

    # Following Interests
    while True:
        # Replace the trailing segment component and fetch the next segment
        # (exact match from here on).
        name[-1] = Component.from_segment(seg_no)
        name, meta, content = await retry(False)
        yield content
        if meta.final_block_id == name[-1]:
            return
        seg_no += 1