async def main(app: NDNApp):
    """
    Drive the concurrent fetcher and shut the NDN app down afterwards.

    This wrapper exists because it is the one place responsible for
    calling app.shutdown() once fetching completes.
    :param app: NDNApp
    """
    sem = aio.Semaphore(20)
    fetcher = concurrent_fetcher(app, Name.from_str('/test1.pdf'), 0,
                                 161, sem)
    async for packet_bytes in fetcher:
        parsed = ndn_format_0_3.parse_data(packet_bytes, with_tl=False)
        data_name, meta_info, content, sig = parsed
        print(Name.to_str(data_name))
    app.shutdown()
# Example #2
 def _process_received(self, client: H3Client) -> int:
     """
     Match packets received on *client* against the probed names.

     Every received packet that parses as Data and whose name equals one
     of ``self._names`` records a successful probe result with the RTT
     measured against ``self._t0``; unparseable packets are skipped.
     The client's receive buffer is cleared afterwards.

     :param client: H3Client whose ``received`` packet list is scanned.
     :return: number of probes matched in this pass.
     """
     n = 0
     t1 = time.time()
     for pkt in client.received:
         try:
             data_name, _, _, _ = parse_data(pkt, with_tl=True)
         except Exception:
             # Malformed / non-Data packets are expected here; skip them.
             # (Was a bare ``except:``, which also swallowed
             # KeyboardInterrupt and SystemExit.)
             continue
         for i, name in enumerate(self._names):
             if data_name == name:
                 self._result.probes[i] = ProbeNameResult(
                     ok=True, rtt=(t1 - self._t0[i]) * 1000)
                 n += 1
                 break
     client.received.clear()
     return n
    async def _retry(seq: int):
        """
        Retry 3 times fetching data of the given sequence number or fail.
        :param seq: block_id of data
        """
        nonlocal app, name, semaphore, is_failed, received_or_fail, final_id
        int_name = name[:]
        int_name.append(str(seq))

        trial_times = 0
        while True:
            trial_times += 1
            if trial_times > 3:
                semaphore.release()
                is_failed = True
                received_or_fail.set()
                return
            try:
                print(datetime.now().strftime("%H:%M:%S.%f "), end='')
                print('Express Interest: {}'.format(Name.to_str(int_name)))

                # Get the data name first, and then use the name to get the entire packet value (including sig). This
                # is necessary because express_interest() does not return the sig, which is needed by the repo. An
                # additional decoding step is necessary to obtain the metadata.
                data_name, _, _ = await app.express_interest(
                    int_name,
                    must_be_fresh=True,
                    can_be_prefix=False,
                    lifetime=1000)
                data_bytes = app.get_original_packet_value(data_name)
                (_, meta_info, content,
                 sig) = ndn_format_0_3.parse_data(data_bytes, with_tl=False)

                # Save data and update final_id
                print(datetime.now().strftime("%H:%M:%S.%f "), end='')
                print('Received data: {}'.format(Name.to_str(data_name)))
                seq_to_data_packet[seq] = data_bytes
                if meta_info is not None and meta_info.final_block_id is not None:
                    final_id = Component.to_number(meta_info.final_block_id)
                break
            except InterestNack as e:
                print(f'Nacked with reason={e.reason}')
            except InterestTimeout:
                print(f'Timeout')
        semaphore.release()
        received_or_fail.set()
# Example #4
    async def fetch_file(self, name_at_repo):
        """
        Fetch a file from remote repo, and write to disk.

        Segments are retrieved concurrently (bounded by a semaphore of 10),
        their content concatenated in order, and the result written to a
        file named after the last component of the repo name.  Nothing is
        written when no content was retrieved.

        :param name_at_repo: NonStrictName. The name with which this file
            is stored in the repo.
        """
        semaphore = aio.Semaphore(10)
        b_array = bytearray()
        async for data_bytes in concurrent_fetcher(self.app, name_at_repo, 0,
                                                   None, semaphore):
            (_, _, content, _) = ndn_format_0_3.parse_data(data_bytes,
                                                           with_tl=False)
            b_array.extend(content)

        if len(b_array) > 0:
            # Use the last name component as the on-disk filename.
            filename = Name.to_str(name_at_repo)
            filename = filename.strip().split('/')[-1]
            # Fix: the log line previously printed the literal "(unknown)"
            # instead of the actual target filename.
            logging.info(f'Fetching completed, writing to file {filename}')
            with open(filename, 'wb') as f:
                f.write(b_array)
# Example #5
    async def _process_insert(self, int_name, _int_param, _app_param):
        """
        Process segmented insertion command.

        Replies to the client with status code 100 immediately, then runs
        the data fetching process, stores each retrieved segment, and
        finally records the outcome (200 on success, 400 on failure) in
        ``self.m_processes`` under a freshly generated process id.

        :param int_name: Interest name carrying the encoded command parameter.
        :param _int_param: unused Interest parameters.
        :param _app_param: unused application parameters.
        """
        try:
            cmd_param = self.decode_cmd_param_bytes(int_name)
        except DecodeError:
            logging.info('Parameter interest blob decode failed')
            # TODO: return response
            return

        name = cmd_param.name
        start_block_id = cmd_param.start_block_id
        # end_block_id may be None: fetch until FinalBlockId is reached.
        end_block_id = cmd_param.end_block_id

        logging.info(
            f'Write handle processing insert command: {name}, {start_block_id}, {end_block_id}'
        )

        # Reply to client with status code 100
        process_id = random.randint(0, 0x7fffffff)
        self.m_processes[process_id] = RepoCommandResponse()
        self.m_processes[process_id].status_code = 100
        self.m_processes[process_id].process_id = process_id
        self.m_processes[process_id].insert_num = 0
        self.reply_to_cmd(int_name, self.m_processes[process_id])

        # Start data fetching process
        self.m_processes[process_id].status_code = 300
        semaphore = aio.Semaphore(10)
        block_id = start_block_id
        async for data_bytes in concurrent_fetcher(self.app, name,
                                                   start_block_id,
                                                   end_block_id, semaphore):
            # Obtain data name by parsing data
            (data_name, _, _, _) = ndn_format_0_3.parse_data(data_bytes,
                                                             with_tl=False)
            self.storage.put(Name.to_str(data_name), data_bytes)
            # Fix: the unguarded ``assert block_id <= end_block_id`` raised
            # TypeError (int <= None) on open-ended inserts, where
            # end_block_id is legitimately None.
            if end_block_id is not None:
                assert block_id <= end_block_id
            block_id += 1

        # Insert is successful if all packets are retrieved, or if end_block_id is not set
        insert_num = block_id - start_block_id
        if end_block_id is None or block_id == end_block_id + 1:
            self.m_processes[process_id].status_code = 200
            logging.info('Segment insertion success, {} items inserted'.format(
                insert_num))
        else:
            self.m_processes[process_id].status_code = 400
            logging.info('Segment insertion failure, {} items inserted'.format(
                insert_num))
        self.m_processes[process_id].insert_num = insert_num

        # Let read handle listen for this prefix
        existing = CommandHandle.update_prefixes_in_storage(self.storage, name)
        if not existing:
            self.m_read_handle.listen(name)

        # Delete process state after some time
        await self.schedule_delete_process(process_id)