Example #1
    def test_generic():
        comp = b'\x08\x0andn-python'
        assert Component.get_type(comp) == Component.TYPE_GENERIC
        assert Component.to_str(comp) == 'ndn-python'
        assert Component.from_str('ndn-python') == comp
        assert Component.from_str('8=ndn-python') == comp

        comp = b"\x08\x07foo%bar"
        assert Component.to_str(comp) == "foo%25bar"
        assert Component.from_str('foo%25bar') == comp
        assert Component.from_str('8=foo%25bar') == comp

        comp = b'\x08\x04-._~'
        assert Component.to_str(comp) == "-._~"
        assert Component.from_str('-._~') == comp
        assert Component.from_str('8=-._~') == comp

        with pytest.raises(ValueError):
            Component.from_str(":/?#[]@")
        comp = Component.from_bytes(b':/?#[]@')
        assert Component.to_str(comp) == "%3A%2F%3F%23%5B%5D%40"
        assert Component.from_str("%3a%2f%3f%23%5b%5d%40") == comp

        with pytest.raises(ValueError):
            Component.from_str("/")
        assert Component.from_str('') == b'\x08\x00'
        assert Component.from_bytes(b'') == b'\x08\x00'
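These snippets are excerpted from their projects and omit imports; a minimal, self-contained sketch of the setup they assume (python-ndn's Component from ndn.encoding, plus pytest for the test functions) could look like this:

    from ndn.encoding import Component

    comp = Component.from_bytes(b'ndn-python')   # from_bytes defaults to a GenericNameComponent (type 8)
    assert Component.get_type(comp) == Component.TYPE_GENERIC
    assert Component.to_str(comp) == 'ndn-python'
    assert comp == Component.from_str('8=ndn-python')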
Example #2
 def test_compare():
     comps = [
         Component.from_hex('0000000000000000000000000000000000000000000000000000000000000000', 1),
         Component.from_hex('0000000000000000000000000000000000000000000000000000000000000001', 1),
         Component.from_hex('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', 1),
         Component.from_hex('0000000000000000000000000000000000000000000000000000000000000000', 2),
         Component.from_hex('0000000000000000000000000000000000000000000000000000000000000001', 2),
         Component.from_hex('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', 2),
         Component.from_bytes(b'', 0x03),
         Component.from_bytes(b'\x44', 0x03),
         Component.from_bytes(b'\x46', 0x03),
         Component.from_bytes(b'\x41\x41', 0x03),
         Component.from_str(''),
         Component.from_str('D'),
         Component.from_str('F'),
         Component.from_str('AA'),
         Component.from_str('21426='),
         Component.from_str('21426=%44'),
         Component.from_str('21426=%46'),
         Component.from_str('21426=%41%41')
     ]
     for i, lhs in enumerate(comps):
         for j, rhs in enumerate(comps):
             assert (lhs == rhs) == (i == j)
             assert (lhs != rhs) == (i != j)
             assert (lhs <  rhs) == (i <  j)
             assert (lhs <= rhs) == (i <= j)
             assert (lhs >  rhs) == (i >  j)
             assert (lhs >= rhs) == (i >= j)
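The list above is built in NDN canonical order; a short sketch of the ordering rule it exercises (lower TLV type first, then shorter value, then lexicographic value bytes):

    from ndn.encoding import Component

    c1 = Component.from_bytes(b'\x44', 0x03)   # type 3, one-byte value
    c2 = Component.from_str('D')               # type 8 (generic), same value byte
    assert c1 < c2                                              # lower type sorts first
    assert Component.from_str('F') < Component.from_str('AA')   # shorter value sorts first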
Example #3
 async def fetch(self, obj_type: str, obj_name: bytes):
     # Return if it exists
     if self.repo.has_obj(obj_name) and obj_name not in self.incomplete_list:
         return False
     self.incomplete_list[obj_name] = obj_type
     # Fetch object
     packet_name = self.prefix + [Component.from_bytes(obj_name)]
     wire = b''.join([bytes(seg) async for seg in segment_fetcher(self.app, packet_name, must_be_fresh=False)])
     pack = SyncObject.parse(wire, ignore_critical=True)
     fetched_obj_type = bytes(pack.obj_type).decode()
     # Check type
     if obj_type and obj_type != fetched_obj_type:
         raise ValueError(f'{obj_type} is expected but got {fetched_obj_type}')
     # Write into repo TODO: Transfer compressed data
     h = hashlib.sha1(obj_type.encode() + b' ' + f'{len(pack.obj_data)}'.encode() + b'\x00')
     h.update(pack.obj_data)
     if h.digest() != obj_name:
         raise ValueError(f'{obj_name} has a different digest')
     self.repo.store_obj(bytes(pack.obj_type), bytes(pack.obj_data))
     # Trigger recursive fetching
     if obj_type == "commit":
         await self.traverse_commit(bytes(pack.obj_data))
     elif obj_type == "tree":
         await self.traverse_tree(bytes(pack.obj_data))
     elif obj_type != "blob":
         raise ValueError(f'Unknown data type {obj_type}')
     del self.incomplete_list[obj_name]
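The digest check above follows the git object convention (SHA-1 over "<type> <size>\0<data>"); a hypothetical standalone helper that mirrors it:

    import hashlib

    def git_object_name(obj_type: str, obj_data: bytes) -> bytes:
        # same header layout as the check in fetch(): "<type> <size>\x00" + data
        h = hashlib.sha1(obj_type.encode() + b' ' + str(len(obj_data)).encode() + b'\x00')
        h.update(obj_data)
        return h.digest()   # raw 20-byte SHA-1, matching obj_name above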
Example #4
 def on_interest(self, name: FormalName, _param: InterestParam, _app_param: typing.Optional[BinaryStr]):
     # Get the name and segment number
     if Component.get_type(name[-1]) == Component.TYPE_SEGMENT:
         obj_name = Component.get_value(name[-2])
         seg_no = Component.to_number(name[-1])
     else:
         obj_name = Component.get_value(name[-1])
         seg_no = 0
     # Read the data
     # Git objects are small so we can read the whole object
     try:
         obj_type, data = self.repo.read_obj(bytes(obj_name))
     except ValueError:
         logging.warning(f'Requested file {obj_name} does not exist in repo {self.repo.repo_name}')
         return
     # Extract the segment and calculate Name
     data_name = self.prefix + [Component.from_bytes(obj_name), Component.from_segment(seg_no)]
     start_pos = seg_no * SEGMENTATION_SIZE
     data_seg = data[start_pos:start_pos + SEGMENTATION_SIZE]
     packet_obj = SyncObject()
     packet_obj.obj_type = obj_type.encode()
     packet_obj.obj_data = data_seg
     wire = packet_obj.encode()
     # FinalBlockId names the last segment, not the segment count
     final_block = max((len(data) - 1) // SEGMENTATION_SIZE, 0)
     self.app.put_data(data_name, wire,
                       freshness_period=3600000,
                       final_block_id=Component.from_segment(final_block))
     logging.debug(f'Responded {obj_name} segment {seg_no} in repo {self.repo.repo_name}')
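A sketch of the segment arithmetic assumed above (the SEGMENTATION_SIZE value here is illustrative):

    SEGMENTATION_SIZE = 4400

    def last_segment_index(total_len: int) -> int:
        # an empty object still occupies segment 0
        return max((total_len - 1) // SEGMENTATION_SIZE, 0)

    assert last_segment_index(1) == 0
    assert last_segment_index(4400) == 0
    assert last_segment_index(4401) == 1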
Example #5
async def query_face_id(app, uri, fuzzy_query=False):
    query_filter = FaceQueryFilter()
    query_filter.face_query_filter = FaceQueryFilterValue()
    if not fuzzy_query:
        query_filter.face_query_filter.uri = uri.encode('utf-8')
    else:
        query_filter.face_query_filter.uri_scheme = uri.encode('utf-8')
    query_filter_msg = query_filter.encode()
    name = Name.from_str("/localhost/nfd/faces/query") + [
        Component.from_bytes(query_filter_msg)
    ]
    try:
        _, _, data = await app.express_interest(name,
                                                lifetime=1000,
                                                can_be_prefix=True,
                                                must_be_fresh=True)
    except (InterestCanceled, InterestTimeout, InterestNack, ValidationFailure,
            NetworkError):
        logging.error(f'Query failed')
        return None
    ret = FaceStatusMsg.parse(data)
    logging.info(ret)

    if not fuzzy_query:
        return ret.face_status[0].face_id
    else:
        return ret
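A hypothetical call site for the helper above, assuming a local NFD and python-ndn's NDNApp:

    from ndn.app import NDNApp

    app = NDNApp()

    async def after_start():
        face_id = await query_face_id(app, 'udp4://192.0.2.1:6363')     # exact URI match
        udp_faces = await query_face_id(app, 'udp4', fuzzy_query=True)  # match by URI scheme
        print(face_id, udp_faces)
        app.shutdown()

    app.run_forever(after_start=after_start())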
Example #6
    async def _check(self, method: str, repo_name, process_id: int) -> RepoCommandResponse:
        """
        Return parsed insert check response message.
        # TODO: Use command interests instead of regular interests
        """
        cmd_param = RepoCommandParameter()
        cmd_param.process_id = process_id
        cmd_param_bytes = cmd_param.encode()

        name = repo_name[:]
        name.append(method + ' check')
        name.append(Component.from_bytes(cmd_param_bytes))

        try:
            print(f'Expressing interest: {Name.to_str(name)}')
            data_name, meta_info, content = await self.app.express_interest(
                name, must_be_fresh=True, can_be_prefix=False, lifetime=1000)
            print(f'Received data name: {Name.to_str(data_name)}')
        except InterestNack as e:
            print(f'Nacked with reason={e.reason}')
            return None
        except InterestTimeout:
            print(f'Timeout: {Name.to_str(name)}')
            return None

        try:
            cmd_response = RepoCommandResponse.parse(content)
        except DecodeError as exc:
            logging.warning('Response blob decoding failed')
            return None
        except Exception as e:
            print(e)
            return None
        return cmd_response
Example #7
 def encode(self) -> Component:
     model = MetaDataModel()
     model.source = self.source
     model.tseqno = self.tseqno
     model.nulled = self.nulled
     model.nopcks = self.nopcks
     return Component.from_bytes(model.encode())
Example #8
def test_state_vector_decode():
    enc_sv = b'\xCA\x03\x6F\x6E\x65\xCB\x01\x01\xCA\x03\x74\x77\x6F\xCB\x01\x02'
    enc_sv = Component.from_bytes(enc_sv, StateVectorModelTypes.VECTOR.value)

    sv = StateVector(enc_sv)
    assert sv.get("one") == 1
    assert sv.get("two") == 2
Example #9
    def test_basic_encode():
        uri = ('/Emid/25042=P3//./%1C%9F'
               '/sha256digest=0415e3624a151850ac686c84f155f29808c0dd73819aa4a4c20be73a4d8a874c')
        name = Name.from_str(uri)
        assert len(name) == 6
        assert name[0] == Component.from_bytes(b'Emid')
        assert name[1] == b'\xfd\x61\xd2\x02\x50\x33'
        assert name[2] == Component.from_bytes(b'')
        assert name[3] == Component.from_bytes(b'.')
        assert name[4] == Component.from_bytes(b'\x1C\x9F')
        assert Component.get_type(name[5]) == Component.TYPE_IMPLICIT_SHA256

        assert Name.encoded_length(name) == 57
        assert (Name.encode(name) ==
                b'\x07\x37\x08\x04Emid\xfda\xd2\x02P3\x08\x00\x08\x01.\x08\x02\x1c\x9f'
                b'\x01 \x04\x15\xe3bJ\x15\x18P\xachl\x84\xf1U\xf2\x98\x08\xc0\xdds\x81'
                b'\x9a\xa4\xa4\xc2\x0b\xe7:M\x8a\x87L')
Example #10
def test_state_vector_decode() -> None:
    # hard coded bytes of component vector based on SVS protocol
    enc_sv = b'\xCA\x03\x6F\x6E\x65\xCB\x01\x01\xCA\x03\x74\x77\x6F\xCB\x01\x02'
    enc_sv = Component.from_bytes(enc_sv, SVSyncTlvTypes.VECTOR.value)

    sv: StateVector = StateVector(enc_sv)
    assert sv.get("one") == 1
    assert sv.get("two") == 2
Example #11
 def _unsubscribe_inactive_processes(self):
     for process_id, last_check_tp in list(self.process_id_to_last_check_tp.items()):
         if int(time.time()) - last_check_tp > 10:
             topic = self.process_id_to_check_prefix[process_id] + ['check', Component.from_bytes(process_id)]
             self.pb.unsubscribe(topic)
             logging.info('CommandChecker unsubscribed from {}'.format(Name.to_str(topic)))
             del self.process_id_to_response[process_id]
             del self.process_id_to_last_check_tp[process_id]
             del self.process_id_to_check_prefix[process_id]
Example #12
    async def publish(self, topic: NonStrictName, msg: bytes):
        """
        Publish ``msg`` to ``topic``. Make several attempts until the subscriber returns a\
            response.

        :param topic: NonStrictName. The topic to publish ``msg`` to.
        :param msg: bytes. The message to publish. The pub-sub API does not make any assumptions on\
            the format of this message.
        :return: True if a response was received from a subscriber.
        """
        logging.info(f'publishing a message to topic: {Name.to_str(topic)}')
        # generate a nonce for each message. Nonce is a random sequence of bytes
        nonce = os.urandom(4)
        # wrap msg in a data packet named /<publisher_prefix>/msg/<topic>/nonce
        data_name = Name.normalize(self.prefix + ['msg'] + topic +
                                   [Component.from_bytes(nonce)])
        self.published_data[data_name] = self.app.prepare_data(data_name, msg)

        # prepare notify interest
        int_name = topic + ['notify']
        app_param = NotifyAppParam()
        app_param.publisher_prefix = self.prefix
        app_param.notify_nonce = nonce
        if self.forwarding_hint:
            app_param.publisher_fwd_hint = ForwardingHint()
            app_param.publisher_fwd_hint.name = self.forwarding_hint

        aio.ensure_future(self._erase_publisher_state_after(data_name, 5))

        # express notify interest
        n_retries = 3
        is_success = False
        while n_retries > 0:
            try:
                logging.debug(
                    f'sending notify interest: {Name.to_str(int_name)}')
                _, _, _ = await self.app.express_interest(int_name,
                                                          app_param.encode(),
                                                          must_be_fresh=False,
                                                          can_be_prefix=False)
                is_success = True
                break
            except InterestNack as e:
                logging.debug(f'Nacked with reason: {e.reason}')
                await aio.sleep(1)
                n_retries -= 1
            except InterestTimeout:
                logging.debug(f'Timeout')
                n_retries -= 1

        # if receiving notify response, the subscriber has finished fetching msg
        if is_success:
            logging.debug(f'received notify response for: {data_name}')
        else:
            logging.debug(f'did not receive notify response for: {data_name}')
        await self._erase_publisher_state_after(data_name, 0)
        return is_success
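A sketch of the names publish() builds (prefix, topic and nonce here are made up): the message is wrapped in a Data named /<publisher_prefix>/msg/<topic>/<nonce>, and the notify Interest goes to /<topic>/notify.

    import os
    from ndn.encoding import Name, Component

    publisher_prefix = Name.from_str('/alice')
    topic = Name.from_str('/chat/room1')
    nonce = os.urandom(4)
    data_name = Name.normalize(publisher_prefix + ['msg'] + topic + [Component.from_bytes(nonce)])
    notify_name = Name.normalize(topic + ['notify'])
    print(Name.to_str(data_name), Name.to_str(notify_name))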
Example #13
    def sha256_tester(typ, uri_prefix):
        hex_text = '%28%ba%d4%b5%27%5b%d3%92%db%b6%70%c7%5c%f0%b6%6f%13%f7%94%2b%21%e8%0f%55%c0%e8%6b%37%47%53%a5%48'
        hex_lower = ''.join(hex_text.split('%'))
        hex_upper = hex_lower.upper()

        comp = Component.from_bytes(bytes.fromhex(hex_upper), typ=typ)
        assert Component.get_type(comp) == typ
        assert Component.to_str(comp) == uri_prefix + hex_lower
        assert Component.from_str(uri_prefix + hex_lower) == comp
        assert Component.from_str(uri_prefix + hex_upper) == comp
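The helper above is parameterized by the digest component type; hypothetical invocations, assuming python-ndn's URI prefixes for the two SHA-256 component types ('sha256digest=' also appears in Example #9; 'params-sha256=' is an assumption here):

    sha256_tester(Component.TYPE_IMPLICIT_SHA256, 'sha256digest=')
    sha256_tester(Component.TYPE_PARAMETERS_SHA256, 'params-sha256=')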
Example #14
    async def _process_notify_interest(self, int_name, int_param, app_param):
        """
        Async helper for ``_on_notify_interest()``.
        """
        logging.debug(f'received notify interest: {Name.to_str(int_name)}')
        topic = int_name[:-2]  # remove digest and `notify`

        # parse notify interest
        app_param = NotifyAppParam.parse(app_param)
        publisher_prefix = app_param.publisher_prefix
        notify_nonce = app_param.notify_nonce
        publisher_fwd_hint = app_param.publisher_fwd_hint
        int_param = InterestParam()
        if publisher_fwd_hint:
            # support only 1 forwarding hint now
            int_param.forwarding_hint = [(0x0, publisher_fwd_hint.name)]

        # send msg interest, retransmit 3 times
        msg_int_name = publisher_prefix + ['msg'] + topic + [
            Component.from_bytes(notify_nonce)
        ]
        n_retries = 3
        msg = None

        # de-duplicate notify interests of the same nonce
        if notify_nonce in self.nonce_processed:
            logging.info(
                f'Received duplicate notify interest for nonce {notify_nonce}')
            return
        self.nonce_processed.add(notify_nonce)
        aio.ensure_future(self._erase_subsciber_state_after(notify_nonce, 60))

        while n_retries > 0:
            try:
                logging.debug(
                    f'sending msg interest: {Name.to_str(msg_int_name)}')
                data_name, meta_info, msg = await self.app.express_interest(
                    msg_int_name, int_param=int_param)
                break
            except InterestNack as e:
                logging.debug(f'Nacked with reason: {e.reason}')
                await aio.sleep(1)
                n_retries -= 1
            except InterestTimeout:
                logging.debug(f'Timeout')
                n_retries -= 1
        if msg is None:
            return

        # pass msg to application
        logging.info(f'received subscribed msg: {Name.to_str(msg_int_name)}')
        self.topic_to_cb[topic](bytes(msg))

        # acknowledge notify interest with an empty data packet
        logging.debug(f'acknowledging notify interest {Name.to_str(int_name)}')
        self.app.put_data(int_name, None)
Example #15
 def _announce_process_status(self):
     """
     Announce the status of all active processes over PubSub. Each process status is published\
         to topic /<check_prefix>/check/<process_id>.
     """
     for process_id, status in self.m_process_id_to_status.items():
         check_prefix = self.m_process_id_to_check_prefix[process_id]
         topic = check_prefix + ['check', Component.from_bytes(process_id)]
         msg = status.encode()
         # do not care about whether the subscriber acknowledges
         aio.ensure_future(self.pb.publish(topic, msg))
Example #16
def main():
    parser = argparse.ArgumentParser(description='python client.py')
    parser.add_argument('-n',
                        '--node_prefix',
                        required=True,
                        help='Prefix of catalog ("/217B/repo/node/A")')
    parser.add_argument('-c',
                        '--command',
                        default='insert',
                        choices=['insert', 'delete', 'recall'],
                        help='Command Verb')
    parser.add_argument('-d',
                        '--data_name',
                        required=True,
                        help='data name ("/foo/bar/1.txt")')
    parser.add_argument('-s',
                        '--hash',
                        required=True,
                        help='data hash ("1bd109fe")')
    parser.add_argument('-o',
                        '--desired_copies',
                        type=int,
                        default=3,
                        help='desired copies')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp(face=None, keychain=KeychainDigest())

    name = Name.from_str(args.node_prefix)
    name.append(Component.from_str(args.command))

    datainfo_tlv_model = DatainfoTlvModel()
    datainfo_tlv_model.data_name = args.data_name.encode()
    datainfo_tlv_model.hash = args.hash.encode()
    datainfo_tlv_model.desired_copies = args.desired_copies
    datainfo_name_component = Component.from_bytes(datainfo_tlv_model.encode())

    name.append(datainfo_name_component)

    # logging.info(name)

    try:
        app.run_forever(after_start=send(app, name))
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
    return 0
Example #17
    def check(self, check_prefix: NonStrictName, process_id: bytes) -> Optional[RepoCommandResponse]:
        """
        Check the status of process ``process_id`` published under ``check_prefix``. This function\
            returns the in-memory process status, therefore it returns immediately.
        :return: Optional[RepoCommandResponse]. The last known status of ``process_id``; therefore,\
                the first call for a given process returns None.
        """
        # If process_id is not seen before, subscribe to its status with PubSub
        if process_id not in self.process_id_to_response:
            topic = check_prefix + ['check', Component.from_bytes(process_id)]
            cb = self.make_on_msg(process_id)
            self.pb.subscribe(topic, cb)
            self.process_id_to_response[process_id] = None
            self.process_id_to_check_prefix[process_id] = check_prefix
            logging.info('CommandChecker subscribing to {}'.format(Name.to_str(topic)))

        # Remember when this process is last checked
        self.process_id_to_last_check_tp[process_id] = int(time.time())

        return self.process_id_to_response[process_id]
Example #18
 async def query_face_id(self, uri):
     query_filter = FaceQueryFilter()
     query_filter.face_query_filter = FaceQueryFilterValue()
     query_filter.face_query_filter.uri = uri
     query_filter_msg = query_filter.encode()
     name = Name.from_str("/localhost/nfd/faces/query") + [
         Component.from_bytes(query_filter_msg)
     ]
     try:
         _, _, data = await self.app.express_interest(name,
                                                      lifetime=1000,
                                                      can_be_prefix=True,
                                                      must_be_fresh=True)
     except (InterestCanceled, InterestTimeout, InterestNack,
             ValidationFailure, NetworkError):
         logging.error(f'Query failed')
         return None
     msg = FaceStatusMsg.parse(data)
     if len(msg.face_status) <= 0:
         return None
     return msg.face_status[0].face_id
Example #19
 async def send_repo_command(self, node_prefix: NonStrictName, verb: str, datainfo: DatainfoTlvModel):
     # "/a/b" -> list of bytearrays
     name = Name.normalize(node_prefix)
     # "/a/b" -> "/a/b/insert"
     name.append(Component.from_str(verb))
     datainfo_name_component = Component.from_bytes(datainfo.encode())
     name.append(datainfo_name_component)
     logging.info('Interest Sent: {}'.format(Name.to_str(name)))
     try:
         data_name, meta_info, content = await self.app.express_interest(name, must_be_fresh=True, can_be_prefix=False, nonce=gen_nonce(), lifetime=1000)
         logging.info('Data Received: {}\n'.format(Name.to_str(data_name)))
         # print(meta_info)
         # print(bytes(content) if content else None)
     except InterestNack as e:
         # A NACK is received
         logging.warning(f'Interest Nacked with reason={e.reason}\n')
         return 0
     except InterestTimeout:
         # Interest times out
         logging.warning(f'Interest Timeout\n')
         return 0
     # results = self.parse_results(content)
     # logging.info(results)
     return 1
Example #20
 def encode(self) -> Component:
     model: MetaDataModel = MetaDataModel()
     model.source, model.tseqno, model.nopcks = self.source, self.tseqno, self.nopcks
     return Component.from_bytes(model.encode())
Example #21
    async def delete_file(self, prefix, start_block_id: int,
                          end_block_id: int):
        """
        Delete data packets between [<name_at_repo>/<start_block_id>, <name_at_repo>/<end_block_id>]
        from the remote repo.
        :param prefix: NonStrictName. The name with which this file is stored in the repo.
        :param start_block_id: int.
        :param end_block_id: int.
        """
        # Send command interest
        cmd_param = RepoCommandParameter()
        cmd_param.name = prefix
        cmd_param.start_block_id = start_block_id
        cmd_param.end_block_id = end_block_id
        cmd_param_bytes = cmd_param.encode()

        # Send cmd interests to repo
        name = self.repo_name[:]
        name.append('delete')
        name.append(Component.from_bytes(cmd_param_bytes))
        try:
            logging.info(f'Expressing interest: {Name.to_str(name)}')
            data_name, meta_info, content = await self.app.express_interest(
                name, must_be_fresh=True, can_be_prefix=False, lifetime=1000)
            logging.info(f'Received data name: {Name.to_str(data_name)}')
        except InterestNack as e:
            logging.warning(f'Nacked with reason: {e.reason}')
            return
        except InterestTimeout:
            logging.warning(f'Timeout')
            return

        # Parse response from repo
        try:
            cmd_response = RepoCommandResponse.parse(content)
        except DecodeError as exc:
            logging.warning('Response blob decoding failed')
            return
        process_id = cmd_response.process_id
        status_code = cmd_response.status_code
        logging.info(
            f'cmd_response process {process_id} accepted: status code {status_code}'
        )

        # Send delete check interest wait until delete process completes
        checker = CommandChecker(self.app)
        while True:
            response = await checker.check_delete(self.repo_name, process_id)
            if response is None:
                logging.info(f'Response code is None')
                await aio.sleep(1)
            elif response.status_code == 300:
                logging.info(f'Response code is {response.status_code}')
                await aio.sleep(1)
            elif response.status_code == 200:
                logging.info(
                    'Delete process {} status: {}, delete_num: {}'.format(
                        process_id, response.status_code, response.delete_num))
                break
            else:
                # Shouldn't get here
                assert False
Example #22
    async def insert_file(self, file_path: str, name_at_repo):
        """
        Insert a file to remote repo.
        :param file_path: Local FS path to file to insert
        :param name_at_repo: Name used to store file at repo
        """
        num_packets = self._prepare_data(file_path, name_at_repo)
        if num_packets == 0:
            return

        # Register prefix for responding interests from repo
        # self.app.route(name_at_repo)(self._on_interest)
        await self.app.register(name_at_repo, self._on_interest)

        cmd_param = RepoCommandParameter()
        cmd_param.name = name_at_repo
        cmd_param.start_block_id = 0
        cmd_param.end_block_id = num_packets - 1
        cmd_param_bytes = cmd_param.encode()

        # Send cmd interest to repo
        name = self.repo_name[:]
        name.append('insert')
        name.append(Component.from_bytes(cmd_param_bytes))

        try:
            logging.info(f'Expressing interest: {Name.to_str(name)}')
            data_name, meta_info, content = await self.app.express_interest(
                name, must_be_fresh=True, can_be_prefix=False, lifetime=1000)
            logging.info(f'Received data name: {Name.to_str(data_name)}')
        except InterestNack as e:
            logging.warning(f'Nacked with reason: {e.reason}')
            return
        except InterestTimeout:
            logging.warning(f'Timeout')
            return

        # Parse response from repo
        try:
            cmd_response = RepoCommandResponse.parse(content)
        except DecodeError as exc:
            logging.warning('Response blob decoding failed')
            return
        process_id = cmd_response.process_id
        status_code = cmd_response.status_code
        logging.info(f'cmd_response process {process_id} accepted: status code {status_code}')

        # Send insert check interest to wait until insert process completes
        checker = CommandChecker(self.app)
        while True:
            response = await checker.check_insert(self.repo_name, process_id)
            if response is None:
                logging.info(f'Response code is None')
                await aio.sleep(1)
            elif response.status_code == 300:
                logging.info(f'Response code is {response.status_code}')
                await aio.sleep(1)
            elif response.status_code == 200:
                logging.info('Insert process {} status: {}, insert_num: {}'
                             .format(process_id,
                                     response.status_code,
                                     response.insert_num))
                break
            else:
                # Shouldn't get here
                assert False
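A hypothetical driver for the two client calls above (file and repo names are illustrative, and `client` is assumed to expose insert_file/delete_file as defined in Examples #21 and #22):

    from ndn.encoding import Name

    async def run(client):
        # insert a local file under /data/foo.bin, then delete its segments 0..9
        await client.insert_file('./foo.bin', Name.from_str('/data/foo.bin'))
        await client.delete_file(Name.from_str('/data/foo.bin'), start_block_id=0, end_block_id=9)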