def test_send_single_packet_with_delay(self):
    """Test fetching a single content object over the simulation bus"""
    # constant 0.5 second delay for every packet on the simulation bus
    delay_func = lambda packet: 0.5
    self.fetchiface = self.simulation_bus.add_interface("fetch", delay_func=delay_func)
    self.icn_forwarder1 = ICNForwarder(
        port=0, encoder=self.encoder_type(),
        interfaces=[self.simulation_bus.add_interface("icnfwd1", delay_func=delay_func)])
    self.icn_forwarder2 = ICNForwarder(
        port=0, encoder=self.encoder_type(),
        interfaces=[self.simulation_bus.add_interface("icnfwd2", delay_func=delay_func)])
    self.simulation_bus.start_process()
    self.icn_forwarder1.start_forwarder()
    self.icn_forwarder2.start_forwarder()
    # route /test from forwarder1 towards forwarder2
    fid1 = self.icn_forwarder1.linklayer.faceidtable.get_or_create_faceid(AddressInfo("icnfwd2", 0))
    self.icn_forwarder1.icnlayer.fib.add_fib_entry(Name("/test"), [fid1])
    # the content object is only stored on forwarder2
    self.icn_forwarder2.icnlayer.cs.add_content_object(Content("/test/data", "HelloWorld"), static=True)
    interest = Interest("/test/data")
    wire_data = self.encoder.encode(interest)
    self.fetchiface.send(wire_data, "icnfwd1")
    # the reply must come back through forwarder1 and decode to the stored content
    res, src = self.fetchiface.receive()
    self.assertEqual(src, "icnfwd1")
    c = self.encoder.decode(res)
    self.assertEqual(c, Content("/test/data", "HelloWorld"))
def test_thunk_request_for_data_in_cache(self):
    """test receiving a thunk request for data, if data is in cache"""
    # pre-load the cache so the thunk request can be answered locally
    self.thunklayer.cs.add_content_object(Content(Name("/fct/f1"), "data"))
    thunk_interest = Interest(Name("/fct/f1/THUNK"))
    self.thunklayer.queue_from_lower.put([3, thunk_interest])
    reply = self.thunklayer.queue_to_lower.get(timeout=2)
    # cached data is answered with a fixed cost of 4
    self.assertEqual(reply, [3, Content(thunk_interest.name, str(4))])
def write_out(self, content_content: str):
    """
    The write_out function which is used for the named functions.
    Stores content object as parts into the content store. Before the first
    element is stored, a meta title is stored into the content store so the
    node which gets this content object can detect and start the stream.
    :param content_content: the content object to be stored out
    """
    print("[write_out] Computation name: ", self.comp_name)
    # meta_title_content object creation to return as a first part
    # (write_out_part_counter < 0 means no part has been written yet)
    if self.write_out_part_counter < 0:
        metatitle_content = Content(
            self.comp_name, "sdo:\n" + str(self.comp_name) + "/streaming/p*")
        self.queue_to_lower.put((self.packetid, metatitle_content))
        # self.cs.add_content_object(metatitle_content) TODO not needed?
    # actual content_object for streaming, named .../streaming/p<counter>
    self.write_out_part_counter += 1
    content_name = self.comp_name
    content_name += "/streaming/p" + str(self.write_out_part_counter)
    content_object = Content(content_name, content_content)
    self.cs.add_content_object(content_object)
    print("[write_out] Last entry in content store:",
          self.cs.get_container()[-1].content.name,
          self.cs.get_container()[-1].content.content)
def test_computation_table_rewrite(self):
    """test computation rewriting"""
    name = Name("/test/NFN")
    self.computationList.add_computation(name, 0, Interest(name))
    self.computationList.update_status(name, NFNComputationState.REWRITE)
    rewrite_list = [Name("/test1/NFN"), Name("/test2/NFN")]
    # re-insert the entry with a populated rewrite list
    entry = self.computationList.get_computation(name)
    self.computationList.remove_computation(name)
    entry.rewrite_list = rewrite_list
    self.computationList.append_computation(entry)
    # do not match wrong data (only the first rewrite name counts)
    self.computationList.push_data(Content(Name("/test2/NFN")))
    self.assertEqual(
        self.computationList.get_computation(name).comp_state,
        NFNComputationState.REWRITE)
    # match correct data -> state advances to WRITEBACK
    self.computationList.push_data(
        Content(Name("/test1/NFN"), "HelloWorld"))
    self.assertEqual(
        self.computationList.get_computation(name).comp_state,
        NFNComputationState.WRITEBACK)
    # test ready
    ready = self.computationList.get_ready_computations()
    self.assertEqual(name, ready[0].original_name)
    self.assertEqual("HelloWorld", ready[0].available_data.get(rewrite_list[0]))
def test_handle_received_chunk_data(self):
    """test if received chunk data are handled correctly"""
    import queue  # local import: only queue.Empty is needed here
    self.chunkLayer.start_process()
    n1 = Name("/test/data")
    chunk1_n = Name("/test/data/c0")
    chunk2_n = Name("/test/data/c1")
    request_table_entry = RequestTableEntry(n1)
    request_table_entry.chunked = True
    request_table_entry.requested_chunks.append(chunk1_n)
    request_table_entry.requested_chunks.append(chunk2_n)
    chunk1 = Content(chunk1_n, "chunk1")
    chunk2 = Content(chunk2_n, "chunk2")
    # first chunk: entry stays alive, only chunk2 still outstanding
    request_table_entry = self.chunkLayer.handle_received_chunk_data(
        0, chunk1, request_table_entry, self.q1_to_higher)
    self.assertEqual(request_table_entry.requested_chunks, [chunk2_n])
    # second chunk completes the object; the entry is consumed
    request_table_entry = self.chunkLayer.handle_received_chunk_data(
        0, chunk2, request_table_entry, self.q1_to_higher)
    self.assertEqual(request_table_entry, None)
    try:
        data = self.q1_to_higher.get(timeout=2.0)[1]
    except queue.Empty:  # bugfix: narrow except — only a timeout is a failure
        self.fail()
    self.assertEqual(data.name, n1)
    self.assertEqual(data.content, "chunk1chunk2")
    self.assertEqual(len(self.chunkLayer._request_table), 0)
def test_find_content_to_cs_no_match(self):
    """Test adding and searching data to CS"""
    stored = Content("/test/data", "Hello World")
    probe = Content("/data/test", "Hello World")
    self.cs.add_content_object(stored)
    # a lookup under a name that was never inserted must yield nothing
    lookup_result = self.cs.find_content_object(probe.name)
    self.assertEqual(lookup_result, None)
def test_simple_thunk_request_from_lower_data_local_repo(self):
    """test receiving a thunk request from the network with some data local data in a repo"""
    self.thunklayer.fib.add_fib_entry(Name("/fct"), [1])
    name = Name("/fct/f1")
    name += "_(/dat/data/d2)"
    name += "THUNK"
    name += "NFN"
    interest = Interest(name)
    self.thunklayer.queue_from_lower.put([1,interest])
    # the layer asks for the cost of the plain function and of the full expression
    res1 = self.thunklayer.queue_to_lower.get(timeout=2)
    res2 = self.thunklayer.queue_to_lower.get(timeout=2)
    self.assertEqual(res1, [1, Interest("/fct/f1/THUNK")])
    n1 = Name("/fct/f1")
    n1 += '_(/dat/data/d2)'
    n1 += 'THUNK'
    n1 += 'NFN'
    self.assertEqual(res2, [1, Interest(n1)])
    # answer both cost requests; the reply must carry the lower cost (4)
    content1 = Content(res1[1].name, str(4))
    self.thunklayer.queue_from_lower.put([1, content1])
    content2 = Content(res2[1].name, str(9))
    self.thunklayer.queue_from_lower.put([1, content2])
    res = self.thunklayer.queue_to_lower.get()
    c = Content(name, str(4))
    self.assertEqual(res, [1, c])
def test_service_registration_timeout_renewal(self):
    """Test that the service registration is renewed before the timeout"""
    self.faceidtable.add(42, AddressInfo(('127.42.42.42', 9000), 0))
    waittime = 5
    self.autoconflayer.start_process()
    # Receive forwarder solicitation
    bface, _ = self.queue_to_lower.get()
    # Send forwarder advertisement
    forwarders = Content(Name('/autoconfig/forwarders'),
                         'udp4://127.42.42.42:9000\nr:/global\npl:/test\n')
    self.queue_from_lower.put([42, forwarders])
    # Receive service registration
    fid, data = self.queue_to_lower.get()
    registration_name = Name('/autoconfig/service')
    registration_name += 'udp4://127.0.1.1:1337'
    registration_name += 'test'
    registration_name += 'testrepo'
    self.assertEqual(registration_name, data.name)
    # Send service registration ACK with a ridiculously short timeout
    content = Content(registration_name, f'{waittime}\n')
    self.queue_from_lower.put([42, content])
    # Catch all data the autoconfig layer sends downwards for 5 seconds
    data = []
    timeout = datetime.utcnow() + timedelta(seconds=waittime)
    while datetime.utcnow() < timeout:
        try:
            data.append(self.queue_to_lower.get(timeout=waittime/10))
        except queue.Empty:
            pass
    # the registration must have been renewed within the ACKed lifetime
    registration_interest = Interest(registration_name)
    self.assertIn([42, registration_interest], data)
def generate_data(self, network_name: Name, chunk_size: int = 4096):
    """
    Generates manifest and chunks for a file in the repo
    :param network_name: Network name of high-level object
    :param chunk_size: chunk size
    :return: True if successful, False otherwise
    """
    try:
        fs_name = self._files_in_repo[network_name.to_string()]
    except KeyError:  # bugfix: narrow except — unknown name means nothing to generate
        return False
    with open(fs_name, "r+") as f:
        # open file and determine number of chunks; the inner context manager
        # guarantees the mapping is released (bugfix: was never closed)
        with mmap.mmap(f.fileno(), 0) as file:
            file_length = len(file)
            num_chunks = math.ceil(file_length / chunk_size)
            # generate data packets (manifest and chunk)
            chunk_names = list()
            for n in range(0, num_chunks):
                # extract chunk and compute digest
                chunk = file[chunk_size * n:min(chunk_size * (n + 1), file_length)]
                m = hashlib.sha256()
                m.update(chunk)
                digest = m.hexdigest()
                chunk_network_name = Name(network_name.to_string() + '/chunk/' + digest)
                # add to cache and chunk list
                chunk_names.append(chunk_network_name.to_string())
                self.add_to_cache(Content(chunk_network_name, chunk))
            # generate manifest: newline-separated list of the chunk names
            manifest_data = "\n".join(chunk_names)
            manifest = Content(network_name, manifest_data)
            self.add_to_cache(manifest)
    return True
def test_ICNLayer_ageing_cs(self):
    """Test CS ageing and static entries"""
    self.icn_layer.start_process()
    name1 = Name("/test/data")
    content1 = Content(name1, "HelloWorld")
    name2 = Name("/data/test")
    content2 = Content(name2, "Goodbye")
    # content2 is marked static and must survive ageing; content1 is not
    self.icn_layer.cs.add_content_object(content1)
    self.icn_layer.cs.add_content_object(content2, static=True)
    self.assertEqual(self.icn_layer.cs.get_container_size(), 2)
    self.assertEqual(
        self.icn_layer.cs.find_content_object(name1).content, content1)
    self.assertEqual(
        self.icn_layer.cs.find_content_object(name2).content, content2)
    # Test aging 1: entries are still fresh, nothing is removed
    self.icn_layer.ageing()
    self.assertEqual(self.icn_layer.cs.get_container_size(), 2)
    self.assertEqual(
        self.icn_layer.cs.find_content_object(name1).content, content1)
    self.assertEqual(
        self.icn_layer.cs.find_content_object(name2).content, content2)
    time.sleep(2)
    # Test aging 2: the non-static entry expired, only the static one remains
    self.icn_layer.ageing()
    self.assertEqual(self.icn_layer.cs.get_container_size(), 1)
    self.assertEqual(
        self.icn_layer.cs.find_content_object(name2).content, content2)
def test_chunk_from_lower_layer(self):
    """test receiving metadata from lower layer"""
    self.chunkLayer.start_process()
    n1 = Name("/test/data")
    re1 = RequestTableEntry(n1)
    re1.chunked = True
    chunk1_n = Name("/test/data/c0")
    chunk2_n = Name("/test/data/c1")
    chunk1 = Content(chunk1_n, "chunk1")
    chunk2 = Content(chunk2_n, "chunk2")
    re1.requested_chunks.append(chunk1_n)
    re1.requested_chunks.append(chunk2_n)
    self.chunkLayer._request_table.append(re1)
    # chunks arrive out of order: the second chunk alone must not complete the object
    self.chunkLayer.queue_from_lower.put([0, chunk2])
    time.sleep(1)
    self.assertTrue(self.chunkLayer.queue_to_higher.empty())
    # once the first chunk arrives, the reassembled content goes up
    self.chunkLayer.queue_from_lower.put([0, chunk1])
    try:
        data = self.chunkLayer.queue_to_higher.get(timeout=2.0)
    except:
        self.fail()
    self.assertEqual(data[1].content, "chunk1chunk2")
def test_computation_table_push_back(self):
    """Test the return value of the push back function"""
    comp_name = Name("/test/NFN")
    self.computationList.add_computation(comp_name, 0, Interest(comp_name))
    request_name = Name("/request/name")
    # data that nobody awaits is rejected
    self.assertFalse(self.computationList.push_data(Content(request_name)))
    # once the name is awaited, the same data is accepted
    self.computationList.container[0].add_name_to_await_list(request_name)
    self.assertTrue(self.computationList.push_data(Content(request_name)))
def test_computation_table_ageing_nfn_requests_and_ready_computations(
        self):
    """test the ageing of the computation table using nfn requests and check ready computations"""
    name = Name("/test/NFN")
    name2 = Name("/data/NFN")
    self.computationList.add_computation(name, 0, Interest(name))
    self.computationList.add_computation(name2, 0, Interest(name2))
    self.computationList.container[0].timeout = 1.0
    self.computationList.container[1].timeout = 1.0
    request_name = Name("/request/NFN")
    request_name1 = Name("/request1/NFN")
    request_name2 = Name("/request2/NFN")
    self.computationList.container[0].add_name_to_await_list(request_name)
    self.computationList.container[0].add_name_to_await_list(request_name1)
    self.computationList.container[1].add_name_to_await_list(request_name2)
    self.assertEqual(len(self.computationList.container), 2)
    self.assertEqual(len(self.computationList.container[0].awaiting_data), 2)
    self.assertEqual(len(self.computationList.container[1].awaiting_data), 1)
    # before the timeout, ageing requests nothing
    res = self.computationList.ageing()
    self.assertEqual(res, ([], []))
    time.sleep(2)
    # after the timeout, ageing re-requests all awaited names
    res = self.computationList.ageing()
    self.assertEqual(len(self.computationList.container), 2)
    # four since r2c  -- NOTE(review): comment looks stale, asserted length is 2; confirm
    self.assertEqual(len(self.computationList.container[0].awaiting_data), 2)
    # two since r2c  -- NOTE(review): comment looks stale, asserted length is 1; confirm
    self.assertEqual(len(self.computationList.container[1].awaiting_data), 1)
    self.assertEqual(self.computationList.container[0].awaiting_data, [
        NFNAwaitListEntry(request_name),
        NFNAwaitListEntry(request_name1),
    ])
    self.assertEqual(res, ([request_name, request_name1, request_name2], []))
    # data for only one of two awaited names does not make a computation ready
    self.computationList.push_data(Content(request_name))
    ready_comps = self.computationList.get_ready_computations()
    self.assertEqual(ready_comps, [])
    # data for the remaining name completes the first computation
    v = self.computationList.push_data(Content(request_name1))
    self.assertTrue(v)
    ready_comps = self.computationList.get_ready_computations()
    self.assertEqual(len(ready_comps), 1)
    self.assertEqual(ready_comps[0].original_name, name)
def test_payload_to_string(self):
    # payload can not be decoded with utf-8 codec -> rendered as hex byte string
    c1 = Content("/test/data", bytes(
        [0x00, 0xb1, 0x01]))  # note: this payload can not be decoded with utf-8 codec
    payload_as_string1 = c1.content
    self.assertEqual("0x00 0xb1 0x01", payload_as_string1)
    # payload CAN be decoded with utf-8 codec -> returned unchanged
    c2 = Content("/test/data", "the-payload")
    payload_as_string2 = c2.content
    self.assertEqual("the-payload", payload_as_string2)
def test_content_from_higher_chunk(self):
    """Test handling content from higher with chunks"""
    self.chunkLayer.start_process()
    payload = "A" * 4096 + "B" * 200
    self.chunkLayer.queue_from_higher.put([0, Content("/test/data", payload)])
    try:
        received = self.chunkLayer.queue_to_lower.get(timeout=2.0)
    except:
        self.fail()
    # the first packet sent down must be the metadata object listing both chunks
    expected_md = Content("/test/data", "mdo:/test/data/c0;/test/data/c1:")
    self.assertEqual(received[1], expected_md)
def test_metadata_from_lower_layer(self):
    """test receiving metadata from lower layer"""
    self.chunkLayer.start_process()
    md1_n = Name("/test/data")
    md1 = Content(
        md1_n,
        "mdo:/test/data/c0;/test/data/c1;/test/data/c2;/test/data/c3:/test/data/m1"
    )
    md2_n = Name("/test/data/m1")
    md2 = Content(md2_n, "mdo:/test/data/c4:")
    self.chunkLayer._request_table.append(RequestTableEntry(md1_n))
    self.chunkLayer.queue_from_lower.put([0, md1])
    # the follow-up metadata object is requested first
    data = self.chunkLayer.queue_to_lower.get()
    self.assertEqual(Interest(md2_n), data[1])
    chunknames = [
        Name("/test/data/c0"),
        Name("/test/data/c1"),
        Name("/test/data/c2"),
        Name("/test/data/c3"),
        Name("/test/data/c4")
    ]
    # then one interest per chunk listed in the first metadata object
    for i in range(0, 4):
        data = self.chunkLayer.queue_to_lower.get()
        self.assertEqual(Interest(chunknames[i]), data[1])
    self.assertTrue(self.chunkLayer.queue_to_lower.empty())
    request: RequestTableEntry = self.chunkLayer.get_request_table_entry(
        md1_n)
    self.assertEqual(request.requested_chunks, chunknames[:4])
    self.assertEqual(request.requested_md[0], md2_n)
    # the second metadata object triggers a request for the last chunk
    self.chunkLayer.queue_from_lower.put([0, md2])
    try:
        data = self.chunkLayer.queue_to_lower.get(timeout=2.0)
    except:
        self.fail()
    self.assertEqual(data[1], Interest(chunknames[4]))
    self.assertTrue(self.chunkLayer.queue_to_lower.empty())
    time.sleep(1)
    # all metadata resolved; all five chunks are now tracked as requested
    request: RequestTableEntry = self.chunkLayer.get_request_table_entry(
        md1_n)
    self.assertEqual(len(request.requested_md), 0)
    self.assertEqual(len(request.requested_chunks), 5)
    self.assertEqual(request.requested_chunks, chunknames)
def test_get_content(self):
    """test if the function get content works correct"""
    # both stored files must be retrievable under their network names
    self.assertEqual(self.repository.get_content(Name("/test/data/f1")),
                     Content("/test/data/f1", "data1"))
    self.assertEqual(self.repository.get_content(Name("/test/data/f2")),
                     Content("/test/data/f2", "data2"))
    # an unknown name yields None
    self.assertEqual(self.repository.get_content(Name("/test/data/f3")), None)
def test_add_multiple_content_to_cs(self):
    """Test adding multiple data to CS"""
    first = Content("/test/data", "Hello World")
    second = Content("/data/test", "Goodbye")
    for obj in (first, second):
        self.cs.add_content_object(obj)
    # both entries are stored in insertion order and findable by name
    for slot, expected in zip(self.cs._container, (first, second)):
        self.assertIsNotNone(self.cs.find_content_object(slot.name))
        self.assertEqual(slot.content, expected)
def compute(self, interest: Interest):
    """Compute a result, when all data are available
    :param interest: The original interest message to be handled (can be taken from computation table)
    """
    self.logger.info("Start computation: " + str(interest.name))
    params = []
    entry = self.computation_table.get_computation(interest.name)
    if entry is None:
        return
    self.computation_table.remove_computation(interest.name)
    if entry.comp_state == NFNComputationState.WRITEBACK:
        # result was produced under a rewritten name; forward it under the original name
        name = self.parser.nfn_str_to_network_name(entry.rewrite_list[0])
        res = entry.available_data[name]
        data = Content(entry.original_name, res)
        #self.queue_to_lower.put([entry.id, data])
        self.handleContent(entry.id, data)
        return
    function_name = Name(entry.ast._element)
    function_code = entry.available_data.get(function_name)
    if function_code is None:
        self.queue_to_lower.put([entry.id, Nack(entry.original_name, NackReason.COMP_PARAM_UNAVAILABLE,
                                                interest=entry.interest)])
        return
    executor: BaseNFNExecutor = self.executors.get(self.get_nf_code_language(function_code))
    if executor is None:
        self.queue_to_lower.put([entry.id, Nack(entry.original_name, NackReason.COMP_EXCEPTION,
                                                interest=entry.interest)])
        # bugfix: without this return, executor.execute() below would be called on None
        return
    # collect the parameters in AST order: plain names, nested calls, literals
    for e in entry.ast.params:
        if isinstance(e, AST_Name):
            param = entry.available_data.get(Name(e._element))
            if param is None:
                self.queue_to_lower.put([entry.id, Nack(entry.original_name, NackReason.COMP_PARAM_UNAVAILABLE,
                                                        interest=entry.interest)])
                return
            params.append(param)
        elif isinstance(e, AST_FuncCall):
            search_name = Name()
            search_name += str(e)
            search_name += "NFN"
            params.append(entry.available_data[search_name])
        elif not isinstance(e.type, AST):
            params.append(e.type(e._element))
    res = executor.execute(function_code=function_code, params=params)
    if res is None:
        self.queue_to_lower.put([entry.id, Nack(entry.original_name, NackReason.COMP_EXCEPTION,
                                                interest=entry.interest)])
        # bugfix: do not emit a Content reply after a failed execution
        return
    content_res: Content = Content(entry.original_name, str(res))  #TODO typed results
    self.logger.info("Finish Computation: " + str(content_res.name))
    #self.computation_table.push_data(content_res)
    #self.queue_to_lower.put([entry.id, content_res])
    self.handleContent(entry.id, content_res)
def handle_interest_from_lower(self, face_id: int, interest: Interest, to_lower: multiprocessing.Queue):
    """Handle an interest from the lower layer.
    A name ending in "NFN" is parsed as a publish expression and stored;
    any other name is looked up in the content store.
    :param face_id: face the interest arrived on
    :param interest: the incoming interest
    :param to_lower: queue towards the lower layer for replies
    """
    self.logger.info("Incoming interest: " + interest.name.to_string())
    # incoming interest is nfn expression
    if interest.name.string_components[-1] == "NFN":
        try:
            parser = DefaultNFNParser()
            nfn_str, prepended_name = parser.network_name_to_nfn_str(interest.name)
            ast = parser.parse(nfn_str)
            # assert that valid publish expression
            if is_publish_expression(ast):
                # store to database
                data_name = ast.params[0]._element
                payload = ast.params[1]._element
                try:
                    payload = base64.b64decode(payload[7:])
                    self.logger.info("Payload is base64 encoded. Decoded.")
                except Exception:
                    self.logger.info("Invalid publish expression. The payload could not be decoded.")
                    nack = Nack(interest.name, reason=NackReason.COMP_NOT_PARSED, interest=interest)
                    self.queue_to_lower.put([face_id, nack])
                    # bugfix: stop here — previously the undecodable payload was
                    # stored anyway and an "ok" confirmation was sent after the NACK
                    return
                self.cs.add_content_object(Content(data_name, payload))
                self.logger.info("Add to database: " + data_name)
                # reply confirmation
                confirmation = Content(interest.name, "ok")
                to_lower.put([face_id, confirmation])
            else:
                self.logger.info("Invalid publish expression. Wrong format.")
                nack = Nack(interest.name, reason=NackReason.COMP_NOT_PARSED, interest=interest)
                self.queue_to_lower.put([face_id, nack])
        except Exception:
            self.logger.info("Invalid publish expression.")
            nack = Nack(interest.name, reason=NackReason.COMP_NOT_PARSED, interest=interest)
            self.queue_to_lower.put([face_id, nack])
    # incoming interest is data request
    else:
        db_entry = self.cs.find_content_object(interest.name)
        if db_entry is not None:
            self.logger.info("Found in database")
            to_lower.put([face_id, db_entry.content])
            return
        else:
            self.logger.info("Not found in database")
            nack = Nack(interest.name, NackReason.NO_CONTENT, interest)
            to_lower.put([face_id, nack])
            return
def test_compute(self):
    """Test Computing using the BasicNFNLayer"""
    self.nfn_layer.fib.add_fib_entry(Name('/test'), 1, True)
    c1 = Content("/func/f1", "PYTHON\nf\ndef f(a):\n return a.upper()")
    self.nfn_layer.cs.add_content_object(c1)
    self.nfn_layer.start_process()
    computation_name = Name("/func/f1")
    computation_name += "_(/func/f2(/test/data))"
    computation_name += "NFN"
    computation_interest = Interest(computation_name)
    self.nfn_layer.queue_from_lower.put([2, computation_interest])
    # the layer first fetches the function code ...
    res1 = self.nfn_layer.queue_to_lower.get(timeout=2.0)
    self.assertEqual(res1, [2, Interest(Name("/func/f1"))])
    # ... then forwards the rewritten inner computation
    res2 = self.nfn_layer.queue_to_lower.get(timeout=2.0)
    inner_name = Name("/test/data")
    inner_name += "/func/f2(_)"
    inner_name += "NFN"
    inner_interest = Interest(inner_name)
    self.assertEqual(res2, [2, inner_interest])
    self.nfn_layer.queue_from_lower.put([2, c1])
    time.sleep(4)
    # ageing re-sends the outstanding inner interest and answers the R2C keepalive
    self.nfn_layer.ageing()
    res3 = self.nfn_layer.queue_to_lower.get(timeout=2.0)
    self.assertEqual(res3[1], inner_interest)
    r2c_name = Name("/test/data")
    r2c_name += "/func/f2(_)"
    r2c_name += "R2C"
    r2c_name += "KEEPALIVE"
    r2c_name += "NFN"
    r2c_interest = Interest(r2c_name)
    res4 = self.nfn_layer.queue_to_lower.get(timeout=2.0)
    self.assertEqual(res4[1], Content(r2c_name, 'Running'))
    # the inner result arrives and the computation finishes with the uppercased data
    self.nfn_layer.queue_from_lower.put(
        [2, Content(inner_name, "HelloWorld")])
    res = self.nfn_layer.queue_to_lower.get()
    self.assertEqual(res[1], Content(computation_name, "HELLOWORLD"))
def __init__(self, log_level=255, manager: multiprocessing.Manager = None):
    """Set up the repo layer with a manager-backed cache pre-filled with the demo objects."""
    super().__init__(logger_name="RepoLayer", log_level=log_level)
    if manager is None:
        manager = multiprocessing.Manager()
    self._data_structs = manager.dict()
    # pre-populate the cache with the index schema and the wrapper description
    cache = ContentStoreMemoryExact()
    for obj_name, obj_payload in (("/alice/schema.index", alice_index_schema),
                                  ("/alice/homebrewed/ac", ac_wrapper_desc)):
        cache.add_content_object(Content(obj_name, obj_payload))
    self._data_structs['cache'] = cache
    # mapping of network names to the files backing them on disk
    self._files_in_repo = {
        "/alice/movies/cats-and-dogs.mp4": "/tmp/cats-and-dogs.mp4",
        "/alice/public/img/basel.jpg": "/tmp/basel.jpg",
    }
def get_chunks_available(self, packet: Packet):
    """
    Check if chunks are available for a given name.
    Return a content object containing the names of the available chunks, or NACK
    """
    chunks_available = []
    name = self.unpack(packet.name)
    request_entry = self.get_request_entry(name)
    cs_entry = self.cs.find_content_object(name)
    if request_entry is not None:
        # a request is still pending: report the chunks received so far
        chunks_available = [
            str(chunk.name) for chunk in request_entry.chunks
        ]
    elif cs_entry:
        # fully cached object: signal completeness instead of listing chunks
        chunks_available.append("complete;")
    if chunks_available:
        chunks_available = Content(packet.name, ";".join(chunks_available))
        if len(chunks_available.content) > self.chunk_size:
            # the chunk list itself does not fit into one packet: chunk it and
            # store all parts in the content store for later retrieval
            meta_data, chunks = self.chunkifyer.chunk_data(
                chunks_available)
            meta_data.extend(chunks)
            for data in meta_data:
                # replace any stale parts from a previous run
                self.cs.remove_content_object(data.name)
                self.cs.add_content_object(data)
            return meta_data[0]
        else:
            return chunks_available
    # nothing known about this name
    return Nack(packet.name, NackReason.NO_CONTENT, packet)
def test_simple_call_params_to_function_local_prepended_data(self):
    """Test, if ToDataFirstOptimizer works correctly with a single function call with parameter, to function, compute local since prepended data are local"""
    cmp_name = Name("/func/f1")
    cmp_name += "_(/test/data)"
    cmp_name += "NFN"
    workflow = "/func/f1(/test/data)"
    fib = self.optimizer.fib
    fib.add_fib_entry(Name("/func"), [1], False)
    self.optimizer.fib = fib
    prefix = Name("/func/f1")
    # the function code is available locally -> computation should be local
    cs = self.optimizer.cs
    cs.add_content_object(
        Content(Name("/func/f1"),
                "PYTHON\nf\ndef f():\n return 'Hello World'"), True)
    self.optimizer.cs = cs
    ast = self.parser.parse(workflow)
    self.assertFalse(
        self.optimizer.compute_fwd(prefix, ast, Interest(cmp_name)))
    self.assertTrue(
        self.optimizer.compute_local(prefix, ast, Interest(cmp_name)))
    rules = self.optimizer.rewrite(prefix, ast)
    self.assertEqual(rules, ['%/func/f1%(/test/data)', 'local'])
    # the rewritten rule must round-trip back to the original names
    name = self.parser.nfn_str_to_network_name(rules[0])
    self.assertEqual(name, cmp_name)
    name_str, prepended = self.parser.network_name_to_nfn_str(name)
    self.assertEqual(name_str, workflow)
    self.assertEqual(prepended, Name("/func/f1"))
def icnl_mgmt(self, command, params, replysock):
    """Handle ICN-layer management commands.
    Supports "newforwardingrule" (params "prefix:faceid", '/' escaped as %2F)
    and "newcontent" (params "prefix:content"); anything else is rejected.
    :param command: management command name
    :param params: command parameter string
    :param replysock: socket to send the HTTP-style reply on
    """
    # idiom fix: identity comparison with None instead of ==
    if self.cs is None or self.fib is None or self.pit is None:
        # node has no ICN data structures -> nothing to manage
        reply = "HTTP/1.1 200 OK \r\n Content-Type: text/html \r\n\r\n Not a Forwarder OK\r\n"
        replysock.send(reply.encode())
    # newface expects /linklayer/newface/ip:port
    elif command == "newforwardingrule":
        prefix, faceid = params.split(":", 1)
        faceid = int(faceid)
        prefix = prefix.replace("%2F", "/")
        name = Name(prefix)
        self.fib.add_fib_entry(name, faceid, True)
        reply = "HTTP/1.1 200 OK \r\n Content-Type: text/html \r\n\r\n newforwardingrule OK:" + str(faceid) + "\r\n"
        replysock.send(reply.encode())
        self.logger.info("New Forwardingrule added " + prefix + "|" + str(faceid))
        return
    elif command == "newcontent":
        prefix, content = params.split(":", 1)
        prefix = prefix.replace("%2F", "/")
        name = Name(prefix)
        content = Content(name, content)
        self.cs.add_content_object(content, static=True)
        reply = "HTTP/1.1 200 OK \r\n Content-Type: text/html \r\n\r\n newcontent OK\r\n"
        replysock.send(reply.encode())
        self.logger.info("New content added " + prefix + "|" + content.content)
        return
    else:
        self.unknown_command(replysock)
        return
def test_r2c_timeout_prevention(self):  #todo same for rewrite
    """test r2c timeout prevention"""
    name1 = Name("/test1/NFN")
    name2 = Name("/test2/NFN")
    self.computationList.add_computation(name1, 0, Interest(name1))
    self.computationList.add_computation(name2, 0, Interest(name2))
    requestname1 = Name("/request1/NFN")
    requestname2 = Name("/request2/NFN")
    self.computationList.add_awaiting_data(name1, requestname1)
    self.computationList.add_awaiting_data(name2, requestname2)
    # shorten both computations' timeouts to 1 second
    entry1 = self.computationList.get_computation(name1)
    self.computationList.remove_computation(name1)
    entry1.timeout = 1
    self.computationList.append_computation(entry1)
    entry2 = self.computationList.get_computation(name2)
    self.computationList.remove_computation(name2)
    entry2.timeout = 1
    self.computationList.append_computation(entry2)
    #ask for requests
    time.sleep(1)
    request_list = self.computationList.ageing()
    # both computations timed out: each awaited name plus its R2C message is re-requested
    self.assertEqual(request_list,
                     ([requestname1,
                       self.r2cclient.R2C_create_message(requestname1),
                       requestname2,
                       self.r2cclient.R2C_create_message(requestname2)], []))
    # an R2C reply keeps computation 1 alive; computation 2 times out for good
    self.computationList.push_data(
        Content(self.r2cclient.R2C_create_message(requestname1)))
    time.sleep(1)
    request_list = self.computationList.ageing()
    self.assertEqual(request_list,
                     ([requestname1,
                       self.r2cclient.R2C_create_message(requestname1)],
                      [name2]))
def test_chunk_multiple_metadata(self):
    """Test chunking metadata with three metadata objects and 10 chunks"""
    payload = ("A" * 4096 + "B" * 4096 + "C" * 4096 + "D" * 4096 + "E" * 4096
               + "F" * 4096 + "G" * 4096 + "H" * 4096 + "I" * 4096 + "J" * 4000)
    md, chunked_content = self.chunkifyer.chunk_data(Content(Name("/test/data"), payload))
    # expected (name, data) pairs for the three chained metadata objects
    expected_md = [
        ('/test/data',
         'mdo:/test/data/c0;/test/data/c1;/test/data/c2;/test/data/c3:/test/data/m1'),
        ('/test/data/m1',
         'mdo:/test/data/c4;/test/data/c5;/test/data/c6;/test/data/c7:/test/data/m2'),
        ('/test/data/m2', 'mdo:/test/data/c8;/test/data/c9:'),
    ]
    # expected (name, data) pairs for the ten chunks
    expected_chunks = [('/test/data/c' + str(i), letter * size)
                       for i, (letter, size)
                       in enumerate(zip("ABCDEFGHIJ", [4096] * 9 + [4000]))]
    for i, packet in enumerate(md):
        self.assertEqual(packet.name.to_string(), expected_md[i][0])
        self.assertEqual(packet.content, expected_md[i][1])
    for i, packet in enumerate(chunked_content):
        self.assertEqual(packet.name.to_string(), expected_chunks[i][0])
        self.assertEqual(packet.content, expected_chunks[i][1])
def decode(self, wire_data) -> Packet: """ NDN TLV wire format packet to python object (PiCN's internal representation) :param wire_data: Packet in wire format (NDN TLV representation) :return: Packet in PiCN's internal representation """ # print("got %d bytes to decode" % len(wire_data)) if(self.is_content(wire_data)): self.logger.info("Decode content object") try: (name, payload) = self.decode_data(wire_data) return Content(name, payload, wire_data) except: self.logger.info("Decoding failed (malformed packet)") return UnknownPacket(wire_format=wire_data) if(self.is_interest(wire_data)): self.logger.info("Decode interest") try: name = self.decode_interest(wire_data) return Interest(name, wire_data) except: self.logger.info("Decoding failed (malformed packet)") return UnknownPacket(wire_format=wire_data) if(self.is_nack(wire_data)): self.logger.info("Decode NACK") try: (name, reason) = self.decode_nack(wire_data) return Nack(name, reason, None, wire_format=wire_data) except: self.logger.info("Decoding failed (malformed packet)") return UnknownPacket(wire_format=wire_data) else: self.logger.info("Decode failed (unknown packet type)") return UnknownPacket(wire_format=wire_data)
def test_ICNLayer_interest_forward_content_no_match(self):
    """Test ICN layer with CS entry no match"""
    self.icn_layer.start_process()
    to_face_id = 1
    from_face_id = 2
    interest = Interest("/test/data/bla")
    name = Name("/test/data")
    self.icn_layer.fib.add_fib_entry(name, [to_face_id], static=True)
    # add content whose name does not exactly match the interest
    content = Content("/test/data")
    self.icn_layer.cs.add_content_object(content)
    # request content
    self.queue1_icn_routing_up.put([from_face_id, interest])
    # CS misses, so the interest is forwarded according to the FIB
    try:
        face_id, data = self.queue1_icn_routing_down.get(timeout=2.0)
    except:
        self.fail()
    # bugfix: assertTrue(a, b) only checks truthiness of a (b is just the
    # failure message); assertEqual performs the intended comparison
    self.assertEqual(data, interest)
    self.assertEqual(face_id, to_face_id)
    self.assertTrue(self.queue1_icn_routing_up.empty())
    # a PIT entry for the forwarded interest must exist
    self.assertEqual(self.icn_layer.pit.get_container_size(), 1)
    self.assertEqual(
        self.icn_layer.pit.find_pit_entry(interest.name).name,
        interest.name)
def test_ICNLayer_content_two_pit_entries(self):
    """Test receiving a content object with two PIT entries"""
    self.icn_layer.start_process()
    content_in_face_id = 1
    first_requester = 2
    second_requester = 3
    name = Name("/test/data")
    content = Content("/test/data")
    # two pending interests for the same name from different faces
    for requester in (first_requester, second_requester):
        self.icn_layer.pit.add_pit_entry(name, requester, None, False)
    self.queue1_icn_routing_up.put([content_in_face_id, content])
    # the content must be replicated to both requesting faces, in order
    try:
        out_face, out_data = self.queue1_icn_routing_down.get(timeout=2.0)
    except:
        self.fail()
    self.assertEqual(out_face, first_requester)
    self.assertEqual(out_data, content)
    out_face, out_data = self.queue1_icn_routing_down.get()
    self.assertEqual(out_face, second_requester)
    self.assertEqual(out_data, content)