Пример #1
0
    def __init__(self,
                 cs: BaseContentStore,
                 pit: BasePendingInterestTable,
                 fib: BaseForwardingInformationBase,
                 chunkifyer: BaseChunkifyer = None,
                 chunk_size: int = 4096,
                 num_of_forwards: int = 1,
                 prefix: str = "car",
                 log_level: int = 255):
        """Set up the chunk layer and its process-shared bookkeeping tables.

        :param cs: content store shared with the ICN layer
        :param pit: pending interest table shared with the ICN layer
        :param fib: forwarding information base shared with the ICN layer
        :param chunkifyer: chunking strategy; a SimpleContentChunkifyer with
            ``chunk_size`` is created when None
        :param chunk_size: payload size threshold above which content is chunked
        :param num_of_forwards: hop budget encoded into the CA/CL suffix of
            neighbour request names
        :param prefix: name prefix marking interests eligible for neighbour
            offloading (checked in data_from_higher)
        :param log_level: log level passed to the LayerProcess base
        """
        super().__init__("ChunkLayer", log_level=log_level)
        self.cs = cs
        self.pit = pit
        self.fib = fib
        self.chunk_size = chunk_size
        if chunkifyer is None:
            self.chunkifyer = SimpleContentChunkifyer(chunk_size)
        else:
            self.chunkifyer: BaseChunkifyer = chunkifyer
        self.num_of_forwards = num_of_forwards
        self.prefix = prefix

        # Manager-backed containers so the tables stay consistent across the
        # processes spawned by the layer stack.
        manager = multiprocessing.Manager()
        # chunk/metadata name -> (object, insertion timestamp); quoted because
        # a bare tuple is not a valid typing parameter.
        self._chunk_table: "Dict[Name, Tuple[Content, float]]" = manager.dict()
        self._request_table: List[RequestTableEntry] = manager.list()
        self._ca_table: Dict[Name, CaEntry] = manager.dict()
        # content name -> neighbour that supplied it (for follow-up interests)
        self.recipient_cl: Dict[Name, Name] = manager.dict()
        self.pass_through = False
Пример #2
0
    def setUp(self):
        """Build the simulation: two car nodes and three RSU nodes sharing
        one simulation bus, each with its own fetch tool and mgmt client."""
        self.encoder_type = SimpleStringEncoder()
        self.simulation_bus = SimulationBus(packetencoder=self.encoder_type)
        chunk_size = 8192
        self.chunkifyer = SimpleContentChunkifyer(chunk_size)

        # Initialize two cars
        self.cars = []
        self.fetch_tool_cars = []
        self.mgmt_client_cars = []
        for i in range(2):
            car = ICNForwarder(
                0,
                encoder=self.encoder_type,
                routing=True,
                interfaces=[self.simulation_bus.add_interface(f"car{i}")])
            car_fetch = Fetch(
                f"car{i}",
                None,
                255,
                self.encoder_type,
                interfaces=[self.simulation_bus.add_interface(f"ftcar{i}")])
            self.cars.append(car)
            self.fetch_tool_cars.append(car_fetch)
            self.mgmt_client_cars.append(
                MgmtClient(car.mgmt.mgmt_sock.getsockname()[1]))
            car.icnlayer.cs.set_cs_timeout(40)

        # Initialize RSUs
        self.rsus = []
        self.fetch_tools = []
        self.mgmt_clients = []
        for i in range(3):
            rsu = NFNForwarderData(
                0,
                encoder=self.encoder_type,
                interfaces=[self.simulation_bus.add_interface(f"rsu{i}")],
                chunk_size=chunk_size,
                num_of_forwards=1,
                ageing_interval=10)
            rsu_fetch = Fetch(
                f"rsu{i}",
                None,
                255,
                self.encoder_type,
                interfaces=[self.simulation_bus.add_interface(f"ft{i}")])
            rsu.nfnlayer.optimizer = EdgeComputingOptimizer(
                rsu.icnlayer.cs, rsu.icnlayer.fib, rsu.icnlayer.pit,
                rsu.linklayer.faceidtable)
            self.rsus.append(rsu)
            self.fetch_tools.append(rsu_fetch)
            self.mgmt_clients.append(
                MgmtClient(rsu.mgmt.mgmt_sock.getsockname()[1]))
            rsu_fetch.timeoutpreventionlayer.timeout_interval = 40
            rsu.icnlayer.cs.set_cs_timeout(60)
Пример #3
0
 def __init__(self,
              chunkifyer: BaseChunkifyer = None,
              chunk_size: int = 4096,
              manager: multiprocessing.Manager = None,
              log_level=255):
     """Set up the chunk layer.

     :param chunkifyer: chunking strategy; defaults to a
         SimpleContentChunkifyer with ``chunk_size``
     :param chunk_size: payload size threshold above which content is chunked
     :param manager: multiprocessing manager backing the shared tables;
         a fresh one is created when None
     :param log_level: log level passed to the base layer
     """
     super().__init__("ChunkLayer", log_level=log_level)
     self.chunk_size = chunk_size
     # fix: identity comparison (`is None`) instead of `== None`, per PEP 8.
     if chunkifyer is None:
         self.chunkifyer = SimpleContentChunkifyer(chunk_size)
     else:
         self.chunkifyer: BaseChunkifyer = chunkifyer
     if manager is None:
         manager = multiprocessing.Manager()
     # chunk/metadata name -> (object, insertion timestamp); quoted because
     # a bare tuple is not a valid typing parameter.
     self._chunk_table: "Dict[Name, Tuple[Content, float]]" = manager.dict()
     self._request_table: List[RequestTableEntry] = manager.list()
Пример #4
0
    def setUp(self):
        """Build the simulation: one car node and four RSU nodes sharing one
        simulation bus, each RSU with its own fetch tool and mgmt client."""
        self.encoder_type = SimpleStringEncoder()
        self.simulation_bus = SimulationBus(packetencoder=self.encoder_type)
        chunk_size = 4
        self.chunkifyer = SimpleContentChunkifyer(chunk_size)

        # The single car node plus its fetch tool and mgmt client.
        self.car = ICNForwarder(
            0,
            encoder=self.encoder_type,
            routing=True,
            interfaces=[self.simulation_bus.add_interface("car")])
        self.fetch_tool_car = Fetch(
            "car",
            None,
            255,
            self.encoder_type,
            interfaces=[self.simulation_bus.add_interface("ftcar")])
        self.mgmt_client_car = MgmtClient(
            self.car.mgmt.mgmt_sock.getsockname()[1])

        self.rsus = []
        self.fetch_tools = []
        self.mgmt_clients = []

        for i in range(4):
            rsu = NFNForwarderData(
                0,
                encoder=self.encoder_type,
                interfaces=[self.simulation_bus.add_interface(f"rsu{i}")],
                chunk_size=chunk_size,
                num_of_forwards=0)
            fetch_tool = Fetch(
                f"rsu{i}",
                None,
                255,
                self.encoder_type,
                interfaces=[self.simulation_bus.add_interface(f"ft{i}")])
            rsu.nfnlayer.optimizer = EdgeComputingOptimizer(
                rsu.icnlayer.cs, rsu.icnlayer.fib, rsu.icnlayer.pit,
                rsu.linklayer.faceidtable)
            self.rsus.append(rsu)
            self.fetch_tools.append(fetch_tool)
            self.mgmt_clients.append(
                MgmtClient(rsu.mgmt.mgmt_sock.getsockname()[1]))
            fetch_tool.timeoutpreventionlayer.timeout_interval = 30
Пример #5
0
    def __init__(self,
                 ip: str,
                 port: int,
                 log_level=255,
                 encoder: BasicEncoder = None,
                 autoconfig: bool = False,
                 interfaces=None):
        """Build the client's layer stack and start all layer processes.

        :param ip: peer address (or host part when ``port`` is given)
        :param port: peer port; when None, ``ip`` alone is used as the address
        :param log_level: log level handed to every layer
        :param encoder: packet encoder; defaults to SimpleStringEncoder
        :param autoconfig: when True, insert an AutoconfigClientLayer above
            the packet encoding layer
        :param interfaces: link-layer interfaces; defaults to one
            UDP4Interface on an auto-assigned port
        """

        # create encoder and chunkifyer
        if encoder is None:
            self.encoder = SimpleStringEncoder(log_level=log_level)
        else:
            encoder.set_log_level(log_level)
            self.encoder = encoder
        self.chunkifyer = SimpleContentChunkifyer()

        # initialize layers
        synced_data_struct_factory = PiCNSyncDataStructFactory()
        synced_data_struct_factory.register("faceidtable", FaceIDDict)
        synced_data_struct_factory.create_manager()
        faceidtable = synced_data_struct_factory.manager.faceidtable()

        # fix: removed the original's redundant `else: interfaces = interfaces`
        if interfaces is None:
            interfaces = [UDP4Interface(0)]

        # create layers
        self.linklayer = BasicLinkLayer(interfaces,
                                        faceidtable,
                                        log_level=log_level)
        self.packetencodinglayer = BasicPacketEncodingLayer(
            self.encoder, log_level=log_level)
        self.chunklayer = BasicChunkLayer(self.chunkifyer, log_level=log_level)

        self.lstack: LayerStack = LayerStack(
            [self.chunklayer, self.packetencodinglayer, self.linklayer])

        self.autoconfig = autoconfig
        if autoconfig:
            self.autoconfiglayer: AutoconfigClientLayer = AutoconfigClientLayer(
                self.linklayer)
            self.lstack.insert(self.autoconfiglayer,
                               on_top_of=self.packetencodinglayer)

        # setup communication: resolve (or create) the face id of the peer.
        # NOTE(review): with port=None the address is the bare ip, otherwise an
        # (ip, port) tuple — confirm AddressInfo accepts both forms.
        if port is None:
            self.fid = self.linklayer.faceidtable.get_or_create_faceid(
                AddressInfo(ip, 0))
        else:
            self.fid = self.linklayer.faceidtable.get_or_create_faceid(
                AddressInfo((ip, port), 0))

        # start all layer processes
        self.lstack.start_all()
Пример #6
0
    def setUp(self):
        """Create a BasicChunkLayer and attach four fresh queues to it."""
        self.chunkifyer = SimpleContentChunkifyer()
        self.chunkLayer: BasicChunkLayer = BasicChunkLayer(self.chunkifyer,
                                                           log_level=255)

        # One multiprocessing queue per direction of the layer under test.
        (self.q1_to_lower, self.q1_to_higher, self.q1_from_lower,
         self.q1_from_higher) = (multiprocessing.Queue() for _ in range(4))

        self.chunkLayer.queue_to_lower = self.q1_to_lower
        self.chunkLayer.queue_to_higher = self.q1_to_higher
        self.chunkLayer.queue_from_lower = self.q1_from_lower
        self.chunkLayer.queue_from_higher = self.q1_from_higher
Пример #7
0
class test_SimpleContentChunkifyer(unittest.TestCase):
    """Unit tests for SimpleContentChunkifyer: metadata generation, chunking,
    metadata parsing and reassembly (default chunk size 4096)."""

    def setUp(self):
        self.chunkifyer = SimpleContentChunkifyer()

    def tearDown(self):
        pass

    def _check_objects(self, objects, expected_names, expected_data):
        """Assert names and payloads of metadata/content objects.

        Also checks the object count, which the former index-based loops
        silently skipped when fewer objects than expected were produced.
        """
        self.assertEqual(len(objects), len(expected_names))
        for obj, exp_name, exp_data in zip(objects, expected_names,
                                           expected_data):
            self.assertEqual(obj.name.to_string(), exp_name)
            self.assertEqual(obj.content, exp_data)

    def test_generate_metadata_no_next(self):
        """Test generating a simple metadata object"""
        name = Name("/test/data")

        res = self.chunkifyer.generate_meta_data(2, 4, 0, 0, name)

        self.assertEqual(res.name.to_string(), "/test/data")
        self.assertEqual(res.content, "mdo:/test/data/c2;/test/data/c3:")

    def test_generate_metadata_one_next(self):
        """Test generating a simple metadata object with one following"""
        name = Name("/test/data")

        res = self.chunkifyer.generate_meta_data(2, 4, 0, 1, name)

        self.assertEqual(res.name.to_string(), "/test/data")
        self.assertEqual(res.content,
                         "mdo:/test/data/c2;/test/data/c3:/test/data/m1")

    def test_generate_metadata_two_next(self):
        """Test generating a simple metadata object with two following"""
        name = Name("/test/data")

        res = self.chunkifyer.generate_meta_data(2, 4, 1, 2, name)

        self.assertEqual(res.name.to_string(), "/test/data/m1")
        self.assertEqual(res.content,
                         "mdo:/test/data/c2;/test/data/c3:/test/data/m2")

    def test_chunk_single_metadata(self):
        """Test chunking content that fits a single metadata object"""
        name = Name("/test/data")
        string = "A" * 4096 + "B" * 4096 + "C" * 4096
        content = Content(name, string)

        md, content = self.chunkifyer.chunk_data(content)

        self._check_objects(
            md, ['/test/data'],
            ['mdo:/test/data/c0;/test/data/c1;/test/data/c2:'])
        self._check_objects(
            content, ['/test/data/c0', '/test/data/c1', '/test/data/c2'],
            ["A" * 4096, "B" * 4096, "C" * 4096])

    def test_chunk_multiple_metadata(self):
        """Test chunking metadata with three metadata objects and 10 chunks"""
        name = Name("/test/data")
        string = "A"*4096 + "B"*4096 + "C"*4096 + "D"*4096 + "E"*4096 + "F"*4096 + "G"*4096 + "H"*4096 \
                 + "I"*4096 + "J"*4000
        content = Content(name, string)

        md, chunked_content = self.chunkifyer.chunk_data(content)

        self._check_objects(
            md, ['/test/data', '/test/data/m1', '/test/data/m2'], [
                'mdo:/test/data/c0;/test/data/c1;/test/data/c2;/test/data/c3:/test/data/m1',
                'mdo:/test/data/c4;/test/data/c5;/test/data/c6;/test/data/c7:/test/data/m2',
                'mdo:/test/data/c8;/test/data/c9:'
            ])
        self._check_objects(chunked_content, [
            '/test/data/c0', '/test/data/c1', '/test/data/c2', '/test/data/c3',
            '/test/data/c4', '/test/data/c5', '/test/data/c6', '/test/data/c7',
            '/test/data/c8', '/test/data/c9'
        ], [
            "A" * 4096, "B" * 4096, "C" * 4096, "D" * 4096, "E" * 4096,
            "F" * 4096, "G" * 4096, "H" * 4096, "I" * 4096, "J" * 4000
        ])

    def test_chunk_multiple_metadata_reassemble(self):
        """Test chunking metadata with three metadata objects and 10 chunks and reassemble"""
        name = Name("/test/data")
        string = "A" * 4096 + "B" * 4096 + "C" * 4096 + "D" * 4096 + "E" * 4096 + "F" * 4096 + "G" * 4096 + "H" * 4096 \
                 + "I" * 4096 + "J" * 4000
        content = Content(name, string)

        md, chunked_content = self.chunkifyer.chunk_data(content)

        self._check_objects(
            md, ['/test/data', '/test/data/m1', '/test/data/m2'], [
                'mdo:/test/data/c0;/test/data/c1;/test/data/c2;/test/data/c3:/test/data/m1',
                'mdo:/test/data/c4;/test/data/c5;/test/data/c6;/test/data/c7:/test/data/m2',
                'mdo:/test/data/c8;/test/data/c9:'
            ])
        self._check_objects(chunked_content, [
            '/test/data/c0', '/test/data/c1', '/test/data/c2', '/test/data/c3',
            '/test/data/c4', '/test/data/c5', '/test/data/c6', '/test/data/c7',
            '/test/data/c8', '/test/data/c9'
        ], [
            "A" * 4096, "B" * 4096, "C" * 4096, "D" * 4096, "E" * 4096,
            "F" * 4096, "G" * 4096, "H" * 4096, "I" * 4096, "J" * 4000
        ])

        # Round trip: reassembling the chunks must reproduce the original.
        reassembled_content = self.chunkifyer.reassamble_data(
            md[0].name, chunked_content)
        self.assertEqual(content, reassembled_content)

    def test_parse_metadata_next(self):
        """Test parse metadata with next metadata"""
        md, names = self.chunkifyer.parse_meta_data(
            "mdo:/test/data/c0;/test/data/c1;/test/data/c2;/test/data/c3:/test/data/m1"
        )

        self.assertEqual(Name("/test/data/m1"), md)
        names_comp = [
            Name("/test/data/c0"),
            Name("/test/data/c1"),
            Name("/test/data/c2"),
            Name("/test/data/c3")
        ]
        self.assertEqual(names, names_comp)

    def test_parse_metadata(self):
        """Test parse metadata"""
        md, names = self.chunkifyer.parse_meta_data(
            "mdo:/test/data/c0;/test/data/c1;/test/data/c2;/test/data/c3:")

        self.assertEqual(None, md)
        names_comp = [
            Name("/test/data/c0"),
            Name("/test/data/c1"),
            Name("/test/data/c2"),
            Name("/test/data/c3")
        ]
        self.assertEqual(names, names_comp)
Пример #8
0
    def setUp(self):
        """Build a forwarder, a repo and a fetch client — all on auto-assigned
        UDP ports — wired together for the autoconfig integration tests."""
        synced_data_struct_factory = PiCNSyncDataStructFactory()
        synced_data_struct_factory.register('cs', ContentStoreMemoryExact)
        synced_data_struct_factory.register('pit',
                                            PendingInterstTableMemoryExact)
        synced_data_struct_factory.register(
            'fib', ForwardingInformationBaseMemoryPrefix)
        synced_data_struct_factory.register('faceidtable', FaceIDDict)
        synced_data_struct_factory.create_manager()
        # Set up forwarder
        cs = synced_data_struct_factory.manager.cs()
        pit = synced_data_struct_factory.manager.pit()
        fib = synced_data_struct_factory.manager.fib()
        prefixes = [(Name('/test/prefix/repos'), True)]
        # Auto-assign port
        forwarder_interface = UDP4Interface(0)
        forwarder_fidtable = synced_data_struct_factory.manager.faceidtable()
        forwarder_linklayer = BasicLinkLayer([forwarder_interface],
                                             forwarder_fidtable)
        forwarder_port = forwarder_interface.get_port()
        forwarder_encoder = NdnTlvEncoder()
        icnlayer = BasicICNLayer()
        icnlayer.cs = cs
        icnlayer.pit = pit
        icnlayer.fib = fib
        forwarder_autoconfiglayer = AutoconfigServerLayer(
            forwarder_linklayer, registration_prefixes=prefixes)
        # Autoconfig server writes registrations into the forwarder's FIB.
        forwarder_autoconfiglayer.fib = fib
        self.forwarder = LayerStack([
            icnlayer, forwarder_autoconfiglayer,
            BasicPacketEncodingLayer(forwarder_encoder), forwarder_linklayer
        ])

        # Set up repo
        repository = MockRepository(Name('/thisshouldbechanged'))
        repo_chunkifyer = SimpleContentChunkifyer()
        repo_chunklayer = BasicChunkLayer(repo_chunkifyer)
        repo_encoder = NdnTlvEncoder()
        # Auto-assign port
        repo_interface = UDP4Interface(0)
        repo_fidtable = synced_data_struct_factory.manager.faceidtable()
        repo_linklayer = BasicLinkLayer([repo_interface], repo_fidtable)
        # fix: dropped the unused `repo_port` local — the value was never read.
        self.repo = LayerStack([
            BasicRepositoryLayer(repository), repo_chunklayer,
            AutoconfigRepoLayer('testrepo', repo_linklayer, repository,
                                '127.0.0.1', forwarder_port),
            BasicPacketEncodingLayer(repo_encoder), repo_linklayer
        ])

        # Set up fetch client
        client_chunkifyer = SimpleContentChunkifyer()
        client_chunklayer = BasicChunkLayer(client_chunkifyer)
        client_encoder = NdnTlvEncoder()
        client_interface = UDP4Interface(0)
        client_fidtable = synced_data_struct_factory.manager.faceidtable()
        client_linklayer = BasicLinkLayer([client_interface], client_fidtable)
        self.client = LayerStack([
            client_chunklayer,
            AutoconfigClientLayer(client_linklayer, bcport=forwarder_port),
            BasicPacketEncodingLayer(client_encoder), client_linklayer
        ])
Пример #9
0
class DataOffloadingChunklayer(LayerProcess):
    """This Chunklayer handles interrupted data uploads by asking neighbouring nodes for available content."""
    def __init__(self,
                 cs: BaseContentStore,
                 pit: BasePendingInterestTable,
                 fib: BaseForwardingInformationBase,
                 chunkifyer: BaseChunkifyer = None,
                 chunk_size: int = 4096,
                 num_of_forwards: int = 1,
                 prefix: str = "car",
                 log_level: int = 255):
        """Initialize the offloading chunk layer and its shared tables.

        :param cs: content store shared with the ICN layer
        :param pit: pending interest table shared with the ICN layer
        :param fib: forwarding information base shared with the ICN layer
        :param chunkifyer: chunking strategy; a SimpleContentChunkifyer with
            ``chunk_size`` is created when None
        :param chunk_size: payload size threshold above which content is chunked
        :param num_of_forwards: hop budget encoded into the CA/CL suffix of
            neighbour request names
        :param prefix: name prefix marking interests eligible for neighbour
            offloading (checked in data_from_higher)
        :param log_level: log level passed to the LayerProcess base
        """
        super().__init__("ChunkLayer", log_level=log_level)
        self.cs = cs
        self.pit = pit
        self.fib = fib
        self.chunk_size = chunk_size
        if chunkifyer is None:
            self.chunkifyer = SimpleContentChunkifyer(chunk_size)
        else:
            self.chunkifyer: BaseChunkifyer = chunkifyer
        self.num_of_forwards = num_of_forwards
        self.prefix = prefix

        # Manager-backed containers keep the tables consistent across the
        # processes spawned by the layer stack.
        manager = multiprocessing.Manager()
        # chunk/metadata name -> (object, insertion timestamp); quoted because
        # a bare tuple is not a valid typing parameter.
        self._chunk_table: "Dict[Name, Tuple[Content, float]]" = manager.dict()
        self._request_table: List[RequestTableEntry] = manager.list()
        self._ca_table: Dict[Name, CaEntry] = manager.dict()
        # content name -> neighbour that supplied it (for follow-up interests)
        self.recipient_cl: Dict[Name, Name] = manager.dict()
        self.pass_through = False

    def data_from_higher(self, to_lower: multiprocessing.Queue,
                         to_higher: multiprocessing.Queue, data):
        """Handle a packet arriving from the layer above.

        ``data`` is ``[faceid, packet]``. Interests are recorded in the
        request/CA tables and, for offloading-eligible names, trigger CA/CL
        queries to both neighbours before being forwarded down. Content
        larger than ``chunk_size`` is chunked; Nacks clear the matching
        request entry and are passed down unchanged.
        """
        self.logger.info("Got Data from higher")
        faceid = data[0]
        packet = data[1]

        if isinstance(packet, Interest):
            self.logger.info("Packet is Interest " + str(packet.name))
            request_entry = self.get_request_entry(packet.name)
            if request_entry is None:
                self._request_table.append(RequestTableEntry(packet.name))
                self._ca_table[packet.name] = CaEntry()

            # If the interest starts with /car and not ends with NFN, request metadata and available chunks from neighbours
            components = packet.name.string_components
            if self.prefix in components[
                    0] and components[-1] != "NFN" and not request_entry:
                self.pass_through = False
                ca_entry = self._ca_table.get(packet.name)
                # nL/nR presumably address the left/right neighbour; the CA/CL
                # suffix carries the remaining hop budget — TODO confirm.
                name1 = Name("/nL") + packet.name + f"CA{self.num_of_forwards}"
                name2 = Name("/nR") + packet.name + f"CA{self.num_of_forwards}"
                name3 = Name("/nL") + packet.name + f"CL{self.num_of_forwards}"
                name4 = Name("/nR") + packet.name + f"CL{self.num_of_forwards}"
                # Each neighbour query is only sent when it is not already
                # pending (PIT) and a route for it exists (FIB).
                if not self.pit.find_pit_entry(
                        name1) and self.fib.find_fib_entry(name1):
                    ca_entry.answer_L = False
                    ca_entry.received_all = False
                    to_lower.put([faceid, Interest(name1)])
                if not self.pit.find_pit_entry(
                        name2) and self.fib.find_fib_entry(name2):
                    ca_entry.answer_R = False
                    ca_entry.received_all = False
                    to_lower.put([faceid, Interest(name2)])
                if not self.pit.find_pit_entry(
                        name3) and self.fib.find_fib_entry(name3):
                    to_lower.put([faceid, Interest(name3)])
                if not self.pit.find_pit_entry(
                        name4) and self.fib.find_fib_entry(name4):
                    to_lower.put([faceid, Interest(name4)])
                self._ca_table[packet.name] = ca_entry

            # The original interest always continues downwards.
            to_lower.put(data)
            return

        elif isinstance(packet, Content):
            self.logger.info("Packet is Content (name=%s, %d bytes)" %
                             (str(packet.name), len(packet.content)))
            if len(packet.content) < self.chunk_size:
                # Small enough to send as-is.
                to_lower.put(data)
            else:
                self.logger.info("Chunking Packet")
                metadata, chunks = self.chunkifyer.chunk_data(
                    packet)  # Create metadata and chunks
                self.logger.info("Metadata: " + metadata[0].content)
                to_lower.put([faceid, metadata[0]
                              ])  # Return name of first metadata object
                for md in metadata:  # Add metadata to chunktable
                    if md.name not in self._chunk_table:
                        self._chunk_table[md.name] = (md, time.time())
                for c in chunks:  # Add chunks to chunktable
                    if c.name not in self._chunk_table:
                        self._chunk_table[c.name] = (c, time.time())

        elif isinstance(packet, Nack):
            # Abandon the request and propagate the Nack downwards.
            request_entry = self.get_request_entry(packet.name)
            if request_entry is not None:
                self._request_table.remove(request_entry)
            to_lower.put(data)

    def data_from_lower(self, to_lower: multiprocessing.Queue,
                        to_higher: multiprocessing.Queue, data):
        """Handle a packet arriving from the layer below.

        ``data`` is ``[faceid, packet]``. CA/CL-suffixed names implement the
        neighbour-offloading protocol: nodes that are not the requester
        relay them with an adjusted hop counter, the requesting node unpacks
        them and updates its CA table. Everything else is answered from the
        chunk table or handed to the layer above.
        """
        self.logger.info("Got Data from lower")
        faceid = data[0]
        packet = data[1]
        components = packet.name.components
        string_components = packet.name.string_components
        last = components[-1]

        if isinstance(packet, Interest):
            self.logger.info("Packet is Interest")

            # Interest for available chunks
            if "CA" in string_components[-1]:
                self.pass_through = True  # This node doesn't handle CA content, only forwards to neighbours
                chunks_available = self.get_chunks_available(packet)
                if last == b"CA0" or isinstance(
                        chunks_available,
                        Content):  # If there are chunks available, return them
                    to_lower.put([faceid, chunks_available])
                else:  # Otherwise try to pass on interest to neighbour
                    to_lower.put(
                        [faceid,
                         Interest(self.decrease_name(packet.name))])

            # General Interest passed on to chunklayer
            elif "CL" in string_components[-1]:
                matching_content = self.get_matching_content_from_packed_name(
                    packet)
                if last == b"CL0" or isinstance(
                        matching_content,
                        Content):  # If there is matching content, return it
                    to_lower.put([faceid, matching_content])
                else:  # Otherwise try to pass on to neighbour
                    to_lower.put(
                        [faceid,
                         Interest(self.decrease_name(packet.name))])

            elif packet.name in self._chunk_table:
                # We hold this chunk/metadata locally --> answer directly.
                matching_content = self._chunk_table.get(packet.name)[0]
                to_lower.put([faceid, matching_content])
            else:
                to_higher.put(data)

        elif isinstance(packet, Content):
            self.logger.info("Packet is Content")
            ca_content = False
            cl_content = False

            # Metadata or string containing available chunks from neighbour
            if "CA" in string_components[-1]:
                self.cs.remove_content_object(
                    packet.name
                )  # Remove from cs, so next interest comes through to chunklayer
                if string_components[
                        -1] == f"CA{self.num_of_forwards}":  # This is the requesting node --> unpack
                    ca_content = True
                    ca_entry = self._ca_table.get(self.unpack(packet.name))
                    # In order to be able to request the available chunks from the sender of this packet,
                    # we need to save the first component.
                    # This is then used in pack_ca() to send packet to the correct recipient.
                    ca_entry.recipient = Name(components[:1])
                    # Only replace existing list of available chunks if it contains more chunks
                    self.save_if_longest(packet, ca_entry)
                    if components[0] == b"nL":
                        ca_entry.answer_L = True
                    else:
                        ca_entry.answer_R = True
                    if ca_entry.answer_L and ca_entry.answer_R:  # We have a response from both neighbours and can proceed
                        packet = ca_entry.ca
                        self._request_table.append(
                            RequestTableEntry(packet.name))
                    # NOTE(review): after `packet = ca_entry.ca` above, this
                    # re-keys on the unpacked *replacement* name — confirm it
                    # still matches the original table key.
                    self._ca_table[self.unpack(packet.name)] = ca_entry
                else:  # This is not the requesting node --> pass on to neighbour
                    to_lower.put([
                        faceid,
                        Content(self.increase_name(packet.name),
                                packet.content)
                    ])
                    return

            # Content from the chunklayer of a neighbouring node
            elif "CL" in string_components[-1]:
                if string_components[
                        -1] == f"CL{self.num_of_forwards}":  # This is the requesting node --> unpack
                    cl_content = True
                    packet.name = self.unpack(packet.name)
                    # Save the sender of this packet as the recipient for further interests. Used in pack_cl()
                    self.recipient_cl[packet.name] = Name(components[:1])
                else:  # This is not the requesting node --> pass on to neighbour
                    to_lower.put([
                        faceid,
                        Content(self.increase_name(packet.name),
                                packet.content)
                    ])
                    return

            request_entry = self.get_request_entry(packet.name)
            if request_entry is None:
                return
            self._request_table.remove(request_entry)
            # NOTE(review): assumes the name has at least two components here.
            if "CA" in components[-2].decode("utf-8"):
                if self.pass_through:
                    return
                ca_content = True
                request_entry.chunked = True
            self.handle_content(faceid, packet, request_entry, ca_content,
                                cl_content, to_lower, to_higher)

        elif isinstance(packet, Nack):
            if self.prefix not in string_components[0]:
                request_entry = self.get_request_entry(packet.name)
                if request_entry:
                    self._request_table.remove(request_entry)
                if "CA" in string_components[-1]:
                    if string_components[
                            -1] == f"CA{self.num_of_forwards}":  # This is the requesting node
                        unpacked = self.unpack(packet.name)
                        ca_entry = self._ca_table.get(unpacked)
                        if components[0] == b"nL":
                            ca_entry.answer_L = True
                        else:
                            ca_entry.answer_R = True
                        if ca_entry.answer_L and ca_entry.answer_R:  # We have an answer from both neighbours
                            if ca_entry.ca:  # We have chunks available from one of the neighbours
                                packet = ca_entry.ca
                                self._ca_table[unpacked] = ca_entry
                                self.handle_content(
                                    faceid, packet,
                                    RequestTableEntry(packet.name), True,
                                    False, to_lower, to_higher)
                                return
                            else:
                                # Neither neighbour has chunks --> request
                                # everything ourselves.
                                ca_entry.received_all = True
                                request_entry = self.get_request_entry(
                                    unpacked)
                                if request_entry and not request_entry.requested_md and not self.pass_through:
                                    self.create_chunk_interests(
                                        faceid, request_entry, ca_entry,
                                        to_lower)

                        self._ca_table[unpacked] = ca_entry
                    else:  # This is not the requesting node --> pass on to neighbour
                        name = self.increase_name(packet.name)
                        nack = Nack(name, NackReason.NO_CONTENT,
                                    Interest(name))
                        to_lower.put([faceid, nack])
                elif "CL" in string_components[-1]:
                    if string_components[
                            -1] != f"CL{self.num_of_forwards}":  # This is not the requesting node --> pass on to neighbour
                        name = self.increase_name(packet.name)
                        nack = Nack(name, NackReason.NO_CONTENT,
                                    Interest(name))
                        to_lower.put([faceid, nack])
                else:
                    to_higher.put([faceid, packet])
                self.pit.remove_pit_entry(packet.name)
            else:
                if "c" in string_components[-1]:
                    # Nack for a single chunk: strip the chunk component and
                    # nack the base name upwards.
                    packet.name.components = components[:-1]
                    to_higher.put([
                        faceid,
                        Nack(packet.name, NackReason.NO_CONTENT,
                             Interest(packet.name))
                    ])
                else:  #FIXME What to do here?
                    # to_higher.put([faceid, packet])
                    pass

    def handle_content(self, faceid: int, packet: Content,
                       request_entry: RequestTableEntry, ca_content: bool,
                       cl_content: bool, to_lower: multiprocessing.Queue,
                       to_higher: multiprocessing.Queue):
        """Dispatch incoming content.

        Unchunked plain content is delivered upwards (or handled as CA data);
        a 'mdo:' payload marks the request as chunked and is processed as
        metadata; everything else is treated as chunk data.
        """
        if request_entry.chunked is False:
            if packet.get_bytes().startswith(b'mdo:'):
                # First metadata object for this request --> now chunked.
                request_entry.chunked = True
            elif ca_content:
                self.handle_ca(faceid, packet, to_lower)
                return
            else:
                # Ordinary unchunked content goes straight to the layer above.
                to_higher.put([faceid, packet])
                return
        if packet.get_bytes().startswith(b'mdo:'):
            # Metadata --> request the frames it lists.
            self.handle_received_meta_data(faceid, packet, request_entry,
                                           to_lower, ca_content, cl_content)
        else:
            self.handle_received_chunk_data(faceid, packet, request_entry,
                                            to_lower, to_higher, ca_content)

    def handle_received_meta_data(self, faceid: int, packet: Content,
                                  request_entry: RequestTableEntry,
                                  to_lower: multiprocessing.Queue,
                                  ca_content: bool, cl_content: bool):
        """
        Handle an incoming metadata object.

        Collects the chunk names listed in the metadata, follows the chain of
        linked metadata objects, and once the chain is complete triggers the
        chunk interests. The packet is cached in the chunk table and the
        updated entry is re-appended to the shared request table.

        :param faceid: face on which the metadata arrived
        :param packet: the received metadata content object
        :param request_entry: request table entry this metadata belongs to
        :param to_lower: queue towards the lower layer
        :param ca_content: True if this metadata answers a chunks-available request
        :param cl_content: True if follow-up interests must be packed via pack_cl()
        """
        if not request_entry.md_complete:
            if packet.name in request_entry.requested_md:
                request_entry.requested_md.remove(packet.name)
            md, chunks, size = self.chunkifyer.parse_meta_data(packet.content)
            for chunk in chunks:  # Request all chunks from the meta data file if not already received or requested
                if chunk not in request_entry.requested_chunks and chunk not in [
                        i.name for i in request_entry.chunks
                ]:
                    request_entry.requested_chunks.append(chunk)
            if md is not None:  # There is another md file
                if md not in request_entry.requested_md:
                    request_entry.requested_md.append(md)
                    if cl_content:
                        # Pack the name so it is routed to the right recipient.
                        md = self.pack_cl(md)
                    to_lower.put([faceid, Interest(md)])
            else:
                # Only create interests if it is the requesting node handling this metadata and
                # either the packet is CA content or there is no CA content available
                request_entry.md_complete = True
                if not self.pass_through:
                    if ca_content:
                        self.create_chunk_interests(faceid, request_entry,
                                                    CaEntry(), to_lower)
                    elif self._ca_table.get(
                            request_entry.name
                    ).received_all:  # We have an answer from both neighbours
                        self.create_chunk_interests(
                            faceid, request_entry,
                            self._ca_table.get(request_entry.name), to_lower)

        # Cache the metadata and re-insert the updated entry into the shared
        # (manager-backed) request table; callers removed it before handling.
        self._chunk_table[packet.name] = (packet, time.time())
        self._request_table.append(request_entry)

    def handle_received_chunk_data(self, faceid: int, packet: Content,
                                   request_entry: RequestTableEntry,
                                   to_lower: multiprocessing.Queue,
                                   to_higher: multiprocessing.Queue,
                                   ca_content: bool):
        """
        Handle an incoming data chunk.

        Records the chunk and, once no chunks or metadata remain outstanding,
        reassembles the complete content object. The result is either handled
        as a chunks-available answer (ca_content) or delivered upward.
        """
        if packet.name in request_entry.requested_chunks:
            request_entry.requested_chunks.remove(packet.name)
            request_entry.chunks.append(packet)
        self._chunk_table[packet.name] = (packet, time.time())
        if not request_entry.requested_chunks:
            if not request_entry.requested_md:  # All chunks are available
                data = request_entry.chunks
                # Sort chunks by the number embedded in the last name
                # component (e.g. "c17") before reassembly.
                data = sorted(data,
                              key=lambda content: int(''.join(
                                  filter(str.isdigit, content.name.
                                         string_components[-1]))))
                cont = self.chunkifyer.reassamble_data(request_entry.name,
                                                       data)
                if ca_content:
                    self.handle_ca(faceid, cont, to_lower)
                else:
                    # Request finished: drop the CA bookkeeping and deliver.
                    del self._ca_table[request_entry.name]
                    to_higher.put([faceid, cont])
                # Entry is intentionally not re-appended: the request is done.
                return

        self._request_table.append(request_entry)

    def handle_ca(self, faceid: int, content: Content,
                  to_lower: multiprocessing.Queue):
        """
        Unpack the received chunks-available (CA) message and create interests
        for all chunks the neighbour offers; remaining chunks are requested
        from the original source.
        """
        content.name = self.unpack(content.name)
        ca_entry = self._ca_table.get(content.name)
        ca_entry.received_all = True

        # CA payload is a ";"-separated list of chunk names, or "complete;".
        chunks_str = content.content.split(";")

        request_entry = self.get_request_entry(content.name)
        if request_entry and not self.pass_through:
            if chunks_str[0] == "complete":  # Neighbour has complete data
                ca_entry.completely_available = True
                if request_entry.md_complete:
                    self.create_chunk_interests(faceid, request_entry,
                                                ca_entry, to_lower)
            else:
                ca_entry.chunks = [Name(chunk) for chunk in chunks_str]
                # Create interests for all chunks that are available from neighbour
                for chunk in ca_entry.chunks:
                    # NOTE(review): Logger.info is given two positional args;
                    # confirm the PiCN Logger supports this call style.
                    self.logger.info("TO NEIGHBOUR:",
                                     self.pack_ca(chunk, ca_entry))
                    if chunk not in request_entry.requested_chunks and chunk not in [
                            i.name for i in request_entry.chunks
                    ]:
                        request_entry.requested_chunks.append(chunk)
                    to_lower.put(
                        [faceid,
                         Interest(self.pack_ca(chunk, ca_entry))])
                self._request_table.append(request_entry)
                # If there is no name in requested_md try to request the remaining chunks.
                # This is only necessary to get a NACK, so the simulation continues.
                if not request_entry.requested_md:
                    for chunk in [
                            i for i in request_entry.requested_chunks
                            if i not in ca_entry.chunks
                    ]:
                        self.logger.info("TO ORIGINAL SOURCE:", chunk)
                        to_lower.put([faceid, Interest(chunk)])
                # NOTE(review): the entry appended above is removed again here;
                # net effect appears to be refreshing the manager-list copy —
                # confirm this is intentional.
                self._request_table.remove(request_entry)
            self._ca_table[content.name] = ca_entry

    def get_request_entry(self, name: Name):
        """
        Check if a name is in the request table.
        Return entry or None.
        """
        for entry in self._request_table:
            if entry.name == name or name in entry.requested_chunks or name in entry.requested_md:
                return entry
        return None

    def get_chunks_available(self, packet: Packet):
        """
        Determine which chunks of the requested name this node can offer.

        Returns a content object listing the available chunk names (chunked
        into metadata if the listing exceeds the chunk size), or a NACK when
        nothing is available.
        """
        name = self.unpack(packet.name)
        request_entry = self.get_request_entry(name)
        cs_entry = self.cs.find_content_object(name)

        available = []
        if request_entry is not None:
            # Offer every chunk collected so far for this request.
            available = [str(chunk.name) for chunk in request_entry.chunks]
        elif cs_entry:
            # The complete object sits in the content store.
            available.append("complete;")

        if not available:
            return Nack(packet.name, NackReason.NO_CONTENT, packet)

        listing = Content(packet.name, ";".join(available))
        if len(listing.content) <= self.chunk_size:
            return listing
        # Listing too large for a single packet: chunk it, store every piece
        # in the content store and return the first metadata object.
        meta_data, chunks = self.chunkifyer.chunk_data(listing)
        meta_data.extend(chunks)
        for data in meta_data:
            self.cs.remove_content_object(data.name)
            self.cs.add_content_object(data)
        return meta_data[0]

    def get_matching_content_from_packed_name(self, packet: Packet):
        """Return either the content matching the unpacked name or NACK"""
        name_in = self.unpack(packet.name)
        if name_in in self._chunk_table:
            matching_content = self._chunk_table.get(name_in)[0]
            matching_content.name = packet.name
            return matching_content
        else:
            return Nack(packet.name, NackReason.NO_CONTENT, packet)

    def create_chunk_interests(self, faceid: int,
                               request_entry: RequestTableEntry,
                               ca_entry: CaEntry,
                               to_lower: multiprocessing.Queue):
        """
        Create interests for all chunks in requested_chunks of the specified
        request table entry.

        If the neighbour announced complete data, every outstanding chunk is
        requested from it (name packed with the CA recipient); otherwise only
        the chunks the neighbour does not offer are requested from the
        original source.

        :param faceid: face to send the interests on
        :param request_entry: entry whose outstanding chunks are requested
        :param ca_entry: chunks-available information of the neighbour
        :param to_lower: queue towards the lower layer
        """
        if ca_entry.completely_available:
            # Iterate over a copy: the shared entry may be mutated elsewhere.
            # (The previous membership re-check here was dead code: each chunk
            # is drawn from requested_chunks, so it was always a member.)
            for chunk in list(request_entry.requested_chunks):
                self.logger.info("TO NEIGHBOUR:",
                                 self.pack_ca(chunk, ca_entry))
                to_lower.put([faceid, Interest(self.pack_ca(chunk, ca_entry))])
        else:
            # Request everything the neighbour cannot provide from the source.
            for chunk in [
                    i for i in request_entry.requested_chunks
                    if i not in ca_entry.chunks
            ]:
                self.logger.info("TO ORIGINAL SOURCE:", chunk)
                to_lower.put([faceid, Interest(chunk)])

    def save_if_longest(self, packet: Content, ca_entry: CaEntry):
        """
        Check if the received ca content is longer than the existing one and if so, replace it.
        In the case where both neighbours have chunks available, we want to send the interests only to the one
        which has more.
        """
        if packet.get_bytes().startswith(
                b'mdo:'):  # Content is metadata, read size from metadata
            _, _, content_size = self.chunkifyer.parse_meta_data(
                packet.content)
        else:  # Content is string, size equals length of the string
            content_size = (len(packet.content))
        content_size = int(content_size)
        if content_size > ca_entry.size:
            ca_entry.ca = packet
            ca_entry.size = content_size

    def pack_ca(self, name: Name, ca_entry: CaEntry) -> Name:
        """Prepend the recipient, append "CL" and the number of forwards."""
        return ca_entry.recipient + name + f"CL{self.num_of_forwards}"

    def pack_cl(self, name: Name) -> Name:
        """Prepend the recipient, append "CL" and the number of forwards."""
        # Look up the recipient under the name without its last component
        # (the trailing marker is not part of the recorded lookup key).
        lookup_name = Name(name.components[:-1])
        return self.recipient_cl.get(
            lookup_name) + name + f"CL{self.num_of_forwards}"

    def unpack(self, name: Name) -> Name:
        """Strip the recipient prefix (first component) and the trailing CA/CL marker (last component)."""
        return Name(name.components[1:-1])

    def increase_name(self, name: Name) -> Name:
        """
        Increase the number at the end of the name.
        The number is used to determine whether or not a packet gets forwarded to the next neighbour.
        """
        components = name.components
        last = components[-1].decode("utf-8")
        i = int(''.join(filter(str.isdigit, last)))
        if "CA" in last:
            return Name(components[:-1]) + f"CA{i+1}"
        elif "CL" in last:
            return Name(components[:-1]) + f"CL{i+1}"
        return name

    def decrease_name(self, name: Name) -> Name:
        """
        Decrease the number at the end of the name.
        The number is used to determine whether or not a packet gets forwarded to the next neighbour.
        """
        components = name.components
        last = components[-1].decode("utf-8")
        i = int(''.join(filter(str.isdigit, last)))
        if "CA" in last:
            return Name(components[:-1]) + f"CA{i-1}"
        elif "CL" in last:
            return Name(components[:-1]) + f"CL{i-1}"
        return name

    def set_number_of_forwards(self, number_of_forwards: int):
        """Set how many hops packed CA/CL names are forwarded to neighbours."""
        self.num_of_forwards = number_of_forwards
Пример #10
0
    def __init__(self,
                 port=9000,
                 log_level=255,
                 encoder: BasicEncoder = None,
                 interfaces: List[BaseInterface] = None,
                 executors: BaseNFNExecutor = None,
                 ageing_interval: int = 3,
                 use_thunks=False):
        """
        Assemble a complete NFN forwarder: shared data structures, the layer
        stack and the management interface.

        :param port: UDP port to listen on (ignored when interfaces are given)
        :param log_level: log level passed to every layer
        :param encoder: packet encoder; defaults to SimpleStringEncoder
        :param interfaces: pre-built interfaces; defaults to one UDP4Interface
        :param executors: NFN executors keyed by language tag; defaults to PYTHON
        :param ageing_interval: ageing interval for the ICN layer
        :param use_thunks: enable the thunk layer for planned computations
        """
        # debug level
        logger = Logger("NFNForwarder", log_level)
        logger.info("Start PiCN NFN Forwarder on port " + str(port))

        # packet encoder
        if encoder is None:
            self.encoder = SimpleStringEncoder(log_level=log_level)
        else:
            encoder.set_log_level(log_level)
            self.encoder = encoder

        # setup data structures, shared between the layer processes
        synced_data_struct_factory = PiCNSyncDataStructFactory()
        synced_data_struct_factory.register("cs", ContentStoreMemoryExact)
        synced_data_struct_factory.register(
            "fib", ForwardingInformationBaseMemoryPrefix)
        synced_data_struct_factory.register("pit",
                                            PendingInterstTableMemoryExact)
        synced_data_struct_factory.register("faceidtable", FaceIDDict)

        synced_data_struct_factory.register("computation_table",
                                            NFNComputationList)
        synced_data_struct_factory.register("timeoutprevention_dict",
                                            TimeoutPreventionMessageDict)
        if use_thunks:
            synced_data_struct_factory.register("thunktable", ThunkList)
            synced_data_struct_factory.register("plantable", PlanTable)

        synced_data_struct_factory.create_manager()

        cs = synced_data_struct_factory.manager.cs()
        fib = synced_data_struct_factory.manager.fib()
        pit = synced_data_struct_factory.manager.pit()
        faceidtable = synced_data_struct_factory.manager.faceidtable()

        self.parser = DefaultNFNParser()
        if use_thunks:
            thunktable = synced_data_struct_factory.manager.thunktable()
            plantable = synced_data_struct_factory.manager.plantable(
                self.parser)

        # setup chunkifier
        self.chunkifier = SimpleContentChunkifyer()

        # default interface
        # NOTE(review): self.interfaces is only assigned in the first branch;
        # with the default UDP4 interface only the local variable is set —
        # confirm whether that is intentional.
        if interfaces is not None:
            self.interfaces = interfaces
            mgmt_port = port
        else:
            interfaces = [UDP4Interface(port)]
            mgmt_port = interfaces[0].get_port()

        # initialize layers
        self.linklayer = BasicLinkLayer(interfaces,
                                        faceidtable,
                                        log_level=log_level)
        self.packetencodinglayer = BasicPacketEncodingLayer(
            self.encoder, log_level=log_level)
        self.icnlayer = BasicICNLayer(log_level=log_level,
                                      ageing_interval=ageing_interval)
        self.chunklayer = BasicChunkLayer(self.chunkifier, log_level=log_level)

        # setup nfn: route interests to the computation layer
        self.icnlayer._interest_to_app = True
        if executors is None:
            self.executors = {"PYTHON": NFNPythonExecutor()}
        else:
            self.executors = executors
        self.r2cclient = TimeoutR2CHandler()
        comp_table = synced_data_struct_factory.manager.computation_table(
            self.r2cclient, self.parser)
        self.nfnlayer = BasicNFNLayer(cs,
                                      fib,
                                      pit,
                                      faceidtable,
                                      comp_table,
                                      self.executors,
                                      self.parser,
                                      self.r2cclient,
                                      log_level=log_level)
        if use_thunks:
            self.thunk_layer = BasicThunkLayer(cs,
                                               fib,
                                               pit,
                                               faceidtable,
                                               thunktable,
                                               plantable,
                                               self.parser,
                                               log_level=log_level)
            self.nfnlayer.optimizer = ThunkPlanExecutor(
                cs, fib, pit, faceidtable, plantable)

        timeoutprevention_dict = synced_data_struct_factory.manager.timeoutprevention_dict(
        )
        self.timeoutpreventionlayer = BasicTimeoutPreventionLayer(
            timeoutprevention_dict, comp_table, pit=pit, log_level=log_level)

        # stack the layers top (NFN) to bottom (link layer)
        if use_thunks:
            self.lstack: LayerStack = LayerStack([
                self.nfnlayer, self.chunklayer, self.timeoutpreventionlayer,
                self.thunk_layer, self.icnlayer, self.packetencodinglayer,
                self.linklayer
            ])
        else:
            self.lstack: LayerStack = LayerStack([
                self.nfnlayer, self.chunklayer, self.timeoutpreventionlayer,
                self.icnlayer, self.packetencodinglayer, self.linklayer
            ])

        self.icnlayer.cs = cs
        self.icnlayer.fib = fib
        self.icnlayer.pit = pit

        # mgmt: management interface for faces, rules and content
        self.mgmt = Mgmt(self.icnlayer.cs,
                         self.icnlayer.fib,
                         self.icnlayer.pit,
                         self.linklayer,
                         mgmt_port,
                         self.stop_forwarder,
                         log_level=log_level)
Пример #11
0
class DetectionMapSimulation():
    """Run a simple Data Offloading Scenario Simulation"""
    def setUp(self):
        """Create two cars, three RSUs, their fetch tools and the simulation bus."""
        self.encoder_type = SimpleStringEncoder()
        self.simulation_bus = SimulationBus(packetencoder=self.encoder_type)
        chunk_size = 8192
        self.chunkifyer = SimpleContentChunkifyer(chunk_size)

        # Initialize two cars
        self.cars = []
        self.fetch_tool_cars = []
        self.mgmt_client_cars = []
        for i in range(2):
            self.cars.append(
                ICNForwarder(
                    0,
                    encoder=self.encoder_type,
                    routing=True,
                    interfaces=[self.simulation_bus.add_interface(f"car{i}")]))
            self.fetch_tool_cars.append(
                Fetch(f"car{i}",
                      None,
                      255,
                      self.encoder_type,
                      interfaces=[
                          self.simulation_bus.add_interface(f"ftcar{i}")
                      ]))
            self.mgmt_client_cars.append(
                MgmtClient(self.cars[i].mgmt.mgmt_sock.getsockname()[1]))
            self.cars[i].icnlayer.cs.set_cs_timeout(40)

        # Initialize RSUs (road-side units running NFN)
        self.rsus = []
        self.fetch_tools = []
        self.mgmt_clients = []
        for i in range(3):
            self.rsus.append(
                NFNForwarderData(
                    0,
                    encoder=self.encoder_type,
                    interfaces=[self.simulation_bus.add_interface(f"rsu{i}")],
                    chunk_size=chunk_size,
                    num_of_forwards=1,
                    ageing_interval=10))
            self.fetch_tools.append(
                Fetch(f"rsu{i}",
                      None,
                      255,
                      self.encoder_type,
                      interfaces=[self.simulation_bus.add_interface(f"ft{i}")
                                  ]))
            # Run computations on the RSU itself (edge computing).
            self.rsus[i].nfnlayer.optimizer = EdgeComputingOptimizer(
                self.rsus[i].icnlayer.cs, self.rsus[i].icnlayer.fib,
                self.rsus[i].icnlayer.pit, self.rsus[i].linklayer.faceidtable)
            self.mgmt_clients.append(
                MgmtClient(self.rsus[i].mgmt.mgmt_sock.getsockname()[1]))
            self.fetch_tools[i].timeoutpreventionlayer.timeout_interval = 40
            self.rsus[i].icnlayer.cs.set_cs_timeout(60)

    def tearDown(self):
        """Stop every forwarder, fetch tool and the simulation bus."""
        for car in self.cars:
            car.stop_forwarder()

        for fetch_tool_car in self.fetch_tool_cars:
            fetch_tool_car.stop_fetch()

        for rsu in self.rsus:
            rsu.stop_forwarder()

        for fetch_tool in self.fetch_tools:
            fetch_tool.stop_fetch()

        self.simulation_bus.stop_process()

    def setup_faces_and_connections(self):
        """Start all nodes, wire faces/forwarding rules and preload content."""
        for car in self.cars:
            car.start_forwarder()

        for rsu in self.rsus:
            rsu.start_forwarder()

        self.simulation_bus.start_process()

        function1 = "PYTHON\nf\ndef f(a, b, c):\n return detection_map(a, b, c)"
        function2 = "PYTHON\nf\ndef f(a, b, c):\n return detection_map_2(a, b, c)"

        # Setup rsu0 (leftmost; /nR points to its right neighbour rsu1)
        self.mgmt_clients[0].add_face("car0", None, 0)
        self.mgmt_clients[0].add_face("car1", None, 0)
        self.mgmt_clients[0].add_face("rsu1", None, 0)
        self.mgmt_clients[0].add_forwarding_rule(Name("/car0"), [0])
        self.mgmt_clients[0].add_forwarding_rule(Name("/car1"), [1])
        self.mgmt_clients[0].add_forwarding_rule(Name("/nR"), [2])
        self.mgmt_clients[0].add_new_content(Name("/rsu/func/f1"), function1)
        self.mgmt_clients[0].add_new_content(Name("/rsu/func/f2"), function2)

        # Setup rsu1 (middle; /nL = rsu0, /nR = rsu2)
        self.mgmt_clients[1].add_face("car0", None, 0)
        self.mgmt_clients[1].add_face("car1", None, 0)
        self.mgmt_clients[1].add_face("rsu0", None, 0)
        self.mgmt_clients[1].add_face("rsu2", None, 0)
        self.mgmt_clients[1].add_forwarding_rule(Name("/car0"), [0])
        self.mgmt_clients[1].add_forwarding_rule(Name("/car1"), [1])
        self.mgmt_clients[1].add_forwarding_rule(Name("/nL"), [2])
        self.mgmt_clients[1].add_forwarding_rule(Name("/nR"), [3])
        self.mgmt_clients[1].add_new_content(Name("/rsu/func/f1"), function1)
        self.mgmt_clients[1].add_new_content(Name("/rsu/func/f2"), function2)

        # Setup rsu2 (rightmost; /nL = rsu1)
        self.mgmt_clients[2].add_face("car0", None, 0)
        self.mgmt_clients[2].add_face("car1", None, 0)
        self.mgmt_clients[2].add_face("rsu1", None, 0)
        self.mgmt_clients[2].add_forwarding_rule(Name("/car0"), [0])
        self.mgmt_clients[2].add_forwarding_rule(Name("/car1"), [1])
        self.mgmt_clients[2].add_forwarding_rule(Name("/nL"), [2])

        # Setup car0
        self.mgmt_client_cars[0].add_face("rsu0", None, 0)
        self.mgmt_client_cars[0].add_face("rsu1", None, 0)
        self.mgmt_client_cars[0].add_face("rsu2", None, 0)
        self.mgmt_client_cars[0].add_forwarding_rule(Name("/rsu"), [0])

        # Setup car1
        self.mgmt_client_cars[1].add_face("rsu0", None, 0)
        self.mgmt_client_cars[1].add_face("rsu1", None, 0)
        self.mgmt_client_cars[1].add_face("rsu2", None, 0)
        self.mgmt_client_cars[1].add_forwarding_rule(Name("/rsu"), [0])

        # Add image to content store of car0
        # Streetview 1
        # image_path = os.path.join(ROOT_DIR, "Demos/DetectionMap/Assets/street1.jpg")
        # image_path = os.path.join(ROOT_DIR, "Demos/DetectionMap/Assets/image1_small.jpg")
        image_path = os.path.join(ROOT_DIR,
                                  "Demos/DetectionMap/Assets/image8.jpg")
        base64_image = Helper.pre_process(image_path, 2048, 2, ".jpg")
        # map_det_obj0 = DetectionMapObject(base64_image, 47.566192, 7.590686, 65, 0.4, 1.8)
        # map_det_obj0 = DetectionMapObject(base64_image, 47.377391, 8.539347, 294, 0.8)
        map_det_obj0 = DetectionMapObject(base64_image, 47.369645, 8.539652,
                                          300, 0.7, 0.8)
        image = Content(Name("/car0/image"), map_det_obj0.to_string())
        self.meta_data0, self.data0 = self.chunkifyer.chunk_data(image)

        for md in self.meta_data0:
            self.cars[0].icnlayer.cs.add_content_object(md)

        # Only the first 10 chunks: car0 later "drives away" mid-upload.
        for d in self.data0[:10]:
            self.cars[0].icnlayer.cs.add_content_object(d)

        # Add image to content store of car1
        # Streetview 2
        image_path = os.path.join(ROOT_DIR,
                                  "Demos/DetectionMap/Assets/street2.jpg")
        base64_image = Helper.pre_process(image_path, 1024, 2, ".jpg")
        map_det_obj1 = DetectionMapObject(base64_image, 47.566463, 7.590982,
                                          345, 0.4, 1.8)
        image = Content(Name("/car1/image"), map_det_obj1.to_string())
        self.meta_data1, self.data1 = self.chunkifyer.chunk_data(image)

        for md in self.meta_data1:
            self.cars[1].icnlayer.cs.add_content_object(md)

        for d in self.data1:
            self.cars[1].icnlayer.cs.add_content_object(d)

    def test_map_detection_1_image(self):
        """Fetch the detection-map computation from rsu0, then from rsu1 after car0 'moved'."""
        self.setup_faces_and_connections()

        name = Name("/rsu/func/f1")
        name += "_(/car0/image,1,0)"
        name += "NFN"

        print("RSU 0 FETCHING")
        result = self.fetch_tools[0].fetch_data(name, 360)
        print(result)

        sleep(2)
        print("\n" * 20 + "RSU 1 FETCHING")

        # Swap the available chunk range to simulate the car moving on:
        # the remaining chunks appear, the first ones disappear.
        for d in self.data0[10:]:
            self.cars[0].icnlayer.cs.add_content_object(d)
        for d in self.data0[:10]:
            self.cars[0].icnlayer.cs.remove_content_object(d.name)
        # for md in self.meta_data0:
        #     self.cars[0].icnlayer.cs.remove_content_object(md.name)
        # for md in self.meta_data0:
        #     self.cars[0].icnlayer.cs.add_content_object(md)

        result = self.fetch_tools[1].fetch_data(name, 60)
        print(result)
Пример #12
0
class TestDisconnections(unittest.TestCase):
    """Run the data-offloading simulation"""
    def setUp(self):
        """Create one car, four RSUs, their fetch tools and the simulation bus."""
        self.encoder_type = SimpleStringEncoder()
        self.simulation_bus = SimulationBus(packetencoder=self.encoder_type)
        chunk_size = 4
        self.chunkifyer = SimpleContentChunkifyer(chunk_size)

        self.car = ICNForwarder(
            0,
            encoder=self.encoder_type,
            routing=True,
            interfaces=[self.simulation_bus.add_interface("car")])
        self.fetch_tool_car = Fetch(
            "car",
            None,
            255,
            self.encoder_type,
            interfaces=[self.simulation_bus.add_interface("ftcar")])
        self.mgmt_client_car = MgmtClient(
            self.car.mgmt.mgmt_sock.getsockname()[1])

        self.rsus = []
        self.fetch_tools = []
        self.mgmt_clients = []

        for i in range(4):
            self.rsus.append(
                NFNForwarderData(
                    0,
                    encoder=self.encoder_type,
                    interfaces=[self.simulation_bus.add_interface(f"rsu{i}")],
                    chunk_size=chunk_size,
                    num_of_forwards=0))
            self.fetch_tools.append(
                Fetch(f"rsu{i}",
                      None,
                      255,
                      self.encoder_type,
                      interfaces=[self.simulation_bus.add_interface(f"ft{i}")
                                  ]))
            # Run computations on the RSU itself (edge computing).
            self.rsus[i].nfnlayer.optimizer = EdgeComputingOptimizer(
                self.rsus[i].icnlayer.cs, self.rsus[i].icnlayer.fib,
                self.rsus[i].icnlayer.pit, self.rsus[i].linklayer.faceidtable)
            self.mgmt_clients.append(
                MgmtClient(self.rsus[i].mgmt.mgmt_sock.getsockname()[1]))
            self.fetch_tools[i].timeoutpreventionlayer.timeout_interval = 30

    def tearDown(self):
        """Stop every forwarder, fetch tool and the simulation bus."""
        self.car.stop_forwarder()
        self.fetch_tool_car.stop_fetch()

        for rsu in self.rsus:
            rsu.stop_forwarder()

        for fetch_tool in self.fetch_tools:
            fetch_tool.stop_fetch()

        self.simulation_bus.stop_process()

    def setup_faces_and_connections(self):
        """Start all nodes, wire faces/forwarding rules and preload content."""
        self.car.start_forwarder()

        for rsu in self.rsus:
            rsu.start_forwarder()

        self.simulation_bus.start_process()

        function = "PYTHON\nf\ndef f(a):\n return a + ' World'"

        # Setup rsu0 (leftmost; /nR points to its right neighbour rsu1)
        self.mgmt_clients[0].add_face("car", None, 0)
        self.mgmt_clients[0].add_face("rsu1", None, 0)
        self.mgmt_clients[0].add_forwarding_rule(Name("/car"), [0])
        self.mgmt_clients[0].add_forwarding_rule(Name("/nR"), [1])
        self.mgmt_clients[0].add_new_content(Name("/rsu/func/f1"), function)

        # Setup rsu1 (/nL = rsu0, /nR = rsu2)
        self.mgmt_clients[1].add_face("car", None, 0)
        self.mgmt_clients[1].add_face("rsu0", None, 0)
        self.mgmt_clients[1].add_face("rsu2", None, 0)
        self.mgmt_clients[1].add_forwarding_rule(Name("/car"), [0])
        self.mgmt_clients[1].add_forwarding_rule(Name("/nL"), [1])
        self.mgmt_clients[1].add_forwarding_rule(Name("/nR"), [2])
        self.mgmt_clients[1].add_new_content(Name("/rsu/func/f1"), function)

        # Setup rsu2 (/nL = rsu1, /nR = rsu3)
        self.mgmt_clients[2].add_face("car", None, 0)
        self.mgmt_clients[2].add_face("rsu1", None, 0)
        self.mgmt_clients[2].add_face("rsu3", None, 0)
        self.mgmt_clients[2].add_forwarding_rule(Name("/car"), [0])
        self.mgmt_clients[2].add_forwarding_rule(Name("/nL"), [1])
        self.mgmt_clients[2].add_forwarding_rule(Name("/nR"), [2])
        self.mgmt_clients[2].add_new_content(Name("/rsu/func/f1"), function)

        # Setup rsu3 (rightmost; /nL = rsu2)
        self.mgmt_clients[3].add_face("car", None, 0)
        self.mgmt_clients[3].add_face("rsu2", None, 0)
        self.mgmt_clients[3].add_forwarding_rule(Name("/car"), [0])
        self.mgmt_clients[3].add_forwarding_rule(Name("/nL"), [1])
        self.mgmt_clients[3].add_new_content(Name("/rsu/func/f1"), function)

        # Setup car
        self.mgmt_client_car.add_face("rsu0", None, 0)
        self.mgmt_client_car.add_face("rsu1", None, 0)
        self.mgmt_client_car.add_face("rsu2", None, 0)
        self.mgmt_client_car.add_forwarding_rule(Name("/rsu"), [0])

        self.d = Content(Name("/car/data/test1"), "Test" * 10)
        self.meta_data, self.data = self.chunkifyer.chunk_data(self.d)

        for md in self.meta_data[:]:
            self.car.icnlayer.cs.add_content_object(md)

        # Only load the first 5 chunks to the car to simulate the car getting
        # out of reach while uploading
        for d in self.data[:5]:
            self.mgmt_client_car.add_new_content(d.name, d.content)

    def test_two_disconnections(self):
        """Car disconnects twice; rsu0, rsu1 and rsu2 continue the fetch in turn."""
        self.setup_faces_and_connections()

        name = Name("/rsu/func/f1")
        name += '_(/car/data/test1)'
        name += "NFN"

        print("RSU 0 FETCHING")
        start = time()
        res0 = self.fetch_tools[0].fetch_data(name, timeout=10)
        print(res0)

        sleep(1)
        print("\n" * 20 + "RSU 1 FETCHING")

        # Car "moved": middle chunk range is now available, first ones gone.
        for d in self.data[5:-2]:
            self.mgmt_client_car.add_new_content(d.name, d.content)
        for d in self.data[:5]:
            self.car.icnlayer.cs.remove_content_object(d.name)

        res1 = self.fetch_tools[1].fetch_data(name, timeout=10)
        print(res1)

        sleep(1)
        print("\n" * 20 + "RSU 2 FETCHING")

        # Car "moved" again: only the last two chunks remain available.
        for d in self.data[-2:]:
            self.mgmt_client_car.add_new_content(d.name, d.content)
        for d in self.data[:-2]:
            self.car.icnlayer.cs.remove_content_object(d.name)

        res2 = self.fetch_tools[2].fetch_data(name, timeout=10)
        print(res2, time() - start)
        self.assertEqual(self.d.content + " World", res2)

    def test_skip_two_rsus(self):
        """With num_of_forwards=2 the fetch may skip two RSUs and still complete."""

        # Increase the number of forwards
        for rsu in self.rsus:
            rsu.chunklayer.set_number_of_forwards(2)

        self.setup_faces_and_connections()
        name = Name("/rsu/func/f1")
        name += '_(/car/data/test1)'
        name += "NFN"

        res0 = self.fetch_tools[0].fetch_data(name, timeout=10)
        print(res0)

        sleep(1)
        print("\n" * 20 + "RSU 4 STARTING")

        for d in self.data[5:]:
            self.mgmt_client_car.add_new_content(d.name, d.content)
        for d in self.data[:5]:
            self.car.icnlayer.cs.remove_content_object(d.name)

        res1 = self.fetch_tools[3].fetch_data(name, timeout=10)
        print(res1)
        self.assertEqual(self.d.content + " World", res1)
Пример #13
0
class BasicChunkLayer(LayerProcess):
    """Basic Chunking Layer for PICN.

    Content passed downwards that is larger than ``chunk_size`` is split into
    chunks plus metadata objects; chunked content arriving from below is
    requested chunk by chunk and reassembled before being handed upwards.
    """

    def __init__(self,
                 chunkifyer: BaseChunkifyer = None,
                 chunk_size: int = 4096,
                 manager: multiprocessing.Manager = None,
                 log_level=255):
        """
        :param chunkifyer: split/reassembly strategy (default: SimpleContentChunkifyer)
        :param chunk_size: maximum payload size before content is chunked
        :param manager: optional shared multiprocessing.Manager; a private one
            is created when omitted
        :param log_level: log level for this layer
        """
        super().__init__("ChunkLayer", log_level=log_level)
        self.chunk_size = chunk_size
        if chunkifyer is None:  # fix: identity comparison instead of '== None'
            self.chunkifyer = SimpleContentChunkifyer(chunk_size)
        else:
            self.chunkifyer: BaseChunkifyer = chunkifyer
        if manager is None:
            manager = multiprocessing.Manager()
        # name -> (content object, insertion timestamp)
        self._chunk_table: Dict[Name, (Content, float)] = manager.dict()
        # outstanding requests issued by higher layers
        self._request_table: List[RequestTableEntry] = manager.list()

    def data_from_higher(self, to_lower: multiprocessing.Queue,
                         to_higher: multiprocessing.Queue, data):
        """Handle a packet coming from the layer above.

        Interests are tracked in the request table and passed down; large
        Content is chunked and its chunks/metadata cached in the chunk table.
        """
        self.logger.info("Got Data from higher")
        faceid = data[0]
        packet = data[1]
        if isinstance(packet, Interest):
            self.logger.info("Packet is Interest " + str(packet.name))
            requestentry = self.get_request_table_entry(packet.name)
            if requestentry is None:
                self._request_table.append(RequestTableEntry(packet.name))
            to_lower.put([faceid, packet])
            return
        if isinstance(packet, Content):
            self.logger.info("Packet is Content (name=%s, %d bytes)" % \
                                      (str(packet.name), len(packet.content)))
            if len(packet.content) < self.chunk_size:
                # Small enough: no chunking required.
                to_lower.put([faceid, packet])
            else:
                self.logger.info("Chunking Packet")
                metadata, chunks = self.chunkifyer.chunk_data(
                    packet)  #create metadata and chunks
                self.logger.info("Metadata: " + metadata[0].content)
                to_lower.put(
                    [faceid, metadata[0]]
                )  #return first name TODO HANDLE THE CASE, WHERE CHUNKS CAN TIMEOUT AND MUST BE REPRODUCED
                for md in metadata:  #add metadata to chunktable
                    if md.name not in self._chunk_table:
                        self._chunk_table[md.name] = (md, time.time())
                for c in chunks:  #add chunks to chunktable
                    if c.name not in self._chunk_table:
                        self._chunk_table[c.name] = (c, time.time())
        if isinstance(packet, Nack):
            # A NACK cancels the pending request before being passed down.
            requestentry = self.get_request_table_entry(packet.name)
            if requestentry is not None:
                self._request_table.remove(requestentry)
            to_lower.put([faceid, packet])

    def data_from_lower(self, to_lower: multiprocessing.Queue,
                        to_higher: multiprocessing.Queue, data):
        """Handle a packet coming from the layer below.

        Interests are answered from the chunk table if possible; Content is
        either passed up directly or drives the chunk request/reassembly
        state machine.
        """
        self.logger.info("Got Data from lower")
        faceid = data[0]
        packet = data[1]
        if isinstance(packet, Interest):
            self.logger.info("Packet is Interest")
            if packet.name in self._chunk_table:  #Check if Interest is in chunktable
                matching_content = self._chunk_table.get(packet.name)[0]
                to_lower.put([faceid, matching_content])
            else:
                to_higher.put([faceid, packet])
            return
        if isinstance(packet, Content):
            self.logger.info("Packet is Content")
            request_table_entry = self.get_request_table_entry(packet.name)
            if request_table_entry is None:
                # Content nobody asked for: drop it.
                return
            self._request_table.remove(request_table_entry)
            if request_table_entry.chunked is False:  #not chunked content
                if not packet.get_bytes().startswith(b'mdo:'):
                    to_higher.put([faceid, packet])
                    return
                else:  # Received metadata data --> chunked content
                    request_table_entry.chunked = True
            if packet.get_bytes().startswith(
                    b'mdo:'):  # request all frames from metadata
                request_table_entry = self.handle_received_meta_data(
                    faceid, packet, request_table_entry, to_lower)
            else:
                request_table_entry = self.handle_received_chunk_data(
                    faceid, packet, request_table_entry, to_higher)
                if request_table_entry is None:
                    return  #deletes entry if data was completed
            self._request_table.append(request_table_entry)
        if isinstance(packet, Nack):
            requestentry = self.get_request_table_entry(packet.name)
            if requestentry is not None:
                self._request_table.remove(requestentry)
            to_higher.put([faceid, packet])

    def handle_received_meta_data(
            self, faceid: int, packet: Content,
            request_table_entry: RequestTableEntry,
            to_lower: multiprocessing.Queue) -> RequestTableEntry:
        """Handle the case, where metadata are received from the network.

        Issues Interests for every chunk (and follow-up metadata object)
        listed in the metadata and records them in the request entry.
        """
        md_entry = self.metadata_name_in_request_table(packet.name)
        # fix: the lookup returns a bool, so the former 'is None' guard never
        # fired and unexpected metadata was processed anyway.
        if not md_entry:
            return request_table_entry
        request_table_entry = self.remove_metadata_name_from_request_table(
            request_table_entry, packet.name)
        md, chunks, size = self.chunkifyer.parse_meta_data(packet.content)
        if md is not None:  # there is another md file
            request_table_entry.requested_md.append(md)
            to_lower.put([faceid, Interest(md)])
        else:
            request_table_entry.lastchunk = chunks[-1]
        for chunk in chunks:  # request all chunks from the metadata file
            request_table_entry.requested_chunks.append(chunk)
            to_lower.put([faceid, Interest(chunk)])
        self._chunk_table[packet.name] = (packet, time.time())
        return request_table_entry

    def handle_received_chunk_data(
            self, faceid: int, packet: Content,
            request_table_entry: RequestTableEntry,
            to_higher: multiprocessing.Queue) -> RequestTableEntry:
        """Handle the case where chunk data are received.

        Collects the chunk; once all chunks and metadata are in, reassembles
        the original content, passes it upwards and returns None so the
        caller deletes the request entry.
        """
        chunk_entry = self.chunk_name_in_request_table(packet.name)
        # fix: the lookup returns a bool, so the former 'is None' guard never
        # fired and unsolicited chunks were accepted anyway.
        if not chunk_entry:
            return request_table_entry
        request_table_entry.chunks.append(packet)
        request_table_entry = self.remove_chunk_name_from_request_table_entry(
            request_table_entry, packet.name)
        self._chunk_table[packet.name] = (packet, time.time())
        if request_table_entry.chunked and len(request_table_entry.requested_chunks) == 0 \
                and len(request_table_entry.requested_md) == 0:  # all chunks are available
            data = request_table_entry.chunks
            # Sort numerically by the trailing chunk number; plain string sort
            # breaks with more than 10 chunks ("c10" < "c2").
            data = sorted(
                data,
                key=lambda content: int(''.join(
                    filter(str.isdigit, content.name.string_components[-1]))))
            cont = self.chunkifyer.reassamble_data(request_table_entry.name,
                                                   data)
            to_higher.put([faceid, cont])
            return None
        else:
            return request_table_entry

    def get_chunk_list_from_chunk_table(self,
                                        data_names: Name) -> List[Content]:
        """get a list of content objects from a list of names"""
        res = []
        for name in data_names:
            if name in self._chunk_table:
                res.append(self._chunk_table[name][0])
        return res

    def get_request_table_entry(self, name: Name) -> RequestTableEntry:
        """Return the request-table entry matching a name, or None."""
        for entry in self._request_table:
            if entry.name == name or name in entry.requested_chunks or name in entry.requested_md:
                return entry

        return None

    def chunk_name_in_request_table(self, name):
        """check if a received chunk is expected by the requesttable"""
        for entry in self._request_table:
            if name in entry.requested_chunks:
                return True
        return False

    def remove_chunk_name_from_request_table_entry(self, request_table_entry: RequestTableEntry, name: Name)\
            -> RequestTableEntry:
        """remove chunk from the request table entry"""
        if name not in request_table_entry.requested_chunks:
            return request_table_entry
        request_table_entry.requested_chunks.remove(name)
        return request_table_entry

    def metadata_name_in_request_table(self, name):
        """check if a received metadata is expected by the requesttable"""

        for entry in self._request_table:
            if name in entry.requested_md:
                return True
        return False

    def remove_metadata_name_from_request_table(self, request_table_entry: RequestTableEntry, name: Name) \
            -> RequestTableEntry:
        """remove metadata from the request table entry"""
        if name not in request_table_entry.requested_md:
            return request_table_entry
        request_table_entry.requested_md.remove(name)
        return request_table_entry
Пример #14
0
class DataOffloadingChunklayerSimple(LayerProcess):
    """This Chunklayer handles interrupted data uploads by asking neighbouring nodes for available content.

    Interests for names starting with ``prefix`` are additionally sent to the
    left/right neighbours ("/nL", "/nR") wrapped in a "CL<hops>" suffix; if
    both neighbours NACK, the remaining data is requested from the original
    source again.
    """

    def __init__(self,
                 cs: BaseContentStore,
                 pit: BasePendingInterestTable,
                 fib: BaseForwardingInformationBase,
                 chunkifyer: BaseChunkifyer = None,
                 chunk_size: int = 4096,
                 num_of_forwards: int = 1,
                 prefix: str = "car",
                 log_level: int = 255):
        """
        :param cs: content store used to answer neighbour ("CL") requests
        :param pit: pending interest table, used to avoid duplicate neighbour queries
        :param fib: FIB, used to check whether a neighbour face exists
        :param chunkifyer: split/reassembly strategy (default: SimpleContentChunkifyer)
        :param chunk_size: maximum payload size before content is chunked
        :param num_of_forwards: how many hops a "CL" interest may be forwarded
        :param prefix: name prefix identifying the mobile data source
        :param log_level: log level for this layer
        """
        super().__init__("ChunkLayer", log_level=log_level)
        self.cs = cs
        self.pit = pit
        self.fib = fib
        self.chunk_size = chunk_size
        if chunkifyer is None:
            self.chunkifyer = SimpleContentChunkifyer(chunk_size)
        else:
            self.chunkifyer: BaseChunkifyer = chunkifyer
        self.num_of_forwards = num_of_forwards
        self.prefix = prefix

        manager = multiprocessing.Manager()
        # name -> (content object, insertion timestamp)
        self._chunk_table: Dict[Name, (Content, float)] = manager.dict()
        self._request_table: List[RequestTableEntry] = manager.list()
        # per-request state of the neighbour ("CL") protocol
        self._cl_table: Dict[Name, ClEntry] = manager.dict()

        self.pass_through = False
        # True while a CL interest for the current request is outstanding.
        self.cl_sent = False

    def data_from_higher(self, to_lower: multiprocessing.Queue,
                         to_higher: multiprocessing.Queue, data):
        """Handle a packet from above: track/forward Interests (optionally
        querying neighbours first), chunk large Content, forward Nacks."""
        self.logger.info("Got Data from higher")
        faceid = data[0]
        packet = data[1]

        if isinstance(packet, Interest):
            self.logger.info("Packet is Interest " + str(packet.name))
            request_entry = self.get_request_entry(packet.name)
            if request_entry is None:
                self._request_table.append(RequestTableEntry(packet.name))

            # If the interest starts with "/car" and not ends with "NFN", request metadata and available chunks from neighbours
            components = packet.name.string_components
            if self.prefix in components[
                    0] and components[-1] != "NFN" and not request_entry:
                self.pass_through = False
                # fix: reset per request; a stale True from an earlier request
                # made us return early below even when no CL interest was sent.
                self.cl_sent = False
                self._cl_table[packet.name] = ClEntry(data)
                cl_entry = self._cl_table.get(packet.name)

                name1 = Name("/nL") + packet.name + f"CL{self.num_of_forwards}"
                name2 = Name("/nR") + packet.name + f"CL{self.num_of_forwards}"
                # Query a neighbour only if there is a route and no pending query.
                if not self.pit.find_pit_entry(
                        name1) and self.fib.find_fib_entry(name1):
                    cl_entry.nack_L = False
                    self.cl_sent = True
                    to_lower.put([faceid, Interest(name1)])
                if not self.pit.find_pit_entry(
                        name2) and self.fib.find_fib_entry(name2):
                    cl_entry.nack_R = False
                    self.cl_sent = True
                    to_lower.put([faceid, Interest(name2)])
                if self.cl_sent:
                    self._cl_table[packet.name] = cl_entry
                    return

            to_lower.put(data)
            return

        elif isinstance(packet, Content):
            self.logger.info("Packet is Content (name=%s, %d bytes)" %
                             (str(packet.name), len(packet.content)))
            if len(packet.content) < self.chunk_size:
                to_lower.put(data)
            else:
                self.logger.info("Chunking Packet")
                metadata, chunks = self.chunkifyer.chunk_data(
                    packet)  # Create metadata and chunks
                self.logger.info("Metadata: " + metadata[0].content)
                to_lower.put([faceid, metadata[0]
                              ])  # Return name of first metadata object
                for md in metadata:  # Add metadata to chunktable
                    if md.name not in self._chunk_table:
                        self._chunk_table[md.name] = (md, time.time())
                for c in chunks:  # Add chunks to chunktable
                    if c.name not in self._chunk_table:
                        self._chunk_table[c.name] = (c, time.time())

        elif isinstance(packet, Nack):
            request_entry = self.get_request_entry(packet.name)
            if request_entry is not None:
                self._request_table.remove(request_entry)
            to_lower.put(data)

    def data_from_lower(self, to_lower: multiprocessing.Queue,
                        to_higher: multiprocessing.Queue, data):
        """Handle a packet from below: answer/forward CL interests, unpack or
        relay CL content, and translate Nacks for the neighbour protocol."""
        self.logger.info("Got Data from lower")
        faceid = data[0]
        packet = data[1]
        components = packet.name.components
        string_components = packet.name.string_components
        last = components[-1]

        if isinstance(packet, Interest):
            self.logger.info("Packet is Interest")

            # General Interest passed on to chunklayer
            if "CL" in string_components[-1]:
                matching_content = self.get_matching_content(packet)
                if last == b"CL0" or isinstance(
                        matching_content,
                        Content):  # If there is matching content, return it
                    to_lower.put([faceid, matching_content])
                else:  # Otherwise try to pass on to neighbour
                    to_lower.put(
                        [faceid,
                         Interest(self.decrease_name(packet.name))])

            elif packet.name in self._chunk_table:
                matching_content = self._chunk_table.get(packet.name)[0]
                to_lower.put([faceid, matching_content])
            else:
                to_higher.put(data)

        elif isinstance(packet, Content):
            self.logger.info("Packet is Content")
            cl_content = False

            # Content from the chunklayer of a neighbouring node
            if "CL" in string_components[-1]:
                if string_components[
                        -1] == f"CL{self.num_of_forwards}":  # This is the requesting node --> unpack
                    cl_content = True
                    packet.name = self.unpack(packet.name)
                    # Save the sender of this packet as the recipient for further interests. Used in pack_cl()
                    request_entry = self.get_request_entry(packet.name)
                    if request_entry:
                        cl_entry = self._cl_table.get(request_entry.name)
                        if cl_entry.interest_requested:  # If we already resent requests to source, don't consider it
                            return
                        cl_entry.recipient = Name(components[:1])
                        self._cl_table[request_entry.name] = cl_entry

                else:  # This is not the requesting node --> pass on to neighbour
                    to_lower.put([
                        faceid,
                        Content(self.increase_name(packet.name),
                                packet.content)
                    ])
                    return

            request_entry = self.get_request_entry(packet.name)
            if request_entry is None:
                return
            self.handle_content(faceid, packet, request_entry, cl_content,
                                to_lower, to_higher)

        elif isinstance(packet, Nack):
            if self.prefix not in string_components[0]:
                request_entry = self.get_request_entry(packet.name)
                if request_entry:
                    self._request_table.remove(request_entry)

                if "CL" in string_components[-1]:
                    if string_components[
                            -1] == f"CL{self.num_of_forwards}":  # This is the requesting node --> unpack
                        name_unpacked = self.unpack(packet.name)
                        request_entry = self.get_request_entry(name_unpacked)
                        if request_entry:
                            cl_entry = self._cl_table.get(request_entry.name)
                            if components[0] == b"nL":
                                cl_entry.nack_L = True
                            else:
                                cl_entry.nack_R = True
                            if cl_entry.nack_L and cl_entry.nack_R and not cl_entry.interest_requested:
                                # No more data available from neighbours, get it from car
                                self.get_missing_data_from_original_source(
                                    faceid, request_entry, cl_entry, to_lower)

                            self._cl_table[request_entry.name] = cl_entry
                    else:
                        # Not the requesting node: relay the NACK back towards it.
                        name = self.increase_name(packet.name)
                        nack = Nack(name, NackReason.NO_CONTENT,
                                    Interest(name))
                        to_lower.put([faceid, nack])
                else:
                    to_higher.put([faceid, packet])
                self.pit.remove_pit_entry(packet.name)
            else:
                if "c" in string_components[-1] or "m" in string_components[-1]:
                    # Strip the chunk/metadata component before notifying above.
                    packet.name.components = components[:-1]
                    to_higher.put([
                        faceid,
                        Nack(packet.name, NackReason.NO_CONTENT,
                             Interest(packet.name))
                    ])
                else:
                    pass
                    # to_higher.put([faceid, packet])

    def handle_content(self, faceid: int, packet: Content,
                       request_entry: RequestTableEntry, cl_content: bool,
                       to_lower: multiprocessing.Queue,
                       to_higher: multiprocessing.Queue):
        """Handle incoming content"""
        self._request_table.remove(request_entry)
        if request_entry.chunked is False:  # Not chunked content
            if not packet.get_bytes().startswith(b'mdo:'):
                to_higher.put([faceid, packet])
                return
            else:  # Received metadata data --> chunked content
                request_entry.chunked = True
        if packet.get_bytes().startswith(
                b'mdo:'):  # Request all frames from metadata
            self.handle_received_meta_data(faceid, packet, request_entry,
                                           to_lower, cl_content)
        else:
            self.handle_received_chunk_data(faceid, packet, request_entry,
                                            to_higher)

    def handle_received_meta_data(self, faceid: int, packet: Content,
                                  request_entry: RequestTableEntry,
                                  to_lower: multiprocessing.Queue,
                                  cl_content: bool):
        """Handle meta data"""
        if packet.name in request_entry.requested_md:
            request_entry.requested_md.remove(packet.name)
        md, chunks, size = self.chunkifyer.parse_meta_data(packet.content)
        for chunk in chunks:  # Request all chunks from the metadata file if not already received or requested
            # if chunk not in request_entry.requested_chunks and chunk not in [i.name for i in request_entry.chunks]:
            if chunk not in [i.name for i in request_entry.chunks]:
                request_entry.requested_chunks.append(chunk)
                if not self.pass_through:
                    if cl_content:
                        cl_entry = self._cl_table.get(request_entry.name)
                        if cl_entry.nack_L and cl_entry.nack_R:
                            break
                        chunk = self.pack_cl(chunk)
                    to_lower.put([faceid, Interest(chunk)])
        if md is not None:  # There is another md file
            if md not in request_entry.requested_md:
                request_entry.requested_md.append(md)
            if not self.pass_through:
                if cl_content:
                    cl_entry = self._cl_table.get(request_entry.name)
                    if not (cl_entry.nack_L and cl_entry.nack_R):
                        cl_entry.interest = [faceid, Interest(packet.name)]
                        self._cl_table[request_entry.name] = cl_entry
                        md = self.pack_cl(md)
                        to_lower.put([faceid, Interest(md)])
                else:
                    to_lower.put([faceid, Interest(md)])
        else:
            request_entry.last_chunk = chunks[-1]
        self._chunk_table[packet.name] = (packet, time.time())
        self._request_table.append(request_entry)

    def handle_received_chunk_data(self, faceid: int, packet: Content,
                                   request_entry: RequestTableEntry,
                                   to_higher: multiprocessing.Queue):
        """Handle chunk data"""
        if packet.name in request_entry.requested_chunks:
            request_entry.requested_chunks.remove(packet.name)
            request_entry.chunks.append(packet)
        self._chunk_table[packet.name] = (packet, time.time())
        # fix: the former nested 'if not request_entry.requested_md' was
        # redundant, the outer condition already guarantees it.
        if not request_entry.requested_chunks and not request_entry.requested_md:
            # All chunks are available --> reassemble and pass upwards.
            data = request_entry.chunks
            data = sorted(data,
                          key=lambda content: int(''.join(
                              filter(str.isdigit, content.name.
                                     string_components[-1]))))
            cont = self.chunkifyer.reassamble_data(request_entry.name,
                                                   data)
            to_higher.put([faceid, cont])
            return

        self._request_table.append(request_entry)

    def get_request_entry(self, name: Name):
        """
        Check if a name is in the request table.
        Return entry or None.
        """
        for entry in self._request_table:
            if entry.name == name or name in entry.requested_chunks or name in entry.requested_md:
                return entry
        return None

    def get_matching_content(self, packet: Packet):
        """Return either the content matching the packet name or NACK"""
        name_in = self.unpack(packet.name)
        cs_entry = self.cs.find_content_object(name_in)
        if name_in in self._chunk_table:
            matching_content = self._chunk_table.get(name_in)[0]
            matching_content.name = packet.name
            return matching_content
        elif cs_entry:
            matching_content = cs_entry.content
            matching_content.name = packet.name
            return matching_content
        else:
            return Nack(packet.name, NackReason.NO_CONTENT, packet)

    def pack_cl(self, name: Name) -> Name:
        """Prepend the recipient, append "CL" and the number of forwards."""
        lookup_name = Name(name.components[:-1])
        return self._cl_table.get(
            lookup_name).recipient + name + f"CL{self.num_of_forwards}"

    def unpack(self, name: Name) -> Name:
        """Strip the leading recipient component and the trailing CL marker."""
        return Name(name.components[1:-1])

    def increase_name(self, name: Name) -> Name:
        """
        Increase the number at the end of the name.
        The number is used to determine whether or not a packet gets forwarded to the next neighbour.
        """
        components = name.components
        last = components[-1].decode("utf-8")
        i = int(''.join(filter(str.isdigit, last)))
        if "CL" in last:
            return Name(components[:-1]) + f"CL{i+1}"
        return name

    def decrease_name(self, name: Name) -> Name:
        """
        Decrease the number at the end of the name.
        The number is used to determine whether or not a packet gets forwarded to the next neighbour.
        """
        components = name.components
        last = components[-1].decode("utf-8")
        i = int(''.join(filter(str.isdigit, last)))
        if "CL" in last:
            return Name(components[:-1]) + f"CL{i-1}"
        return name

    def get_missing_data_from_original_source(self, faceid: int,
                                              request_entry: RequestTableEntry,
                                              cl_entry: ClEntry,
                                              to_lower: multiprocessing.Queue):
        """
        Start requesting the missing files from the original source.
        """
        if not cl_entry.interest_requested:
            if request_entry.requested_chunks:
                # Request again all chunks that have been requested but not satisfied yet
                for chunk in request_entry.requested_chunks:
                    to_lower.put([faceid, Interest(chunk)])

            # If requested_md is not empty, request them again from source
            if request_entry.requested_md:
                for md in request_entry.requested_md:
                    to_lower.put([faceid, Interest(md)])
            else:  # if empty, request orginal interest from source
                to_lower.put(cl_entry.interest)
            cl_entry.interest_requested = True

    def set_number_of_forwards(self, number_of_forwards: int):
        """Set how many hops a CL interest may be forwarded."""
        self.num_of_forwards = number_of_forwards
Пример #15
0
 def setUp(self):
     """Create the chunkifyer under test with its default chunk size."""
     self.chunkifyer = SimpleContentChunkifyer()
Пример #16
0
    def __init__(self, foldername: Optional[str], prefix: Name,
                 port=9000, log_level=255, encoder: BasicEncoder = None,
                 autoconfig: bool = False, autoconfig_routed: bool = False, interfaces: List[BaseInterface] = None,
                 use_thunks=False):
        """
        Build the repository's layer stack and management interface.

        :param foldername: If None, use an in-memory repository. Else, use a file system repository.
        :param prefix: name prefix the repository serves content under
        :param port: UDP port to listen on (only used when no interfaces are given)
        :param log_level: log level for all layers
        :param encoder: packet encoder; defaults to SimpleStringEncoder
        :param autoconfig: if True, insert an autoconfig layer below the chunklayer
        :param autoconfig_routed: if True, register the repo prefix globally via autoconfig
        :param interfaces: pre-built interfaces to use instead of a fresh UDP socket
        :param use_thunks: if True, add a thunk layer (and thunk/plan tables) to the stack
        """

        logger = Logger("ICNRepoSession", log_level)
        logger.info("Start PiCN Data Repository with Sessions")

        # packet encoder
        if encoder is None:
            self.encoder = SimpleStringEncoder(log_level=log_level)
        else:
            encoder.set_log_level(log_level)
            self.encoder = encoder

        # chunkifyer
        self.chunkifyer = SimpleContentChunkifyer()

        # repo
        manager = multiprocessing.Manager()

        # in-memory vs. file-system backing store, selected by foldername
        if foldername is None:
            self.repo: BaseRepository = SimpleMemoryRepository(prefix, manager, logger)
        else:
            self.repo: BaseRepository = SimpleFileSystemRepository(foldername, prefix, manager, logger)

        # initialize layers
        synced_data_struct_factory = PiCNSyncDataStructFactory()
        synced_data_struct_factory.register("faceidtable", FaceIDDict)

        # thunk/plan tables are only registered when thunks are enabled
        if use_thunks:
            synced_data_struct_factory.register("thunktable", ThunkList)
            synced_data_struct_factory.register("plantable", PlanTable)
        synced_data_struct_factory.create_manager()
        faceidtable = synced_data_struct_factory.manager.faceidtable()

        if use_thunks:
            self.parser = DefaultNFNParser()
            thunktable = synced_data_struct_factory.manager.thunktable()
            plantable = synced_data_struct_factory.manager.plantable(self.parser)

        # use supplied interfaces as-is, or fall back to a fresh UDP socket;
        # the management port follows the actually bound interface
        if interfaces is not None:
            self.interfaces = interfaces
            mgmt_port = port
        else:
            interfaces = [UDP4Interface(port)]
            mgmt_port = interfaces[0].get_port()

        self.linklayer = BasicLinkLayer(interfaces, faceidtable, log_level=log_level)
        self.packetencodinglayer = BasicPacketEncodingLayer(self.encoder, log_level=log_level)
        self.chunklayer = BasicChunkLayer(self.chunkifyer, log_level=log_level)
        self.repolayer = SessionRepositoryLayer(self.repo, log_level=log_level)

        if use_thunks:
            self.thunklayer = BasicThunkLayer(None, None, None, faceidtable, thunktable, plantable, self.parser, self.repo, log_level=log_level)
            logger.info("Using Thunks")

        # stack order: repo on top, link layer at the bottom
        if use_thunks:
            self.lstack: LayerStack = LayerStack([
                self.repolayer,
                self.chunklayer,
                self.thunklayer,
                self.packetencodinglayer,
                self.linklayer
            ])
        else:
            self.lstack: LayerStack = LayerStack([
                self.repolayer,
                self.chunklayer,
                self.packetencodinglayer,
                self.linklayer
            ])

        if autoconfig:
            self.autoconfiglayer = AutoconfigRepoLayer(name=prefix.string_components[-1],
                                                       addr='127.0.0.1',
                                                       linklayer=self.linklayer, repo=self.repo,
                                                       register_global=autoconfig_routed, log_level=log_level)
            self.lstack.insert(self.autoconfiglayer, below_of=self.chunklayer)

        # mgmt
        self.mgmt = Mgmt(None, None, None, self.linklayer, mgmt_port,
                         self.start_repo, repo_path=foldername,
                         repo_prfx=prefix, log_level=log_level)
Пример #17
0
class DetectionMapVideoSimulation():
    """Run a simple data-offloading simulation"""
    def setUp(self):
        """Build the simulation: a shared bus, two cars and three RSUs.

        Each car is a plain ICN forwarder; each RSU is an NFN forwarder with
        an edge-computing optimizer.  Every node gets its own fetch tool and
        a management client bound to the node's mgmt socket port.
        """
        self.encoder_type = SimpleStringEncoder()
        self.simulation_bus = SimulationBus(packetencoder=self.encoder_type)
        chunk_size = 8192
        self.chunkifyer = SimpleContentChunkifyer(chunk_size)

        # Cars: ICN forwarders with routing enabled.
        self.cars = []
        self.fetch_tool_cars = []
        self.mgmt_client_cars = []
        for car_idx in range(2):
            car = ICNForwarder(
                0,
                encoder=self.encoder_type,
                routing=True,
                interfaces=[self.simulation_bus.add_interface(f"car{car_idx}")])
            self.cars.append(car)
            self.fetch_tool_cars.append(
                Fetch(f"car{car_idx}",
                      None,
                      255,
                      self.encoder_type,
                      interfaces=[
                          self.simulation_bus.add_interface(f"ftcar{car_idx}")
                      ]))
            # mgmt client talks to the ephemeral port the forwarder bound
            self.mgmt_client_cars.append(
                MgmtClient(car.mgmt.mgmt_sock.getsockname()[1]))
            car.icnlayer.cs.set_cs_timeout(30)

        # RSUs: NFN-capable forwarders, each with an edge-computing optimizer.
        self.rsus = []
        self.fetch_tools = []
        self.mgmt_clients = []
        for rsu_idx in range(3):
            rsu = NFNForwarderData(
                0,
                encoder=self.encoder_type,
                interfaces=[self.simulation_bus.add_interface(f"rsu{rsu_idx}")],
                chunk_size=chunk_size,
                num_of_forwards=1)
            self.rsus.append(rsu)
            self.fetch_tools.append(
                Fetch(f"rsu{rsu_idx}",
                      None,
                      255,
                      self.encoder_type,
                      interfaces=[
                          self.simulation_bus.add_interface(f"ft{rsu_idx}")
                      ]))
            rsu.nfnlayer.optimizer = EdgeComputingOptimizer(
                rsu.icnlayer.cs, rsu.icnlayer.fib, rsu.icnlayer.pit,
                rsu.linklayer.faceidtable)
            self.mgmt_clients.append(
                MgmtClient(rsu.mgmt.mgmt_sock.getsockname()[1]))
            self.fetch_tools[rsu_idx].timeoutpreventionlayer.timeout_interval = 30

    def tearDown(self):
        """Shut down every node, every fetch tool, and the simulation bus."""
        for node in self.cars:
            node.stop_forwarder()
        for car_fetch in self.fetch_tool_cars:
            car_fetch.stop_fetch()
        for node in self.rsus:
            node.stop_forwarder()
        for rsu_fetch in self.fetch_tools:
            rsu_fetch.stop_fetch()
        self.simulation_bus.stop_process()

    def setup_faces_and_connections(self):
        """Start all nodes, wire up faces and routes, and pre-compute the
        per-frame detection-map objects.

        Side effects: starts every forwarder and the simulation bus, reads the
        demo video and the locations JSON from disk, and fills ``self.mdos``
        with one DetectionMapObject per video frame.
        """
        self._start_nodes_and_routes()

        # extract frames from video
        fps, frames = Helper.video_to_frames(
            os.path.join(ROOT_DIR, "Demos/DetectionMap/Assets/108.mp4"))
        fps = int(np.round(fps))

        lats, longs, bearings = self._load_interpolated_locations(fps)

        self.mdos = []
        for i, frame in enumerate(frames):
            # tobytes() replaces the deprecated ndarray.tostring(),
            # which was removed in NumPy 2.0.
            encoded = cv2.imencode('.jpg', frame)[1].tobytes()
            base64_image = base64.b64encode(encoded)
            map_det_obj0 = DetectionMapObject(base64_image, 0.8, lats[i],
                                              longs[i], bearings[i])
            self.mdos.append(map_det_obj0)

    def _start_nodes_and_routes(self):
        """Start forwarders and the bus; register faces, forwarding rules and
        the NFN function on every node."""
        for car in self.cars:
            car.start_forwarder()

        for rsu in self.rsus:
            rsu.start_forwarder()

        self.simulation_bus.start_process()

        function = "PYTHON\nf\ndef f(a, b, c):\n return detection_map(a, b, c)"

        # setup rsu0: left edge of the chain, only a right neighbour (/nR)
        self.mgmt_clients[0].add_face("car0", None, 0)
        self.mgmt_clients[0].add_face("car1", None, 0)
        self.mgmt_clients[0].add_face("rsu1", None, 0)
        self.mgmt_clients[0].add_forwarding_rule(Name("/car0"), [0])
        self.mgmt_clients[0].add_forwarding_rule(Name("/car1"), [1])
        self.mgmt_clients[0].add_forwarding_rule(Name("/nR"), [2])
        self.mgmt_clients[0].add_new_content(Name("/rsu/func/f1"), function)

        # setup rsu1: middle node, both neighbours (/nL and /nR)
        self.mgmt_clients[1].add_face("car0", None, 0)
        self.mgmt_clients[1].add_face("car1", None, 0)
        self.mgmt_clients[1].add_face("rsu0", None, 0)
        self.mgmt_clients[1].add_face("rsu2", None, 0)
        self.mgmt_clients[1].add_forwarding_rule(Name("/car0"), [0])
        self.mgmt_clients[1].add_forwarding_rule(Name("/car1"), [1])
        self.mgmt_clients[1].add_forwarding_rule(Name("/nL"), [2])
        self.mgmt_clients[1].add_forwarding_rule(Name("/nR"), [3])
        self.mgmt_clients[1].add_new_content(Name("/rsu/func/f1"), function)

        # setup rsu2: right edge of the chain, only a left neighbour (/nL)
        self.mgmt_clients[2].add_face("car0", None, 0)
        self.mgmt_clients[2].add_face("car1", None, 0)
        self.mgmt_clients[2].add_face("rsu1", None, 0)
        self.mgmt_clients[2].add_forwarding_rule(Name("/car0"), [0])
        self.mgmt_clients[2].add_forwarding_rule(Name("/car1"), [1])
        self.mgmt_clients[2].add_forwarding_rule(Name("/nL"), [2])

        # setup car0: can reach every RSU, routes /rsu via rsu0
        self.mgmt_client_cars[0].add_face("rsu0", None, 0)
        self.mgmt_client_cars[0].add_face("rsu1", None, 0)
        self.mgmt_client_cars[0].add_face("rsu2", None, 0)
        self.mgmt_client_cars[0].add_forwarding_rule(Name("/rsu"), [0])

    @staticmethod
    def _interpolate(current, following, fps):
        """Return fps values linearly stepping from current towards following.

        Falls back to repeating the current value when there is no change,
        since np.arange with a zero step would raise.
        """
        if np.abs(following - current) > 0:
            return np.arange(current, following,
                             (following - current) / fps)
        return [current] * fps

    def _load_interpolated_locations(self, fps):
        """Read the GPS/bearing JSON (one entry per second) and interpolate
        to one value per video frame.

        Returns three flat lists (latitudes, longitudes, bearings), each with
        roughly fps entries per JSON location entry.
        """
        # read json file containing gps coords and bearing
        # file contains one entry per second
        with open(
                os.path.join(ROOT_DIR,
                             "Demos/DetectionMap/Assets/locations.json")) as f:
            data = json.load(f)
            locations = data.get("locations")

        # duplicate last entry, needed for the interpolation in the next step
        locations.append(locations[-1])

        # interpolate between consecutive entries to get per-frame values
        lats = []
        longs = []
        bearings = []
        for i in range(len(locations) - 1):
            here, there = locations[i], locations[i + 1]
            lats.append(self._interpolate(here.get("latitude"),
                                          there.get("latitude"), fps))
            longs.append(self._interpolate(here.get("longitude"),
                                           there.get("longitude"), fps))
            bearings.append(self._interpolate(here.get("course"),
                                              there.get("course"), fps))

        # flatten the per-second segments into one flat list per series
        flat = lambda segments: [v for seg in segments for v in seg]
        return flat(lats), flat(longs), flat(bearings)

    def test_detection_map_video(self):
        """Run the offloading demo end to end: publish each video frame as
        chunked content on car0, have rsu0 evaluate the NFN detection-map
        function for it, then assemble the produced plots and detections
        into two video files.
        """
        self.setup_faces_and_connections()
        n = 0  # offset into self.mdos; frames [n:60] are processed
        for i, mdo in enumerate(self.mdos[n:60]):
            # NFN expression: apply /rsu/func/f1 to car0's i-th image
            name = Name("/rsu/func/f1")
            name += f"_(/car0/image{i+n},1,{i+n})"
            name += "NFN"

            # chunk the frame and place meta-data + data chunks in car0's CS
            image = Content(Name(f"/car0/image{i+n}"), mdo.to_string())
            self.meta_data, self.data = self.chunkifyer.chunk_data(image)
            for md in self.meta_data:
                self.cars[0].icnlayer.cs.add_content_object(md)
            for d in self.data:
                self.cars[0].icnlayer.cs.add_content_object(d)

            print("\t" * 5)
            print("RSU 0 FETCHING")
            result = self.fetch_tools[0].fetch_data(name, 360)  # 360 s timeout
            print(result)
            sleep(1)

        # create video file from the resulting plots and detections
        print("Creating video files...")
        path_in = os.path.join(ROOT_DIR, "Demos/DetectionMap/Assets/Plots/")
        path_out = os.path.join(
            ROOT_DIR, "Demos/DetectionMap/Assets/Videos/plots_video.mp4")
        Helper.frames_to_video(path_in, path_out, 30)

        path_in = os.path.join(ROOT_DIR,
                               "Demos/DetectionMap/Assets/Classified/")
        path_out = os.path.join(
            ROOT_DIR, "Demos/DetectionMap/Assets/Videos/detections_video.mp4")
        Helper.frames_to_video(path_in, path_out, 30)