def generate_data(self, network_name: Name, chunk_size: int = 4096):
    """
    Generates manifest and chunks for a file in the repo.

    Each chunk is cached under ``<network_name>/chunk/<sha256-hexdigest>``;
    the manifest (newline-joined chunk names) is cached under the
    high-level name itself.

    :param network_name: Network name of high-level object
    :param chunk_size: chunk size in bytes
    :return: True if successful, False otherwise
    """
    try:
        fs_name = self._files_in_repo[network_name.to_string()]
    except KeyError:
        # name is not served by this repo -- was a silent bare `except:`
        return False
    chunk_names = []
    # open read-only: we never modify the file, so no write access needed
    with open(fs_name, "rb") as f:
        # determine the file length without mmap so a zero-byte file
        # does not make mmap.mmap() raise ValueError
        f.seek(0, 2)
        file_length = f.tell()
        f.seek(0)
        if file_length > 0:
            # map read-only and close the mapping deterministically
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as file:
                num_chunks = math.ceil(file_length / chunk_size)
                for n in range(num_chunks):
                    # extract chunk and compute its digest-based name
                    chunk = file[chunk_size * n:min(chunk_size * (n + 1), file_length)]
                    digest = hashlib.sha256(chunk).hexdigest()
                    chunk_network_name = Name(network_name.to_string() + '/chunk/' + digest)
                    # add to cache and chunk list
                    chunk_names.append(chunk_network_name.to_string())
                    self.add_to_cache(Content(chunk_network_name, chunk))
    # generate manifest listing all chunk names
    manifest = Content(network_name, "\n".join(chunk_names))
    self.add_to_cache(manifest)
    return True
def set_content(self, icnname: Name, chunk: bytes): try: os.stat(self._foldername) except: os.mkdir(self._foldername) with open(self._foldername + icnname.to_string(), 'w+') as f: f.write(chunk)
def test_multiple_calls_params(self):
    """Test, if ToDataFirstOptimizer works correctly with multiple function calls with parameter"""
    workflow = "/func/f1(/test/data,/lib/f2(2,/data/test))"
    # expected network names after rewriting towards each data source
    cmp_name1 = Name("/test/data") + "/func/f1(_,/lib/f2(2,/data/test))" + "NFN"
    cmp_name2 = Name("/lib/f2") + "/func/f1(/test/data,_(2,/data/test))" + "NFN"
    # install forwarding state for both prefixes
    fib = self.optimizer.fib
    fib.add_fib_entry(Name("/lib"), [1], False)
    fib.add_fib_entry(Name("/test"), [2], False)
    self.optimizer.fib = fib
    syntax_tree = self.parser.parse(workflow)
    # optimizer must forward, not compute locally
    self.assertTrue(self.optimizer.compute_fwd(None, syntax_tree, Interest(cmp_name1)))
    self.assertFalse(self.optimizer.compute_local(None, syntax_tree, Interest(cmp_name1)))
    rewrite_rules = self.optimizer.rewrite(None, syntax_tree)
    self.assertEqual(rewrite_rules,
                     ['/func/f1(%/test/data%,/lib/f2(2,/data/test))',
                      '/func/f1(/test/data,%/lib/f2%(2,/data/test))',
                      'local'])
    # first rewrite round-trips through the name encoding
    first_name = self.parser.nfn_str_to_network_name(rewrite_rules[0])
    self.assertEqual(first_name.to_string(), cmp_name1.to_string())
    nfn_str1, prepended1 = self.parser.network_name_to_nfn_str(first_name)
    self.assertEqual(nfn_str1, workflow)
    self.assertEqual(prepended1, Name("/test/data"))
    # second rewrite round-trips as well
    second_name = self.parser.nfn_str_to_network_name(rewrite_rules[1])
    self.assertEqual(second_name, cmp_name2)
    nfn_str2, prepended2 = self.parser.network_name_to_nfn_str(second_name)
    self.assertEqual(nfn_str2, workflow)
    self.assertEqual(prepended2, Name("/lib/f2"))
def executePinnedFunction(self, function, params, interest_name: Name):
    """Run a pinned function and push its result to the lower layer."""
    result = function(params)
    # drop the leading empty component and the last component, then
    # append the result marker to build the reply name
    inner_components = interest_name.to_string().split("/")[1:-1]
    inner_components.append("resultpNFN")
    reply_name = "/" + "/".join(inner_components)
    reply = Content(reply_name, str(result))
    self.queue_to_lower.put([-1, reply])
def generate_meta_data(self, startindex: int, endindex: int, md_num: int, next: int, name: Name) -> Content:
    """Generate the meta data"""
    base = name.to_string()
    # chunk references for [startindex, endindex), separated by ';'
    chunk_part = ";".join(base + "/c" + str(i) for i in range(startindex, endindex))
    metadata = "mdo:" + chunk_part + ":"
    if next > 0:
        # link to the following meta data object
        metadata = metadata + base + "/m" + str(next)
    # the first meta data object carries the plain name, later ones get /m<num>
    md_name = base + "/m" + str(md_num) if md_num > 0 else base
    return Content(Name(md_name), metadata.encode('ascii'))
def add_new_content(self, name: Name, data) -> str: """adding new content to the content store of a relay :param name: name of the content :param data: data of the content :return: reply message of the relay """ param = name.to_string() + ":" + data return self.layercommand("icnlayer", "newcontent", param.replace("/", "%2F"))
def add_forwarding_rule(self, name: Name, faceid: int) -> str: """adding a new forwarding rule to a face :param name: name for the forwarding rule which should be bound to the face :param faceid: face id to identify the face on which the name should be bound :return: reply message of the relay """ param = name.to_string() + ":" + str(faceid) return self.layercommand("icnlayer", "newforwardingrule", param.replace("/", "%2F"))
def executePinnedFunction(self, packet_id, function, params, interest_name: Name):
    """
    Execute a pinned function while a background thread emits heartbeats.

    :param packet_id: id correlating the reply with the original packet
    :param function: callable to execute
    :param params: parameters passed to the callable
    :param interest_name: name under which the result is published
    """
    # start heartbeat thread so upstream knows the computation is alive
    self.logger.info("Start heartbeat for: " + interest_name.to_string())
    heartbeat_interval = 2  # seconds between heartbeats
    stop_heartbeat_event = threading.Event()
    arguments = [
        packet_id,
        interest_name,
        heartbeat_interval,
        stop_heartbeat_event
    ]
    # daemon=True replaces the deprecated t.setDaemon(True) so the
    # heartbeat never blocks interpreter shutdown
    t = threading.Thread(target=self.heartbeat, args=arguments, daemon=True)
    t.start()
    # start computation
    self.logger.info("Start computation for: " + interest_name.to_string())
    result = function(params)
    content_object = Content(interest_name, str(result))
    # return result and stop heartbeat
    self.queue_to_lower.put([packet_id, content_object])
    stop_heartbeat_event.set()
    self.logger.info("Return result for: " + interest_name.to_string())
def network_name_to_nfn_str(self, name: Name) -> (str, Name):
    """Convert a network ``Name`` back into an NFN expression string.

    Returns a ``(nfn_string, prepended_name)`` pair, where
    ``prepended_name`` is the data name that was moved in front of the
    NFN marker components, or None when there is nothing prepended.

    NOTE(review): the second early return hands back the ``Name`` object
    itself rather than a str, so the annotated return type does not hold
    on that path -- confirm callers handle it.
    """
    # a two-component name carries the expression directly in its first
    # string component; nothing was prepended
    if len(name.components) == 2:
        return name.string_components[0], None
    # names without the trailing "NFN" marker are not NFN names
    if name.string_components[-1] != "NFN":
        return name, None
    # everything before the expression component and the "NFN" marker is
    # the prepended data name
    prepended_name = Name()
    prepended_name.string_components = name.string_components[:-2]
    # substitute the placeholder "_" with the prepended name ...
    nfn_comp = name.string_components[-2].replace(
        "_", prepended_name.to_string())
    # ... and restore '/' separators that were escaped as backslashes
    nfn_comp = nfn_comp.replace("\\", "/")
    return nfn_comp, prepended_name
def remove_content_object(self, name: Name): if name.to_string() in self._container.keys(): del self._container[name.to_string()]
def find_content_object(self, name: Name) -> ContentStoreEntry: if name.to_string() in self._container.keys(): return self._container[name.to_string()] else: return None