def build_validate_queue(self, path=None, hash_dict=None):
    """Build the validate queue from a hash dictionary.

    Walk ``hash_dict`` recursively, putting one entry on
    ``self.validate_queue`` for every file that needs to be validated.
    A dictionary value containing a ``"hash"`` key is a file: it is
    enriched in place with attempt/name/path/url information before
    being queued.  Any other value is a directory, which is processed
    by a recursive call with its name appended to the path.

    :param list path: Path components of the files in this dictionary
        (``None`` means start at the root).
    :param dict hash_dict: The dictionary to parse (``None`` means
        ``self.hash_dict["files"]``).
    """
    # Use explicit ``is None`` sentinels: the previous truthiness test
    # (`if not hash_dict:`) sent an *empty* directory dict back to the
    # root listing, causing unbounded recursion on empty directories.
    if path is None:
        path = []
    if hash_dict is None:
        hash_dict = self.hash_dict["files"]
    for key, entry in hash_dict.items():
        if "hash" in entry:
            # A hash marks a file: fill out the rest of the file info.
            entry["attempted"] = 0
            entry["name"] = key
            entry["path"] = "/".join(path)
            entry["url"] = self.__make_url(
                self.hash_dict["host"],
                self.hash_dict["version"],
                entry["path"],
                entry["name"] + ".gz",
            )
            self.validate_queue.put(entry)
        else:
            # No hash: this entry is a directory whose children must be
            # processed with an extended path.
            self.build_validate_queue(path + [key], entry)
    return
def iter(elt, path=False, enter=None, exit=None):
    """Recursively yield ``elt`` and every element nested inside it.

    Dicts are traversed via their items; any other iterable (except
    strings, per the ``types.String`` check -- presumably a project
    string type, TODO confirm) is traversed positionally.

    :param elt: the element to walk.
    :param path: ``False`` disables path tracking and plain elements are
        yielded; any other value enables it (non-list values start an
        empty path) and ``(element, path)`` pairs are yielded, where the
        path is a list of ``(container, index)`` steps from the root.
    :param enter: optional callback invoked before an element is yielded.
    :param exit: optional callback invoked after its children are done.
        Both receive ``(element,)`` or ``(element, path)`` depending on
        path tracking.
    """
    track = path is not False
    if track and not isinstance(path, list):
        # e.g. path=True: start tracking from an empty path.
        path = []
    callback_args = (elt, path) if track else (elt,)
    if enter is not None:
        enter(*callback_args)
    # Pre-order: the element itself comes before its children.
    yield (elt, path) if track else elt
    # Dicts are walked through their (key, value) items.
    children = elt.items() if isinstance(elt, dict) else elt
    if hasattr(children, "__iter__") and not isinstance(children, types.String):
        for idx, child in enumerate(children):
            sub_path = path.copy() + [(children, idx)] if track else False
            yield from iter(child, path=sub_path, enter=enter, exit=exit)
    if exit is not None:
        exit(*callback_args)
def next_node(graph, node, small_cave, all_paths, path=None):
    """Depth-first enumeration of cave paths from ``node`` to ``'end'``.

    Appends every completed path (a list of node names ending in
    ``'end'``) to ``all_paths``.  A small cave (per the module-level
    ``is_small`` predicate) may appear at most once per path, except
    ``small_cave`` which may appear twice.

    :param graph: adjacency mapping, node -> iterable of neighbour nodes
    :param node: the node currently being expanded
    :param small_cave: the one small cave allowed to be visited twice
    :param all_paths: accumulator list receiving completed paths
    :param path: nodes visited so far (``None`` starts a fresh path)
    """
    if path is None:
        path = []
    path.append(node)
    for n in graph[node]:
        if n == 'end':
            # Store a *new* list.  The previous code appended 'end' to
            # the shared ``path`` and stored that same object, so sibling
            # neighbours explored after this one inherited a stray 'end'
            # entry and every stored path aliased the mutable ``path``.
            all_paths.append(path + [n])
        elif not (is_small(n) and n in path) or n == small_cave and path.count(n) < 2:
            # Recurse on a copy so sibling branches stay independent.
            next_node(graph, n, small_cave, all_paths, path.copy())
def layerPath(self, root, path=None):
    """Index into the layer hierarchy recursively by a path of indices.

    :param root: root layer or group to start from.
    :param path: list of indices from ``root`` downwards (outermost
        first); ``None`` means an empty path.  The argument is never
        mutated.
    :return: the layer reached by following ``path``, or ``None`` if an
        intermediate lookup yields ``None``.
    """
    # ``path=None`` sentinel replaces the old mutable default ``path=[]``,
    # which could be mutated across calls via the AttributeError fallback
    # below (where ``hierarchy`` aliases ``path`` and is reversed in place).
    if path is None:
        path = []
    base = root  # base dir
    try:
        hierarchy = path.copy()
    except AttributeError:
        # Fallback for path objects without a .copy() method; note this
        # branch reverses the caller's object in place.
        hierarchy = path
    # Reverse so pop() consumes indices outermost-first.
    hierarchy.reverse()
    while base is not None and (base.__class__ == list or self.layer_is_group(base)) and len(hierarchy) != 0:
        base = self.layer_index(base, hierarchy.pop())
    return base
def analyseResult(resultPath,root,results,embedded,benchmark,trace,formatter):
    """Parse a test-run result stream, emitting formatted output and benchmark CSVs.

    State machine over the lines of ``results``:

    * NORMAL    -- parsing suite/group headers (``s``/``g`` lines) and pops (``p``)
    * INTEST    -- looking for a test status line (id error linenb cycles Y/N)
    * TESTPARAM -- reading the optional parameter line (``b x,x,...``) of a test

    :param resultPath: directory containing ``currentConfig.csv`` for this run
    :param root: parsed tree of suites/groups/tests, searched with ``findItem``
    :param results: iterable of result lines to parse
    :param embedded: when True, driver lines carry the FPGA ``S:`` prefix
    :param benchmark: root directory for ``fullBenchmark.csv`` output files
    :param trace: optional cycle trace; its first entry is calibration data
    :param formatter: output sink (start/printGroup/printTest/pop/end)
    """
    calibration = 0
    if trace:
        # First cycle in the trace is the calibration data
        # The normalisation factor must be coherent with the C code one.
        calibration = int(getCyclesFromTrace(trace) / 20)
    formatter.start()
    # Current position in the suite/group tree, as a list of node ids.
    path = []
    state = NORMAL
    prefix=""
    elem=None
    theId=None
    theError=None
    theLine=None
    passed=0
    cycles=None
    benchFile = None
    config=""
    if embedded:
        prefix = ".*S:[ ]"
    # Parse the result file.
    # NORMAL mode is when we are parsing suite or group.
    # Otherwise we are parsing a test and we need to analyse the
    # test result.
    # TESTPARAM is used to read parameters of the test.
    # Format of output is:
    #node ident : s id or g id or t or u
    #test status : id error linenb status Y or N (Y when passing)
    #param for this test b x,x,x,x or b alone if not param
    #node end : p
    # In FPGA mode:
    #Prefix S:[ ] before driver dump
    # D:[ ] before data dump (output patterns)
    for l in results:
        l = l.strip()
        # Data-dump lines (D:) are skipped entirely.
        if not re.match(r'^.*D:[ ].*$',l):
            if state == NORMAL:
                if len(l) > 0:
                    # Line starting with g or s is a suite or group.
                    # In FPGA mode, those line are prefixed with 'S: '
                    # and data file with 'D: '
                    if re.match(r'^%s[gs][ ]+[0-9]+.*$' % prefix,l):
                        # Extract the test id
                        theId=re.sub(r'^%s[gs][ ]+([0-9]+).*$' % prefix,r'\1',l)
                        theId=int(theId)
                        path.append(theId)
                        # From a list of id, find the TreeElem in the Parsed tree
                        # to know what is the node.
                        elem = findItem(root,path)
                        # Display formatted output for this node
                        if elem.params:
                            #print(elem.params.full)
                            benchPath = os.path.join(benchmark,elem.fullPath(),"fullBenchmark.csv")
                            createMissingDir(benchPath)
                            # A benchmark file should never still be open here.
                            if benchFile:
                                printf("ERROR BENCH FILE %s ALREADY OPEN" % benchPath)
                                benchFile.close()
                                benchFile=None
                            benchFile=open(benchPath,"w")
                            header = "".join(list(joinit(elem.params.full,",")))
                            # A test and a benchmark are different
                            # so we don't dump a status and error
                            # A status and error in a benchmark would
                            # impact the cycles since the test
                            # would be taken into account in the measurement
                            # So benchmark are always passing and contain no test
                            #benchFile.write("ID,%s,PASSED,ERROR,CYCLES\n" % header)
                            csvheaders = ""
                            # Pull the current run configuration (first row) and its
                            # header line out of currentConfig.csv.
                            with open(os.path.join(resultPath,'currentConfig.csv'), 'r') as f:
                                reader = csv.reader(f)
                                csvheaders = next(reader, None)
                                configList = list(reader)
                                #print(configList)
                                config = "".join(list(joinit(configList[0],",")))
                                configHeaders = "".join(list(joinit(csvheaders,",")))
                                benchFile.write("CATEGORY,NAME,ID,OLDID,%s,CYCLES,%s\n" % (header,configHeaders))
                        formatter.printGroup(elem,theId)
                    # If we have detected a test, we switch to test mode
                    if re.match(r'^%s[t][ ]*$' % prefix,l):
                        state = INTEST
                    # Pop
                    # End of suite or group
                    if re.match(r'^%sp.*$' % prefix,l):
                        if benchFile:
                            benchFile.close()
                            benchFile=None
                        path.pop()
                        formatter.pop()
            elif state == INTEST:
                if len(l) > 0:
                    # In test mode, we are looking for test status.
                    # A line starting with S
                    # (There may be empty lines or line for data files)
                    # Groups: 1=id, 2=error code, 3=line number,
                    # 4=cycle count or 't' (read cycles from trace), 5=Y/N status.
                    passRe = r'^%s([0-9]+)[ ]+([0-9]+)[ ]+([0-9]+)[ ]+([t0-9]+)[ ]+([YN]).*$' % prefix
                    if re.match(passRe,l):
                        # If we have found a test status then we will start again
                        # in normal mode after this.
                        m = re.match(passRe,l)
                        # Extract test ID, test error code, line number and status
                        theId=m.group(1)
                        theId=int(theId)
                        theError=m.group(2)
                        theError=int(theError)
                        theLine=m.group(3)
                        theLine=int(theLine)
                        maybeCycles = m.group(4)
                        # 't' means the cycle count comes from the trace file,
                        # corrected by the calibration overhead measured above.
                        if maybeCycles == "t":
                            cycles = getCyclesFromTrace(trace) - calibration
                        else:
                            cycles = int(maybeCycles)
                        status=m.group(5)
                        passed=0
                        # Convert status to number as used by formatter.
                        if status=="Y":
                            passed = 1
                        if status=="N":
                            passed = 0
                        # Compute path to this node
                        newPath=path.copy()
                        newPath.append(theId)
                        # Find the node in the Tree
                        elem = findItem(root,newPath)
                        state = TESTPARAM
                    else:
                        # Not a status line: handle pops and nested test markers.
                        if re.match(r'^%sp.*$' % prefix,l):
                            if benchFile:
                                benchFile.close()
                                benchFile=None
                            path.pop()
                            formatter.pop()
                        if re.match(r'^%s[t][ ]*$' % prefix,l):
                            state = INTEST
                        else:
                            state = NORMAL
            else:
                # TESTPARAM state: read the optional parameter line of the
                # test just parsed, then fall back to INTEST.
                if len(l) > 0:
                    state = INTEST
                    params=""
                    if re.match(r'^.*b[ ]+([0-9,]+)$',l):
                        m=re.match(r'^.*b[ ]+([0-9,]+)$',l)
                        params=m.group(1).strip()
                        # Format the node
                        #print(elem.fullPath())
                        #createMissingDir(destPath)
                        writeBenchmark(elem,benchFile,theId,theError,passed,cycles,params,config)
                    else:
                        params=""
                        writeBenchmark(elem,benchFile,theId,theError,passed,cycles,params,config)
                    # Format the node
                    formatter.printTest(elem,theId,theError,theLine,passed,cycles,params)
    formatter.end()
proccess = True for file in list_of_files: if proccess: current_file = file file_category = path.category_selector( path.extension_finder(current_file)) if file_category in locations: if locations[file_category].get( path.get_prefix(current_file)) != None: destination_root = locations[file_category].get( path.get_prefix(current_file)) # Check if there is a whitespace in the path, cause it cause infinite loop. if not (" " in current_file): new_file_destination = path.copy( current_file, destination_root) else: continue if os.path.exists(new_file_destination): os.remove(current_file) # Check if the file is moved and the proccess is done, otherwise wait until it is done. if not os.path.exists(current_file) and os.path.exists( new_file_destination): proccess = True else: proccess = False while not proccess: if not os.path.exists( current_file) and os.path.exists( new_file_destination):
def analyseResult(root, results, embedded, benchmark):
    """Parse a test-run result stream and summarise per-node benchmarks.

    Simplified variant of the full result analyser: it walks the same
    suite/group/test line format but only tracks the tree position and
    calls ``summaryBenchmark`` for nodes that have parameters.

    State machine over the lines of ``results``:

    * NORMAL    -- parsing suite/group headers (``s``/``g`` lines) and pops (``p``)
    * INTEST    -- looking for a test status line (id error linenb cycles Y/N)
    * TESTPARAM -- reading the optional parameter line of a test

    :param root: parsed tree of suites/groups/tests, searched with ``findItem``
    :param results: iterable of result lines to parse
    :param embedded: when True, driver lines carry the FPGA ``S:`` prefix
    :param benchmark: root directory for ``fullBenchmark.csv`` files
    """
    # Current position in the suite/group tree, as a list of node ids.
    path = []
    state = NORMAL
    prefix = ""
    elem = None
    theId = None
    theError = None
    theLine = None
    passed = 0
    cycles = None
    benchFile = None
    if embedded:
        prefix = ".*S:[ ]"
    # Parse the result file.
    # NORMAL mode is when we are parsing suite or group.
    # Otherwise we are parsing a test and we need to analyse the
    # test result.
    # TESTPARAM is used to read parameters of the test.
    # Format of output is:
    #node ident : s id or g id or t or u
    #test status : id error linenb status Y or N (Y when passing)
    #param for this test b x,x,x,x or b alone if not param
    #node end : p
    # In FPGA mode:
    #Prefix S:[ ] before driver dump
    # D:[ ] before data dump (output patterns)
    for l in results:
        l = l.strip()
        # Data-dump lines (D:) are skipped entirely.
        if not re.match(r'^.*D:[ ].*$', l):
            if state == NORMAL:
                if len(l) > 0:
                    # Line starting with g or s is a suite or group.
                    # In FPGA mode, those line are prefixed with 'S: '
                    # and data file with 'D: '
                    if re.match(r'^%s[gs][ ]+[0-9]+.*$' % prefix, l):
                        # Extract the test id
                        theId = re.sub(r'^%s[gs][ ]+([0-9]+).*$' % prefix, r'\1', l)
                        theId = int(theId)
                        path.append(theId)
                        # From a list of id, find the TreeElem in the Parsed tree
                        # to know what is the node.
                        elem = findItem(root, path)
                        # Display formatted output for this node
                        if elem.params:
                            #print(elem.params.full)
                            benchPath = os.path.join(benchmark, elem.fullPath(), "fullBenchmark.csv")
                            summaryBenchmark(elem, benchPath)
                    # If we have detected a test, we switch to test mode
                    if re.match(r'^%s[t][ ]*$' % prefix, l):
                        state = INTEST
                    # Pop
                    # End of suite or group
                    if re.match(r'^%sp.*$' % prefix, l):
                        path.pop()
            elif state == INTEST:
                if len(l) > 0:
                    # In test mode, we are looking for test status.
                    # A line starting with S
                    # (There may be empty lines or line for data files)
                    # Groups: 1=id, 2=error code, 3=line number, 4=cycles, 5=Y/N status.
                    passRe = r'^%s([0-9]+)[ ]+([0-9]+)[ ]+([0-9]+)[ ]+([0-9]+)[ ]+([YN]).*$' % prefix
                    if re.match(passRe, l):
                        # If we have found a test status then we will start again
                        # in normal mode after this.
                        m = re.match(passRe, l)
                        # Extract test ID, test error code, line number and status
                        theId = m.group(1)
                        theId = int(theId)
                        status = m.group(5)
                        passed = 0
                        # Convert status to number as used by formatter.
                        if status == "Y":
                            passed = 1
                        if status == "N":
                            passed = 0
                        # Compute path to this node
                        newPath = path.copy()
                        newPath.append(theId)
                        # Find the node in the Tree
                        elem = findItem(root, newPath)
                        state = TESTPARAM
                    else:
                        # Not a status line: handle pops and nested test markers.
                        if re.match(r'^%sp.*$' % prefix, l):
                            path.pop()
                        if re.match(r'^%s[t][ ]*$' % prefix, l):
                            state = INTEST
                        else:
                            state = NORMAL
            else:
                # TESTPARAM state: consume the parameter line and return to INTEST.
                # NOTE(review): the parameter parsing that the fuller variant of
                # this function performs here appears truncated in this chunk --
                # confirm against the complete file.
                if len(l) > 0:
                    state = INTEST
                    params = ""
def _visit_for_route_calculation(
    self, node: Extension, path: list
) -> IntermediateRoutingResult:
    """Compute the routing result for ``node``, recursing into its tree.

    Simple extensions resolve to a single call target; anything else is
    expanded into a fork whose targets are assembled from the node's
    fork ranks, with separators encoding rank timing, plus optional
    forward handling.

    :param node: the extension to route.
    :param path: node ids visited so far; copied, never mutated.
    :return: an IntermediateRoutingResult describing this node's routing.
    """
    local_path = path.copy()
    local_path.append(node.id)
    # first we check if this node has an immediate forward. If yes, we defer routing there.
    if node.immediate_forward:
        return self._visit(node.forwarding_extension, local_path)
    if YateRoutingGenerationVisitor.node_has_simple_routing(node):
        return self._make_intermediate_result(
            target=self.generate_simple_routing_target(node)
        )
    else:
        # this will require a fork
        # go through the callgroup ranks to issue the groups of the fork
        fork_targets = []
        # Sum of the timed-separator delays seen so far; compared against
        # the forwarding delay to decide when a time-based forward fires.
        accumulated_delay = 0
        for rank in node.fork_ranks:
            if fork_targets:
                # this is not the first rank, so we need to generate a separator
                if rank.mode == ForkRank.Mode.DROP:
                    separator = "|drop={}".format(rank.delay)
                    accumulated_delay += rank.delay
                elif rank.mode == ForkRank.Mode.NEXT:
                    separator = "|next={}".format(rank.delay)
                    accumulated_delay += rank.delay
                else:
                    separator = "|"
                    # If we see an untimed separator, any time-based forward is not possible anymore
                    if node.forwarding_mode == Extension.ForwardingMode.ENABLED:
                        node.routing_log(
                            "Non time-based fork rank is incompatible with time-based forward. "
                            "Disabling the forward.",
                            "WARN",
                            rank,
                        )
                        node.forwarding_mode = Extension.ForwardingMode.DISABLED
                if (
                    node.forwarding_mode == Extension.ForwardingMode.ENABLED
                    and accumulated_delay >= node.forwarding_delay
                ):
                    # all of those will not be called, as the forward takes effect now
                    node.routing_log(
                        "Fork rank (and following) are ignored due to time-based forward.",
                        "WARN",
                        rank,
                    )
                    break
                # Do not generate default params on pseudo targets
                fork_targets.append(CallTarget(separator))
            for member in rank.members:
                # do not route inactive members
                if not member.active:
                    continue
                member_route = self._visit(member.extension, local_path)
                if not member_route.is_valid:
                    rank.routing_log(
                        "Extension has no valid (non-empty) routing and is thus ignored.",
                        "WARN",
                        member.extension,
                    )
                    continue
                if member.type.is_special_calltype:
                    member_route.target.parameters[
                        "fork.calltype"
                    ] = member.type.fork_calltype
                # please note that we ignore the member modes for the time being
                fork_targets.append(member_route.target)
                self._cache_intermediate_result(member_route)
            if fork_targets and fork_targets[-1].target == "|":
                # We just created an empty default rank. This will cause the call to hang
                del fork_targets[-1]
                rank.routing_log(
                    "This created an empty default rank. "
                    "It will be removed to prevent call hang.",
                    "WARN",
                )
        # if this is a MULTIRING or (SIMPLE with forward), the extension itself needs to be part of the first group
        if node.type in (Extension.Type.MULTIRING, Extension.Type.SIMPLE):
            # in difference to groups, the first ForkRank can have type NEXT or DROP and we should respect it
            if len(node.fork_ranks) > 0:
                first_fork_rank = node.fork_ranks[0]
                if first_fork_rank.mode == ForkRank.Mode.NEXT:
                    fork_targets.insert(
                        0, CallTarget("|next={}".format(first_fork_rank.delay))
                    )
                elif first_fork_rank.mode == ForkRank.Mode.DROP:
                    fork_targets.insert(
                        0, CallTarget("|drop={}".format(first_fork_rank.delay))
                    )
                # If the fork rank is default, we assume that multiring should start with the main extension
                # and do nothing here
            fork_targets.insert(0, self.generate_simple_routing_target(node))
        # Handle forwards
        if node.forwarding_mode == Extension.ForwardingMode.ON_BUSY:
            # There should be no call waiting on all previous call legs.
            for target in fork_targets:
                if not target.is_separator:
                    target.parameters["osip_X-No-Call-Wait"] = "1"
        if node.forwarding_mode in (
            Extension.ForwardingMode.ENABLED,
            Extension.ForwardingMode.ON_BUSY,
            Extension.ForwardingMode.ON_UNAVAILABLE,
        ):
            # this is non-immediate forward
            forwarding_route = self._visit(node.forwarding_extension, local_path)
            if node.forwarding_mode == Extension.ForwardingMode.ENABLED:
                # The forward drop fires after whatever delay the ranks already consumed.
                fwd_delay = node.forwarding_delay - accumulated_delay
                fork_targets.append(CallTarget("|drop={}".format(fwd_delay)))
            else:
                # Add a default rank, call will progress to next rank when all previous calls failed
                fork_targets.append(CallTarget("|"))
            fork_targets.append(forwarding_route.target)
            self._cache_intermediate_result(forwarding_route)
        return self._make_intermediate_result(
            fork_targets=fork_targets,
            target=self._make_calltarget(
                self.generate_deferred_routestring(local_path)
            ),
        )