def __generate_process(self, func_obj, identifier):
    """
    Generate a separate process with a function call.

    :param func_obj: Function object.
    :param identifier: Identifier of the function.
    :return: a new Process object.
    """
    new_process = Process(f"{func_obj.name}_{identifier}", "manual")
    new_process.comment = "Call function {!r}.".format(func_obj.name)
    new_process.self_parallelism = False

    # Compose the behaviour expression: register, call, then deregister
    registration = self.__reg_name(identifier)
    deregistration = self.__dereg_name(identifier)
    expression = f"(!{registration}).<call>.({deregistration})"

    # Parse the expression and create stub descriptions for all actions
    parse_process(new_process, expression)
    new_process.actions.populate_with_empty_descriptions()

    # Fill the Call action with the actual function invocation
    call_action = new_process.actions['call']
    call_action.statements = [self.__generate_call(new_process, func_obj.name, func_obj, identifier)]
    call_action.comment = f"Call the function {func_obj.name}."

    return new_process
def __generate_separate_processes(self, functions_collection):
    """
    Generate the main process and child processes. The main process registers child processes and each of which
    calls a separate function. This would allow to spawn a thread per process.

    :param functions_collection: Dictionary: function name -> a list of Function objects.
    :return: name -> child Process, main Process object.
    :raises RuntimeError: If the collection yields no functions at all.
    """
    processes = dict()

    # Make the main process
    main_process = Process("main")
    main_process.comment = "Main entry point."
    main_process.self_parallelism = False

    # Generate a child process per function object and collect its (de)registration actions
    reg_list = []
    dereg_list = []
    pairs = ((func, obj) for func in functions_collection for obj in functions_collection[func])
    for identifier, (func, obj) in enumerate(pairs):
        self.logger.info("Call function {!r} from {!r}".format(func, obj.definition_file))
        decl = obj.declaration.to_string(func, typedef='none', scope={obj.definition_file})
        self.logger.debug(f"Function has the signature: '{decl}'")
        child_process = self.__generate_process(obj, identifier)
        processes[str(child_process)] = child_process

        reg_name = self.__reg_name(identifier)
        reg_list.append(f"[{reg_name}]")
        dereg_name = self.__dereg_name(identifier)
        # Deregistrations happen in reverse order relative to registrations
        dereg_list.insert(0, f"[{dereg_name}]")

    # The original used 'for ... else' here, but the loop has no 'break', so the else branch always ran;
    # a plain post-loop check expresses the same intent without the misleading construct.
    if not reg_list:
        raise RuntimeError("There are no functions to call")

    process = ".".join(reg_list + dereg_list)
    self.logger.debug(f"Going to parse main process: '{process}'")
    parse_process(main_process, process)
    main_process.actions.populate_with_empty_descriptions()

    # Now establish peers between children and the main process
    for child in processes.values():
        child.establish_peers(main_process)

    return processes, main_process
def test_method(*args, **kwargs):
    """Parse every expression yielded by the wrapped method and check it can be exported back."""
    for test in original_method(*args, **kwargs):
        process = Process('test')
        obj = parse_process(process, test)
        # Give the assertion a message, consistent with the other test_method wrapper in this file
        assert obj, f'Cannot parse {test}'
        desc = CollectionEncoder._export_process(process)
        assert desc, f'Cannot export parsed process for {test}'
def process():
    """Build a test process: 'a' is a receive, 'b' a block, and 'c', 'd', 'e' are dispatches."""
    obj = Process('test')
    expression = "(((a).<b> | [c]) . [d]) | [e]"

    # The expression must parse successfully before actions are assigned
    assert parse_process(obj, expression)

    obj.actions['a'] = Receive('a')
    obj.actions['b'] = Block('b')
    for dispatch_name in ('c', 'd', 'e'):
        obj.actions[dispatch_name] = Dispatch(dispatch_name)

    return obj
def test_detailed_parsing():
    """Parse a process with a subprocess, then export, re-import and repeat the checks."""
    main_expr = "(((a).<b> | [c]) . {d}) | [e]"
    subp_expr = "([f].<g>) | {d}"

    # First round: parse the raw expressions and wire the subprocess
    process = Process('test')
    assert parse_process(process, main_expr)
    next_action = parse_process(process, subp_expr)
    assert next_action
    process.actions['d'] = Subprocess('d')
    process.actions['d'].action = next_action
    check_parsed_object(process.actions)

    # Second round: serialize, parse the exported expressions and check again
    desc = CollectionEncoder()._serialize_process(process)
    process = Process('test')
    assert parse_process(process, desc['process'])
    next_action = parse_process(process, desc['actions']['d']['process'])
    assert next_action
    process.actions['d'] = Subprocess('d')
    process.actions['d'].action = next_action
    check_parsed_object(process.actions)
def test_first_actions():
    """Check that first_actions() reports the set of actions a process may start from."""

    def fresh(expression):
        # Build a new process 'x' with the given behaviour expression parsed into it
        proc = Process('x')
        parse_process(proc, expression)
        return proc

    # Two parallel conditions: either may run first
    p1 = fresh('<a> | <b>')
    p1.actions.populate_with_empty_descriptions()
    assert p1.actions.first_actions() == {'a', 'b'}

    # A sequence starts only from its head
    p1 = fresh('<a>.<b>')
    p1.actions.populate_with_empty_descriptions()
    assert p1.actions.first_actions() == {'a'}

    # Parallel branches each contribute their own head
    p1 = fresh('<a>.<b> | <c>')
    p1.actions.populate_with_empty_descriptions()
    assert p1.actions.first_actions() == {'a', 'c'}

    # A subprocess reference expands to the first actions of its body
    p1 = fresh('<a> | {b}')
    t = parse_process(p1, '<c>')
    p1.actions.populate_with_empty_descriptions()
    p1.actions['b'].action = t
    assert p1.actions.first_actions() == {'a', 'c'}
    assert p1.actions.first_actions(t) == {'c'}
def _prepare_empty_process(process):
    """Fill the given process with the fixed behaviour expression used by the tests."""
    expression = "(((a).<b> | [c]) . [d]) | [e]"
    parsed = parse_process(process, expression)
    assert parsed
def _import_process(self, source, name, category, dic):
    """
    Create a new process and fill it in with attributes from the given description.

    :param source: Source collection used to resolve file paths and source functions.
    :param name: Process name.
    :param category: Process category.
    :param dic: Dictionary with the process description.
    :return: New process object.
    :raises KeyError: If the 'process' or 'comment' attribute is missing.
    :raises ValueError: If an action description refers to an action unused in the process.
    :raises RuntimeError: If the process has unused labels or no initial action.
    """
    process = self.PROCESS_CONSTRUCTOR(name, category)

    # Import labels
    for label_name in dic.get('labels', {}):
        label = self._import_label(label_name, dic['labels'][label_name])
        process.labels[label_name] = label

    # Import the main behaviour expression
    if 'process' in dic:
        parse_process(process, dic['process'])
    else:
        raise KeyError(
            "Each process must have 'process' attribute, but {!r} misses it".format(name))

    # Then import subprocesses.
    # Fix: the original reused 'name' as the loop variable here, clobbering the process name that the
    # 'comment' error message below relies on; a distinct name keeps diagnostics correct.
    next_actions = sortedcontainers.SortedDict()
    for subp_name, desc in dic.get('actions', {}).items():
        subp = desc.get('process')
        if subp:
            next_action = parse_process(process, subp)
            next_actions[subp_name] = next_action

    # Connect actions
    for action in process.actions.filter(include={Subprocess}):
        action.action = next_actions[action.reference_name]

    # Import comments
    if 'comment' in dic:
        process.comment = dic['comment']
    else:
        raise KeyError(
            "You must specify manually 'comment' attribute within the description of {!r} kernel "
            "function model process".format(name))

    # Import actions (a key may describe several comma-separated actions at once)
    for some_name, description in dic.get('actions', {}).items():
        names = some_name.split(", ")
        for act_name in names:
            if not process.actions.get(act_name):
                if description.get('process'):
                    # The description belongs to a subprocess referenced by name
                    for act in (a for a in process.actions.filter(include={Subprocess})
                                if a.reference_name == act_name):
                        self._import_action(process, act, dict(description))
                else:
                    raise ValueError(
                        'Action {!r} was not used in {!r} process'.format(act_name, str(process)))
            else:
                self._import_action(process, process.actions[act_name], description)

    # Copy simple process attributes, possibly under a mapped attribute name
    for att in self.PROCESS_ATTRIBUTES:
        if att in dic:
            if self.PROCESS_ATTRIBUTES[att]:
                attname = self.PROCESS_ATTRIBUTES[att]
            else:
                attname = att
            setattr(process, attname, dic[att])

    # Fix paths in manual specification
    for att in ('definitions', 'declarations'):
        # Avoid iterating over the dictionary that can change its content
        if att in dic:
            dic_copy = dict(dic[att])
            for def_file in dic[att]:
                dic_copy[source.find_file(def_file)] = dic_copy.pop(def_file)
            # Update object to be sure that changes are saved there
            setattr(process, att, dic_copy)

    # Every declared label must be used somewhere in the process
    unused_labels = {str(label) for label in process.unused_labels}
    if unused_labels:
        raise RuntimeError(
            "Found unused labels in process {!r}: {}".format(
                str(process), ', '.join(unused_labels)))
    if process.file != 'entry point':
        process.file = source.find_file(process.file)
    if not process.actions.initial_action:
        raise RuntimeError('Process {!r} has no initial action'.format(str(process)))

    process.accesses()
    return process
def __generate_calls_together(self, functions_collection):
    """
    Generate a single process with a large switch for all given functions.

    :param functions_collection: dictionary from functions to lists of Function objects.
    :return: Main process
    """
    def indented_line(t, s):
        # Prepend t tab characters to the statement s
        return (t * "\t") + s

    loop = self.conf.get("infinite calls sequence")

    # Generate the process
    ep = Process("main")
    ep.comment = "Call exported functions."
    ep.pretty_id = 'generic'
    ep.process = ''

    # Generate call expressions for the whole sequence.
    # Fix: the original never advanced 'identifier', so every generated call shared identifier 0;
    # each call now gets a unique identifier, matching the other generators in this module.
    expressions = []
    identifier = 0
    for func in functions_collection:
        for obj in functions_collection[func]:
            self.logger.info("Call function {!r} from {!r}".format(func, obj.definition_file))
            expr = self.__generate_call(ep, func, obj, identifier)
            expressions.append(expr)
            identifier += 1

    # Generate the C switch that picks one call nondeterministically
    code = []
    tab = 0
    if loop:
        code.append(indented_line(tab, "while (1) {"))
        tab += 1
    code.append(indented_line(tab, "switch (ldv_undef_int()) {"))
    tab += 1
    for cnt, expr in enumerate(expressions):
        # Add a break after a function call
        code.append(indented_line(tab, "case {}: ".format(cnt) + '{'))
        code.append(indented_line(tab + 1, "{}".format(expr)))
        code.append(indented_line(tab + 1, "break;"))
        code.append(indented_line(tab, "}"))
    if loop:
        code.append(indented_line(tab, "default: break;"))
    else:
        code.append(indented_line(tab, "default: ldv_assume(0);"))
    tab -= 1
    code.append(indented_line(tab, "}"))
    if loop:
        code.append("}")
        tab -= 1

    ep.actions.add_condition('function_calls', [], code, 'Call all functions independently.')
    ep.process = "<function_calls>"
    parse_process(ep, ep.process)
    ep.actions.populate_with_empty_descriptions()
    return ep
def __generate_insmod_process(self, source, inits, exits, kernel_initializations):
    """
    Generate the artificial 'insmod' process that calls module init and exit functions.

    :param source: Source collection used to resolve kernel initialization functions.
    :param inits: List of (file name, init function name) pairs.
    :param exits: List of (file name, exit function name) pairs.
    :param kernel_initializations: List of (name, calls) pairs where calls are (file, function) pairs.
    :return: New 'insmod' Process object.
    :raises NotImplementedError: If there are neither kernel initialization nor module init functions.
    :raises RuntimeError: If a kernel initialization function cannot be resolved in the source.
    """
    self.logger.info(
        "Generate artificial process description to call Init and Exit module functions 'insmod'")
    ep = Process("insmod")
    ep.comment = "Initialize or exit module."
    ep.self_parallelism = False

    # Add subprocesses finally
    # Build the behaviour expression: each init opens a bracket with a failed and a success branch.
    process = ''
    for i, pair in enumerate(inits):
        process += "<{0}>.(<init_failed_{1}>".format(pair[1], i)
        for j, pair2 in enumerate(exits[::-1]):
            if pair2[0] == pair[0]:
                break
        # NOTE(review): this unconditionally overwrites the j found by the search loop above,
        # making that loop a no-op — presumably a latent bug; confirm intended behaviour.
        j = 1
        for _, exit_name in exits[:j - 1:-1]:
            process += ".<{}>".format(exit_name)
        process += "|<init_success_{}>.".format(i)
    # After all inits succeed, call every exit function in order
    for _, exit_name in exits:
        process += "<{}>.".format(exit_name)
    # Remove the last dot
    process = process[:-1]
    # Close the bracket opened for each init
    process += ")" * len(inits)

    if kernel_initializations and inits:
        # NOTE(review): '+=' embeds the already-built expression inside itself (process appears on
        # both sides), duplicating it; '=' looks intended — confirm.
        process += "<kernel_initialization>." \
                   "(<kerninit_success> | <kerninit_failed>.(" + process + "))"
    elif kernel_initializations and not inits:
        # NOTE(review): these action names differ from the '<kerninit_success>/<kerninit_failed>'
        # names used in the branch above and from the lookups below — confirm consistency.
        process += "<kernel_initialization>.(<kernel_initialization_success> | <kernel_initialization_fail>)"
    elif not inits and not kernel_initializations:
        raise NotImplementedError(
            "There is no both kernel initialization functions and module initialization "
            "functions")

    # This populates all actions
    parse_process(ep, process)
    ep.actions.populate_with_empty_descriptions()

    if len(kernel_initializations) > 0:
        # Generate an aggregate C function that performs all kernel initializations in turn
        body = ["int ret;"]
        label_name = 'emg_kernel_initialization_exit'

        # Generate kernel initializations
        for name, calls in kernel_initializations:
            for filename, func_name in calls:
                func = source.get_source_function(func_name, filename)
                if func:
                    retval = not func.declaration.return_value == 'void'
                else:
                    raise RuntimeError(
                        "Cannot resolve function {!r} in file {!r}".format(name, filename))
                new_name = self.__generate_alias(ep, func_name, filename, retval)
                statements = []
                if retval:
                    # A failed initializer jumps straight to the exit label
                    statements.extend([
                        "ret = {}();".format(new_name),
                        "ret = ldv_post_init(ret);",
                        "if (ret)",
                        "\tgoto {};".format(label_name)
                    ])
                else:
                    statements.append("{}();".format(new_name))
                body.extend(statements)
        body.extend(["{}:".format(label_name), "return ret;"])

        func = Function('emg_kernel_init', 'int emg_kernel_init(void)')
        func.body = body
        addon = func.define()
        ep.add_definition('environment model', 'emg_kernel_init', addon)

        # NOTE(review): the key contains a space while the parsed action is named
        # 'kernel_initialization' (underscore) — this lookup looks like it would raise KeyError; confirm.
        ki_subprocess = ep.actions['kernel initialization']
        ki_subprocess.statements = ["%ret% = emg_kernel_init();"]
        ki_subprocess.comment = 'Kernel initialization stage.'
        ki_subprocess.trace_relevant = True

        # NOTE(review): the parsed expression names this action 'kerninit_success', not
        # 'ki_success' — confirm this key.
        ki_success = ep.actions['ki_success']
        ki_success.condition = ["%ret% == 0"]
        ki_success.comment = "Kernel initialization is successful."

        ki_failed = ep.actions['kerninit_failed']
        ki_failed.condition = ["%ret% != 0"]
        ki_failed.comment = "Kernel initialization is unsuccessful."

    if len(inits) > 0:
        # Generate init subprocess
        for filename, init_name in inits:
            self.logger.debug("Found init function {!r}".format(init_name))
            new_name = self.__generate_alias(ep, init_name, filename, True)
            init_subprocess = ep.actions[init_name]
            init_subprocess.comment = 'Initialize the module after insmod with {!r} function.'.format(
                init_name)
            init_subprocess.statements = [
                "%ret% = {}();".format(new_name),
                "%ret% = ldv_post_init(%ret%);"
            ]
            init_subprocess.trace_relevant = True

    # Add ret label
    ep.add_label('ret', import_declaration("int label"))

    # Generate exit subprocess
    if len(exits) == 0:
        self.logger.debug("There is no exit function found")
    else:
        for filename, exit_name in exits:
            self.logger.debug("Found exit function {!r}".format(exit_name))
            new_name = self.__generate_alias(ep, exit_name, filename, False)
            exit_subprocess = ep.actions[exit_name]
            exit_subprocess.comment = 'Exit the module before its unloading with {!r} function.'.format(
                exit_name)
            exit_subprocess.statements = ["{}();".format(new_name)]
            exit_subprocess.trace_relevant = True

    # Generate successful conditions
    for action in (a for a in ep.actions.filter(include={Block})
                   if str(a).startswith('init_success')):
        action.condition = ["%ret% == 0"]
        action.comment = "Module has been initialized."

    # Generate else branch
    for action in (a for a in ep.actions.filter(include={Block})
                   if str(a).startswith('init_failed')):
        action.condition = ["%ret% != 0"]
        action.comment = "Failed to initialize the module."

    return ep
def test_method(*args, **kwargs):
    """Check that every expression produced by the wrapped method parses successfully."""
    for test in original_method(*args, **kwargs):
        proc = Process('test')
        parsed = parse_process(proc, test)
        assert parsed, f'Cannot parse {test}'
def _import_process(self, source, name, category, dic):
    """
    Create a new process and fill it in with attributes from the given description.

    :param source: Source collection used to resolve file paths.
    :param name: Process name.
    :param category: Process category.
    :param dic: Dictionary with the process description.
    :return: New process object.
    :raises KeyError: If the 'process' or 'comment' attribute is missing.
    :raises ValueError: If an action description refers to an action unused in the process.
    :raises RuntimeError: If the process has unreachable actions, unused labels or no initial action.
    """
    # This helps to avoid changing the original specification
    dic = copy.deepcopy(dic)

    process = self.PROCESS_CONSTRUCTOR(name, category)

    # Import labels
    for label_name in dic.get('labels', dict()):
        label = self._import_label(label_name, dic['labels'][label_name])
        process.labels[label_name] = label

    # Import the main behaviour expression
    if 'process' in dic:
        parse_process(process, dic['process'])
    else:
        raise KeyError(
            "Each process must have 'process' attribute, but {!r} misses it".format(name))

    # Then import subprocesses.
    # Fix: the original reused 'name' as the loop variable here (and again below), clobbering the
    # process name that the 'comment' error message relies on; distinct names keep diagnostics correct.
    next_actions = sortedcontainers.SortedDict()
    for subp_name, desc in dic.get('actions', dict()).items():
        subp = desc.get('process')
        if subp:
            next_action = parse_process(process, subp)
            next_actions[subp_name] = next_action

    # Import comments
    if 'comment' in dic and isinstance(dic['comment'], str):
        process.comment = dic['comment']
    else:
        raise KeyError(
            "You must specify manually 'comment' attribute within the description of {!r} kernel "
            "function model process".format(name))

    # Import actions (a key may describe several comma-separated actions at once)
    for some_name, description in dic.get('actions', {}).items():
        names = some_name.split(", ")
        for act_name in names:
            if act_name not in (x.name for x in process.actions.final_actions):
                raise ValueError(
                    f"Action '{act_name}' was not used in '{str(process)}' process")
            self._import_action(process, act_name, description)

    # Connect actions
    for subp_name in next_actions:
        process.actions[subp_name].action = next_actions[subp_name]

    # Copy simple process attributes, possibly under a mapped attribute name
    for att in self.PROCESS_ATTRIBUTES:
        if att in dic:
            if self.PROCESS_ATTRIBUTES[att]:
                attname = self.PROCESS_ATTRIBUTES[att]
            else:
                attname = att
            setattr(process, attname, dic[att])

    # Fix paths in manual specification
    for att in ('definitions', 'declarations'):
        # Avoid iterating over the dictionary that can change its content
        if att in dic:
            dic_copy = dict(dic[att])
            for def_file in dic[att]:
                dic_copy[source.find_file(def_file)] = dic_copy.pop(def_file)
            # Update object to be sure that changes are saved there
            setattr(process, att, dic_copy)

    # Check unused recursive subprocesses (also fixes the 'unrechable' typo in the local name)
    reachable_actions = process.actions.used_actions(enter_subprocesses=True)
    unreachable_actions = {a.name for a in process.actions.final_actions}.difference(reachable_actions)
    if unreachable_actions:
        raise RuntimeError("Process {!r} has unreachable actions: {}".
                           format(str(process), ', '.join(sorted(unreachable_actions))))

    # Every declared label must be used somewhere in the process
    unused_labels = {str(label) for label in process.unused_labels}
    if unused_labels:
        raise RuntimeError(
            "Found unused labels in process {!r}: {}".format(
                str(process), ', '.join(unused_labels)))
    if process.file != 'entry point':
        process.file = source.find_file(process.file)
    if not process.actions.initial_action:
        raise RuntimeError('Process {!r} has no initial action'.format(str(process)))

    # Savepoint names must not collide with action names
    intrs = set(process.actions.keys()).intersection(process.actions.savepoints)
    assert not intrs, "Process must not have savepoints with the same names as actions, but there is an" \
                      " intersection: %s" % ', '.join(intrs)

    process.accesses()
    return process