def __read_one_requirement(self, fileinfo, input_mods, object_cache):
    '''Read a single requirement from the given file info.

    Files not ending in '.req' are skipped.  Parsed requirements are
    cached by their VCS id; usable ones are stored in the map.
    '''
    tracer.debug("Called.")
    filename = fileinfo.get_filename()
    # Only '.req' files contain requirements.
    if not filename.endswith(".req"):
        tracer.info("skipping file [%s]", filename)
        return
    # Try the cache first; fall back to parsing the file content.
    vcs_id = fileinfo.get_vcs_id()
    rid = fileinfo.get_filename_sub_part()[:-4]
    req = object_cache.get("Requirement", vcs_id)
    tracer.info("Reading requirement [%s]", rid)
    if req is None:
        req = Requirement(fileinfo.get_content(), rid, filename,
                          input_mods, self._config)
        # Remember the freshly parsed requirement for later runs.
        object_cache.add(vcs_id, "Requirement", req)
    self._adapt_usablility(req)
    if req.is_usable():
        # Store in the map, so that the node is easily accessible by id.
        self.add_requirement(req)
    else:
        logger.error(LogFormatter.format(
            45, "could not be parsed", req.get_id()))
    tracer.debug("Finished.")
def __read_one_testcase(self, fileinfo, input_mods, object_cache):
    '''Read in one testcase from the file info.

    Skips files that do not end in '.tec'.  Parsed test cases are
    cached by VCS id; usable ones are added to the testcase map.
    '''
    tracer.debug("Called.")
    # Check for correct filename
    if not fileinfo.get_filename().endswith(".tec"):
        # Lazy %-args: the message is only built when the level
        # is enabled (was eager '%' formatting).
        tracer.info("skipping file [%s]", fileinfo.get_filename())
        return
    # Handle caching.
    vcs_id = fileinfo.get_vcs_id()
    rid = fileinfo.get_filename_sub_part()[:-4]
    testcase = object_cache.get("TestCase", vcs_id)
    tracer.info("Reading testcase [%s]", rid)
    # 'is None' instead of '== None' for the identity check.
    if testcase is None:
        file_content = fileinfo.get_content()
        testcase = TestCase(file_content, rid, fileinfo.get_filename(),
                            input_mods, self._config)
        # Add the testcase to the cache.
        object_cache.add(vcs_id, "TestCase", testcase)
    self._adapt_usablility(testcase)
    if testcase.is_usable():
        # Store in the map, so that it is easy to access the
        # node by id.
        self._add_testcase(testcase)
    else:
        logger.error(LogFormatter.format(
            115, "could not be parsed", testcase.id))
    tracer.debug("Finished.")
def __get_file_infos_from_tree(self, tree, base_dir):
    '''Returns all the file infos recursive starting with the
    given directory.'''
    # Lazy %-args: formatting happens only when the level is
    # enabled (was eager '%' formatting).
    tracer.info("called: base [%s]", base_dir)
    base_dir_split = base_dir.split("/")
    ltree = self.__get_tree(tree, base_dir_split)
    return self.__get_file_infos_from_tree_rec(ltree, base_dir_split, [])
def __init__(self, oconfig):
    '''Create a graph output object.'''
    tracer.info("Called.")
    StdOutputParams.__init__(self, oconfig)
    CreateMakeDependencies.__init__(self)
    self.__ce3set = None
    self.__fd = None
    self.__constraints_reqs_ref = {}
    self.__testcases = None
    # Set up the Jinja2 environment with alternative delimiters so
    # the templates do not clash with the output format.
    loader = jinja2.FileSystemLoader(
        searchpath=oconfig['template_path'])
    base_env = jinja2.Environment(loader=loader)
    self._template_env = base_env.overlay(
        block_start_string='((*', block_end_string='*))',
        variable_start_string='(((', variable_end_string=')))',
        comment_start_string='((=', comment_end_string='=))')
    # Provide a default attribute list when none is configured.
    if not self._config.is_available('req_attributes'):
        self._config.set_value(
            'req_attributes',
            ["Id", "Priority", "Owner", "Invented on",
             "Invented by", "Status", "Class"])
    self.__level = -1
def __init__(self, config):
    """Set up the input interface: store the topic root node and
    initialize the configured directories."""
    tracer.info("called")
    wrapped_cfg = Cfg(config)
    Interface.__init__(self, wrapped_cfg)
    self.__topic_root_node = wrapped_cfg.get_rvalue("topic_root_node")
    self.__dirs = {}
    self.__setup_directories(wrapped_cfg)
def __init__(self, oconfig):
    '''Create a graph output object.'''
    tracer.info("Called.")
    StdOutputParams.__init__(self, oconfig)
    CreateMakeDependencies.__init__(self)
    self.__ce3set = None
    self.__fd = None
    self.__constraints_reqs_ref = {}
    self.__testcases = None
    # Jinja2 initialisation: use non-standard delimiters so the
    # templates do not conflict with the generated output.
    template_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(
            searchpath=oconfig['template_path']))
    self._template_env = template_env.overlay(
        block_start_string='((*',
        block_end_string='*))',
        variable_start_string='(((',
        variable_end_string=')))',
        comment_start_string='((=',
        comment_end_string='=))')
    # Fall back to a default requirement attribute set.
    if not self._config.is_available('req_attributes'):
        default_attributes = ["Id", "Priority", "Owner", "Invented on",
                              "Invented by", "Status", "Class"]
        self._config.set_value('req_attributes', default_attributes)
    self.__level = -1
def __get_file_infos_from_tree(self, tree, base_dir):
    '''Collect the file infos of all files below the given directory.'''
    tracer.info("called: base [%s]", base_dir)
    path_parts = base_dir.split("/")
    subtree = self.__get_tree(tree, path_parts)
    return self.__get_file_infos_from_tree_rec(subtree, path_parts, [])
def __init__(self, self_cfg, import_dest):
    """Set up the importer configuration and destination directories."""
    tracer.info("called")
    self.useable = False
    self._cfg = dict(self.default_config)
    self._cfg.update(self_cfg)
    self._dest = dict()
    self._entries = None
    self._topics = None
    import_dest_cfg = Cfg(import_dest)
    # A destination key is accepted when it is configured and its
    # first entry points to an existing directory.
    for dest_key in ("requirements_dirs", "topics_dirs"):
        try:
            dest_dirs = import_dest_cfg.get_rvalue(dest_key)
            if dest_dirs[0] and os.path.isdir(dest_dirs[0]):
                self.useable = True
                self._dest[dest_key] = dest_dirs[0]
        except RMTException:
            # Missing configuration entry: not usable (may be set
            # usable again by a later key, as in the original).
            self.useable = False
    self._wb = None
    tracer.debug("Finished.")
def __read_one_requirement(self, fileinfo, input_mods, object_cache):
    '''Read in one requirement from the file info.

    Skips files that do not end in '.req'.  Parsed requirements
    are cached by VCS id; usable ones are stored in the map and
    added as digraph nodes.
    '''
    tracer.debug("Called.")
    # Check for correct filename
    if not fileinfo.get_filename().endswith(".req"):
        # Lazy %-args instead of eager '%' string formatting.
        tracer.info("skipping file [%s]", fileinfo.get_filename())
        return
    # Handle caching.
    vcs_id = fileinfo.get_vcs_id()
    rid = fileinfo.get_filename_sub_part()[:-4]
    req = object_cache.get("Requirement", vcs_id)
    tracer.info("Reading requirement [%s]", rid)
    # 'is None' instead of '== None' for the identity check.
    if req is None:
        file_content = fileinfo.get_content()
        req = Requirement(file_content, rid, fileinfo.get_filename(),
                          input_mods, self._config)
        # Add the requirement to the cache.
        object_cache.add(vcs_id, "Requirement", req)
    self._adapt_usablility(req)
    if req.is_usable():
        dnreq = RequirementDNode(req)
        # Store in the map, so that it is easy to access the
        # node by id; also add it to the digraph for simple
        # access to the digraph algorithms.
        self._add_requirement(req)
        self.add_node(dnreq)
    else:
        logger.error(LogFormatter.format(
            45, "could not be parsed", req.id))
    tracer.debug("Finished.")
def __init__(self, oconfig):
    """Create an openpyxl output object."""
    tracer.info("Called.")
    # Initialize both output base classes.
    StdOutputParams.__init__(self, oconfig)
    CreateMakeDependencies.__init__(self)
    self.__fd = None
    self.__ce3set = None
    # The XLS handler does the actual workbook writing.
    self._opiface = XlsHandler(self._output_filename, self._config)
def log_stats(self):
    '''Prints out the usage statistics.

    Logs object counts, get-call counts and the cache hit ratio.
    '''
    # Guard against ZeroDivisionError when get() was never called.
    if self.__stats_cnt_get > 0:
        hit_ratio = float(self.__stats_cnt_get_found) \
            / self.__stats_cnt_get
    else:
        hit_ratio = 0.0
    # Lazy %-args instead of eager '%' string formatting.
    tracer.info("Usage statistics: objects [%d] object types [%d] "
                "get called [%d] get called (cached found) [%d] "
                "cache hit ratio [%4.3f].",
                self.__stats_cnt_objects, self.__stats_cnt_object_types,
                self.__stats_cnt_get, self.__stats_cnt_get_found,
                hit_ratio)
def find_master_nodes(self):
    '''Collect all master nodes (requirements without incoming
    edges) and store them in a class field.'''
    masters = set()
    for requirement in self.nodes:
        if not requirement.incoming:
            tracer.debug("Found master nodes [%s]", requirement.get_id())
            masters.add(requirement)
    self.__master_nodes = masters
    tracer.info("Found [%d] master nodes", len(masters))
def __init__(self, oconfig):
    '''Create a graph output object.'''
    tracer.info("Called.")
    StdOutputParams.__init__(self, oconfig)
    CreateMakeDependencies.__init__(self)
    self.__used_vcs_id = None
    self.__output_file = None
    # Fall back to a default attribute set when none was configured.
    if not self._config.is_available('node_attributes'):
        default_attrs = ["Type", "Status", "Class", "Topic",
                         "Priority", ]
        self._config.set_value('node_attributes', default_attrs)
def __get_file_infos_from_tree_rec(self, tree, base_dir, sub_dir):
    '''Return all file infos of the given tree, recursively.'''
    tracer.info("called: base [%s] sub [%s]", base_dir, sub_dir)
    # File infos for the blobs of this directory level.
    result = [Git.FileInfo(base_dir, sub_dir, blob)
              for blob in tree.blobs]
    # Descend into each subtree with an extended sub directory path;
    # 'sub_dir + [name]' builds a fresh list, leaving sub_dir intact.
    for subtree in tree.trees:
        result.extend(self.__get_file_infos_from_tree_rec(
            subtree, base_dir, sub_dir + [subtree.name]))
    return result
def create(input_method, input_config):
    '''Create new input handler from given parameters.

    Returns None for methods prefixed with 'ignore:'.
    Raises ValueError for unknown input methods.
    '''
    # Lazy %-args instead of eager '%' string formatting.
    tracer.info("Called: name [%s].", input_method)
    if input_method.startswith("ignore:"):
        tracer.info("Ignoring factory entry.")
        return None
    if input_method not in Factory.known_input_types:
        # 'assert False' is stripped under python -O; raise an
        # explicit exception for this input validation instead.
        raise ValueError("Unknown input method [%s]" % input_method)
    return Factory.known_input_types[input_method](input_config)
def __common_topic_continuum_pre(self, topic_continuum, special):
    '''Common method used by cmad_ and normal callback.'''
    tracer.info("Called.")
    # Run every configured output plugin over the continuum.
    for plugin_name, plugin_configs in iteritems(
            topic_continuum.get_output_config()):
        for plugin_cfg in plugin_configs:
            output_obj = \
                self.__plugin_manager[plugin_name].plugin(plugin_cfg)
            if special != "":
                FuncCall.pcall(output_obj, "init_" + special,
                               self.__cmad_file)
            topic_continuum.execute(output_obj, special)
    tracer.info("Finished.")
def __init__(self, oconfig):
    '''Create a graph output object.'''
    tracer.info("Called.")
    StdOutputParams.__init__(self, oconfig)
    CreateMakeDependencies.__init__(self)
    self.__ce3set = None
    self.__fd = None
    # Default requirement attributes when not configured explicitly.
    if not self._config.is_available('req_attributes'):
        default_attributes = ["Id", "Priority", "Owner", "Invented on",
                              "Invented by", "Status", "Class"]
        self._config.set_value('req_attributes', default_attributes)
    self.__level = -1
def __init__(self, input_mods, config):
    '''Sets up a TopicContinuum for use.'''
    tracer.info("called")
    UsableFlag.__init__(self)
    self.__input_mods = input_mods
    self._config = config
    # All TopicSetCollections available in the configured time period.
    self.__continuum = {}
    # Objects with IDs are also kept in the cache so they can be
    # reused across versions.
    self.__object_cache = ObjectCache()
    self.__init_continuum_set()
    self.__object_cache.log_stats()
    tracer.debug("Finished.")
def resolve_solved_by(self):
    '''Evaluate the 'Solved by' of every collected requirement by
    creating the appropriate digraph nodes.

    Returns True when all requirements could be resolved.
    '''
    tracer.debug("Called.")
    success = True
    # Walk all requirements and resolve each 'Solved by' entry.
    for requirement in self.__requirements.values():
        if self.__resolve_solved_by_one_req(requirement):
            continue
        tracer.info("Handling of requirement [%s] was not successful",
                    requirement.get_id())
        success = False
    tracer.debug("Finished; success [%s]", success)
    return success
def __common_topic_continuum_pre(self, topic_continuum, special):
    '''Common method used by cmad_ and normal callback.'''
    tracer.info("Called.")
    output_config = topic_continuum.get_output_config()
    # .items() works on Python 2 and 3; dict.iteritems() was
    # removed in Python 3 and raises AttributeError there.
    for oconfig_name, oconfig in output_config.items():
        self.__ostats.append(oconfig_name)
        output_module_cstr = self.__create_output_module(oconfig_name)
        for cfg in oconfig:
            output_obj = output_module_cstr(cfg)
            if special != "":
                FuncCall.pcall(output_obj, "init_" + special,
                               self.__cmad_file)
            topic_continuum.execute(output_obj, special)
    tracer.info("Finished.")
def __init__(self, filename, config=None):
    """Create an XLS workbook handler.

    :param filename: path of the workbook to create.
    :param config: optional dict overriding entries of default_config.
    """
    tracer.info("Creating XLS workbook: %s", filename)
    self.__filename = filename
    # Copy the defaults: assigning default_config directly would
    # let the per-instance updates below mutate the shared
    # (class-level) default dictionary.
    self._cfg = dict(self.default_config)
    # config defaults to None; only merge when it was supplied.
    if config:
        for key, value in config.items():
            self._cfg[key] = value
    # We require those headers at least
    self._req_headers = self._cfg["req_attributes"]
    self._headers = list(self._cfg["headers"])
    self.req_row = 1
    self._reqlist = []
    self._topiclist = []
    self._prepare_template()
def __setup_directories(self, cfg):
    '''Cleans up and unifies the directories.'''
    tracer.debug("Called.")
    for dir_type in ["requirements", "topics", "constraints",
                     "testcases"]:
        dirs = cfg.get_rvalue_default(dir_type + "_dirs", None)
        # 'is None' instead of '== None' for the identity check.
        if dirs is None:
            tracer.info("Directory [%s] not configured - skipping.",
                        dir_type)
            continue
        self._check_list_of_strings(dir_type, dirs)
        # Store a (shallow) copy of the configured directory list.
        self.__dirs[dir_type] = list(dirs)
    # .items() works on Python 2 and 3; dict.iteritems() was
    # removed in Python 3.  Lazy %-args for the log call.
    for dir_type, directory in self.__dirs.items():
        tracer.debug("[%s] directories [%s]", dir_type, directory)
def __init__(self, config):
    """Set up the git input interface from the given configuration."""
    tracer.info("called")
    cfg = Cfg(config)
    Interface.__init__(self, cfg)
    self.__start_vers = cfg.get_rvalue("start_vers")
    self.__end_vers = cfg.get_rvalue("end_vers")
    self.__topic_root_node = cfg.get_rvalue("topic_root_node")
    # Lazy %-args instead of eager '%' string formatting.
    tracer.debug("start version [%s] end version [%s] "
                 "topic root node [%s]",
                 self.__start_vers, self.__end_vers,
                 self.__topic_root_node)
    # When the directory is not absolute, convert it to an
    # absolute path that it can be compared to the outcome of the
    # git.Repo.
    self.__dirs = {}
    self.__repo_base_dir = None
    self.__repo = None
    self.__setup_directories(cfg)
def __init__(self, config):
    """Sets up Import for use."""
    tracer.info("called")
    self.__plugin_manager = extension.ExtensionManager(
        namespace="rmtoo.imports.plugin",
        invoke_on_load=False)
    assert config  # we need a configuration
    # Use the 'import' section when present, else the built-in default.
    if "import" in config:
        self._config = config["import"]
    else:
        self._config = self.DEFAULT_CONFIG
    self._cfg = Cfg(self._config)
    self._input_dir = {"requirements_dirs": None,
                       "topics_dirs": None}
    self._extract_input_dir(config)
    self._import_obj = []
    self._set_run_modules()
    tracer.debug("Finished.")
def __store_add_topic(self, tree_store, iter_topic, topic):
    """Recursively add the topic and its sub-topics to the tree store."""
    # Lazy %-args instead of eager '%' string formatting.
    tracer.info("Add topic [%s]", topic.get_id())
    titer = tree_store.append(iter_topic, [topic.get_id(), "Topic"])
    req_set = topic.get_requirement_set()
    # 'is not None' instead of '!= None' for the identity check.
    if req_set is not None:
        tracer.info(
            "RequirementSet is available; requirements count [%d]",
            req_set.get_requirements_cnt())
        req_set.find_master_nodes()
    # Descend into all sub-topics.
    for sub_topic in topic.outgoing:
        self.__store_add_topic(tree_store, titer, sub_topic)
def __init__(self, ts_name, config, ts_config, object_cache,
             input_mods):
    """Set up the topic set continuum for the given name."""
    UsableFlag.__init__(self)
    self.__name = ts_name
    tracer.info("Called: name [%s]", self.__name)
    self._config = config
    self.__topic_sets = {}
    # List of all version control system ids, sorted by time
    # (oldest first).  The first part is the vcs id of the
    # (sub-)element, the second is the commit.  It holds ids only;
    # the data itself lives in
    # self.__topic_sets[self.__vcs_commit_ids[n].get_commit()].
    self.__vcs_commit_ids = []
    self.__object_cache = object_cache
    self.__input_mods = input_mods
    self.__read_topic_sets(ts_config)
    self.__ts_config = ts_config
    tracer.debug("Finished; topic set count [%d]",
                 len(self.__topic_sets))
def __setup_directories(self, cfg):
    '''Cleans up and unifies the directories.'''
    tracer.debug("called")
    # TODO: double code - also in FileSystem
    for dir_type in ["requirements", "topics", "constraints",
                     "testcases"]:
        config_dirs = cfg.get_rvalue_default(dir_type + "_dirs", None)
        # 'is None' instead of '== None' for the identity check.
        if config_dirs is None:
            tracer.info("Directory [%s] not configured - skipping.",
                        dir_type)
            continue
        # list(map(...)): on Python 3 map() returns a one-shot
        # iterator that would be exhausted by the check below,
        # leaving nothing for the loop afterwards.
        dirs = list(map(self.__abs_path, config_dirs))
        self._check_list_of_strings(dir_type, dirs)
        new_directories = []
        for directory in dirs:
            self.__check_if_dir_is_in_repo(directory)
            new_directories.append(self.__cut_off_repo_dir(directory))
        self.__dirs[dir_type] = new_directories
    # .items() works on Python 2 and 3; dict.iteritems() was
    # removed in Python 3.  Lazy %-args for the log call.
    for dir_type, directory in self.__dirs.items():
        tracer.debug("[%s] directories [%s]", dir_type, directory)
def __init__(self, config, input_handler, commit, object_cache,
             input_mods):
    '''Read in all the dependent topics and the requirements.'''
    # Lazy %-args instead of eager '%' string formatting.
    tracer.info("Called; commit timestamp [%s]",
                input_handler.get_timestamp(commit))
    Digraph.__init__(self)
    UsableFlag.__init__(self)
    self._config = config
    self.__input_handler = input_handler
    self.__commit = commit
    self.__object_cache = object_cache
    self.__input_mods = input_mods
    # Because it is possible that things are failing, there is the
    # need to have some defaults here:
    self.__complete_requirement_set = None
    self.__topic = None
    self.__requirement_set = None
    tracer.debug("Read in all the requirements.")
    self.__read_requirement_set()
    if not self.is_usable():
        tracer.error("Errors during reading the requirements.")
        return
    tracer.debug("Read in all the topics.")
    # Stored here is the initial node of the topic digraph.
    self.__topic = self.__read_topics()
    if not self.is_usable():
        tracer.error("Errors during reading the topics.")
        return
    tracer.debug("Restrict requirements to those which are "
                 "needed in the topic.")
    self.__requirement_set = self.__restrict_requirements_set()
    if not self.is_usable():
        tracer.error("Errors during restriction of the requirements.")
        return
    tracer.debug("Finished; success.")
def _setup_directories(self, cfg):
    '''Cleans up and unifies the directories.'''
    tracer.debug("Called.")
    unified_dirs = {}
    for dir_type in ["requirements", "topics", "constraints",
                     "testcases"]:
        configured = cfg.get_rvalue_default(dir_type + "_dirs", None)
        if configured is None:
            tracer.info("Directory [%s] not configured - skipping.",
                        dir_type)
            continue
        # Normalize each configured path before validation.
        adapted = [self._adapt_dir_path(one_dir)
                   for one_dir in configured]
        self._check_list_of_strings(dir_type, adapted)
        checked = []
        for one_dir in adapted:
            self._extended_directory_check(one_dir)
            checked.append(self._adapt_ext_path(one_dir))
        unified_dirs[dir_type] = checked
    for dir_type, directory in iteritems(unified_dirs):
        tracer.debug("[%s] directories [%s]", dir_type, directory)
    return unified_dirs
def find_master_nodes(self):
    '''Find all the available master nodes and stored them in a
    class field.

    The digraph is always the digraph of the complete requirement
    set (not restricted to a topic); to check if a requirement is
    in the topic, look it up in the topic's requirements dict.
    '''
    # Lazy %-args instead of eager '%' string formatting
    # throughout this method.
    tracer.debug("Looking for master nodes in [%d] nodes.",
                 self.get_node_cnt())
    self.__master_nodes = set()
    for req_node in self.get_iter_nodes_values():
        if req_node.get_incoming_cnt() == 0:
            tracer.debug("Found master nodes [%s]",
                         req_node.get_requirement().get_id())
            self.__master_nodes.add(req_node)
        else:
            tracer.debug("[%s] is not a master node; incoming from ",
                         req_node.get_requirement().get_id())
            for incoming_node in req_node.get_iter_incoming():
                tracer.debug(" -> [%s]",
                             incoming_node.get_requirement().get_id())
    tracer.info("Found [%d] master nodes", len(self.__master_nodes))
def __read_one_element(self, fileinfo, input_mods, object_cache,
                       file_suffix, type_name):
    '''Read in one element from the file info.

    Returns the parsed element, or None if the file suffix does
    not match.
    '''
    tracer.debug("Called.")
    # Check for correct filename
    if not fileinfo.get_filename().endswith(file_suffix):
        tracer.info("skipping file [%s]", fileinfo.get_filename())
        return None
    # Handle caching.
    vcs_id = fileinfo.get_vcs_id()
    rid = fileinfo.get_filename_sub_part()[:-4]
    # Look up with the generic type_name: the original looked up
    # the literal "Constraint" while storing under type_name, so
    # cached elements could never be found again.
    element = object_cache.get(type_name, vcs_id)
    tracer.info("Reading constraint [%s]", rid)
    if element is None:
        # Single variable 'element' throughout: the original bound
        # the cache result to 'ctr' but used 'element' afterwards,
        # raising NameError on every cache hit.
        file_content = fileinfo.get_content()
        element = Constraint(file_content, rid, fileinfo.get_filename(),
                             input_mods, self._config)
        # Add the element to the cache.
        object_cache.add(vcs_id, type_name, element)
    self._adapt_usablility(element)
    tracer.debug("Finished.")
    return element
def __init__(self, oconfig):
    '''Create a prios output object.'''
    tracer.info("Called.")
    # Delegate the whole setup to the two output base classes.
    StdOutputParams.__init__(self, oconfig)
    CreateMakeDependencies.__init__(self)
def _set_not_usable(self):
    '''Mark the object as not usable (any more).'''
    tracer.info("Setting object to unusable.")
    self.__is_usable = False
def topic_continuum_post(self, _topics_continuum_set):
    '''Output all the collected statistics.

    This is the last-to-get phase of this object.'''
    # Lazy %-args instead of eager '%' string formatting.
    tracer.info("Usage statistics: %s", self.__stats)