Example #1
0
    def _mine_stories(self, stories, systemname, **kw):
        """Parse and mine each user story, separating successes from failures.

        Returns a tuple (us_instances, failed_stories) where us_instances are
        the successfully mined User Story objects and failed_stories holds
        [id, raw_text, error_args] for every story that raised a ValueError.
        """
        us_instances = []    # successfully created User Story objects
        failed_stories = []  # [us_id, raw story, err.args] per failure
        error_chunks = []    # formatted error messages, joined at the end

        # Story IDs are 1-based to match user-facing numbering
        for us_id, story_text in enumerate(stories, start=1):
            try:
                parsed = self.parse(story_text, us_id, systemname, StoryMiner())
                us_instances.append(Counter.count(parsed))
            except ValueError as err:
                failed_stories.append([us_id, story_text, err.args])
                # Collapse all whitespace runs so the quoted story is one line
                collapsed = " ".join(str.split(story_text))
                error_chunks.append(
                    "\n[User Story {} ERROR] {}! (\"{}\")".format(
                        us_id, err.args[0], collapsed))

        # Report every parsing error at once, if any occurred
        errors = "".join(error_chunks)
        if errors:
            Printer._print_head("PARSING ERRORS")
            print(errors)

        return us_instances, failed_stories
Example #2
0
    def _get_gen(self, us_instances, m, systemname, print_ont, **kw):
        """Build the generated outputs (ontology et al.) from the mined stories.

        Returns whatever Constructor.make produces; element 0 is the
        Manchester OWL ontology, optionally echoed to the terminal.
        """
        constructor = Constructor(self.nlp, us_instances, m)
        outputs = constructor.make(systemname, self.threshold, self.link)

        # '-o'/'--print_ont' asks for the ontology dump on the console
        if print_ont:
            Printer._print_head("MANCHESTER OWL")
            print(outputs[0])

        return outputs
Example #3
0
    def _get_stats(self, us_instances, m, **kw):
        """Compute and print user-story statistics when enabled.

        Returns the statistics array, or None when self.stats is falsy.
        """
        # Statistics are opt-in; skip all work when disabled
        if not self.stats:
            return None

        statsarr = Statistics.to_stats_array(us_instances)

        Printer._print_head("USER STORY STATISTICS")
        Printer.print_stats(statsarr[0], True)
        Printer._print_subhead(
            "Term - by - User Story Matrix ( Terms w/ total weight 0 hidden )")
        # Terms whose summed weight is zero carry no signal — filter them out
        nonzero_terms = m[m['sum'] > 0]
        print(nonzero_terms)

        return statsarr
Example #4
0
    def run(self,
            filename,
            systemname,
            print_us = False,
            print_ont = False,
            stories = None,
            write_local = True):
        """Single run of Visual Narrator

        Mines user stories, builds the term-by-story matrix, generates the
        ontology/Prolog/JSON outputs, gathers statistics, and produces an
        HTML report. Optionally writes all artifacts to disk.

        Args:
            filename (str): File name to read
            systemname (str): Name of System (for output and in model)

            print_us (bool): print data per user story in the console
            print_ont (bool): print ontology in the console

            stories (list): preprocessed stories (from filename); when None,
                they are read and parsed from `filename`
            write_local (bool): write output files (ontology, Prolog, JSON,
                stats, report) to the local filesystem
        Returns:
            dict: dictionary with US objects, Ontology + Prolog + JSON objects, matrix
        """
        if stories is None:
            stories = Reader.parse(filename)

        # Mine stories
        us_instances, failed_stories  = self._mine_stories(stories, systemname, log_time=self.time)

        # Generate the term-by-user story matrix (m), and additional data in two other matrices
        m, count_matrix = self._get_matrix(us_instances, log_time=self.time)

        # Print details per user story, if argument '-u'/'--print_us' is chosen
        if print_us:
            print("Details:\n")
            for us in us_instances:
                Printer.print_us_data(us)

        # Generate the outputs
        output_ontology, output_prolog, onto_per_role = \
            self._get_gen(us_instances, m, systemname, print_ont, log_time=self.time)

        # Gather statistics and print the results
        statsarr = self._get_stats(us_instances, m, log_time=self.time)

        # Print the used ontology generation settings
        Printer.print_gen_settings(self.matrix, self.base, self.threshold)

        # Print details of the generation
        fail = len(failed_stories)
        success = len(us_instances)
        # NOTE(review): these timing entries appear to be populated via the
        # log_time=self.time keyword passed to the helpers above — confirm
        # the timing decorator writes these exact keys.
        time_nlp = self.time['INITIALIZE_NLP']
        time_mine = self.time['_MINE_STORIES']
        time_matr = self.time['_GET_MATRIX']
        time_gen = self.time['_GET_GEN']
        time_stats = self.time['_GET_STATS']

        Printer.print_details(fail, success, time_nlp, time_mine, time_matr, time_gen, time_stats)
        # Reset so a subsequent run in the same session does not re-report
        # the one-off NLP initialization cost.
        self.time['INITIALIZE_NLP'] = 0

        output_json = json.dumps([us.toJSON() for us in us_instances], indent=4)

        files = []
        if write_local:
            w = Writer
            files, reports_folder = self.write_files(w,
                                                     systemname,
                                                     str(output_ontology),
                                                     str(output_prolog),
                                                     output_json,
                                                     statsarr,
                                                     m,
                                                     onto_per_role)

        # Everything the HTML report template needs, in one dict
        report_dict = {
            "stories": us_instances,
            "failed_stories": failed_stories,
            "systemname": systemname,
            "us_success": success,
            "us_fail": fail,
            "times": [["Initializing Natural Language Processor (<em>spaCy</em> v" + pkg_resources.get_distribution("spacy").version + ")" , time_nlp],
                      ["Mining User Stories", time_mine],
                      ["Creating Factor Matrix", time_matr],
                      ["Generating Manchester Ontology / Prolog", time_gen],
                      ["Gathering statistics", time_stats]],
            "dir": sys.path[0],
            "inputfile": filename,
            "inputfile_lines": len(stories),
            "outputfiles": files,
            "threshold": self.threshold,
            "base": self.base,
            "matrix": self.matrix,
            # Term weights sorted descending by total weight
            "weights": m['sum'].copy().reset_index().sort_values(['sum'], ascending=False).values.tolist(),
            "counts": count_matrix.reset_index().values.tolist(),
            "classes": output_ontology.classes,
            "relationships": output_ontology.relationships,
            "types": list(count_matrix.columns.values),
            "ontology": multiline(str(output_ontology)),
            "print_prolog": self.prolog,
            "prolog": multiline(str(output_prolog)),
            "write_local": write_local
        }

        # Finally, generate a report
        output_report = self.generate_report(report_dict)

        # Write output files
        if write_local:
            report = w.make_file(reports_folder, str(systemname) + "_REPORT", "html", output_report)
            files.append(["Report", report])

            # Print the location and name of all output files
            for file in files:
                if str(file[1]) != "":
                    print(f"{file[0]} file succesfully created at: \"{file[1]}\"")

        # Return objects so that they can be used as input for other tools
        return {'us_instances': us_instances,
                'output_ontobj': str(output_ontology),
                'output_prologobj': str(output_prolog),
                'output_json': output_json,
                'matrix': m,
                'report': output_report}