def eventize_and_enrich(commits, git_enrich):
    logging.info("New commits: " + str(len(commits)))

    # Create events from commits
    # TODO add tests for eventize method
    git_events = Git(commits, git_enrich)
    events_df = git_events.eventize(2)

    logging.info("New events: " + str(len(events_df)))

    # Filter information
    data_filtered = FilterRows(events_df)
    events_df = data_filtered.filter_(["filepath"], "-")

    logging.info("New events filtered: " + str(len(events_df)))

    # Add filetype info
    enriched_filetype = FileType(events_df)
    events_df = enriched_filetype.enrich('filepath')

    logging.info("New Filetype events: " + str(len(events_df)))

    # Split filepath info
    enriched_filepath = FilePath(events_df)
    events_df = enriched_filepath.enrich('filepath')

    logging.info("New Filepath events: " + str(len(events_df)))

    # Deal with surrogates
    convert = ToUTF8(events_df)
    events_df = convert.enrich(["owner"])

    logging.info("Final new events: " + str(len(events_df)))

    return events_df
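
# Illustrative sketch (not part of the study code): the FilterRows step above is
# assumed to drop events whose 'filepath' is the placeholder "-", i.e. commits
# that touched no files. In plain pandas that amounts to a boolean mask:
import pandas as pd

events_df = pd.DataFrame({
    "filepath": ["src/app.py", "-", "docs/index.md"],
    "fileaction": ["M", "-", "A"],
})

# Keep only rows with a real file path
events_df = events_df[events_df["filepath"] != "-"]

print(len(events_df))  # 2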
def process(self, items_block):
    """Process items to add file related information.

    Eventize items creating one new item per each file found in the commit
    (excluding files with no actions performed on them). For each event, file
    path, file name, path parts, file type and file extension are added as fields.

    :param items_block: items to be processed. Expects to find ElasticSearch
        hits _source part only.
    """
    logger.debug("{} New commits: {}".format(self.__log_prefix, len(items_block)))

    # Create events from commits
    git_events = Git(items_block, self._git_enrich)
    events_df = git_events.eventize(2)

    logger.debug("{} New events: {}".format(self.__log_prefix, len(events_df)))

    if len(events_df) > 0:
        # Filter information
        data_filtered = FilterRows(events_df)
        events_df = data_filtered.filter_(["filepath"], "-")
        logger.debug("{} New events filtered: {}".format(
            self.__log_prefix, len(events_df)))

        events_df['message'] = events_df['message'].str.slice(
            stop=AreasOfCode.MESSAGE_MAX_SIZE)
        logger.debug("{} Remove message content".format(self.__log_prefix))

        # Add filetype info
        enriched_filetype = FileType(events_df)
        events_df = enriched_filetype.enrich('filepath')
        logger.debug("{} New Filetype events: {}".format(
            self.__log_prefix, len(events_df)))

        # Split filepath info
        enriched_filepath = FilePath(events_df)
        events_df = enriched_filepath.enrich('filepath')
        logger.debug("{} New Filepath events: {}".format(
            self.__log_prefix, len(events_df)))

        events_df['origin'] = events_df['repository']

        # Deal with surrogates
        convert = ToUTF8(events_df)
        events_df = convert.enrich(["owner"])
        logger.debug("{} Final new events: {}".format(self.__log_prefix, len(events_df)))

    return self.ProcessResults(processed=len(events_df), out_items=events_df)
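
# Minimal pandas sketch (not from the study code) illustrating what the message
# truncation and origin copy above do. MESSAGE_MAX_SIZE = 80 is an assumed value
# for demonstration only; the real limit lives in AreasOfCode.MESSAGE_MAX_SIZE.
import pandas as pd

MESSAGE_MAX_SIZE = 80  # assumed limit for this sketch

events_df = pd.DataFrame({
    "message": ["Fix typo in README " * 20, "Add unit tests"],
    "repository": ["https://github.com/org/repo.git"] * 2,
})

# Truncate long commit messages so stored events stay small
events_df["message"] = events_df["message"].str.slice(stop=MESSAGE_MAX_SIZE)

# Mirror the repository URL into the 'origin' field expected downstream
events_df["origin"] = events_df["repository"]

print(events_df["message"].str.len().tolist())  # both lengths now <= 80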
def process(self, items_block):
    """Process items to add file related information.

    Eventize items creating one new item per each file found in the commit
    (excluding files with no actions performed on them). For each event, file
    path, file name, path parts, file type and file extension are added as fields.

    :param items_block: items to be processed. Expects to find ElasticSearch
        hits _source part only.
    """
    logger.info("New commits: " + str(len(items_block)))

    # Create events from commits
    git_events = Git(items_block, self._git_enrich)
    events_df = git_events.eventize(2)

    logger.info("New events: " + str(len(events_df)))

    if len(events_df) > 0:
        # Filter information
        data_filtered = FilterRows(events_df)
        events_df = data_filtered.filter_(["filepath"], "-")
        logger.info("New events filtered: " + str(len(events_df)))

        # Add filetype info
        enriched_filetype = FileType(events_df)
        events_df = enriched_filetype.enrich('filepath')
        logger.info("New Filetype events: " + str(len(events_df)))

        # Split filepath info
        enriched_filepath = FilePath(events_df)
        events_df = enriched_filepath.enrich('filepath')
        logger.info("New Filepath events: " + str(len(events_df)))

        # Deal with surrogates
        convert = ToUTF8(events_df)
        events_df = convert.enrich(["owner"])
        logger.info("Final new events: " + str(len(events_df)))

    return self.ProcessResults(processed=len(events_df), out_items=events_df)
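
# Illustrative sketch (not from the study code): the ToUTF8 step above deals
# with lone surrogate code points that can appear in author fields decoded from
# malformed git metadata. One common way to sanitize them is to re-encode with
# an error handler; whether ToUTF8 does exactly this is an assumption.
owner = "Jos\udce9 Demo"  # '\udce9' is a lone surrogate left by bad decoding

try:
    owner.encode("utf-8")
except UnicodeEncodeError:
    # Replace the offending code points so the string is valid UTF-8 again
    owner = owner.encode("utf-8", errors="replace").decode("utf-8")

print(owner)  # "Jos? Demo"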
def test_GitEvents(self):
    """ Test several cases for the Git events class """

    connector = "git"
    with open(os.path.join(self.__events_dir, connector + ".json")) as f:
        items = json.load(f)

    enrich_backend = self.connectors[connector][2](
        json_projects_map="./data/projects_map.json")
    enrich_backend.sortinghat = True
    events = Git(items, enrich_backend)

    # Granularity 1: one event per commit
    events_df = events.eventize(1)
    self.assertFalse(events_df.empty)
    self.assertEqual(len(events_df), 6)
    self.assertEqual(len(events_df.columns), 28)
    self.assertIn("metadata__timestamp", events_df)
    self.assertIn("metadata__updated_on", events_df)
    self.assertIn("metadata__enriched_on", events_df)
    self.assertIn("grimoire_creation_date", events_df)
    self.assertIn("project", events_df)
    self.assertIn("project_1", events_df)
    self.assertIn("perceval_uuid", events_df)
    self.assertIn("author_id", events_df)
    self.assertIn("author_org_name", events_df)
    self.assertIn("author_name", events_df)
    self.assertIn("author_uuid", events_df)
    self.assertIn("author_domain", events_df)
    self.assertIn("author_user_name", events_df)
    self.assertIn("author_bot", events_df)
    self.assertIn("author_multi_org_names", events_df)
    self.assertIn("id", events_df)
    self.assertIn("date", events_df)
    self.assertIn("owner", events_df)
    self.assertIn("committer", events_df)
    self.assertIn("committer_date", events_df)
    self.assertIn("repository", events_df)
    self.assertIn("message", events_df)
    self.assertIn("hash", events_df)
    self.assertIn("git_author_domain", events_df)

    # Granularity 2: one event per file touched, with file-level columns added
    events_df = events.eventize(2)
    self.assertFalse(events_df.empty)
    self.assertEqual(len(events_df), 10)
    self.assertEqual(len(events_df.columns), 30)
    self.assertIn("eventtype", events_df)
    self.assertIn("date", events_df)
    self.assertIn("owner", events_df)
    self.assertIn("committer", events_df)
    self.assertIn("committer_date", events_df)
    self.assertIn("repository", events_df)
    self.assertIn("message", events_df)
    self.assertIn("hash", events_df)
    self.assertIn("git_author_domain", events_df)
    self.assertIn("files", events_df)
    self.assertIn("fileaction", events_df)
    self.assertIn("filepath", events_df)
    self.assertIn("addedlines", events_df)
    self.assertIn("removedlines", events_df)