def its_report(dbcon, filters):
    # Per-release ITS information
    from vizgrimoire.ITS import ITS
    ITS.set_backend("launchpad")

    project_name = filters.type_analysis[1]
    project_name = project_name.replace(" ", "")

    # Both branches configure the backend's closed condition; for
    # Documentation, 'Fix Released' tickets also count as closed.
    if project_name == 'Documentation':
        ITS._get_backend().closed_condition = "(new_value='Fix Committed' or new_value='Fix Released')"
    else:
        ITS._get_backend().closed_condition = "(new_value='Fix Committed')"

    opened = its.Opened(dbcon, filters)
    createJSON(opened.get_agg(), "./release/its_opened_" + project_name + ".json")

    closed = its.Closed(dbcon, filters)
    createJSON(closed.get_agg(), "./release/its_closed_" + project_name + ".json")

    dataset = {}
    dataset["opened"] = opened.get_agg()["opened"]
    dataset["closed"] = closed.get_agg()["closed"]

    return dataset
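# --- Illustrative usage sketch (not part of the original file) ---
# The ITSQuery import path is an assumption based on the rest of the
# codebase; database names and dates are hypothetical placeholders.
if __name__ == '__main__':
    from vizgrimoire.metrics.query_builder import ITSQuery
    from vizgrimoire.metrics.metrics_filter import MetricFilters

    dbcon = ITSQuery("root", "", "example_bicho", "example_identities")
    filters = MetricFilters("month", "'2014-04-01'", "'2014-10-01'",
                            ["project", "Documentation"])
    data = its_report(dbcon, filters)
    print data["opened"], data["closed"]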
def projects_efficiency(opts, people_out, affs_out):
    # BMI and mean time to review per general project
    scr_dbcon = SCRQuery(opts.dbuser, opts.dbpassword, opts.dbreview, opts.dbidentities)
    scm_dbcon = SCMQuery(opts.dbuser, opts.dbpassword, opts.dbcvsanaly, opts.dbidentities)
    its_dbcon = ITSQuery(opts.dbuser, opts.dbpassword, opts.dbbicho, opts.dbidentities)

    projects = integrated_projects(scm_dbcon)
    projects_ids = projects["subproject_id"]

    projects_list = []
    bmi_list = []
    time2review_list = []
    bmi_its = []

    period = "month"
    releases = opts.releases.split(",")[-2:]
    startdate = "'"+releases[0]+"'"
    enddate = "'"+releases[1]+"'"

    for project_id in projects_ids:
        project_title = "'" + project_id + "'"
        type_analysis = ["project", project_id]
        project_filters = MetricFilters(period, startdate, enddate, type_analysis,
                                        10, people_out, affs_out)
        scr_bmi = scr.BMISCR(scr_dbcon, project_filters)
        time2review = scr.TimeToReview(scr_dbcon, project_filters)

        # ITS BMI index. Both branches configure the backend's closed
        # condition; Documentation also counts 'Fix Released' as closed.
        from vizgrimoire.ITS import ITS
        ITS.set_backend("launchpad")
        if project_id == 'Documentation':
            ITS._get_backend().closed_condition = "(new_value='Fix Committed' or new_value='Fix Released')"
        else:
            ITS._get_backend().closed_condition = "(new_value='Fix Committed')"

        opened = its.Opened(its_dbcon, project_filters)
        closed = its.Closed(its_dbcon, project_filters)
        tickets_opened = opened.get_agg()["opened"]
        tickets_closed = closed.get_agg()["closed"]

        its_bmi = 0
        if tickets_closed > 0:
            its_bmi = round(float(tickets_closed)/float(tickets_opened), 2)

        projects_list.append(project_id)
        bmi_list.append(round(scr_bmi.get_agg()["bmiscr"], 2))
        time2review_list.append(round(time2review.get_agg()["review_time_days_median"], 2))
        bmi_its.append(its_bmi)

    createCSV({"projects": projects_list, "bmi": bmi_list,
               "timereview": time2review_list, "bmiits": bmi_its},
              "./release/integrated_projects_efficiency.csv")
def create_json(dbcon, filters):
    ITS.set_backend("jira")
    ITS._get_backend().closed_condition = " i.status = 'Closed' "

    pullrequests = PullRequests(dbcon, filters)
    submitters = Submitters(dbcon, filters)
    abandoned = Abandoned(dbcon, filters)
    merged = Merged(dbcon, filters)
    trackers = Trackers(dbcon, filters)

    # Time series data
    data = dict(pullrequests.get_ts().items() +
                submitters.get_ts().items() +
                abandoned.get_ts().items() +
                merged.get_ts().items() +
                trackers.get_ts().items())
    if filters.type_analysis == []:
        createJSON(data, "scr-evolutionary.json")
    else:
        tracker = filters.type_analysis[1]
        tracker = tracker.replace("'", "")
        name = tracker.split("/")[-1]
        createJSON(data, name + "-scr-evolutionary.json")

    # Aggregated data
    data = dict(pullrequests.get_agg().items() +
                submitters.get_agg().items() +
                abandoned.get_agg().items() +
                merged.get_agg().items() +
                trackers.get_agg().items())

    enddate = filters.enddate
    for i in [7, 365, 0]:
        data = dict(data.items() +
                    pullrequests.get_trends(enddate, i).items() +
                    submitters.get_trends(enddate, i).items() +
                    abandoned.get_trends(enddate, i).items() +
                    merged.get_trends(enddate, i).items() +
                    trackers.get_trends(enddate, i).items())

    if filters.type_analysis == []:
        createJSON(data, "scr-static.json")
    else:
        tracker = filters.type_analysis[1]
        tracker = tracker.replace("'", "")
        name = tracker.split("/")[-1]
        createJSON(data, name + "-scr-static.json")
def its_report(dbcon, filters):
    # Basic metrics for ticketing systems
    dataset = {}

    from vizgrimoire.ITS import ITS
    ITS.set_backend("launchpad")

    opened = its.Opened(dbcon, filters)
    dataset["its_opened"] = opened.get_agg()["opened"]

    closed = its.Closed(dbcon, filters)
    dataset["its_closed"] = closed.get_agg()["closed"]

    return dataset
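# --- Illustrative sketch (not part of the original file) ---
# The projects_efficiency() snippets in this file turn these opened/closed
# counts into a BMI (backlog management index): closed divided by opened.
# A helper applying the same check the original code uses before dividing:
def its_bmi_from_report(dataset):
    bmi = 0
    if dataset["its_closed"] > 0:
        bmi = round(float(dataset["its_closed"]) / float(dataset["its_opened"]), 2)
    return bmi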
def data_source_increment_activity(opts, people_out, affs_out):
    # Per data source, display the increment or decrement of activity
    dataset = {}
    data_sources = ["Gits", "Tickets", "Mailing Lists", "Gerrit", "Askbot", "IRC"]
    action = ["commits", "closed tickets", "sent emails", "submitted reviews",
              "posted questions", "messages"]
    net_values = []
    rel_values = []  # Percentage with respect to the previous 365 days

    scm_dbcon = SCMQuery(opts.dbuser, opts.dbpassword, opts.dbcvsanaly, opts.dbidentities)
    its_dbcon = ITSQuery(opts.dbuser, opts.dbpassword, opts.dbbicho, opts.dbidentities)
    mls_dbcon = MLSQuery(opts.dbuser, opts.dbpassword, opts.dbmlstats, opts.dbidentities)
    scr_dbcon = SCRQuery(opts.dbuser, opts.dbpassword, opts.dbreview, opts.dbidentities)
    qaforums_dbcon = QAForumsQuery(opts.dbuser, opts.dbpassword, opts.dbqaforums, opts.dbidentities)
    irc_dbcon = IRCQuery(opts.dbuser, opts.dbpassword, opts.dbirc, opts.dbidentities)

    period = "month"
    type_analysis = None
    releases = opts.releases.split(",")[-2:]
    startdate = "'"+releases[0]+"'"
    enddate = "'"+releases[1]+"'"
    filters = MetricFilters(period, startdate, enddate, None, 10, people_out, affs_out)

    commits = scm.Commits(scm_dbcon, filters)
    closed = its.Closed(its_dbcon, filters)
    emails = mls.EmailsSent(mls_dbcon, filters)
    submitted = scr.Submitted(scr_dbcon, filters)
    questions = qa.Questions(qaforums_dbcon, filters)
    messages = irc.Sent(irc_dbcon, filters)

    from vizgrimoire.ITS import ITS
    ITS.set_backend("launchpad")

    net_values.append(commits.get_trends(releases[1], 365)["commits_365"])
    rel_values.append(commits.get_trends(releases[1], 365)["percentage_commits_365"])
    net_values.append(closed.get_trends(releases[1], 365)["closed_365"])
    rel_values.append(closed.get_trends(releases[1], 365)["percentage_closed_365"])
    net_values.append(emails.get_trends(releases[1], 365)["sent_365"])
    rel_values.append(emails.get_trends(releases[1], 365)["percentage_sent_365"])
    net_values.append(submitted.get_trends(releases[1], 365)["submitted_365"])
    rel_values.append(submitted.get_trends(releases[1], 365)["percentage_submitted_365"])
    net_values.append(questions.get_trends(releases[1], 365)["qsent_365"])
    rel_values.append(questions.get_trends(releases[1], 365)["percentage_qsent_365"])
    net_values.append(messages.get_trends(releases[1], 365)["sent_365"])
    rel_values.append(messages.get_trends(releases[1], 365)["percentage_sent_365"])

    createCSV({"datasource": data_sources, "metricsnames": action,
               "relativevalues": rel_values, "netvalues": net_values},
              "./release/data_source_evolution.csv")
def data_source_increment_activity(opts, people_out, affs_out):
    # Per data source, display the increment or decrement of activity
    dataset = {}
    data_sources = ["Gits", "Tickets", "Mailing Lists", "Gerrit", "Askbot", "IRC"]
    action = ["commits", "closed tickets", "sent emails", "submitted reviews",
              "posted questions", "messages"]
    net_values = []
    rel_values = []  # Percentage with respect to the previous 90 days

    scm_dbcon = SCMQuery(opts.dbuser, opts.dbpassword, opts.dbcvsanaly, opts.dbidentities)
    its_dbcon = ITSQuery(opts.dbuser, opts.dbpassword, opts.dbbicho, opts.dbidentities)
    mls_dbcon = MLSQuery(opts.dbuser, opts.dbpassword, opts.dbmlstats, opts.dbidentities)
    scr_dbcon = SCRQuery(opts.dbuser, opts.dbpassword, opts.dbreview, opts.dbidentities)
    qaforums_dbcon = QAForumsQuery(opts.dbuser, opts.dbpassword, opts.dbqaforums, opts.dbidentities)
    irc_dbcon = IRCQuery(opts.dbuser, opts.dbpassword, opts.dbirc, opts.dbidentities)

    period = "month"
    type_analysis = None
    releases = opts.releases.split(",")[-2:]
    startdate = "'"+releases[0]+"'"
    enddate = "'"+releases[1]+"'"
    filters = MetricFilters(period, startdate, enddate, None, 10, people_out, affs_out)

    commits = scm.Commits(scm_dbcon, filters)
    closed = its.Closed(its_dbcon, filters)
    emails = mls.EmailsSent(mls_dbcon, filters)
    submitted = scr.Submitted(scr_dbcon, filters)
    questions = qa.Questions(qaforums_dbcon, filters)
    messages = irc.Sent(irc_dbcon, filters)

    from vizgrimoire.ITS import ITS
    ITS.set_backend("jira")

    net_values.append(commits.get_trends(releases[1], 90)["commits_90"])
    rel_values.append(commits.get_trends(releases[1], 90)["percentage_commits_90"])
    net_values.append(closed.get_trends(releases[1], 90)["closed_90"])
    rel_values.append(closed.get_trends(releases[1], 90)["percentage_closed_90"])
    net_values.append(emails.get_trends(releases[1], 90)["sent_90"])
    rel_values.append(emails.get_trends(releases[1], 90)["percentage_sent_90"])
    net_values.append(submitted.get_trends(releases[1], 90)["submitted_90"])
    rel_values.append(submitted.get_trends(releases[1], 90)["percentage_submitted_90"])
    net_values.append(questions.get_trends(releases[1], 90)["qsent_90"])
    rel_values.append(questions.get_trends(releases[1], 90)["percentage_qsent_90"])
    net_values.append(messages.get_trends(releases[1], 90)["sent_90"])
    rel_values.append(messages.get_trends(releases[1], 90)["percentage_sent_90"])

    createCSV({"datasource": data_sources, "metricsnames": action,
               "relativevalues": rel_values, "netvalues": net_values},
              "./release/data_source_evolution.csv")
def GetClosedSummaryCompanies(period, startdate, enddate, identities_db,
                              closed_condition, num_organizations):
    from vizgrimoire.ITS import ITS

    count = 1
    first_organizations = {}

    metric = DataSource.get_metrics("organizations", ITS)
    organizations = metric.get_list()
    organizations = organizations['name']

    for company in organizations:
        type_analysis = ["company", "'"+company+"'"]
        filter_com = MetricFilters(period, startdate, enddate, type_analysis)
        mclosed = ITS.get_metrics("closed", ITS)
        mclosed.filters = filter_com
        closed = mclosed.get_ts()
        # Rename the "closed" field to the company name
        closed[company] = closed["closed"]
        del closed['closed']

        if (count <= num_organizations):
            # Organizations kept as separate entities in the dataset
            first_organizations = dict(first_organizations.items() + closed.items())
        else:
            # Organizations aggregated in the "Others" field
            if 'Others' not in first_organizations:
                first_organizations['Others'] = closed[company]
            else:
                first_organizations['Others'] = [a+b for a, b in
                    zip(first_organizations['Others'], closed[company])]
        count = count + 1

    first_organizations = completePeriodIds(first_organizations, period,
                                            startdate, enddate)
    return first_organizations
def get_agg(self):
    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("TIMESTAMPDIFF(SECOND, i.submitted_on, t1.changed_on) as timeto")
    tables.add("issues i")
    tables.union_update(self.db.GetSQLReportFrom(self.filters))

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    # TODO: if RESOLVED and CLOSED appear in the same period of study, this
    # will affect the total time to close the issue. Both timeframes will
    # be taken into account.
    table_extra = "(select issue_id, changed_on from changes where " + \
        closed_condition + " and changed_on < " + self.filters.enddate + \
        " and changed_on >= " + self.filters.startdate + ") t1"
    tables.add(table_extra)
    filters.add("i.id=t1.issue_id")
    filters.union_update(self.db.GetSQLReportWhere(self.filters))

    query = "select " + self.db._get_fields_query(fields)
    query = query + " from " + self.db._get_tables_query(tables)
    query = query + " where " + self.db._get_filters_query(filters)

    return self.db.ExecuteQuery(query)
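# --- Illustrative sketch (not part of the original file) ---
# get_agg() above returns time-to-close values in seconds
# (TIMESTAMPDIFF(SECOND, ...)). A sketch of turning them into the
# day-based medians used elsewhere in the reports, assuming the usual
# {"timeto": [...]} result shape from ExecuteQuery:
def timeto_days_median(data):
    values = sorted(float(v) / (3600 * 24) for v in data["timeto"])
    if not values:
        return None
    mid = len(values) // 2
    if len(values) % 2 == 1:
        return values[mid]
    return (values[mid - 1] + values[mid]) / 2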
def get_list(self):
    startdate = self.filters.startdate
    enddate = self.filters.enddate

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    q = "select cou.name "+\
        "from issues i, "+\
        "     changes ch, "+\
        "     people_uidentities pup, "+\
        "     "+ self.db.identities_db +".profiles pro, "+\
        "     "+ self.db.identities_db +".countries cou "+\
        "where i.id = ch.issue_id and "+\
        "      ch.changed_by = pup.people_id and "+\
        "      pup.uuid = pro.uuid and "+\
        "      pro.country_code = cou.code and "+\
        "      ch.changed_on >= "+ startdate +" and "+\
        "      ch.changed_on < "+ enddate +" and "+\
        "      "+ closed_condition +" "+\
        "group by cou.name "+\
        "order by count(distinct(i.id)) desc, cou.name"
    data = self.db.ExecuteQuery(q)
    return data
def __get_sql_default__(self, evolutionary, close=False):
    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("count(distinct(pup.uuid)) as changers")
    tables.add("issues i")
    tables.add("changes ch")
    tables.union_update(self.db.GetSQLReportFrom(self.filters))
    filters.add("i.id = ch.issue_id")

    if close:
        # Count closers instead of changers
        fields = Set([])
        fields.add("count(distinct(pup.uuid)) as closers")
        closed_condition = ITS._get_closed_condition()
        if self.filters.closed_condition is not None:
            closed_condition = self.filters.closed_condition
        filters.add(closed_condition)

    filters.union_update(self.db.GetSQLReportWhere(self.filters))

    # Unique identities filters
    tables.add("people_uidentities pup")
    filters.add("i.submitted_by = pup.people_id")

    query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
                               self.filters.enddate, " ch.changed_on ",
                               fields, tables, filters, evolutionary,
                               self.filters.type_analysis)
    # Replace issues-based filters with changes-based ones
    query = query.replace("i.submitted", "ch.changed")
    return query
def get_list(self, metric_filters=None, days=0):
    # Overwriting the filters used. This code should be removed at some point.
    if metric_filters is not None:
        metric_filters_orig = self.filters
        self.filters = metric_filters

    startdate = self.filters.startdate
    enddate = self.filters.enddate
    limit = self.filters.npeople
    filter_bots = self.db.get_bots_filter_sql(self.data_source, metric_filters)

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    dtables = Set([])
    dfilters = Set([])
    if (days > 0):
        dtables.add("(SELECT MAX(changed_on) as last_date from changes) t")
        dfilters.add("DATEDIFF (last_date, changed_on) < %s " % (days))

    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("up.uuid as id")
    fields.add("pro.name as closers")
    fields.add("COUNT(DISTINCT(ch.id)) as closed")

    tables.union_update(self.db.GetSQLReportFrom(self.filters))
    tables.add("people_uidentities pup")
    tables.add(self.db.identities_db + ".uidentities up")
    tables.add(self.db.identities_db + ".profiles pro")
    tables.add("issues i")
    tables.add("changes ch")
    tables.union_update(dtables)

    filters.union_update(self.db.GetSQLReportWhere(self.filters))
    filters.add(closed_condition)
    filters.add("i.id = ch.issue_id")
    filters.add("ch.changed_by = pup.people_id")
    filters.add("pup.uuid = up.uuid")
    filters.add("pup.uuid = pro.uuid")
    filters.add("ch.changed_on >= " + startdate)
    filters.add("ch.changed_on < " + enddate)
    filters.union_update(dfilters)
    if len(filter_bots) > 0:
        filters.add(filter_bots)

    query = "select " + self.db._get_fields_query(fields)
    query = query + " from " + self.db._get_tables_query(tables)
    query = query + " where " + self.db._get_filters_query(filters)
    query = query + " GROUP BY pro.name ORDER BY closed DESC, closers LIMIT " + str(limit)

    # Restore the original filters
    if metric_filters is not None:
        self.filters = metric_filters_orig

    return self.db.ExecuteQuery(query)
def get_agg(self):
    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("TIMESTAMPDIFF(SECOND, i.submitted_on, t1.changed_on) as timeto")
    tables.add("issues i")
    tables.union_update(self.db.GetSQLReportFrom(self.filters))

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    # TODO: if RESOLVED and CLOSED appear in the same period of study, this
    # will affect the total time to close the issue. Both timeframes will
    # be taken into account.
    table_extra = "(select issue_id, changed_on from changes where " + \
        closed_condition + " and changed_on < " + self.filters.enddate + \
        " and changed_on >= " + self.filters.startdate + ") t1"
    tables.add(table_extra)
    filters.add("i.id=t1.issue_id")
    filters.union_update(self.db.GetSQLReportWhere(self.filters))

    query = "select " + self.db._get_fields_query(fields)
    query = query + " from " + self.db._get_tables_query(tables)
    query = query + " where " + self.db._get_filters_query(filters)

    return self.db.ExecuteQuery(query)
def get_list(self):
    from vizgrimoire.data_source import DataSource
    from vizgrimoire.filter import Filter

    startdate = self.filters.startdate
    enddate = self.filters.enddate

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    bots = DataSource.get_filter_bots(Filter("domain"))
    fbots = ''
    for bot in bots:
        fbots += " dom.name<>'" + bot + "' and "

    tables = Set([])
    filters = Set([])

    tables.union_update(self.db.GetTablesDomains(self.db.identities_db))
    tables_str = self.db._get_tables_query(tables)
    filters.union_update(self.db.GetFiltersDomains())
    filters_str = self.db._get_filters_query(filters)

    q = "SELECT DISTINCT(SUBSTR(email,LOCATE('@',email)+1)) as domain "+\
        "FROM "+ tables_str +" "+\
        "WHERE "+ filters_str +" AND "+\
        "      "+ fbots +" "+\
        "      c.changed_on >= "+ startdate +" AND "+\
        "      c.changed_on < "+ enddate +" AND "+\
        "      "+ closed_condition +" "+\
        "GROUP BY domain "+\
        "ORDER BY COUNT(DISTINCT(c.issue_id)) DESC, domain LIMIT " + str(Metrics.domains_limit)
    data = self.db.ExecuteQuery(q)
    data['name'] = data.pop('domain')
    return data
def _get_top_repository(self, metric_filters, days=None):
    startdate = metric_filters.startdate
    enddate = metric_filters.enddate
    repo_name = metric_filters.type_analysis[1]
    limit = metric_filters.npeople
    filter_bots = self.db.get_bots_filter_sql(self.data_source, metric_filters)

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    if filter_bots != '':
        filter_bots = " AND " + filter_bots

    dtables = dfilters = ""
    if (days > 0):
        dtables = ", (SELECT MAX(changed_on) as last_date from changes) t "
        dfilters = " AND DATEDIFF (last_date, changed_on) < %s " % (days)

    q = "SELECT up.uuid as id, up.identifier as closers, "+\
        "       COUNT(DISTINCT(i.id)) as closed "+\
        "FROM issues i, changes c, trackers t, people_uidentities pup, "+\
        "     "+ self.db.identities_db +".uidentities up "+ dtables +\
        "WHERE "+ closed_condition +" "+\
        "      AND pup.uuid = up.uuid "+\
        "      AND c.changed_by = pup.people_id "+\
        "      AND c.issue_id = i.id "+\
        "      AND i.tracker_id = t.id "+\
        "      AND t.url = "+ repo_name +" "+\
        "      AND changed_on >= "+ startdate +" AND changed_on < "+ enddate +\
        "      "+ filter_bots +" "+ dfilters +\
        " GROUP BY up.identifier ORDER BY closed DESC, closers LIMIT " + str(limit)

    data = self.db.ExecuteQuery(q)
    return data
def __get_sql_default__(self, evolutionary, close=False):
    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("count(distinct(pup.uuid)) as changers")
    tables.add("issues i")
    tables.add("changes ch")
    tables.union_update(self.db.GetSQLReportFrom(self.filters))
    filters.add("i.id = ch.issue_id")

    if close:
        # Count closers instead of changers
        fields = Set([])
        fields.add("count(distinct(pup.uuid)) as closers")
        closed_condition = ITS._get_closed_condition()
        if self.filters.closed_condition is not None:
            closed_condition = self.filters.closed_condition
        filters.add(closed_condition)

    filters.union_update(self.db.GetSQLReportWhere(self.filters))

    # Unique identities filters
    tables.add("people_uidentities pup")
    filters.add("i.submitted_by = pup.people_id")

    query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
                               self.filters.enddate, " ch.changed_on ",
                               fields, tables, filters, evolutionary,
                               self.filters.type_analysis)
    # Replace issues-based filters with changes-based ones
    query = query.replace("i.submitted", "ch.changed")
    return query
def get_list(self):
    from vizgrimoire.data_source import DataSource
    from vizgrimoire.filter import Filter

    startdate = self.filters.startdate
    enddate = self.filters.enddate

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    bots = DataSource.get_filter_bots(Filter("domain"))
    fbots = ''
    for bot in bots:
        fbots += " dom.name<>'" + bot + "' and "

    tables = Set([])
    filters = Set([])

    tables.union_update(self.db.GetTablesDomains(self.db.identities_db))
    tables.add(self.db.identities_db + ".domains dom")
    tables_str = self.db._get_tables_query(tables)
    filters.union_update(self.db.GetFiltersDomains())
    filters_str = self.db._get_filters_query(filters)

    q = "SELECT dom.name "+\
        "FROM "+ tables_str +" "+\
        "WHERE "+ filters_str +" AND "+\
        "      dom.id = upd.domain_id and "+\
        "      "+ fbots +" "+\
        "      c.changed_on >= "+ startdate +" AND "+\
        "      c.changed_on < "+ enddate +" AND "+\
        "      "+ closed_condition +" "+\
        "GROUP BY dom.name "+\
        "ORDER BY COUNT(DISTINCT(c.issue_id)) DESC LIMIT " + str(Metrics.domains_limit)
    data = self.db.ExecuteQuery(q)
    return data
def get_list(self):
    # Projects activity needs to include subprojects too
    logging.info("Getting projects list for ITS")
    from vizgrimoire.metrics.metrics_filter import MetricFilters

    q = "SELECT p.id AS name FROM %s.projects p" % (self.db.projects_db)
    projects = self.db.ExecuteQuery(q)
    data = []

    # Loop over all projects getting their closed issues
    for project in projects['name']:
        type_analysis = ['project', project]
        period = None
        filter_com = MetricFilters(period, self.filters.startdate,
                                   self.filters.enddate, type_analysis)
        mclosed = ITS.get_metrics("closed", ITS)
        mclosed.filters = filter_com
        issues = mclosed.get_agg()
        issues = issues['closed']
        if (issues > 0):
            data.append([issues, project])

    # Order the list by closed issues: https://wiki.python.org/moin/HowTo/Sorting
    from operator import itemgetter
    data_sort = sorted(data, key=itemgetter(0), reverse=True)
    names = [name[1] for name in data_sort]

    return {"name": names}
def projects_efficiency(opts, people_out, affs_out):
    # BMI and mean time to review for the general project
    scr_dbcon = SCRQuery(opts.dbuser, opts.dbpassword, opts.dbreview,
                         opts.dbidentities, opts.dbprojects)
    scm_dbcon = SCMQuery(opts.dbuser, opts.dbpassword, opts.dbcvsanaly,
                         opts.dbidentities, opts.dbprojects)
    its_dbcon = ITSQuery(opts.dbuser, opts.dbpassword, opts.dbbicho,
                         opts.dbidentities, opts.dbprojects)

    bmi_list = []
    time2review_list = []
    bmi_its = []

    period = "month"
    releases = opts.releases.split(",")[-2:]
    startdate = "'"+releases[0]+"'"
    enddate = "'"+releases[1]+"'"
    type_analysis = None

    project_filters = MetricFilters(period, startdate, enddate, type_analysis,
                                    10, people_out, affs_out)
    scr_bmi = scr.BMISCR(scr_dbcon, project_filters)
    time2review = scr.TimeToReview(scr_dbcon, project_filters)

    # ITS BMI index
    from vizgrimoire.ITS import ITS
    ITS.set_backend("jira")

    opened = its.Opened(its_dbcon, project_filters)
    closed = its.Closed(its_dbcon, project_filters)
    tickets_opened = opened.get_agg()["opened"]
    tickets_closed = closed.get_agg()["closed"]

    its_bmi = 0
    if tickets_closed > 0:
        its_bmi = round(float(tickets_closed)/float(tickets_opened), 2)

    bmi_list.append(round(scr_bmi.get_agg()["bmiscr"], 2))
    time2review_list.append(round(time2review.get_agg()["review_time_days_median"], 2))
    bmi_its.append(its_bmi)

    createCSV({"projects": "general", "bmi": bmi_list,
               "timereview": time2review_list, "bmiits": bmi_its},
              "./release/integrated_projects_efficiency.csv")
def result(self, data_source=None):
    from vizgrimoire.ITS import ITS
    if data_source is not None and data_source != ITS:
        return None

    period = self.filters.period
    startdate = self.filters.startdate
    enddate = self.filters.enddate
    idb = self.db.identities_db
    backend = ITS._get_backend()

    self.CreateViews()

    time_to_response = self.ticketsTimeToResponse(period, startdate, enddate, idb, backend)
    time_from_opened = self.ticketsTimeOpened(period, startdate, enddate, idb, backend)

    return dict(time_to_response.items() + time_from_opened.items())
def _get_sql(self, evolutionary):
    """ Implemented using Changers (the changers metric must exist first) """
    close = True
    changers = ITS.get_metrics("changers", ITS)
    if changers is None:
        # We need to create the changers metric
        changers = Changers(self.db, self.filters)
        q = changers._get_sql(evolutionary, close)
    else:
        # Reuse the registered metric, temporarily swapping in our filters
        cfilters = changers.filters
        changers.filters = self.filters
        q = changers._get_sql(evolutionary, close)
        changers.filters = cfilters
    return q
def _get_sql(self, evolutionary):
    """ Implemented using Changed """
    close = True
    changed = ITS.get_metrics("changed", ITS)
    if changed is None:
        # We need to create the changed metric
        changed = Changed(self.db, self.filters)
        q = changed._get_sql(evolutionary, close)
    else:
        # Reuse the registered metric, temporarily swapping in our filters
        cfilters = changed.filters
        changed.filters = self.filters
        q = changed._get_sql(evolutionary, close)
        changed.filters = cfilters
    return q
def __get_sql_trk_prj__(self, evolutionary, close=False):
    # First get changers and then join with people_upeople
    fields = Set([])
    tables = Set([])
    filters = Set([])

    # TODO: double-check this; a DISTINCT without a GROUP BY action
    # does not make much sense here.
    fields.add("distinct(changed_by) as cpeople, changed_on")
    tables.add("issues i")
    tables.add("changes ch")
    tables.union_update(self.db.GetSQLReportFrom(self.filters))
    filters.add("i.id = ch.issue_id")

    if close:
        closed_condition = ITS._get_closed_condition()
        if self.filters.closed_condition is not None:
            closed_condition = self.filters.closed_condition
        filters.add(closed_condition)

    filters.union_update(self.db.GetSQLReportWhere(self.filters))

    tpeople_sql = "select " + self.db._get_fields_query(fields)
    tpeople_sql = tpeople_sql + " from " + self.db._get_tables_query(tables)
    tpeople_sql = tpeople_sql + " where " + self.db._get_filters_query(filters)

    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("count(distinct(pup.uuid)) as changers")
    tables.add("people_uidentities pup")
    tables.add("(%s) tpeople " % (tpeople_sql))
    filters.add("tpeople.cpeople = pup.people_id")
    if close:
        fields = Set([])
        fields.add("count(distinct(pup.uuid)) as closers")

    query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
                               self.filters.enddate, " tpeople.changed_on ",
                               fields, tables, filters, evolutionary,
                               self.filters.type_analysis)
    return query
def _get_top_global(self, days=0, metric_filters=None):
    """ Implemented using StoriesOpeners """
    top = None
    openers = ITS.get_metrics("stories_openers", ITS)
    if openers is None:
        openers = StoriesOpeners(self.db, self.filters)
        top = openers._get_top(days, metric_filters)
    else:
        # Reuse the registered metric, temporarily swapping in our filters
        afilters = openers.filters
        openers.filters = self.filters
        top = openers._get_top(days, metric_filters)
        openers.filters = afilters
    top['name'] = top.pop('openers')
    return top
def _get_top_global(self, days=0, metric_filters=None):
    """ Implemented using Closers """
    top = None
    closers = ITS.get_metrics("closers", ITS)
    if closers is None:
        closers = Closers(self.db, self.filters)
        top = closers._get_top(days, metric_filters)
    else:
        # Reuse the registered metric, temporarily swapping in our filters
        afilters = closers.filters
        closers.filters = self.filters
        top = closers._get_top(days, metric_filters)
        closers.filters = afilters
    top['name'] = top.pop('closers')
    return top
def __get_sql_trk_prj__(self, evolutionary, close=False):
    """ First get the filtered issues and then join them with changes.
        An optimization for projects and trackers. """
    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("i.id as id")
    tables.add("issues i")
    tables.union_update(self.db.GetSQLReportFrom(self.filters))
    filters.union_update(self.db.GetSQLReportWhere(self.filters, "issues"))

    issues_sql = "select " + self.db._get_fields_query(fields)
    issues_sql = issues_sql + " from " + self.db._get_tables_query(tables)
    issues_sql = issues_sql + " where " + self.db._get_filters_query(filters)

    fields = Set([])
    tables = Set([])
    filters = Set([])

    fields.add("count(distinct(t.id)) as changed")
    tables.add("changes ch")
    tables.add("(%s) t" % (issues_sql))
    filters.add("t.id = ch.issue_id")

    if close:
        closed_condition = ITS._get_closed_condition()
        if self.filters.closed_condition is not None:
            closed_condition = self.filters.closed_condition
        fields = Set([])
        fields.add("count(distinct(t.id)) as closed")
        filters.add(closed_condition)

    query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
                               self.filters.enddate, " ch.changed_on ",
                               fields, tables, filters, evolutionary,
                               self.filters.type_analysis)
    return query
def GetClosedSummaryCompanies(period, startdate, enddate, identities_db,
                              closed_condition, num_organizations):
    from vizgrimoire.ITS import ITS

    count = 1
    first_organizations = {}

    metric = DataSource.get_metrics("organizations", ITS)
    organizations = metric.get_list()
    organizations = organizations['name']

    for company in organizations:
        type_analysis = ["company", "'" + company + "'"]
        filter_com = MetricFilters(period, startdate, enddate, type_analysis)
        mclosed = ITS.get_metrics("closed", ITS)
        mclosed.filters = filter_com
        closed = mclosed.get_ts()
        # Rename the "closed" field to the company name
        closed[company] = closed["closed"]
        del closed['closed']

        if (count <= num_organizations):
            # Organizations kept as separate entities in the dataset
            first_organizations = dict(first_organizations.items() + closed.items())
        else:
            # Organizations aggregated in the "Others" field
            if 'Others' not in first_organizations:
                first_organizations['Others'] = closed[company]
            else:
                first_organizations['Others'] = [a + b for a, b in
                    zip(first_organizations['Others'], closed[company])]
        count = count + 1

    first_organizations = completePeriodIds(first_organizations, period,
                                            startdate, enddate)
    return first_organizations
def get_list(self):
    from vizgrimoire.data_source import DataSource
    from vizgrimoire.filter import Filter

    bots = DataSource.get_filter_bots(Filter("company"))
    fbots = ''
    for bot in bots:
        fbots += " org.name<>'" + bot + "' and "

    startdate = self.filters.startdate
    enddate = self.filters.enddate

    closed_condition = ITS._get_closed_condition()
    if self.filters.closed_condition is not None:
        closed_condition = self.filters.closed_condition

    # List each of the organizations analyzed,
    # ordered by number of closed issues
    q = "select org.name "+\
        "from issues i, "+\
        "     changes ch, "+\
        "     people_uidentities pup, "+\
        "     "+ self.db.identities_db +".enrollments enr, "+\
        "     "+ self.db.identities_db +".organizations org "+\
        "where i.id = ch.issue_id and "+\
        "      ch.changed_by = pup.people_id and "+\
        "      pup.uuid = enr.uuid and "+\
        "      enr.organization_id = org.id and "+\
        "      ch.changed_on >= "+ startdate +" and "+\
        "      ch.changed_on < "+ enddate +" and "+\
        "      i.submitted_on >= enr.start and "+\
        "      i.submitted_on < enr.end and "+\
        "      "+ fbots +\
        closed_condition +\
        " group by org.name "+\
        " order by count(distinct(i.id)) desc"
    data = self.db.ExecuteQuery(q)
    return data
def result(self, data_source=None):
    # FIXME: this import is needed to get the list of
    # states available on the tracker. This should be moved
    # to the configuration file to let the user choose among states.
    from vizgrimoire.ITS import ITS
    if data_source is not None and data_source != ITS:
        return None

    backend = ITS._get_backend()
    if backend.its_type == 'bg':
        backend_type = 'bugzilla'
    else:
        backend_type = backend.its_type

    states = self.get_state_types(backend_type)
    backlog = self.get_backlog(states, backend_type)
    current_states = self.get_current_states(states)
    data = dict(backlog.items() + current_states.items())

    prep_data = {}
    # Capitalize the first letter to avoid collisions with other metrics
    capitalize = lambda s: s[0].upper() + s[1:]
    for k in data:
        if k in ['id', 'date', 'month', 'unixtime']:
            prep_data[k] = data[k]
        else:
            if k.startswith('current_'):
                state = capitalize(k.partition('_')[2])
                state = 'current_' + state
            else:
                state = capitalize(k)
            prep_data[state] = data[k]

    return prep_data
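# --- Illustrative sketch (not part of the original file) ---
# The key-renaming loop above, applied to a tiny, hypothetical result dict:
# 'open' becomes 'Open' and 'current_open' becomes 'current_Open', while
# bookkeeping keys ('id', 'date', ...) pass through untouched.
def _rename_states_example():
    data = {'id': [1, 2], 'open': [5, 7], 'current_open': [3, 4]}
    capitalize = lambda s: s[0].upper() + s[1:]
    prep_data = {}
    for k in data:
        if k in ['id', 'date', 'month', 'unixtime']:
            prep_data[k] = data[k]
        elif k.startswith('current_'):
            prep_data['current_' + capitalize(k.partition('_')[2])] = data[k]
        else:
            prep_data[capitalize(k)] = data[k]
    return prep_data  # {'id': [1, 2], 'Open': [5, 7], 'current_Open': [3, 4]}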
def result(self, data_source=None, destdir=None):
    if data_source != SCM or destdir is None:
        return None

    automator = Report.get_config()
    db_identities = automator["generic"]["db_identities"]
    dbuser = automator["generic"]["db_user"]
    dbpass = automator["generic"]["db_password"]

    start_date = automator['r']['start_date']
    if 'end_date' not in automator['r']:
        end_date = time.strftime('%Y-%m-%d')
    else:
        end_date = automator['r']['end_date']

    start_year = int(start_date.split("-")[0])
    end_year = int(end_date.split("-")[0])

    activity = {}
    activity['name'] = []

    # Commits
    data = self.db.ExecuteQuery(self.get_sql_commits())
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("commits", activity, start_year, end_year)

    # Authors
    data = self.db.ExecuteQuery(self.get_sql_authors())
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("authors", activity, start_year, end_year)

    # Committers
    data = self.db.ExecuteQuery(self.get_sql_committers())
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("committers", activity, start_year, end_year)

    # Committers active: only valid for today
    data = self.db.ExecuteQuery(self.get_sql_committers(None, True))
    data = self._convert_dict_field(data, "committers_active", "committers-active")
    activity = self.add_organizations_data(activity, data)

    # Committers inactive: only valid for today
    activity['committers-inactive'] = \
        [activity['committers'][i] - activity['committers-active'][i]
         for i in range(0, len(activity['committers']))]
    activity['committers-percent-active'] = []
    for i in range(0, len(activity['committers'])):
        if activity['committers'][i] == 0:
            activity['committers-percent-active'].append(100)
        else:
            activity['committers-percent-active'].append(
                (activity['committers-active'][i]*100) / activity['committers'][i])

    # Actions
    data = self.db.ExecuteQuery(self.get_sql_actions())
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("actions", activity, start_year, end_year)

    # Source lines of code added
    data = self.db.ExecuteQuery(self.get_sql_lines_added())
    data = self._convert_dict_field(data, "lines_added", "lines-added")
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("lines-added", activity, start_year, end_year)

    # Source lines of code removed
    data = self.db.ExecuteQuery(self.get_sql_lines_removed())
    data = self._convert_dict_field(data, "lines_removed", "lines-removed")
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("lines-removed", activity, start_year, end_year)

    # Source lines of code total (added+removed)
    data = self.db.ExecuteQuery(self.get_sql_lines_total())
    data = self._convert_dict_field(data, "lines_total", "lines-total")
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("lines-total", activity, start_year, end_year)

    # Lines per commit
    self.add_metric_lines_commit(activity, start_year, end_year)

    # We need to change the db to tickets
    dbname = automator["generic"]["db_bicho"]
    dsquery = ITS.get_query_builder()
    dbcon = dsquery(dbuser, dbpass, dbname, db_identities)
    self.db = dbcon

    # Tickets opened
    data = self.db.ExecuteQuery(self.get_sql_opened())
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("opened", activity, start_year, end_year)

    # Tickets closed
    data = self.db.ExecuteQuery(self.get_sql_closed())
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("closed", activity, start_year, end_year)

    # Messages sent
    dbname = automator["generic"]["db_mlstats"]
    dsquery = MLS.get_query_builder()
    dbcon = dsquery(dbuser, dbpass, dbname, db_identities)
    self.db = dbcon
    data = self.db.ExecuteQuery(self.get_sql_sent())
    activity = self.add_organizations_data(activity, data)
    self.add_metric_years("sent", activity, start_year, end_year)

    createJSON(activity, destdir + "/organizations-activity.json")
    logging.info(destdir + "/organizations-activity.json created")
def projects_efficiency(opts, people_out, affs_out):
    # BMI and mean time to review per general project
    scr_dbcon = SCRQuery(opts.dbuser, opts.dbpassword, opts.dbreview,
                         opts.dbidentities, opts.dbprojects)
    scm_dbcon = SCMQuery(opts.dbuser, opts.dbpassword, opts.dbcvsanaly,
                         opts.dbidentities, opts.dbprojects)
    its_dbcon = ITSQuery(opts.dbuser, opts.dbpassword, opts.dbbicho,
                         opts.dbidentities, opts.dbprojects)

    projects = integrated_projects(scm_dbcon)
    projects_ids = projects["subproject_id"]

    projects_list = []
    bmi_list = []
    time2review_list = []
    bmi_its = []

    period = "month"
    releases = opts.releases.split(",")[-2:]
    startdate = "'" + releases[0] + "'"
    enddate = "'" + releases[1] + "'"

    for project_id in projects_ids:
        project_title = "'" + project_id + "'"
        type_analysis = ["project", project_id]
        project_filters = MetricFilters(period, startdate, enddate, type_analysis,
                                        10, people_out, affs_out)
        scr_bmi = scr.BMISCR(scr_dbcon, project_filters)
        time2review = scr.TimeToReview(scr_dbcon, project_filters)

        # ITS BMI index. Both branches configure the backend's closed
        # condition; Documentation also counts 'Fix Released' as closed.
        from vizgrimoire.ITS import ITS
        ITS.set_backend("launchpad")
        if project_id == 'Documentation':
            ITS._get_backend().closed_condition = "(new_value='Fix Committed' or new_value='Fix Released')"
        else:
            ITS._get_backend().closed_condition = "(new_value='Fix Committed')"

        opened = its.Opened(its_dbcon, project_filters)
        closed = its.Closed(its_dbcon, project_filters)
        tickets_opened = opened.get_agg()["opened"]
        tickets_closed = closed.get_agg()["closed"]

        its_bmi = 0
        if tickets_closed > 0:
            its_bmi = round(float(tickets_closed) / float(tickets_opened), 2)

        projects_list.append(project_id)
        bmi_list.append(round(scr_bmi.get_agg()["bmiscr"], 2))
        time2review_list.append(round(time2review.get_agg()["review_time_days_median"], 2))
        bmi_its.append(its_bmi)

    createCSV({"projects": projects_list, "bmi": bmi_list,
               "timereview": time2review_list, "bmiits": bmi_its},
              "./release/integrated_projects_efficiency.csv")
    project_sloc = float(sonar_metrics["ncloc"])
    print "Project: " + str(project)
    print "NCLOC: " + str(project_sloc)

    filters = MetricFilters("month", startdate, enddate,
                            ["project", "'" + project + "'"],
                            opts.npeople, people_out, affs_out)

    # SCM report
    scm_dbcon = SCMQuery(opts.dbuser, opts.dbpassword, opts.dbcvsanaly, opts.dbidentities)
    data.update(scm_report(scm_dbcon, filters, project_sloc))

    # ITS report
    ITS.set_backend("bg")
    its_dbcon = ITSQuery(opts.dbuser, opts.dbpassword, opts.dbbicho, opts.dbidentities)
    data.update(its_report(its_dbcon, filters, project_sloc))

    # SCR report
    #scr_dbcon = SCRQuery(opts.dbuser, opts.dbpassword, opts.dbreview, opts.dbidentities)
    #data["scr"] = scr_report(scr_dbcon, filters)

    # MLS report
    mls_dbcon = MLSQuery(opts.dbuser, opts.dbpassword, opts.dbmlstats, opts.dbidentities)
    data.update(mls_report(mls_dbcon, filters, project_sloc))

    # FUDforums report
    fudforums_dbcon = MLSQuery(opts.dbuser, opts.dbpassword, opts.dbfudforums, opts.dbidentities)
if __name__ == '__main__':
    # PYTHONPATH=./:../../:../analysis/:../ python scr_jira_metrics.py
    filters = MetricFilters("month", "'2008-10-20'", "'2014-10-01'", [])
    dbcon = ITSQuery("root", "", "lcanas_bicho_gerrit_liferay_4444",
                     "lcanas_cvsanaly_liferay_4444")
    create_json(dbcon, filters)

    ITS.set_backend("jira")
    ITS._get_backend().closed_condition = " i.status = 'Closed' "

    # Per-tracker data
    trackers = Trackers(dbcon, filters)
    trackers_list = trackers.get_list()
    trackers_names = []
    for tracker in trackers_list["name"]:
        tracker_name = tracker.split("/")[-1]
        trackers_names.append(tracker_name)
        tracker_str = "'" + tracker + "'"
        filters = MetricFilters("month", "'2008-10-20'", "'2014-10-01'",
                                ['repository', tracker_str])
        create_json(dbcon, filters)
    createJSON({"name": trackers_names}, "scr-repos.json")