def aggData(period, startdate, enddate, identities_db, destdir):
    """Aggregate MLS metrics into one dict and write mls-static.json.

    Merges the global static numbers with optional per-company/country/domain
    aggregates (enabled via the module-level ``reports``), tendency deltas,
    and last-activity windows.
    """
    # FIX: the Py2-only ``dict(a.items() + b.items())`` merge (TypeError on
    # Py3, rebuilds the whole dict each step) is replaced with dict.update,
    # which yields the same final mapping (later values win).
    agg = MLS.StaticMLSInfo(period, startdate, enddate, identities_db, rfield)

    if 'companies' in reports:
        agg.update(MLS.AggMLSCompanies(period, startdate, enddate, identities_db))
    if 'countries' in reports:
        agg.update(MLS.AggMLSCountries(period, startdate, enddate, identities_db))
    if 'domains' in reports:
        agg.update(MLS.AggMLSDomains(period, startdate, enddate, identities_db))

    # Tendencies: deltas over the last week / month / year.
    for days in (7, 30, 365):
        agg.update(MLS.GetDiffSentDays(period, enddate, days))
        agg.update(MLS.GetDiffSendersDays(period, enddate, days))

    # Last Activity: to be removed.
    for days in (7, 14, 30, 60, 90, 180, 365, 730):
        agg.update(MLS.lastActivity(days))

    createJSON(agg, destdir + "/mls-static.json")
def topData(period, startdate, enddate, idb, destdir, bots, npeople):
    """Compute SCR top reviewers, openers and mergers; write scr-top.json.

    Key names are preserved verbatim, including the historical
    inconsistencies ('reviewers' without a trailing dot, 'last month' with a
    space for reviewers but 'last_month' with an underscore for openers and
    mergers) because the JSON consumers depend on them.

    Returns the merged dict of all top lists.
    """
    # FIX: build one dict directly instead of the Py2-only
    # ``dict(a.items() + b.items())`` merge of three intermediate dicts.
    # The key sets are disjoint, so the result is identical.
    top_all = {}
    top_all['reviewers'] = SCR.GetTopReviewersSCR(0, startdate, enddate, idb, bots, npeople)
    top_all['reviewers.last year'] = SCR.GetTopReviewersSCR(365, startdate, enddate, idb, bots, npeople)
    top_all['reviewers.last month'] = SCR.GetTopReviewersSCR(31, startdate, enddate, idb, bots, npeople)
    # Top openers
    top_all['openers.'] = SCR.GetTopOpenersSCR(0, startdate, enddate, idb, bots, npeople)
    top_all['openers.last year'] = SCR.GetTopOpenersSCR(365, startdate, enddate, idb, bots, npeople)
    top_all['openers.last_month'] = SCR.GetTopOpenersSCR(31, startdate, enddate, idb, bots, npeople)
    # Top mergers
    top_all['mergers.last year'] = SCR.GetTopMergersSCR(365, startdate, enddate, idb, bots, npeople)
    top_all['mergers.'] = SCR.GetTopMergersSCR(0, startdate, enddate, idb, bots, npeople)
    top_all['mergers.last_month'] = SCR.GetTopMergersSCR(31, startdate, enddate, idb, bots, npeople)
    # The order of the list items can change, so do not check the JSON.
    createJSON(top_all, destdir + "/scr-top.json", False)
    return top_all
def tsData(period, startdate, enddate, identities_db, destdir, granularity, conf, backend):
    """Build the ITS evolutionary time series and write its-evolutionary.json.

    Starts from the global evolution, then merges in per-company/country/
    repository/domain breakdowns (enabled via the module-level ``reports``)
    and ticket-state series.
    """
    closed_condition = backend.closed_condition
    # FIX: replace the Py2-only ``dict(a.items() + b.items())`` merges with
    # dict.update (same final mapping, works on Py3, no repeated copies).
    evol = completePeriodIds(
        ITS.EvolITSInfo(period, startdate, enddate, identities_db, [],
                        closed_condition))
    if 'companies' in reports:
        evol.update(completePeriodIds(
            ITS.EvolIssuesCompanies(period, startdate, enddate, identities_db)))
    if 'countries' in reports:
        evol.update(completePeriodIds(
            ITS.EvolIssuesCountries(period, startdate, enddate, identities_db)))
    if 'repositories' in reports:
        evol.update(completePeriodIds(
            ITS.EvolIssuesRepositories(period, startdate, enddate, identities_db)))
    if 'domains' in reports:
        evol.update(completePeriodIds(
            ITS.EvolIssuesDomains(period, startdate, enddate, identities_db)))
    evol.update(ticketsStates(period, startdate, enddate, identities_db, backend))
    createJSON(evol, destdir + "/its-evolutionary.json")
def topData(period, startdate, enddate, identities_db, destdir, bots, closed_condition, npeople):
    """Compute ITS top closers and openers; write its-top.json.

    Returns the merged dict of all top lists.
    """
    # FIX: insert directly into one dict instead of merging two dicts via the
    # Py2-only ``dict(a.items() + b.items())``; key sets are disjoint, so the
    # result is identical. 0 days means the whole analyzed period.
    windows = (('.', 0), ('.last year', 365), ('.last month', 31))
    all_top = {}
    # Top closers
    for suffix, days in windows:
        all_top['closers' + suffix] = ITS.GetTopClosers(
            days, startdate, enddate, identities_db, bots,
            closed_condition, npeople)
    # Top openers
    for suffix, days in windows:
        all_top['openers' + suffix] = ITS.GetTopOpeners(
            days, startdate, enddate, identities_db, bots,
            closed_condition, npeople)
    createJSON(all_top, destdir + "/its-top.json", False)
    return all_top
def create_projects_json(destdir, name):
    """Create the projects_hierarchy.json to be used in the dash.

    In the current implementation the hierarchy is flat: every project read
    from the identities database gets ``"parent_project": "root"``, and a
    synthetic "root" entry carries the dashboard *name*.

    JSON entry shape:
        "mylyn.tasks": {
            "parent_project": "mylyn",
            "title": "Mylyn Tasks"
        }
    """
    import_grimoirelib(destdir)
    import report, GrimoireSQL
    from GrimoireUtils import createJSON
    logging.info("Creating projects_hierarchy.json file ")

    automator_file = os.path.join(destdir, "conf/main.conf")
    automator = read_main_conf(automator_file)
    db_user = automator['generic']['db_user']
    db_password = automator['generic']['db_password']
    db_name = automator['generic']['db_identities']
    GrimoireSQL.SetDBChannel(database=db_name, user=db_user,
                             password=db_password)

    res = GrimoireSQL.ExecuteQuery("SELECT id, title from projects")
    projects = {}
    # zip pairs the parallel id/title columns; clearer than indexing by range.
    for pid, title in zip(res['id'], res['title']):
        projects[pid] = {"parent_project": "root", "title": title}
    projects["root"] = {"title": name}

    # FIX: write into destdir; the original wrote the file into the current
    # working directory, silently ignoring destdir.
    createJSON(projects, os.path.join(destdir, "projects_hierarchy.json"))
    logging.info("projects_hierarchy.json created.")
def reposData(period, startdate, enddate, identities_db, destdir, conf, closed_condition):
    """Write the ITS repository list plus per-repo evolutionary/static JSONs."""
    names = ITS.GetReposNameITS(startdate, enddate)['name']
    # A single repository comes back as a scalar, not a list.
    if not isinstance(names, list):
        names = [names]
        createJSON(names, destdir + "/its-repos.json", False)
    else:
        createJSON(names, destdir + "/its-repos.json")

    for repo in names:
        quoted = "'" + repo + "'"
        fname = repo.replace("/", "_")
        evol = completePeriodIds(
            ITS.EvolITSInfo(period, startdate, enddate, identities_db,
                            ['repository', quoted], closed_condition))
        evol_path = destdir + "/" + fname + "-its-rep-evolutionary.json"
        # This particular repo has a known-bad reference JSON: skip checking.
        if fname == "http:__tracker.ceph.com_projects_rados-java_":
            createJSON(evol, evol_path, False)
        else:
            createJSON(evol, evol_path)
        agg = ITS.AggITSInfo(period, startdate, enddate, identities_db,
                             ['repository', quoted], closed_condition)
        createJSON(agg, destdir + "/" + fname + "-its-rep-static.json")
def tsData(period, startdate, enddate, identities_db, destdir, granularity, conf, backend):
    """Build the ITS evolutionary time series and write its-evolutionary.json.

    The optional per-company/country/repository/domain series are merged in
    when enabled via the module-level ``reports``.
    """
    closed_condition = backend.closed_condition
    # FIX: replace the Py2-only ``dict(a.items() + b.items())`` merges with
    # dict.update (identical resulting mapping, Py3-safe, no quadratic copies).
    evol = completePeriodIds(
        ITS.EvolITSInfo(period, startdate, enddate, identities_db, [],
                        closed_condition))
    # Each optional breakdown keeps the original merge order.
    optional = (
        ('companies', ITS.EvolIssuesCompanies),
        ('countries', ITS.EvolIssuesCountries),
        ('repositories', ITS.EvolIssuesRepositories),
        ('domains', ITS.EvolIssuesDomains),
    )
    for flag, metric in optional:
        if flag in reports:
            evol.update(completePeriodIds(
                metric(period, startdate, enddate, identities_db)))
    evol.update(ticketsStates(period, startdate, enddate, identities_db, backend))
    createJSON(evol, destdir + "/its-evolutionary.json")
def aggData(period, startdate, enddate, identities_db, destdir):
    """Aggregate MLS metrics into one dict and write mls-static.json."""
    # FIX: the Py2-only ``dict(a.items() + b.items())`` merge pattern breaks
    # on Py3 and re-copies the accumulator every step; dict.update produces
    # the same final mapping in place.
    agg = MLS.StaticMLSInfo(period, startdate, enddate, identities_db, rfield)

    # Optional breakdowns, governed by the module-level ``reports`` config.
    optional = {'companies': MLS.AggMLSCompanies,
                'countries': MLS.AggMLSCountries,
                'domains': MLS.AggMLSDomains}
    for key in ('companies', 'countries', 'domains'):
        if key in reports:
            agg.update(optional[key](period, startdate, enddate, identities_db))

    # Tendencies over the last week / month / year.
    for days in [7, 30, 365]:
        agg.update(MLS.GetDiffSentDays(period, enddate, days))
        agg.update(MLS.GetDiffSendersDays(period, enddate, days))

    # Last Activity: to be removed.
    for days in [7, 14, 30, 60, 90, 180, 365, 730]:
        agg.update(MLS.lastActivity(days))

    createJSON(agg, destdir + "/mls-static.json")
def countriesData(period, startdate, enddate, identities_db, destdir, npeople):
    """Write the MLS country list plus per-country evolution/top/static JSONs."""
    countries = MLS.countriesNames(identities_db, startdate, enddate)
    createJSON(countries, destdir + "/mls-countries.json")

    for country in countries:
        analysis = ["country", "'" + country + "'"]
        prefix = destdir + "/" + country + "-mls-cou-"

        evol = completePeriodIds(
            MLS.EvolMLSInfo(period, startdate, enddate, identities_db,
                            rfield, analysis))
        if country in ("country5", "country2"):
            # Wrong reference JSON generated in R: skip the check.
            createJSON(evol, prefix + "evolutionary.json", False)
        else:
            createJSON(evol, prefix + "evolutionary.json")

        senders = MLS.countryTopSenders(country, identities_db,
                                        startdate, enddate, npeople)
        createJSON(senders, prefix + "top-senders.json")

        static = MLS.StaticMLSInfo(period, startdate, enddate, identities_db,
                                   rfield, analysis)
        createJSON(static, prefix + "static.json")
def domainsData(period, startdate, enddate, identities_db, destdir, npeople):
    """Write the MLS domain list plus per-domain evolution/top/static JSONs."""
    domains = MLS.domainsNames(identities_db, startdate, enddate)
    createJSON(domains, destdir + "/mls-domains.json")

    for domain in domains:
        analysis = ["domain", "'" + domain + "'"]
        prefix = destdir + "/" + domain + "-mls-dom-"

        evol = completePeriodIds(
            MLS.EvolMLSInfo(period, startdate, enddate, identities_db,
                            rfield, analysis))
        if domain in ("everybody", "hallowelt"):
            # Wrong reference JSON generated in R: skip the check.
            createJSON(evol, prefix + "evolutionary.json", False)
        else:
            createJSON(evol, prefix + "evolutionary.json")

        senders = MLS.domainTopSenders(domain, identities_db,
                                       startdate, enddate, npeople)
        createJSON(senders, prefix + "top-senders.json", False)

        static = MLS.StaticMLSInfo(period, startdate, enddate, identities_db,
                                   rfield, analysis)
        createJSON(static, prefix + "static.json")
def topData(period, startdate, enddate, identities_db, destdir, bots, npeople):
    """Write the MediaWiki top authors (all time / last year / last month)."""
    top_authors = {}
    # 0 days means the whole analyzed period.
    for label, days in (('authors.', 0),
                        ('authors.last year', 365),
                        ('authors.last month', 31)):
        top_authors[label] = Mediawiki.GetTopAuthorsMediaWiki(
            days, startdate, enddate, identities_db, bots, npeople)
    createJSON(top_authors, destdir + "/mediawiki-top.json")
    return top_authors
def topData(period, startdate, enddate, identities_db, destdir, bots, npeople):
    """Write the MLS top senders (all time / last year / last month)."""
    top_senders_data = {}
    # 0 days means the whole analyzed period.
    for label, days in (('senders.', 0),
                        ('senders.last year', 365),
                        ('senders.last month', 31)):
        top_senders_data[label] = MLS.top_senders(
            days, startdate, enddate, identities_db, bots, npeople)
    createJSON(top_senders_data, destdir + "/mls-top.json", False)
    return top_senders_data
def tsData(period, startdate, enddate, idb, destdir, granularity, conf):
    """Build the SCR evolutionary time series and write scr-evolutionary.json."""
    # FIX: replace every Py2-only ``dict(evol.items() + data.items())`` merge
    # with dict.update (same resulting mapping, Py3-safe). Merge order is
    # preserved exactly.
    evol = {}

    # Review counters that share the (period, startdate, enddate) signature.
    # EvolReviewsInProgress was already disabled in the original.
    review_metrics = (
        SCR.EvolReviewsSubmitted,
        SCR.EvolReviewsOpened,
        SCR.EvolReviewsNew,
        SCR.EvolReviewsNewChanges,
        SCR.EvolReviewsClosed,
        SCR.EvolReviewsMerged,
        SCR.EvolReviewsMergedChanges,
        SCR.EvolReviewsAbandoned,
        SCR.EvolReviewsAbandonedChanges,
    )
    for metric in review_metrics:
        evol.update(completePeriodIds(metric(period, startdate, enddate)))

    # Pending reviews need the automator conf (the R API could not take it).
    evol.update(completePeriodIds(
        SCR.EvolReviewsPending(period, startdate, enddate, conf, [])))

    # Patches info (EvolPatchesApproved was already disabled in the original).
    for metric in (SCR.EvolPatchesVerified,
                   SCR.EvolPatchesCodeReview,
                   SCR.EvolPatchesSent):
        evol.update(completePeriodIds(metric(period, startdate, enddate)))

    # Waiting-for-action and reviewers info.
    for metric in (SCR.EvolWaiting4Reviewer,
                   SCR.EvolWaiting4Submitter,
                   SCR.EvolReviewers):
        evol.update(completePeriodIds(metric(period, startdate, enddate)))

    # Time to review: only monthly granularity is supported now.
    if period == "month":
        data = SCR.EvolTimeToReviewSCR(period, startdate, enddate)
        avgs = data['review_time_days_avg']
        for i, val in enumerate(avgs):
            # Coerce to float, but keep a plain 0 for zero values — historical
            # quirk preserved so the generated JSON stays byte-compatible.
            avgs[i] = 0 if val == 0 else float(val)
        evol.update(completePeriodIds(data))

    createJSON(evol, destdir + "/scr-evolutionary.json")
def tsData(period, startdate, enddate, idb, destdir, granularity, conf):
    """Build the SCR evolutionary time series and write scr-evolutionary.json."""
    evol = {}

    # FIX: the Py2-only ``dict(evol.items() + data.items())`` merges break on
    # Py3; a small helper keeps the same merge order with dict.update.
    def merge(data):
        # Normalize period ids, then fold the series into the accumulator.
        evol.update(completePeriodIds(data))

    merge(SCR.EvolReviewsSubmitted(period, startdate, enddate))
    merge(SCR.EvolReviewsOpened(period, startdate, enddate))
    merge(SCR.EvolReviewsNew(period, startdate, enddate))
    merge(SCR.EvolReviewsNewChanges(period, startdate, enddate))
    # SCR.EvolReviewsInProgress was already disabled in the original.
    merge(SCR.EvolReviewsClosed(period, startdate, enddate))
    merge(SCR.EvolReviewsMerged(period, startdate, enddate))
    merge(SCR.EvolReviewsMergedChanges(period, startdate, enddate))
    merge(SCR.EvolReviewsAbandoned(period, startdate, enddate))
    merge(SCR.EvolReviewsAbandonedChanges(period, startdate, enddate))

    # Pending reviews need the automator conf (the R API could not take it).
    merge(SCR.EvolReviewsPending(period, startdate, enddate, conf, []))

    # Patches info (EvolPatchesApproved was already disabled in the original).
    merge(SCR.EvolPatchesVerified(period, startdate, enddate))
    merge(SCR.EvolPatchesCodeReview(period, startdate, enddate))
    merge(SCR.EvolPatchesSent(period, startdate, enddate))

    # Waiting-for-action info.
    merge(SCR.EvolWaiting4Reviewer(period, startdate, enddate))
    merge(SCR.EvolWaiting4Submitter(period, startdate, enddate))

    # Reviewers info.
    merge(SCR.EvolReviewers(period, startdate, enddate))

    # Time to review: only monthly granularity is supported now.
    if period == "month":
        data = SCR.EvolTimeToReviewSCR(period, startdate, enddate)
        avgs = data['review_time_days_avg']
        for i, val in enumerate(avgs):
            # Coerce to float; zero values stay as the integer 0 (historical
            # quirk preserved so the generated JSON stays byte-compatible).
            avgs[i] = 0 if val == 0 else float(val)
        merge(data)

    createJSON(evol, destdir + "/scr-evolutionary.json")
def topData(period, startdate, enddate, identities_db, destdir, bots, npeople):
    """Write the SCM top authors JSON and return the top-authors dict."""
    top_authors_data = {}
    # 0 days means the whole analyzed period.
    for label, days in (('authors.', 0),
                        ('authors.last year', 365),
                        ('authors.last month', 31)):
        top_authors_data[label] = SCM.top_people(
            days, startdate, enddate, "author", "", npeople)
    createJSON(top_authors_data, destdir + "/scm-top.json")
    # Top files: the result is currently unused, but the query is kept
    # (original behavior).
    SCM.top_files_modified()
    return top_authors_data
def topData(period, startdate, enddate, identities_db, destdir, bots, npeople):
    """Write the MediaWiki top authors (all time / last year / last month)."""
    # 0 days = whole period; the literal evaluates the calls in this order.
    top_authors = {
        'authors.': Mediawiki.GetTopAuthorsMediaWiki(
            0, startdate, enddate, identities_db, bots, npeople),
        'authors.last year': Mediawiki.GetTopAuthorsMediaWiki(
            365, startdate, enddate, identities_db, bots, npeople),
        'authors.last month': Mediawiki.GetTopAuthorsMediaWiki(
            31, startdate, enddate, identities_db, bots, npeople),
    }
    createJSON(top_authors, destdir + "/mediawiki-top.json")
    return top_authors
def companies_countriesData(period, startdate, enddate, identities_db, destdir):
    """Write SCM evolutionary JSONs for every (company, country) pair."""
    company_names = SCM.companies_name(startdate, enddate)['name']
    for company in company_names:
        # NOTE(review): the country list is re-queried for each company,
        # matching the original behavior.
        country_names = SCM.scm_countries_names(identities_db,
                                                startdate, enddate)['name']
        for country in country_names:
            print (country, "=>", company)
            evol = SCM.scm_companies_countries_evol(
                identities_db, company, country, nperiod, startdate, enddate)
            evol = completePeriodIds(evol)
            createJSON(evol,
                       destdir + "/" + company + "_" + country +
                       "-scm-evolutionary.json", False)
def aggData(period, startdate, enddate, idb, destdir):
    """Aggregate SCR review/patch metrics into one dict; write scr-static.json."""
    # FIX: replace the Py2-only ``dict(agg.items() + data.items())`` merges
    # with dict.update — same final mapping, Py3-safe, no repeated copies.
    agg = SCR.StaticReviewsSubmitted(period, startdate, enddate)

    # All static counters sharing the (period, startdate, enddate) signature,
    # in the original merge order.
    static_metrics = (
        SCR.StaticReviewsOpened,
        SCR.StaticReviewsNew,
        SCR.StaticReviewsInProgress,
        SCR.StaticReviewsClosed,
        SCR.StaticReviewsMerged,
        SCR.StaticReviewsAbandoned,
        SCR.StaticReviewsPending,
        SCR.StaticPatchesVerified,
        SCR.StaticPatchesApproved,
        SCR.StaticPatchesCodeReview,
        SCR.StaticPatchesSent,
        SCR.StaticWaiting4Reviewer,
        SCR.StaticWaiting4Submitter,
        SCR.StaticReviewers,
    )
    for metric in static_metrics:
        agg.update(metric(period, startdate, enddate))

    # Time-to-review values are coerced to plain floats for the JSON.
    ttr = SCR.StaticTimeToReviewSCR(startdate, enddate)
    ttr['review_time_days_avg'] = float(ttr['review_time_days_avg'])
    ttr['review_time_days_median'] = float(ttr['review_time_days_median'])
    agg.update(ttr)

    # Tendencies over the last week / month / year.
    for days in (7, 30, 365):
        agg.update(SCR.GetSCRDiffSubmittedDays(period, enddate, days, idb))
        agg.update(SCR.GetSCRDiffMergedDays(period, enddate, days, idb))
        agg.update(SCR.GetSCRDiffPendingDays(period, enddate, days, idb))
        agg.update(SCR.GetSCRDiffAbandonedDays(period, enddate, days, idb))

    createJSON(agg, destdir + "/scr-static.json")
def aggData(period, startdate, enddate, idb, destdir):
    """Aggregate SCR review/patch metrics into one dict; write scr-static.json."""
    # FIX: the Py2-only ``dict(agg.items() + data.items())`` merge breaks on
    # Py3; dict.update yields the same final mapping in place.
    agg = SCR.StaticReviewsSubmitted(period, startdate, enddate)
    agg.update(SCR.StaticReviewsOpened(period, startdate, enddate))
    agg.update(SCR.StaticReviewsNew(period, startdate, enddate))
    agg.update(SCR.StaticReviewsInProgress(period, startdate, enddate))
    agg.update(SCR.StaticReviewsClosed(period, startdate, enddate))
    agg.update(SCR.StaticReviewsMerged(period, startdate, enddate))
    agg.update(SCR.StaticReviewsAbandoned(period, startdate, enddate))
    agg.update(SCR.StaticReviewsPending(period, startdate, enddate))
    agg.update(SCR.StaticPatchesVerified(period, startdate, enddate))
    agg.update(SCR.StaticPatchesApproved(period, startdate, enddate))
    agg.update(SCR.StaticPatchesCodeReview(period, startdate, enddate))
    agg.update(SCR.StaticPatchesSent(period, startdate, enddate))
    agg.update(SCR.StaticWaiting4Reviewer(period, startdate, enddate))
    agg.update(SCR.StaticWaiting4Submitter(period, startdate, enddate))

    # Reviewers info.
    agg.update(SCR.StaticReviewers(period, startdate, enddate))

    # Time to Review info: coerce to plain floats for the JSON.
    data = SCR.StaticTimeToReviewSCR(startdate, enddate)
    data['review_time_days_avg'] = float(data['review_time_days_avg'])
    data['review_time_days_median'] = float(data['review_time_days_median'])
    agg.update(data)

    # Tendencies over the last week / month / year.
    for days in [7, 30, 365]:
        agg.update(SCR.GetSCRDiffSubmittedDays(period, enddate, days, idb))
        agg.update(SCR.GetSCRDiffMergedDays(period, enddate, days, idb))
        agg.update(SCR.GetSCRDiffPendingDays(period, enddate, days, idb))
        agg.update(SCR.GetSCRDiffAbandonedDays(period, enddate, days, idb))

    createJSON(agg, destdir + "/scr-static.json")
def topData(period, startdate, enddate, identities_db, destdir, bots, npeople):
    """Write the MLS top senders (all time / last year / last month)."""
    # 0 days = whole period; the literal evaluates the calls in this order.
    top_senders_data = {
        'senders.': MLS.top_senders(
            0, startdate, enddate, identities_db, bots, npeople),
        'senders.last year': MLS.top_senders(
            365, startdate, enddate, identities_db, bots, npeople),
        'senders.last month': MLS.top_senders(
            31, startdate, enddate, identities_db, bots, npeople),
    }
    createJSON(top_senders_data, destdir + "/mls-top.json", False)
    return top_senders_data
def domainsData(period, startdate, enddate, identities_db, destdir, closed_condition, bots, npeople):
    """Write the ITS domain list plus per-domain evolution/static/top JSONs."""
    domains = ITS.GetDomainsNameITS(startdate, enddate, identities_db,
                                    closed_condition, bots)['name']
    createJSON(domains, destdir + "/its-domains.json")

    for domain in domains:
        quoted = "'" + domain + "'"
        print(quoted)
        base = destdir + "/" + domain + "-its-dom-"

        evol = completePeriodIds(
            ITS.EvolITSInfo(period, startdate, enddate, identities_db,
                            ['domain', quoted], closed_condition))
        createJSON(evol, base + "evolutionary.json")

        agg = ITS.AggITSInfo(period, startdate, enddate, identities_db,
                             ['domain', quoted], closed_condition)
        createJSON(agg, base + "static.json")

        top = ITS.GetDomainTopClosers(quoted, startdate, enddate,
                                      identities_db, bots, closed_condition,
                                      npeople)
        createJSON(top, base + "top-closers.json", False)
def topData(period, startdate, enddate, idb, destdir, bots, npeople):
    """Write the IRC top senders (all time / last year / last month)."""
    top_senders = {}
    # 0 days means the whole analyzed period.
    for label, days in (('senders.', 0),
                        ('senders.last year', 365),
                        ('senders.last month', 31)):
        top_senders[label] = IRC.GetTopSendersIRC(
            days, startdate, enddate, idb, bots, npeople)
    createJSON(top_senders, destdir + "/irc-top.json")
    return top_senders
def topData(period, startdate, enddate, idb, destdir, bots, npeople):
    """Write the IRC top senders (all time / last year / last month)."""
    # 0 days = whole period; the literal evaluates the calls in this order.
    top_senders = {
        'senders.': IRC.GetTopSendersIRC(
            0, startdate, enddate, idb, bots, npeople),
        'senders.last year': IRC.GetTopSendersIRC(
            365, startdate, enddate, idb, bots, npeople),
        'senders.last month': IRC.GetTopSendersIRC(
            31, startdate, enddate, idb, bots, npeople),
    }
    createJSON(top_senders, destdir + "/irc-top.json")
    return top_senders
def aggData(period, startdate, enddate, identities_db, destdir):
    """Aggregate MediaWiki tendency + static metrics; write mediawiki-static.json."""
    # FIX: replace the Py2-only ``dict(agg.items() + data.items())`` merges
    # with dict.update (same resulting mapping, Py3-safe).
    agg = {}
    # Tendencies over the last week / month / year.
    for days in (7, 30, 365):
        agg.update(Mediawiki.GetMediaWikiDiffReviewsDays(
            period, enddate, identities_db, days))
        agg.update(Mediawiki.GetMediaWikiDiffAuthorsDays(
            period, enddate, identities_db, days))
    agg.update(Mediawiki.GetStaticDataMediaWiki(
        period, startdate, enddate, identities_db, None))
    createJSON(agg, destdir + "/mediawiki-static.json")
def aggData(period, startdate, enddate, identities_db, destdir, closed_condition):
    """Aggregate ITS metrics into one dict and write its-static.json."""
    # FIX: replace the Py2-only ``dict(agg.items() + data.items())`` merges
    # with dict.update (same resulting mapping, Py3-safe, no repeated copies).
    agg = ITS.AggITSInfo(period, startdate, enddate, identities_db, [],
                         closed_condition)
    agg.update(ITS.AggAllParticipants(startdate, enddate))
    agg.update(ITS.TrackerURL())

    # Optional breakdowns, governed by the module-level ``reports`` config.
    if 'companies' in reports:
        agg.update(ITS.AggIssuesCompanies(period, startdate, enddate, identities_db))
    if 'countries' in reports:
        agg.update(ITS.AggIssuesCountries(period, startdate, enddate, identities_db))
    if 'domains' in reports:
        agg.update(ITS.AggIssuesDomains(period, startdate, enddate, identities_db))

    # Tendencies over the last week / month / year.
    for days in (7, 30, 365):
        agg.update(ITS.GetDiffClosedDays(period, identities_db, enddate, days,
                                         [], closed_condition))
        agg.update(ITS.GetDiffOpenedDays(period, identities_db, enddate, days, []))
        agg.update(ITS.GetDiffClosersDays(period, identities_db, enddate, days,
                                          [], closed_condition))
        agg.update(ITS.GetDiffChangersDays(period, identities_db, enddate, days, []))

    # Last Activity: to be removed.
    for days in (7, 14, 30, 60, 90, 180, 365, 730):
        agg.update(ITS.GetLastActivityITS(days, closed_condition))

    createJSON(agg, destdir + "/its-static.json")
def aggData(period, startdate, enddate, identities_db, destdir):
    """Aggregate SCM metrics into one dict and write scm-static.json."""
    # FIX: replace the Py2-only ``dict(agg.items() + data.items())`` merges
    # with dict.update (same resulting mapping, Py3-safe, no repeated copies).
    agg = SCM.GetSCMStaticData(period, startdate, enddate, identities_db, None)
    agg.update(SCM.StaticURL())

    # Optional breakdowns, governed by the module-level ``reports`` config.
    if 'companies' in reports:
        agg.update(SCM.evol_info_data_companies(startdate, enddate))
    if 'countries' in reports:
        agg.update(SCM.evol_info_data_countries(startdate, enddate))
    if 'domains' in reports:
        agg.update(SCM.evol_info_data_domains(startdate, enddate))

    agg.update(SCM.GetCodeCommunityStructure(period, startdate, enddate,
                                             identities_db))

    # Tendencies over the last week / month / year.
    for days in (7, 30, 365):
        agg.update(SCM.GetDiffCommitsDays(period, enddate, identities_db, days))
        agg.update(SCM.GetDiffAuthorsDays(period, enddate, identities_db, days))
        agg.update(SCM.GetDiffFilesDays(period, enddate, identities_db, days))
        agg.update(SCM.GetDiffLinesDays(period, enddate, identities_db, days))

    # Last Activity: to be removed.
    for days in (7, 14, 30, 60, 90, 180, 365, 730):
        agg.update(SCM.last_activity(days))

    # Fields whose reference values (generated in R) are known to be wrong.
    skip_fields = ['percentage_removed_lines_30', 'percentage_added_lines_30',
                   'diff_netadded_lines_30', 'diff_netremoved_lines_30']
    createJSON(agg, destdir + "/scm-static.json", True, skip_fields)
def aggData(period, startdate, enddate, idb, destdir):
    """Compute aggregated IRC metrics and write irc-static.json."""
    agg_data = {}
    # Tendencies: message and sender deltas over the last week, month and year.
    for days in [7, 30, 365]:
        agg_data.update(IRC.GetIRCDiffSentDays(period, enddate, days))
        agg_data.update(IRC.GetIRCDiffSendersDays(period, enddate, idb, days))
    # Global aggregated data
    agg_data.update(IRC.GetStaticDataIRC(period, startdate, enddate, idb, None))
    createJSON(agg_data, destdir + "/irc-static.json")
def aggData(period, startdate, enddate, identities_db, destdir):
    """Compute aggregated MediaWiki metrics and write mediawiki-static.json."""
    agg = {}
    # Tendencies: review and author deltas over the last week, month and year.
    for days in [7, 30, 365]:
        agg.update(Mediawiki.GetMediaWikiDiffReviewsDays(period, enddate,
                                                         identities_db, days))
        agg.update(Mediawiki.GetMediaWikiDiffAuthorsDays(period, enddate,
                                                         identities_db, days))
    # Global aggregated data.
    agg.update(Mediawiki.GetStaticDataMediaWiki(period, startdate, enddate,
                                                identities_db, None))
    createJSON(agg, destdir + "/mediawiki-static.json")
def aggData(period, startdate, enddate, idb, destdir):
    """Compute aggregated IRC metrics and write irc-static.json."""
    agg_data = {}
    # Tendencies: message and sender deltas over the last week, month and year.
    for days in [7, 30, 365]:
        agg_data.update(IRC.GetIRCDiffSentDays(period, enddate, days))
        agg_data.update(IRC.GetIRCDiffSendersDays(period, enddate, idb, days))
    # Global aggregated data
    agg_data.update(IRC.GetStaticDataIRC(period, startdate, enddate, idb, None))
    createJSON(agg_data, destdir + "/irc-static.json")
def tsData(period, startdate, enddate, identities_db, destdir, granularity, conf):
    """Compute SCM time-series metrics and write scm-evolutionary.json."""
    evol_data = completePeriodIds(
        SCM.GetSCMEvolutionaryData(period, startdate, enddate, identities_db, None))

    # Optional breakdowns, driven by the global 'reports' configuration.
    if 'companies' in reports:
        evol_data.update(completePeriodIds(
            SCM.EvolCompanies(period, startdate, enddate)))
    if 'countries' in reports:
        evol_data.update(completePeriodIds(
            SCM.EvolCountries(period, startdate, enddate)))
    if 'domains' in reports:
        evol_data.update(completePeriodIds(
            SCM.EvolDomains(period, startdate, enddate)))

    createJSON(evol_data, destdir + "/scm-evolutionary.json")
def launch_metricsdef_config():
    """Write the metrics definitions of all active data sources to data/metrics.json."""
    filedir = os.path.join(production_dir, "data")
    if not os.path.isdir(filedir):
        os.makedirs(filedir)
    filename = os.path.join(filedir, "metrics.json")
    compose_msg("Writing metrics definition in: " + filename)

    report = get_report_module()
    # Use os.path.join consistently instead of '+' path concatenation.
    automator_file = os.path.join(project_dir, "conf", "main.conf")
    metrics_dir = os.path.join(project_dir, "tools", "GrimoireLib",
                               "vizgrimoire", "metrics")
    report.init(automator_file, metrics_dir)

    all_metricsdef = {}
    for ds in report.get_data_sources():
        compose_msg("Metrics def for " + ds.get_name())
        metricsdef = ds.get_metrics_definition(ds)
        if metricsdef is not None:
            all_metricsdef[ds.get_name()] = metricsdef

    from GrimoireUtils import createJSON
    createJSON(all_metricsdef, filename)
def topData(period, startdate, enddate, identities_db, destdir, bots,
            closed_condition, npeople):
    """Compute ITS top closers/openers for all-time, last-year and last-month
    windows, write its-top.json and return the combined dict."""
    all_top = {}
    # Top closers (window in days; 0 means all time).
    all_top['closers.'] = ITS.GetTopClosers(
        0, startdate, enddate, identities_db, bots, closed_condition, npeople)
    all_top['closers.last year'] = ITS.GetTopClosers(
        365, startdate, enddate, identities_db, bots, closed_condition, npeople)
    all_top['closers.last month'] = ITS.GetTopClosers(
        31, startdate, enddate, identities_db, bots, closed_condition, npeople)
    # Top openers
    all_top['openers.'] = ITS.GetTopOpeners(
        0, startdate, enddate, identities_db, bots, closed_condition, npeople)
    all_top['openers.last year'] = ITS.GetTopOpeners(
        365, startdate, enddate, identities_db, bots, closed_condition, npeople)
    all_top['openers.last month'] = ITS.GetTopOpeners(
        31, startdate, enddate, identities_db, bots, closed_condition, npeople)

    createJSON(all_top, destdir + "/its-top.json", False)
    return all_top
def domainsData(period, startdate, enddate, identities_db, destdir, npeople):
    """Generate per-domain MLS JSON files: evolutionary, top senders, static."""
    domains = MLS.domainsNames(identities_db, startdate, enddate)
    createJSON(domains, destdir + "/mls-domains.json")

    for domain in domains:
        quoted = "'" + domain + "'"
        type_analysis = ["domain", quoted]

        evol = completePeriodIds(
            MLS.EvolMLSInfo(period, startdate, enddate, identities_db,
                            rfield, type_analysis))
        evol_path = destdir + "/" + domain + "-mls-dom-evolutionary.json"
        if domain in ("everybody", "hallowelt"):
            # Wrong JSON generated in R. Don't check
            createJSON(evol, evol_path, False)
        else:
            createJSON(evol, evol_path)

        senders = MLS.domainTopSenders(domain, identities_db, startdate,
                                       enddate, npeople)
        createJSON(senders, destdir + "/" + domain + "-mls-dom-top-senders.json",
                   False)

        static = MLS.StaticMLSInfo(period, startdate, enddate, identities_db,
                                   rfield, type_analysis)
        createJSON(static, destdir + "/" + domain + "-mls-dom-static.json")
def countriesData(period, startdate, enddate, identities_db, destdir, npeople):
    """Generate per-country MLS JSON files: evolutionary, top senders, static."""
    countries = MLS.countriesNames(identities_db, startdate, enddate)
    createJSON(countries, destdir + "/mls-countries.json")

    for country in countries:
        quoted = "'" + country + "'"
        type_analysis = ["country", quoted]

        evol = completePeriodIds(
            MLS.EvolMLSInfo(period, startdate, enddate, identities_db,
                            rfield, type_analysis))
        evol_path = destdir + "/" + country + "-mls-cou-evolutionary.json"
        if country in ("country5", "country2"):
            # Wrong JSON generated in R. Don't check
            createJSON(evol, evol_path, False)
        else:
            createJSON(evol, evol_path)

        top_senders = MLS.countryTopSenders(country, identities_db, startdate,
                                            enddate, npeople)
        createJSON(top_senders,
                   destdir + "/" + country + "-mls-cou-top-senders.json")

        static = MLS.StaticMLSInfo(period, startdate, enddate, identities_db,
                                   rfield, type_analysis)
        createJSON(static, destdir + "/" + country + "-mls-cou-static.json")
def launch_metricsdef_config():
    """Write the metrics definitions of all active data sources to data/metrics.json."""
    filedir = os.path.join(production_dir, "data")
    if not os.path.isdir(filedir):
        os.makedirs(filedir)
    filename = os.path.join(filedir, "metrics.json")
    compose_msg("Writing metrics definition in: " + filename)

    report = get_report_module()
    # Use os.path.join consistently instead of '+' path concatenation.
    automator_file = os.path.join(project_dir, "conf", "main.conf")
    metrics_dir = os.path.join(project_dir, "tools", "GrimoireLib",
                               "vizgrimoire", "metrics")
    report.init(automator_file, metrics_dir)

    all_metricsdef = {}
    for ds in report.get_data_sources():
        compose_msg("Metrics def for " + ds.get_name())
        metricsdef = ds.get_metrics_definition(ds)
        if metricsdef is not None:
            all_metricsdef[ds.get_name()] = metricsdef

    from GrimoireUtils import createJSON
    createJSON(all_metricsdef, filename)
def peopleData(period, startdate, enddate, idb, destdir, top_data):
    """Generate per-person SCR JSON files for everyone in the top lists."""
    top_keys = ['reviewers', 'reviewers.last year', 'reviewers.last month',
                'openers.', 'openers.last year', 'openers.last_month',
                'mergers.', 'mergers.last year', 'mergers.last_month']
    ids = []
    for key in top_keys:
        ids += safeTopIds(top_data[key])

    # remove duplicates
    people = list(set(ids))
    # the order is not the same than in R json
    createJSON(people, destdir + "/scr-people.json", False)

    for person_id in people:
        evol = completePeriodIds(
            SCR.GetPeopleEvolSCR(person_id, period, startdate, enddate))
        createJSON(evol,
                   destdir + "/people-" + str(person_id) + "-scr-evolutionary.json")
        agg = SCR.GetPeopleStaticSCR(person_id, startdate, enddate)
        createJSON(agg,
                   destdir + "/people-" + str(person_id) + "-scr-static.json")
def countriesData(period, startdate, enddate, idb, destdir):
    """Generate per-country SCR review JSON files (evolutionary and static)."""
    countries = SCR.GetCountriesSCRName(startdate, enddate, idb)['name']
    countries_files = [country.replace('/', '_') for country in countries]
    createJSON(countries_files, destdir + "/scr-countries.json")

    # missing information from the rest of type of reviews, patches and
    # number of patches waiting for reviewer and submitter
    for country in countries:
        country_file = country.replace("/", "_")
        type_analysis = ['country', country]

        # Evolution of submitted/merged/abandoned reviews.
        evol = {}
        evol.update(completePeriodIds(SCR.EvolReviewsSubmitted(
            period, startdate, enddate, type_analysis, idb)))
        evol.update(completePeriodIds(SCR.EvolReviewsMerged(
            period, startdate, enddate, type_analysis, idb)))
        evol.update(completePeriodIds(SCR.EvolReviewsAbandoned(
            period, startdate, enddate, type_analysis, idb)))
        # TODO: when empty abandoned does not appear at all in R JSON
        createJSON(evol,
                   destdir + "/" + country_file + "-scr-cou-evolutionary.json",
                   False)

        # Static aggregates.
        agg = {}
        agg.update(SCR.StaticReviewsSubmitted(
            period, startdate, enddate, type_analysis, idb))
        agg.update(SCR.StaticReviewsMerged(
            period, startdate, enddate, type_analysis, idb))
        agg.update(SCR.StaticReviewsAbandoned(
            period, startdate, enddate, type_analysis, idb))
        createJSON(agg, destdir + "/" + country_file + "-scr-cou-static.json")
def peopleData(period, startdate, enddate, identities_db, destdir,
               closed_condition, top_data):
    """Generate per-person ITS JSON files for everyone in the top lists."""
    # Accumulate into a fresh list. The original code did
    # `top = top_data['closers.']["id"]` and then `top += ...`, which aliased
    # the list stored inside top_data and extended it in place, corrupting the
    # caller's data.
    top = []
    for key in ['closers.', 'closers.last year', 'closers.last month',
                'openers.', 'openers.last year', 'openers.last month']:
        top += top_data[key]["id"]

    # remove duplicates
    people = list(set(top))
    # the order is not the same than in R json
    createJSON(people, destdir + "/its-people.json", False)

    for upeople_id in people:
        evol = completePeriodIds(ITS.GetPeopleEvolITS(
            upeople_id, period, startdate, enddate, closed_condition))
        createJSON(evol,
                   destdir + "/people-" + str(upeople_id) + "-its-evolutionary.json")
        data = ITS.GetPeopleStaticITS(upeople_id, startdate, enddate,
                                      closed_condition)
        createJSON(data,
                   destdir + "/people-" + str(upeople_id) + "-its-static.json")
def peopleData(period, startdate, enddate, idb, destdir, top_data):
    """Generate per-person SCR JSON files for everyone in the top lists."""
    top_keys = ['reviewers', 'reviewers.last year', 'reviewers.last month',
                'openers.', 'openers.last year', 'openers.last_month',
                'mergers.', 'mergers.last year', 'mergers.last_month']
    ids = []
    for key in top_keys:
        ids += safeTopIds(top_data[key])

    # remove duplicates
    people = list(set(ids))
    # the order is not the same than in R json
    createJSON(people, destdir + "/scr-people.json", False)

    for person_id in people:
        evol = completePeriodIds(
            SCR.GetPeopleEvolSCR(person_id, period, startdate, enddate))
        createJSON(evol,
                   destdir + "/people-" + str(person_id) + "-scr-evolutionary.json")
        agg = SCR.GetPeopleStaticSCR(person_id, startdate, enddate)
        createJSON(agg,
                   destdir + "/people-" + str(person_id) + "-scr-static.json")
def tsData(period, startdate, enddate, identities_db, destdir, granularity, conf):
    """Compute MLS time-series metrics and write mls-evolutionary.json."""
    evol = {}
    evol.update(completePeriodIds(
        MLS.EvolMLSInfo(period, startdate, enddate, identities_db, rfield)))

    # Optional breakdowns, driven by the global 'reports' configuration.
    if 'companies' in reports:
        evol.update(completePeriodIds(
            MLS.EvolMLSCompanies(period, startdate, enddate, identities_db)))
    if 'countries' in reports:
        evol.update(completePeriodIds(
            MLS.EvolMLSCountries(period, startdate, enddate, identities_db)))
    if 'domains' in reports:
        evol.update(completePeriodIds(
            MLS.EvolMLSDomains(period, startdate, enddate, identities_db)))

    createJSON(evol, destdir + "/mls-evolutionary.json")
def aggData(period, startdate, enddate, identities_db, destdir, closed_condition):
    """Compute aggregated ITS (issue tracking) metrics and write its-static.json.

    Combines the global aggregates with optional company/country/domain
    breakdowns, tendency deltas for several day windows and last-activity data.
    """
    # Copy so the dict returned by AggITSInfo is not mutated by the updates below.
    agg = dict(ITS.AggITSInfo(period, startdate, enddate, identities_db,
                              [], closed_condition))
    agg.update(ITS.AggAllParticipants(startdate, enddate))
    agg.update(ITS.TrackerURL())

    # Optional breakdowns, driven by the global 'reports' configuration.
    if 'companies' in reports:
        agg.update(ITS.AggIssuesCompanies(period, startdate, enddate, identities_db))
    if 'countries' in reports:
        agg.update(ITS.AggIssuesCountries(period, startdate, enddate, identities_db))
    if 'domains' in reports:
        agg.update(ITS.AggIssuesDomains(period, startdate, enddate, identities_db))

    # Tendencies: deltas over the last week, month and year.
    for days in [7, 30, 365]:
        agg.update(ITS.GetDiffClosedDays(period, identities_db, enddate, days,
                                         [], closed_condition))
        agg.update(ITS.GetDiffOpenedDays(period, identities_db, enddate, days, []))
        agg.update(ITS.GetDiffClosersDays(period, identities_db, enddate, days,
                                          [], closed_condition))
        agg.update(ITS.GetDiffChangersDays(period, identities_db, enddate, days, []))

    # Last Activity: to be removed
    for days in [7, 14, 30, 60, 90, 180, 365, 730]:
        agg.update(ITS.GetLastActivityITS(days, closed_condition))

    createJSON(agg, destdir + "/its-static.json")
def tsData(period, startdate, enddate, identities_db, destdir, granularity, conf):
    """Compute MLS time-series metrics and write mls-evolutionary.json."""
    evol = {}
    evol.update(completePeriodIds(
        MLS.EvolMLSInfo(period, startdate, enddate, identities_db, rfield)))

    # Optional breakdowns, driven by the global 'reports' configuration.
    if 'companies' in reports:
        evol.update(completePeriodIds(
            MLS.EvolMLSCompanies(period, startdate, enddate, identities_db)))
    if 'countries' in reports:
        evol.update(completePeriodIds(
            MLS.EvolMLSCountries(period, startdate, enddate, identities_db)))
    if 'domains' in reports:
        evol.update(completePeriodIds(
            MLS.EvolMLSDomains(period, startdate, enddate, identities_db)))

    createJSON(evol, destdir + "/mls-evolutionary.json")
def companiesData(period, startdate, enddate, idb, destdir):
    """Generate per-company SCR review JSON files (evolutionary and static)."""
    companies = SCR.GetCompaniesSCRName(startdate, enddate, idb)['name']
    companies_files = [company.replace('/', '_') for company in companies]
    createJSON(companies_files, destdir + "/scr-companies.json")

    # missing information from the rest of type of reviews, patches and
    # number of patches waiting for reviewer and submitter
    for company in companies:
        company_file = company.replace("/", "_")
        type_analysis = ['company', company]

        # Evolution of submitted/merged/abandoned reviews.
        evol = {}
        evol.update(completePeriodIds(SCR.EvolReviewsSubmitted(
            period, startdate, enddate, type_analysis, idb)))
        evol.update(completePeriodIds(SCR.EvolReviewsMerged(
            period, startdate, enddate, type_analysis, idb)))
        evol.update(completePeriodIds(SCR.EvolReviewsAbandoned(
            period, startdate, enddate, type_analysis, idb)))
        if period == "month":
            # Time-to-review series only computed at month granularity.
            data = SCR.EvolTimeToReviewSCR(period, startdate, enddate, idb,
                                           type_analysis)
            data['review_time_days_avg'] = checkFloatArray(
                data['review_time_days_avg'])
            data['review_time_days_median'] = checkFloatArray(
                data['review_time_days_median'])
            evol.update(completePeriodIds(data))
        createJSON(evol,
                   destdir + "/" + company_file + "-scr-com-evolutionary.json")

        # Static aggregates.
        agg = {}
        agg.update(SCR.StaticReviewsSubmitted(
            period, startdate, enddate, type_analysis, idb))
        agg.update(SCR.StaticReviewsMerged(
            period, startdate, enddate, type_analysis, idb))
        agg.update(SCR.StaticReviewsAbandoned(
            period, startdate, enddate, type_analysis, idb))
        data = SCR.StaticTimeToReviewSCR(startdate, enddate, idb, type_analysis)
        for field in ('review_time_days_avg', 'review_time_days_median'):
            # Normalize missing values to 0, otherwise coerce to float.
            # The original also tested `val == 0`, which is redundant: any
            # falsy value (including 0) already takes the first branch.
            val = data[field]
            data[field] = float(val) if val else 0
        agg.update(data)
        createJSON(agg, destdir + "/" + company_file + "-scr-com-static.json")
def reposData(period, startdate, enddate, identities_db, destdir, conf,
              closed_condition):
    """Generate per-repository ITS JSON files (evolutionary and static)."""
    repos = ITS.GetReposNameITS(startdate, enddate)['name']
    if isinstance(repos, list):
        createJSON(repos, destdir + "/its-repos.json")
    else:
        # Single repository: normalize to a list; its JSON is not checked.
        repos = [repos]
        createJSON(repos, destdir + "/its-repos.json", False)

    for repo in repos:
        quoted = "'" + repo + "'"
        repo_file = repo.replace("/", "_")

        evol = completePeriodIds(ITS.EvolITSInfo(
            period, startdate, enddate, identities_db,
            ['repository', quoted], closed_condition))
        evol_path = destdir + "/" + repo_file + "-its-rep-evolutionary.json"
        if repo_file == "http:__tracker.ceph.com_projects_rados-java_":
            # Known-bad reference JSON: write without checking.
            createJSON(evol, evol_path, False)
        else:
            createJSON(evol, evol_path)

        agg = ITS.AggITSInfo(period, startdate, enddate, identities_db,
                             ['repository', quoted], closed_condition)
        createJSON(agg, destdir + "/" + repo_file + "-its-rep-static.json")
def reposData(period, startdate, enddate, idb, destdir, conf):
    """Generate per-repository SCR review JSON files plus the scr-repos.json
    index (with metrics used for sorting in javascript)."""
    repos = SCR.GetReposSCRName(startdate, enddate)["name"]
    # For repos aggregated data. Include metrics to sort in javascript.
    repos_list = {"name": [], "review_time_days_median": [], "submitted": []}

    # missing information from the rest of type of reviews, patches and
    # number of patches waiting for reviewer and submitter
    for repo in repos:
        repo_file = repo.replace("/", "_")
        logging.info(repo_file)
        repos_list["name"].append(repo_file)
        type_analysis = ['repository', repo]

        # Evolution of submitted/merged/abandoned/pending reviews.
        evol = {}
        evol.update(completePeriodIds(SCR.EvolReviewsSubmitted(
            period, startdate, enddate, type_analysis)))
        evol.update(completePeriodIds(SCR.EvolReviewsMerged(
            period, startdate, enddate, type_analysis)))
        evol.update(completePeriodIds(SCR.EvolReviewsAbandoned(
            period, startdate, enddate, type_analysis)))
        evol.update(completePeriodIds(SCR.EvolReviewsPending(
            period, startdate, enddate, conf, type_analysis, idb)))
        if period == "month":
            # Time-to-review series only computed at month granularity.
            data = SCR.EvolTimeToReviewSCR(period, startdate, enddate, idb,
                                           type_analysis)
            data['review_time_days_avg'] = checkFloatArray(
                data['review_time_days_avg'])
            data['review_time_days_median'] = checkFloatArray(
                data['review_time_days_median'])
            evol.update(completePeriodIds(data))
        createJSON(evol, destdir + "/" + repo_file + "-scr-rep-evolutionary.json")

        # Static aggregates.
        agg = {}
        data = SCR.StaticReviewsSubmitted(period, startdate, enddate,
                                          type_analysis)
        repos_list["submitted"].append(data["submitted"])
        agg.update(data)
        agg.update(SCR.StaticReviewsMerged(period, startdate, enddate,
                                           type_analysis))
        agg.update(SCR.StaticReviewsAbandoned(period, startdate, enddate,
                                              type_analysis))
        agg.update(SCR.StaticReviewsPending(period, startdate, enddate,
                                            type_analysis))
        data = SCR.StaticTimeToReviewSCR(startdate, enddate, idb, type_analysis)
        for field in ('review_time_days_avg', 'review_time_days_median'):
            # Normalize missing values to 0, otherwise coerce to float.
            # The original's extra `val == 0` test was redundant (falsy).
            val = data[field]
            data[field] = float(val) if val else 0
        agg.update(data)
        repos_list["review_time_days_median"].append(
            data['review_time_days_median'])
        createJSON(agg, destdir + "/" + repo_file + "-scr-rep-static.json")

    createJSON(repos_list, destdir + "/scr-repos.json")
def topData(period, startdate, enddate, idb, destdir, bots, npeople):
    """Compute SCR top reviewers/openers/mergers for all-time, last-year and
    last-month windows, write scr-top.json and return the combined dict."""
    top_all = {}
    # Top reviewers (window in days; 0 means all time).
    top_all['reviewers'] = SCR.GetTopReviewersSCR(
        0, startdate, enddate, idb, bots, npeople)
    top_all['reviewers.last year'] = SCR.GetTopReviewersSCR(
        365, startdate, enddate, idb, bots, npeople)
    top_all['reviewers.last month'] = SCR.GetTopReviewersSCR(
        31, startdate, enddate, idb, bots, npeople)
    # Top openers. NOTE: 'openers.last_month' (underscore) differs from the
    # reviewers keys; kept as-is because consumers rely on the exact key.
    top_all['openers.'] = SCR.GetTopOpenersSCR(
        0, startdate, enddate, idb, bots, npeople)
    top_all['openers.last year'] = SCR.GetTopOpenersSCR(
        365, startdate, enddate, idb, bots, npeople)
    top_all['openers.last_month'] = SCR.GetTopOpenersSCR(
        31, startdate, enddate, idb, bots, npeople)
    # Top mergers
    top_all['mergers.'] = SCR.GetTopMergersSCR(
        0, startdate, enddate, idb, bots, npeople)
    top_all['mergers.last year'] = SCR.GetTopMergersSCR(
        365, startdate, enddate, idb, bots, npeople)
    top_all['mergers.last_month'] = SCR.GetTopMergersSCR(
        31, startdate, enddate, idb, bots, npeople)

    # The order of the list item change so we can not check it
    createJSON(top_all, destdir + "/scr-top.json", False)
    return top_all
def create_projects_json(destdir, name):
    """Create the projects_hierarchy.json to be used in the dash"""
    import_grimoirelib(destdir)
    import report, GrimoireSQL
    from GrimoireUtils import createJSON

    logging.info("Creating projects_hierarchy.json file ")
    automator_file = os.path.join(destdir, "conf/main.conf")
    automator = read_main_conf(automator_file)
    db_user = automator['generic']['db_user']
    db_password = automator['generic']['db_password']
    db_name = automator['generic']['db_identities']
    GrimoireSQL.SetDBChannel(database=db_name, user=db_user,
                             password=db_password)

    # JSON entry:
    # "mylyn.tasks": {
    #     "parent_project": "mylyn",
    #     "title": "Mylyn Tasks"
    # }
    # In the current implementation just one level, all "parent_project":"root"
    res = GrimoireSQL.ExecuteQuery("SELECT id, title from projects")
    projects = {}
    for project_id, title in zip(res['id'], res['title']):
        projects[project_id] = {"parent_project": "root", "title": title}
    projects["root"] = {"title": name}

    # NOTE(review): the file is written to the current working directory, not
    # destdir — confirm this is intentional.
    createJSON(projects, "projects_hierarchy.json")
    logging.info("projects_hierarchy.json created.")
def domainsData(period, startdate, enddate, identities_db, destdir,
                closed_condition, bots, npeople):
    """Generate per-domain ITS JSON files: evolutionary, static, top closers."""
    domains = ITS.GetDomainsNameITS(startdate, enddate, identities_db,
                                    closed_condition, bots)['name']
    createJSON(domains, destdir + "/its-domains.json")

    for domain in domains:
        quoted = "'" + domain + "'"
        print(quoted)

        evol = completePeriodIds(ITS.EvolITSInfo(
            period, startdate, enddate, identities_db,
            ['domain', quoted], closed_condition))
        createJSON(evol, destdir + "/" + domain + "-its-dom-evolutionary.json")

        agg = ITS.AggITSInfo(period, startdate, enddate, identities_db,
                             ['domain', quoted], closed_condition)
        createJSON(agg, destdir + "/" + domain + "-its-dom-static.json")

        top = ITS.GetDomainTopClosers(quoted, startdate, enddate,
                                      identities_db, bots, closed_condition,
                                      npeople)
        createJSON(top, destdir + "/" + domain + "-its-dom-top-closers.json",
                   False)
def reposData(period, startdate, enddate, identities_db, destdir, conf):
    """Generate per-repository SCM JSON files (evolutionary and static)."""
    repos = SCM.repos_name(startdate, enddate)['name']
    if isinstance(repos, list):
        createJSON(repos, destdir + "/scm-repos.json")
    else:
        # Single repository: normalize to a list; its JSON is not checked.
        repos = [repos]
        createJSON(repos, destdir + "/scm-repos.json", False)

    for repo in repos:
        quoted = "'" + repo + "'"
        print(quoted)
        # NOTE(review): repo is used verbatim in the filename; unlike the
        # ITS/SCR variants nothing replaces '/' here — confirm SCM repo
        # names can never contain '/'.
        evol_data = completePeriodIds(SCM.GetSCMEvolutionaryData(
            period, startdate, enddate, identities_db, ["repository", quoted]))
        createJSON(evol_data, destdir + "/" + repo + "-scm-rep-evolutionary.json")

        agg = SCM.GetSCMStaticData(period, startdate, enddate, identities_db,
                                   ["repository", quoted])
        createJSON(agg, destdir + "/" + repo + "-scm-rep-static.json")
def domainsData(period, startdate, enddate, identities_db, destdir):
    """Generate per-domain SCM JSON files (evolutionary and static)."""
    domains = SCM.scm_domains_names(identities_db, startdate, enddate)['name']
    createJSON(domains, destdir + "/scm-domains.json")

    # Some R ts are wrong
    bad_R_json_domains = ['gerrit', 'gmx', 'emsenhuber', 'bitergia']

    for domain in domains:
        quoted = "'" + domain + "'"
        print(quoted)

        evol_data = completePeriodIds(SCM.GetSCMEvolutionaryData(
            period, startdate, enddate, identities_db, ["domain", quoted]))
        evol_path = destdir + "/" + domain + "-scm-dom-evolutionary.json"
        if domain in bad_R_json_domains:
            createJSON(evol_data, evol_path, False)
        else:
            createJSON(evol_data, evol_path)

        agg = SCM.GetSCMStaticData(period, startdate, enddate, identities_db,
                                   ["domain", quoted])
        createJSON(agg, destdir + "/" + domain + "-scm-dom-static.json")
def countriesData(period, startdate, enddate, identities_db, destdir):
    """Generate per-country SCM JSON files (evolutionary and static)."""
    countries = SCM.scm_countries_names(identities_db, startdate, enddate)['name']
    createJSON(countries, destdir + "/scm-countries.json")

    for country in countries:
        print(country)
        quoted = "'" + country + "'"

        evol_data = completePeriodIds(SCM.GetSCMEvolutionaryData(
            period, startdate, enddate, identities_db, ["country", quoted]))
        createJSON(evol_data,
                   destdir + "/" + country + "-scm-cou-evolutionary.json")

        agg = SCM.GetSCMStaticData(period, startdate, enddate, identities_db,
                                   ["country", quoted])
        createJSON(agg, destdir + "/" + country + "-scm-cou-static.json")
def countriesData(period, startdate, enddate, identities_db, destdir,
                  closed_condition):
    """Generate per-country ITS JSON files (evolutionary and static)."""
    countries = ITS.GetCountriesNamesITS(startdate, enddate, identities_db,
                                         closed_condition)['name']
    createJSON(countries, destdir + "/its-countries.json")

    for country in countries:
        print(country)
        quoted = "'" + country + "'"

        evol = completePeriodIds(ITS.EvolITSInfo(
            period, startdate, enddate, identities_db,
            ['country', quoted], closed_condition))
        createJSON(evol, destdir + "/" + country + "-its-cou-evolutionary.json")

        data = ITS.AggITSInfo(period, startdate, enddate, identities_db,
                              ['country', quoted], closed_condition)
        createJSON(data, destdir + "/" + country + "-its-cou-static.json")
def peopleData(period, startdate, enddate, identities_db, destdir,
               top_authors_data):
    """Generate per-person SCM JSON files for everyone in the top author lists."""
    # Accumulate into a fresh list. The original code did
    # `top = top_authors_data['authors.']["id"]` and then `top += ...`, which
    # aliased the list stored inside top_authors_data and extended it in
    # place, corrupting the caller's data.
    top = []
    for key in ['authors.', 'authors.last year', 'authors.last month']:
        top += top_authors_data[key]["id"]

    # remove duplicates
    people = list(set(top))
    # the order is not the same than in R json
    createJSON(people, destdir + "/scm-people.json", False)

    for upeople_id in people:
        evol_data = completePeriodIds(
            SCM.GetEvolPeopleSCM(upeople_id, period, startdate, enddate))
        createJSON(evol_data,
                   destdir + "/people-" + str(upeople_id) + "-scm-evolutionary.json")
        agg = SCM.GetStaticPeopleSCM(upeople_id, startdate, enddate)
        createJSON(agg,
                   destdir + "/people-" + str(upeople_id) + "-scm-static.json")