コード例 #1
0
ファイル: data_source.py プロジェクト: jgbarah/GrimoireLib
    def get_studies_data(ds, period, startdate, enddate, evol):
        """Get data from studies to be included in agg and evol global JSONs.

        ds: data source class (e.g. SCM, ITS, MLS)
        period, startdate, enddate: unused here; kept for API symmetry
        evol: True for time-series (evol) data, False for aggregated (agg)

        Returns a dict merging the results of every study that supports
        the standard study API.
        """
        from vizgrimoire.report import Report
        data = {}

        config = Report.get_config()
        db_identities = config['generic']['db_identities']
        dbuser = config['generic']['db_user']
        dbpass = config['generic']['db_password']

        studies = Report.get_studies()
        metric_filters = Report.get_default_filter()

        dbname = config['generic'][ds.get_db_name()]
        dsquery = ds.get_query_builder()
        dbcon = dsquery(dbuser, dbpass, dbname, db_identities)

        evol_txt = "evol" if evol else "agg"
        logging.info("Creating studies for " + ds.get_name() + " " + evol_txt)
        for study in studies:
            try:
                obj = study(dbcon, metric_filters)
                res = obj.get_ts(ds) if evol else obj.get_agg(ds)
                if res is not None:
                    # Merge keeping existing entries on key conflicts, exactly
                    # as the Python-2-only dict(res.items() + data.items())
                    # did; this form also works on Python 3.
                    merged = dict(res)
                    merged.update(data)
                    data = merged
            except TypeError:
                # Study does not support the standard API: skip it.
                # NOTE(review): this also masks genuine TypeErrors raised
                # inside get_ts/get_agg -- consider logging at debug level.
                pass

        return data
コード例 #2
0
    def get_studies_data(ds, period, startdate, enddate, evol):
        """Get data from studies to be included in agg and evol global JSONs.

        ds: data source class (e.g. SCM, ITS, MLS)
        period, startdate, enddate: unused here; kept for API symmetry
        evol: True for time-series (evol) data, False for aggregated (agg)

        Returns a dict with the merged results of all supported studies.
        """
        from vizgrimoire.report import Report
        data = {}

        generic = Report.get_config()['generic']
        db_identities = generic['db_identities']
        dbuser = generic['db_user']
        dbpass = generic['db_password']

        studies = Report.get_studies()
        metric_filters = Report.get_default_filter()

        dbname = generic[ds.get_db_name()]
        dbcon = ds.get_query_builder()(dbuser, dbpass, dbname, db_identities)

        evol_txt = "evol" if evol else "agg"
        logging.info("Creating studies for " + ds.get_name() + " " + evol_txt)
        for study in studies:
            try:
                obj = study(dbcon, metric_filters)
                res = obj.get_ts(ds) if evol else obj.get_agg(ds)
                if res is not None:
                    # Existing keys in data win over res, preserving the
                    # original Python-2-only dict(res.items() + data.items())
                    # semantics while working on Python 3 as well.
                    merged = dict(res)
                    merged.update(data)
                    data = merged
            except TypeError:
                # Study does not support the standard API: skip it.
                # NOTE(review): real TypeErrors from get_ts/get_agg are also
                # swallowed here.
                pass

        return data
コード例 #3
0
    def get_filter_items(filter_, startdate, enddate, identities_db):
        """Return the list of items for a filter in the SCR data source.

        filter_: Filter whose item list is requested
        startdate, enddate: unused here; kept for API symmetry
        identities_db: unused here; kept for API symmetry

        Returns the metric's item list, or None for unsupported filters.
        """
        filter_name = filter_.get_name()

        # Map each supported filter to the metric that provides its items,
        # replacing the original repetitive elif chain.
        metric_by_filter = {
            "repository": "repositories",
            "company": "organizations",
            "country": "countries",
            "domain": "domains",
            "project": "projects",
            "people2": "people2",
        }
        if filter_name not in metric_by_filter:
            logging.error("SCR " + filter_name + " not supported")
            return None
        metric = DataSource.get_metrics(metric_by_filter[filter_name], SCR)

        # workaround due to bug https://phabricator.wikimedia.org/T116484
        from vizgrimoire.report import Report
        config = Report.get_config()
        start_key = SCR.get_name() + "_start_date"
        end_key = SCR.get_name() + "_end_date"
        if start_key in config['r']:
            metric.filters.startdate = "'" + config['r'][start_key] + "'"
        if end_key in config['r']:
            metric.filters.enddate = "'" + config['r'][end_key] + "'"
        # end

        return metric.get_list()
コード例 #4
0
ファイル: data_source.py プロジェクト: jgbarah/GrimoireLib
    def get_filter_bots(filter_):
        """Return the list of bot names configured for *filter_*.

        Bots are read from the automator config key
        '<plural filter name>_out' as a comma-separated string. An empty
        list is returned when Report is not in use or no bots are set.
        """
        from vizgrimoire.report import Report
        bots = []

        config = Report.get_config()
        # If not using Report (automator) bots are not supported.
        if config is None:  # fix: identity comparison instead of == None
            return bots

        key = filter_.get_name_plural() + '_out'
        if key in config['r']:
            bots = config['r'][key].split(",")
            logging.info("BOTS for " + filter_.get_name_plural())
            logging.info(bots)
        return bots
コード例 #5
0
    def get_filter_bots(filter_):
        """Return the bot names configured for *filter_* in the automator.

        Reads the comma-separated '<plural filter name>_out' key; returns
        an empty list when Report is unavailable or the key is missing.
        """
        from vizgrimoire.report import Report
        bots = []

        config = Report.get_config()
        # If not using Report (automator) bots are not supported.
        if config is None:  # fix: 'is None' rather than '== None'
            return bots

        out_key = filter_.get_name_plural() + '_out'
        if out_key in config['r']:
            bots = config['r'][out_key].split(",")
            logging.info("BOTS for " + filter_.get_name_plural())
            logging.info(bots)
        return bots
コード例 #6
0
ファイル: data_source.py プロジェクト: jgbarah/GrimoireLib
    def ages_study_com(ds, items, period, startdate, enddate, destdir):
        """Perform ages study for companies, if it is specified in Report.

        Produces JSON files for those studies.

        Parameters
        ----------

        ds: { SCM | ITS | MLS }
           Data source
        items: iterable
           Company items (presumably company names; passed to Filter)
        period: ??
           Period
        startdate: ??
           Start date
        enddate: ??
           End date
        destdir: string
           Directory for writing the JSON files
        """

        from vizgrimoire.report import Report
        filter_name = "company"

        # Look for the "ages" study among the configured studies
        # (last match wins, as in the original scan).
        ages = None
        for study in Report.get_studies():
            if study.id == "ages":
                ages = study

        # Guard clause: nothing to do when the study is not configured.
        if ages is None:
            return

        # Get config parameters for producing a connection to the database.
        config = Report.get_config()
        db_identities = config['generic']['db_identities']
        dbuser = config['generic']['db_user']
        dbpass = config['generic']['db_password']

        # Per-data-source date overrides from the automator config.
        start_string = ds.get_name() + "_start_date"
        end_string = ds.get_name() + "_end_date"
        if start_string in config['r']:
            startdate = "'" + config['r'][start_string] + "'"
        if end_string in config['r']:
            enddate = "'" + config['r'][end_string] + "'"

        dbname = config['generic'][ds.get_db_name()]
        dsquery = ds.get_query_builder()
        dbcon = dsquery(dbuser, dbpass, dbname, db_identities)

        for item in items:
            filter_item = Filter(filter_name, item)
            metric_filters = MetricFilters(period, startdate, enddate,
                                           filter_item.get_type_analysis())
            # create_report writes the JSON files as a side effect; its
            # return value (bound to an unused variable before) is dropped.
            ages(dbcon, metric_filters).create_report(ds, destdir)
コード例 #7
0
    def ages_study_com(ds, items, period, startdate, enddate, destdir):
        """Run the companies "ages" study when it is configured in Report.

        Writes JSON report files into *destdir* for each company item;
        does nothing when the "ages" study is not among the configured
        studies.
        """
        from vizgrimoire.report import Report

        ages_study = None
        for candidate in Report.get_studies():
            if candidate.id == "ages":
                ages_study = candidate

        if ages_study is not None:
            # Database connection parameters from the automator config.
            config = Report.get_config()
            generic = config['generic']
            identities = generic['db_identities']
            user = generic['db_user']
            password = generic['db_password']

            # Per-data-source date overrides, when present in the config.
            start_key = ds.get_name() + "_start_date"
            end_key = ds.get_name() + "_end_date"
            if start_key in config['r']:
                startdate = "'" + config['r'][start_key] + "'"
            if end_key in config['r']:
                enddate = "'" + config['r'][end_key] + "'"

            builder = ds.get_query_builder()
            connection = builder(user, password,
                                 generic[ds.get_db_name()], identities)

            for company in items:
                company_filter = Filter("company", company)
                filters = MetricFilters(period, startdate, enddate,
                                        company_filter.get_type_analysis())
                study = ages_study(connection, filters)
                res = study.create_report(ds, destdir)
コード例 #8
0
    def get_its_from_organizations(self):
        """Build the SQL FROM clause joining ITS issues with the
        submitters' organizations via the identities database."""
        config = Report.get_config()
        identities_db = config['generic']['db_identities']

        return """
            FROM issues i
              JOIN people_uidentities pup ON i.submitted_by = pup.people_id 
              JOIN %s.enrollments enr ON enr.uuid = pup.uuid 
              JOIN %s.organizations org ON org.id = enr.organization_id 
        """ % (identities_db, identities_db)
コード例 #9
0
    def get_its_from_organizations(self):
        """Return the SQL FROM clause that links issues to the
        organizations of their submitters."""
        db = Report.get_config()['generic']['db_identities']
        clause = """
            FROM issues i
              JOIN people_uidentities pup ON i.submitted_by = pup.people_id 
              JOIN %s.enrollments enr ON enr.uuid = pup.uuid 
              JOIN %s.organizations org ON org.id = enr.organization_id 
        """ % (db, db)
        return clause
コード例 #10
0
    def get_mls_from_organizations(self):
        """Build the SQL FROM clause joining MLS messages with the
        senders' organizations via the identities database."""
        automator = Report.get_config()
        identities_db = automator['generic']['db_identities']

        # NOTE(review): this join uses enr.upeople_id while the ITS/SCM
        # variants in this project join on enr.uuid -- confirm which column
        # the enrollments schema actually has; one of the two is likely
        # outdated.
        from_ = """
            FROM messages m
              JOIN messages_people mp ON m.message_ID = mp.message_id
              JOIN people_uidentities pup ON mp.email_address = pup.people_id 
              JOIN %s.enrollments enr ON enr.upeople_id = pup.uuid 
              JOIN %s.organizations org ON org.id = enr.organization_id 
        """ % (identities_db, identities_db)
        return from_
コード例 #11
0
    def get_mls_from_organizations(self):
        """Return the SQL FROM clause linking mailing list messages to
        the organizations of their senders."""
        identities_db = Report.get_config()['generic']['db_identities']

        return """
            FROM messages m
              JOIN messages_people mp ON m.message_ID = mp.message_id
              JOIN people_uidentities pup ON mp.email_address = pup.people_id 
              JOIN %s.enrollments enr ON enr.upeople_id = pup.uuid 
              JOIN %s.organizations org ON org.id = enr.organization_id 
        """ % (identities_db, identities_db)
コード例 #12
0
 def get_scm_from_organizations(self, committers = False):
     """Build the SQL FROM clause joining SCM commits with organizations.

     committers: when True, join on the committer identity; otherwise
     on the author identity.
     """
     identities_db = Report.get_config()['generic']['db_identities']
     person_field = "s.committer_id" if committers else "s.author_id"
     clause = """
         FROM scmlog s
           JOIN actions a ON a.commit_id = s.id
           JOIN people_uidentities pup ON %s = pup.people_id
           JOIN %s.enrollments enr ON enr.uuid = pup.uuid
           JOIN %s.organizations org ON org.id = enr.organization_id
     """ % (person_field, identities_db, identities_db)
     return clause
コード例 #13
0
    def get_top_data(cls, startdate, enddate, identities_db, filter_, npeople):
        """Return the "top contributors" data for the data source *cls*.

        Without a filter, builds top openers/closers lists for the whole
        period, the last month and the last year. With a company/domain/
        repository filter, only the closers lists are produced; other
        filters return None.

        cls: data source class
        startdate, enddate: quoted date strings delimiting the study
        identities_db: identities database name (unused here, kept for API)
        filter_: optional Filter restricting the analysis
        npeople: number of people per top list
        """
        # NOTE(review): the results of these two calls are unused; kept in
        # case get_bots()/_get_closed_condition() have side effects.
        bots = cls.get_bots()
        closed_condition = cls._get_closed_condition()
        # TODO: It should be configurable from Automator
        top_issues_on = False
        top = None
        mopeners = DataSource.get_metrics("openers", cls)
        mclosers = DataSource.get_metrics("closers", cls)
        # We should check this metric is ON
        stories_openers = DataSource.get_metrics("stories_openers", cls)
        if mopeners is None or mclosers is None:
            return None

        type_analysis = None
        if filter_ is not None:
            type_analysis = filter_.get_type_analysis()
        mfilter = MetricFilters(None, startdate, enddate, type_analysis,
                                npeople)
        if mclosers.filters.closed_condition is not None:
            mfilter.closed_condition = mclosers.filters.closed_condition

        if filter_ is None:
            # Same query order as before: closers 0/31/365, then openers.
            top = {}
            for name, metric in (("closers", mclosers),
                                 ("openers", mopeners)):
                top[name + '.'] = metric.get_list(mfilter, 0)
                top[name + '.last month'] = metric.get_list(mfilter, 31)
                top[name + '.last year'] = metric.get_list(mfilter, 365)

            if top_issues_on:
                from vizgrimoire.analysis.top_issues import TopIssues
                from vizgrimoire.report import Report
                config = Report.get_config()
                db_identities = config['generic']['db_identities']
                dbuser = config['generic']['db_user']
                dbpass = config['generic']['db_password']
                dbname = config['generic'][cls.get_db_name()]
                dbcon = ITSQuery(dbuser, dbpass, dbname, db_identities)
                metric_filters = MetricFilters(None, startdate, enddate, [])
                top.update(TopIssues(dbcon, metric_filters).result(cls))

            # NOTE(review): a stories_openers top block existed here but was
            # permanently disabled with "if False and ..."; removed as dead
            # code.
        else:
            filter_name = filter_.get_name()
            if filter_name in ("company", "domain", "repository"):
                # Fix: the original repeated this membership test in a nested
                # if with identical condition, making its else branch (the
                # unfiltered mclosers.get_list(mfilter) path) unreachable.
                # The redundant check is removed; behavior is unchanged.
                top = {}
                top['closers.'] = mclosers.get_list(mfilter, 0)
                top['closers.last month'] = mclosers.get_list(mfilter, 31)
                top['closers.last year'] = mclosers.get_list(mfilter, 365)
            else:
                top = None

        return top
コード例 #14
0
    def result(self, data_source=None, destdir=None):
        """Build the organizations activity report and write it to JSON.

        Only runs when data_source is SCM and destdir is given. Gathers
        per-organization metrics from the SCM database, then rebinds
        self.db to the ITS (tickets) and MLS (messages) databases for the
        remaining metrics, and finally writes
        <destdir>/organizations-activity.json.
        """
        if data_source != SCM or destdir is None: return None

        # Database connection parameters from the automator config.
        automator = Report.get_config()
        db_identities = automator["generic"]["db_identities"]
        dbuser = automator["generic"]["db_user"]
        dbpass = automator["generic"]["db_password"]

        # Date range; end_date defaults to today when not configured.
        start_date = automator['r']['start_date']
        if 'end_date' not in automator['r']:
            end_date = time.strftime('%Y-%m-%d')
        else:
            end_date = automator['r']['end_date']

        start_year = int(start_date.split("-")[0])
        end_year = int(end_date.split("-")[0])

        activity = {}
        activity['name'] = []

        # Commits
        data = self.db.ExecuteQuery(self.get_sql_commits())
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("commits", activity, start_year, end_year)
        # Authors
        data = self.db.ExecuteQuery(self.get_sql_authors())
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("authors", activity, start_year, end_year)
        # Committers
        data = self.db.ExecuteQuery(self.get_sql_committers())
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("committers", activity, start_year, end_year)
        # Committers active: only valid for today
        data = self.db.ExecuteQuery(self.get_sql_committers(None, True))
        data = self._convert_dict_field(data, "committers_active",
                                        "committers-active")
        activity = self.add_organizations_data(activity, data)
        # Committers inactive: only valid for today
        activity['committers-inactive'] = \
            [ activity['committers'][i] - activity['committers-active'][i] \
             for i in range(0, len(activity['committers']))]
        activity['committers-percent-active'] = []
        for i in range(0, len(activity['committers'])):
            if activity['committers'][i] == 0:
                # Avoid division by zero: no committers counts as 100%.
                activity['committers-percent-active'].append(100)
            else:
                # NOTE(review): under Python 2 this is integer division, so
                # the percentage is truncated -- presumably intentional.
                activity['committers-percent-active'].append(\
                (activity['committers-active'][i]*100) / activity['committers'][i])
        # Actions
        data = self.db.ExecuteQuery(self.get_sql_actions())
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("actions", activity, start_year, end_year)
        # Source lines of code added
        data = self.db.ExecuteQuery(self.get_sql_lines_added())
        data = self._convert_dict_field(data, "lines_added", "lines-added")
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("lines-added", activity, start_year, end_year)
        # Source lines of code removed
        data = self.db.ExecuteQuery(self.get_sql_lines_removed())
        data = self._convert_dict_field(data, "lines_removed", "lines-removed")
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("lines-removed", activity, start_year, end_year)
        # Source lines of code total (added+removed)
        data = self.db.ExecuteQuery(self.get_sql_lines_total())
        data = self._convert_dict_field(data, "lines_total", "lines-total")
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("lines-total", activity, start_year, end_year)
        # Lines per commit
        self.add_metric_lines_commit(activity, start_year, end_year)

        # We need to change the db to tickets
        dbname = automator["generic"]["db_bicho"]
        dsquery = ITS.get_query_builder()
        dbcon = dsquery(dbuser, dbpass, dbname, db_identities)
        self.db = dbcon
        # Tickets opened
        data = self.db.ExecuteQuery(self.get_sql_opened())
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("opened", activity, start_year, end_year)
        # Tickets closed
        data = self.db.ExecuteQuery(self.get_sql_closed())
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("closed", activity, start_year, end_year)

        # Messages sent
        dbname = automator["generic"]["db_mlstats"]
        dsquery = MLS.get_query_builder()
        dbcon = dsquery(dbuser, dbpass, dbname, db_identities)
        self.db = dbcon

        data = self.db.ExecuteQuery(self.get_sql_sent())
        activity = self.add_organizations_data(activity, data)
        self.add_metric_years("sent", activity, start_year, end_year)

        # Write the final JSON report.
        createJSON(activity, destdir + "/organizations-activity.json")
        logging.info(destdir + "/organizations-activity.json created")
コード例 #15
0
    def get_metrics_data(DS,
                         period,
                         startdate,
                         enddate,
                         identities_db,
                         filter_=None,
                         evol=False):
        """Get basic data from all core metrics.

        DS: data source class (SCM, ITS, MLS, ...)
        period: time period granularity for time series
        startdate, enddate: quoted date strings delimiting the queries
        identities_db: name of the identities database
        filter_: optional Filter restricting the metrics to one item type
        evol: True for time series (evolution), False for aggregated data

        Returns a dict merging the results of every active metric (plus
        init/end dates and trend deltas in the aggregated case).
        """
        from vizgrimoire.GrimoireUtils import fill_and_order_items
        from vizgrimoire.ITS import ITS
        from vizgrimoire.MLS import MLS
        data = {}
        # Pick the query builder matching the data source.
        dsquery = DSQuery
        if DS == ITS:
            dsquery = ITSQuery
        if DS == MLS:
            dsquery = MLSQuery

        from vizgrimoire.report import Report
        automator = Report.get_config()

        # Core metric ids: time-series or aggregated set.
        if evol:
            metrics_on = DS.get_metrics_core_ts()
            automator_metrics = DS.get_name() + "_metrics_ts"
        else:
            metrics_on = DS.get_metrics_core_agg()
            automator_metrics = DS.get_name() + "_metrics_agg"

        # The automator config may override the list of metrics.
        if automator_metrics in automator['r']:
            metrics_on = automator['r'][automator_metrics].split(",")

        people_out = []
        if "people_out" in automator['r']:
            people_out = automator['r']["people_out"].split(",")

        type_analysis = None
        if filter_ is not None:
            type_analysis = filter_.get_type_analysis()

        items = None
        if type_analysis and type_analysis[1] is None:
            # We need the items for filling later values in group-by queries
            items = DS.get_filter_items(filter_, startdate, enddate,
                                        identities_db)
            if items is None:
                return data
            items = items.pop('name')

        # Per-data-source date range overrides from the automator config.
        if DS.get_name() + "_startdate" in automator['r']:
            startdate = automator['r'][DS.get_name() + "_startdate"]
        if DS.get_name() + "_enddate" in automator['r']:
            enddate = automator['r'][DS.get_name() + "_enddate"]
        # TODO: the hardcoded 10 should be removed, and use instead the
        #       npeople provided in the config file.
        mfilter = MetricFilters(period, startdate, enddate, type_analysis, 10,
                                people_out, None)
        metrics_reports = DS.get_metrics_core_reports()
        all_metrics = DS.get_metrics_set(DS)

        # Reports = filters metrics not available inside filters
        if type_analysis is None:
            reports_on = automator['r']['reports'].split(",")
            for r in metrics_reports:
                if r in reports_on:
                    metrics_on += [r]

        # Fix: initialize id_field so the aggregated branch below cannot hit
        # a NameError when no metric produced a grouped value in the loop.
        id_field = None
        for item in all_metrics:
            if item.id not in metrics_on:
                continue
            # Temporarily swap in the shared filter, preserving the metric's
            # own global filter and closed condition, then restore it.
            mfilter_orig = item.filters
            mfilter.global_filter = mfilter_orig.global_filter
            mfilter.set_closed_condition(mfilter_orig.closed_condition)
            item.filters = mfilter
            mvalue = item.get_ts() if evol else item.get_agg()

            if type_analysis and type_analysis[1] is None and mvalue:
                logging.info(item.id)
                id_field = None
                # Support for combined filters
                for idf in mvalue.keys():
                    if "CONCAT(" in idf:
                        id_field = idf
                        break
                if id_field is None:
                    id_field = dsquery.get_group_field_alias(type_analysis[0])
                mvalue = fill_and_order_items(items, mvalue, id_field, evol,
                                              period, startdate, enddate)
            # Fix: guard against metrics returning None (the original
            # Python-2-only dict(data.items() + mvalue.items()) crashed).
            if mvalue:
                data.update(mvalue)

            item.filters = mfilter_orig

        if not evol:
            init_date = DS.get_date_init(startdate, enddate, identities_db,
                                         type_analysis)
            end_date = DS.get_date_end(startdate, enddate, identities_db,
                                       type_analysis)

            if type_analysis and type_analysis[1] is None:
                if id_field is None:
                    id_field = dsquery.get_group_field_alias(type_analysis[0])
                init_date = fill_and_order_items(items, init_date, id_field,
                                                 evol, period, startdate,
                                                 enddate)
                end_date = fill_and_order_items(items, end_date, id_field,
                                                evol, period, startdate,
                                                enddate)
            if init_date is None:
                init_date = {}
            if end_date is None:
                end_date = {}
            data.update(init_date)
            data.update(end_date)

            # Tendencies
            metrics_trends = DS.get_metrics_core_trends()

            automator_metrics = DS.get_name() + "_metrics_trends"
            if automator_metrics in automator['r']:
                metrics_trends = automator['r'][automator_metrics].split(",")

            for days in [7, 30, 365]:
                for item in all_metrics:
                    if item.id not in metrics_trends:
                        continue
                    mfilter_orig = item.filters
                    item.filters = mfilter
                    period_data = item.get_trends(enddate, days)
                    item.filters = mfilter_orig

                    if type_analysis and type_analysis[1] is None:
                        group_field = dsquery.get_group_field_alias(
                            type_analysis[0])
                        period_data = fill_and_order_items(
                            items, period_data, group_field)

                    data.update(period_data)

        return data
コード例 #16
0
ファイル: data_source.py プロジェクト: jgbarah/GrimoireLib
    def get_metrics_data(DS, period, startdate, enddate, identities_db,
                         filter_ = None, evol = False):
        """Get basic data from all core metrics.

        DS: data source class (SCM, ITS, MLS, ...)
        period: time period granularity for time series
        startdate, enddate: quoted date strings delimiting the queries
        identities_db: name of the identities database
        filter_: optional Filter restricting the metrics to one item type
        evol: True for time series (evolution), False for aggregated data

        Returns a dict merging the results of every active metric.
        """
        from vizgrimoire.GrimoireUtils import fill_and_order_items
        from vizgrimoire.ITS import ITS
        from vizgrimoire.MLS import MLS
        data = {}
        # Pick the query builder matching the data source.
        dsquery = DSQuery
        if DS == ITS: dsquery = ITSQuery
        if DS == MLS: dsquery = MLSQuery

        from vizgrimoire.report import Report
        automator = Report.get_config()

        # Core metric ids: time-series or aggregated set.
        if evol:
            metrics_on = DS.get_metrics_core_ts()
            automator_metrics = DS.get_name()+"_metrics_ts"
        else:
            metrics_on = DS.get_metrics_core_agg()
            automator_metrics = DS.get_name()+"_metrics_agg"

        # The automator config may override the list of metrics.
        if automator_metrics in automator['r']:
            metrics_on = automator['r'][automator_metrics].split(",")

        people_out = []
        if "people_out" in Report.get_config()['r']:
            people_out = Report.get_config()['r']["people_out"]
            people_out = people_out.split(",")


        type_analysis = None
        if filter_ is not None:
            type_analysis = filter_.get_type_analysis()

        if type_analysis and type_analysis[1] is None:
            # We need the items for filling later values in group by queries
            items = DS.get_filter_items(filter_, startdate, enddate, identities_db)
            if items is None: return data
            items = items.pop('name')

        # Per-data-source date range overrides from the automator config.
        if DS.get_name()+"_startdate" in Report.get_config()['r']:
            startdate = Report.get_config()['r'][DS.get_name()+"_startdate"]
        if DS.get_name()+"_enddate" in Report.get_config()['r']:
            enddate = Report.get_config()['r'][DS.get_name()+"_enddate"]
        # TODO: the hardcoded 10 should be removed, and use instead the npeople provided
        #       in the config file.
        mfilter = MetricFilters(period, startdate, enddate, type_analysis, 10, people_out, None)
        metrics_reports = DS.get_metrics_core_reports()
        all_metrics = DS.get_metrics_set(DS)

        # Reports = filters metrics not available inside filters
        if type_analysis is None:
            from vizgrimoire.report import Report
            reports_on = Report.get_config()['r']['reports'].split(",")
            for r in metrics_reports:
                if r in reports_on: metrics_on += [r]

        for item in all_metrics:
            # print item
            if item.id not in metrics_on: continue
            # Temporarily swap in the shared filter, preserving the metric's
            # own global filter and closed condition, then restore it below.
            mfilter_orig = item.filters
            mfilter.global_filter = mfilter_orig.global_filter
            mfilter.set_closed_condition(mfilter_orig.closed_condition)
            item.filters = mfilter
            if evol: mvalue = item.get_ts()
            else:    mvalue = item.get_agg()

            if type_analysis and type_analysis[1] is None and mvalue:
                logging.info(item.id)
                id_field = None
                # Support for combined filters
                for idf in mvalue.keys():
                    if "CONCAT(" in idf:
                        id_field = idf
                        break
                if id_field is None:
                    id_field = dsquery.get_group_field_alias(type_analysis[0])
                mvalue = fill_and_order_items(items, mvalue, id_field,
                                              evol, period, startdate, enddate)
            # NOTE(review): Python-2-only merge; also raises TypeError when a
            # metric returns None for mvalue.
            data = dict(data.items() + mvalue.items())

            item.filters = mfilter_orig

        if not evol:
            init_date = DS.get_date_init(startdate, enddate, identities_db, type_analysis)
            end_date = DS.get_date_end(startdate, enddate, identities_db, type_analysis)

            if type_analysis and type_analysis[1] is None:
                # NOTE(review): id_field may be unbound here (NameError) if
                # no metric produced a grouped value in the loop above.
                if id_field is None:
                    id_field = dsquery.get_group_field_alias(type_analysis[0])
                init_date = fill_and_order_items(items, init_date, id_field,
                                                 evol, period, startdate, enddate)
                end_date = fill_and_order_items(items, end_date, id_field,
                                                evol, period, startdate, enddate)
            if init_date is None: init_date = {}
            if end_date is None: end_date = {}
            data = dict(data.items() + init_date.items() + end_date.items())

            # Tendencies
            metrics_trends = DS.get_metrics_core_trends()

            automator_metrics = DS.get_name()+"_metrics_trends"
            if automator_metrics in automator['r']:
                metrics_trends = automator['r'][automator_metrics].split(",")

            for i in [7,30,365]:
                for item in all_metrics:
                    if item.id not in metrics_trends: continue
                    mfilter_orig = item.filters
                    item.filters = mfilter
                    period_data = item.get_trends(enddate, i)
                    item.filters = mfilter_orig

                    if type_analysis and type_analysis[1] is None:
                        group_field = dsquery.get_group_field_alias(type_analysis[0])
                        period_data = fill_and_order_items(items, period_data, group_field)

                    data = dict(data.items() + period_data.items())

        return data
コード例 #17
0
    def result(self, data_source = None, destdir = None):
        """ Compute yearly activity per organization and write it to
        <destdir>/organizations-activity.json.

        Collects SCM activity (commits, authors, committers, actions,
        lines added/removed/total), then rebinds self.db to the ITS
        database for tickets opened/closed, and finally to the MLS
        database for messages sent. Only runs for the SCM data source.

        data_source -- must be SCM, otherwise nothing is done
        destdir     -- output directory for the JSON file; required

        Returns None; the result is written to disk as a side effect.
        NOTE(review): self.db is left bound to the MLS connection after
        this method returns — confirm callers do not rely on the
        original connection afterwards.
        """
        if data_source != SCM or destdir is None: return None

        # Database credentials come from the Automator configuration
        automator = Report.get_config()
        db_identities = automator["generic"]["db_identities"]
        dbuser = automator["generic"]["db_user"]
        dbpass = automator["generic"]["db_password"]

        start_date = automator['r']['start_date']
        if 'end_date' not in automator['r']:
            # No explicit end date configured: default to today
            end_date = time.strftime('%Y-%m-%d')
        else:
            end_date = automator['r']['end_date']

        # Only the year component is used for the per-year breakdown
        start_year = int(start_date.split("-")[0])
        end_year = int(end_date.split("-")[0])

        # activity accumulates one entry per organization plus one list
        # per metric-year combination
        activity = {}
        activity['name'] = []

        # Commits
        data = self.db.ExecuteQuery(self.get_sql_commits())
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("commits", activity, start_year, end_year)
        # Authors
        data = self.db.ExecuteQuery(self.get_sql_authors())
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("authors", activity, start_year, end_year)
        # Committers
        data = self.db.ExecuteQuery(self.get_sql_committers())
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("committers", activity, start_year, end_year)
        # Committers active: only valid for today
        data = self.db.ExecuteQuery(self.get_sql_committers(None, True))
        # Rename the DB field to the dash-separated JSON naming style
        data = self._convert_dict_field(data, "committers_active","committers-active")
        activity = self.add_organizations_data (activity, data)
        # Committers inactive: only valid for today
        # Derived per organization as total committers minus active ones
        activity['committers-inactive'] = \
            [ activity['committers'][i] - activity['committers-active'][i] \
             for i in range(0, len(activity['committers']))]
        activity['committers-percent-active'] = []
        for i in range(0, len(activity['committers'])):
            if activity['committers'][i] == 0:
                # Avoid division by zero: no committers counts as 100% active
                activity['committers-percent-active'].append(100)
            else:
                # NOTE(review): presumably integer counts, so this is a
                # truncating (Python 2) integer percentage — confirm
                activity['committers-percent-active'].append(\
                (activity['committers-active'][i]*100) / activity['committers'][i])
        # Actions
        data = self.db.ExecuteQuery(self.get_sql_actions())
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("actions", activity, start_year, end_year)
        # Source lines of code added
        data = self.db.ExecuteQuery(self.get_sql_lines_added())
        data = self._convert_dict_field(data, "lines_added", "lines-added")
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("lines-added", activity, start_year, end_year)
        # Source lines of code removed
        data = self.db.ExecuteQuery(self.get_sql_lines_removed())
        data = self._convert_dict_field(data, "lines_removed","lines-removed")
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("lines-removed", activity, start_year, end_year)
        # Source lines of code total (added+removed)
        data = self.db.ExecuteQuery(self.get_sql_lines_total())
        data = self._convert_dict_field(data, "lines_total","lines-total")
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("lines-total", activity, start_year, end_year)
        # Lines per commit
        self.add_metric_lines_commit(activity, start_year, end_year)

        # We need to change the db to tickets
        dbname = automator["generic"]["db_bicho"]
        dsquery = ITS.get_query_builder()
        dbcon = dsquery(dbuser, dbpass, dbname, db_identities)
        self.db = dbcon
        # Tickets opened
        data = self.db.ExecuteQuery(self.get_sql_opened())
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("opened", activity, start_year, end_year)
        # Tickets closed
        data = self.db.ExecuteQuery(self.get_sql_closed())
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("closed", activity, start_year, end_year)

        # Messages sent: switch to the mailing-list stats database
        dbname = automator["generic"]["db_mlstats"]
        dsquery = MLS.get_query_builder()
        dbcon = dsquery(dbuser, dbpass, dbname, db_identities)
        self.db = dbcon

        data = self.db.ExecuteQuery(self.get_sql_sent())
        activity = self.add_organizations_data (activity, data)
        self.add_metric_years("sent", activity, start_year, end_year)

        createJSON(activity, destdir+"/organizations-activity.json")
        logging.info(destdir+"/organizations-activity.json created")
Code example #18 (score: 0)
    def __get_data__ (period, startdate, enddate, identities_db, filter_ = None, evol = False):
        """ Build the JSON-ready dict of SCR (code review) metric values.

        Computes either time series (evol=True) or aggregated values
        (evol=False) for every active metric, plus trends and studies
        data in the aggregated case.

        period        -- time period granularity for time series
        startdate     -- start date as a quoted SQL string
        enddate       -- end date as a quoted SQL string
        identities_db -- name of the identities database
        filter_       -- optional Filter; when its item is None, values
                         are grouped by every item of the filter type
        evol          -- True for time series, False for aggregated

        Returns a dict merging all computed metric values.
        """
        data = {}
        DS = SCR
        from vizgrimoire.GrimoireUtils import fill_and_order_items

        # type_analysis = [filter name, filter item]; item None means
        # "group results by all items of this filter type"
        type_analysis = None
        if filter_ is not None:
            type_analysis = [filter_.get_name(), filter_.get_item()]

        from vizgrimoire.report import Report
        automator = Report.get_config()

        if evol:
            metrics_on = DS.get_metrics_core_ts()
            automator_metrics = DS.get_name()+"_metrics_ts"
        else:
            metrics_on = DS.get_metrics_core_agg()
            automator_metrics = DS.get_name()+"_metrics_agg"

        # The Automator config may override the default metric list
        if automator_metrics in automator['r']:
            metrics_on = automator['r'][automator_metrics].split(",")
            logging.info(automator_metrics + " found ")

        people_out = []
        if "people_out" in Report.get_config()['r']:
            people_out = Report.get_config()['r']["people_out"]
            people_out = people_out.split(",")

        # Report-only metrics are added in the unfiltered case only
        metrics_reports = SCR.get_metrics_core_reports()
        if filter_ is None:
            reports_on = Report.get_config()['r']['reports'].split(",")
            for r in metrics_reports:
                if r in reports_on: metrics_on += [r]

        items = None
        # Group-by field alias; set lazily below when grouping by all
        # items of a filter type (fix: was previously read before any
        # assignment when the metrics loop never entered that branch)
        id_field = None
        if type_analysis and type_analysis[1] is None:
            items = DS.get_filter_items(filter_, startdate, enddate, identities_db)
            items = items.pop('name')

        # Per-data-source date range overrides from the Automator config
        if DS.get_name()+"_start_date" in Report.get_config()['r']:
            startdate = "'"+Report.get_config()['r'][DS.get_name()+"_start_date"]+"'"
        if DS.get_name()+"_end_date" in Report.get_config()['r']:
            enddate = "'"+Report.get_config()['r'][DS.get_name()+"_end_date"]+"'"

        mfilter = MetricFilters(period, startdate, enddate, type_analysis, 10, people_out, None)
        all_metrics = SCR.get_metrics_set(SCR)

        # SCR specific: remove some metrics from filters
        if filter_ is not None:
            metrics_not_filters =  SCR.get_metrics_not_filters()
            metrics_on_filters = list(set(metrics_on) - set(metrics_not_filters))
            if filter_.get_name() == "repository":
                if 'review_time' in metrics_on: metrics_on_filters+= ['review_time']
                if 'submitted' in metrics_on: metrics_on_filters+= ['submitted']
            metrics_on = metrics_on_filters
        # END SCR specific

        for item in all_metrics:
            if item.id not in metrics_on: continue
            # Temporarily swap in the shared filter, preserving the
            # metric's own global filter; restored after computing
            mfilter_orig = item.filters
            mfilter.global_filter = mfilter_orig.global_filter
            item.filters = mfilter
            if not evol: mvalue = item.get_agg()
            else:        mvalue = item.get_ts()

            if type_analysis and type_analysis[1] is None:
                logging.info(item.id)
                id_field = SCRQuery.get_group_field_alias(type_analysis[0])
                mvalue = check_array_values(mvalue)
                mvalue = fill_and_order_items(items, mvalue, id_field,
                                              evol, period, startdate, enddate)
            data = dict(data.items() + mvalue.items())
            item.filters = mfilter_orig

        # SCR SPECIFIC #
        # Time series of review state changes (unfiltered case only)
        if evol:
            if type_analysis and type_analysis[1] is None: pass
            else:
                metrics_on_changes = ['merged','abandoned','new']
                for item in all_metrics:
                    if item.id in metrics_on_changes and filter_ is None:
                        mfilter_orig = item.filters
                        item.filters = mfilter
                        mvalue = item.get_ts_changes()
                        data = dict(data.items() + mvalue.items())
                        item.filters = mfilter_orig
        # END SCR SPECIFIC #

        if not evol:
            init_date = DS.get_date_init(startdate, enddate, identities_db, type_analysis)
            end_date = DS.get_date_end(startdate, enddate, identities_db, type_analysis)

            if type_analysis and type_analysis[1] is None:
                # id_field may not have been set in the loop above; fall
                # back to the SCR query builder (fix: the previous code
                # referenced an undefined 'dsquery' here)
                if id_field is None:
                    id_field = SCRQuery.get_group_field_alias(type_analysis[0])
                init_date = fill_and_order_items(items, init_date, id_field,
                                                 evol, period, startdate, enddate)
                end_date = fill_and_order_items(items, end_date, id_field,
                                                evol, period, startdate, enddate)

            data = dict(data.items() + init_date.items() + end_date.items())

            # Tendencies
            metrics_trends = SCR.get_metrics_core_trends()

            automator_metrics = DS.get_name()+"_metrics_trends"
            if automator_metrics in automator['r']:
                metrics_trends = automator['r'][automator_metrics].split(",")

            for i in [7,30,365]:
                for item in all_metrics:
                    if item.id not in metrics_trends: continue
                    mfilter_orig = item.filters
                    item.filters = mfilter
                    period_data = item.get_trends(enddate, i)
                    item.filters = mfilter_orig

                    if type_analysis and type_analysis[1] is None:
                        id_field = SCRQuery.get_group_field_alias(type_analysis[0])
                        period_data = fill_and_order_items(items, period_data, id_field)

                    # Merge once, after filling/ordering (fix: the data
                    # was previously also merged before filling, which
                    # injected unfilled values into the result)
                    data = dict(data.items() + period_data.items())

        # Studies are only computed for the unfiltered case
        if filter_ is not None: studies_data = {}
        else:
            studies_data = DataSource.get_studies_data(SCR, period, startdate, enddate, evol)
        data = dict(data.items() +  studies_data.items())

        return data
Code example #19 (score: 0)
File: ITS.py  Project: jgbarah/GrimoireLib
    def get_top_data (cls, startdate, enddate, identities_db, filter_, npeople):
        """ Build the "top" contributors data (openers/closers) for ITS.

        startdate, enddate -- date range as quoted SQL strings
        identities_db      -- name of the identities database (unused here,
                              kept for API compatibility)
        filter_            -- optional Filter; tops are only built for the
                              company/domain/repository filter types
        npeople            -- number of people in each top list

        Returns a dict mapping "<metric>.<period>" keys to top lists,
        or None when the required metrics are not available.
        """
        bots = cls.get_bots()
        closed_condition =  cls._get_closed_condition()
        # TODO: It should be configurable from Automator
        top_issues_on = False
        top = None
        mopeners = DataSource.get_metrics("openers", cls)
        mclosers = DataSource.get_metrics("closers", cls)
        # We should check this metric is ON
        stories_openers = DataSource.get_metrics("stories_openers", cls)
        if mopeners is None or mclosers is None: return None
        period = None
        type_analysis = None
        if filter_ is not None:
            type_analysis = filter_.get_type_analysis()
        mfilter = MetricFilters(period, startdate, enddate, type_analysis, npeople)
        # Propagate the closers metric's closed condition, if defined
        if mclosers.filters.closed_condition is not None:
            mfilter.closed_condition = mclosers.filters.closed_condition

        if filter_ is None:
            top_closers_data = {}
            top_closers_data['closers.'] =  mclosers.get_list(mfilter, 0)
            top_closers_data['closers.last month']= mclosers.get_list(mfilter, 31)
            top_closers_data['closers.last year']= mclosers.get_list(mfilter, 365)

            top_openers_data = {}
            top_openers_data['openers.'] = mopeners.get_list(mfilter, 0)
            top_openers_data['openers.last month'] = mopeners.get_list(mfilter, 31)
            top_openers_data['openers.last year'] = mopeners.get_list(mfilter, 365)

            top = dict(top_closers_data.items() + top_openers_data.items())

            if top_issues_on:
                from vizgrimoire.analysis.top_issues import TopIssues
                from vizgrimoire.report import Report
                db_identities= Report.get_config()['generic']['db_identities']
                dbuser = Report.get_config()['generic']['db_user']
                dbpass = Report.get_config()['generic']['db_password']
                dbname = Report.get_config()['generic'][cls.get_db_name()]
                dbcon = ITSQuery(dbuser, dbpass, dbname, db_identities)
                metric_filters = MetricFilters(None, startdate, enddate, [])
                top_issues_data = TopIssues(dbcon, metric_filters).result(cls)

                top = dict(top.items() + top_issues_data.items())

            # Deliberately disabled (if False): stories openers top lists
            if False and stories_openers is not None:
                top_sopeners_data = {}
                top_sopeners_data['stories_openers.'] = stories_openers.get_list(mfilter, 0)
                top_sopeners_data['stories_openers.last month'] = stories_openers.get_list(mfilter, 31)
                top_sopeners_data['stories_openers.last year'] = stories_openers.get_list(mfilter, 365)

                top = dict(top.items() + top_sopeners_data.items())
        else:
            filter_name = filter_.get_name()
            # Fix: the previous code nested the same membership test twice,
            # which made its inner else ("Remove filters above if there are
            # performance issues": top = mclosers.get_list(mfilter))
            # unreachable dead code; the single test below is equivalent.
            if filter_name in ["company","domain","repository"]:
                top = {}
                top['closers.'] =  mclosers.get_list(mfilter, 0)
                top['closers.last month']= mclosers.get_list(mfilter, 31)
                top['closers.last year']= mclosers.get_list(mfilter, 365)
            else:
                top = None

        return top