Exemplo n.º 1
0
def pytest_funcarg__plm(request):
    """
    Provide the requesting test with a PushLogModel instance.

    A finalizer truncates the pushlog tables after each test so that
    tests stay isolated from one another.
    """
    from datazilla.model import PushLogModel

    pushlog_model = PushLogModel(
        request._pyfuncitem.session.pushlog_name,
        out=sys.stdout,
        verbosity=2,
    )

    cleanup = partial(truncate, pushlog_model, ["branches", "branch_map"])
    request.addfinalizer(cleanup)
    return pushlog_model
    def handle_project(self, project, **options):
        """Backfill push data for recent test runs missing dimension data."""

        def to_seconds(td):
            # Manual timedelta-to-seconds conversion (kept arithmetic
            # identical to preserve integer-division semantics).
            return (td.microseconds +
                    (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6

        days_back = int(options.get("numdays", 1))
        cutoff = int(time.time()) - to_seconds(timedelta(days_back))

        mtm = MetricsTestModel(project)

        run_ids = mtm.get_test_runs_not_in_all_dimensions(cutoff)
        self.stdout.write("test run ids {0}\n".format(str(len(run_ids))))

        # Process the test run ids in fixed-size batches.
        batch_size = 20
        batches = [run_ids[start:start + batch_size]
                   for start in range(0, len(run_ids), batch_size)]

        plm = PushLogModel()

        for batch in batches:

            self.stdout.write("Processing ids {0}\n".format(str(batch)))

            missing_push_data = mtm.load_test_data_all_dimensions(batch)

            if missing_push_data:

                revision_nodes = {}

                for rev in missing_push_data:
                    # Resolve each revision to its pushlog node.
                    revision_nodes[rev] = plm.get_node_from_revision(
                        rev, missing_push_data[rev])

                mtm.set_push_data_all_dimensions(revision_nodes)

        plm.disconnect()
        mtm.disconnect()
Exemplo n.º 3
0
    def handle_project(self, project, **options):
        """
        Process pending objects for ``project`` and backfill push data.

        options:
            pushlog_project -- name of the pushlog database (default 'pushlog')
            loadlimit       -- max number of objects to process (default 1)
            debug           -- debug flag (default None)
        """

        self.stdout.write("Processing project {0}\n".format(project))

        pushlog_project = options.get("pushlog_project", 'pushlog')
        loadlimit = int(options.get("loadlimit", 1))
        debug = options.get("debug", None)

        # Removed a dead `test_run_ids = []` initialization that was
        # immediately overwritten by process_objects().
        ptm = PerformanceTestModel(project)
        test_run_ids = ptm.process_objects(loadlimit)
        ptm.disconnect()

        # NOTE(review): metrics computation is deliberately disabled below
        # (dead string literal); kept for reference — confirm before removal.
        """
        metrics_exclude_projects = set(['b2g', 'games', 'jetperf', 'marketapps', 'microperf', 'stoneridge', 'test', 'webpagetest'])
        if project not in metrics_exclude_projects:
            #minimum required number of replicates for
            #metrics processing
            replicate_min = 5
            compute_test_run_metrics(
                project, pushlog_project, debug, replicate_min, test_run_ids
                )
        """

        mtm = MetricsTestModel(project)
        revisions_without_push_data = mtm.load_test_data_all_dimensions(
            test_run_ids)

        if revisions_without_push_data:

            revision_nodes = {}
            plm = PushLogModel(pushlog_project)

            for revision in revisions_without_push_data:
                # Resolve each revision to its pushlog node.
                revision_nodes[revision] = plm.get_node_from_revision(
                    revision, revisions_without_push_data[revision])

            plm.disconnect()
            mtm.set_push_data_all_dimensions(revision_nodes)

        mtm.disconnect()
Exemplo n.º 4
0
def pytest_sessionfinish(session):
    """Tear down the test environment, including databases."""
    print("\n")

    from django.conf import settings
    from datazilla.model import PerformanceTestModel, PushLogModel
    import MySQLdb

    # Wrap in list() so extend() works whether .values() returns a list
    # (Python 2) or a view object (Python 3).
    source_list = list(
        PerformanceTestModel(session.perftest_name).sources.values())
    source_list.extend(
        PushLogModel(project=session.pushlog_name).sources.values())

    for sds in source_list:
        conn = MySQLdb.connect(
            host=sds.datasource.host,
            user=settings.DATAZILLA_DATABASE_USER,
            passwd=settings.DATAZILLA_DATABASE_PASSWORD,
        )
        try:
            # Close the cursor explicitly, and guarantee the connection is
            # released even if the DROP DATABASE statement fails.
            cur = conn.cursor()
            try:
                cur.execute("DROP DATABASE {0}".format(sds.datasource.name))
            finally:
                cur.close()
        finally:
            conn.close()

    session.django_runner.teardown_databases(session.django_db_config)
    session.django_runner.teardown_test_environment()
Exemplo n.º 5
0
def compute_test_run_metrics(project,
                             pushlog_project,
                             debug,
                             replicate_min,
                             test_run_ids=None):
    """
    Runs all metric tests and associated summaries on a list of test run ids.

    project -- name of the project whose metrics are computed
    pushlog_project -- name of the pushlog database
    debug -- when truthy, progress messages are emitted via println()
    replicate_min -- minimum replicate count required to run metrics
    test_run_ids -- iterable of test run ids (default None -> no work).
        The default was a mutable [] — replaced with None, which the
        normalization below treats identically.
    """
    ###
    #Insure that test_run_ids is iterable; if process objects generates
    #an error it's possible that test_run_ids will be explicitly set to
    #None.
    ###
    test_run_ids = test_run_ids or []

    ##
    #Get test data for test run ids
    ##
    plm = PushLogModel(pushlog_project)
    mtm = MetricsTestModel(project)

    try:
        #####
        #We don't know if we need the pushlog, or for what branches
        #it will be required.  Make sure to only retrieve once for each
        #branch encountered and only when we need it.
        ####
        pushlog = {}

        #####
        #This data structure is used to look up the index position
        #of a revision in the push log to start walking from
        #####
        pushlog_lookup = {}

        for test_run_id in test_run_ids:

            child_test_data = mtm.get_test_values_by_test_run_id(test_run_id)

            if not child_test_data:
                msg = u"No test data available for test run id {0}".format(
                    test_run_id)
                println(msg, debug)
                continue

            first_key = _get_first_mkey(child_test_data)

            rep_count = len(child_test_data[first_key]['values'])

            test_name = child_test_data[first_key]['ref_data']['test_name']

            child_revision, push_node, branch = _get_revision_and_push_node(
                plm, child_test_data, first_key)

            base_message = u"{0} {1}".format(child_revision, str(test_run_id))

            if not check_run_conditions(test_name, rep_count, push_node,
                                        branch, replicate_min, debug):
                println(u"Not able to run {0}\n".format(base_message), debug)
                continue

            #The test and its replicates pass the run conditions
            println(u"Running {0}".format(base_message), debug)

            stored_metric_keys = []

            try:
                stored_metric_keys = _run_metrics(test_run_id, mtm, plm,
                                                  child_test_data, pushlog,
                                                  pushlog_lookup,
                                                  child_revision, push_node,
                                                  branch, test_name, debug)

            except Exception as e:
                _handle_exception(mtm, e, test_name, child_revision,
                                  test_run_id,
                                  compute_test_run_metrics.__name__, debug)

            try:
                _run_summary(test_run_id, mtm, plm, child_revision,
                             child_test_data, stored_metric_keys, push_node,
                             debug)

            except Exception as e:
                _handle_exception(mtm, e, test_name, child_revision,
                                  test_run_id,
                                  compute_test_run_metrics.__name__, debug)

            println(u"\tProcessing complete for {0}\n".format(base_message),
                    debug)

    finally:
        # Always release both connections, even if an un-wrapped call
        # (e.g. _get_first_mkey) raises mid-loop.
        plm.disconnect()
        mtm.disconnect()
Exemplo n.º 6
0
class Command(BaseCommand):
    """
    Management command to update the pushlog table with the latest pushes.

    example resulting url:
        https://hg.mozilla.org/integration/mozilla-inbound/json-pushes?full=1&startdate=06/04/2012&enddate=06/07/2012


    """
    # Base name for the pid/log files used to detect hung invocations.
    LOCK_FILE = "update_pushlog"

    help = "Update the repo pushlog table."

    option_list = BaseCommand.option_list + (
        make_option("--repo_host",
                    action="store",
                    dest="repo_host",
                    default=None,
                    help="The host name for the repo (e.g. hg.mozilla.org)"),
        make_option("--enddate",
                    action="store",
                    dest="enddate",
                    default=None,
                    help="(optional) The ending date range for pushlogs in " +
                    "the format: MM/DD/YYYY.  Default to today."),
        make_option("--numdays",
                    action="store",
                    dest="numdays",
                    default=None,
                    help="Number of days worth of pushlogs to return."),
        make_option("--hours",
                    action="store",
                    dest="hours",
                    default=None,
                    help="Number of hours worth of pushlogs to return."),

        # probably mostly for testing purposes, but could be otherwise useful.
        make_option("--branch",
                    action="store",
                    dest="branch",
                    default=None,
                    help="The branch to import pushlogs for (default to all)"),

        # probably mostly for testing purposes, but could be otherwise useful.
        make_option("--project",
                    action="store",
                    dest="project",
                    default=None,
                    help=("The project name for the the pushlog database " +
                          "storage (default to 'pushlog')")),
    )

    def println(self, val):
        """Write ``val`` to stdout followed by a newline."""
        self.stdout.write("{0}\n".format(str(val)))

    def handle(self, *args, **options):
        """ Store pushlog data in the database. """

        repo_host = options.get("repo_host")
        enddate = options.get("enddate")
        numdays = options.get("numdays")
        hours = options.get("hours")
        branch = options.get("branch")
        verbosity = options.get("verbosity")
        project = options.get("project")

        if not repo_host:
            raise CommandError(
                "You must supply a host name for the repo pushlogs " +
                "to store: --repo_host hostname")

        if not numdays and not hours:
            raise CommandError(
                "You must supply the number of days or hours of data.")

        if numdays:
            try:
                numdays = int(numdays)
            except ValueError:
                raise CommandError("numdays must be an integer.")

        if hours:
            try:
                hours = int(hours)
            except ValueError:
                raise CommandError("hours must be an integer.")

        pidfile = "{0}.pid".format(self.LOCK_FILE)

        if os.path.isfile(pidfile):

            pid = ""
            with open(pidfile) as f:
                pid = f.readline().strip()

            ####
            #If we have a pid file assume the update_pushlog command is
            #hanging on an intermitent urllib timeout from the call to the
            #json-pushes web service method and kill the hanging program.
            ####
            if pid:

                logfile_name = "{0}.log".format(self.LOCK_FILE)
                time_stamp = str(time.time()).split('.')[0]

                try:
                    os.kill(int(pid), signal.SIGKILL)

                # `except OSError as err` is valid on Python 2.6+ and 3.x,
                # unlike the old comma form.
                except OSError as err:

                    msg = ""
                    if err.errno == errno.ESRCH:
                        msg = "pid:{0} time:{1}, Not running\n".format(
                            pid, time_stamp)
                    elif err.errno == errno.EPERM:
                        msg = "pid:{0} time:{1}, No permission to signal process\n".format(
                            pid, time_stamp)
                    else:
                        # BUG FIX: original passed (pid, str(err),
                        # time_stampe) — an undefined-name typo, with the
                        # timestamp and error text in swapped placeholders.
                        msg = "pid:{0} time:{1}, Generated unknown error {2}\n".format(
                            pid, time_stamp, str(err))

                    # `with` guarantees the log file handle is closed.
                    with open(logfile_name, 'a+') as log_file:
                        log_file.write(msg)

                    #make sure we get rid of any pid file on error
                    os.unlink(pidfile)

                else:

                    #log the kill
                    with open(logfile_name, 'a+') as log_file:
                        log_file.write("pid:{0} time:{1}, Killed\n".format(
                            pid, time_stamp))

                    #remove any existing pidfile
                    os.unlink(pidfile)

        # Write pid file.  open() replaces the Python-2-only file()
        # builtin, and `with` closes the handle the original leaked.
        with open(pidfile, 'w') as f:
            f.write(str(os.getpid()))

        plm = PushLogModel(project=project,
                           out=self.stdout,
                           verbosity=verbosity)

        # store the pushlogs for the branch specified, or all branches
        summary = plm.store_pushlogs(repo_host, numdays, hours, enddate,
                                     branch)
        self.println(("Branches: {0}\nPushlogs stored: {1}, skipped: {2}\n" +
                      "Changesets stored: {3}, skipped: {4}").format(
                          summary["branches"],
                          summary["pushlogs_stored"],
                          summary["pushlogs_skipped"],
                          summary["changesets_stored"],
                          summary["changesets_skipped"],
                      ))

        plm.disconnect()

        os.unlink(pidfile)