Example 1
def get_standard_metrics(capture, actions):
    analysis_props = _get_analysis_props(capture.metadata["captureDevice"])

    metrics = {}
    if "unique_frames" in analysis_props["valid_measures"]:
        metrics["uniqueframes"] = videocapture.get_num_unique_frames(
            capture, threshold=analysis_props["animation_threshold"]
        )
    if "fps" in analysis_props["valid_measures"]:
        metrics["fps"] = videocapture.get_fps(capture, threshold=analysis_props["animation_threshold"])
    if "checkerboard" in analysis_props["valid_measures"]:
        metrics["checkerboard"] = videocapture.get_checkerboarding_area_duration(capture)
    if "overallentropy" in analysis_props["valid_measures"]:
        metrics["overallentropy"] = videocapture.get_overall_entropy(
            capture, edge_detection=analysis_props["edge_detection"]
        )

    if actions:
        # get the delta between the first non-sleep action being fired and
        # there being a visible change
        first_non_sleep_action = None
        for action in actions:
            if action["type"] != "sleep":
                first_non_sleep_action = action
                break
        if first_non_sleep_action:
            framediffs = videocapture.get_framediff_sums(capture)
            for (i, framediff) in enumerate(framediffs):
                t = i / float(capture.fps)
                if first_non_sleep_action["start"] < t and framediff >= analysis_props["input_threshold"]:
                    metrics["timetoresponse"] = t - first_non_sleep_action["start"]
                    return metrics

    return metrics
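For orientation, a minimal sketch of how this helper might be driven, assuming a capture archive on disk and an action log shaped like the one the loop above reads; the capture path and the action types are illustrative, not taken from the project:

import videocapture

# Illustrative inputs: the path and the actions are assumptions; the
# 'type'/'start' keys mirror what the loop in get_standard_metrics reads.
capture = videocapture.Capture("captures/metric-test-example.zip")
actions = [
    {"type": "sleep", "start": 0.0},
    {"type": "tap", "start": 1.5},
]

metrics = get_standard_metrics(capture, actions)
print("fps: %s, time to response: %s"
      % (metrics.get("fps"), metrics.get("timetoresponse")))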
Example 2
def get_standard_metric_metadata(capture):
    return {
        'framediffsums': videocapture.get_framediff_sums(capture),
        'framesobelentropies': videocapture.get_frame_entropies(
            capture, sobelized=True)
    }
Example 3
def get_standard_metrics(capture, actions):
    analysis_props = _get_analysis_props(capture.metadata['captureDevice'])

    metrics = {}
    metrics['uniqueframes'] = videocapture.get_num_unique_frames(
        capture, threshold=analysis_props['animation_threshold'])
    metrics['fps'] = videocapture.get_fps(
        capture, threshold=analysis_props['animation_threshold'])
    metrics['checkerboard'] = videocapture.get_checkerboarding_area_duration(
        capture)
    if actions:
        # get the delta between the first non-sleep action being fired and
        # there being a visible change
        first_non_sleep_action = None
        for action in actions:
            if action['type'] != 'sleep':
                first_non_sleep_action = action
                break
        if first_non_sleep_action:
            framediffs = videocapture.get_framediff_sums(capture)
            for (i, framediff) in enumerate(framediffs):
                t = i / float(capture.fps)
                if first_non_sleep_action['start'] < t and \
                        framediff >= analysis_props['input_threshold']:
                    metrics['timetoresponse'] = (
                        t - first_non_sleep_action['start'])
                    return metrics

    return metrics
Example 4
def get_standard_metrics(capture, actions):
    analysis_props = _get_analysis_props(capture.metadata['captureDevice'])

    metrics = {}
    metrics['uniqueframes'] = videocapture.get_num_unique_frames(
        capture, threshold=analysis_props['animation_threshold'])
    metrics['fps'] = videocapture.get_fps(
        capture, threshold=analysis_props['animation_threshold'])
    metrics['checkerboard'] = videocapture.get_checkerboarding_area_duration(capture)
    if actions:
        # get the delta between the first non-sleep action being fired and
        # there being a visible change
        first_non_sleep_action = None
        for action in actions:
            if action['type'] != 'sleep':
                first_non_sleep_action = action
                break
        if first_non_sleep_action:
            framediffs = videocapture.get_framediff_sums(capture)
            for (i, framediff) in enumerate(framediffs):
                t = i/float(capture.fps)
                if first_non_sleep_action['start'] < t and \
                        framediff >= analysis_props['input_threshold']:
                    metrics['timetoresponse'] = (t - first_non_sleep_action['start'])
                    return metrics

    return metrics
Example 5
    def GET(self, name):
        capture = videocapture.Capture(os.path.join(CAPTURE_DIR, name))
        return videocapture.get_framediff_sums(capture)
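The handler above looks like a method of a web.py-style request class; a minimal sketch of how such a handler is typically mounted, assuming web.py conventions (the route, class name, and CAPTURE_DIR value are illustrative, and the explicit json.dumps is added only for clarity, where the original simply returns the list):

import json
import os

import videocapture
import web  # web.py

CAPTURE_DIR = "captures"  # assumption: the real constant is defined elsewhere

urls = ("/framediff/(.*)", "FrameDiff")  # illustrative route and class name


class FrameDiff(object):
    def GET(self, name):
        capture = videocapture.Capture(os.path.join(CAPTURE_DIR, name))
        # serialize explicitly so the client receives JSON rather than a repr
        return json.dumps(videocapture.get_framediff_sums(capture))


if __name__ == "__main__":
    web.application(urls, globals()).run()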
Example 6
def run_test(device,
             outputdir,
             outputfile,
             test,
             url_params,
             num_runs,
             startup_test,
             no_capture,
             get_internal_checkerboard_stats,
             apk=None,
             appname=None,
             appdate=None,
             profile_file=None,
             dmtype="adb",
             host=None,
             port=None):
    if apk:
        appinfo = eideticker.get_fennec_appinfo(apk)
        appname = appinfo['appname']
        print "Installing %s (version: %s, revision %s)" % (
            appinfo['appname'], appinfo['version'], appinfo['revision'])
        device.updateApp(apk)
    else:
        appinfo = None

    captures = []

    for i in range(num_runs):
        # Kill any existing instances of the processes
        device.killProcess(appname)

        # Now run the test
        capture_file = os.path.join(
            CAPTURE_DIR, "metric-test-%s-%s.zip" % (appname, int(time.time())))
        args = ["runtest.py", "--url-params", url_params, appname, test]
        if get_internal_checkerboard_stats:
            checkerboard_logfile = tempfile.NamedTemporaryFile()
            args.extend(["--checkerboard-log-file", checkerboard_logfile.name])
        if startup_test:
            args.extend(["--startup-test"])
        if no_capture:
            args.extend(["--no-capture"])
        else:
            args.extend(["--capture-file", capture_file])
        if profile_file:
            args.extend(["--profile-file", profile_file])
        if dmtype:
            args.extend(["-m", dmtype])
        if host:
            args.extend(["--host", host])
        if port:
            args.extend(["--port", port])
        print args
        retval = subprocess.call(args)
        if retval != 0:
            raise Exception("Failed to run test %s for %s" % (test, appname))

        capture_result = {}
        if not no_capture:
            capture_result['file'] = capture_file

            capture = videocapture.Capture(capture_file)

            framediff_sums = videocapture.get_framediff_sums(capture)
            if startup_test:
                capture_result['stableframe'] = videocapture.get_stable_frame(
                    capture)
            else:
                capture_result[
                    'uniqueframes'] = videocapture.get_num_unique_frames(
                        capture)
                capture_result['fps'] = videocapture.get_fps(capture)
                capture_result[
                    'checkerboard'] = videocapture.get_checkerboarding_area_duration(
                        capture)
            if outputdir:
                video_path = os.path.join('videos',
                                          'video-%s.webm' % time.time())
                video_file = os.path.join(outputdir, video_path)
                open(video_file, 'w').write(capture.get_video().read())
                capture_result['video'] = video_path

        if get_internal_checkerboard_stats:
            internal_checkerboard_totals = parse_checkerboard_log(
                checkerboard_logfile.name)
            capture_result[
                'internalcheckerboard'] = internal_checkerboard_totals

        captures.append(capture_result)

    appkey = appname
    if appdate:
        appkey = appdate.isoformat()
    else:
        appkey = appname

    if appinfo and appinfo.get('revision'):
        display_key = "%s (%s)" % (appkey, appinfo['revision'])
    else:
        display_key = appkey
    print "=== Results for %s ===" % display_key

    if not no_capture:
        if startup_test:
            print "  First stable frames:"
            print "  %s" % map(lambda c: c['stableframe'], captures)
            print
        else:
            print "  Number of unique frames:"
            print "  %s" % map(lambda c: c['uniqueframes'], captures)
            print

            print "  Average number of unique frames per second:"
            print "  %s" % map(lambda c: c['fps'], captures)
            print

            print "  Checkerboard area/duration (sum of percents NOT percentage):"
            print "  %s" % map(lambda c: c['checkerboard'], captures)
            print

        print "  Capture files (for further reference):"
        print "  Capture files: %s" % map(lambda c: c['file'], captures)
        print

    if get_internal_checkerboard_stats:
        print "  Internal Checkerboard Stats (sum of percents, not percentage):"
        print "  %s" % map(lambda c: c['internalcheckerboard'], captures)
        print

    if outputfile:
        resultdict = {'title': test, 'data': {}}
        if os.path.isfile(outputfile):
            resultdict.update(json.loads(open(outputfile).read()))

        if not resultdict['data'].get(appkey):
            resultdict['data'][appkey] = []
        resultdict['data'][appkey].extend(captures)

        with open(outputfile, 'w') as f:
            f.write(json.dumps(resultdict))
Example 7
def runtest(device_prefs, testname, options, apk=None, appname=None, appdate=None):
    device = None
    if apk:
        appinfo = eideticker.get_fennec_appinfo(apk)
        appname = appinfo["appname"]
        print "Installing %s (version: %s, revision %s)" % (appinfo["appname"], appinfo["version"], appinfo["revision"])
        device = eideticker.getDevice(**device_prefs)
        device.updateApp(apk)
    else:
        appinfo = None

    testinfo = eideticker.get_testinfo(testname)
    stableframecapture = (
        testinfo["type"] in ("startup", "webstartup") or testinfo["defaultMeasure"] == "timetostableframe"
    )

    capture_results = []

    for i in range(options.num_runs):
        # Kill any existing instances of the processes (for Android)
        if device:
            device.killProcess(appname)

        # Now run the test
        curtime = int(time.time())
        capture_file = os.path.join(CAPTURE_DIR, "metric-test-%s-%s.zip" % (appname, curtime))
        if options.enable_profiling:
            profile_file = os.path.join(PROFILE_DIR, "profile-%s-%s.zip" % (appname, curtime))
        else:
            profile_file = None

        current_date = time.strftime("%Y-%m-%d")
        capture_name = "%s - %s (taken on %s)" % (testname, appname, current_date)

        if options.prepare_test:
            eideticker.prepare_test(testname, device_prefs)

        testlog = eideticker.run_test(
            testname,
            options.capture_device,
            appname,
            capture_name,
            device_prefs,
            extra_prefs=options.extra_prefs,
            extra_env_vars=options.extra_env_vars,
            log_checkerboard_stats=options.get_internal_checkerboard_stats,
            profile_file=profile_file,
            capture_area=options.capture_area,
            no_capture=options.no_capture,
            fps=options.fps,
            capture_file=capture_file,
            wifi_settings_file=options.wifi_settings_file,
            sync_time=options.sync_time,
        )

        capture_result = {}
        if not options.no_capture:
            capture_result["file"] = capture_file

            capture = videocapture.Capture(capture_file)
            capture_result["captureFPS"] = capture.fps

            if stableframecapture:
                capture_result["timetostableframe"] = eideticker.get_stable_frame_time(capture)
            else:
                capture_result.update(eideticker.get_standard_metrics(capture, testlog.actions))
            if options.outputdir:
                # video
                video_relpath = os.path.join("videos", "video-%s.webm" % time.time())
                video_path = os.path.join(options.outputdir, video_relpath)
                open(video_path, "w").write(capture.get_video().read())
                capture_result["video"] = video_relpath

                # framediff
                framediff_relpath = os.path.join("framediffs", "framediff-%s.json" % time.time())
                framediff_path = os.path.join(options.outputdir, framediff_relpath)
                with open(framediff_path, "w") as f:
                    framediff = videocapture.get_framediff_sums(capture)
                    f.write(json.dumps({"diffsums": framediff}))
                capture_result["frameDiff"] = framediff_relpath

        if options.enable_profiling:
            capture_result["profile"] = profile_file

        if options.get_internal_checkerboard_stats:
            capture_result["internalcheckerboard"] = testlog.checkerboard_percent_totals

        capture_results.append(capture_result)

    if options.devicetype == "b2g":
        # FIXME: get information from sources.xml and application.ini on
        # device, as we do in update-dashboard.py
        display_key = appkey = "FirefoxOS"
    else:
        appkey = appname
        if appdate:
            appkey = appdate.isoformat()
        else:
            appkey = appname

        if appinfo and appinfo.get("revision"):
            display_key = "%s (%s)" % (appkey, appinfo["revision"])
        else:
            display_key = appkey

    print "=== Results on %s for %s ===" % (testname, display_key)

    if not options.no_capture:
        if stableframecapture:
            print "  Times to first stable frame (seconds):"
            print "  %s" % map(lambda c: c["timetostableframe"], capture_results)
            print
        else:
            print "  Number of unique frames:"
            print "  %s" % map(lambda c: c["uniqueframes"], capture_results)
            print

            print "  Average number of unique frames per second:"
            print "  %s" % map(lambda c: c["fps"], capture_results)
            print

            print "  Checkerboard area/duration (sum of percents NOT percentage):"
            print "  %s" % map(lambda c: c["checkerboard"], capture_results)
            print

            print "  Time to first input response: "
            print "  %s" % map(lambda c: c.get("timetoresponse"), capture_results)
            print

        print "  Capture files:"
        print "  Capture files: %s" % map(lambda c: c["file"], capture_results)
        print

    if options.enable_profiling:
        print "  Profile files:"
        print "  Profile files: %s" % map(lambda c: c["profile"], capture_results)
        print

    if options.get_internal_checkerboard_stats:
        print "  Internal Checkerboard Stats (sum of percents, not percentage):"
        print "  %s" % map(lambda c: c["internalcheckerboard"], capture_results)
        print

    if options.outputdir:
        outputfile = os.path.join(options.outputdir, "metric.json")
        resultdict = {"title": testname, "data": {}}
        if os.path.isfile(outputfile):
            resultdict.update(json.loads(open(outputfile).read()))

        if not resultdict["data"].get(appkey):
            resultdict["data"][appkey] = []
        resultdict["data"][appkey].extend(capture_results)

        with open(outputfile, "w") as f:
            f.write(json.dumps(resultdict))
Example 8
    if baseline:
        datapoint.update({'baseline': True})

    metrics = {}
    if testinfo['type'] == 'startup' or testinfo['type'] == 'webstartup' or \
            testinfo['defaultMeasure'] == 'timetostableframe':
        metrics['timetostableframe'] = eideticker.get_stable_frame_time(
            capture)
    else:
        # standard test metrics
        metrics = eideticker.get_standard_metrics(capture, testlog.actions)
    datapoint.update(metrics)
    metadata['metrics'] = metrics

    metadata['frameDiffSums'] = videocapture.get_framediff_sums(capture)
    metadata['frameSobelEntropies'] = videocapture.get_frame_entropies(capture,
                                                                       sobelized=True)

    if enable_profiling:
        metadata['profile'] = profile_path

    # add logs (if any) to test metadata
    metadata.update(testlog.getdict())

    # Add datapoint
    data['testdata'][productname][appdate].append(datapoint)

    # Dump metadata
    open(os.path.join(outputdir, 'metadata', '%s.json' % datapoint['uuid']),
         'w').write(json.dumps(metadata))
Example 9
def get_standard_metric_metadata(capture):
    return {
        "framediffsums": videocapture.get_framediff_sums(capture),
        "framecannyentropies": videocapture.get_frame_entropies(capture, edge_detection="canny"),
    }
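For reference, a minimal sketch of how the per-frame data returned above might be consumed, assuming the diff sums come back as a plain list of numbers (as the thresholding loop in Example 1 suggests); the capture path is illustrative:

import videocapture

capture = videocapture.Capture("captures/metric-test-example.zip")  # illustrative
meta = get_standard_metric_metadata(capture)
diffsums = meta["framediffsums"]

# Frame with the largest change and its timestamp, using the same
# index / capture.fps conversion as the timetoresponse loop in Example 1.
peak_frame = max(range(len(diffsums)), key=lambda i: diffsums[i])
print("peak diff at frame %d (t=%.2fs)"
      % (peak_frame, peak_frame / float(capture.fps)))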
Example 10
def get_standard_metric_metadata(capture):
    return {'framediffsums': videocapture.get_framediff_sums(capture),
            'framecannyentropies': videocapture.get_frame_entropies(
                capture, edge_detection='canny')}
Example 11
def run_test(device, outputdir, outputfile, test, url_params, num_runs,
             startup_test, no_capture, get_internal_checkerboard_stats,
             apk=None, appname = None, appdate = None, profile_file=None,
             dmtype="adb", host=None, port=None):
    if apk:
        appinfo = eideticker.get_fennec_appinfo(apk)
        appname = appinfo['appname']
        print "Installing %s (version: %s, revision %s)" % (appinfo['appname'],
                                                            appinfo['version'],
                                                            appinfo['revision'])
        device.updateApp(apk)
    else:
        appinfo = None

    captures = []

    for i in range(num_runs):
        # Kill any existing instances of the processes
        device.killProcess(appname)

        # Now run the test
        capture_file = os.path.join(CAPTURE_DIR,
                                    "metric-test-%s-%s.zip" % (appname,
                                                               int(time.time())))
        args = ["runtest.py", "--url-params", url_params, appname, test]
        if get_internal_checkerboard_stats:
            checkerboard_logfile = tempfile.NamedTemporaryFile()
            args.extend(["--checkerboard-log-file", checkerboard_logfile.name])
        if startup_test:
            args.extend(["--startup-test"])
        if no_capture:
            args.extend(["--no-capture"])
        else:
            args.extend(["--capture-file", capture_file])
        if profile_file:
            args.extend(["--profile-file", profile_file])
        if dmtype:
            args.extend(["-m", dmtype])
        if host:
            args.extend(["--host", host])
        if port:
            args.extend(["--port", port])
        print args
        retval = subprocess.call(args)
        if retval != 0:
            raise Exception("Failed to run test %s for %s" % (test, appname))

        capture_result = {}
        if not no_capture:
            capture_result['file'] = capture_file

            capture = videocapture.Capture(capture_file)

            framediff_sums = videocapture.get_framediff_sums(capture)
            if startup_test:
                capture_result['stableframe'] = videocapture.get_stable_frame(capture)
            else:
                capture_result['uniqueframes'] = videocapture.get_num_unique_frames(capture)
                capture_result['fps'] = videocapture.get_fps(capture)
                capture_result['checkerboard'] = videocapture.get_checkerboarding_area_duration(capture)
            if outputdir:
                video_path = os.path.join('videos', 'video-%s.webm' % time.time())
                video_file = os.path.join(outputdir, video_path)
                open(video_file, 'w').write(capture.get_video().read())
                capture_result['video'] = video_path

        if get_internal_checkerboard_stats:
            internal_checkerboard_totals = parse_checkerboard_log(checkerboard_logfile.name)
            capture_result['internalcheckerboard'] = internal_checkerboard_totals

        captures.append(capture_result)

    appkey = appname
    if appdate:
        appkey = appdate.isoformat()
    else:
        appkey = appname

    if appinfo and appinfo.get('revision'):
        display_key = "%s (%s)" % (appkey, appinfo['revision'])
    else:
        display_key = appkey
    print "=== Results for %s ===" % display_key

    if not no_capture:
        if startup_test:
            print "  First stable frames:"
            print "  %s" % map(lambda c: c['stableframe'], captures)
            print
        else:
            print "  Number of unique frames:"
            print "  %s" % map(lambda c: c['uniqueframes'], captures)
            print

            print "  Average number of unique frames per second:"
            print "  %s" % map(lambda c: c['fps'], captures)
            print

            print "  Checkerboard area/duration (sum of percents NOT percentage):"
            print "  %s" % map(lambda c: c['checkerboard'], captures)
            print

        print "  Capture files (for further reference):"
        print "  Capture files: %s" % map(lambda c: c['file'], captures)
        print

    if get_internal_checkerboard_stats:
        print "  Internal Checkerboard Stats (sum of percents, not percentage):"
        print "  %s" % map(lambda c: c['internalcheckerboard'], captures)
        print

    if outputfile:
        resultdict = { 'title': test, 'data': {} }
        if os.path.isfile(outputfile):
            resultdict.update(json.loads(open(outputfile).read()))

        if not resultdict['data'].get(appkey):
            resultdict['data'][appkey] = []
        resultdict['data'][appkey].extend(captures)

        with open(outputfile, 'w') as f:
            f.write(json.dumps(resultdict))
Example 12
    def GET(self, name):
        capture = videocapture.Capture(os.path.join(CAPTURE_DIR, name))
        return videocapture.get_framediff_sums(capture)
Example 13
    if baseline:
        datapoint.update({ 'baseline': True })

    if testinfo['type'] == 'startup' or testinfo['type'] == 'webstartup' or \
            testinfo['defaultMeasure'] == 'timetostableframe':
        datapoint['timetostableframe'] = eideticker.get_stable_frame_time(capture)
    else:
        # standard test metrics
        datapoint.update(eideticker.get_standard_metrics(capture,
                                                         testlog.actions))

    framediff_relpath = os.path.join('framediffs', 'framediff-%s.json' % time.time())
    framediff_path = os.path.join(outputdir, framediff_relpath)
    with open(framediff_path, 'w') as f:
        framediff = videocapture.get_framediff_sums(capture)
        f.write(json.dumps({ 'diffsums': framediff }))
    datapoint['frameDiff'] = framediff_relpath

    if enable_profiling:
        datapoint['profile'] = profile_path

    if log_http_requests:
        request_log_relpath = os.path.join('httplogs',
                                        'http-log-%s.json' % time.time())
        testlog.save_logs(http_request_log_path=
                          os.path.join(outputdir, request_log_relpath))
        datapoint['httpLog'] = request_log_relpath

    if log_actions:
        actions_log_relpath = os.path.join('actionlogs',
Example 14
def runtest(device_prefs,
            testname,
            options,
            apk=None,
            appname=None,
            appdate=None):
    device = None
    if apk:
        appinfo = eideticker.get_fennec_appinfo(apk)
        appname = appinfo['appname']
        print "Installing %s (version: %s, revision %s)" % (
            appinfo['appname'], appinfo['version'], appinfo['revision'])
        device = eideticker.getDevice(**device_prefs)
        device.updateApp(apk)
    else:
        appinfo = None

    testinfo = eideticker.get_testinfo(testname)
    stableframecapture = (testinfo['type'] in ('startup', 'webstartup')
                          or testinfo['defaultMeasure'] == 'timetostableframe')

    capture_results = []

    if options.prepare_test:
        eideticker.prepare_test(testname, device_prefs)

    for i in range(options.num_runs):
        # Kill any existing instances of the processes (for Android)
        if device:
            device.killProcess(appname)

        # Now run the test
        curtime = int(time.time())
        capture_file = os.path.join(
            CAPTURE_DIR, "metric-test-%s-%s.zip" % (appname, curtime))
        if options.enable_profiling and options.outputdir:
            profile_relpath = os.path.join('profiles',
                                           'sps-profile-%s.zip' % time.time())
            profile_file = os.path.join(options.outputdir, profile_relpath)
        else:
            profile_file = None

        current_date = time.strftime("%Y-%m-%d")
        capture_name = "%s - %s (taken on %s)" % (testname, appname,
                                                  current_date)

        testlog = eideticker.run_test(
            testname,
            options.capture_device,
            appname,
            capture_name,
            device_prefs,
            extra_prefs=options.extra_prefs,
            extra_env_vars=options.extra_env_vars,
            log_checkerboard_stats=options.get_internal_checkerboard_stats,
            profile_file=profile_file,
            capture_area=options.capture_area,
            no_capture=options.no_capture,
            fps=options.fps,
            capture_file=capture_file,
            wifi_settings_file=options.wifi_settings_file,
            sync_time=options.sync_time)

        capture_uuid = uuid.uuid1().hex
        datapoint = {'uuid': capture_uuid}
        metadata = {}
        metrics = {}

        if not options.no_capture:
            capture = videocapture.Capture(capture_file)

            datapoint['captureFile'] = metadata['captureFile'] = capture_file
            metadata['captureFPS'] = capture.fps
            metadata['generatedVideoFPS'] = capture.generated_video_fps

            if stableframecapture:
                metrics['timetostableframe'] = \
                    eideticker.get_stable_frame_time(capture)
            else:
                metrics.update(
                    eideticker.get_standard_metrics(capture, testlog.actions))
            metadata['metrics'] = metrics

            metadata['framediffSums'] = videocapture.get_framediff_sums(
                capture)

            if options.outputdir:
                # video
                video_relpath = os.path.join('videos',
                                             'video-%s.webm' % time.time())
                video_path = os.path.join(options.outputdir, video_relpath)
                open(video_path, 'w').write(capture.get_video().read())
                metadata['video'] = video_relpath

        if options.get_internal_checkerboard_stats:
            metrics['internalcheckerboard'] = \
                testlog.checkerboard_percent_totals

        # Want metrics data in data, so we can graph everything at once
        datapoint.update(metrics)

        if options.enable_profiling:
            metadata['profile'] = profile_file

        # dump metadata
        if options.outputdir:
            # metadata
            metadata_path = os.path.join(options.outputdir, 'metadata',
                                         '%s.json' % capture_uuid)
            open(metadata_path, 'w').write(json.dumps(metadata))

        capture_results.append(datapoint)

    if options.devicetype == "b2g":
        # FIXME: get information from sources.xml and application.ini on
        # device, as we do in update-dashboard.py
        display_key = appkey = "FirefoxOS"
    else:
        appkey = appname
        if appdate:
            appkey = appdate.isoformat()
        else:
            appkey = appname

        if appinfo and appinfo.get('revision'):
            display_key = "%s (%s)" % (appkey, appinfo['revision'])
        else:
            display_key = appkey

    print "=== Results on %s for %s ===" % (testname, display_key)

    if not options.no_capture:
        if stableframecapture:
            print "  Times to first stable frame (seconds):"
            print "  %s" % map(lambda c: c['timetostableframe'],
                               capture_results)
            print
        else:
            print "  Number of unique frames:"
            print "  %s" % map(lambda c: c['uniqueframes'], capture_results)
            print

            print "  Average number of unique frames per second:"
            print "  %s" % map(lambda c: c['fps'], capture_results)
            print

            print "  Checkerboard area/duration (sum of percents NOT "
            "percentage):"
            print "  %s" % map(lambda c: c['checkerboard'], capture_results)
            print

            print "  Time to first input response: "
            print "  %s" % map(lambda c: c.get('timetoresponse'),
                               capture_results)
            print

        print "  Capture files:"
        print "  Capture files: %s" % map(lambda c: c['captureFile'],
                                          capture_results)
        print

    if options.get_internal_checkerboard_stats:
        print "  Internal Checkerboard Stats (sum of percents, not "
        "percentage):"
        print "  %s" % map(lambda c: c['internalcheckerboard'],
                           capture_results)
        print

    if options.outputdir:
        outputfile = os.path.join(options.outputdir, "metric.json")
        resultdict = {'title': testname, 'data': {}}
        if os.path.isfile(outputfile):
            resultdict.update(json.loads(open(outputfile).read()))

        if not resultdict['data'].get(appkey):
            resultdict['data'][appkey] = []
        resultdict['data'][appkey].extend(capture_results)

        with open(outputfile, 'w') as f:
            f.write(json.dumps(resultdict))
Example 15
def get_standard_metric_metadata(capture):
    return {'framediffsums': videocapture.get_framediff_sums(capture),
            'framesobelentropies': videocapture.get_frame_entropies(
                capture, sobelized=True)}
Example 16
    if baseline:
        datapoint.update({'baseline': True})

    metrics = {}
    if testinfo['type'] == 'startup' or testinfo['type'] == 'webstartup' or \
            testinfo['defaultMeasure'] == 'timetostableframe':
        metrics['timetostableframe'] = eideticker.get_stable_frame_time(
            capture)
    else:
        # standard test metrics
        metrics = eideticker.get_standard_metrics(capture, testlog.actions)
    datapoint.update(metrics)
    metadata['metrics'] = metrics

    metadata['frameDiffSums'] = videocapture.get_framediff_sums(capture)
    metadata['frameSobelEntropies'] = videocapture.get_frame_entropies(
        capture, sobelized=True)

    if enable_profiling:
        metadata['profile'] = profile_path

    # add logs (if any) to test metadata
    metadata.update(testlog.getdict())

    # Add datapoint
    data['testdata'][productname][appdate].append(datapoint)

    # Dump metadata
    open(os.path.join(outputdir, 'metadata', '%s.json' % datapoint['uuid']),
         'w').write(json.dumps(metadata))
Example 17
        datapoint['timetostableframe'] = videocapture.get_stable_frame_time(capture)
    else:
        # standard test metrics
        if capture_device == "pointgrey":
            # even with median filtering, pointgrey captures tend to have a
            # bunch of visual noise -- try to compensate for this by setting
            # a higher threshold for frames to be considered different
            threshold = 2000
        datapoint['uniqueframes'] = videocapture.get_num_unique_frames(capture, threshold=threshold)
        datapoint['fps'] = videocapture.get_fps(capture, threshold=threshold)
        datapoint['checkerboard'] = videocapture.get_checkerboarding_area_duration(capture)

    framediff_relpath = os.path.join('framediffs', 'framediff-%s.json' % time.time())
    framediff_path = os.path.join(outputdir, framediff_relpath)
    with open(framediff_path, 'w') as f:
        framediff = videocapture.get_framediff_sums(capture, threshold=threshold)
        f.write(json.dumps({ 'diffsums': framediff }))
    datapoint['frameDiff'] = framediff_relpath

    if enable_profiling:
        datapoint['profile'] = profile_path

    if log_http_requests:
        datapoint['httpLog'] = request_log_path

    data['testdata'][productname][appdate].append(datapoint)

    # Write the data to disk immediately (so we don't lose it if we fail later)
    datafile_dir = os.path.dirname(datafile)
    if not os.path.exists(datafile_dir):
        os.mkdir(datafile_dir)