Example #1
0
    def run_test_group(self):
        self.results = []

        # reset number of passed/failed tests
        self.numpassed = 0
        self.numfailed = 0

        # build our tps.xpi extension
        self.extensions = []
        self.extensions.append(os.path.join(self.extensionDir, 'tps'))
        self.extensions.append(os.path.join(self.extensionDir, "mozmill"))

        # build the test list
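        # self.testfile is either a JSON manifest with a 'tests' list or a
        # single test file; the ValueError fallback below handles the latter.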
        try:
            f = open(self.testfile)
            jsondata = f.read()
            f.close()
            testfiles = json.loads(jsondata)
            testlist = testfiles['tests']
        except ValueError:
            testlist = [os.path.basename(self.testfile)]
        testdir = os.path.dirname(self.testfile)

        self.mozhttpd = MozHttpd(port=4567, docroot=testdir)
        self.mozhttpd.start()

        # run each test, and save the results
        for test in testlist:
            result = self.run_single_test(testdir, test)

            if not self.productversion:
                self.productversion = result['productversion']
            if not self.addonversion:
                self.addonversion = result['addonversion']

            self.results.append({
                'state': result['state'],
                'name': result['name'],
                'message': result['message'],
                'logdata': result['logdata']
            })
            if result['state'] == 'TEST-PASS':
                self.numpassed += 1
            else:
                self.numfailed += 1
                if self.stop_on_error:
                    print('\nTest failed with --stop-on-error specified; '
                          'not running any more tests.\n')
                    break

        self.mozhttpd.stop()

        # generate the postdata we'll use to post the results to the db
        self.postdata = {
            'tests': self.results,
            'os': '%s %sbit' % (mozinfo.version, mozinfo.bits),
            'testtype': 'crossweave',
            'productversion': self.productversion,
            'addonversion': self.addonversion,
            'synctype': self.synctype,
        }
Example #2
0
 def start_httpd(self):
     host = moznetwork.get_ip()
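     # port=0 lets the OS pick a free port; the bound port is read back below.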
     self.httpd = MozHttpd(host=host,
                           port=0,
                           docroot=os.path.join(os.path.dirname(os.path.dirname(__file__)), 'www'))
     self.httpd.start()
     self.baseurl = 'http://%s:%d/' % (host, self.httpd.httpd.server_port)
     self.logger.info('running webserver on %s' % self.baseurl)
Example #3
0
 def start_httpd(self):
     host = moznetwork.get_ip()
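     # Find a free port by binding and closing a throwaway socket (note: the
     # port could in principle be taken again before MozHttpd binds it).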
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     s.bind(("",0))
     port = s.getsockname()[1]
     s.close()
     self.baseurl = 'http://%s:%d/' % (host, port)
     self.logger.info('running webserver on %s' % self.baseurl)
     self.httpd = MozHttpd(host=host,
                           port=port,
                           docroot=os.path.join(os.path.dirname(__file__), 'www'))
     self.httpd.start()
Example #4
0
 def runServer(self):
     """
     Start a basic HTML server to host
     test related files.
     """
     if not self.options.serverPath:
         self.logger.warning('Can\'t start HTTP server, --server-path not specified')
         return
     self.logger.debug('Starting server on port ' + str(self.options.serverPort))
     self.server = MozHttpd(port=self.options.serverPort,
                            docroot=self.options.serverPath,
                            proxy_host_dirs=self.options.proxyHostDirs)
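     # block=False starts the server on a background thread so this call
     # returns immediately.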
     self.server.start(block=False)
Example #5
0
 def start(self, block=False):
     if self.alive:
         return
     self._server = MozHttpd(host=self.host,
                             port=self.port,
                             docroot=self.root,
                             urlhandlers=[{
                                 "method": "POST",
                                 "path": "/file_upload",
                                 "function": upload_handler
                             }])
     self._server.start(block=block)
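     # If port=0 was requested, record the port the OS actually assigned.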
     self.port = self._server.httpd.server_port
     self.base_url = self.get_url()
Example #6
0
    cli = CLI()
    debug_args, interactive = cli.debugger_arguments()
    runner_args = cli.runner_args()

    build = MozbuildObject.from_environment()

    binary = runner_args.get('binary')
    if not binary:
        binary = build.get_binary_path(where="staged-package")

    path_mappings = {
        k: os.path.join(build.topsrcdir, v)
        for k, v in PATH_MAPPINGS.items()
    }
    httpd = MozHttpd(port=PORT,
                     docroot=os.path.join(build.topsrcdir, "build", "pgo"),
                     path_mappings=path_mappings)
    httpd.start(block=False)

    locations = ServerLocations()
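    # Mark the local server as the primary test origin with privileged access.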
    locations.add_host(host='127.0.0.1',
                       port=PORT,
                       options='primary,privileged')

    with TemporaryDirectory() as profilePath:
        # TODO: refactor this into mozprofile
        profile_data_dir = os.path.join(build.topsrcdir, 'testing', 'profiles')
        with open(os.path.join(profile_data_dir, 'profiles.json'), 'r') as fh:
            base_profiles = json.load(fh)['profileserver']

        prefpaths = [os.path.join(profile_data_dir, profile, 'user.js')
                     for profile in base_profiles]
Example #7
0
    def valgrind_test(self, suppressions):

        from mozfile import TemporaryDirectory
        from mozhttpd import MozHttpd
        from mozprofile import FirefoxProfile, Preferences
        from mozprofile.permissions import ServerLocations
        from mozrunner import FirefoxRunner
        from mozrunner.utils import findInPath
        from six import string_types
        from valgrind.output_handler import OutputHandler

        build_dir = os.path.join(self.topsrcdir, "build")

        # XXX: currently we just use the PGO inputs for Valgrind runs.  This may
        # change in the future.
        httpd = MozHttpd(docroot=os.path.join(build_dir, "pgo"))
        httpd.start(block=False)

        with TemporaryDirectory() as profilePath:
            # TODO: refactor this into mozprofile
            profile_data_dir = os.path.join(self.topsrcdir, "testing",
                                            "profiles")
            with open(os.path.join(profile_data_dir, "profiles.json"),
                      "r") as fh:
                base_profiles = json.load(fh)["valgrind"]

            prefpaths = [
                os.path.join(profile_data_dir, profile, "user.js")
                for profile in base_profiles
            ]
            prefs = {}
            for path in prefpaths:
                prefs.update(Preferences.read_prefs(path))

            interpolation = {
                "server": "%s:%d" % httpd.httpd.server_address,
            }
            for k, v in prefs.items():
                if isinstance(v, string_types):
                    v = v.format(**interpolation)
                prefs[k] = Preferences.cast(v)

            quitter = os.path.join(self.topsrcdir, "tools", "quitter",
                                   "*****@*****.**")

            locations = ServerLocations()
            locations.add_host(host="127.0.0.1",
                               port=httpd.httpd.server_port,
                               options="primary")

            profile = FirefoxProfile(
                profile=profilePath,
                preferences=prefs,
                addons=[quitter],
                locations=locations,
            )

            firefox_args = [httpd.get_url()]

            env = os.environ.copy()
            env["G_SLICE"] = "always-malloc"
            env["MOZ_CC_RUN_DURING_SHUTDOWN"] = "1"
            env["MOZ_CRASHREPORTER_NO_REPORT"] = "1"
            env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"] = "1"
            env["XPCOM_DEBUG_BREAK"] = "warn"

            outputHandler = OutputHandler(self.log)
            kp_kwargs = {
                "processOutputLine": [outputHandler],
                "universal_newlines": True,
            }

            valgrind = "valgrind"
            if not os.path.exists(valgrind):
                valgrind = findInPath(valgrind)

            valgrind_args = [
                valgrind,
                "--sym-offsets=yes",
                "--smc-check=all-non-file",
                "--vex-iropt-register-updates=allregs-at-mem-access",
                "--gen-suppressions=all",
                "--num-callers=36",
                "--leak-check=full",
                "--show-possibly-lost=no",
                "--track-origins=yes",
                "--trace-children=yes",
                "-v",  # Enable verbosity to get the list of used suppressions
                # Avoid excessive delays in the presence of spinlocks.
                # See bug 1309851.
                "--fair-sched=yes",
                # Keep debuginfo after library unmap.  See bug 1382280.
                "--keep-debuginfo=yes",
                # Reduce noise level on rustc and/or LLVM compiled code.
                # See bug 1365915
                "--expensive-definedness-checks=yes",
                # Compensate for the compiler inlining `new` but not `delete`
                # or vice versa.
                "--show-mismatched-frees=no",
            ]

            for s in suppressions:
                valgrind_args.append("--suppressions=" + s)

            supps_dir = os.path.join(build_dir, "valgrind")
            supps_file1 = os.path.join(supps_dir, "cross-architecture.sup")
            valgrind_args.append("--suppressions=" + supps_file1)

            if mozinfo.os == "linux":
                machtype = {
                    "x86_64": "x86_64-pc-linux-gnu",
                    "x86": "i386-pc-linux-gnu",
                }.get(mozinfo.processor)
                if machtype:
                    supps_file2 = os.path.join(supps_dir, machtype + ".sup")
                    if os.path.isfile(supps_file2):
                        valgrind_args.append("--suppressions=" + supps_file2)

            exitcode = None
            timeout = 1800
            binary_not_found_exception = None
            try:
                runner = FirefoxRunner(
                    profile=profile,
                    binary=self.get_binary_path(),
                    cmdargs=firefox_args,
                    env=env,
                    process_args=kp_kwargs,
                )
                runner.start(debug_args=valgrind_args)
                exitcode = runner.wait(timeout=timeout)
            except BinaryNotFoundException as e:
                binary_not_found_exception = e
            finally:
                errs = outputHandler.error_count
                supps = outputHandler.suppression_count
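                # With --gen-suppressions=all Valgrind emits one suppression per
                # reported error, so a count mismatch means the output was not
                # parsed cleanly.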
                if errs != supps:
                    status = 1  # turns the TBPL job orange
                    self.log(
                        logging.ERROR,
                        "valgrind-fail-parsing",
                        {
                            "errs": errs,
                            "supps": supps
                        },
                        "TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors "
                        "seen, but {supps} generated suppressions seen",
                    )

                elif errs == 0:
                    status = 0
                    self.log(
                        logging.INFO,
                        "valgrind-pass",
                        {},
                        "TEST-PASS | valgrind-test | valgrind found no errors",
                    )
                else:
                    status = 1  # turns the TBPL job orange
                    # We've already printed details of the errors.

                if binary_not_found_exception:
                    status = 2  # turns the TBPL job red
                    self.log(
                        logging.ERROR,
                        "valgrind-fail-errors",
                        {"error": str(binary_not_found_exception)},
                        "TEST-UNEXPECTED-FAIL | valgrind-test | {error}",
                    )
                    self.log(
                        logging.INFO,
                        "valgrind-fail-errors",
                        {"help": binary_not_found_exception.help()},
                        "{help}",
                    )
                elif exitcode is None:
                    status = 2  # turns the TBPL job red
                    self.log(
                        logging.ERROR,
                        "valgrind-fail-timeout",
                        {"timeout": timeout},
                        "TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out "
                        "(reached {timeout} second limit)",
                    )
                elif exitcode != 0:
                    status = 2  # turns the TBPL job red
                    self.log(
                        logging.ERROR,
                        "valgrind-fail-errors",
                        {"exitcode": exitcode},
                        "TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code "
                        "from Valgrind: {exitcode}",
                    )

                httpd.stop()

            return status
Example #8
0
    def valgrind_test(self, suppressions):

        from mozfile import TemporaryDirectory
        from mozhttpd import MozHttpd
        from mozprofile import FirefoxProfile, Preferences
        from mozprofile.permissions import ServerLocations
        from mozrunner import FirefoxRunner
        from mozrunner.utils import findInPath
        from six import string_types
        from valgrind.output_handler import OutputHandler

        build_dir = os.path.join(self.topsrcdir, 'build')

        # XXX: currently we just use the PGO inputs for Valgrind runs.  This may
        # change in the future.
        httpd = MozHttpd(docroot=os.path.join(build_dir, 'pgo'))
        httpd.start(block=False)

        with TemporaryDirectory() as profilePath:
            # TODO: refactor this into mozprofile
            profile_data_dir = os.path.join(self.topsrcdir, 'testing',
                                            'profiles')
            with open(os.path.join(profile_data_dir, 'profiles.json'),
                      'r') as fh:
                base_profiles = json.load(fh)['valgrind']

            prefpaths = [
                os.path.join(profile_data_dir, profile, 'user.js')
                for profile in base_profiles
            ]
            prefs = {}
            for path in prefpaths:
                prefs.update(Preferences.read_prefs(path))

            interpolation = {
                'server': '%s:%d' % httpd.httpd.server_address,
            }
            for k, v in prefs.items():
                if isinstance(v, string_types):
                    v = v.format(**interpolation)
                prefs[k] = Preferences.cast(v)

            quitter = os.path.join(self.topsrcdir, 'tools', 'quitter',
                                   '*****@*****.**')

            locations = ServerLocations()
            locations.add_host(host='127.0.0.1',
                               port=httpd.httpd.server_port,
                               options='primary')

            profile = FirefoxProfile(profile=profilePath,
                                     preferences=prefs,
                                     addons=[quitter],
                                     locations=locations)

            firefox_args = [httpd.get_url()]

            env = os.environ.copy()
            env['G_SLICE'] = 'always-malloc'
            env['MOZ_CC_RUN_DURING_SHUTDOWN'] = '1'
            env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
            env['MOZ_DISABLE_NONLOCAL_CONNECTIONS'] = '1'
            env['XPCOM_DEBUG_BREAK'] = 'warn'

            env.update(self.extra_environment_variables)

            outputHandler = OutputHandler(self.log)
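            # Every line of process output is fed to the handler, which counts
            # Valgrind errors and generated suppressions.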
            kp_kwargs = {'processOutputLine': [outputHandler]}

            valgrind = 'valgrind'
            if not os.path.exists(valgrind):
                valgrind = findInPath(valgrind)

            valgrind_args = [
                valgrind,
                '--sym-offsets=yes',
                '--smc-check=all-non-file',
                '--vex-iropt-register-updates=allregs-at-mem-access',
                '--gen-suppressions=all',
                '--num-callers=36',
                '--leak-check=full',
                '--show-possibly-lost=no',
                '--track-origins=yes',
                '--trace-children=yes',
                '-v',  # Enable verbosity to get the list of used suppressions
                # Avoid excessive delays in the presence of spinlocks.
                # See bug 1309851.
                '--fair-sched=yes',
                # Keep debuginfo after library unmap.  See bug 1382280.
                '--keep-debuginfo=yes',
                # Reduce noise level on rustc and/or LLVM compiled code.
                # See bug 1365915
                '--expensive-definedness-checks=yes',
            ]

            for s in suppressions:
                valgrind_args.append('--suppressions=' + s)

            supps_dir = os.path.join(build_dir, 'valgrind')
            supps_file1 = os.path.join(supps_dir, 'cross-architecture.sup')
            valgrind_args.append('--suppressions=' + supps_file1)

            if mozinfo.os == 'linux':
                machtype = {
                    'x86_64': 'x86_64-pc-linux-gnu',
                    'x86': 'i386-pc-linux-gnu',
                }.get(mozinfo.processor)
                if machtype:
                    supps_file2 = os.path.join(supps_dir, machtype + '.sup')
                    if os.path.isfile(supps_file2):
                        valgrind_args.append('--suppressions=' + supps_file2)

            exitcode = None
            timeout = 1800
            try:
                runner = FirefoxRunner(profile=profile,
                                       binary=self.get_binary_path(),
                                       cmdargs=firefox_args,
                                       env=env,
                                       process_args=kp_kwargs)
                runner.start(debug_args=valgrind_args)
                exitcode = runner.wait(timeout=timeout)

            finally:
                errs = outputHandler.error_count
                supps = outputHandler.suppression_count
                if errs != supps:
                    status = 1  # turns the TBPL job orange
                    self.log(
                        logging.ERROR, 'valgrind-fail-parsing', {
                            'errs': errs,
                            'supps': supps
                        },
                        'TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors '
                        'seen, but {supps} generated suppressions seen')

                elif errs == 0:
                    status = 0
                    self.log(
                        logging.INFO, 'valgrind-pass', {},
                        'TEST-PASS | valgrind-test | valgrind found no errors')
                else:
                    status = 1  # turns the TBPL job orange
                    # We've already printed details of the errors.

                if exitcode is None:
                    status = 2  # turns the TBPL job red
                    self.log(
                        logging.ERROR, 'valgrind-fail-timeout',
                        {'timeout': timeout},
                        'TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out '
                        '(reached {timeout} second limit)')
                elif exitcode != 0:
                    status = 2  # turns the TBPL job red
                    self.log(
                        logging.ERROR, 'valgrind-fail-errors',
                        {'exitcode': exitcode},
                        'TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code '
                        'from Valgrind: {exitcode}')

                httpd.stop()

            return status
Example #9
0
    def run_tests(self):
        """
        Generate the PGO profile data
        """
        from mozhttpd import MozHttpd
        from mozprofile import Preferences
        from mozdevice import ADBDevice, ADBTimeoutError
        from six import string_types
        from marionette_driver.marionette import Marionette

        app = self.query_package_name()

        IP = '10.0.2.2'
        PORT = 8888
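        # 10.0.2.2 is the emulator's alias for the host loopback, so pages
        # served by the MozHttpd instance below are reachable from the device.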

        PATH_MAPPINGS = {
            '/js-input/webkit/PerformanceTests':
            'third_party/webkit/PerformanceTests',
        }

        dirs = self.query_abs_dirs()
        topsrcdir = os.path.join(dirs['abs_work_dir'], 'src')
        adb = self.query_exe('adb')

        path_mappings = {
            k: os.path.join(topsrcdir, v)
            for k, v in PATH_MAPPINGS.items()
        }
        httpd = MozHttpd(port=PORT,
                         docroot=os.path.join(topsrcdir, "build", "pgo"),
                         path_mappings=path_mappings)
        httpd.start(block=False)

        profile_data_dir = os.path.join(topsrcdir, 'testing', 'profiles')
        with open(os.path.join(profile_data_dir, 'profiles.json'), 'r') as fh:
            base_profiles = json.load(fh)['profileserver']

        prefpaths = [
            os.path.join(profile_data_dir, profile, 'user.js')
            for profile in base_profiles
        ]

        prefs = {}
        for path in prefpaths:
            prefs.update(Preferences.read_prefs(path))

        interpolation = {
            "server": "%s:%d" % httpd.httpd.server_address,
            "OOP": "false"
        }
        for k, v in prefs.items():
            if isinstance(v, string_types):
                v = v.format(**interpolation)
            prefs[k] = Preferences.cast(v)

        # Enforce 1proc. This isn't in one of the user.js files because those
        # are shared with desktop, which wants e10s. We can't interpolate
        # because the formatting code only works for strings, and this is a
        # bool pref.
        prefs["browser.tabs.remote.autostart"] = False

        outputdir = self.config.get('output_directory', '/sdcard/pgo_profile')
        jarlog = posixpath.join(outputdir, 'en-US.log')
        profdata = posixpath.join(outputdir, 'default_%p_random_%m.profraw')

        env = {}
        env["XPCOM_DEBUG_BREAK"] = "warn"
        env["MOZ_IN_AUTOMATION"] = "1"
        env["MOZ_JAR_LOG_FILE"] = jarlog
        env["LLVM_PROFILE_FILE"] = profdata

        adbdevice = ADBDevice(adb=adb, device='emulator-5554')
        adbdevice.mkdir(outputdir)

        try:
            # Run Fennec a first time to initialize its profile
            driver = Marionette(
                app='fennec',
                package_name=app,
                adb_path=adb,
                bin="target.apk",
                prefs=prefs,
                connect_to_running_emulator=True,
                startup_timeout=1000,
                env=env,
            )
            driver.start_session()

            # Now generate the profile and wait for it to complete
            for page in PAGES:
                driver.navigate("http://%s:%d/%s" % (IP, PORT, page))
                timeout = 2
                if 'Speedometer/index.html' in page:
                    # The Speedometer test actually runs many tests internally in
                    # javascript, so it needs extra time to run through them. The
                    # emulator doesn't get very far through the whole suite, but
                    # this extra time at least lets some of them process.
                    timeout = 360
                time.sleep(timeout)

            driver.set_context("chrome")
            driver.execute_script("""
                Components.utils.import("resource://gre/modules/Services.jsm");
                let cancelQuit = Components.classes["@mozilla.org/supports-PRBool;1"]
                    .createInstance(Components.interfaces.nsISupportsPRBool);
                Services.obs.notifyObservers(cancelQuit, "quit-application-requested", null);
                return cancelQuit.data;
            """)
            driver.execute_script("""
                Components.utils.import("resource://gre/modules/Services.jsm");
                Services.startup.quit(Ci.nsIAppStartup.eAttemptQuit)
            """)

            # There is a delay between execute_script() returning and the profile data
            # actually getting written out, so poll the device until we get a profile.
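            # (The for/else below allows roughly 50 x 2 = 100 seconds for the
            # app to exit before giving up.)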
            for i in range(50):
                if not adbdevice.process_exist(app):
                    break
                time.sleep(2)
            else:
                raise Exception("Android App (%s) never quit" % app)

            # Pull all the profraw files and en-US.log
            adbdevice.pull(outputdir, '/builds/worker/workspace/')
        except ADBTimeoutError:
            self.fatal('INFRA-ERROR: Failed with an ADBTimeoutError',
                       EXIT_STATUS_DICT[TBPL_RETRY])

        profraw_files = glob.glob('/builds/worker/workspace/*.profraw')
        if not profraw_files:
            self.fatal(
                'Could not find any profraw files in /builds/worker/workspace')
        merge_cmd = [
            os.path.join(os.environ['MOZ_FETCHES_DIR'],
                         'clang/bin/llvm-profdata'),
            'merge',
            '-o',
            '/builds/worker/workspace/merged.profdata',
        ] + profraw_files
        rc = subprocess.call(merge_cmd)
        if rc != 0:
            self.fatal(
                'INFRA-ERROR: Failed to merge profile data. Corrupt profile?',
                EXIT_STATUS_DICT[TBPL_RETRY])

        # tarfile doesn't support xz in this version of Python
        tar_cmd = [
            'tar',
            '-acvf',
            '/builds/worker/artifacts/profdata.tar.xz',
            '-C',
            '/builds/worker/workspace',
            'merged.profdata',
            'en-US.log',
        ]
        subprocess.check_call(tar_cmd)

        httpd.stop()
Example #10
0
    def run_tests(self):
        """
        Generate the PGO profile data
        """
        from mozhttpd import MozHttpd
        from mozprofile import Preferences
        from mozdevice import ADBDeviceFactory, ADBTimeoutError
        from six import string_types
        from marionette_driver.marionette import Marionette

        app = self.query_package_name()

        IP = "10.0.2.2"
        PORT = 8888

        PATH_MAPPINGS = {
            "/js-input/webkit/PerformanceTests":
            "third_party/webkit/PerformanceTests",
        }

        dirs = self.query_abs_dirs()
        topsrcdir = dirs["abs_src_dir"]
        adb = self.query_exe("adb")

        path_mappings = {
            k: os.path.join(topsrcdir, v)
            for k, v in PATH_MAPPINGS.items()
        }
        httpd = MozHttpd(
            port=PORT,
            docroot=os.path.join(topsrcdir, "build", "pgo"),
            path_mappings=path_mappings,
        )
        httpd.start(block=False)

        profile_data_dir = os.path.join(topsrcdir, "testing", "profiles")
        with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh:
            base_profiles = json.load(fh)["profileserver"]

        prefpaths = [
            os.path.join(profile_data_dir, profile, "user.js")
            for profile in base_profiles
        ]

        prefs = {}
        for path in prefpaths:
            prefs.update(Preferences.read_prefs(path))

        interpolation = {
            "server": "%s:%d" % httpd.httpd.server_address,
            "OOP": "false"
        }
        for k, v in prefs.items():
            if isinstance(v, string_types):
                v = v.format(**interpolation)
            prefs[k] = Preferences.cast(v)

        outputdir = self.config.get("output_directory", "/sdcard/pgo_profile")
        jarlog = posixpath.join(outputdir, "en-US.log")
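        # %p expands to the process id and %m to a signature of the profiled
        # binary, so each process writes its own .profraw file.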
        profdata = posixpath.join(outputdir, "default_%p_random_%m.profraw")

        env = {}
        env["XPCOM_DEBUG_BREAK"] = "warn"
        env["MOZ_IN_AUTOMATION"] = "1"
        env["MOZ_JAR_LOG_FILE"] = jarlog
        env["LLVM_PROFILE_FILE"] = profdata

        if self.query_minidump_stackwalk():
            os.environ["MINIDUMP_STACKWALK"] = self.minidump_stackwalk_path
        os.environ["MINIDUMP_SAVE_PATH"] = self.query_abs_dirs(
        )["abs_blob_upload_dir"]
        if not self.symbols_path:
            self.symbols_path = os.environ.get("MOZ_FETCHES_DIR")

        # Force test_root to be on the sdcard for android pgo
        # builds which fail for Android 4.3 when profiles are located
        # in /data/local/tmp/test_root with
        # E AndroidRuntime: FATAL EXCEPTION: Gecko
        # E AndroidRuntime: java.lang.IllegalArgumentException: \
        #    Profile directory must be writable if specified: /data/local/tmp/test_root/profile
        # This occurs when .can-write-sentinel is written to
        # the profile in
        # mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoProfile.java.
        # This is not a problem on later versions of Android. This
        # over-ride of test_root should be removed when Android 4.3 is no
        # longer supported.
        sdcard_test_root = "/sdcard/test_root"
        adbdevice = ADBDeviceFactory(adb=adb,
                                     device="emulator-5554",
                                     test_root=sdcard_test_root)
        if adbdevice.test_root != sdcard_test_root:
            # If the test_root was previously set and shared
            # the initializer will not have updated the shared
            # value. Force it to match the sdcard_test_root.
            adbdevice.test_root = sdcard_test_root
        adbdevice.mkdir(outputdir, parents=True)

        try:
            # Run Fennec a first time to initialize its profile
            driver = Marionette(
                app="fennec",
                package_name=app,
                adb_path=adb,
                bin="geckoview-androidTest.apk",
                prefs=prefs,
                connect_to_running_emulator=True,
                startup_timeout=1000,
                env=env,
                symbols_path=self.symbols_path,
            )
            driver.start_session()

            # Now generate the profile and wait for it to complete
            for page in PAGES:
                driver.navigate("http://%s:%d/%s" % (IP, PORT, page))
                timeout = 2
                if "Speedometer/index.html" in page:
                    # The Speedometer test actually runs many tests internally in
                    # javascript, so it needs extra time to run through them. The
                    # emulator doesn't get very far through the whole suite, but
                    # this extra time at least lets some of them process.
                    timeout = 360
                time.sleep(timeout)

            driver.set_context("chrome")
            driver.execute_script("""
                Components.utils.import("resource://gre/modules/Services.jsm");
                let cancelQuit = Components.classes["@mozilla.org/supports-PRBool;1"]
                    .createInstance(Components.interfaces.nsISupportsPRBool);
                Services.obs.notifyObservers(cancelQuit, "quit-application-requested", null);
                return cancelQuit.data;
            """)
            driver.execute_script("""
                Components.utils.import("resource://gre/modules/Services.jsm");
                Services.startup.quit(Ci.nsIAppStartup.eAttemptQuit)
            """)

            # There is a delay between execute_script() returning and the profile data
            # actually getting written out, so poll the device until we get a profile.
            for i in range(50):
                if not adbdevice.process_exist(app):
                    break
                time.sleep(2)
            else:
                raise Exception("Android App (%s) never quit" % app)

            # Pull all the profraw files and en-US.log
            adbdevice.pull(outputdir, "/builds/worker/workspace/")
        except ADBTimeoutError:
            self.fatal(
                "INFRA-ERROR: Failed with an ADBTimeoutError",
                EXIT_STATUS_DICT[TBPL_RETRY],
            )

        profraw_files = glob.glob("/builds/worker/workspace/*.profraw")
        if not profraw_files:
            self.fatal(
                "Could not find any profraw files in /builds/worker/workspace")
        merge_cmd = [
            os.path.join(os.environ["MOZ_FETCHES_DIR"],
                         "clang/bin/llvm-profdata"),
            "merge",
            "-o",
            "/builds/worker/workspace/merged.profdata",
        ] + profraw_files
        rc = subprocess.call(merge_cmd)
        if rc != 0:
            self.fatal(
                "INFRA-ERROR: Failed to merge profile data. Corrupt profile?",
                EXIT_STATUS_DICT[TBPL_RETRY],
            )

        # tarfile doesn't support xz in this version of Python
        tar_cmd = [
            "tar",
            "-acvf",
            "/builds/worker/artifacts/profdata.tar.xz",
            "-C",
            "/builds/worker/workspace",
            "merged.profdata",
            "en-US.log",
        ]
        subprocess.check_call(tar_cmd)

        httpd.stop()
Example #11
0
    def valgrind_test(self, suppressions):
        import json
        import re
        import sys
        import tempfile

        from mozbuild.base import MozbuildObject
        from mozfile import TemporaryDirectory
        from mozhttpd import MozHttpd
        from mozprofile import FirefoxProfile, Preferences
        from mozprofile.permissions import ServerLocations
        from mozrunner import FirefoxRunner
        from mozrunner.utils import findInPath

        build_dir = os.path.join(self.topsrcdir, 'build')

        # XXX: currently we just use the PGO inputs for Valgrind runs.  This may
        # change in the future.
        httpd = MozHttpd(docroot=os.path.join(build_dir, 'pgo'))
        httpd.start(block=False)

        with TemporaryDirectory() as profilePath:
            #TODO: refactor this into mozprofile
            prefpath = os.path.join(self.topsrcdir, 'testing', 'profiles',
                                    'prefs_general.js')
            prefs = {}
            prefs.update(Preferences.read_prefs(prefpath))
            interpolation = {
                'server': '%s:%d' % httpd.httpd.server_address,
                'OOP': 'false'
            }
            prefs = json.loads(json.dumps(prefs) % interpolation)
            for pref in prefs:
                prefs[pref] = Preferences.cast(prefs[pref])

            quitter = os.path.join(self.distdir, 'xpi-stage', 'quitter')

            locations = ServerLocations()
            locations.add_host(host='127.0.0.1',
                               port=httpd.httpd.server_port,
                               options='primary')

            profile = FirefoxProfile(profile=profilePath,
                                     preferences=prefs,
                                     addons=[quitter],
                                     locations=locations)

            firefox_args = [httpd.get_url()]

            env = os.environ.copy()
            env['G_SLICE'] = 'always-malloc'
            env['XPCOM_CC_RUN_DURING_SHUTDOWN'] = '1'
            env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
            env['XPCOM_DEBUG_BREAK'] = 'warn'

            class OutputHandler(object):
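                """Watch process output for a non-zero Valgrind ERROR SUMMARY."""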
                def __init__(self):
                    self.found_errors = False

                def __call__(self, line):
                    print(line)
                    m = re.match(
                        r'.*ERROR SUMMARY: [1-9]\d* errors from \d+ contexts',
                        line)
                    if m:
                        self.found_errors = True

            outputHandler = OutputHandler()
            kp_kwargs = {'processOutputLine': [outputHandler]}

            valgrind = 'valgrind'
            if not os.path.exists(valgrind):
                valgrind = findInPath(valgrind)

            valgrind_args = [
                valgrind, '--smc-check=all-non-file',
                '--vex-iropt-register-updates=allregs-at-mem-access',
                '--gen-suppressions=all', '--num-callers=20',
                '--leak-check=full', '--show-possibly-lost=no',
                '--track-origins=yes'
            ]

            for s in suppressions:
                valgrind_args.append('--suppressions=' + s)

            supps_dir = os.path.join(build_dir, 'valgrind')
            supps_file1 = os.path.join(supps_dir, 'cross-architecture.sup')
            valgrind_args.append('--suppressions=' + supps_file1)

            # MACHTYPE is an odd bash-only environment variable that doesn't
            # show up in os.environ, so we have to get it another way.
            machtype = subprocess.check_output(
                ['bash', '-c', 'echo $MACHTYPE']).rstrip()
            supps_file2 = os.path.join(supps_dir, machtype + '.sup')
            if os.path.isfile(supps_file2):
                valgrind_args.append('--suppressions=' + supps_file2)

            exitcode = None
            try:
                runner = FirefoxRunner(profile=profile,
                                       binary=self.get_binary_path(),
                                       cmdargs=firefox_args,
                                       env=env,
                                       kp_kwargs=kp_kwargs)
                runner.start(debug_args=valgrind_args)
                exitcode = runner.wait()

            finally:
                if not outputHandler.found_errors:
                    status = 0
                    print(
                        'TEST-PASS | valgrind-test | valgrind found no errors')
                else:
                    status = 1  # turns the TBPL job orange
                    print(
                        'TEST-UNEXPECTED-FAIL | valgrind-test | valgrind found errors'
                    )

                if exitcode != 0:
                    status = 2  # turns the TBPL job red
                    print(
                        'TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code from Valgrind'
                    )

                httpd.stop()

            return status
Example #12
0
    def valgrind_test(self, suppressions):
        import json
        import sys
        import tempfile

        from mozbuild.base import MozbuildObject
        from mozfile import TemporaryDirectory
        from mozhttpd import MozHttpd
        from mozprofile import FirefoxProfile, Preferences
        from mozprofile.permissions import ServerLocations
        from mozrunner import FirefoxRunner
        from mozrunner.utils import findInPath
        from valgrind.output_handler import OutputHandler

        build_dir = os.path.join(self.topsrcdir, 'build')

        # XXX: currently we just use the PGO inputs for Valgrind runs.  This may
        # change in the future.
        httpd = MozHttpd(docroot=os.path.join(build_dir, 'pgo'))
        httpd.start(block=False)

        with TemporaryDirectory() as profilePath:
            #TODO: refactor this into mozprofile
            prefpath = os.path.join(self.topsrcdir, 'testing', 'profiles', 'prefs_general.js')
            prefs = {}
            prefs.update(Preferences.read_prefs(prefpath))
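            # Round-trip through JSON so %-interpolation is applied to every
            # string pref at once.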
            interpolation = { 'server': '%s:%d' % httpd.httpd.server_address,
                              'OOP': 'false'}
            prefs = json.loads(json.dumps(prefs) % interpolation)
            for pref in prefs:
                prefs[pref] = Preferences.cast(prefs[pref])

            quitter = os.path.join(self.topsrcdir, 'tools', 'quitter', '*****@*****.**')

            locations = ServerLocations()
            locations.add_host(host='127.0.0.1',
                               port=httpd.httpd.server_port,
                               options='primary')

            profile = FirefoxProfile(profile=profilePath,
                                     preferences=prefs,
                                     addons=[quitter],
                                     locations=locations)

            firefox_args = [httpd.get_url()]

            env = os.environ.copy()
            env['G_SLICE'] = 'always-malloc'
            env['MOZ_CC_RUN_DURING_SHUTDOWN'] = '1'
            env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
            env['XPCOM_DEBUG_BREAK'] = 'warn'

            env.update(self.extra_environment_variables)

            outputHandler = OutputHandler(self.log)
            kp_kwargs = {'processOutputLine': [outputHandler]}

            valgrind = 'valgrind'
            if not os.path.exists(valgrind):
                valgrind = findInPath(valgrind)

            valgrind_args = [
                valgrind,
                '--smc-check=all-non-file',
                '--vex-iropt-register-updates=allregs-at-mem-access',
                '--gen-suppressions=all',
                '--num-callers=36',
                '--leak-check=full',
                '--show-possibly-lost=no',
                '--track-origins=yes',
                '--trace-children=yes',
                '-v',  # Enable verbosity to get the list of used suppressions
            ]

            for s in suppressions:
                valgrind_args.append('--suppressions=' + s)

            supps_dir = os.path.join(build_dir, 'valgrind')
            supps_file1 = os.path.join(supps_dir, 'cross-architecture.sup')
            valgrind_args.append('--suppressions=' + supps_file1)

            # MACHTYPE is an odd bash-only environment variable that doesn't
            # show up in os.environ, so we have to get it another way.
            machtype = subprocess.check_output(['bash', '-c', 'echo $MACHTYPE']).rstrip()
            supps_file2 = os.path.join(supps_dir, machtype + '.sup')
            if os.path.isfile(supps_file2):
                valgrind_args.append('--suppressions=' + supps_file2)

            exitcode = None
            timeout = 1800
            try:
                runner = FirefoxRunner(profile=profile,
                                       binary=self.get_binary_path(),
                                       cmdargs=firefox_args,
                                       env=env,
                                       process_args=kp_kwargs)
                runner.start(debug_args=valgrind_args)
                exitcode = runner.wait(timeout=timeout)

            finally:
                errs = outputHandler.error_count
                supps = outputHandler.suppression_count
                if errs != supps:
                    status = 1  # turns the TBPL job orange
                    self.log(logging.ERROR, 'valgrind-fail-parsing',
                             {'errs': errs, 'supps': supps},
                             'TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors seen, but {supps} generated suppressions seen')

                elif errs == 0:
                    status = 0
                    self.log(logging.INFO, 'valgrind-pass', {},
                             'TEST-PASS | valgrind-test | valgrind found no errors')
                else:
                    status = 1  # turns the TBPL job orange
                    # We've already printed details of the errors.

                if exitcode is None:
                    status = 2  # turns the TBPL job red
                    self.log(logging.ERROR, 'valgrind-fail-timeout',
                             {'timeout': timeout},
                             'TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out (reached {timeout} second limit)')
                elif exitcode != 0:
                    status = 2  # turns the TBPL job red
                    self.log(logging.ERROR, 'valgrind-fail-errors', {},
                             'TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code from Valgrind')

                httpd.stop()

            return status
Example #13
0
def main(args):
    parser = Options()
    options, args = parser.parse_args()
    kill_port = 20703

    if (not options.html_manifest or not options.specialpowers
            or not options.host1 or not options.host2
            or not options.signalling_server):
        parser.print_usage()
        return 2

    package_options = get_package_options(parser, options)
    if not package_options:
        parser.print_usage()
        return 2

    if not os.path.isdir(options.specialpowers):
        parser.error("SpecialPowers directory %s does not exist" %
                     options.specialpowers)
        return 2
    if options.prefs and not os.path.isfile(options.prefs):
        parser.error("Prefs file %s does not exist" % options.prefs)
        return 2
    if options.log_dest and not os.path.isdir(options.log_dest):
        parser.error("Log directory %s does not exist" % options.log_dest)
        return 2

    log = mozlog.unstructured.getLogger('steeplechase')
    log.setLevel(logging.DEBUG)
    if ':' in options.host1:
        host1, port = options.host1.split(':')
        dm1 = DeviceManagerSUT(host1, port)
    else:
        dm1 = DeviceManagerSUT(options.host1)
    if ':' in options.host2:
        host2, port = options.host2.split(':')
        dm2 = DeviceManagerSUT(host2, port)
    else:
        dm2 = DeviceManagerSUT(options.host2)

    if (options.killall is not None) and (options.killall == 1):
        kill_dm1 = DeviceManagerSUT(host1, kill_port)
        kill_dm2 = DeviceManagerSUT(host2, kill_port)
        os_type = GetOStypes(package_options)
        print("OS type of host1 is " + os_type[0] + " and host2 is " +
              os_type[1])
        KillFirefoxesCommand(kill_dm1, os_type[0])
        KillFirefoxesCommand(kill_dm2, os_type[1])

    remote_info = [{
        'dm': dm1,
        'binary': package_options.binary,
        'package': package_options.package,
        'is_initiator': True,
        'name': 'Client1'
    }, {
        'dm': dm2,
        'binary': package_options.binary2,
        'package': package_options.package2,
        'is_initiator': False,
        'name': 'Client2'
    }]
    # first, push app
    for info in remote_info:
        dm = info['dm']

        if info['binary']:
            asset = Binary(path=info['binary'],
                           log=log,
                           dm=info['dm'],
                           name=info['name'])
        else:
            asset = generate_package_asset(path=info['package'],
                                           log=log,
                                           dm=info['dm'],
                                           name=info['name'])

        if options.setup:
            asset.setup_test_root()
        info['test_root'] = asset.test_root()

        if options.setup:
            log.info("Pushing app to %s...", info["name"])
            asset.setup_client()
        info['remote_app_path'] = asset.path_to_launch()
        if not options.setup and not dm.fileExists(info['remote_app_path']):
            log.error("App does not exist on %s, don't use --noSetup",
                      info['name'])
            return 2

    pass_count, fail_count = 0, 0
    if options.html_manifest:
        manifest = TestManifest(strict=False)
        manifest.read(options.html_manifest)
        manifest_data = {
            "tests": [{
                "path": t["relpath"]
            } for t in manifest.active_tests(disabled=False, **mozinfo.info)]
        }

        remote_port = 0
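        # Port 0 lets MozHttpd bind any free port unless --remote-webserver
        # specifies one explicitly.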
        if options.remote_webserver:
            result = re.search(r':(\d+)', options.remote_webserver)
            if result:
                remote_port = int(result.groups()[0])

        @json_response
        def get_manifest(req):
            return (200, manifest_data)

        handlers = [{
            'method': 'GET',
            'path': '/manifest.json',
            'function': get_manifest
        }]
        httpd = MozHttpd(
            host=moznetwork.get_ip(),
            port=remote_port,
            log_requests=True,
            docroot=os.path.join(os.path.dirname(__file__), "..",
                                 "webharness"),
            urlhandlers=handlers,
            path_mappings={"/tests": os.path.dirname(options.html_manifest)})
        httpd.start(block=False)
        test = HTMLTests(httpd, remote_info, log, options)
        html_pass_count, html_fail_count = test.run()
        pass_count += html_pass_count
        fail_count += html_fail_count
        httpd.stop()
    log.info("Result summary:")
    log.info("Passed: %d" % pass_count)
    log.info("Failed: %d" % fail_count)
    return pass_count > 0 and fail_count == 0
Example #14
0
    def run_test_group(self):
        self.results = []

        # reset number of passed/failed tests
        self.numpassed = 0
        self.numfailed = 0

        # build our tps.xpi extension
        self.extensions = []
        self.extensions.append(os.path.join(self.extensionDir, "tps"))

        # build the test list
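        # self.testfile is either a JSON manifest mapping test filenames to
        # metadata (with an optional 'disabled' reason) or a single test file.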
        try:
            f = open(self.testfile)
            jsondata = f.read()
            f.close()
            testfiles = json.loads(jsondata)
            testlist = []
            for (filename, meta) in testfiles["tests"].items():
                skip_reason = meta.get("disabled")
                if skip_reason:
                    print("Skipping test {} - {}".format(
                        filename, skip_reason))
                else:
                    testlist.append(filename)
        except ValueError:
            testlist = [os.path.basename(self.testfile)]
        testdir = os.path.dirname(self.testfile)

        self.mozhttpd = MozHttpd(port=4567, docroot=testdir)
        self.mozhttpd.start()

        # run each test, and save the results
        for test in testlist:
            result = self.run_single_test(testdir, test)

            if not self.productversion:
                self.productversion = result["productversion"]
            if not self.addonversion:
                self.addonversion = result["addonversion"]

            self.results.append({
                "state": result["state"],
                "name": result["name"],
                "message": result["message"],
                "logdata": result["logdata"],
            })
            if result["state"] == "TEST-PASS":
                self.numpassed += 1
            else:
                self.numfailed += 1
                if self.stop_on_error:
                    print("\nTest failed with --stop-on-error specified; "
                          "not running any more tests.\n")
                    break

        self.mozhttpd.stop()

        # generate the postdata we'll use to post the results to the db
        self.postdata = {
            "tests": self.results,
            "os": "%s %sbit" % (mozinfo.version, mozinfo.bits),
            "testtype": "crossweave",
            "productversion": self.productversion,
            "addonversion": self.addonversion,
            "synctype": self.synctype,
        }
Example #15
0
    def run_tests(self):
        """
        Generate the PGO profile data
        """
        from mozhttpd import MozHttpd
        from mozprofile import Preferences
        from mozdevice import ADBDevice, ADBProcessError, ADBTimeoutError
        from six import string_types
        from marionette_driver.marionette import Marionette

        app = self.query_package_name()

        IP = '10.0.2.2'
        PORT = 8888

        PATH_MAPPINGS = {
            '/js-input/webkit/PerformanceTests':
            'third_party/webkit/PerformanceTests',
        }

        dirs = self.query_abs_dirs()
        topsrcdir = os.path.join(dirs['abs_work_dir'], 'src')
        adb = self.query_exe('adb')

        path_mappings = {
            k: os.path.join(topsrcdir, v)
            for k, v in PATH_MAPPINGS.items()
        }
        httpd = MozHttpd(port=PORT,
                         docroot=os.path.join(topsrcdir, "build", "pgo"),
                         path_mappings=path_mappings)
        httpd.start(block=False)

        profile_data_dir = os.path.join(topsrcdir, 'testing', 'profiles')
        with open(os.path.join(profile_data_dir, 'profiles.json'), 'r') as fh:
            base_profiles = json.load(fh)['profileserver']

        prefpaths = [
            os.path.join(profile_data_dir, profile, 'user.js')
            for profile in base_profiles
        ]

        prefs = {}
        for path in prefpaths:
            prefs.update(Preferences.read_prefs(path))

        interpolation = {
            "server": "%s:%d" % httpd.httpd.server_address,
            "OOP": "false"
        }
        for k, v in prefs.items():
            if isinstance(v, string_types):
                v = v.format(**interpolation)
            prefs[k] = Preferences.cast(v)

        outputdir = self.config.get('output_directory', '/sdcard')
        jarlog = posixpath.join(outputdir, 'en-US.log')
        profdata = posixpath.join(outputdir, 'default.profraw')

        env = {}
        env["XPCOM_DEBUG_BREAK"] = "warn"
        env["MOZ_IN_AUTOMATION"] = "1"
        env["MOZ_JAR_LOG_FILE"] = jarlog
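        # The PGO-instrumented build writes its raw profile data to the path
        # given in LLVM_PROFILE_FILE.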
        env["LLVM_PROFILE_FILE"] = profdata

        adbdevice = ADBDevice(adb=adb, device='emulator-5554')

        try:
            # Run Fennec a first time to initialize its profile
            driver = Marionette(
                app='fennec',
                package_name=app,
                adb_path=adb,
                bin="target.apk",
                prefs=prefs,
                connect_to_running_emulator=True,
                startup_timeout=1000,
                env=env,
            )
            driver.start_session()

            # Now generate the profile and wait for it to complete
            for page in PAGES:
                driver.navigate("http://%s:%d/%s" % (IP, PORT, page))
                timeout = 2
                if 'Speedometer/index.html' in page:
                    # The Speedometer test actually runs many tests internally in
                    # javascript, so it needs extra time to run through them. The
                    # emulator doesn't get very far through the whole suite, but
                    # this extra time at least lets some of them process.
                    timeout = 360
                time.sleep(timeout)

            driver.set_context("chrome")
            driver.execute_script("""
                Components.utils.import("resource://gre/modules/Services.jsm");
                let cancelQuit = Components.classes["@mozilla.org/supports-PRBool;1"]
                    .createInstance(Components.interfaces.nsISupportsPRBool);
                Services.obs.notifyObservers(cancelQuit, "quit-application-requested", null);
                return cancelQuit.data;
            """)
            driver.execute_script("""
                Components.utils.import("resource://gre/modules/Services.jsm");
                Services.startup.quit(Ci.nsIAppStartup.eAttemptQuit)
            """)

            # There is a delay between execute_script() returning and the profile data
            # actually getting written out, so poll the device until we get a profile.
            for i in range(50):
                try:
                    localprof = '/builds/worker/workspace/default.profraw'
                    adbdevice.pull(profdata, localprof)
                    stats = os.stat(localprof)
                    if stats.st_size == 0:
                        # The file may not have been fully written yet, so retry until we
                        # get actual data.
                        time.sleep(2)
                    else:
                        break
                except ADBProcessError:
                    # The file may not exist at all yet, which would raise an
                    # ADBProcessError, so retry.
                    time.sleep(2)
            else:
                raise Exception("Unable to pull default.profraw")
            adbdevice.pull(jarlog, '/builds/worker/workspace/en-US.log')
        except ADBTimeoutError:
            self.fatal('INFRA-ERROR: Failed with an ADBTimeoutError',
                       EXIT_STATUS_DICT[TBPL_RETRY])

        # We normally merge as part of a GENERATED_FILES step in the profile-use
        # build, but Android runs sometimes result in a truncated profile. We do
        # a merge here to make sure the data isn't corrupt so we can retry the
        # 'run' task if necessary.
        merge_cmd = [
            '/builds/worker/workspace/build/clang/bin/llvm-profdata',
            'merge',
            '/builds/worker/workspace/default.profraw',
            '-o',
            '/builds/worker/workspace/merged.profraw',
        ]
        rc = subprocess.call(merge_cmd)
        if rc != 0:
            self.fatal(
                'INFRA-ERROR: Failed to merge profile data. Corrupt profile?',
                EXIT_STATUS_DICT[TBPL_RETRY])

        # tarfile doesn't support xz in this version of Python
        tar_cmd = [
            'tar',
            '-acvf',
            '/builds/worker/artifacts/profdata.tar.xz',
            '-C',
            '/builds/worker/workspace',
            'merged.profraw',
            'en-US.log',
        ]
        subprocess.check_call(tar_cmd)

        httpd.stop()