def setup_adb_device(self):
    """Connect to the test device (if not already connected) and prepare it.

    Lazily creates the ADBDevice connection, applies the standard
    performance tuning, then resets app data and the debug-app flag.
    """
    if self.device is None:
        self.device = ADBDevice(verbose=True)
    # Tune device performance settings so runs are comparable.
    tune_performance(self.device, log=LOG)
    self.clear_app_data()
    self.set_debug_app_flag()
def __init__(self, log, options):
    """Set up the device connection, profile and servers for a junit run.

    Raises if --enable-coverage is used without --coverage-output-dir.
    """
    self.log = log
    verbose = (options.log_tbpl_level == 'debug' or
               options.log_mach_level == 'debug')
    self.device = ADBDevice(adb=options.adbPath or 'adb',
                            device=options.deviceSerial,
                            test_root=options.remoteTestRoot,
                            verbose=verbose)
    self.options = options
    self.log.debug("options=%s" % vars(options))
    update_mozinfo()
    self.remote_profile = posixpath.join(self.device.test_root,
                                         'junit-profile')

    # Coverage needs somewhere on the host to land; fail fast if missing.
    if self.options.coverage and not self.options.coverage_output_dir:
        raise Exception(
            "--coverage-output-dir is required when using --enable-coverage"
        )
    if self.options.coverage:
        self.remote_coverage_output_file = posixpath.join(
            self.device.test_root, 'junit-coverage.ec')
        self.coverage_output_file = os.path.join(
            self.options.coverage_output_dir, 'junit-coverage.ec')

    self.server_init()
    self.cleanup()
    self.device.clear_logcat()
    self.build_profile()
    self.startServers(self.options, debuggerInfo=None)
    self.log.debug("Servers started")
def setup_adb_device(self):
    """Connect to the test device (if needed) and prepare it for testing.

    Performance tuning can be opted out of via the 'disable_perf_tuning'
    config key.
    """
    if self.device is None:
        self.device = ADBDevice(verbose=True)
    if not self.config.get("disable_perf_tuning", False):
        tune_performance(self.device, log=LOG)
    self.clear_app_data()
    self.set_debug_app_flag()
def get_browser_meta(self):
    """Return the browser name and version as a (name, version) tuple.

    mozversion is the primary source for this metadata, and for android
    the only one; it is driven by the installerpath attribute, which only
    works with Firefox browsers.  For chrome-m the version is scraped
    from `dumpsys package` output instead.
    """
    browser_name = None
    browser_version = None

    if self.config["app"] in self.firefox_android_apps:
        try:
            meta = mozversion.get_version(binary=self.installerpath)
            browser_name = meta.get("application_name")
            browser_version = meta.get("application_version")
        except Exception as e:
            LOG.warning(
                "Failed to get android browser meta data through mozversion: %s-%s"
                % (e.__class__.__name__, e))

    if self.config["app"] == "chrome-m":
        # We absolutely need to determine the chrome
        # version here so that we can select the correct
        # chromedriver for browsertime
        from mozdevice import ADBDevice

        device = ADBDevice(verbose=True)
        binary = "com.android.chrome"
        pkg_info = device.shell_output("dumpsys package %s" % binary)
        version_matcher = re.compile(r".*versionName=([\d.]+)")
        for line in pkg_info.split("\n"):
            match = version_matcher.match(line)
            if match:
                browser_version = match.group(1)
                browser_name = self.config["app"]
                # First one found is the non-system
                # or latest version.
                break
        if not browser_version:
            raise Exception(
                "Could not determine version for Google Chrome for Android"
            )

    if not browser_name:
        LOG.warning("Could not find a browser name")
    else:
        LOG.info("Browser name: %s" % browser_name)
    if not browser_version:
        LOG.warning("Could not find a browser version")
    else:
        LOG.info("Browser version: %s" % browser_version)

    return (browser_name, browser_version)
def __init__(self, options, progs):
    """Connect to the device and lay out the remote cppunittest directories."""
    cppunittests.CPPUnitTests.__init__(self)
    self.options = options
    self.device = ADBDevice(adb=options.adb_path or 'adb',
                            device=options.device_serial,
                            test_root=options.remote_test_root)
    # Short remote directory names keep device command lines small.
    self.remote_test_root = posixpath.join(self.device.test_root,
                                           "cppunittests")
    self.remote_bin_dir = posixpath.join(self.remote_test_root, "b")
    self.remote_tmp_dir = posixpath.join(self.remote_test_root, "tmp")
    self.remote_home_dir = posixpath.join(self.remote_test_root, "h")
    if options.setup:
        self.setup_bin(progs)
def __init__(self, options, message_logger):
    """One-time initialization of the robocop mochitest runner."""
    MochitestDesktop.__init__(self, options.flavor, vars(options))

    verbose = (options.log_tbpl_level == 'debug' or
               options.log_mach_level == 'debug')
    self.device = ADBDevice(adb=options.adbPath or 'adb',
                            device=options.deviceSerial,
                            test_root=options.remoteTestRoot,
                            verbose=verbose)

    # Check that Firefox is installed
    expected = options.app.split('/')[-1]
    if not self.device.is_app_installed(expected):
        raise Exception("%s is not installed on this device" % expected)

    options.logFile = "robocop.log"
    if options.remoteTestRoot is None:
        options.remoteTestRoot = self.device.test_root
    self.remoteProfile = posixpath.join(options.remoteTestRoot, "profile")
    self.remoteProfileCopy = posixpath.join(options.remoteTestRoot,
                                            "profile-copy")
    self.remoteModulesDir = posixpath.join(options.remoteTestRoot, "modules/")
    self.remoteConfigFile = posixpath.join(options.remoteTestRoot,
                                           "robotium.config")
    self.remoteLogFile = posixpath.join(options.remoteTestRoot, "logs",
                                        "robocop.log")
    self.options = options

    process_args = {'messageLogger': message_logger}
    self.auto = RemoteAutomation(self.device, options.remoteappname,
                                 self.remoteProfile, self.remoteLogFile,
                                 processArgs=process_args)
    self.environment = self.auto.environment

    self.remoteScreenshots = "/mnt/sdcard/Robotium-Screenshots"
    self.remoteMozLog = posixpath.join(options.remoteTestRoot, "mozlog")

    self.localLog = options.logFile
    self.localProfile = None
    self.certdbNew = True
    self.passed = 0
    self.failed = 0
    self.todo = 0
def setup_adb_device(self):
    """Connect to the device, tune it, and recreate the raptor test root."""
    if self.device is None:
        self.device = ADBDevice(verbose=True)
    tune_performance(self.device, log=LOG)

    LOG.info("creating remote root folder for raptor: %s" %
             self.remote_test_root)
    # Start from a clean remote directory each run.
    self.device.rm(self.remote_test_root, force=True, recursive=True)
    self.device.mkdir(self.remote_test_root)
    self.device.chmod(self.remote_test_root, recursive=True, root=True)

    self.clear_app_data()
    self.set_debug_app_flag()
def launchProcess(self, cmd, stdout, stderr, env, cwd, timeout=None):
    """Run an xpcshell command on the device via adb and return its output file.

    stdout/stderr/env/cwd are part of the harness interface but unused here:
    the environment has already been set for the command via the pushWrapper
    method.

    Returns the adb process stdout file object, or None when the shell call
    failed after the test had already timed out.
    """
    self.timedout = False
    cmd.insert(1, self.remoteHere)
    cmd = ADBDevice._escape_command_line(cmd)
    # Give adb a little slack beyond the harness timeout.  Previously
    # `timeout + 10` raised TypeError when the default timeout=None was used.
    shell_timeout = timeout + 10 if timeout is not None else None
    # Initialize up-front: if self.device.shell() raises while the test has
    # timed out, the return statement below previously hit a NameError.
    output_file = None
    try:
        adb_process = self.device.shell(cmd, timeout=shell_timeout, root=True)
        output_file = adb_process.stdout_file
        self.shellReturnCode = adb_process.exitcode
    except ADBTimeoutError:
        raise
    except Exception:
        if self.timedout:
            # If the test timed out, there is a good chance the shell
            # call also timed out and raised this Exception.
            # Ignore this exception to simplify the error report.
            self.shellReturnCode = None
        else:
            raise
    # The device manager may have timed out waiting for xpcshell.
    # Guard against an accumulation of hung processes by killing
    # them here. Note also that IPC tests may spawn new instances
    # of xpcshell.
    self.device.pkill("xpcshell")
    return output_file
def setup_adb_device(self):
    """Connect to the device and prepare it for a raptor run.

    Performance tuning honors the 'disable_perf_tuning' config key, and
    charging is disabled for power tests.
    """
    if self.device is None:
        self.device = ADBDevice(verbose=True)
    if not self.config.get("disable_perf_tuning", False):
        tune_performance(self.device, log=LOG)
    if self.config['power_test']:
        disable_charging(self.device)

    LOG.info("creating remote root folder for raptor: %s" %
             self.remote_test_root)
    # Recreate the remote test root from scratch.
    self.device.rm(self.remote_test_root, force=True, recursive=True)
    self.device.mkdir(self.remote_test_root)
    self.device.chmod(self.remote_test_root, recursive=True, root=True)

    self.clear_app_data()
    self.set_debug_app_flag()
def __call__(self, metadata):
    """Prepare the Android device for a perftest run.

    Connects over adb, installs the requested APKs (downloading them
    first when a URL or permalink alias is given), verifies the target
    app is present, and fills in a default activity when none was given.
    """
    self.app_name = self.get_arg("android-app-name")
    self.android_activity = self.get_arg("android-activity")
    self.metadata = metadata
    try:
        self.device = ADBDevice(verbose=True, timeout=30)
    except (ADBError, AttributeError) as e:
        self.error("Could not connect to the phone. Is it connected?")
        raise DeviceError(str(e))

    # install APKs
    for apk in self.get_arg("android-install-apk"):
        self.info("Installing %s" % apk)
        if apk in _PERMALINKS:
            apk = _PERMALINKS[apk]
        if apk.startswith("http"):
            # Fetch remote APKs into a throwaway directory first.
            with tempfile.TemporaryDirectory() as tmpdirname:
                target = Path(tmpdirname, "target.apk")
                self.info("Downloading %s" % apk)
                download_file(apk, target)
                self.info("Installing downloaded APK")
                self.device.install_app(str(target), replace=True)
        else:
            self.device.install_app(apk, replace=True)
        self.info("Done.")

    # checking that the app is installed
    if not self.device.is_app_installed(self.app_name):
        raise Exception("%s is not installed" % self.app_name)

    # set up default activity with the app name if none given
    if self.android_activity is None:
        # guess the activity, given the app
        if "fenix" in self.app_name:
            self.android_activity = "org.mozilla.fenix.IntentReceiverActivity"
        elif "geckoview_example" in self.app_name:
            self.android_activity = (
                "org.mozilla.geckoview_example.GeckoViewActivity")
        self.set_arg("android_activity", self.android_activity)

    self.info("Android environment:")
    self.info("- Application name: %s" % self.app_name)
    self.info("- Activity: %s" % self.android_activity)
    self.info("- Intent: %s" % self.get_arg("android_intent"))
    return metadata
def _get_device(substs, device_serial=None):
    """Return a cached ADBDevice for device_serial, creating it on first use."""
    global devices
    device = devices.get(device_serial)
    if device is None:
        # Fall back to plain 'adb' when the SDK executable cannot be found.
        adb_path = _find_sdk_exe(substs, 'adb', False) or 'adb'
        device = ADBDevice(adb=adb_path, verbose=verbose_logging,
                           device=device_serial)
        devices[device_serial] = device
    return device
def get_device(self, adb_path, device_serial):
    """Return a cached mozdevice.ADBDevice for device_serial.

    Creation is expensive, so the device object is cached in the global
    `devices` map and reused on subsequent calls with the same serial.
    """
    global devices
    device = devices.get(device_serial)
    if device is None:
        device = ADBDevice(adb=adb_path, device=device_serial)
        devices[device_serial] = device
    return device
def run_test_remote(test, device, prefix, tempdir, options):
    """Run one jit-test on the remote device and return a TestOutput."""
    from mozdevice import ADBDevice, ADBProcessError

    if options.test_reflect_stringify:
        raise ValueError("can't run Reflect.stringify tests remotely")

    cmd = test.command(
        prefix,
        posixpath.join(options.remote_test_root, "lib/"),
        posixpath.join(options.remote_test_root, "modules/"),
        tempdir,
        posixpath.join(options.remote_test_root, "tests"),
    )
    if options.show_cmd:
        print(escape_cmdline(cmd))

    env = {"LD_LIBRARY_PATH": os.path.dirname(prefix[0])}
    if test.tz_pacific:
        env["TZ"] = "PST8PDT"

    # replace with shlex.join when move to Python 3.8+
    cmd = ADBDevice._escape_command_line(cmd)

    started = datetime.now()
    try:
        # Allow ADBError or ADBTimeoutError to terminate the test run,
        # but handle ADBProcessError in order to support the use of
        # non-zero exit codes in the JavaScript shell tests.
        out = device.shell_output(cmd, env=env,
                                  cwd=options.remote_test_root,
                                  timeout=int(options.timeout))
        returncode = 0
    except ADBProcessError as e:
        # Treat ignorable intermittent adb communication errors as
        # skipped tests.
        out = str(e.adb_process.stdout)
        returncode = e.adb_process.exitcode
        ignorable = re.compile(r"error: (closed|device .* not found)")
        if returncode == 1 and ignorable.search(out):
            print("Skipping {} due to ignorable adb error {}".format(
                test.path, out))
            test.skip_if_cond = "true"
            returncode = test.SKIPPED_EXIT_STATUS
    elapsed = (datetime.now() - started).total_seconds()

    # We can't distinguish between stdout and stderr so we pass
    # the same buffer to both.
    return TestOutput(test, cmd, out, out, returncode, elapsed, False)
def _test_remote(self, cond, options=None):
    """Evaluate a JS condition in the remote shell; cache and return the bool.

    Raises when the shell output is neither 'true' nor 'false'.
    """
    from mozdevice import ADBDevice, ADBProcessError

    # Fixed: the previous `options=[]` default was a shared mutable default
    # argument.  Behavior is unchanged for all callers.
    if options is None:
        options = []

    ans = self.cache.get(cond, None)
    if ans is not None:
        return ans

    env = {
        'LD_LIBRARY_PATH': posixpath.join(self.options.remote_test_root,
                                          'bin'),
    }

    cmd = [self.js_bin] + self.js_args + options + [
        # run in safe configuration, since it is hard to debug
        # crashes when running code here. In particular, msan will
        # error out if the jit is active.
        '--no-baseline',
        '--no-blinterp',
        '-e', self.js_prologue,
        '-e', 'print(!!({}))'.format(cond)
    ]
    cmd = ADBDevice._escape_command_line(cmd)
    try:
        # Allow ADBError or ADBTimeoutError to terminate the test run,
        # but handle ADBProcessError in order to support the use of
        # non-zero exit codes in the JavaScript shell tests.
        out = self.device.shell_output(cmd, env=env,
                                       cwd=self.options.remote_test_root,
                                       timeout=None)
        err = ''
    except ADBProcessError as e:
        out = ''
        err = str(e.adb_process.stdout)

    if out == 'true':
        ans = True
    elif out == 'false':
        ans = False
    else:
        raise Exception("Failed to test XUL condition {!r};"
                        " output was {!r}, stderr was {!r}".format(
                            cond, out, err))
    self.cache[cond] = ans
    return ans
def setup_script(device, prefix, tempdir, options, uniq_tag, tests):
    """Generate a batch script that runs all tests on the device, push it,
    and return (remote_script_path, total_timeout_seconds)."""
    timeout = int(options.timeout)
    script_timeout = 0
    # Fixed: initialize before the try block so the finally clause cannot hit
    # a NameError when NamedTemporaryFile itself raises.
    tmpf = None
    try:
        print("tasks_adb_remote.py : Create batch script")

        tmpf = tempfile.NamedTemporaryFile(mode="w", delete=False)
        tmpf.write(script_preamble(uniq_tag, prefix, options))
        for i, test in enumerate(tests):
            # This test is common to all tasks_*.py files, however, jit-test do
            # not provide the `run_skipped` option, and all tests are always
            # enabled.
            assert test.enable  # and not options.run_skipped
            if options.test_reflect_stringify:
                raise ValueError("can't run Reflect.stringify tests remotely")

            cmd = test.command(
                prefix,
                posixpath.join(options.remote_test_root, "lib/"),
                posixpath.join(options.remote_test_root, "modules/"),
                tempdir,
                posixpath.join(options.remote_test_root, "tests"),
            )

            # replace with shlex.join when move to Python 3.8+
            cmd = ADBDevice._escape_command_line(cmd)

            env = {}
            if test.tz_pacific:
                env["TZ"] = "PST8PDT"
            envStr = "".join(key + "='" + val + "' "
                             for key, val in env.items())

            tmpf.write("{}do_test {} 0 {};\n".format(envStr, i, cmd))
            script_timeout += timeout
        tmpf.write("do_end;\n")
        tmpf.close()
        script = posixpath.join(options.remote_test_root, "test_manifest.sh")
        device.push(tmpf.name, script)
        device.chmod(script)
        print("tasks_adb_remote.py : Batch script created")
    except Exception:
        print("tasks_adb_remote.py : Batch script failed")
        # Bare raise preserves the original traceback (was `raise e`).
        raise
    finally:
        if tmpf:
            os.unlink(tmpf.name)
    return script, script_timeout
def run_test_remote(test, device, prefix, options):
    """Execute one shell test on the device and wrap the result in TestOutput."""
    from mozdevice import ADBDevice, ADBProcessError, ADBTimeoutError

    if options.test_reflect_stringify:
        raise ValueError("can't run Reflect.stringify tests remotely")

    cmd = test.command(prefix,
                       posixpath.join(options.remote_test_root, 'lib/'),
                       posixpath.join(options.remote_test_root, 'modules/'),
                       posixpath.join(options.remote_test_root, 'tests'))
    if options.show_cmd:
        print(escape_cmdline(cmd))

    env = {}
    if test.tz_pacific:
        env['TZ'] = 'PST8PDT'
    env['LD_LIBRARY_PATH'] = options.remote_test_root

    cmd = ADBDevice._escape_command_line(cmd)
    started = datetime.now()
    try:
        out = device.shell_output(cmd, env=env,
                                  cwd=options.remote_test_root,
                                  timeout=int(options.timeout))
        returncode = 0
    except ADBTimeoutError:
        raise
    except ADBProcessError as e:
        # Shell tests may legitimately exit non-zero.
        out = e.adb_process.stdout
        print("exception output: %s" % str(out))
        returncode = e.adb_process.exitcode
    elapsed = (datetime.now() - started).total_seconds()

    # We can't distinguish between stdout and stderr so we pass
    # the same buffer to both.
    return TestOutput(test, cmd, out, out, returncode, elapsed, False)
def run_test_remote(test, device, prefix, options):
    """Run one test remotely, remapping local test paths to device paths."""
    from mozdevice import ADBDevice, ADBProcessError

    cmd = test.get_command(prefix)
    # Rewrite host-side test paths into the device-side tests directory.
    test_root_parent = os.path.dirname(test.root)
    jtd_tests = posixpath.join(options.remote_test_root, 'tests')
    cmd = [_.replace(test_root_parent, jtd_tests) for _ in cmd]

    env = {'TZ': 'PST8PDT',
           'LD_LIBRARY_PATH': os.path.dirname(prefix[0])}

    adb_cmd = ADBDevice._escape_command_line(cmd)
    started = datetime.now()
    try:
        # Allow ADBError or ADBTimeoutError to terminate the test run,
        # but handle ADBProcessError in order to support the use of
        # non-zero exit codes in the JavaScript shell tests.
        out = device.shell_output(adb_cmd, env=env,
                                  cwd=options.remote_test_root,
                                  timeout=int(options.timeout))
        returncode = 0
    except ADBProcessError as e:
        # Treat ignorable intermittent adb communication errors as
        # skipped tests.
        out = str(e.adb_process.stdout)
        returncode = e.adb_process.exitcode
        ignorable = re.compile(r'error: (closed|device .* not found)')
        if returncode == 1 and ignorable.search(out):
            print("Skipping {} due to ignorable adb error {}".format(
                test.path, out))
            test.skip_if_cond = "true"
            returncode = test.SKIPPED_EXIT_STATUS
    elapsed = (datetime.now() - started).total_seconds()

    # We can't distinguish between stdout and stderr so we pass
    # the same buffer to both.
    return TestOutput(test, cmd, out, out, returncode, elapsed, False)
def init_device(options):
    """Initialize (once) and return the global ADBDevice.

    Creates the remote directory layout, pushes the js shell and its
    libraries, and updates options.js_shell to the device-side path.
    """
    # Initialize the device
    global DEVICE

    assert options.remote and options.js_shell

    if DEVICE is not None:
        return DEVICE

    from mozdevice import ADBDevice, ADBError, ADBTimeoutError

    try:
        if not options.local_lib:
            # if not specified, use the local directory containing
            # the js binary to find the necessary libraries.
            options.local_lib = posixpath.dirname(options.js_shell)

        DEVICE = ADBDevice(device=options.device_serial,
                           test_root=options.remote_test_root)

        init_remote_dir(DEVICE, options.remote_test_root)

        bin_dir = posixpath.join(options.remote_test_root, 'bin')
        tests_dir = posixpath.join(options.remote_test_root, 'tests')
        # Push js shell and libraries.
        init_remote_dir(DEVICE, tests_dir)
        init_remote_dir(DEVICE, bin_dir)
        push_libs(options, DEVICE, bin_dir)
        push_progs(options, DEVICE, [options.js_shell], bin_dir)
        # update options.js_shell to point to the js binary on the device.
        # Fixed: this is a device-side POSIX path, so use posixpath.join —
        # os.path.join would produce backslashes on Windows hosts.
        options.js_shell = posixpath.join(bin_dir, 'js')

        return DEVICE
    except (ADBError, ADBTimeoutError):
        print("TEST-UNEXPECTED-FAIL | remote.py : Device initialization failed")
        raise
def post_to_treeherder(self, tests):
    """Build and submit a Treeherder job for this test run.

    Collects build/device metadata via mozversion, attaches logcat, log
    files and reports (uploaded to S3), and posts the job collection to
    the configured Treeherder instance.
    """
    version = mozversion.get_version(
        binary=self.bin, sources=self.sources,
        dm_type='adb', device_serial=self.device_serial)
    job_collection = TreeherderJobCollection()
    job = job_collection.get_job()

    device = version.get('device_id')
    device_firmware_version_release = \
        version.get('device_firmware_version_release')
    if not device:
        self.logger.error('Submitting to Treeherder is currently limited '
                          'to devices.')
        return

    try:
        group = DEVICE_GROUP_MAP[device][device_firmware_version_release]
        job.add_group_name(group['name'])
        job.add_group_symbol(group['symbol'])
        job.add_job_name('Gaia Python Integration Test (%s)' %
                         group['symbol'])
        job.add_job_symbol('Gip')
    except KeyError:
        self.logger.error('Unknown device id: %s or device firmware '
                          'version: %s. Unable to determine Treeherder '
                          'group. Supported devices: %s'
                          % (device, device_firmware_version_release,
                             ['%s: %s' % (k, [fw for fw in v.keys()])
                              for k, v in DEVICE_GROUP_MAP.iteritems()]))
        return

    # Determine revision hash from application revision
    revision = version['application_changeset']
    project = version['application_repository'].split('/')[-1]
    lookup_url = urljoin(
        self.treeherder_url,
        'api/project/%s/revision-lookup/?revision=%s' % (
            project, revision))
    self.logger.debug('Getting revision hash from: %s' % lookup_url)
    response = requests.get(lookup_url)
    response.raise_for_status()
    assert response.json(), 'Unable to determine revision hash for %s. ' \
                            'Perhaps it has not been ingested by ' \
                            'Treeherder?' % revision
    revision_hash = response.json()[revision]['revision_hash']
    job.add_revision_hash(revision_hash)
    job.add_project(project)
    job.add_job_guid(str(uuid.uuid4()))
    job.add_product_name('b2g')
    job.add_state('completed')

    # Determine test result
    if self.failed or self.unexpected_successes:
        job.add_result('testfailed')
    else:
        job.add_result('success')

    job.add_submit_timestamp(int(self.start_time))
    job.add_start_timestamp(int(self.start_time))
    job.add_end_timestamp(int(self.end_time))

    job.add_machine(socket.gethostname())
    job.add_build_info('b2g', 'b2g-device-image', 'x86')
    job.add_machine_info('b2g', 'b2g-device-image', 'x86')

    # All B2G device builds are currently opt builds
    job.add_option_collection({'opt': True})

    date_format = '%d %b %Y %H:%M:%S'
    job_details = [{
        'content_type': 'link',
        'title': 'Gaia revision:',
        'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
               version.get('gaia_changeset'),
        'value': version.get('gaia_changeset'),
    }, {
        'content_type': 'text',
        'title': 'Gaia date:',
        'value': version.get('gaia_date') and time.strftime(
            date_format, time.localtime(int(version.get('gaia_date')))),
    }, {
        'content_type': 'text',
        'title': 'Device identifier:',
        'value': version.get('device_id')
    }, {
        'content_type': 'text',
        'title': 'Device firmware (date):',
        'value': version.get('device_firmware_date') and time.strftime(
            date_format, time.localtime(int(
                version.get('device_firmware_date')))),
    }, {
        'content_type': 'text',
        'title': 'Device firmware (incremental):',
        'value': version.get('device_firmware_version_incremental')
    }, {
        'content_type': 'text',
        'title': 'Device firmware (release):',
        'value': version.get('device_firmware_version_release')
    }]

    ci_url = os.environ.get('BUILD_URL')
    if ci_url:
        job_details.append({
            'url': ci_url,
            'value': ci_url,
            'content_type': 'link',
            'title': 'CI build:'})

    # Attach logcat
    adb_device = ADBDevice(self.device_serial)
    with tempfile.NamedTemporaryFile(suffix='logcat.txt') as f:
        f.writelines(adb_device.get_logcat())
        self.logger.debug('Logcat stored in: %s' % f.name)
        try:
            url = self.upload_to_s3(f.name)
            job_details.append({
                'url': url,
                'value': 'logcat.txt',
                'content_type': 'link',
                'title': 'Log:'})
        except S3UploadError:
            job_details.append({
                'value': 'Failed to upload logcat.txt',
                'content_type': 'text',
                'title': 'Error:'})

    # Attach log files
    handlers = [handler for handler in self.logger.handlers
                if isinstance(handler, StreamHandler) and
                os.path.exists(handler.stream.name)]
    for handler in handlers:
        path = handler.stream.name
        filename = os.path.split(path)[-1]
        try:
            url = self.upload_to_s3(path)
            job_details.append({
                'url': url,
                'value': filename,
                'content_type': 'link',
                'title': 'Log:'})
            # Add log reference
            if type(handler.formatter) is TbplFormatter or \
                    type(handler.formatter) is LogLevelFilter and \
                    type(handler.formatter.inner) is TbplFormatter:
                job.add_log_reference(filename, url)
        except S3UploadError:
            job_details.append({
                'value': 'Failed to upload %s' % filename,
                'content_type': 'text',
                'title': 'Error:'})

    # Attach reports
    for report in [self.html_output, self.xml_output]:
        if report is not None:
            filename = os.path.split(report)[-1]
            try:
                url = self.upload_to_s3(report)
                job_details.append({
                    'url': url,
                    'value': filename,
                    'content_type': 'link',
                    'title': 'Report:'})
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload %s' % filename,
                    'content_type': 'text',
                    'title': 'Error:'})

    if job_details:
        job.add_artifact('Job Info', 'json', {'job_details': job_details})

    job_collection.add(job)

    # Send the collection to Treeherder
    url = urlparse(self.treeherder_url)
    request = TreeherderRequest(
        protocol=url.scheme,
        host=url.hostname,
        project=project,
        oauth_key=os.environ.get('TREEHERDER_KEY'),
        oauth_secret=os.environ.get('TREEHERDER_SECRET'))
    self.logger.debug('Sending results to Treeherder: %s' %
                      job_collection.to_json())
    response = request.post(job_collection)
    self.logger.debug('Response: %s' % response.read())
    assert response.status == 200, 'Failed to send results!'
    self.logger.info('Results are available to view at: %s' % (
        urljoin(self.treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' % (
            project, revision))))
def __init__(self, options, scriptDir): RefTest.__init__(self, options.suite) self.run_by_manifest = False self.scriptDir = scriptDir self.localLogName = options.localLogName verbose = False if options.log_tbpl_level == 'debug' or options.log_mach_level == 'debug': verbose = True print "set verbose!" self.device = ADBDevice(adb=options.adb_path or 'adb', device=options.deviceSerial, test_root=options.remoteTestRoot, verbose=verbose) if options.remoteTestRoot is None: options.remoteTestRoot = posixpath.join(self.device.test_root, "reftest") options.remoteProfile = posixpath.join(options.remoteTestRoot, "profile") options.remoteLogFile = posixpath.join(options.remoteTestRoot, "reftest.log") options.logFile = options.remoteLogFile self.remoteProfile = options.remoteProfile self.remoteTestRoot = options.remoteTestRoot if not options.ignoreWindowSize: parts = self.device.get_info('screen')['screen'][0].split() width = int(parts[0].split(':')[1]) height = int(parts[1].split(':')[1]) if (width < 1366 or height < 1050): self.error("ERROR: Invalid screen resolution %sx%s, " "please adjust to 1366x1050 or higher" % (width, height)) self._populate_logger(options) self.outputHandler = OutputHandler(self.log, options.utilityPath, options.symbolsPath) # RemoteAutomation.py's 'messageLogger' is also used by mochitest. Mimic a mochitest # MessageLogger object to re-use this code path. 
self.outputHandler.write = self.outputHandler.__call__ args = {'messageLogger': self.outputHandler} self.automation = RemoteAutomation(self.device, appName=options.app, remoteProfile=self.remoteProfile, remoteLog=options.remoteLogFile, processArgs=args) self.environment = self.automation.environment if self.automation.IS_DEBUG_BUILD: self.SERVER_STARTUP_TIMEOUT = 180 else: self.SERVER_STARTUP_TIMEOUT = 90 self.remoteCache = os.path.join(options.remoteTestRoot, "cache/") # Check that Firefox is installed expected = options.app.split('/')[-1] if not self.device.is_app_installed(expected): raise Exception("%s is not installed on this device" % expected) self.device.clear_logcat() self.device.rm(self.remoteCache, force=True, recursive=True) procName = options.app.split('/')[-1] self.device.stop_application(procName) if self.device.process_exist(procName): self.log.error("unable to kill %s before starting tests!" % procName)
class RemoteReftest(RefTest): use_marionette = False resolver_cls = RemoteReftestResolver def __init__(self, options, scriptDir): RefTest.__init__(self, options.suite) self.run_by_manifest = False self.scriptDir = scriptDir self.localLogName = options.localLogName verbose = False if options.log_tbpl_level == 'debug' or options.log_mach_level == 'debug': verbose = True print "set verbose!" self.device = ADBDevice(adb=options.adb_path or 'adb', device=options.deviceSerial, test_root=options.remoteTestRoot, verbose=verbose) if options.remoteTestRoot is None: options.remoteTestRoot = posixpath.join(self.device.test_root, "reftest") options.remoteProfile = posixpath.join(options.remoteTestRoot, "profile") options.remoteLogFile = posixpath.join(options.remoteTestRoot, "reftest.log") options.logFile = options.remoteLogFile self.remoteProfile = options.remoteProfile self.remoteTestRoot = options.remoteTestRoot if not options.ignoreWindowSize: parts = self.device.get_info('screen')['screen'][0].split() width = int(parts[0].split(':')[1]) height = int(parts[1].split(':')[1]) if (width < 1366 or height < 1050): self.error("ERROR: Invalid screen resolution %sx%s, " "please adjust to 1366x1050 or higher" % (width, height)) self._populate_logger(options) self.outputHandler = OutputHandler(self.log, options.utilityPath, options.symbolsPath) # RemoteAutomation.py's 'messageLogger' is also used by mochitest. Mimic a mochitest # MessageLogger object to re-use this code path. 
self.outputHandler.write = self.outputHandler.__call__ args = {'messageLogger': self.outputHandler} self.automation = RemoteAutomation(self.device, appName=options.app, remoteProfile=self.remoteProfile, remoteLog=options.remoteLogFile, processArgs=args) self.environment = self.automation.environment if self.automation.IS_DEBUG_BUILD: self.SERVER_STARTUP_TIMEOUT = 180 else: self.SERVER_STARTUP_TIMEOUT = 90 self.remoteCache = os.path.join(options.remoteTestRoot, "cache/") # Check that Firefox is installed expected = options.app.split('/')[-1] if not self.device.is_app_installed(expected): raise Exception("%s is not installed on this device" % expected) self.device.clear_logcat() self.device.rm(self.remoteCache, force=True, recursive=True) procName = options.app.split('/')[-1] self.device.stop_application(procName) if self.device.process_exist(procName): self.log.error("unable to kill %s before starting tests!" % procName) def findPath(self, paths, filename=None): for path in paths: p = path if filename: p = os.path.join(p, filename) if os.path.exists(self.getFullPath(p)): return path return None def startWebServer(self, options): """ Create the webserver on the host and start it up """ remoteXrePath = options.xrePath remoteUtilityPath = options.utilityPath localAutomation = Automation() localAutomation.IS_WIN32 = False localAutomation.IS_LINUX = False localAutomation.IS_MAC = False localAutomation.UNIXISH = False hostos = sys.platform if (hostos == 'mac' or hostos == 'darwin'): localAutomation.IS_MAC = True elif (hostos == 'linux' or hostos == 'linux2'): localAutomation.IS_LINUX = True localAutomation.UNIXISH = True elif (hostos == 'win32' or hostos == 'win64'): localAutomation.BIN_SUFFIX = ".exe" localAutomation.IS_WIN32 = True paths = [options.xrePath, localAutomation.DIST_BIN] options.xrePath = self.findPath(paths) if options.xrePath is None: print( "ERROR: unable to find xulrunner path for %s, " "please specify with --xre-path" % (os.name)) return 1 
paths.append("bin") paths.append(os.path.join("..", "bin")) xpcshell = "xpcshell" if (os.name == "nt"): xpcshell += ".exe" if (options.utilityPath): paths.insert(0, options.utilityPath) options.utilityPath = self.findPath(paths, xpcshell) if options.utilityPath is None: print( "ERROR: unable to find utility path for %s, " "please specify with --utility-path" % (os.name)) return 1 options.serverProfilePath = tempfile.mkdtemp() self.server = ReftestServer(localAutomation, options, self.scriptDir) retVal = self.server.start() if retVal: return retVal retVal = self.server.ensureReady(self.SERVER_STARTUP_TIMEOUT) if retVal: return retVal options.xrePath = remoteXrePath options.utilityPath = remoteUtilityPath return 0 def stopWebServer(self, options): self.server.stop() def killNamedProc(self, pname, orphans=True): """ Kill processes matching the given command name """ self.log.info("Checking for %s processes..." % pname) for proc in psutil.process_iter(): try: if proc.name() == pname: procd = proc.as_dict( attrs=['pid', 'ppid', 'name', 'username']) if proc.ppid() == 1 or not orphans: self.log.info("killing %s" % procd) try: os.kill(proc.pid, getattr(signal, "SIGKILL", signal.SIGTERM)) except Exception as e: self.log.info("Failed to kill process %d: %s" % (proc.pid, str(e))) else: self.log.info("NOT killing %s (not an orphan?)" % procd) except Exception: # may not be able to access process info for all processes continue def createReftestProfile(self, options, **kwargs): profile = RefTest.createReftestProfile(self, options, server=options.remoteWebServer, port=options.httpPort, **kwargs) profileDir = profile.profile prefs = {} prefs["app.update.url.android"] = "" prefs["reftest.remote"] = True prefs[ "datareporting.policy.dataSubmissionPolicyBypassAcceptance"] = True # move necko cache to a location that can be cleaned up prefs["browser.cache.disk.parent_directory"] = self.remoteCache prefs["layout.css.devPixelsPerPx"] = "1.0" # Because Fennec is a little wacky (see bug 
1156817) we need to load the # reftest pages at 1.0 zoom, rather than zooming to fit the CSS viewport. prefs["apz.allow_zooming"] = False # Set the extra prefs. profile.set_preferences(prefs) try: self.device.push(profileDir, options.remoteProfile) self.device.chmod(options.remoteProfile, recursive=True, root=True) except Exception: print "Automation Error: Failed to copy profiledir to device" raise return profile def printDeviceInfo(self, printLogcat=False): try: if printLogcat: logcat = self.device.get_logcat( filter_out_regexps=fennecLogcatFilters) for l in logcat: ul = l.decode('utf-8', errors='replace') sl = ul.encode('iso8859-1', errors='replace') print "%s\n" % sl print "Device info:" devinfo = self.device.get_info() for category in devinfo: if type(devinfo[category]) is list: print " %s:" % category for item in devinfo[category]: print " %s" % item else: print " %s: %s" % (category, devinfo[category]) print "Test root: %s" % self.device.test_root except ADBTimeoutError: raise except Exception as e: print "WARNING: Error getting device information: %s" % str(e) def environment(self, **kwargs): return self.automation.environment(**kwargs) def buildBrowserEnv(self, options, profileDir): browserEnv = RefTest.buildBrowserEnv(self, options, profileDir) # remove desktop environment not used on device if "XPCOM_MEM_BLOAT_LOG" in browserEnv: del browserEnv["XPCOM_MEM_BLOAT_LOG"] return browserEnv def runApp(self, options, cmdargs=None, timeout=None, debuggerInfo=None, symbolsPath=None, valgrindPath=None, valgrindArgs=None, valgrindSuppFiles=None, **profileArgs): if cmdargs is None: cmdargs = [] if self.use_marionette: cmdargs.append('-marionette') binary = options.app profile = self.createReftestProfile(options, **profileArgs) # browser environment env = self.buildBrowserEnv(options, profile.profile) self.log.info("Running with e10s: {}".format(options.e10s)) status, self.lastTestSeen = self.automation.runApp( None, env, binary, profile.profile, cmdargs, 
utilityPath=options.utilityPath, xrePath=options.xrePath, debuggerInfo=debuggerInfo, symbolsPath=symbolsPath, timeout=timeout, e10s=options.e10s) self.cleanup(profile.profile) return status def cleanup(self, profileDir): self.device.rm(self.remoteTestRoot, force=True, recursive=True) self.device.rm(self.remoteProfile, force=True, recursive=True) self.device.rm(self.remoteCache, force=True, recursive=True) RefTest.cleanup(self, profileDir)
    def post_to_treeherder(self, tests):
        """Build and submit a Treeherder job for this test run.

        Gathers device/build metadata via mozversion, resolves the revision
        hash through the Treeherder revision-lookup API, uploads artifacts
        (logcat, log files, reports) to S3, and posts the assembled job.

        :param tests: accepted for interface compatibility; not referenced
            in this body (NOTE(review): confirm whether it can be dropped
            at the call sites).
        """
        version = mozversion.get_version(
            binary=self.bin, sources=self.sources,
            dm_type='adb', device_serial=self.device_serial)
        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()

        device = version.get('device_id')
        device_firmware_version_release = \
            version.get('device_firmware_version_release')
        if not device:
            self.logger.error('Submitting to Treeherder is currently limited '
                              'to devices.')
            return

        # Map the device/firmware pair to a Treeherder group; an unknown
        # combination aborts submission rather than posting a mislabeled job.
        try:
            group = DEVICE_GROUP_MAP[device][device_firmware_version_release]
            job.add_group_name(group['name'])
            job.add_group_symbol(group['symbol'])
            job.add_job_name('Gaia Python Integration Test (%s)' %
                             group['symbol'])
            job.add_job_symbol('Gip')
        except KeyError:
            self.logger.error('Unknown device id: %s or device firmware '
                              'version: %s. Unable to determine Treeherder '
                              'group. Supported devices: %s'
                              % (device, device_firmware_version_release,
                                 ['%s: %s' % (k, [fw for fw in v.keys()])
                                  for k, v in DEVICE_GROUP_MAP.iteritems()]))
            return

        # Determine revision hash from application revision
        revision = version['application_changeset']
        project = version['application_repository'].split('/')[-1]
        lookup_url = urljoin(
            self.treeherder_url,
            'api/project/%s/revision-lookup/?revision=%s' % (
                project, revision))
        self.logger.debug('Getting revision hash from: %s' % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), 'Unable to determine revision hash for %s. ' \
                                'Perhaps it has not been ingested by ' \
                                'Treeherder?' % revision
        revision_hash = response.json()[revision]['revision_hash']
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name('b2g')
        job.add_state('completed')

        # Determine test result
        if self.failed or self.unexpected_successes:
            job.add_result('testfailed')
        else:
            job.add_result('success')

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info('b2g', 'b2g-device-image', 'x86')
        job.add_machine_info('b2g', 'b2g-device-image', 'x86')

        # All B2G device builds are currently opt builds
        job.add_option_collection({'opt': True})

        date_format = '%d %b %Y %H:%M:%S'
        # The `and` guards keep time.strftime from running on missing values:
        # a None/empty field propagates as the detail value unchanged.
        job_details = [{
            'content_type': 'link',
            'title': 'Gaia revision:',
            'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
                   version.get('gaia_changeset'),
            'value': version.get('gaia_changeset'),
        }, {
            'content_type': 'text',
            'title': 'Gaia date:',
            'value': version.get('gaia_date') and time.strftime(
                date_format, time.localtime(int(version.get('gaia_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device identifier:',
            'value': version.get('device_id')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (date):',
            'value': version.get('device_firmware_date') and time.strftime(
                date_format, time.localtime(int(
                    version.get('device_firmware_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device firmware (incremental):',
            'value': version.get('device_firmware_version_incremental')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (release):',
            'value': version.get('device_firmware_version_release')
        }]

        ci_url = os.environ.get('BUILD_URL')
        if ci_url:
            job_details.append({
                'url': ci_url,
                'value': ci_url,
                'content_type': 'link',
                'title': 'CI build:'})

        # Attach logcat
        adb_device = ADBDevice(self.device_serial)
        with tempfile.NamedTemporaryFile(suffix='logcat.txt') as f:
            f.writelines(adb_device.get_logcat())
            self.logger.debug('Logcat stored in: %s' % f.name)
            try:
                url = self.upload_to_s3(f.name)
                job_details.append({
                    'url': url,
                    'value': 'logcat.txt',
                    'content_type': 'link',
                    'title': 'Log:'})
            except S3UploadError:
                # Best-effort: record the failure as a job detail instead of
                # aborting the whole submission.
                job_details.append({
                    'value': 'Failed to upload logcat.txt',
                    'content_type': 'text',
                    'title': 'Error:'})

        # Attach log files
        handlers = [handler for handler in self.logger.handlers
                    if isinstance(handler, StreamHandler) and
                    os.path.exists(handler.stream.name)]
        for handler in handlers:
            path = handler.stream.name
            filename = os.path.split(path)[-1]
            try:
                url = self.upload_to_s3(path)
                job_details.append({
                    'url': url,
                    'value': filename,
                    'content_type': 'link',
                    'title': 'Log:'})
                # Add log reference
                # (only TBPL-formatted logs — possibly wrapped in a
                # LogLevelFilter — are parseable by Treeherder).
                if type(handler.formatter) is TbplFormatter or \
                        type(handler.formatter) is LogLevelFilter and \
                        type(handler.formatter.inner) is TbplFormatter:
                    job.add_log_reference(filename, url)
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload %s' % filename,
                    'content_type': 'text',
                    'title': 'Error:'})

        # Attach reports
        for report in [self.html_output, self.xml_output]:
            if report is not None:
                filename = os.path.split(report)[-1]
                try:
                    url = self.upload_to_s3(report)
                    job_details.append({
                        'url': url,
                        'value': filename,
                        'content_type': 'link',
                        'title': 'Report:'})
                except S3UploadError:
                    job_details.append({
                        'value': 'Failed to upload %s' % filename,
                        'content_type': 'text',
                        'title': 'Error:'})

        if job_details:
            job.add_artifact('Job Info', 'json', {'job_details': job_details})

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(self.treeherder_url)
        request = TreeherderRequest(
            protocol=url.scheme,
            host=url.hostname,
            project=project,
            oauth_key=os.environ.get('TREEHERDER_KEY'),
            oauth_secret=os.environ.get('TREEHERDER_SECRET'))
        self.logger.debug('Sending results to Treeherder: %s' %
                          job_collection.to_json())
        response = request.post(job_collection)
        self.logger.debug('Response: %s' % response.read())
        assert response.status == 200, 'Failed to send results!'
        self.logger.info('Results are available to view at: %s' % (
            urljoin(self.treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' % (
                project, revision))))
class MochiRemote(MochitestDesktop):
    """Mochitest harness for remote (Android) devices.

    Wraps MochitestDesktop, redirecting paths and process control to an
    attached device via mozdevice/ADBDevice, while the web servers, SSL
    tunnel and profile building still run on the host.
    """

    localProfile = None
    logMessages = []

    def __init__(self, options):
        MochitestDesktop.__init__(self, options.flavor, vars(options))

        verbose = False
        if options.log_tbpl_level == 'debug' or options.log_mach_level == 'debug':
            verbose = True
        # The base class stores a log on options; drop it so options can be
        # passed around/pickled without the logger attached.
        if hasattr(options, 'log'):
            delattr(options, 'log')

        self.certdbNew = True
        self.chromePushed = False
        self.mozLogName = "moz.log"

        self.device = ADBDevice(adb=options.adbPath or 'adb',
                                device=options.deviceSerial,
                                test_root=options.remoteTestRoot,
                                verbose=verbose)

        if options.remoteTestRoot is None:
            options.remoteTestRoot = self.device.test_root
        options.dumpOutputDirectory = options.remoteTestRoot
        self.remoteLogFile = posixpath.join(options.remoteTestRoot,
                                            "logs", "mochitest.log")
        logParent = posixpath.dirname(self.remoteLogFile)
        # Start every run with a fresh remote log directory.
        self.device.rm(logParent, force=True, recursive=True)
        self.device.mkdir(logParent)

        self.remoteProfile = posixpath.join(options.remoteTestRoot, "profile/")
        self.device.rm(self.remoteProfile, force=True, recursive=True)

        self.counts = dict()
        self.message_logger = MessageLogger(logger=None)
        self.message_logger.logger = self.log
        process_args = {'messageLogger': self.message_logger,
                        'counts': self.counts}
        self.automation = RemoteAutomation(self.device, options.remoteappname,
                                           self.remoteProfile,
                                           self.remoteLogFile,
                                           processArgs=process_args)
        self.environment = self.automation.environment

        # Check that Firefox is installed
        expected = options.app.split('/')[-1]
        if not self.device.is_app_installed(expected):
            raise Exception("%s is not installed on this device" % expected)

        self.automation.deleteANRs()
        self.automation.deleteTombstones()
        self.device.clear_logcat()

        self.remoteModulesDir = posixpath.join(options.remoteTestRoot,
                                               "modules/")

        self.remoteCache = posixpath.join(options.remoteTestRoot, "cache/")
        self.device.rm(self.remoteCache, force=True, recursive=True)

        # move necko cache to a location that can be cleaned up
        options.extraPrefs += ["browser.cache.disk.parent_directory=%s" %
                               self.remoteCache]

        self.remoteMozLog = posixpath.join(options.remoteTestRoot, "mozlog")
        self.device.rm(self.remoteMozLog, force=True, recursive=True)
        self.device.mkdir(self.remoteMozLog)

        self.remoteChromeTestDir = posixpath.join(
            options.remoteTestRoot, "chrome")
        self.device.rm(self.remoteChromeTestDir, force=True, recursive=True)
        self.device.mkdir(self.remoteChromeTestDir)

        # Kill any leftover app instance before testing starts.
        procName = options.app.split('/')[-1]
        self.device.stop_application(procName)
        if self.device.process_exist(procName):
            self.log.warning("unable to kill %s before running tests!" %
                             procName)

        # Add Android version (SDK level) to mozinfo so that manifest entries
        # can be conditional on android_version.
        self.log.info(
            "Android sdk version '%s'; will use this to filter manifests" %
            str(self.device.version))
        mozinfo.info['android_version'] = str(self.device.version)
        mozinfo.info['isFennec'] = not ('geckoview' in options.app)
        mozinfo.info['is_emulator'] = \
            self.device._device_serial.startswith('emulator-')

    def cleanup(self, options, final=False):
        """Remove remote artifacts; on the final pass also pull mozlogs."""
        if final:
            self.device.rm(self.remoteChromeTestDir, force=True,
                           recursive=True)
            self.chromePushed = False
            uploadDir = os.environ.get('MOZ_UPLOAD_DIR', None)
            if uploadDir and self.device.is_dir(self.remoteMozLog):
                self.device.pull(self.remoteMozLog, uploadDir)
        self.device.rm(self.remoteLogFile, force=True)
        self.device.rm(self.remoteProfile, force=True, recursive=True)
        self.device.rm(self.remoteCache, force=True, recursive=True)
        MochitestDesktop.cleanup(self, options, final)
        self.localProfile = None

    def dumpScreen(self, utilityPath):
        """Capture one screenshot per run (emulator or physical device)."""
        if self.haveDumpedScreen:
            self.log.info(
                "Not taking screenshot here: see the one that was previously logged")
            return
        self.haveDumpedScreen = True
        if self.device._device_serial.startswith('emulator-'):
            dump_screen(utilityPath, self.log)
        else:
            dump_device_screen(self.device, self.log)

    def findPath(self, paths, filename=None):
        """Return the first path in `paths` that exists (optionally joined
        with `filename`), or None."""
        for path in paths:
            p = path
            if filename:
                p = os.path.join(p, filename)
            if os.path.exists(self.getFullPath(p)):
                return path
        return None

    def makeLocalAutomation(self):
        # Build an Automation configured for the *host* OS, not the device.
        localAutomation = Automation()
        localAutomation.IS_WIN32 = False
        localAutomation.IS_LINUX = False
        localAutomation.IS_MAC = False
        localAutomation.UNIXISH = False
        hostos = sys.platform
        if (hostos == 'mac' or hostos == 'darwin'):
            localAutomation.IS_MAC = True
        elif (hostos == 'linux' or hostos == 'linux2'):
            localAutomation.IS_LINUX = True
            localAutomation.UNIXISH = True
        elif (hostos == 'win32' or hostos == 'win64'):
            localAutomation.BIN_SUFFIX = ".exe"
            localAutomation.IS_WIN32 = True
        return localAutomation

    # This seems kludgy, but this class uses paths from the remote host in the
    # options, except when calling up to the base class, which doesn't
    # understand the distinction. This switches out the remote values for local
    # ones that the base class understands. This is necessary for the web
    # server, SSL tunnel and profile building functions.
    def switchToLocalPaths(self, options):
        """ Set local paths in the options, return a function that will restore
        remote values """
        remoteXrePath = options.xrePath
        remoteProfilePath = options.profilePath
        remoteUtilityPath = options.utilityPath

        localAutomation = self.makeLocalAutomation()
        paths = [
            options.xrePath,
            localAutomation.DIST_BIN,
        ]
        options.xrePath = self.findPath(paths)
        if options.xrePath is None:
            self.log.error(
                "unable to find xulrunner path for %s, please specify with --xre-path" %
                os.name)
            sys.exit(1)

        xpcshell = "xpcshell"
        if (os.name == "nt"):
            xpcshell += ".exe"

        if options.utilityPath:
            paths = [options.utilityPath, options.xrePath]
        else:
            paths = [options.xrePath]
        options.utilityPath = self.findPath(paths, xpcshell)

        if options.utilityPath is None:
            self.log.error(
                "unable to find utility path for %s, please specify with --utility-path" %
                os.name)
            sys.exit(1)

        xpcshell_path = os.path.join(options.utilityPath, xpcshell)
        # An ARM xpcshell on the host means a device binary was passed by
        # mistake: the host helpers must be desktop binaries.
        if localAutomation.elf_arm(xpcshell_path):
            self.log.error('xpcshell at %s is an ARM binary; please use '
                           'the --utility-path argument to specify the path '
                           'to a desktop version.' % xpcshell_path)
            sys.exit(1)

        if self.localProfile:
            options.profilePath = self.localProfile
        else:
            options.profilePath = None

        def fixup():
            options.xrePath = remoteXrePath
            options.utilityPath = remoteUtilityPath
            options.profilePath = remoteProfilePath

        return fixup

    def startServers(self, options, debuggerInfo):
        """ Create the servers on the host and start them up """
        restoreRemotePaths = self.switchToLocalPaths(options)
        MochitestDesktop.startServers(
            self,
            options,
            debuggerInfo)
        restoreRemotePaths()

    def buildProfile(self, options):
        """Build the profile on the host, pushing test modules if needed."""
        restoreRemotePaths = self.switchToLocalPaths(options)
        if options.testingModulesDir:
            try:
                self.device.push(options.testingModulesDir,
                                 self.remoteModulesDir)
                self.device.chmod(self.remoteModulesDir, recursive=True,
                                  root=True)
            except Exception:
                self.log.error(
                    "Automation Error: Unable to copy test modules to device.")
                raise
            savedTestingModulesDir = options.testingModulesDir
            options.testingModulesDir = self.remoteModulesDir
        else:
            savedTestingModulesDir = None
        manifest = MochitestDesktop.buildProfile(self, options)
        if savedTestingModulesDir:
            options.testingModulesDir = savedTestingModulesDir
        self.localProfile = options.profilePath

        restoreRemotePaths()
        options.profilePath = self.remoteProfile
        return manifest

    def buildURLOptions(self, options, env):
        """Build URL options against the local profile, then push it."""
        saveLogFile = options.logFile
        options.logFile = self.remoteLogFile
        options.profilePath = self.localProfile
        env["MOZ_HIDE_RESULTS_TABLE"] = "1"
        retVal = MochitestDesktop.buildURLOptions(self, options, env)

        # we really need testConfig.js (for browser chrome)
        try:
            self.device.push(options.profilePath, self.remoteProfile)
            self.device.chmod(self.remoteProfile, recursive=True, root=True)
        except Exception:
            self.log.error(
                "Automation Error: Unable to copy profile to device.")
            raise

        options.profilePath = self.remoteProfile
        options.logFile = saveLogFile
        return retVal

    def getChromeTestDir(self, options):
        """Return the remote chrome test dir, pushing it on first use."""
        local = super(MochiRemote, self).getChromeTestDir(options)
        remote = self.remoteChromeTestDir
        if options.flavor == 'chrome' and not self.chromePushed:
            self.log.info("pushing %s to %s on device..." % (local, remote))
            local = os.path.join(local, "chrome")
            self.device.push(local, remote)
            self.chromePushed = True
        return remote

    def getLogFilePath(self, logFile):
        # Remote log paths are used as-is; no host-side normalization.
        return logFile

    def printDeviceInfo(self, printLogcat=False):
        """Log diagnostic information about the attached device."""
        try:
            if printLogcat:
                logcat = self.device.get_logcat(
                    filter_out_regexps=fennecLogcatFilters)
                for l in logcat:
                    # Re-encode so arbitrary logcat bytes log safely.
                    ul = l.decode('utf-8', errors='replace')
                    sl = ul.encode('iso8859-1', errors='replace')
                    self.log.info(sl)
            self.log.info("Device info:")
            devinfo = self.device.get_info()
            for category in devinfo:
                if type(devinfo[category]) is list:
                    self.log.info("  %s:" % category)
                    for item in devinfo[category]:
                        self.log.info("     %s" % item)
                else:
                    self.log.info("  %s: %s" % (category, devinfo[category]))
            self.log.info("Test root: %s" % self.device.test_root)
        except ADBTimeoutError:
            raise
        except Exception as e:
            # Diagnostics only; never fail the run over device info.
            self.log.warning("Error getting device information: %s" % str(e))

    def getGMPPluginPath(self, options):
        # TODO: bug 1149374
        return None

    def buildBrowserEnv(self, options, debugger=False):
        """Build the browser environment, adapted for on-device runs."""
        browserEnv = MochitestDesktop.buildBrowserEnv(
            self,
            options,
            debugger=debugger)
        # remove desktop environment not used on device
        if "XPCOM_MEM_BLOAT_LOG" in browserEnv:
            del browserEnv["XPCOM_MEM_BLOAT_LOG"]
        # override mozLogs to avoid processing in MochitestDesktop base class
        self.mozLogs = None
        browserEnv["MOZ_LOG_FILE"] = os.path.join(
            self.remoteMozLog,
            self.mozLogName)
        if options.dmd:
            browserEnv['DMD'] = '1'
        # Contents of remoteMozLog will be pulled from device and copied to the
        # host MOZ_UPLOAD_DIR, to be made available as test artifacts. Make
        # MOZ_UPLOAD_DIR available to the browser environment so that tests
        # can use it as though they were running on the host.
        browserEnv["MOZ_UPLOAD_DIR"] = self.remoteMozLog
        return browserEnv

    def runApp(self, *args, **kwargs):
        """front-end automation.py's `runApp` functionality until FennecRunner is written"""
        # automation.py/remoteautomation `runApp` takes the profile path,
        # whereas runtest.py's `runApp` takes a mozprofile object.
        if 'profileDir' not in kwargs and 'profile' in kwargs:
            kwargs['profileDir'] = kwargs.pop('profile').profile

        # remove args not supported by automation.py
        kwargs.pop('marionette_args', None)

        ret, _ = self.automation.runApp(*args, **kwargs)
        self.countpass += self.counts['pass']
        self.countfail += self.counts['fail']
        self.counttodo += self.counts['todo']

        return ret, None
class BrowsertimeAndroid(PerftestAndroid, Browsertime):
    """Android setup and configuration for browsertime

    When running raptor-browsertime tests on android, we create the profile
    (and set the proxy prefs in the profile that is using playback) but we
    don't need to copy it onto the device because geckodriver takes care of
    that. We tell browsertime to use our profile (we pass it in with the
    firefox.profileTemplate arg); browsertime creates a copy of that and
    passes that into geckodriver. Geckodriver then takes the profile and
    copies it onto the mobile device's sdcard for us; and then it even writes
    the geckoview app config.yaml file onto the device, which points the app
    to the profile on the sdcard. Therefore, raptor doesn't have to copy the
    profile onto the scard (and create the config.yaml) file ourselves.

    Also note when using playback, the nss certificate db is created as usual
    when mitmproxy is started (and saved in the profile) so it is already
    included in the profile that browsertime/geckodriver copies onto the
    device.
    """

    def __init__(self, app, binary, activity=None, intent=None, **kwargs):
        super(BrowsertimeAndroid, self).__init__(
            app, binary, profile_class="firefox", **kwargs
        )

        self.config.update({"activity": activity, "intent": intent})
        # Remote (on-device) locations for the test root and profile.
        self.remote_test_root = os.path.abspath(
            os.path.join(os.sep, "sdcard", "raptor")
        )
        self.remote_profile = os.path.join(self.remote_test_root, "profile")

    @property
    def browsertime_args(self):
        """Command-line arguments that point browsertime at the device app."""
        args_list = [
            "--browser", "firefox",
            "--android",
            # Work around a `selenium-webdriver` issue where Browsertime
            # fails to find a Firefox binary even though we're going to
            # actually do things on an Android device.
            "--firefox.binaryPath", self.browsertime_node,
            "--firefox.android.package", self.config["binary"],
            "--firefox.android.activity", self.config["activity"],
        ]

        # if running on Fenix we must add the intent as we use a special
        # non-default one there
        if self.config["app"] == "fenix" and self.config.get("intent") is not None:
            args_list.extend(["--firefox.android.intentArgument=-a"])
            args_list.extend(
                ["--firefox.android.intentArgument", self.config["intent"]]
            )
            args_list.extend(["--firefox.android.intentArgument=-d"])
            args_list.extend(["--firefox.android.intentArgument", "about:blank"])

        return args_list

    def build_browser_profile(self):
        super(BrowsertimeAndroid, self).build_browser_profile()

        # Merge in the Android profile.
        path = os.path.join(self.profile_data_dir, "raptor-android")
        LOG.info("Merging profile: {}".format(path))
        self.profile.merge(path)
        self.profile.set_preferences(
            {"browser.tabs.remote.autostart": self.config["e10s"]}
        )

        # There's no great way to have "after" advice in Python, so we do this
        # in super and then again here since the profile merging re-introduces
        # the "#MozRunner" delimiters.
        self.remove_mozprofile_delimiters_from_profile()

    def setup_adb_device(self):
        """Create the ADB handle and prepare the device for testing.

        Performance tuning requires a rooted device and can be opted out of
        with the 'disable_perf_tuning' config flag, matching the behavior
        of the other perftest Android harnesses.
        """
        if self.device is None:
            self.device = ADBDevice(verbose=True)
            if not self.config.get("disable_perf_tuning", False):
                tune_performance(self.device, log=LOG)

        self.clear_app_data()
        self.set_debug_app_flag()

    def run_test_setup(self, test):
        super(BrowsertimeAndroid, self).run_test_setup(test)

        self.set_reverse_ports()

        if self.playback:
            self.turn_on_android_app_proxy()
        self.remove_mozprofile_delimiters_from_profile()

    def run_tests(self, tests, test_names):
        # The device must be ready before any test runs.
        self.setup_adb_device()
        return super(BrowsertimeAndroid, self).run_tests(tests, test_names)

    def run_test_teardown(self, test):
        LOG.info("removing reverse socket connections")
        self.device.remove_socket_connections("reverse")

        super(BrowsertimeAndroid, self).run_test_teardown(test)
def create_browser_handler(self): # create the android device handler; it gets initiated and sets up adb etc self.log.info("creating android device handler using mozdevice") self.device = ADBDevice(verbose=True) self.device.clear_logcat() self.clear_app_data()
class RaptorAndroid(Raptor):
    """Raptor harness for Android (fennec/geckoview) devices.

    Pushes the browser profile onto the device sdcard, drives the app via
    adb, optionally records power usage, and pulls minidumps for crash
    reporting.
    """

    def __init__(self, app, binary, run_local=False, obj_path=None,
                 gecko_profile=False, gecko_profile_interval=None,
                 gecko_profile_entries=None, symbols_path=None, host=None,
                 power_test=False, is_release_build=False, debug_mode=False,
                 activity=None):
        Raptor.__init__(self, app, binary, run_local, obj_path, gecko_profile,
                        gecko_profile_interval, gecko_profile_entries,
                        symbols_path, host, power_test, is_release_build,
                        debug_mode)

        # on android, when creating the browser profile, we want to use a
        # 'firefox' type profile
        self.profile_class = "firefox"
        self.config['activity'] = activity

    def create_browser_handler(self):
        # create the android device handler; it gets initiated and sets up
        # adb etc
        self.log.info("creating android device handler using mozdevice")
        self.device = ADBDevice(verbose=True)
        self.device.clear_logcat()
        self.clear_app_data()

    def clear_app_data(self):
        """Wipe the app's data on device (pm clear)."""
        self.log.info("clearing %s app data" % self.config['binary'])
        self.device.shell("pm clear %s" % self.config['binary'])

    def create_raptor_sdcard_folder(self):
        # for android/geckoview, create a top-level raptor folder on the device
        # sdcard; if it already exists remove it so we start fresh each time
        self.device_raptor_dir = "/sdcard/raptor"
        self.config['device_raptor_dir'] = self.device_raptor_dir
        if self.device.is_dir(self.device_raptor_dir):
            self.log.info("deleting existing device raptor dir: %s" %
                          self.device_raptor_dir)
            self.device.rm(self.device_raptor_dir, recursive=True)
        self.log.info("creating raptor folder on sdcard: %s" %
                      self.device_raptor_dir)
        self.device.mkdir(self.device_raptor_dir)
        self.device.chmod(self.device_raptor_dir, recursive=True)

    def copy_profile_onto_device(self):
        # for geckoview/fennec we must copy the profile onto the device and
        # set perms
        if not self.device.is_app_installed(self.config['binary']):
            raise Exception('%s is not installed' % self.config['binary'])
        self.device_profile = os.path.join(self.device_raptor_dir, "profile")

        if self.device.is_dir(self.device_profile):
            self.log.info("deleting existing device profile folder: %s" %
                          self.device_profile)
            self.device.rm(self.device_profile, recursive=True)
        self.log.info("creating profile folder on device: %s" %
                      self.device_profile)
        self.device.mkdir(self.device_profile)

        self.log.info("copying firefox profile onto the device")
        self.log.info("note: the profile folder being copied is: %s" %
                      self.profile.profile)
        self.log.info(
            'the adb push cmd copies that profile dir to a new temp dir before copy')
        self.device.push(self.profile.profile, self.device_profile)
        self.device.chmod(self.device_profile, recursive=True)

    def turn_on_android_app_proxy(self):
        # for geckoview/android pageload playback we can't use a policy to
        # turn on the proxy; we need to set prefs instead; note that the
        # 'host' may be different than '127.0.0.1' so we must set the prefs
        # accordingly
        self.log.info("setting profile prefs to turn on the android app proxy")
        proxy_prefs = {}
        proxy_prefs["network.proxy.type"] = 1
        proxy_prefs["network.proxy.http"] = self.config['host']
        proxy_prefs["network.proxy.http_port"] = 8080
        proxy_prefs["network.proxy.ssl"] = self.config['host']
        proxy_prefs["network.proxy.ssl_port"] = 8080
        proxy_prefs["network.proxy.no_proxies_on"] = self.config['host']
        self.profile.set_preferences(proxy_prefs)

    def launch_firefox_android_app(self):
        """Launch fennec or a geckoview activity with the pushed profile."""
        self.log.info("starting %s" % self.config['app'])

        extra_args = ["-profile", self.device_profile,
                      "--es", "env0", "LOG_VERBOSE=1",
                      "--es", "env1", "R_LOG_LEVEL=6"]

        try:
            # make sure the android app is not already running
            self.device.stop_application(self.config['binary'])

            if self.config['app'] == "fennec":
                self.device.launch_fennec(self.config['binary'],
                                          extra_args=extra_args,
                                          url='about:blank',
                                          fail_if_running=False)
            else:
                self.device.launch_activity(self.config['binary'],
                                            self.config['activity'],
                                            extra_args=extra_args,
                                            url='about:blank',
                                            e10s=True,
                                            fail_if_running=False)
        except Exception as e:
            self.log.error("Exception launching %s" % self.config['binary'])
            self.log.error("Exception: %s %s" % (type(e).__name__, str(e)))
            # Close out the power measurement before propagating the failure.
            if self.config['power_test']:
                finish_geckoview_power_test(self)
            raise

        # give our control server the device and app info
        self.control_server.device = self.device
        self.control_server.app_name = self.config['binary']

    def run_test(self, test, timeout=None):
        """Run a single raptor test on the device end-to-end."""
        if self.config['power_test']:
            init_geckoview_power_test(self)

        self.run_test_setup(test)
        self.create_raptor_sdcard_folder()

        if test.get('playback', None) is not None:
            self.start_playback(test)

        # A non-local host means the profile's localhost-only proxy settings
        # must be removed before the app-proxy prefs are applied.
        if self.config['host'] not in ('localhost', '127.0.0.1'):
            self.delete_proxy_settings_from_profile()

        if test.get('playback', None) is not None:
            self.turn_on_android_app_proxy()

        self.copy_profile_onto_device()

        # now start the browser/app under test
        self.launch_firefox_android_app()

        # set our control server flag to indicate we are running the
        # browser/app
        self.control_server._finished = False

        self.wait_for_test_finish(test, timeout)

        if self.config['power_test']:
            finish_geckoview_power_test(self)

        self.run_test_teardown()

        # in debug mode, and running locally, leave the browser running
        if self.debug_mode and self.config['run_local']:
            self.log.info(
                "* debug-mode enabled - please shutdown the browser manually...")
            self.runner.wait(timeout=None)

    def check_for_crashes(self):
        """Check logcat for Java exceptions and pull/report any minidumps."""
        # Turn off verbose to prevent logcat from being inserted into the
        # main log.
        verbose = self.device._verbose
        self.device._verbose = False
        logcat = self.device.get_logcat()
        self.device._verbose = verbose
        if logcat:
            if mozcrash.check_for_java_exception(logcat, "raptor"):
                return

        # NOTE(review): if mkdtemp() itself raised, dump_dir would be unbound
        # in the finally block — confirm whether that case needs guarding.
        try:
            dump_dir = tempfile.mkdtemp()
            remote_dir = posixpath.join(self.device_profile, 'minidumps')
            if not self.device.is_dir(remote_dir):
                self.log.error(
                    "No crash directory (%s) found on remote device" %
                    remote_dir)
                return
            self.device.pull(remote_dir, dump_dir)
            mozcrash.log_crashes(self.log, dump_dir,
                                 self.config['symbols_path'])
        finally:
            try:
                shutil.rmtree(dump_dir)
            except Exception:
                self.log.warning("unable to remove directory: %s" % dump_dir)
def run_tests_remote(tests, num_tests, prefix, options, slog): # Setup device with everything needed to run our tests. from mozdevice import ADBDevice, ADBError, ADBTimeoutError try: device = ADBDevice(device=options.device_serial, test_root=options.remote_test_root) init_remote_dir(device, options.remote_test_root) # Update the test root to point to our test directory. jit_tests_dir = posixpath.join(options.remote_test_root, 'jit-tests') options.remote_test_root = posixpath.join(jit_tests_dir, 'jit-tests') # Push js shell and libraries. init_remote_dir(device, jit_tests_dir) push_libs(options, device) push_progs(options, device, [prefix[0]]) device.chmod(options.remote_test_root, recursive=True, root=True) jtd_tests = posixpath.join(jit_tests_dir, 'tests') init_remote_dir(device, jtd_tests) device.push(JS_TESTS_DIR, jtd_tests, timeout=600) device.chmod(jtd_tests, recursive=True, root=True) device.push(os.path.dirname(TEST_DIR), options.remote_test_root, timeout=600) device.chmod(options.remote_test_root, recursive=True, root=True) prefix[0] = os.path.join(options.remote_test_root, 'js') except (ADBError, ADBTimeoutError): print("TEST-UNEXPECTED-FAIL | jit_test.py" + " : Device initialization failed") raise # Run all tests. pb = create_progressbar(num_tests, options) try: gen = get_remote_results(tests, device, prefix, options) ok = process_test_results(gen, num_tests, pb, options, slog) except (ADBError, ADBTimeoutError): print("TEST-UNEXPECTED-FAIL | jit_test.py" + " : Device error during test") raise return ok
def main():
    """Bitbar device-wrapper entry point.

    Parses the payload command from argv, prepares the environment and the
    attached Android device, runs the payload while streaming its output,
    then restores the device and returns the payload's exit code.
    """
    parser = argparse.ArgumentParser(
        usage='%(prog)s [options] <test command> (<test command option> ...)',
        description="Wrapper script for tests run on physical Android devices at Bitbar. Runs the provided command "
                    "wrapped with required setup and teardown.")
    # Everything not recognized by the parser is the payload command.
    _args, extra_args = parser.parse_known_args()

    logging.basicConfig(format='%(asctime)-15s %(levelname)s %(message)s',
                        level=logging.INFO,
                        stream=sys.stdout)

    print('\nscript.py: starting')
    with open('/builds/worker/version') as versionfile:
        version = versionfile.read().strip()
    print('\nDockerfile version {}'.format(version))

    taskcluster_debug = '*'

    task_cwd = os.getcwd()
    print('Current working directory: {}'.format(task_cwd))

    with open('/builds/taskcluster/scriptvars.json') as scriptvars:
        scriptvarsenv = json.loads(scriptvars.read())

    print('Bitbar test run: https://mozilla.testdroid.com/#testing/device-session/{}/{}/{}'.format(
        scriptvarsenv['TESTDROID_PROJECT_ID'],
        scriptvarsenv['TESTDROID_BUILD_ID'],
        scriptvarsenv['TESTDROID_RUN_ID']))

    env = dict(os.environ)

    # Ensure the Android SDK tools are reachable from the payload.
    if 'PATH' in os.environ:
        path = os.environ['PATH']
    else:
        path = '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin'
    path += ':/builds/worker/android-sdk-linux/tools/bin:/builds/worker/android-sdk-linux/platform-tools'
    env['PATH'] = os.environ['PATH'] = path
    env['NEED_XVFB'] = 'false'
    env['DEVICE_NAME'] = scriptvarsenv['DEVICE_NAME']
    env['ANDROID_DEVICE'] = scriptvarsenv['ANDROID_DEVICE']
    env['DEVICE_SERIAL'] = scriptvarsenv['DEVICE_SERIAL']
    env['HOST_IP'] = scriptvarsenv['HOST_IP']
    env['DEVICE_IP'] = scriptvarsenv['DEVICE_IP']
    env['DOCKER_IMAGE_VERSION'] = scriptvarsenv['DOCKER_IMAGE_VERSION']

    if 'HOME' not in env:
        env['HOME'] = '/builds/worker'
        print('setting HOME to {}'.format(env['HOME']))

    show_df()

    # If we are running normal tests we will be connected via usb and
    # there should be only one device connected. If we are running
    # power tests, the framework will have already called adb tcpip
    # 5555 on the device before it disconnected usb. There should be
    # no devices connected and we will need to perform an adb connect
    # to connect to the device. DEVICE_SERIAL will be set to either
    # the device's serial number or its ipaddress:5555 by the framework.
    try:
        adbhost = ADBHost(verbose=True)
        if env['DEVICE_SERIAL'].endswith(':5555'):
            # Power testing with adb over wifi.
            adbhost.command_output(["connect", env['DEVICE_SERIAL']])
        devices = adbhost.devices()
        print(json.dumps(devices, indent=4))
        if len(devices) != 1:
            fatal('Must have exactly one connected device. {} found.'.format(
                len(devices)), retry=True)
    except (ADBError, ADBTimeoutError) as e:
        fatal('{} Unable to obtain attached devices'.format(e), retry=True)

    # Dump any adb server logs for diagnosis; failures here are non-fatal.
    try:
        for f in glob('/tmp/adb.*.log'):
            print('\n{}:\n'.format(f))
            with open(f) as afile:
                print(afile.read())
    except Exception as e:
        print('{} while reading adb logs'.format(e))

    print('Connecting to Android device {}'.format(env['DEVICE_SERIAL']))
    try:
        device = ADBDevice(device=env['DEVICE_SERIAL'])
        android_version = device.get_prop('ro.build.version.release')
        print('Android device version (ro.build.version.release): {}'.format(
            android_version))
        # this can explode if an unknown device, explode now vs in an hour...
        device_type = get_device_type(device)

        # set device to UTC
        if device.is_rooted:
            device.shell_output('setprop persist.sys.timezone "UTC"',
                                timeout=ADB_COMMAND_TIMEOUT)
        # show date for visual confirmation
        device_datetime = device.shell_output("date",
                                              timeout=ADB_COMMAND_TIMEOUT)
        print('Android device datetime: {}'.format(device_datetime))

        # clean up the device.
        device.rm('/data/local/tests', recursive=True, force=True)
        device.rm('/data/local/tmp/*', recursive=True, force=True)
        device.rm('/data/local/tmp/xpcb', recursive=True, force=True)
        device.rm('/sdcard/tests', recursive=True, force=True)
        device.rm('/sdcard/raptor-profile', recursive=True, force=True)
    except (ADBError, ADBTimeoutError) as e:
        fatal("{} attempting to clean up device".format(e), retry=True)

    if taskcluster_debug:
        env['DEBUG'] = taskcluster_debug

    print('environment = {}'.format(json.dumps(env, indent=4)))

    # run the payload's command and ensure that:
    # - all output is printed
    # - no deadlock occurs between proc.poll() and sys.stdout.readline()
    # - more info
    #   - https://bugzilla.mozilla.org/show_bug.cgi?id=1611936
    #   - https://stackoverflow.com/questions/58471094/python-subprocess-readline-hangs-cant-use-normal-options
    print("script.py: running command '%s'" % ' '.join(extra_args))
    rc = None
    proc = subprocess.Popen(extra_args,
                            # use standard os buffer size
                            bufsize=-1,
                            env=env,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            close_fds=True)
    # Create the queue instance
    q = queue.Queue()
    # Kick off the monitoring thread
    # (a daemon thread feeds the payload's stdout into the queue so the
    # main loop can poll without blocking on readline).
    thread = threading.Thread(target=_monitor_readline, args=(proc, q))
    thread.daemon = True
    thread.start()
    start = datetime.now()
    while True:
        time.sleep(0.1)
        bail = True
        rc = proc.poll()
        if rc is None:
            bail = False
            # Re-set the thread timer
            start = datetime.now()
        out = ""
        while not q.empty():
            out += q.get()
        if out:
            print(out.rstrip())

        # In the case where the thread is still alive and reading, and
        # the process has exited and finished, give it up to X seconds
        # to finish reading
        if bail and thread.is_alive() and \
                (datetime.now() - start).total_seconds() < 5:
            bail = False
        if bail:
            break

    print("script.py: command finished")

    # enable charging on device if it is disabled
    # see https://bugzilla.mozilla.org/show_bug.cgi?id=1565324
    enable_charging(device, device_type)

    # Restore usb mode / tear down the adb server; best-effort only.
    try:
        if env['DEVICE_SERIAL'].endswith(':5555'):
            device.command_output(["usb"])
            adbhost.command_output(["disconnect", env['DEVICE_SERIAL']])
        adbhost.kill_server()
    except (ADBError, ADBTimeoutError) as e:
        print('{} attempting adb kill-server'.format(e))

    try:
        print('\nnetstat -aop\n%s\n\n' % subprocess.check_output(
            ['netstat', '-aop'],
            stderr=subprocess.STDOUT).decode())
    except subprocess.CalledProcessError as e:
        print('{} attempting netstat'.format(e))

    show_df()

    print('script.py: exiting with exitcode {}.'.format(rc))
    return rc
def run(self, script, address='localhost:2828', symbols=None,
        treeherder='https://treeherder.mozilla.org/', reset=False,
        **kwargs):
    """Execute an Orangutan monkey script on the attached B2G device.

    :param script: local path of the Orangutan script to push and run.
    :param address: Marionette ``host:port`` address string.
    :param symbols: path to breakpad symbols, forwarded to the runner for
        crash processing.
    :param treeherder: base URL used when reporting results.
    :param reset: when True, restart B2G and reset Gaia state before running.
    :raises ValueError: if *address* is not in ``host:port`` form.
    :raises Exception: if the Orangutan binary is not present on the device.
    """
    try:
        host, port = address.split(':')
    except ValueError:
        raise ValueError('--address must be in the format host:port')

    # Check that Orangutan is installed.
    # NOTE(review): this path is relative ('data/local/orng', no leading
    # '/'); it presumably resolves against the adb shell's root cwd —
    # confirm on target devices.
    self.adb_device = ADBDevice(self.device_serial)
    orng_path = posixpath.join('data', 'local', 'orng')
    if not self.adb_device.exists(orng_path):
        raise Exception('Orangutan not found! Please install it according '
                        'to the documentation.')

    self.runner = B2GDeviceRunner(
        serial=self.device_serial,
        process_args={'stream': None},
        symbols_path=symbols,
        logdir=self.temp_dir)

    if reset:
        # Full restart of B2G; connect() alone reuses the running instance.
        self.runner.start()
    else:
        self.runner.device.connect()

    # 'port' is rebound here: the string from the address becomes the
    # local forwarded port number returned by the device.
    port = self.runner.device.setup_port_forwarding(remote_port=port)
    assert self.runner.device.wait_for_port(port), \
        'Timed out waiting for port!'

    marionette = Marionette(host=host, port=port)
    marionette.start_session()

    try:
        # Record whether this is a debug build; used later when posting
        # the option collection to Treeherder.
        marionette.set_context(marionette.CONTEXT_CHROME)
        self.is_debug = marionette.execute_script(
            'return Components.classes["@mozilla.org/xpcom/debug;1"].'
            'getService(Components.interfaces.nsIDebug2).isDebugBuild;')
        marionette.set_context(marionette.CONTEXT_CONTENT)

        if reset:
            # Bring the device to a known state: unlocked, no running apps.
            gaia_device = GaiaDevice(marionette)
            gaia_device.wait_for_b2g_ready(timeout=120)
            gaia_device.unlock()
            gaia_apps = GaiaApps(marionette)
            gaia_apps.kill_all()

        # TODO: Disable bluetooth, emergency calls, carrier, etc

        # Run Orangutan script; start/end times bracket the device-side
        # execution and are reported to Treeherder.
        remote_script = posixpath.join(self.adb_device.test_root,
                                       'orng.script')
        self.adb_device.push(script, remote_script)
        self.start_time = time.time()
        # TODO: Kill remote process on keyboard interrupt
        self.adb_device.shell('%s %s %s' % (orng_path,
                                            self.device_properties['input'],
                                            remote_script))
        self.end_time = time.time()
        self.adb_device.rm(remote_script)
    except (MarionetteException, IOError):
        if self.runner.crashed:
            # Crash has been detected; swallow the exception so the crash
            # is reported via check_for_crashes() below instead.
            pass
        else:
            raise
    self.runner.check_for_crashes(test_name='b2gmonkey')

    # Report results to Treeherder only when both credentials are present.
    required_envs = ['TREEHERDER_KEY', 'TREEHERDER_SECRET']
    if all([os.environ.get(v) for v in required_envs]):
        self.post_to_treeherder(script, treeherder)
    else:
        self._logger.info(
            'Results will not be posted to Treeherder. Please set the '
            'following environment variables to enable Treeherder '
            'reports: %s' % ', '.join([
                v for v in required_envs if not os.environ.get(v)]))
class XPCShellRemote(xpcshell.XPCShellTests, object):
    """Run xpcshell tests on a remote Android device over adb.

    Pushes the xpcshell binary, support binaries, components, modules and
    the test tree to the device, then drives test execution through a small
    wrapper shell script to keep per-test adb command lines short.
    """

    def __init__(self, options, log):
        # 'options' is a dict here (unlike some sibling harnesses that use
        # an argparse namespace).
        xpcshell.XPCShellTests.__init__(self, log)
        self.options = options
        verbose = False
        if options['log_tbpl_level'] == 'debug' or options['log_mach_level'] == 'debug':
            verbose = True
        self.device = ADBDevice(adb=options['adbPath'] or 'adb',
                                device=options['deviceSerial'],
                                test_root=options['remoteTestRoot'],
                                verbose=verbose)
        self.remoteTestRoot = posixpath.join(self.device.test_root, "xpc")
        # Add Android version (SDK level) to mozinfo so that manifest entries
        # can be conditional on android_version.
        mozinfo.info['android_version'] = self.device.version
        self.localBin = options['localBin']
        self.pathMapping = []
        # remoteBinDir contains xpcshell and its wrapper script, both of which must
        # be executable. Since +x permissions cannot usually be set on /mnt/sdcard,
        # and the test root may be on /mnt/sdcard, remoteBinDir is set to be on
        # /data/local, always.
        self.remoteBinDir = posixpath.join("/data", "local", "xpcb")
        # Terse directory names are used here ("c" for the components directory)
        # to minimize the length of the command line used to execute
        # xpcshell on the remote device. adb has a limit to the number
        # of characters used in a shell command, and the xpcshell command
        # line can be quite complex.
        self.remoteTmpDir = posixpath.join(self.remoteTestRoot, "tmp")
        self.remoteScriptsDir = self.remoteTestRoot
        self.remoteComponentsDir = posixpath.join(self.remoteTestRoot, "c")
        self.remoteModulesDir = posixpath.join(self.remoteTestRoot, "m")
        self.remoteMinidumpDir = posixpath.join(self.remoteTestRoot, "minidumps")
        self.profileDir = posixpath.join(self.remoteTestRoot, "p")
        self.remoteDebugger = options['debugger']
        self.remoteDebuggerArgs = options['debuggerArgs']
        self.testingModulesDir = options['testingModulesDir']
        self.env = {}
        if options['objdir']:
            self.xpcDir = os.path.join(options['objdir'], "_tests/xpcshell")
        elif os.path.isdir(os.path.join(here, 'tests')):
            self.xpcDir = os.path.join(here, 'tests')
        else:
            print("Couldn't find local xpcshell test directory", file=sys.stderr)
            sys.exit(1)
        self.localAPKContents = ZipFile(options['localAPK'])
        if options['setup']:
            # Expensive one-time device setup; skipped on repeated runs
            # against an already-prepared device.
            self.setupTestDir()
            self.setupUtilities()
            self.setupModules()
        self.initDir(self.remoteMinidumpDir)
        self.remoteAPK = None
        self.remoteAPK = posixpath.join(self.remoteBinDir,
                                        os.path.basename(options['localAPK']))
        self.setAppRoot()

        # data that needs to be passed to the RemoteXPCShellTestThread
        self.mobileArgs = {
            'device': self.device,
            'remoteBinDir': self.remoteBinDir,
            'remoteScriptsDir': self.remoteScriptsDir,
            'remoteComponentsDir': self.remoteComponentsDir,
            'remoteModulesDir': self.remoteModulesDir,
            'options': self.options,
            'remoteDebugger': self.remoteDebugger,
            'pathMapping': self.pathMapping,
            'profileDir': self.profileDir,
            'remoteTmpDir': self.remoteTmpDir,
            'remoteMinidumpDir': self.remoteMinidumpDir,
        }
        if self.remoteAPK:
            self.mobileArgs['remoteAPK'] = self.remoteAPK

    def initDir(self, path, mask="777", timeout=None, root=True):
        """Initialize a directory by removing it if it exists, creating it
        and changing the permissions."""
        self.device.rm(path, recursive=True, force=True, timeout=timeout, root=root)
        self.device.mkdir(path, parents=True, timeout=timeout, root=root)
        self.device.chmod(path, recursive=True, mask=mask,
                          timeout=timeout, root=root)

    def setLD_LIBRARY_PATH(self):
        # Shared libraries are pushed next to the xpcshell binary.
        self.env["LD_LIBRARY_PATH"] = self.remoteBinDir

    def pushWrapper(self):
        # Rather than executing xpcshell directly, this wrapper script is
        # used. By setting environment variables and the cwd in the script,
        # the length of the per-test command line is shortened. This is
        # often important when using ADB, as there is a limit to the length
        # of the ADB command line.
        # NOTE(review): dict.iteritems() is Python 2 only.
        localWrapper = tempfile.mktemp()
        f = open(localWrapper, "w")
        f.write("#!/system/bin/sh\n")
        for envkey, envval in self.env.iteritems():
            f.write("export %s=%s\n" % (envkey, envval))
        f.writelines([
            "cd $1\n",
            "echo xpcw: cd $1\n",
            "shift\n",
            "echo xpcw: xpcshell \"$@\"\n",
            "%s/xpcshell \"$@\"\n" % self.remoteBinDir])
        f.close()
        remoteWrapper = posixpath.join(self.remoteBinDir, "xpcw")
        self.device.push(localWrapper, remoteWrapper)
        self.device.chmod(remoteWrapper, root=True)
        os.remove(localWrapper)

    def buildPrefsFile(self, extraPrefs):
        # Build the prefs file locally (base class), push it to the device,
        # then point self.prefsFile at the remote copy.
        prefs = super(XPCShellRemote, self).buildPrefsFile(extraPrefs)
        remotePrefsFile = posixpath.join(self.remoteTestRoot, 'user.js')
        self.device.push(self.prefsFile, remotePrefsFile)
        self.device.chmod(remotePrefsFile, root=True)
        os.remove(self.prefsFile)
        self.prefsFile = remotePrefsFile
        return prefs

    def buildEnvironment(self):
        """Populate self.env with the variables the remote xpcshell needs."""
        self.buildCoreEnvironment()
        self.setLD_LIBRARY_PATH()
        self.env["MOZ_LINKER_CACHE"] = self.remoteBinDir
        if self.appRoot:
            self.env["GRE_HOME"] = self.appRoot
        self.env["XPCSHELL_TEST_PROFILE_DIR"] = self.profileDir
        self.env["TMPDIR"] = self.remoteTmpDir
        self.env["HOME"] = self.profileDir
        self.env["XPCSHELL_TEST_TEMP_DIR"] = self.remoteTmpDir
        self.env["XPCSHELL_MINIDUMP_DIR"] = self.remoteMinidumpDir
        self.env["MOZ_ANDROID_CPU_ABI"] = self.device.get_prop("ro.product.cpu.abi")
        if self.options['setup']:
            # The wrapper bakes self.env in, so it must be (re)pushed after
            # the environment is built.
            self.pushWrapper()

    def setAppRoot(self):
        # Determine the application root directory associated with the package
        # name used by the APK.
        self.appRoot = None
        packageName = None
        try:
            packageName = self.localAPKContents.read("package-name.txt")
        except Exception as e:
            # Fall back to the geckoview test package when the APK carries
            # no package-name.txt.
            print("unable to determine app root; assuming geckoview: " + str(e))
            packageName = "org.mozilla.geckoview.test"
        if packageName:
            self.appRoot = posixpath.join("/data", "data", packageName.strip())

    def setupUtilities(self):
        """Push xpcshell, helper binaries and httpd components to the device."""
        self.initDir(self.remoteTmpDir)
        self.initDir(self.remoteBinDir)
        remotePrefDir = posixpath.join(self.remoteBinDir, "defaults", "pref")
        self.initDir(posixpath.join(remotePrefDir, "extra"))
        self.initDir(self.remoteComponentsDir)

        local = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'head.js')
        remoteFile = posixpath.join(self.remoteScriptsDir, "head.js")
        self.device.push(local, remoteFile)
        self.device.chmod(remoteFile, root=True)

        # The xpcshell binary is required for all tests. Additional binaries
        # are required for some tests. This list should be similar to
        # TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
        binaries = ["xpcshell",
                    "ssltunnel",
                    "certutil",
                    "pk12util",
                    "BadCertServer",
                    "OCSPStaplingServer",
                    "GenerateOCSPResponse",
                    "SymantecSanctionsServer"]
        for fname in binaries:
            local = os.path.join(self.localBin, fname)
            if os.path.isfile(local):
                print("Pushing %s.." % fname, file=sys.stderr)
                remoteFile = posixpath.join(self.remoteBinDir, fname)
                self.device.push(local, remoteFile)
                self.device.chmod(remoteFile, root=True)
            else:
                # Missing binaries are reported but not fatal; only some
                # tests need them.
                print("*** Expected binary %s not found in %s!"
                      % (fname, self.localBin), file=sys.stderr)

        local = os.path.join(self.localBin, "components/httpd.js")
        remoteFile = posixpath.join(self.remoteComponentsDir, "httpd.js")
        self.device.push(local, remoteFile)
        self.device.chmod(remoteFile, root=True)

        local = os.path.join(self.localBin, "components/httpd.manifest")
        remoteFile = posixpath.join(self.remoteComponentsDir, "httpd.manifest")
        self.device.push(local, remoteFile)
        self.device.chmod(remoteFile, root=True)

        remoteFile = posixpath.join(self.remoteBinDir,
                                    os.path.basename(self.options['localAPK']))
        self.device.push(self.options['localAPK'], remoteFile)
        self.device.chmod(remoteFile, root=True)

        self.pushLibs()

    def pushLibs(self):
        """Extract .so libraries from the APK and push them to remoteBinDir.

        Returns the number of libraries pushed.
        """
        elfhack = os.path.join(self.localBin, 'elfhack')
        if not os.path.exists(elfhack):
            elfhack = None
        pushed_libs_count = 0
        try:
            dir = tempfile.mkdtemp()
            for info in self.localAPKContents.infolist():
                if info.filename.endswith(".so"):
                    print("Pushing %s.." % info.filename, file=sys.stderr)
                    remoteFile = posixpath.join(self.remoteBinDir,
                                                os.path.basename(info.filename))
                    self.localAPKContents.extract(info, dir)
                    localFile = os.path.join(dir, info.filename)
                    with open(localFile) as f:
                        # Decompress xz-compressed file.
                        # (bytes 1-4 of the xz magic are '7zXZ')
                        if f.read(5)[1:] == '7zXZ':
                            cmd = ['xz', '-df', '--suffix', '.so', localFile]
                            subprocess.check_output(cmd)
                            # xz strips the ".so" file suffix.
                            os.rename(localFile[:-3], localFile)
                    # elfhack -r should provide better crash reports
                    if elfhack:
                        cmd = [elfhack, '-r', localFile]
                        subprocess.check_output(cmd)
                    self.device.push(localFile, remoteFile)
                    pushed_libs_count += 1
                    self.device.chmod(remoteFile, root=True)
        finally:
            shutil.rmtree(dir)
        return pushed_libs_count

    def setupModules(self):
        # Testing modules are optional; push only when provided.
        if self.testingModulesDir:
            self.device.push(self.testingModulesDir, self.remoteModulesDir)
            self.device.chmod(self.remoteModulesDir, root=True)

    def setupTestDir(self):
        print('pushing %s' % self.xpcDir)
        # The tests directory can be quite large: 5000 files and growing!
        # Sometimes - like on a low-end aws instance running an emulator - the push
        # may exceed the default 5 minute timeout, so we increase it here to 10 minutes.
        self.initDir(self.remoteScriptsDir)
        self.device.push(self.xpcDir, self.remoteScriptsDir, timeout=600)
        self.device.chmod(self.remoteScriptsDir, recursive=True, root=True)

    def buildTestList(self, test_tags=None, test_paths=None, verify=False):
        """Build the test list, then record local->remote dir mappings."""
        xpcshell.XPCShellTests.buildTestList(
            self, test_tags=test_tags, test_paths=test_paths, verify=verify)
        uniqueTestPaths = set([])
        for test in self.alltests:
            uniqueTestPaths.add(test['here'])
        for testdir in uniqueTestPaths:
            # Map each local test directory to its pushed location so test
            # paths can be translated when running on the device.
            abbrevTestDir = os.path.relpath(testdir, self.xpcDir)
            remoteScriptDir = posixpath.join(self.remoteScriptsDir, abbrevTestDir)
            self.pathMapping.append(PathMapping(testdir, remoteScriptDir))
def run_tests_remote(tests, num_tests, prefix, options, slog):
    """Set up an Android device and run the jit-tests on it.

    :param tests: iterable of tests to run.
    :param num_tests: total test count, used to size the progress bar.
    :param prefix: one-element list holding the js shell path; element 0 is
        rewritten in place to point at the shell pushed to the device.
    :param options: harness options (device_serial, remote_test_root, ...);
        options.remote_test_root is rewritten to the on-device test dir.
    :param slog: structured log sink, forwarded to result processing.
    :returns: True when all tests passed, as reported by
        process_test_results.
    """
    # Setup device with everything needed to run our tests.
    from mozdevice import ADBDevice
    device = ADBDevice(device=options.device_serial,
                       test_root=options.remote_test_root)

    init_remote_dir(device, options.remote_test_root)

    # Update the test root to point to our test directory.
    jit_tests_dir = posixpath.join(options.remote_test_root, 'jit-tests')
    options.remote_test_root = posixpath.join(jit_tests_dir, 'jit-tests')

    # Push js shell and libraries.
    init_remote_dir(device, jit_tests_dir)
    push_libs(options, device)
    push_progs(options, device, [prefix[0]])
    device.chmod(options.remote_test_root, recursive=True, root=True)

    JitTest.CacheDir = posixpath.join(options.remote_test_root, '.js-cache')
    init_remote_dir(device, JitTest.CacheDir)

    jtd_tests = posixpath.join(jit_tests_dir, 'tests')
    init_remote_dir(device, jtd_tests)
    device.push(JS_TESTS_DIR, jtd_tests, timeout=600)
    device.chmod(jtd_tests, recursive=True, root=True)

    device.push(os.path.dirname(TEST_DIR), options.remote_test_root,
                timeout=600)
    device.chmod(options.remote_test_root, recursive=True, root=True)
    # BUGFIX: this is a path on the device, so it must be built with
    # posixpath, not os.path; os.path.join would produce backslashes when
    # the harness runs on a Windows host.
    prefix[0] = posixpath.join(options.remote_test_root, 'js')

    # Run all tests.
    pb = create_progressbar(num_tests, options)
    gen = get_remote_results(tests, device, prefix, options)
    ok = process_test_results(gen, num_tests, pb, options, slog)
    return ok
def __init__(self, options, log):
    """Initialize the remote xpcshell harness.

    :param options: dict of harness options (adbPath, deviceSerial,
        remoteTestRoot, localBin, localAPK, setup, ...).
    :param log: structured logger passed to the base XPCShellTests class.
    """
    xpcshell.XPCShellTests.__init__(self, log)
    self.options = options
    verbose = False
    if options['log_tbpl_level'] == 'debug' or options['log_mach_level'] == 'debug':
        verbose = True
    self.device = ADBDevice(adb=options['adbPath'] or 'adb',
                            device=options['deviceSerial'],
                            test_root=options['remoteTestRoot'],
                            verbose=verbose)
    self.remoteTestRoot = posixpath.join(self.device.test_root, "xpc")
    # Add Android version (SDK level) to mozinfo so that manifest entries
    # can be conditional on android_version.
    mozinfo.info['android_version'] = self.device.version
    self.localBin = options['localBin']
    self.pathMapping = []
    # remoteBinDir contains xpcshell and its wrapper script, both of which must
    # be executable. Since +x permissions cannot usually be set on /mnt/sdcard,
    # and the test root may be on /mnt/sdcard, remoteBinDir is set to be on
    # /data/local, always.
    self.remoteBinDir = posixpath.join("/data", "local", "xpcb")
    # Terse directory names are used here ("c" for the components directory)
    # to minimize the length of the command line used to execute
    # xpcshell on the remote device. adb has a limit to the number
    # of characters used in a shell command, and the xpcshell command
    # line can be quite complex.
    self.remoteTmpDir = posixpath.join(self.remoteTestRoot, "tmp")
    self.remoteScriptsDir = self.remoteTestRoot
    self.remoteComponentsDir = posixpath.join(self.remoteTestRoot, "c")
    self.remoteModulesDir = posixpath.join(self.remoteTestRoot, "m")
    self.remoteMinidumpDir = posixpath.join(self.remoteTestRoot, "minidumps")
    self.profileDir = posixpath.join(self.remoteTestRoot, "p")
    self.remoteDebugger = options['debugger']
    self.remoteDebuggerArgs = options['debuggerArgs']
    self.testingModulesDir = options['testingModulesDir']
    self.env = {}
    if options['objdir']:
        # Prefer the build tree's test directory when an objdir is given.
        self.xpcDir = os.path.join(options['objdir'], "_tests/xpcshell")
    elif os.path.isdir(os.path.join(here, 'tests')):
        self.xpcDir = os.path.join(here, 'tests')
    else:
        print("Couldn't find local xpcshell test directory", file=sys.stderr)
        sys.exit(1)
    self.localAPKContents = ZipFile(options['localAPK'])
    if options['setup']:
        # One-time device preparation; skipped when the device is already
        # set up from a previous run.
        self.setupTestDir()
        self.setupUtilities()
        self.setupModules()
    self.initDir(self.remoteMinidumpDir)
    self.remoteAPK = None
    self.remoteAPK = posixpath.join(self.remoteBinDir,
                                    os.path.basename(options['localAPK']))
    self.setAppRoot()

    # data that needs to be passed to the RemoteXPCShellTestThread
    self.mobileArgs = {
        'device': self.device,
        'remoteBinDir': self.remoteBinDir,
        'remoteScriptsDir': self.remoteScriptsDir,
        'remoteComponentsDir': self.remoteComponentsDir,
        'remoteModulesDir': self.remoteModulesDir,
        'options': self.options,
        'remoteDebugger': self.remoteDebugger,
        'pathMapping': self.pathMapping,
        'profileDir': self.profileDir,
        'remoteTmpDir': self.remoteTmpDir,
        'remoteMinidumpDir': self.remoteMinidumpDir,
    }
    if self.remoteAPK:
        self.mobileArgs['remoteAPK'] = self.remoteAPK
class B2GMonkey(object):
    """Generate and run Orangutan monkey-test scripts on a B2G device,
    optionally reporting the outcome to Treeherder with artifacts in S3."""

    def __init__(self, device_serial=None):
        self.device_serial = device_serial

        self._logger = structured.get_default_logger(component='b2gmonkey')
        if not self._logger:
            self._logger = mozlog.getLogger('b2gmonkey')

        self.version = mozversion.get_version(
            dm_type='adb', device_serial=device_serial)

        device_id = self.version.get('device_id')
        if not device_id:
            raise B2GMonkeyError('Firefox OS device not found.')

        self.device_properties = DEVICE_PROPERTIES.get(device_id)
        if not self.device_properties:
            raise B2GMonkeyError('Unsupported device: \'%s\'' % device_id)

        android_version = self.version.get('device_firmware_version_release')
        if device_id == 'flame' and android_version == '4.4.2':
            # Flame devices on KitKat use overridden properties.
            self.device_properties.update(DEVICE_PROPERTIES.get('flame-kk'))

        self.temp_dir = tempfile.mkdtemp()
        # Respect an externally-provided minidump path; otherwise collect
        # crash dumps inside our temp dir.
        if 'MINIDUMP_SAVE_PATH' not in os.environ:
            self.crash_dumps_path = os.path.join(self.temp_dir, 'crashes')
            os.environ['MINIDUMP_SAVE_PATH'] = self.crash_dumps_path
        else:
            self.crash_dumps_path = os.environ['MINIDUMP_SAVE_PATH']

    def __del__(self):
        # Best-effort cleanup of the temporary working directory.
        if hasattr(self, 'temp_dir') and os.path.exists(self.temp_dir):
            mozfile.remove(self.temp_dir)

    def generate(self, script, seed=None, steps=10000, **kwargs):
        """Write a pseudo-random Orangutan script to *script*.

        :param script: output file path.
        :param seed: RNG seed; a random one is chosen (and logged) if omitted.
        :param steps: number of steps to generate.
        """
        seed = seed or random.random()
        self._logger.info('Current seed is: %s' % seed)
        rnd = random.Random(str(seed))
        dimensions = self.device_properties['dimensions']
        _steps = []
        self._logger.info('Generating script with %d steps' % steps)
        for i in range(1, steps + 1):
            if i % 1000 == 0:
                # Every 1000 steps, return to the home screen.
                duration = 2000
                home = self.device_properties['home']
                if 'key' in home:
                    _steps.append(['keydown', home['key']])
                    _steps.append(['sleep', duration])
                    _steps.append(['keyup', home['key']])
                else:
                    _steps.append(['tap', home['x'], home['y'], 1, duration])
                continue
            valid_actions = ['tap', 'drag']
            # Avoid two consecutive sleeps.
            if i == 1 or not _steps[-1][0] == 'sleep':
                valid_actions.append('sleep')
            action = rnd.choice(valid_actions)
            if action == 'tap':
                _steps.append([
                    action,
                    rnd.randint(1, dimensions['x']),
                    rnd.randint(1, dimensions['y']),
                    rnd.randint(1, 3),  # repetitions
                    rnd.randint(50, 1000)])  # duration
            elif action == 'sleep':
                _steps.append([
                    action,
                    rnd.randint(100, 3000)])  # duration
            else:
                _steps.append([
                    action,
                    rnd.randint(1, dimensions['x']),  # start
                    rnd.randint(1, dimensions['y']),  # start
                    rnd.randint(1, dimensions['x']),  # end
                    rnd.randint(1, dimensions['y']),  # end
                    rnd.randint(10, 20),  # steps
                    rnd.randint(10, 350)])  # duration

        with open(script, 'w+') as f:
            for step in _steps:
                f.write(' '.join([str(x) for x in step]) + '\n')
        self._logger.info('Script written to: %s' % script)

    def run(self, script, address='localhost:2828', symbols=None,
            treeherder='https://treeherder.mozilla.org/', reset=False,
            **kwargs):
        """Push and execute an Orangutan script, then report results.

        :param script: local path of the Orangutan script.
        :param address: Marionette ``host:port`` address.
        :param symbols: breakpad symbols path for crash processing.
        :param treeherder: base URL for result reporting.
        :param reset: restart B2G and reset Gaia state before running.
        """
        try:
            host, port = address.split(':')
        except ValueError:
            raise ValueError('--address must be in the format host:port')

        # Check that Orangutan is installed
        # NOTE(review): path is relative (no leading '/'); presumably
        # resolved against the adb shell's root cwd — confirm.
        self.adb_device = ADBDevice(self.device_serial)
        orng_path = posixpath.join('data', 'local', 'orng')
        if not self.adb_device.exists(orng_path):
            raise Exception('Orangutan not found! Please install it according '
                            'to the documentation.')

        self.runner = B2GDeviceRunner(
            serial=self.device_serial,
            process_args={'stream': None},
            symbols_path=symbols,
            logdir=self.temp_dir)

        if reset:
            self.runner.start()
        else:
            self.runner.device.connect()

        # 'port' is rebound to the locally-forwarded port number.
        port = self.runner.device.setup_port_forwarding(remote_port=port)
        assert self.runner.device.wait_for_port(port), \
            'Timed out waiting for port!'

        marionette = Marionette(host=host, port=port)
        marionette.start_session()

        try:
            # Record debug-build status for the Treeherder option collection.
            marionette.set_context(marionette.CONTEXT_CHROME)
            self.is_debug = marionette.execute_script(
                'return Components.classes["@mozilla.org/xpcom/debug;1"].'
                'getService(Components.interfaces.nsIDebug2).isDebugBuild;')
            marionette.set_context(marionette.CONTEXT_CONTENT)

            if reset:
                gaia_device = GaiaDevice(marionette)
                gaia_device.wait_for_b2g_ready(timeout=120)
                gaia_device.unlock()
                gaia_apps = GaiaApps(marionette)
                gaia_apps.kill_all()

            # TODO: Disable bluetooth, emergency calls, carrier, etc

            # Run Orangutan script
            remote_script = posixpath.join(self.adb_device.test_root,
                                           'orng.script')
            self.adb_device.push(script, remote_script)
            self.start_time = time.time()
            # TODO: Kill remote process on keyboard interrupt
            self.adb_device.shell('%s %s %s' % (orng_path,
                                                self.device_properties['input'],
                                                remote_script))
            self.end_time = time.time()
            self.adb_device.rm(remote_script)
        except (MarionetteException, IOError):
            if self.runner.crashed:
                # Crash has been detected; handled by check_for_crashes().
                pass
            else:
                raise
        self.runner.check_for_crashes(test_name='b2gmonkey')

        # Report results to Treeherder
        required_envs = ['TREEHERDER_KEY', 'TREEHERDER_SECRET']
        if all([os.environ.get(v) for v in required_envs]):
            self.post_to_treeherder(script, treeherder)
        else:
            self._logger.info(
                'Results will not be posted to Treeherder. Please set the '
                'following environment variables to enable Treeherder '
                'reports: %s' % ', '.join([
                    v for v in required_envs if not os.environ.get(v)]))

    def post_to_treeherder(self, script, treeherder_url):
        """Build a Treeherder job for the last run and submit it.

        Uploads logs, the script, logcat and any crash dumps to S3 and
        links them as job artifacts. Requires self.start_time/end_time and
        self.runner from a preceding run().
        """
        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()
        job.add_group_name(self.device_properties['name'])
        job.add_group_symbol(self.device_properties['symbol'])
        job.add_job_name('Orangutan Monkey Script (%s)' %
                         self.device_properties.get('symbol'))
        job.add_job_symbol('Om')

        # Determine revision hash from application revision
        revision = self.version['application_changeset']
        project = self.version['application_repository'].split('/')[-1]
        lookup_url = urljoin(
            treeherder_url,
            'api/project/%s/revision-lookup/?revision=%s' % (
                project, revision))
        self._logger.debug('Getting revision hash from: %s' % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), 'Unable to determine revision hash for %s. ' \
                                'Perhaps it has not been ingested by ' \
                                'Treeherder?' % revision
        revision_hash = response.json()[revision]['revision_hash']
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name('b2g')
        job.add_state('completed')
        job.add_result(self.runner.crashed and 'testfailed' or 'success')

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info('b2g', 'b2g-device-image', 'x86')
        job.add_machine_info('b2g', 'b2g-device-image', 'x86')

        if self.is_debug:
            job.add_option_collection({'debug': True})
        else:
            job.add_option_collection({'opt': True})

        date_format = '%d %b %Y %H:%M:%S'
        job_details = [{
            'content_type': 'link',
            'title': 'Gaia revision:',
            'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
                   self.version.get('gaia_changeset'),
            'value': self.version.get('gaia_changeset'),
        }, {
            'content_type': 'text',
            'title': 'Gaia date:',
            'value': self.version.get('gaia_date') and time.strftime(
                date_format, time.localtime(int(
                    self.version.get('gaia_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device identifier:',
            'value': self.version.get('device_id')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (date):',
            'value': self.version.get('device_firmware_date') and
            time.strftime(date_format, time.localtime(int(
                self.version.get('device_firmware_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device firmware (incremental):',
            'value': self.version.get('device_firmware_version_incremental')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (release):',
            'value': self.version.get('device_firmware_version_release')
        }]

        ci_url = os.environ.get('BUILD_URL')
        if ci_url:
            job_details.append({
                'url': ci_url,
                'value': ci_url,
                'content_type': 'link',
                'title': 'CI build:'})

        # Attach log files
        handlers = [handler for handler in self._logger.handlers
                    if isinstance(handler, StreamHandler) and
                    os.path.exists(handler.stream.name)]
        for handler in handlers:
            path = handler.stream.name
            filename = os.path.split(path)[-1]
            try:
                url = self.upload_to_s3(path)
                job_details.append({
                    'url': url,
                    'value': filename,
                    'content_type': 'link',
                    'title': 'Log:'})
                # Add log reference
                if type(handler.formatter) is TbplFormatter or \
                        type(handler.formatter) is LogLevelFilter and \
                        type(handler.formatter.inner) is TbplFormatter:
                    job.add_log_reference(filename, url)
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload %s' % filename,
                    'content_type': 'text',
                    'title': 'Error:'})

        # Attach script
        filename = os.path.split(script)[-1]
        try:
            url = self.upload_to_s3(script)
            job_details.append({
                'url': url,
                'value': filename,
                'content_type': 'link',
                'title': 'Script:'})
        except S3UploadError:
            job_details.append({
                'value': 'Failed to upload %s' % filename,
                'content_type': 'text',
                'title': 'Error:'})

        # Attach logcat
        filename = '%s.log' % self.runner.device.dm._deviceSerial
        path = os.path.join(self.temp_dir, filename)
        try:
            url = self.upload_to_s3(path)
            job_details.append({
                'url': url,
                'value': filename,
                'content_type': 'link',
                'title': 'Logcat:'})
        except S3UploadError:
            job_details.append({
                'value': 'Failed to upload %s' % filename,
                'content_type': 'text',
                'title': 'Error:'})

        if job_details:
            job.add_artifact('Job Info', 'json', {'job_details': job_details})

        # Attach crash dumps
        if self.runner.crashed:
            crash_dumps = os.listdir(self.crash_dumps_path)
            for filename in crash_dumps:
                path = os.path.join(self.crash_dumps_path, filename)
                try:
                    url = self.upload_to_s3(path)
                    job_details.append({
                        'url': url,
                        'value': filename,
                        'content_type': 'link',
                        'title': 'Crash:'})
                except S3UploadError:
                    job_details.append({
                        'value': 'Failed to upload %s' % filename,
                        'content_type': 'text',
                        'title': 'Error:'})

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(treeherder_url)
        request = TreeherderRequest(
            protocol=url.scheme,
            host=url.hostname,
            project=project,
            oauth_key=os.environ.get('TREEHERDER_KEY'),
            oauth_secret=os.environ.get('TREEHERDER_SECRET'))
        self._logger.info('Sending results to Treeherder: %s' % treeherder_url)
        self._logger.debug('Job collection: %s' % job_collection.to_json())
        response = request.post(job_collection)
        if response.status == 200:
            self._logger.debug('Response: %s' % response.read())
            self._logger.info('Results are available to view at: %s' % (
                urljoin(treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' % (
                    project, revision))))
        else:
            self._logger.error('Failed to send results to Treeherder! '
                               'Response: %s' % response.read())

    def upload_to_s3(self, path):
        """Gzip *path* and upload it to S3, returning a public URL.

        The S3 key is the SHA-512 of the file contents, so identical files
        deduplicate to one object. Raises S3UploadError when credentials
        are missing or the upload fails.
        """
        if not hasattr(self, '_s3_bucket'):
            try:
                self._logger.debug('Connecting to S3')
                conn = boto.connect_s3()
                bucket = os.environ.get('S3_UPLOAD_BUCKET', 'b2gmonkey')
                if conn.lookup(bucket):
                    self._logger.debug('Getting bucket: %s' % bucket)
                    self._s3_bucket = conn.get_bucket(bucket)
                else:
                    self._logger.debug('Creating bucket: %s' % bucket)
                    self._s3_bucket = conn.create_bucket(bucket)
                self._s3_bucket.set_acl('public-read')
            except boto.exception.NoAuthHandlerFound:
                self._logger.info(
                    'Please set the following environment variables to enable '
                    'uploading of artifacts: %s' % ', '.join([v for v in [
                        'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']
                        if not os.environ.get(v)]))
                raise S3UploadError()
            except boto.exception.S3ResponseError as e:
                self._logger.warning('Upload to S3 failed: %s' % e.message)
                raise S3UploadError()

        # Content-addressed key: hash the file in 1 MiB chunks.
        h = hashlib.sha512()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(1024 ** 2), b''):
                h.update(chunk)
        _key = h.hexdigest()
        key = self._s3_bucket.get_key(_key)
        if not key:
            self._logger.debug('Creating key: %s' % _key)
            key = self._s3_bucket.new_key(_key)
        ext = os.path.splitext(path)[-1]
        if ext == '.log':
            key.set_metadata('Content-Type', 'text/plain')

        # Upload a gzip-compressed copy; browsers decompress transparently
        # thanks to the Content-Encoding header.
        with tempfile.NamedTemporaryFile('w+b', suffix=ext) as tf:
            self._logger.debug('Compressing: %s' % path)
            with gzip.GzipFile(path, 'wb', fileobj=tf) as gz:
                with open(path, 'rb') as f:
                    gz.writelines(f)
            tf.flush()
            tf.seek(0)
            key.set_metadata('Content-Encoding', 'gzip')
            self._logger.debug('Setting key contents from: %s' % tf.name)
            key.set_contents_from_filename(tf.name)

        key.set_acl('public-read')
        blob_url = key.generate_url(expires_in=0, query_auth=False)
        self._logger.info('File %s uploaded to: %s' % (path, blob_url))
        return blob_url
def __init__(self, options):
    """Initialize the remote (Android) mochitest harness.

    Connects to the device over adb, prepares on-device directories
    (logs, profile, cache, mozlog, chrome tests), verifies the app under
    test is installed, and records Android version info in mozinfo.

    :param options: argparse-style options namespace (adbPath,
        deviceSerial, remoteTestRoot, app, remoteappname, extraPrefs, ...).
    :raises Exception: if the application package is not installed.
    """
    MochitestDesktop.__init__(self, options.flavor, vars(options))

    verbose = False
    if options.log_tbpl_level == 'debug' or options.log_mach_level == 'debug':
        verbose = True
    if hasattr(options, 'log'):
        # The log attribute cannot survive vars()/re-parsing; drop it.
        delattr(options, 'log')

    self.certdbNew = True
    self.chromePushed = False
    self.mozLogName = "moz.log"

    self.device = ADBDevice(adb=options.adbPath or 'adb',
                            device=options.deviceSerial,
                            test_root=options.remoteTestRoot,
                            verbose=verbose)

    if options.remoteTestRoot is None:
        # Fall back to the device's default test root.
        options.remoteTestRoot = self.device.test_root
    options.dumpOutputDirectory = options.remoteTestRoot
    self.remoteLogFile = posixpath.join(options.remoteTestRoot,
                                        "logs", "mochitest.log")
    logParent = posixpath.dirname(self.remoteLogFile)
    self.device.rm(logParent, force=True, recursive=True)
    self.device.mkdir(logParent)

    self.remoteProfile = posixpath.join(options.remoteTestRoot, "profile/")
    self.device.rm(self.remoteProfile, force=True, recursive=True)

    self.counts = dict()
    self.message_logger = MessageLogger(logger=None)
    self.message_logger.logger = self.log

    # Automation layer drives the app process on the device and relays
    # its output through the message logger.
    process_args = {'messageLogger': self.message_logger,
                    'counts': self.counts}
    self.automation = RemoteAutomation(self.device, options.remoteappname,
                                       self.remoteProfile,
                                       self.remoteLogFile,
                                       processArgs=process_args)
    self.environment = self.automation.environment

    # Check that Firefox is installed
    expected = options.app.split('/')[-1]
    if not self.device.is_app_installed(expected):
        raise Exception("%s is not installed on this device" % expected)

    self.automation.deleteANRs()
    self.automation.deleteTombstones()
    self.device.clear_logcat()

    self.remoteModulesDir = posixpath.join(options.remoteTestRoot, "modules/")

    self.remoteCache = posixpath.join(options.remoteTestRoot, "cache/")
    self.device.rm(self.remoteCache, force=True, recursive=True)

    # move necko cache to a location that can be cleaned up
    options.extraPrefs += ["browser.cache.disk.parent_directory=%s" %
                           self.remoteCache]

    self.remoteMozLog = posixpath.join(options.remoteTestRoot, "mozlog")
    self.device.rm(self.remoteMozLog, force=True, recursive=True)
    self.device.mkdir(self.remoteMozLog)

    self.remoteChromeTestDir = posixpath.join(
        options.remoteTestRoot, "chrome")
    self.device.rm(self.remoteChromeTestDir, force=True, recursive=True)
    self.device.mkdir(self.remoteChromeTestDir)

    # Make sure no stale instance of the app is running before tests start.
    procName = options.app.split('/')[-1]
    self.device.stop_application(procName)
    if self.device.process_exist(procName):
        self.log.warning("unable to kill %s before running tests!"
                         % procName)

    # Add Android version (SDK level) to mozinfo so that manifest entries
    # can be conditional on android_version.
    self.log.info(
        "Android sdk version '%s'; will use this to filter manifests"
        % str(self.device.version))
    mozinfo.info['android_version'] = str(self.device.version)
    mozinfo.info['isFennec'] = not ('geckoview' in options.app)
    mozinfo.info['is_emulator'] = \
        self.device._device_serial.startswith('emulator-')
def post_to_treeherder(self, tests):
    """Submit the results of this test run to Treeherder.

    Builds a Treeherder job from the device/application version info,
    attaches the logcat, the harness log files and the HTML report
    (each uploaded to S3), then posts the job collection to the
    configured Treeherder instance.

    Logs an error and returns without submitting when the device id is
    missing or the device/firmware pair is not in DEVICE_GROUP_MAP,
    since the Treeherder group cannot be determined in those cases.

    :param tests: unused here; kept for interface compatibility.
    :raises AssertionError: if the revision cannot be resolved or the
        Treeherder submission does not return HTTP 200.
    """
    version = mozversion.get_version(
        binary=self.bin, sources=self.sources,
        dm_type="adb", device_serial=self.device_serial)
    job_collection = TreeherderJobCollection()
    job = job_collection.get_job()

    device = version.get("device_id")
    device_firmware_version_release = \
        version.get("device_firmware_version_release")

    if not device:
        self.logger.error("Submitting to Treeherder is currently limited "
                          "to devices.")
        return

    try:
        group = DEVICE_GROUP_MAP[device][device_firmware_version_release]
        job.add_group_name(group["name"])
        job.add_group_symbol(group["symbol"])
        job.add_job_name("Gaia Python Integration Test (%s)"
                         % group["symbol"])
        job.add_job_symbol("Gip")
    except KeyError:
        # Unknown device/firmware: we cannot choose a Treeherder group,
        # so report what we support and bail out.
        # FIX: was DEVICE_GROUP_MAP.iteritems(), which does not exist on
        # Python 3; items() behaves identically here.
        self.logger.error(
            "Unknown device id: %s or device firmware "
            "version: %s. Unable to determine Treeherder "
            "group. Supported devices: %s"
            % (device, device_firmware_version_release,
               ["%s: %s" % (k, [fw for fw in v.keys()])
                for k, v in DEVICE_GROUP_MAP.items()]))
        return

    # Determine revision hash from application revision
    revision = version["application_changeset"]
    project = version["application_repository"].split("/")[-1]
    lookup_url = urljoin(self.treeherder_url,
                         "api/project/%s/revision-lookup/?revision=%s"
                         % (project, revision))
    self.logger.debug("Getting revision hash from: %s" % lookup_url)
    response = requests.get(lookup_url)
    response.raise_for_status()
    assert response.json(), ("Unable to determine revision hash for %s. "
                             "Perhaps it has not been ingested by "
                             "Treeherder?" % revision)
    revision_hash = response.json()[revision]["revision_hash"]
    job.add_revision_hash(revision_hash)
    job.add_project(project)
    job.add_job_guid(str(uuid.uuid4()))
    job.add_product_name("b2g")
    job.add_state("completed")

    # Determine test result
    if self.failed or self.unexpected_successes:
        job.add_result("testfailed")
    else:
        job.add_result("success")

    job.add_submit_timestamp(int(self.start_time))
    job.add_start_timestamp(int(self.start_time))
    job.add_end_timestamp(int(self.end_time))

    job.add_machine(socket.gethostname())
    job.add_build_info("b2g", "b2g-device-image", "x86")
    job.add_machine_info("b2g", "b2g-device-image", "x86")

    # All B2G device builds are currently opt builds
    job.add_option_collection({"opt": True})

    date_format = "%d %b %Y %H:%M:%S"
    job_details = [
        {
            "content_type": "link",
            "title": "Gaia revision:",
            "url": "https://github.com/mozilla-b2g/gaia/commit/%s"
                   % version.get("gaia_changeset"),
            "value": version.get("gaia_changeset"),
        },
        {
            "content_type": "text",
            "title": "Gaia date:",
            "value": version.get("gaia_date") and time.strftime(
                date_format,
                time.localtime(int(version.get("gaia_date")))),
        },
        {"content_type": "text",
         "title": "Device identifier:",
         "value": version.get("device_id")},
        {
            "content_type": "text",
            "title": "Device firmware (date):",
            "value": version.get("device_firmware_date") and time.strftime(
                date_format,
                time.localtime(int(version.get("device_firmware_date")))),
        },
        {
            "content_type": "text",
            "title": "Device firmware (incremental):",
            "value": version.get("device_firmware_version_incremental"),
        },
        {
            "content_type": "text",
            "title": "Device firmware (release):",
            "value": version.get("device_firmware_version_release"),
        },
    ]

    ci_url = os.environ.get("BUILD_URL")
    if ci_url:
        job_details.append({"url": ci_url, "value": ci_url,
                            "content_type": "link", "title": "CI build:"})

    # Attach logcat
    adb_device = ADBDevice(self.device_serial)
    # FIX: open in text mode -- the NamedTemporaryFile default "w+b"
    # rejects str lines on Python 3. get_logcat() returns lines without
    # trailing newlines, so join them explicitly instead of using
    # writelines(), which produced one unbroken line.
    with tempfile.NamedTemporaryFile(mode="w",
                                     suffix="logcat.txt") as f:
        f.write("\n".join(adb_device.get_logcat()))
        # FIX: flush before uploading by *name*; otherwise buffered
        # output may be missing from the uploaded file.
        f.flush()
        self.logger.debug("Logcat stored in: %s" % f.name)
        try:
            url = self.upload_to_s3(f.name)
            job_details.append({"url": url, "value": "logcat.txt",
                                "content_type": "link", "title": "Log:"})
        except S3UploadError:
            job_details.append({"value": "Failed to upload logcat.txt",
                                "content_type": "text",
                                "title": "Error:"})

    # Attach log files
    # FIX: guard with getattr -- not every stream object exposes a
    # 'name' attribute (e.g. in-memory streams), which would raise
    # AttributeError here.
    handlers = [
        handler for handler in self.logger.handlers
        if isinstance(handler, StreamHandler) and
        os.path.exists(getattr(handler.stream, "name", ""))]
    for handler in handlers:
        path = handler.stream.name
        filename = os.path.split(path)[-1]
        try:
            url = self.upload_to_s3(path)
            job_details.append({"url": url, "value": filename,
                                "content_type": "link", "title": "Log:"})
            # Add log reference
            if (type(handler.formatter) is TbplFormatter or
                    type(handler.formatter) is LogLevelFilter and
                    type(handler.formatter.inner) is TbplFormatter):
                job.add_log_reference(filename, url)
        except S3UploadError:
            job_details.append(
                {"value": "Failed to upload %s" % filename,
                 "content_type": "text", "title": "Error:"})

    # Attach reports
    for report in [self.html_output]:
        if report is not None:
            filename = os.path.split(report)[-1]
            try:
                url = self.upload_to_s3(report)
                job_details.append({"url": url, "value": filename,
                                    "content_type": "link",
                                    "title": "Report:"})
            except S3UploadError:
                job_details.append(
                    {"value": "Failed to upload %s" % filename,
                     "content_type": "text", "title": "Error:"})

    if job_details:
        job.add_artifact("Job Info", "json", {"job_details": job_details})

    job_collection.add(job)

    # Send the collection to Treeherder
    url = urlparse(self.treeherder_url)
    request = TreeherderRequest(
        protocol=url.scheme,
        host=url.hostname,
        project=project,
        oauth_key=os.environ.get("TREEHERDER_KEY"),
        oauth_secret=os.environ.get("TREEHERDER_SECRET"))
    self.logger.debug("Sending results to Treeherder: %s"
                      % job_collection.to_json())
    response = request.post(job_collection)
    self.logger.debug("Response: %s" % response.read())
    assert response.status == 200, "Failed to send results!"
    self.logger.info(
        "Results are available to view at: %s"
        % (urljoin(self.treeherder_url,
                   "/ui/#/jobs?repo=%s&revision=%s" % (project, revision))))