def diff_image(self, expected_contents, actual_contents, tolerance):
    """Send two images to the ImageDiff helper process and return its answer.

    Restarts the helper when the tolerance changed or the running process
    still has unread stdout from a previous request.
    Returns the tuple produced by self._read(), or (None, 0, <message>)
    when talking to the helper fails with an IOError.
    """
    stale_process = self._process and self._process.has_available_stdout()
    if tolerance != self._tolerance or stale_process:
        self.stop()
    try:
        assert expected_contents
        assert actual_contents
        assert tolerance is not None
        if not self._process:
            self._start(tolerance)
        # Note that although we are handed 'old', 'new', ImageDiff wants
        # 'new', 'old' — so the actual image is written first.
        request = BytesIO()
        for contents in (actual_contents, expected_contents):
            request.write(encode_if_necessary('Content-Length: {}\n'.format(len(contents))))
            request.write(contents)
        self._process.write(request.getvalue())
        return self._read()
    except IOError as exception:
        return (None, 0, "Failed to compute an image diff: %s" % str(exception))
def __init__(self, stdout='MOCK STDOUT\n', stderr=''):
    """Mock Popen-style process exposing canned stdout/stderr streams."""
    self.pid = 42
    self.returncode = 0
    # Streams are byte-oriented, matching a real subprocess pipe.
    self.stdout = BytesIO(encode_if_necessary(stdout))
    self.stderr = BytesIO(encode_if_necessary(stderr))
    self.stdin = BytesIO()
    self._is_running = False
def prepend_svn_revision(self, diff):
    """Return diff prefixed with a 'Subversion Revision:' header line.

    When no HEAD revision is known, the diff is returned unchanged.
    All pieces are passed through encode_if_necessary so the result stays
    in byte form.
    """
    revision = self.head_svn_revision()
    if not revision:
        return diff
    prefix = encode_if_necessary("Subversion Revision: ")
    newline = encode_if_necessary('\n')
    return prefix + encode_if_necessary(revision) + newline + encode_if_necessary(diff)
def pretty_diff_file(self, diff):
    """Render diff as pretty HTML into a NamedTemporaryFile and return it.

    The caller must keep the returned file object alive; the underlying
    file is removed once the object is closed.
    """
    # Diffs can contain multiple text files of different encodings
    # so we always deal with them as byte arrays, not unicode strings.
    pretty_html = encode_if_necessary(self.pretty_diff(encode_if_necessary(diff)))
    diff_file = tempfile.NamedTemporaryFile(suffix=".html")
    diff_file.write(pretty_html)
    diff_file.flush()
    return diff_file
def create_crash_logs_darwin(self):
    """Populate a mock filesystem with Darwin crash reports and return a CrashLogs.

    Only runs on macOS hosts; returns None elsewhere. The individual mock
    reports are also stashed on self so tests can compare against them.
    """
    if not SystemHost().platform.is_mac():
        return

    self.older_mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28528)
    self.sandbox_crash_report = make_mock_sandbox_report_darwin('DumpRenderTree', 28530)
    self.mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28530)
    self.newer_mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28529)
    self.other_process_mock_crash_report = make_mock_crash_report_darwin('FooProcess', 28527)
    self.misformatted_mock_crash_report = 'Junk that should not appear in a crash report' + make_mock_crash_report_darwin('DumpRenderTree', 28526)[200:]

    # (file name, contents) pairs; a None contents means "file should not exist".
    report_dir = '/Users/mock/Library/Logs/DiagnosticReports/'
    reports = [
        ('DumpRenderTree_2011-06-13-150715_quadzen.crash', self.older_mock_crash_report),
        ('DumpRenderTree_2011-06-13-150716_quadzen_1.crash', self.older_mock_crash_report),
        ('DumpRenderTree_2011-06-13-150717_quadzen_2.crash', self.older_mock_crash_report),
        ('DumpRenderTree_2011-06-13-150718_quadzen.crash', self.sandbox_crash_report),
        ('DumpRenderTree_2011-06-13-150719_quadzen.crash', self.mock_crash_report),
        ('DumpRenderTree_2011-06-13-150720_quadzen.crash', self.newer_mock_crash_report),
        ('DumpRenderTree_2011-06-13-150721_quadzen.crash', None),
        ('DumpRenderTree_2011-06-13-150722_quadzen.crash', self.other_process_mock_crash_report),
        ('DumpRenderTree_2011-06-13-150723_quadzen.crash', self.misformatted_mock_crash_report),
    ]
    self.files = {
        report_dir + name: unicode_compatibility.encode_if_necessary(contents)
        for name, contents in reports
    }
    self.filesystem = MockFileSystem(self.files)
    crash_logs = CrashLogs(MockSystemHost(filesystem=self.filesystem), CrashLogsTest.DARWIN_MOCK_CRASH_DIRECTORY)
    logs = self.filesystem.files_under('/Users/mock/Library/Logs/DiagnosticReports/')
    for path in reversed(sorted(logs)):
        self.assertTrue(path in self.files.keys())
    return crash_logs
def diff_image(self, expected_contents, actual_contents, tolerance=None):
    """Mock image diff: compares the two byte strings for exact equality.

    Returns (diff_text_or_None_or_True, diff_count, error), mirroring the
    real differ's tuple shape.
    """
    expected_contents = encode_if_necessary(expected_contents)
    actual_contents = encode_if_necessary(actual_contents)
    # Both missing: nothing to diff.
    if not expected_contents and not actual_contents:
        return (None, 0, None)
    # Exactly one missing counts as a difference.
    if not expected_contents or not actual_contents:
        return (True, 0, None)
    if b'ref' in expected_contents:
        assert tolerance == 0
    if expected_contents == actual_contents:
        return (None, 0, None)
    diff_text = "< {}\n---\n> {}\n".format(
        decode_for(expected_contents, str),
        decode_for(actual_contents, str),
    )
    return (diff_text, 1, None)
def error_hash(self):
    """Return a device-independent integer hash identifying this suppression.

    Printing this hash lets us find duplicate reports between tests and
    different shards running on multiple buildbots.
    """
    digest = hashlib.md5(encode_if_necessary(self.unique_string())).hexdigest()
    return int(digest[:16], 16)
def run_and_throw_if_fail(self, args, quiet=False, decode_output=True, **kwargs):
    """Run args, teeing child output to stdout (suppressed when quiet).

    Returns the child's output, decoded with the child-process encoding
    unless decode_output is False (then it is returned encoded).
    Raises ScriptError when the child exits non-zero.
    """
    # Cache the child's output locally so it can be used for error reports.
    child_out_file = unicode_compatibility.StringIO()
    tee_stdout = sys.stdout
    # Fix: initialize dev_null so the finally block cannot hit a NameError
    # if open(os.devnull) itself raises while quiet is True.
    dev_null = None
    try:
        if quiet:
            dev_null = open(os.devnull, "w")  # FIXME: Does this need an encoding?
            tee_stdout = dev_null
        child_stdout = Tee(child_out_file, tee_stdout)
        exit_code = self._run_command_with_teed_output(args, child_stdout, **kwargs)
    finally:
        if dev_null:
            dev_null.close()
    child_output = child_out_file.getvalue()
    child_out_file.close()
    encoding = self._child_process_encoding()
    if decode_output:
        child_output = unicode_compatibility.decode_if_necessary(child_output, encoding)
    else:
        child_output = unicode_compatibility.encode_if_necessary(child_output, encoding)
    if exit_code:
        raise ScriptError(script_args=args, exit_code=exit_code, output=child_output)
    return child_output
def __init__(self, text, image, image_hash, audio, crash=False, test_time=0, measurements=None, timeout=False, error='', crashed_process_name='??', crashed_pid=None, crash_log=None, pid=None):
    """Bundle the outputs of one driver invocation.

    text is normalized to unicode (None when empty); audio stays in its
    port-dependent binary form.
    """
    # FIXME: Args could be renamed to better clarify what they do.
    self.text = decode_for(text, str) if text else None
    self.image = image  # May be empty-string if the test crashes.
    self.image_hash = image_hash
    self.image_diff = None  # image_diff gets filled in after construction.
    # Binary format is port-dependent.
    self.audio = encode_if_necessary(audio) if audio else None
    self.crash = crash
    self.crashed_process_name = crashed_process_name
    self.crashed_pid = crashed_pid
    self.crash_log = crash_log
    self.test_time = test_time
    self.measurements = measurements
    self.timeout = timeout
    self.error = error  # stderr output
    self.pid = pid
def generate_supplemental_dependency(self, input_directory, supplemental_dependency_file, window_constructors_file, workerglobalscope_constructors_file, dedicatedworkerglobalscope_constructors_file, serviceworkerglobalscope_constructors_file, workletglobalscope_constructors_file, paintworkletglobalscope_constructors_file, testglobalscope_constructors_file):
    """Run preprocess-idls.pl over every .idl file in input_directory.

    Writes the list of IDL paths to a temp file, invokes the perl script
    with all the output-file arguments, and returns the script's exit
    code (0 on success).

    Fix: the mkstemp fd and path were leaked if anything raised before the
    final os.remove; cleanup now happens in finally blocks.
    """
    idl_files_fd, idl_files_path = tempfile.mkstemp()
    try:
        try:
            for input_file in os.listdir(input_directory):
                (name, extension) = os.path.splitext(input_file)
                if extension != '.idl':
                    continue
                os.write(idl_files_fd, encode_if_necessary(os.path.join(input_directory, input_file) + "\n"))
        finally:
            os.close(idl_files_fd)
        cmd = ['perl', '-w', '-IWebCore/bindings/scripts',
               'WebCore/bindings/scripts/preprocess-idls.pl',
               '--idlFilesList', idl_files_path,
               '--testGlobalContextName', 'TestGlobalObject',
               '--defines', '',
               '--supplementalDependencyFile', supplemental_dependency_file,
               '--windowConstructorsFile', window_constructors_file,
               '--workerGlobalScopeConstructorsFile', workerglobalscope_constructors_file,
               '--dedicatedWorkerGlobalScopeConstructorsFile', dedicatedworkerglobalscope_constructors_file,
               '--serviceWorkerGlobalScopeConstructorsFile', serviceworkerglobalscope_constructors_file,
               '--workletGlobalScopeConstructorsFile', workletglobalscope_constructors_file,
               '--paintWorkletGlobalScopeConstructorsFile', paintworkletglobalscope_constructors_file,
               '--testGlobalScopeConstructorsFile', testglobalscope_constructors_file]
        exit_code = 0
        try:
            output = self.executive.run_command(cmd)
            if output:
                print(output)
        except ScriptError as e:
            print(e.output)
            exit_code = e.exit_code
    finally:
        os.remove(idl_files_path)
    return exit_code
def mock_contents_at_revision(changelog_path, revision):
    """Stub for contents_at_revision; checks its args and returns canned bytes."""
    self.assertEqual(changelog_path, "foo")
    self.assertEqual(revision, "bar")
    # contents_at_revision is expected to return a byte array (str),
    # so we encode our unicode ChangeLog down to a utf-8 stream and append
    # an invalid codepoint; the ChangeLog utf-8 decoding should ignore it.
    return encode_if_necessary(_changelog1) + b'\255'
def run_command(self, args, cwd=None, env=None, input=None, error_handler=None, ignore_errors=False, return_exit_code=False, return_stderr=True, decode_output=True):
    """Popen wrapper for convenience and to work around python bugs."""
    assert(isinstance(args, list) or isinstance(args, tuple))
    start_time = time.time()
    # input may be None, a string to send, or a file-like object (see _compute_stdin).
    stdin, string_to_communicate = self._compute_stdin(input)
    # return_stderr folds stderr into stdout; otherwise stderr is left alone.
    stderr = self.STDOUT if return_stderr else None
    process = self.popen(args, stdin=stdin, stdout=self.PIPE, stderr=stderr, cwd=cwd, env=env, close_fds=self._should_close_fds())
    with process:
        if not string_to_communicate:
            output = process.communicate()[0]
        else:
            output = process.communicate(unicode_compatibility.encode_if_necessary(string_to_communicate, 'utf-8'))[0]
        # run_command automatically decodes to unicode() and converts CRLF to LF unless explicitly told not to.
        if decode_output:
            output = unicode_compatibility.decode_if_necessary(output, self._child_process_encoding()).replace('\r\n', '\n')
        # wait() is not threadsafe and can throw OSError due to:
        # http://bugs.python.org/issue1731717
        exit_code = process.wait()
    _log.debug('"%s" took %.2fs' % (self.command_for_printing(args), time.time() - start_time))
    if return_exit_code:
        return exit_code
    if exit_code:
        # Non-zero exit: hand a ScriptError to the configured handler
        # (which may raise, or ignore when ignore_errors is set).
        script_error = ScriptError(script_args=args, exit_code=exit_code, output=output, cwd=cwd)
        if ignore_errors:
            assert error_handler is None, "don't specify error_handler if ignore_errors is True"
            error_handler = Executive.ignore_error
        (error_handler or self.default_error_handler)(script_error)
    return output
def _parse_leaks_output(self, leaks_output):
    """Parse the summary section of `leaks` tool output.

    Returns (leak_count, excluded_count, leaked_bytes) as ints;
    (0, 0, 0) when the output is empty.
    """
    if not leaks_output:
        return 0, 0, 0
    leaks_output = encode_if_necessary(leaks_output)
    # Use the named groups the pattern already declares instead of
    # positional unpacking that shadowed the builtin `bytes`.
    totals = re.search(
        b'Process (?P<pid>\\d+): (?P<count>\\d+) leaks? for (?P<bytes>\\d+) total',
        leaks_output)
    excluded_match = re.search(b'(?P<excluded>\\d+) leaks? excluded', leaks_output)
    excluded = excluded_match.group('excluded') if excluded_match else 0
    return int(totals.group('count')), int(excluded), int(totals.group('bytes'))
def test_run_command_with_unicode(self):
    """Validate that it is safe to pass unicode() objects
    to Executive.run* methods, and they will return unicode()
    objects by default unless decode_output=False"""
    unicode_tor_input = u"WebKit \u2661 Tor Arne Vestb\u00F8!"
    encoding = 'mbcs' if sys.platform.startswith('win') else 'utf-8'
    encoded_tor = unicode_compatibility.encode_if_necessary(unicode_tor_input, encoding)
    if sys.platform.startswith('win'):
        # On Windows, we expect the unicode->mbcs->unicode roundtrip to be lossy.
        unicode_tor_output = unicode_compatibility.decode_if_necessary(encoded_tor, encoding)
    else:
        # On other platforms, we expect a lossless roundtrip.
        unicode_tor_output = unicode_tor_input

    executive = Executive()

    output = executive.run_command(command_line('cat'), input=unicode_tor_input)
    self.assertEqual(output, unicode_tor_output)

    output = executive.run_command(command_line('echo', unicode_tor_input))
    self.assertEqual(output, unicode_tor_output)

    output = executive.run_command(command_line('echo', unicode_tor_input), decode_output=False)
    self.assertEqual(output, encoded_tor)

    # Make sure that str() input also works.
    output = executive.run_command(command_line('cat'), input=encoded_tor, decode_output=False)
    self.assertEqual(output, encoded_tor)

    # FIXME: We should only have one run* method to test
    output = executive.run_and_throw_if_fail(command_line('echo', unicode_tor_input), quiet=True)
    self.assertEqual(output, unicode_tor_output)

    output = executive.run_and_throw_if_fail(command_line('echo', unicode_tor_input), quiet=True, decode_output=False)
    self.assertEqual(output, encoded_tor)
def change_state_to(device, state):
    """Overwrite the state integer stored in the simulated device's device.plist."""
    assert isinstance(state, int)
    # Reaching into device.plist to change device state. Note that this will
    # not change the initial state of the device as determined from the
    # .json output.
    fs = device.filesystem
    device_plist = fs.expanduser(
        fs.join(SimulatedDeviceManager.simulator_device_path, device.udid, 'device.plist'))
    contents = fs.files[device_plist]
    # The single character just before '</integer>' holds the state digit.
    index_position = contents.index(b'</integer>') - 1
    fs.files[device_plist] = (
        contents[:index_position]
        + encode_if_necessary(str(state))
        + contents[index_position + 1:])
def write(self, bytes, ignore_crash=False):
    """Write a request to the subprocess. The subprocess is (re-)start()'ed
    if is not already running."""
    if not self._proc:
        self._start()
    try:
        self._proc.stdin.write(encode_if_necessary(bytes))
        self._proc.stdin.flush()
    except (IOError, ValueError) as e:
        self.stop(0.0)
        # stop() calls _reset(), so we have to set crashed to True after
        # calling stop(), unless we already know that this is a timeout.
        if ignore_crash:
            return
        _log.debug('{} because of a broken pipe when writing to stdin of the server process.'.format(self._crash_message))
        self._crashed = True
def _process_stdout_line(self, block, line):
    """Route one stdout line: known headers update block fields, anything
    else is appended to the block's content."""
    headers = (
        (b'Content-Type: ', 'content_type', None),
        (b'Content-Transfer-Encoding: ', 'encoding', None),
        (b'Content-Length: ', '_content_length', int),
        (b'ActualHash: ', 'content_hash', None),
        (b'DumpMalloc: ', 'malloc', None),
        (b'DumpJSHeap: ', 'js_heap', None),
    )
    for prefix, attribute, converter in headers:
        if self._read_header(block, line, prefix, attribute, converter):
            return
    # Note, we're not reading ExpectedHash: here, but we could.
    # If the line wasn't a header, we just append it to the content.
    block.content = encode_if_necessary(block.content) + line
def _compute_stdin(self, input):
    """Returns (stdin, string_to_communicate)"""
    # FIXME: We should be returning /dev/null for stdin
    # or closing stdin after process creation to prevent
    # child processes from getting input from the user.
    if not input:
        return (None, None)
    if hasattr(input, "read"):
        # File-like object: hand it to Popen directly and
        # assume the file is in the right encoding.
        return (input, None)
    # Popen in Python 2.5 and before does not automatically encode unicode objects.
    # http://bugs.python.org/issue5290
    # See https://bugs.webkit.org/show_bug.cgi?id=37528
    # for an example of a regresion caused by passing a unicode string directly.
    # FIXME: We may need to encode differently on different platforms.
    encoded = unicode_compatibility.encode_if_necessary(input, self._child_process_encoding())
    return (self.PIPE, encoded)
def __init__(self, port_obj=None, name=None, cmd=None, env=None, universal_newlines=False, lines=None, crashed=False, target_host=None, crash_message=None):
    """Mock server process that replays canned output lines and records writes."""
    self.timed_out = False
    # Canned stdout lines, normalized to bytes.
    self.lines = [encode_if_necessary(entry) for entry in (lines or [])]
    self.crashed = crashed
    self.writes = []
    self.cmd = cmd
    self.env = env
    self.started = False
    self.stopped = False
    self.number_of_times_polled = 0
def write_git_patch_file(self):
    """Write the current WPT patch to a temp file and return its path.

    Returns b'' when the patch contains no diff, or when it contains
    webkit-test-runner specific changes that must be removed first.
    """
    _, patch_file = self._filesystem.open_binary_tempfile('wpt_export_patch')
    patch_data = self._wpt_patch
    if b'diff' not in patch_data:
        _log.info('No changes to upstream, patch data is: "{}"'.format(decode_for(patch_data, str)))
        return b''
    # FIXME: We can probably try to use --relative git parameter to not do that replacement.
    wpt_dir_prefix = encode_if_necessary(WEBKIT_WPT_DIR) + b'/'
    patch_data = patch_data.replace(wpt_dir_prefix, b'')
    # FIXME: Support stripping of <!-- webkit-test-runner --> comments.
    self.has_webkit_test_runner_specific_changes = b'webkit-test-runner' in patch_data
    if self.has_webkit_test_runner_specific_changes:
        _log.warning("Patch contains webkit-test-runner specific changes, please remove them before creating a PR")
        return b''
    self._filesystem.write_binary_file(patch_file, patch_data)
    return patch_file
def __init__(self, files=None, dirs=None, cwd='/'):
    """Initializes a "mock" filesystem that can be used to completely
    stub out a filesystem.

    Args:
        files: a dict of filenames -> file contents. A file contents
            value of None is used to indicate that the file should not exist.
    """
    self.files = {
        name: unicode_compatibility.encode_if_necessary(contents)
        for name, contents in (files or {}).items()
    }
    self.written_files = {}
    self.last_tmpdir = None
    self.current_tmpno = 0
    self.cwd = cwd
    self.dirs = set(dirs or [])
    self.dirs.add(cwd)
    # Ensure every ancestor directory of every file is present.
    for path in self.files:
        parent = self.dirname(path)
        while parent not in self.dirs:
            self.dirs.add(parent)
            parent = self.dirname(parent)
def write(self, str):
    """UTF-8 encode str (when necessary) and forward it to the binary writer."""
    encoded = unicode_compatibility.encode_if_necessary(str, 'utf-8')
    WritableBinaryFileObject.write(self, encoded)
def auth_token(self):
    """Return the base64-encoded 'user:token' pair for HTTP Basic auth, as text."""
    assert self.has_credentials()
    credentials = encode_if_necessary('{}:{}'.format(self.user, self.token))
    return decode_for(base64.b64encode(credentials), str)
def command_for_printing(self, args):
    """Returns a print-ready string representing command args.
    The string should be copy/paste ready for execution in a shell."""
    joined = ' '.join(self._stringify_args(args))
    # Round-trip through 'unicode_escape' so non-ASCII/unprintable
    # characters appear as backslash escapes.
    return unicode_compatibility.decode_if_necessary(
        unicode_compatibility.encode_if_necessary(joined, 'unicode_escape'))
def write_text_file(self, path, contents, errors='strict'):
    """UTF-8 encode contents (with the given error policy) and write it to path."""
    encoded = unicode_compatibility.encode_if_necessary(contents, 'utf-8', errors=errors)
    return self.write_binary_file(path, encoded)
def _read_block(self, deadline, test_name, wait_for_stderr_eof=False):
    """Read one output block (headers + content) from the driver process.

    Interleaves reads of stdout and stderr until both sides reach their
    block EOF markers (or the process times out / crashes), accumulating
    stderr into self.error_from_test and detecting ASan violations.
    Returns the decoded ContentBlock.
    """
    block = ContentBlock()
    out_seen_eof = False
    asan_violation_detected = False
    while True:
        # Done once stdout hit EOF and (if requested) stderr has too.
        if out_seen_eof and (self.err_seen_eof or not wait_for_stderr_eof):
            break
        if self.err_seen_eof:
            out_line = self._server_process.read_stdout_line(deadline)
            err_line = None
        elif out_seen_eof:
            out_line = None
            err_line = self._server_process.read_stderr_line(deadline)
        else:
            out_line, err_line = self._server_process.read_either_stdout_or_stderr_line(deadline)
        # ServerProcess returns None for time outs and crashes.
        if out_line is None and err_line is None:
            break
        if out_line:
            assert not out_seen_eof
            out_line, out_seen_eof = self._strip_eof(out_line)
        if err_line:
            assert not self.err_seen_eof
            err_line, self.err_seen_eof = self._strip_eof(err_line)
        if out_line:
            self._check_for_driver_timeout(out_line)
            # Accept both str ('\n') and bytes (10) line representations.
            if out_line[-1] != '\n' and out_line[-1] != 10:
                _log.error(" %s -> Last character read from DRT stdout line was not a newline! This indicates either a NRWT or DRT bug." % test_name)
            content_length_before_header_check = block._content_length
            self._process_stdout_line(block, out_line)
            # FIXME: Unlike HTTP, DRT dumps the content right after printing a Content-Length header.
            # Don't wait until we're done with headers, just read the binary blob right now.
            if content_length_before_header_check != block._content_length:
                block.content = encode_if_necessary(self._server_process.read_stdout(deadline, block._content_length))
        if err_line:
            if self._check_for_driver_crash_or_unresponsiveness(err_line):
                break
            elif self._check_for_address_sanitizer_violation(err_line):
                asan_violation_detected = True
                self._crash_report_from_driver = b''
                # ASan report starts with a nondescript line, we only detect the second line.
                end_of_previous_error_line = self.error_from_test.rfind('\n', 0, -1)
                if end_of_previous_error_line > 0:
                    self.error_from_test = self.error_from_test[:end_of_previous_error_line]
                else:
                    self.error_from_test = ''
                # Symbolication can take a very long time, give it 10 extra minutes to finish.
                # FIXME: This can likely be removed once <rdar://problem/18701447> is fixed.
                deadline += 10 * 60 * 1000
            if asan_violation_detected:
                self._crash_report_from_driver += decode_for(err_line, str)
            else:
                self.error_from_test += decode_for(err_line, str)
    if asan_violation_detected and not self._crashed_process_name:
        self._crashed_process_name = self._server_process.process_name()
        self._crashed_pid = self._server_process.system_pid()
    block.decode_content()
    return block
def write(self, string):
    """Append string (UTF-8 encoded when needed) to the mock file and record the write."""
    data = unicode_compatibility.encode_if_necessary(string, 'utf-8')
    self.fs.files[self.path] += data
    self.fs.written_files[self.path] = self.fs.files[self.path]
def _encode_argument_if_needed(self, argument):
    """Encode argument with the child-process encoding when this platform requires it."""
    if self._should_encode_child_process_arguments():
        return unicode_compatibility.encode_if_necessary(argument, self._child_process_encoding())
    return argument
def propget(self, pname, path):
    """Fetch the svn property pname of path via `svn pget`, trailing newlines
    stripped and the result passed through encode_if_necessary."""
    directory, base = os.path.split(path)
    raw_value = self._run_svn(['pget', pname, base], cwd=directory).rstrip("\n")
    return encode_if_necessary(raw_value)
def test_auth_token(self):
    """auth_token() must match a reference base64 encoding of 'user:token'."""
    # base64.encodestring was deprecated and removed in Python 3.9;
    # base64.encodebytes is the drop-in replacement with identical output
    # (including the trailing newline, hence the .strip()).
    self.assertEqual(
        self.wpt_github.auth_token(),
        decode_for(
            base64.encodebytes(encode_if_necessary('rutabaga:decafbad')),
            str).strip())