def __init__(self, stdout='MOCK STDOUT\n', stderr=''):
    """Initialize a fake subprocess with canned stdout/stderr streams."""
    # Fixed fake pid; callers only need a stable integer.
    self.pid = 42
    # Expose the streams as BytesIO so they behave like real subprocess
    # pipes (which carry bytes, hence the encode calls).
    self.stdout = BytesIO(string_utils.encode(stdout))
    self.stderr = BytesIO(string_utils.encode(stderr))
    self.stdin = BytesIO()
    # Mimic a process that already exited successfully.
    self.returncode = 0
    self._is_running = False
def diff_image(self, expected_contents, actual_contents, tolerance):
    """Send one image pair to the ImageDiff helper and return its reply.

    The helper is restarted whenever the tolerance changes or the previous
    request left unread output behind.
    """
    if tolerance != self._tolerance or (
            self._process and self._process.has_available_stdout()):
        self.stop()
    try:
        assert expected_contents
        assert actual_contents
        assert tolerance is not None
        if not self._process:
            self._start(tolerance)
        # Note that although we are handed 'old', 'new', ImageDiff wants 'new', 'old'.
        request = BytesIO()
        for contents in (actual_contents, expected_contents):
            request.write(string_utils.encode(
                'Content-Length: {}\n'.format(len(contents))))
            request.write(contents)
        self._process.write(request.getvalue())
        return self._read()
    except IOError as exception:
        return (None, 0, "Failed to compute an image diff: %s" % str(exception))
def load_ews_classes(cls):
    """Build one EWS class per entry in webkitpy/common/config/ews.json.

    Returns a list of dynamically-created subclasses of ``cls``, or None
    when the JSON file cannot be parsed.
    """
    filesystem = FileSystem()
    json_path = filesystem.join(
        filesystem.dirname(filesystem.path_to_module('webkitpy.common.config')),
        'ews.json')
    try:
        ewses = json.loads(filesystem.read_text_file(json_path))
    except ValueError:
        return None

    classes = []
    for name, config in ewses.items():
        # Class names must not contain spaces or hyphens, so strip them.
        if sys.version_info > (3, 0):
            # BUGFIX: on Python 3, str.translate() takes a translation
            # table, not a deletechars string; the old call translate(' -')
            # silently deleted nothing.
            translated = string_utils.encode(name, target_type=str).translate(
                str.maketrans('', '', ' -'))
        else:
            translated = string_utils.encode(name, target_type=str).translate(None, ' -')
        classes.append(type(translated, (cls,), {
            'name': config.get('name', config['port'] + '-ews'),
            'port_name': config['port'],
            'architecture': config.get('architecture', None),
            '_build_style': config.get('style', "release"),
            'watchers': config.get('watchers', []),
            'run_tests': config.get('runTests', cls.run_tests),
            '_group': config.get('group', None),
            'should_build': config.get('shouldBuild', True),
        }))
    return classes
def prepend_svn_revision(self, diff):
    """Return diff prefixed with a 'Subversion Revision:' header, as bytes.

    Returns the diff unchanged when no SVN revision is available.
    """
    revision = self.head_svn_revision()
    if not revision:
        return diff
    header = string_utils.encode("Subversion Revision: ")
    header += string_utils.encode(revision)
    header += string_utils.encode('\n')
    return header + string_utils.encode(diff)
def error_hash(self):
    # This is a device-independent hash identifying the suppression.
    # By printing out this hash we can find duplicate reports between tests
    # and different shards running on multiple buildbots.
    digest = hashlib.md5(string_utils.encode(self.unique_string())).hexdigest()
    # Fold the first 16 hex digits (64 bits) into an integer.
    return int(digest[:16], 16)
def run_and_throw_if_fail(self, args, quiet=False, decode_output=True, **kwargs):
    """Run args, teeing child output to stdout, and raise ScriptError on failure.

    When quiet is True the child's output is discarded (sent to os.devnull)
    instead of echoed.  Returns the child's output, decoded unless
    decode_output is False.
    """
    # Cache the child's output locally so it can be used for error reports.
    child_out_file = StringIO()
    tee_stdout = sys.stdout
    # BUGFIX: initialize dev_null so the finally block cannot hit a
    # NameError (masking the real error) if open(os.devnull) raises.
    dev_null = None
    try:
        if quiet:
            dev_null = open(os.devnull, "w")  # FIXME: Does this need an encoding?
            tee_stdout = dev_null
        child_stdout = Tee(child_out_file, tee_stdout)
        exit_code = self._run_command_with_teed_output(
            args, child_stdout, **kwargs)
    finally:
        if dev_null:
            dev_null.close()
    child_output = child_out_file.getvalue()
    child_out_file.close()

    if decode_output:
        child_output = string_utils.decode(
            child_output, encoding=self._child_process_encoding())
    else:
        child_output = string_utils.encode(
            child_output, encoding=self._child_process_encoding())

    if exit_code:
        raise ScriptError(script_args=args, exit_code=exit_code, output=child_output)
    return child_output
def __init__(self, text, image, image_hash, audio, crash=False,
             test_time=0, measurements=None, timeout=False, error='',
             crashed_process_name='??', crashed_pid=None, crash_log=None,
             pid=None):
    """Bundle every output artifact of a single test run.

    Text is normalized to str and audio to bytes; the remaining arguments
    are stored as passed.
    """
    # FIXME: Args could be renamed to better clarify what they do.
    self.text = string_utils.decode(text, target_type=str) if text else None
    self.image = image  # May be empty-string if the test crashes.
    self.image_hash = image_hash
    self.image_diff = None  # image_diff gets filled in after construction.
    self.audio = string_utils.encode(
        audio) if audio else None  # Binary format is port-dependent.
    self.crash = crash
    self.crashed_process_name = crashed_process_name
    self.crashed_pid = crashed_pid
    self.crash_log = crash_log
    self.test_time = test_time
    self.measurements = measurements
    self.timeout = timeout
    self.error = error  # stderr output
    self.pid = pid
def diff_image(self, expected_contents, actual_contents, tolerance=None):
    """Mock image diff: report a textual diff when the byte strings differ.

    Returns the usual (diff, percent, error) triple used by port objects.
    """
    expected_contents = string_utils.encode(expected_contents)
    actual_contents = string_utils.encode(actual_contents)
    if not actual_contents and not expected_contents:
        return (None, 0, None)
    if not actual_contents or not expected_contents:
        # Exactly one side is empty: treat as a (degenerate) difference.
        return (True, 0, None)
    if b'ref' in expected_contents:
        assert tolerance == 0
    if actual_contents != expected_contents:
        diff_text = "< {}\n---\n> {}\n".format(
            string_utils.decode(expected_contents, target_type=str),
            string_utils.decode(actual_contents, target_type=str),
        )
        return (diff_text, 1, None)
    return (None, 0, None)
def create_crash_logs_darwin(self):
    """Populate a mock Darwin filesystem with assorted crash reports and
    return a CrashLogs reader backed by it.  Returns None on non-Mac hosts."""
    if not SystemHost().platform.is_mac():
        return
    # Reports for the same process at several pids/timestamps, plus a
    # sandbox report, another process's report, and a misformatted one.
    self.older_mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28528)
    self.sandbox_crash_report = make_mock_sandbox_report_darwin('DumpRenderTree', 28530)
    self.mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28530)
    self.newer_mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28529)
    self.other_process_mock_crash_report = make_mock_crash_report_darwin('FooProcess', 28527)
    self.misformatted_mock_crash_report = 'Junk that should not appear in a crash report' + make_mock_crash_report_darwin('DumpRenderTree', 28526)[200:]
    self.files = {}
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150715_quadzen.crash'] = self.older_mock_crash_report
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150716_quadzen_1.crash'] = self.older_mock_crash_report
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150717_quadzen_2.crash'] = self.older_mock_crash_report
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150718_quadzen.crash'] = self.sandbox_crash_report
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150719_quadzen.crash'] = self.mock_crash_report
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150720_quadzen.crash'] = self.newer_mock_crash_report
    # An unreadable/absent report is modeled by a None value.
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150721_quadzen.crash'] = None
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150722_quadzen.crash'] = self.other_process_mock_crash_report
    self.files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150723_quadzen.crash'] = self.misformatted_mock_crash_report
    # On-disk crash logs are bytes, so encode every report up front.
    self.files = {key: string_utils.encode(value) for key, value in self.files.items()}
    self.filesystem = MockFileSystem(self.files)
    crash_logs = CrashLogs(MockSystemHost(filesystem=self.filesystem), CrashLogsTest.DARWIN_MOCK_CRASH_DIRECTORY)
    logs = self.filesystem.files_under('/Users/mock/Library/Logs/DiagnosticReports/')
    # Sanity-check that the mock filesystem reports exactly the files created above.
    for path in reversed(sorted(logs)):
        self.assertTrue(path in self.files.keys())
    return crash_logs
def mock_contents_at_revision(changelog_path, revision):
    """Stand-in for contents_at_revision: returns _changelog1 as UTF-8 bytes
    with one invalid byte appended."""
    self.assertEqual(changelog_path, "foo")
    self.assertEqual(revision, "bar")
    # contents_at_revision is expected to return a byte array (str), so we
    # encode our unicode ChangeLog down to a utf-8 stream.  The ChangeLog
    # utf-8 decoding should ignore invalid codepoints.
    return string_utils.encode(_changelog1) + b'\255'
def test_unicode(self):
    """str() of a Contributor with non-ASCII name formats as 'Name <email>'."""
    contributor = Contributor(u'Michael Br\u00fcning', ['*****@*****.**'])
    expected = string_utils.encode(
        u'Michael Br\u00fcning <*****@*****.**>', target_type=str)
    self.assertEqual(str(contributor), expected)
def run_command(self,
                args,
                cwd=None,
                env=None,
                input=None,
                stdout=subprocess.PIPE,
                error_handler=None,
                ignore_errors=False,
                return_exit_code=False,
                return_stderr=True,
                decode_output=True):
    """Popen wrapper for convenience and to work around python bugs."""
    assert(isinstance(args, list) or isinstance(args, tuple))
    start_time = time.time()

    # input may be a string (piped to the child) or a file-like object
    # (used directly as the child's stdin) -- see _compute_stdin.
    stdin, string_to_communicate = self._compute_stdin(input)
    # return_stderr folds the child's stderr into its stdout stream.
    stderr = self.STDOUT if return_stderr else None

    process = self.popen(args,
                         stdin=stdin,
                         stdout=stdout,
                         stderr=stderr,
                         cwd=cwd,
                         env=env,
                         close_fds=self._should_close_fds())
    with process:
        if not string_to_communicate:
            output = process.communicate()[0]
        else:
            output = process.communicate(string_utils.encode(string_to_communicate, encoding='utf-8'))[0]

        # run_command automatically decodes to unicode() and converts CRLF to LF unless explicitly told not to.
        if decode_output:
            output = string_utils.decode(output, encoding=self._child_process_encoding()).replace('\r\n', '\n')

        # wait() is not threadsafe and can throw OSError due to:
        # http://bugs.python.org/issue1731717
        exit_code = process.wait()

    _log.debug('"%s" took %.2fs' % (self.command_for_printing(args), time.time() - start_time))

    if return_exit_code:
        return exit_code

    if exit_code:
        script_error = ScriptError(script_args=args,
                                   exit_code=exit_code,
                                   output=output,
                                   cwd=cwd)
        if ignore_errors:
            assert error_handler is None, "don't specify error_handler if ignore_errors is True"
            error_handler = Executive.ignore_error
        (error_handler or self.default_error_handler)(script_error)
    return output
def read_checksum(filehandle):
    """Return the md5 checksum embedded in a PNG's tEXt comment, or None."""
    # We expect the comment to be at the beginning of the file.
    data = string_utils.encode(filehandle.read(2048))
    comment_key = b'tEXtchecksum\x00'
    comment_pos = data.find(comment_key)
    if comment_pos == -1:
        return None
    start = comment_pos + len(comment_key)
    # The checksum is a 32-character hex digest right after the key.
    return string_utils.decode(data[start:start + 32], target_type=str)
def generate_supplemental_dependency(
        self, input_directory, supplemental_dependency_file,
        supplemental_makefile_dependency_file, window_constructors_file,
        workerglobalscope_constructors_file,
        dedicatedworkerglobalscope_constructors_file,
        serviceworkerglobalscope_constructors_file,
        workletglobalscope_constructors_file,
        paintworkletglobalscope_constructors_file,
        testglobalscope_constructors_file):
    """Run preprocess-idls.pl over every .idl file in input_directory.

    Writes the given dependency/constructor output files and returns the
    perl script's exit code (0 on success).
    """
    # Write the list of IDL files (skipping non-.idl files and generated
    # *Constructors.idl files) to a temp file for --idlFileNamesList.
    idl_files_list = tempfile.mkstemp()
    for input_file in os.listdir(input_directory):
        (name, extension) = os.path.splitext(input_file)
        if extension != '.idl':
            continue
        if name.endswith('Constructors'):
            continue
        os.write(
            idl_files_list[0],
            string_utils.encode(
                os.path.join(input_directory, input_file) + "\n"))
    os.close(idl_files_list[0])
    cmd = [
        'perl', '-w', '-IWebCore/bindings/scripts',
        'WebCore/bindings/scripts/preprocess-idls.pl', '--idlFileNamesList',
        idl_files_list[1], '--testGlobalContextName', 'TestGlobalObject',
        '--defines', '', '--idlAttributesFile',
        'WebCore/bindings/scripts/IDLAttributes.json',
        '--supplementalDependencyFile', supplemental_dependency_file,
        '--supplementalMakefileDeps', supplemental_makefile_dependency_file,
        '--windowConstructorsFile', window_constructors_file,
        '--workerGlobalScopeConstructorsFile',
        workerglobalscope_constructors_file,
        '--dedicatedWorkerGlobalScopeConstructorsFile',
        dedicatedworkerglobalscope_constructors_file,
        '--serviceWorkerGlobalScopeConstructorsFile',
        serviceworkerglobalscope_constructors_file,
        '--workletGlobalScopeConstructorsFile',
        workletglobalscope_constructors_file,
        '--paintWorkletGlobalScopeConstructorsFile',
        paintworkletglobalscope_constructors_file,
        '--testGlobalScopeConstructorsFile',
        testglobalscope_constructors_file, '--validateAgainstParser'
    ]
    exit_code = 0
    try:
        output = self.executive.run_command(cmd)
        if output:
            print(output)
    except ScriptError as e:
        print(e.output)
        exit_code = e.exit_code
    # mkstemp returns (fd, path); remove the temp list file by path.
    os.remove(idl_files_list[1])
    return exit_code
def _parse_leaks_output(self, leaks_output):
    """Parse `leaks` tool output into (count, excluded, bytes) totals.

    Returns (0, 0, 0) for empty output.
    """
    if not leaks_output:
        return 0, 0, 0
    leaks_output = string_utils.encode(leaks_output)
    # Example line: "Process 1234: 5 leaks for 1280 total leaked bytes."
    # FIX: renamed the third capture's local from `bytes`, which shadowed
    # the builtin.
    _, count, total_bytes = re.search(
        b'Process (?P<pid>\\d+): (?P<count>\\d+) leaks? for (?P<bytes>\\d+) total',
        leaks_output).groups()
    excluded_match = re.search(b'(?P<excluded>\\d+) leaks? excluded', leaks_output)
    excluded = excluded_match.group('excluded') if excluded_match else 0
    return int(count), int(excluded), int(total_bytes)
def communicate(self, input=None, timeout=None):
    """Mock of Popen.communicate(): feed stdin, wait, return (stdout, stderr)."""
    if self._communication_started and input:
        raise ValueError('Cannot send input after starting communication')
    self._communication_started = True
    if input:
        self.stdin.write(string_utils.encode(input))
    self.wait(timeout=timeout)
    stdout_data = self.stdout.read() if self.stdout else None
    stderr_data = self.stderr.read() if self.stderr else None
    return stdout_data, stderr_data
def change_state_to(device, state): assert isinstance(state, int) # Reaching into device.plist to change device state. Note that this will not change the initial state of the device # as determined from the .json output. device_plist = device.filesystem.expanduser( device.filesystem.join( SimulatedDeviceManager.simulator_device_path, device.udid, 'device.plist')) index_position = device.filesystem.files[device_plist].index( b'</integer>') - 1 device.filesystem.files[device_plist] = device.filesystem.files[ device_plist][:index_position] + string_utils.encode( str(state) ) + device.filesystem.files[device_plist][index_position + 1:]
def write(self, bytes, ignore_crash=False):
    """Write a request to the subprocess.

    The subprocess is (re-)start()'ed if is not already running."""
    if not self._proc:
        self._start()
    try:
        self._proc.stdin.write(string_utils.encode(bytes))
        self._proc.stdin.flush()
    except (IOError, ValueError):
        self.stop(0.0)
        # stop() calls _reset(), so we have to set crashed to True after calling stop()
        # unless we already know that this is a timeout.
        if ignore_crash:
            return
        _log.debug('{} because of a broken pipe when writing to stdin of the server process.'.format(self._crash_message))
        self._crashed = True
def _process_stdout_line(self, block, line):
    """Route one stdout line into a ContentBlock header field or its content."""
    known_headers = (
        (b'Content-Type: ', 'content_type', None),
        (b'Content-Transfer-Encoding: ', 'encoding', None),
        (b'Content-Length: ', '_content_length', int),
        (b'ActualHash: ', 'content_hash', None),
        (b'DumpMalloc: ', 'malloc', None),
        (b'DumpJSHeap: ', 'js_heap', None),
    )
    for prefix, attribute, converter in known_headers:
        if self._read_header(block, line, prefix, attribute, converter):
            return
    # Note, we're not reading ExpectedHash: here, but we could.
    # If the line wasn't a header, we just append it to the content.
    block.content = string_utils.encode(block.content) + line
def test_run_command_with_unicode(self):
    """Validate that it is safe to pass unicode() objects
    to Executive.run* methods, and they will return unicode()
    objects by default unless decode_output=False"""
    unicode_tor_input = u"WebKit \u2661 Tor Arne Vestb\u00F8!"
    # Windows child processes speak the mbcs codec rather than utf-8.
    if sys.platform.startswith('win'):
        encoding = 'mbcs'
    else:
        encoding = 'utf-8'
    encoded_tor = string_utils.encode(unicode_tor_input, encoding=encoding)

    # On Windows, we expect the unicode->mbcs->unicode roundtrip to be
    # lossy. On other platforms, we expect a lossless roundtrip.
    if sys.platform.startswith('win'):
        unicode_tor_output = string_utils.decode(encoded_tor, encoding=encoding)
    else:
        unicode_tor_output = unicode_tor_input

    executive = Executive()

    # Default: both piped input and echoed output round-trip as unicode.
    output = executive.run_command(command_line('cat'), input=unicode_tor_input)
    self.assertEqual(output, unicode_tor_output)

    output = executive.run_command(command_line('echo', unicode_tor_input))
    self.assertEqual(output, unicode_tor_output)

    # decode_output=False returns the raw encoded bytes.
    output = executive.run_command(command_line('echo', unicode_tor_input), decode_output=False)
    self.assertEqual(output, encoded_tor)

    # Make sure that str() input also works.
    output = executive.run_command(command_line('cat'), input=encoded_tor, decode_output=False)
    self.assertEqual(output, encoded_tor)

    # FIXME: We should only have one run* method to test
    output = executive.run_and_throw_if_fail(command_line(
        'echo', unicode_tor_input), quiet=True)
    self.assertEqual(output, unicode_tor_output)

    output = executive.run_and_throw_if_fail(command_line(
        'echo', unicode_tor_input), quiet=True, decode_output=False)
    self.assertEqual(output, encoded_tor)
def _compute_stdin(self, input):
    """Returns (stdin, string_to_communicate)"""
    # FIXME: We should be returning /dev/null for stdin or closing stdin
    # after process creation to prevent child processes from getting
    # input from the user.
    if not input:
        return (None, None)
    if hasattr(input, "read"):
        # The input is a file; assume it is in the right encoding.
        return (input, None)

    # Popen in Python 2.5 and before does not automatically encode unicode objects.
    # http://bugs.python.org/issue5290
    # See https://bugs.webkit.org/show_bug.cgi?id=37528
    # for an example of a regresion caused by passing a unicode string directly.
    # FIXME: We may need to encode differently on different platforms.
    encoded = string_utils.encode(input, encoding=self._child_process_encoding())
    return (self.PIPE, encoded)
def __init__(self, port_obj=None, name=None, cmd=None, env=None,
             universal_newlines=False, lines=None, crashed=False,
             target_host=None, crash_message=None):
    """Mock server process: records writes and replays canned output lines."""
    self.timed_out = False
    # Canned stdout lines, normalized to bytes like a real pipe would carry.
    self.lines = list(map(string_utils.encode, lines or []))
    self.crashed = crashed
    self.writes = []
    self.cmd = cmd
    self.env = env
    self.started = False
    self.stopped = False
    self.number_of_times_polled = 0
def _xvfb_read_display_id(self, read_fd): import errno import select fd_set = [read_fd] while fd_set: try: fd_list = select.select(fd_set, [], [])[0] except select.error as e: if e.args[0] == errno.EINTR: continue raise if read_fd in fd_list: # We only expect a number, so first read should be enough. display_id = os.read(read_fd, 256).strip(string_utils.encode('\n')) fd_set = [] return int(display_id)
def __init__(self, *args, **kwargs):
    """Mock command route: matches args and yields a canned completion.

    Raises TypeError for unexpected keyword arguments and ValueError when
    no positional arguments are given.
    """
    completion = kwargs.pop('completion', ProcessCompletion())
    cwd = kwargs.pop('cwd', None)
    input = kwargs.pop('input', None)
    generator = kwargs.pop('generator', None)
    if kwargs:
        # BUGFIX: kwargs.keys()[0] raises TypeError on Python 3 (views are
        # not subscriptable); next(iter(...)) works on both 2 and 3.
        raise TypeError(
            '__init__() got an unexpected keyword argument {}'.format(
                next(iter(kwargs))))
    # BUGFIX: the old check also tested the Python 2-only name `unicode`,
    # which is a NameError on Python 3.  (`args` is always a tuple here,
    # so this branch only guards against a future refactor.)
    if isinstance(args, str):
        self.args = [args]
    elif not args:
        raise ValueError(
            'Arguments must be provided to a CommandRoute')
    else:
        self.args = args
    self.generator = generator or (lambda *args, **kwargs: completion)
    self.cwd = cwd
    self.input = string_utils.encode(input) if input else None
def write_git_patch_file(self):
    """Write the pending WPT patch to a temp file and return its path.

    Returns b'' (and writes nothing) when the patch is empty or contains
    webkit-test-runner specific changes.
    """
    _, patch_file = self._filesystem.open_binary_tempfile(
        'wpt_export_patch')
    patch_data = self._wpt_patch
    # An empty `git diff` contains no 'diff' header at all.
    if b'diff' not in patch_data:
        _log.info('No changes to upstream, patch data is: "{}"'.format(
            string_utils.decode(patch_data, target_type=str)))
        return b''
    # Strip the WebKit-internal WPT directory prefix from all paths.
    # FIXME: We can probably try to use --relative git parameter to not do that replacement.
    patch_data = patch_data.replace(
        string_utils.encode(WEBKIT_WPT_DIR) + b'/', b'')

    # FIXME: Support stripping of <!-- webkit-test-runner --> comments.
    self.has_webkit_test_runner_specific_changes = b'webkit-test-runner' in patch_data
    if self.has_webkit_test_runner_specific_changes:
        _log.warning(
            "Patch contains webkit-test-runner specific changes, please remove them before creating a PR"
        )
        return b''

    self._filesystem.write_binary_file(patch_file, patch_data)
    return patch_file
def _read_block(self, deadline, test_name, wait_for_stderr_eof=False):
    """Read one output block (headers plus content) from the driver.

    Interleaves stdout and stderr reads until the stdout block ends (and,
    when wait_for_stderr_eof is set, until stderr ends too), watching for
    timeouts, crashes and ASan violations along the way.
    """
    block = ContentBlock()
    out_seen_eof = False
    asan_violation_detected = False
    while True:
        if out_seen_eof and (self.err_seen_eof or not wait_for_stderr_eof):
            break

        # Only poll the stream(s) that have not yet reached EOF.
        if self.err_seen_eof:
            out_line = self._server_process.read_stdout_line(deadline)
            err_line = None
        elif out_seen_eof:
            out_line = None
            err_line = self._server_process.read_stderr_line(deadline)
        else:
            out_line, err_line = self._server_process.read_either_stdout_or_stderr_line(
                deadline)

        # ServerProcess returns None for time outs and crashes.
        if out_line is None and err_line is None:
            break

        if out_line:
            assert not out_seen_eof
            out_line, out_seen_eof = self._strip_eof(out_line)
        if err_line:
            assert not self.err_seen_eof
            err_line, self.err_seen_eof = self._strip_eof(err_line)

        if out_line:
            self._check_for_driver_timeout(out_line)
            # 10 is ord('\n'); the line may be bytes or str depending on the
            # stream, so compare against both forms.
            if out_line[-1] != '\n' and out_line[-1] != 10:
                _log.error(
                    " %s -> Last character read from DRT stdout line was not a newline! This indicates either a NRWT or DRT bug."
                    % test_name)
            content_length_before_header_check = block._content_length
            self._process_stdout_line(block, out_line)
            # FIXME: Unlike HTTP, DRT dumps the content right after printing a Content-Length header.
            # Don't wait until we're done with headers, just read the binary blob right now.
            if content_length_before_header_check != block._content_length:
                block.content = string_utils.encode(
                    self._server_process.read_stdout(
                        deadline, block._content_length))

        if err_line:
            if self._check_for_driver_crash_or_unresponsiveness(err_line):
                break
            elif self._check_for_address_sanitizer_violation(err_line):
                asan_violation_detected = True
                self._crash_report_from_driver = b''
                # ASan report starts with a nondescript line, we only detect the second line.
                end_of_previous_error_line = self.error_from_test.rfind(
                    '\n', 0, -1)
                if end_of_previous_error_line > 0:
                    self.error_from_test = self.error_from_test[:
                                                                end_of_previous_error_line]
                else:
                    self.error_from_test = ''
                # Symbolication can take a very long time, give it 10 extra minutes to finish.
                # FIXME: This can likely be removed once <rdar://problem/18701447> is fixed.
                deadline += 10 * 60 * 1000
            # Once a violation is seen, all further stderr goes into the
            # crash report instead of the regular test error stream.
            if asan_violation_detected:
                self._crash_report_from_driver += string_utils.decode(
                    err_line, target_type=str)
            else:
                self.error_from_test += string_utils.decode(
                    err_line, target_type=str)

    if asan_violation_detected and not self._crashed_process_name:
        self._crashed_process_name = self._server_process.process_name()
        self._crashed_pid = self._server_process.system_pid()

    block.decode_content()
    return block
def test_auth_token(self):
    """auth_token() should be the base64 of 'user:token'."""
    expected = string_utils.decode(
        base64.b64encode(string_utils.encode('rutabaga:decafbad')),
        target_type=str).strip()
    self.assertEqual(self.wpt_github.auth_token(), expected)
def _encode_argument_if_needed(self, argument):
    """Encode a child-process argument on platforms that need byte arguments."""
    if self._should_encode_child_process_arguments():
        return string_utils.encode(
            argument, encoding=self._child_process_encoding())
    return argument
def __str__(self):
    """Format the contributor as '"Full Name" <email>'."""
    # BUGFIX: the old implementation wrapped both fields in the Python
    # 2-only builtin `unicode()`, which raises NameError on Python 3; the
    # fields are already text, so format them directly.
    return string_utils.encode(
        u'"{}" <{}>'.format(self.full_name, self.emails[0]), target_type=str)
def __init__(self, returncode=None, stdout=None, stderr=None, elapsed=0):
    """Record a finished process's result.

    A None returncode is treated as a generic failure (1); stdout/stderr
    are normalized to bytes, defaulting to b''.
    """
    self.returncode = returncode if returncode is not None else 1
    self.stdout = string_utils.encode(stdout) if stdout else b''
    self.stderr = string_utils.encode(stderr) if stderr else b''
    self.elapsed = elapsed