def __init__(self, stdout='MOCK STDOUT\n', stderr=''):
    self.pid = 42
    self.stdout = BytesIO(encode_if_necessary(stdout))
    self.stderr = BytesIO(encode_if_necessary(stderr))
    self.stdin = BytesIO()
    self.returncode = 0
    self._is_running = False
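
# A minimal, self-contained sketch (not part of the original module) of how test
# code is expected to interact with a mock process like the one built above. The
# class name 'FakeProcess' is hypothetical; only the attributes assigned in the
# __init__ above are assumed.
#
#     from io import BytesIO
#
#     class FakeProcess(object):
#         def __init__(self, stdout=b'MOCK STDOUT\n', stderr=b''):
#             self.pid = 42
#             self.stdout = BytesIO(stdout)
#             self.stderr = BytesIO(stderr)
#             self.stdin = BytesIO()
#             self.returncode = 0
#
#     proc = FakeProcess()
#     assert proc.stdout.read() == b'MOCK STDOUT\n'
#     assert proc.returncode == 0
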
def _file_object_for_upload(self, file_or_string):
    if hasattr(file_or_string, 'read'):
        return file_or_string
    # Only if file_or_string is not already encoded do we want to encode it.
    if isinstance(file_or_string, unicode):
        file_or_string = file_or_string.encode('utf-8')
    return BytesIO(file_or_string)
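
# A self-contained sketch of the normalization performed by _file_object_for_upload
# above, written for Python 3 (so `str` plays the role `unicode` plays in the
# method). The helper name is hypothetical and is not part of the original module.
from io import BytesIO


def _as_upload_file_object(file_or_string):
    """Return a file-like object whether a file, text, or bytes was given."""
    if hasattr(file_or_string, 'read'):
        return file_or_string
    if isinstance(file_or_string, str):
        file_or_string = file_or_string.encode('utf-8')
    return BytesIO(file_or_string)


assert _as_upload_file_object(b'raw bytes').read() == b'raw bytes'
assert _as_upload_file_object('text').read() == b'text'
assert _as_upload_file_object(BytesIO(b'x')).read() == b'x'
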
def __init__(self, in1, in2):
    if isinstance(in1, str):
        waveFile1 = wave.open(StringIO(in1), 'r')
    else:
        waveFile1 = wave.open(BytesIO(in1), 'rb')
    if isinstance(in2, str):
        waveFile2 = wave.open(StringIO(in2), 'r')
    else:
        waveFile2 = wave.open(BytesIO(in2), 'rb')

    params1 = waveFile1.getparams()
    params2 = waveFile2.getparams()

    self._diff = ''
    self._filesAreIdentical = not sum(map(self._diffParam, params1, params2, self._paramNames))
    self._filesAreIdenticalWithinTolerance = self._filesAreIdentical
    if not self._filesAreIdentical:
        return

    # Metadata is identical, compare the content now.
    channelCount1 = waveFile1.getnchannels()
    frameCount1 = waveFile1.getnframes()
    sampleWidth1 = waveFile1.getsampwidth()

    channelCount2 = waveFile2.getnchannels()
    frameCount2 = waveFile2.getnframes()
    sampleWidth2 = waveFile2.getsampwidth()

    allData1 = self._readSamples(waveFile1, sampleWidth1, frameCount1 * channelCount1)
    allData2 = self._readSamples(waveFile2, sampleWidth2, frameCount2 * channelCount2)
    results = list(map(self._diffSample, allData1, allData2, range(max(frameCount1 * channelCount1, frameCount2 * channelCount2))))

    cumulativeSampleDiff = sum(results)
    differingSampleCount = len(list(filter(bool, results)))
    self._filesAreIdentical = not differingSampleCount
    self._filesAreIdenticalWithinTolerance = not len(list(filter(lambda x: x > self._tolerance, results)))

    if differingSampleCount:
        self._diff += '\n'
        self._diff += 'Total differing samples: %d\n' % differingSampleCount
        self._diff += 'Percentage of differing samples: %0.3f%%\n' % (100 * float(differingSampleCount) / max(frameCount1, frameCount2))
        self._diff += 'Cumulative sample difference: %d\n' % cumulativeSampleDiff
        self._diff += 'Average sample difference: %f\n' % (float(cumulativeSampleDiff) / differingSampleCount)
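
# A self-contained sketch (not part of the original class) of how the two flags
# computed above relate to a per-sample tolerance. The sample-reading and
# per-sample diff helpers are omitted; only the classification logic is shown.
def classify(sample_diffs, tolerance):
    differing = [d for d in sample_diffs if d]
    identical = not differing
    within_tolerance = not any(d > tolerance for d in sample_diffs)
    return identical, within_tolerance


assert classify([0, 0, 0], tolerance=2) == (True, True)    # identical content
assert classify([0, 1, 2], tolerance=2) == (False, True)   # differs, but within tolerance
assert classify([0, 1, 5], tolerance=2) == (False, False)  # at least one sample exceeds tolerance
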
def _log_from_script_error_for_upload(cls, script_error, output_limit=None):
    # We have seen request timeouts with app engine due to large
    # log uploads. Trying only the last 512k.
    if not output_limit:
        output_limit = 512 * 1024  # 512k
    output = script_error.message_with_output(output_limit=output_limit)
    # We pre-encode the string to a byte array before passing it
    # to status_server, because ClientForm (part of mechanize)
    # wants a file-like object with pre-encoded data.
    return BytesIO(output.encode("utf-8"))
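
# A self-contained sketch of the "keep only the tail of a large log" idea used
# above. ScriptError.message_with_output() is assumed to truncate similarly; the
# helper name below is hypothetical and not part of the original module.
from io import BytesIO


def log_file_for_upload(output, output_limit=None):
    if not output_limit:
        output_limit = 512 * 1024  # default to the last 512k
    return BytesIO(output[-output_limit:].encode('utf-8'))


assert log_file_for_upload('x' * 600, output_limit=100).read() == b'x' * 100
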
def diff_image(self, expected_contents, actual_contents, tolerance):
    if tolerance != self._tolerance or (self._process and self._process.has_available_stdout()):
        self.stop()
    try:
        assert expected_contents
        assert actual_contents
        assert tolerance is not None

        if not self._process:
            self._start(tolerance)
        # Note that although we are handed 'old', 'new', ImageDiff wants 'new', 'old'.
        buffer = BytesIO()
        buffer.write(encode_if_necessary('Content-Length: {}\n'.format(len(actual_contents))))
        buffer.write(actual_contents)
        buffer.write(encode_if_necessary('Content-Length: {}\n'.format(len(expected_contents))))
        buffer.write(expected_contents)
        self._process.write(buffer.getvalue())
        return self._read()
    except IOError as exception:
        return (None, 0, "Failed to compute an image diff: %s" % str(exception))
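
# A self-contained sketch (assumption, not the original helper) of the byte
# stream diff_image() above hands to the image-diff process: two length-prefixed
# payloads, the actual image first, then the expected image.
from io import BytesIO


def build_image_diff_request(actual_contents, expected_contents):
    buf = BytesIO()
    for payload in (actual_contents, expected_contents):
        buf.write('Content-Length: {}\n'.format(len(payload)).encode('ascii'))
        buf.write(payload)
    return buf.getvalue()


request = build_image_diff_request(b'NEW', b'OLD')
assert request == b'Content-Length: 3\nNEW' + b'Content-Length: 3\nOLD'
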
def test_archive_upload(self):
    upload = Upload(
        suite='webkitpy-tests',
        commits=[Upload.create_commit(
            repository_id='webkit',
            id='5',
            branch='trunk',
        )],
    )
    with mock.patch('requests.post', new=lambda url, headers={}, data={}, files={}, verify=True: self.MockResponse()):
        self.assertTrue(upload.upload_archive('https://results.webkit.org', archive=BytesIO(b'content'), log_line_func=lambda _: None))

    with mock.patch('requests.post', new=lambda url, headers={}, data={}, files={}, verify=True: self.raise_requests_ConnectionError()):
        lines = []
        self.assertTrue(upload.upload_archive('https://results.webkit.org', archive=BytesIO(b'content'), log_line_func=lambda line: lines.append(line)))
        self.assertEqual([
            ' ' * 4 + 'Failed to upload test archive to https://results.webkit.org, results server dropped connection, likely due to archive size (0.0 MB).',
            ' ' * 4 + 'This error is not fatal, continuing',
        ], lines)

    mock_404 = mock.patch('requests.post', new=lambda url, headers={}, data={}, files={}, verify=True: self.MockResponse(
        status_code=404,
        text=json.dumps(dict(description='No such address')),
    ))
    with mock_404:
        lines = []
        self.assertFalse(upload.upload_archive('https://results.webkit.org', archive='content', log_line_func=lambda line: lines.append(line)))
        self.assertEqual([
            ' ' * 4 + 'Error uploading archive to https://results.webkit.org',
            ' ' * 8 + 'No such address',
        ], lines)

    mock_413 = mock.patch('requests.post', new=lambda url, headers={}, data={}, files={}, verify=True: self.MockResponse(
        status_code=413,
        text=json.dumps(dict(description='Request Entity Too Large')),
    ))
    with mock_413:
        lines = []
        self.assertTrue(upload.upload_archive('https://results.webkit.org', archive='content', log_line_func=lambda line: lines.append(line)))
        self.assertEqual([
            ' ' * 4 + 'Upload to https://results.webkit.org failed:',
            ' ' * 8 + 'Request Entity Too Large',
            ' ' * 4 + 'This error is not fatal, continuing',
        ], lines)
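
# A self-contained sketch (derived only from the assertions in the test above,
# not from the real upload_archive implementation) of how the three HTTP
# outcomes are treated: success and 413 are non-fatal, 404 is fatal.
def archive_upload_outcome(status_code):
    if status_code // 100 == 2:
        return True   # upload succeeded
    if status_code == 413:
        return True   # archive too large: logged, but not fatal
    return False      # e.g. 404: fatal, upload_archive reports failure


assert archive_upload_outcome(200) is True
assert archive_upload_outcome(413) is True
assert archive_upload_outcome(404) is False
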