def test_unicode(self):
    """A release file stored with a utf-8 charset header is fetched back
    as text, and the follow-up (cached) fetch returns an equal result.
    """
    project = self.project
    release = Release.objects.create(
        project=project,
        version='abc',
    )
    file = File.objects.create(
        name='file.min.js',
        type='release.file',
        headers={'Content-Type': 'application/json; charset=utf-8'},
    )
    file.putfile(six.BytesIO(unicode_body.encode('utf-8')))
    ReleaseFile.objects.create(
        name='file.min.js',
        release=release,
        project=project,
        file=file,
    )

    result = fetch_release_file('file.min.js', release)

    # isinstance is the idiomatic type check (was: type(...) is ...)
    assert isinstance(result[1], six.text_type)
    assert result == (
        {'Content-Type': 'application/json; charset=utf-8'},
        unicode_body,
        200,
    )

    # test with cache hit, which should be compressed
    new_result = fetch_release_file('file.min.js', release)
    assert result == new_result
def test_unicode(self):
    """Fetching a utf-8 release file yields its raw bytes plus charset
    metadata; a second (cached) fetch yields an equal result."""
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id, version="abc"
    )
    release.add_project(project)

    encoded = unicode_body.encode("utf-8")
    release_file = File.objects.create(
        name="file.min.js",
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    release_file.putfile(six.BytesIO(encoded))
    ReleaseFile.objects.create(
        name="file.min.js",
        release=release,
        organization_id=project.organization_id,
        file=release_file,
    )

    result = fetch_release_file("file.min.js", release)

    assert isinstance(result.body, six.binary_type)
    expected = http.UrlResult(
        "file.min.js",
        {"content-type": "application/json; charset=utf-8"},
        encoded,
        200,
        "utf-8",
    )
    assert result == expected

    # test with cache hit, which should be compressed
    assert fetch_release_file("file.min.js", release) == result
def test_unicode(self):
    """A utf-8 release file round-trips as bytes on the first fetch and
    survives the cache round-trip unchanged."""
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id, version="abc"
    )
    release.add_project(project)

    body_bytes = unicode_body.encode("utf-8")
    stored = File.objects.create(
        name="file.min.js",
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    stored.putfile(six.BytesIO(body_bytes))
    ReleaseFile.objects.create(
        name="file.min.js",
        release=release,
        organization_id=project.organization_id,
        file=stored,
    )

    first = fetch_release_file("file.min.js", release)

    assert isinstance(first.body, six.binary_type)
    assert first == http.UrlResult(
        "file.min.js",
        {"content-type": "application/json; charset=utf-8"},
        body_bytes,
        200,
        "utf-8",
    )

    # looking again should hit the cache - make sure it's come through the
    # caching/uncaching process unscathed
    assert fetch_release_file("file.min.js", release) == first
def test_distribution(self):
    """When a dist is supplied, the release file registered under that
    dist — not the dist-less one with the same name — is returned, and the
    cached re-fetch agrees."""
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id,
        version='abc',
    )
    release.add_project(project)

    payload = unicode_body.encode('utf-8')
    json_headers = {'Content-Type': 'application/json; charset=utf-8'}

    # Dist-less decoy with an empty body, then the real dist-scoped file.
    decoy = File.objects.create(
        name='file.min.js', type='release.file', headers=json_headers)
    target = File.objects.create(
        name='file.min.js', type='release.file', headers=json_headers)
    decoy.putfile(six.BytesIO(b''))
    target.putfile(six.BytesIO(payload))

    dist = release.add_dist('foo')
    ReleaseFile.objects.create(
        name='file.min.js',
        release=release,
        organization_id=project.organization_id,
        file=decoy,
    )
    ReleaseFile.objects.create(
        name='file.min.js',
        release=release,
        dist=dist,
        organization_id=project.organization_id,
        file=target,
    )

    result = fetch_release_file('file.min.js', release, dist)

    assert isinstance(result.body, six.binary_type)
    assert result == http.UrlResult(
        'file.min.js',
        {'content-type': 'application/json; charset=utf-8'},
        payload,
        200,
        'utf-8',
    )

    # test with cache hit, which should be compressed
    assert fetch_release_file('file.min.js', release, dist) == result
def test_distribution(self):
    """The release-file cache must key on dist as well as filename:
    fetching the same name under two different dists returns each dist's
    own contents."""
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id, version="abc")
    release.add_project(project)

    foo_file = File.objects.create(
        name="file.min.js",
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    # BytesIO requires bytes — passing a str raises TypeError on Python 3
    foo_file.putfile(six.BytesIO(b"foo"))
    foo_dist = release.add_dist("foo")
    ReleaseFile.objects.create(
        name="file.min.js",
        release=release,
        dist=foo_dist,
        organization_id=project.organization_id,
        file=foo_file,
    )

    bar_file = File.objects.create(
        name="file.min.js",
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    bar_file.putfile(six.BytesIO(b"bar"))
    bar_dist = release.add_dist("bar")
    ReleaseFile.objects.create(
        name="file.min.js",
        release=release,
        dist=bar_dist,
        organization_id=project.organization_id,
        file=bar_file,
    )

    foo_result = fetch_release_file("file.min.js", release, foo_dist)

    assert isinstance(foo_result.body, six.binary_type)
    # body comes back as bytes, so compare against bytes literals
    assert foo_result == http.UrlResult(
        "file.min.js",
        {"content-type": "application/json; charset=utf-8"},
        b"foo",
        200,
        "utf-8",
    )

    # test that cache pays attention to dist value as well as name
    bar_result = fetch_release_file("file.min.js", release, bar_dist)

    # result is cached, but that's not what we should find
    assert bar_result != foo_result
    assert bar_result == http.UrlResult(
        "file.min.js",
        {"content-type": "application/json; charset=utf-8"},
        b"bar",
        200,
        "utf-8",
    )
def test_fallbacks(self):
    """A file registered under a '~/'-prefixed name is found when fetched
    by a full URL (query string included)."""
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id,
        version='abc',
    )
    release.add_project(project)

    body = unicode_body.encode('utf-8')
    stored = File.objects.create(
        name='~/file.min.js',
        type='release.file',
        headers={'Content-Type': 'application/json; charset=utf-8'},
    )
    stored.putfile(six.BytesIO(body))
    ReleaseFile.objects.create(
        name='~/file.min.js',
        release=release,
        organization_id=project.organization_id,
        file=stored,
    )

    result = fetch_release_file('http://example.com/file.min.js?lol', release)

    assert isinstance(result.body, six.binary_type)
    assert result == http.UrlResult(
        'http://example.com/file.min.js?lol',
        {'content-type': 'application/json; charset=utf-8'},
        body,
        200,
        'utf-8',
    )
def test_tilde(self):
    """Fetching by absolute URL resolves to the ReleaseFile stored under
    the tilde-prefixed path, query string and all."""
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id, version="abc"
    )
    release.add_project(project)

    raw = unicode_body.encode("utf-8")
    stored = File.objects.create(
        name="~/file.min.js",
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    stored.putfile(six.BytesIO(raw))
    ReleaseFile.objects.create(
        name="~/file.min.js",
        release=release,
        organization_id=project.organization_id,
        file=stored,
    )

    url = "http://example.com/file.min.js?lol"
    result = fetch_release_file(url, release)

    assert isinstance(result.body, six.binary_type)
    assert result == http.UrlResult(
        url,
        {"content-type": "application/json; charset=utf-8"},
        raw,
        200,
        "utf-8",
    )
def test_unicode(self):
    """A utf-8 release file is fetched back as bytes with lower-cased
    headers, and the cache hit returns an equal result."""
    project = self.project
    # NOTE(review): passing project= to create() *and* calling add_project
    # looks redundant — verify against the current Release model
    release = Release.objects.create(
        project=project,
        organization_id=project.organization_id,
        version='abc',
    )
    release.add_project(project)
    file = File.objects.create(
        name='file.min.js',
        type='release.file',
        headers={'Content-Type': 'application/json; charset=utf-8'},
    )
    binary_body = unicode_body.encode('utf-8')
    file.putfile(six.BytesIO(binary_body))
    ReleaseFile.objects.create(
        name='file.min.js',
        release=release,
        project=project,
        organization_id=project.organization_id,
        file=file,
    )

    result = fetch_release_file('file.min.js', release)

    # isinstance is the idiomatic type check (was: type(...) is ...)
    assert isinstance(result[1], six.binary_type)
    assert result == (
        {
            'content-type': 'application/json; charset=utf-8'
        },
        binary_body,
        200,
        'utf-8',
    )

    # test with cache hit, which should be compressed
    new_result = fetch_release_file('file.min.js', release)
    assert result == new_result
def test_retry_file_open(self) -> None:
    """If reading the cached file raises ESTALE, fetch_release_file gets a
    fresh reader and succeeds on the second attempt."""
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id, version="abc"
    )
    release.add_project(project)

    payload = b"foo"
    file = File.objects.create(
        name="file.min.js",
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    file.putfile(BytesIO(payload))
    ReleaseFile.objects.create(
        name=file.name,
        release=release,
        organization_id=project.organization_id,
        file=file,
    )

    # First reader blows up with a stale-NFS-handle error...
    stale_error = OSError()
    stale_error.errno = errno.ESTALE
    failing_file = MagicMock()
    failing_file.chunks.side_effect = stale_error
    failing_reader = MagicMock()
    failing_reader.__enter__.return_value = failing_file

    # ...the second reader serves the real content.
    working_file = MagicMock()
    working_file.chunks.return_value = iter([payload])
    working_reader = MagicMock()
    working_reader.__enter__.return_value = working_file

    with patch("sentry.lang.javascript.processor.ReleaseFile.cache") as cache:
        cache.getfile.side_effect = [failing_reader, working_reader]

        expected = http.UrlResult(
            file.name,
            {k.lower(): v.lower() for k, v in file.headers.items()},
            payload,
            200,
            "utf-8",
        )
        assert fetch_release_file(file.name, release) == expected

    assert failing_file.chunks.call_count == 1
    assert working_file.chunks.call_count == 1
def test_unicode(self):
    """A utf-8 release file is fetched back as bytes with lower-cased
    headers; the cached fetch returns an equal result."""
    project = self.project
    # NOTE(review): project= on create() plus add_project looks redundant —
    # verify against the current Release model
    release = Release.objects.create(
        project=project,
        organization_id=project.organization_id,
        version='abc',
    )
    release.add_project(project)
    file = File.objects.create(
        name='file.min.js',
        type='release.file',
        headers={'Content-Type': 'application/json; charset=utf-8'},
    )
    binary_body = unicode_body.encode('utf-8')
    file.putfile(six.BytesIO(binary_body))
    ReleaseFile.objects.create(
        name='file.min.js',
        release=release,
        project=project,
        organization_id=project.organization_id,
        file=file,
    )

    result = fetch_release_file('file.min.js', release)

    # isinstance is the idiomatic type check (was: type(...) is ...)
    assert isinstance(result[1], six.binary_type)
    assert result == (
        {'content-type': 'application/json; charset=utf-8'},
        binary_body,
        200,
        'utf-8',
    )

    # test with cache hit, which should be compressed
    new_result = fetch_release_file('file.min.js', release)
    assert result == new_result
def test_caching(self):
    # Set the threshold to zero to force caching on the file system
    options.set("releasefile.cache-limit", 0)

    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id, version="abc"
    )
    release.add_project(project)

    raw = unicode_body.encode("utf-8")
    file = File.objects.create(
        name="file.min.js",
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    file.putfile(BytesIO(raw))
    ReleaseFile.objects.create(
        name="file.min.js",
        release_id=release.id,
        organization_id=project.organization_id,
        file=file,
    )

    fetched = fetch_release_file("file.min.js", release)

    assert isinstance(fetched.body, bytes)
    assert fetched == http.UrlResult(
        "file.min.js",
        {"content-type": "application/json; charset=utf-8"},
        raw,
        200,
        "utf-8",
    )

    # test with cache hit, coming from the FS
    assert fetch_release_file("file.min.js", release) == fetched
def test_compression(self, mock_compress_file):
    """
    For files larger than max memcached payload size we want to avoid
    pointless compression and caching attempt since it fails silently.

    Tests scenarios:
    - happy path where compressed file is successfully cached
    - compressed payload is too large to cache and we will avoid
      compression and caching while the metadata cache exists
    """
    # The source had the final `with` statement split across two physical
    # lines (broken syntax); rejoined here.
    project = self.project
    release = Release.objects.create(
        organization_id=project.organization_id, version="abc")
    release.add_project(project)

    filename = "file.min.js"
    file = File.objects.create(
        name=filename,
        type="release.file",
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    binary_body = unicode_body.encode("utf-8")
    file.putfile(BytesIO(binary_body))

    ReleaseFile.objects.create(
        name="file.min.js",
        release=release,
        organization_id=project.organization_id,
        file=file,
    )

    mock_compress_file.return_value = (binary_body, binary_body)

    releasefile_ident = ReleaseFile.get_ident(filename, None)
    cache_key = get_release_file_cache_key(
        release_id=release.id, releasefile_ident=releasefile_ident)
    cache_key_meta = get_release_file_cache_key_meta(
        release_id=release.id, releasefile_ident=releasefile_ident)

    fetch_release_file(filename, release)

    # Here the ANY is File() retrieved from cache/db
    assert mock_compress_file.mock_calls == [call(ANY)]
    assert cache.get(cache_key_meta)["compressed_size"] == len(binary_body)
    assert cache.get(cache_key)

    # Remove cache and check that calling fetch_release_file will do the
    # compression and caching again
    cache.set(cache_key, None)
    mock_compress_file.reset_mock()

    fetch_release_file(filename, release)

    assert mock_compress_file.mock_calls == [call(ANY)]
    assert cache.get(cache_key_meta)["compressed_size"] == len(binary_body)
    assert cache.get(cache_key)

    # If the file is bigger than the max cache value threshold, avoid
    # compression and caching
    cache.set(cache_key, None)
    mock_compress_file.reset_mock()
    with patch(
        "sentry.lang.javascript.processor.CACHE_MAX_VALUE_SIZE",
        len(binary_body) - 1,
    ):
        result = fetch_release_file(filename, release)

    assert result == http.UrlResult(
        filename,
        {"content-type": "application/json; charset=utf-8"},
        binary_body,
        200,
        "utf-8",
    )
    assert mock_compress_file.mock_calls == []
    assert cache.get(cache_key_meta)["compressed_size"] == len(binary_body)
    assert cache.get(cache_key) is None

    # If the file is bigger than the max cache value threshold, but the
    # metadata cache is empty as well, compress and attempt to cache anyway
    cache.set(cache_key, None)
    cache.set(cache_key_meta, None)
    mock_compress_file.reset_mock()
    with patch(
        "sentry.lang.javascript.processor.CACHE_MAX_VALUE_SIZE",
        len(binary_body) - 1,
    ):
        result = fetch_release_file(filename, release)

    assert result == http.UrlResult(
        filename,
        {"content-type": "application/json; charset=utf-8"},
        binary_body,
        200,
        "utf-8",
    )
    assert mock_compress_file.mock_calls == [call(ANY)]
    assert cache.get(cache_key_meta)["compressed_size"] == len(binary_body)
    assert cache.get(cache_key)

    # If the file is smaller than the max cache value threshold, but the
    # cache is empty, compress and cache
    cache.set(cache_key, None)
    mock_compress_file.reset_mock()
    with patch(
        "sentry.lang.javascript.processor.CACHE_MAX_VALUE_SIZE",
        len(binary_body) + 1,
    ):
        result = fetch_release_file(filename, release)

    assert result == http.UrlResult(
        filename,
        {"content-type": "application/json; charset=utf-8"},
        binary_body,
        200,
        "utf-8",
    )
    assert mock_compress_file.mock_calls == [call(ANY)]
    assert cache.get(cache_key_meta)["compressed_size"] == len(binary_body)
    assert cache.get(cache_key)