def test_artifacts_simple(self):
    """Assemble a valid artifact bundle and check the resulting release file.

    NOTE(review): renamed from ``test_artifacts`` — another method further
    down defines the same name, and Python keeps only the last definition
    in a class body, so this test was silently shadowed and never ran.
    """
    bundle_file = self.create_artifact_bundle()
    blob1 = FileBlob.from_file(ContentFile(bundle_file))
    total_checksum = sha1(bundle_file).hexdigest()

    assemble_artifacts(
        org_id=self.organization.id,
        version=self.release.version,
        checksum=total_checksum,
        chunks=[blob1.checksum],
    )

    # Assembly of a valid bundle must complete without error details.
    status, details = get_assemble_status(
        AssembleTask.ARTIFACTS, self.organization.id, total_checksum
    )
    assert status == ChunkFileState.OK
    assert details is None

    # The bundled file was registered as an individual release file,
    # carrying its sourcemap reference in the headers.
    release_file = ReleaseFile.objects.get(
        organization=self.organization,
        release=self.release,
        name="~/index.js",
        dist=None,
    )
    assert release_file
    assert release_file.file.headers == {"Sourcemap": "index.js.map"}
def test_artifacts(self):
    """Assemble the same bundle under both sides of the archive threshold.

    With ``release-archive-min-files`` at 1 the two bundled files meet the
    minimum and the archive itself is stored; at 10 they do not, so the
    files are extracted and stored individually.
    """
    bundle = self.create_artifact_bundle()
    blob = FileBlob.from_file(ContentFile(bundle))
    checksum = sha1(bundle).hexdigest()

    for min_files in (10, 1):
        with self.options({"processing.release-archive-min-files": min_files}):
            # Start each iteration from a clean slate.
            ReleaseFile.objects.filter(release_id=self.release.id).delete()
            assert self.release.count_artifacts() == 0

            assemble_artifacts(
                org_id=self.organization.id,
                version=self.release.version,
                checksum=checksum,
                chunks=[blob.checksum],
            )

            assert self.release.count_artifacts() == 2

            status, details = get_assemble_status(
                AssembleTask.ARTIFACTS, self.organization.id, checksum
            )
            assert status == ChunkFileState.OK
            assert details is None

            if min_files == 1:
                # File count meets the minimum: an archive was saved.
                index = read_artifact_index(self.release, dist=None)
                archive_ident = index["files"]["~/index.js"]["archive_ident"]
                releasefile = ReleaseFile.objects.get(
                    release_id=self.release.id, ident=archive_ident
                )
                # The stored artifact is the unmodified original bundle.
                assert releasefile.file.size == len(bundle)
            else:
                # Too few files for an archive: files were saved individually.
                release_file = ReleaseFile.objects.get(
                    organization_id=self.organization.id,
                    release_id=self.release.id,
                    name="~/index.js",
                    dist_id=None,
                )
                assert release_file.file.headers == {"Sourcemap": "index.js.map"}
def test_artifacts_invalid_zip(self):
    """An empty payload is not a valid zip and must end in an ERROR state."""
    bundle = b''
    blob = FileBlob.from_file(ContentFile(bundle))
    checksum = sha1(bundle).hexdigest()

    assemble_artifacts(
        org_id=self.organization.id,
        version=self.release.version,
        checksum=checksum,
        chunks=[blob.checksum],
    )

    status, _details = get_assemble_status(
        AssembleTask.ARTIFACTS, self.organization.id, checksum
    )
    assert status == ChunkFileState.ERROR
def test_assemble_response(self):
    """Posting an already-assembled checksum to the endpoint reports OK."""
    bundle = self.create_artifact_bundle()
    checksum = sha1(bundle).hexdigest()
    blob = FileBlob.from_file(ContentFile(bundle))

    # Pre-assemble so the endpoint sees a finished task for this checksum.
    assemble_artifacts(
        org_id=self.organization.id,
        version=self.release.version,
        checksum=checksum,
        chunks=[blob.checksum],
    )

    response = self.client.post(
        self.url,
        data={"checksum": checksum, "chunks": [blob.checksum]},
        HTTP_AUTHORIZATION=u"Bearer {}".format(self.token.token),
    )

    assert response.status_code == 200, response.content
    assert response.data["state"] == ChunkFileState.OK
def test_failing_update(self, _):
    """Assembly with archive saving enabled still ends in the OK state.

    The extra ``_`` argument is injected by a mock patch decorator —
    presumably the one simulating the failing update; confirm at the
    decorator site outside this view.
    """
    bundle = self.create_artifact_bundle()
    blob = FileBlob.from_file(ContentFile(bundle))
    checksum = sha1(bundle).hexdigest()

    archive_options = {
        "processing.save-release-archives": True,
        "processing.release-archive-min-files": 1,
    }
    with self.options(archive_options):
        assemble_artifacts(
            org_id=self.organization.id,
            version=self.release.version,
            checksum=checksum,
            chunks=[blob.checksum],
        )

        # Status is still OK:
        status, details = get_assemble_status(
            AssembleTask.ARTIFACTS, self.organization.id, checksum
        )
        assert status == ChunkFileState.OK
def test_dif_error_response(self):
    """A payload that fails assembly surfaces ERROR through the endpoint."""
    bundle = b'invalid'
    checksum = sha1(bundle).hexdigest()
    blob = FileBlob.from_file(ContentFile(bundle))

    # Assembly of a non-zip payload fails, leaving an ERROR status behind.
    assemble_artifacts(
        org_id=self.organization.id,
        version=self.release.version,
        checksum=checksum,
        chunks=[blob.checksum],
    )

    payload = {'checksum': checksum, 'chunks': [blob.checksum]}
    response = self.client.post(
        self.url,
        data=payload,
        HTTP_AUTHORIZATION=u'Bearer {}'.format(self.token.token),
    )

    assert response.status_code == 200, response.content
    assert response.data['state'] == ChunkFileState.ERROR