def receive_artifact(self, chunk):
    """Handles assembling of Manifest as it's being uploaded."""
    with NamedTemporaryFile("ab") as temp_file:
        size = 0
        hashers = {}
        for algorithm in Artifact.DIGEST_FIELDS:
            hashers[algorithm] = getattr(hashlib, algorithm)()
        while True:
            subchunk = chunk.read(2000000)
            if not subchunk:
                break
            temp_file.write(subchunk)
            size += len(subchunk)
            for algorithm in Artifact.DIGEST_FIELDS:
                hashers[algorithm].update(subchunk)
        temp_file.flush()
        digests = {}
        for algorithm in Artifact.DIGEST_FIELDS:
            digests[algorithm] = hashers[algorithm].hexdigest()
        artifact = Artifact(file=temp_file.name, size=size, **digests)
        try:
            artifact.save()
        except IntegrityError:
            artifact = Artifact.objects.get(sha256=artifact.sha256)
        return artifact
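# A minimal standalone sketch of the incremental-hashing loop used above:
# hashlib objects accept repeated update() calls, so a large upload can be
# digested in fixed-size subchunks without holding the whole payload in
# memory. The helper name and default algorithm set are hypothetical.
import hashlib
import io

def digest_stream(stream, algorithms=("sha256", "sha512"), chunk_size=2_000_000):
    hashers = {name: hashlib.new(name) for name in algorithms}
    size = 0
    while True:
        subchunk = stream.read(chunk_size)
        if not subchunk:
            break
        size += len(subchunk)
        for hasher in hashers.values():
            hasher.update(subchunk)
    return size, {name: h.hexdigest() for name, h in hashers.items()}

# e.g. digest_stream(io.BytesIO(b"payload")) -> (7, {"sha256": "...", "sha512": "..."})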
def _save_artifact_blocking(artifact_attributes):
    saved_artifact = Artifact(**artifact_attributes)
    try:
        saved_artifact.save()
    except IntegrityError:
        del artifact_attributes["file"]
        saved_artifact = Artifact.objects.get(**artifact_attributes)
        saved_artifact.touch()
    return saved_artifact
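# A minimal usage sketch for _save_artifact_blocking above, assuming a
# configured Django/pulpcore environment. The attribute dict (built here from
# a downloader's artifact_attributes, which carry the computed digests and
# size) must include the unique digest fields: after an IntegrityError the
# helper drops "file" and re-queries by the remaining attributes to fetch the
# row a concurrent writer already saved, then touch()es it to refresh its
# timestamp. The wrapper function name is hypothetical.
def save_downloaded_artifact(downloader):
    attrs = dict(downloader.artifact_attributes)  # digests and size
    attrs["file"] = downloader.path               # path to the downloaded file
    return _save_artifact_blocking(attrs)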
def test_direct_set_forbidden(self, mock_FORBIDDEN_DIGESTS, mock_DIGEST_FIELDS):
    with self.assertRaises(UnsupportedDigestValidationError):
        a = Artifact(
            file=SimpleUploadedFile("test_filename", b"test content"),
            sha512="asdf",
            sha384="asdf",
            sha224="asdf",
            sha1="asdf",
            sha256="asdf",
            size=1024,
        )
        a.md5 = "asdf"
        a.save()
def test_forgot_something(self, mock_FORBIDDEN_DIGESTS, mock_DIGEST_FIELDS):
    with self.assertRaises(MissingDigestValidationError):
        a = Artifact(
            file=SimpleUploadedFile("test_filename", b"test content"),
            sha512="asdf",
            sha384="asdf",
            sha224="asdf",
            sha1="asdf",
            sha256="asdf",
            size=1024,
        )
        a.sha224 = None
        a.save()
def _artifact_from_data(raw_data):
    tmpfile = PulpTemporaryUploadedFile("", "application/octet-stream", len(raw_data), "", "")
    tmpfile.write(raw_data)
    for hasher in Artifact.DIGEST_FIELDS:
        tmpfile.hashers[hasher].update(raw_data)
    artifact = Artifact()
    artifact.file = tmpfile
    artifact.size = tmpfile.size
    for hasher in Artifact.DIGEST_FIELDS:
        setattr(artifact, hasher, tmpfile.hashers[hasher].hexdigest())
    artifact.save()
    return artifact
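# A minimal usage sketch for _artifact_from_data above, assuming a configured
# Django/pulpcore environment. Unlike the save-or-fetch helpers elsewhere in
# this section, _artifact_from_data does not catch IntegrityError, so a
# caller racing another writer on identical bytes can handle it like this
# (the wrapper name and payload are hypothetical):
import hashlib

def artifact_from_bytes(raw_data):
    try:
        return _artifact_from_data(raw_data)
    except IntegrityError:
        # Identical content was saved concurrently; fetch the existing row.
        return Artifact.objects.get(sha256=hashlib.sha256(raw_data).hexdigest())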
def put(self, request, path, pk=None):
    """Handles creation of Uploads."""
    _, repository = self.get_dr_push(request, path)
    digest = request.query_params["digest"]
    upload = models.Upload.objects.get(pk=pk, repository=repository)
    if upload.sha256 == digest[len("sha256:"):]:
        try:
            artifact = Artifact(
                file=upload.file.name,
                md5=upload.md5,
                sha1=upload.sha1,
                sha256=upload.sha256,
                sha384=upload.sha384,
                sha512=upload.sha512,
                size=upload.file.size,
            )
            artifact.save()
        except IntegrityError:
            artifact = Artifact.objects.get(sha256=artifact.sha256)
        try:
            blob = models.Blob(digest=digest, media_type=models.MEDIA_TYPE.REGULAR_BLOB)
            blob.save()
        except IntegrityError:
            blob = models.Blob.objects.get(digest=digest)
        try:
            blob_artifact = ContentArtifact(
                artifact=artifact, content=blob, relative_path=digest
            )
            blob_artifact.save()
        except IntegrityError:
            pass
        with repository.new_version() as new_version:
            new_version.add_content(models.Blob.objects.filter(pk=blob.pk))
        upload.delete()
        return BlobResponse(blob, path, 201, request)
    else:
        raise Exception("The digest did not match")
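# Context for the digest comparison above: the registry blob-upload PUT
# carries the client-computed digest as a query parameter in
# "<algorithm>:<hex>" form, and the handler strips the "sha256:" prefix
# before comparing against the upload's own hash. A sketch of that parsing,
# with a hypothetical helper name:
def _split_digest(digest):
    algorithm, _, hexdigest = digest.partition(":")
    return algorithm, hexdigest

assert _split_digest("sha256:deadbeef") == ("sha256", "deadbeef")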
async def run(self):
    """
    ContainerFirstStage.
    """
    future_manifests = []
    tag_list = []
    to_download = []
    man_dcs = {}
    total_blobs = []

    with ProgressReport(
        message='Downloading tag list', code='downloading.tag_list', total=1
    ) as pb:
        repo_name = self.remote.namespaced_upstream_name
        relative_url = '/v2/{name}/tags/list'.format(name=repo_name)
        tag_list_url = urljoin(self.remote.url, relative_url)
        list_downloader = self.remote.get_downloader(url=tag_list_url)
        await list_downloader.run(extra_data={'repo_name': repo_name})

        with open(list_downloader.path) as tags_raw:
            tags_dict = json.loads(tags_raw.read())
            tag_list = tags_dict['tags']

        # check for the presence of the pagination link header
        link = list_downloader.response_headers.get('Link')
        await self.handle_pagination(link, repo_name, tag_list)
        tag_list = self.filter_tags(tag_list)
        pb.increment()

    for tag_name in tag_list:
        relative_url = '/v2/{name}/manifests/{tag}'.format(
            name=self.remote.namespaced_upstream_name,
            tag=tag_name,
        )
        url = urljoin(self.remote.url, relative_url)
        downloader = self.remote.get_downloader(url=url)
        to_download.append(downloader.run(extra_data={'headers': V2_ACCEPT_HEADERS}))

    pb_parsed_tags = ProgressReport(
        message='Processing Tags',
        code='processing.tag',
        state=TASK_STATES.RUNNING,
        total=len(tag_list),
    )

    for download_tag in asyncio.as_completed(to_download):
        tag = await download_tag
        with open(tag.path, 'rb') as content_file:
            raw_data = content_file.read()
        content_data = json.loads(raw_data)
        media_type = content_data.get('mediaType')
        tag.artifact_attributes['file'] = tag.path
        saved_artifact = Artifact(**tag.artifact_attributes)
        try:
            saved_artifact.save()
        except IntegrityError:
            del tag.artifact_attributes['file']
            saved_artifact = Artifact.objects.get(**tag.artifact_attributes)
        tag_dc = self.create_tag(saved_artifact, tag.url)

        if media_type in (MEDIA_TYPE.MANIFEST_LIST, MEDIA_TYPE.INDEX_OCI):
            list_dc = self.create_tagged_manifest_list(tag_dc, content_data)
            await self.put(list_dc)
            tag_dc.extra_data['man_relation'] = list_dc
            for manifest_data in content_data.get('manifests'):
                man_dc = self.create_manifest(list_dc, manifest_data)
                future_manifests.append(man_dc)
                man_dcs[man_dc.content.digest] = man_dc
                await self.put(man_dc)
        else:
            man_dc = self.create_tagged_manifest(tag_dc, content_data, raw_data)
            await self.put(man_dc)
            tag_dc.extra_data['man_relation'] = man_dc
            self.handle_blobs(man_dc, content_data, total_blobs)
        await self.put(tag_dc)
        pb_parsed_tags.increment()

    pb_parsed_tags.state = 'completed'
    pb_parsed_tags.save()

    for manifest_future in future_manifests:
        man = await manifest_future.resolution()
        with man._artifacts.get().file.open() as content_file:
            raw = content_file.read()
        content_data = json.loads(raw)
        man_dc = man_dcs[man.digest]
        self.handle_blobs(man_dc, content_data, total_blobs)
    for blob in total_blobs:
        await self.put(blob)
class TestPackage(TestCase):
    """Test Package content type."""

    PACKAGE_PARAGRAPH = (
        "Package: aegir\n"
        "Version: 0.1-edda0\n"
        "Architecture: sea\n"
        "Essential: yes\n"
        "Maintainer: Utgardloki\n"
        "Description: A sea jötunn associated with the ocean.\n"
        "MD5sum: aabb\n"
        "SHA1: ccdd\n"
        "SHA256: eeff\n"
        "Filename: pool/a/aegir/aegir_0.1-edda0_sea.deb\n"
    )

    def setUp(self):
        """Setup database fixtures."""
        self.package1 = Package(
            package="aegir",
            version="0.1-edda0",
            architecture="sea",
            essential=True,
            maintainer="Utgardloki",
            description="A sea jötunn associated with the ocean.",
        )
        self.package1.save()
        self.artifact1 = Artifact(
            size=42,
            md5="aabb",
            sha1="ccdd",
            sha256="eeff",
            sha224="gghh",
            sha384="iijj",
            sha512="kkll",
            file=SimpleUploadedFile("test_filename", b"test content"),
        )
        self.artifact1.save()
        ContentArtifact(artifact=self.artifact1, content=self.package1).save()

    def test_str(self):
        """Test package str."""
        self.assertEqual(str(self.package1), "<Package: aegir_0.1-edda0_sea>")

    def test_filename(self):
        """Test that the pool filename of a package is correct."""
        self.assertEqual(self.package1.filename(), "pool/a/aegir/aegir_0.1-edda0_sea.deb")

    def test_filename_with_component(self):
        """Test that the pool filename of a package with component is correct."""
        self.assertEqual(
            self.package1.filename("joetunn"),
            "pool/joetunn/a/aegir/aegir_0.1-edda0_sea.deb",
        )

    def test_to822(self):
        """Test if package transforms correctly into 822dict."""
        package_dict = Package822Serializer(
            self.package1, context={"request": None}
        ).to822("joetunn")
        self.assertEqual(package_dict["package"], self.package1.package)
        self.assertEqual(package_dict["version"], self.package1.version)
        self.assertEqual(package_dict["architecture"], self.package1.architecture)
        self.assertEqual(package_dict["maintainer"], self.package1.maintainer)
        self.assertEqual(package_dict["description"], self.package1.description)
        self.assertEqual(package_dict["md5sum"], self.artifact1.md5)
        self.assertEqual(package_dict["sha1"], self.artifact1.sha1)
        self.assertEqual(package_dict["sha256"], self.artifact1.sha256)
        self.assertEqual(package_dict["filename"], self.package1.filename("joetunn"))

    def test_to822_dump(self):
        """Test dump to package index."""
        self.assertEqual(
            Package822Serializer(self.package1, context={"request": None}).to822().dump(),
            self.PACKAGE_PARAGRAPH,
        )
async def run(self):
    """
    DockerFirstStage.
    """
    future_manifests = []
    tag_list = []
    to_download = []
    man_dcs = {}
    total_blobs = []

    with ProgressBar(message='Downloading tag list', total=1) as pb:
        repo_name = self.remote.namespaced_upstream_name
        relative_url = '/v2/{name}/tags/list'.format(name=repo_name)
        tag_list_url = urljoin(self.remote.url, relative_url)
        list_downloader = self.remote.get_downloader(url=tag_list_url)
        await list_downloader.run(extra_data={'repo_name': repo_name})

        with open(list_downloader.path) as tags_raw:
            tags_dict = json.loads(tags_raw.read())
            tag_list = tags_dict['tags']

        # check for the presence of the pagination link header
        link = list_downloader.response_headers.get('Link')
        await self.handle_pagination(link, repo_name, tag_list)
        whitelist_tags = self.remote.whitelist_tags
        if whitelist_tags:
            tag_list = list(set(tag_list) & set(whitelist_tags.split(',')))
        pb.increment()

    msg = 'Creating Download requests for v2 Tags'
    with ProgressBar(message=msg, total=len(tag_list)) as pb:
        for tag_name in tag_list:
            relative_url = '/v2/{name}/manifests/{tag}'.format(
                name=self.remote.namespaced_upstream_name,
                tag=tag_name,
            )
            url = urljoin(self.remote.url, relative_url)
            downloader = self.remote.get_downloader(url=url)
            to_download.append(downloader.run(extra_data={'headers': V2_ACCEPT_HEADERS}))
            pb.increment()

    pb_parsed_tags = ProgressBar(message='Processing v2 Tags', state='running')
    pb_parsed_ml_tags = ProgressBar(message='Parsing Manifest List Tags', state='running')
    pb_parsed_m_tags = ProgressBar(message='Parsing Manifests Tags', state='running')
    global pb_parsed_blobs
    pb_parsed_blobs = ProgressBar(message='Parsing Blobs', state='running')
    pb_parsed_man = ProgressBar(message='Parsing Manifests', state='running')

    for download_tag in asyncio.as_completed(to_download):
        tag = await download_tag
        with open(tag.path) as content_file:
            raw = content_file.read()
        content_data = json.loads(raw)
        mediatype = content_data.get('mediaType')
        tag.artifact_attributes['file'] = tag.path
        saved_artifact = Artifact(**tag.artifact_attributes)
        try:
            saved_artifact.save()
        except IntegrityError:
            del tag.artifact_attributes['file']
            saved_artifact = Artifact.objects.get(**tag.artifact_attributes)
        tag_dc = self.create_tag(mediatype, saved_artifact, tag.url)

        if type(tag_dc.content) is ManifestListTag:
            list_dc = self.create_tagged_manifest_list(tag_dc, content_data)
            await self.put(list_dc)
            pb_parsed_ml_tags.increment()
            tag_dc.extra_data['list_relation'] = list_dc
            for manifest_data in content_data.get('manifests'):
                man_dc = self.create_manifest(list_dc, manifest_data)
                future_manifests.append(man_dc.get_or_create_future())
                man_dcs[man_dc.content.digest] = man_dc
                await self.put(man_dc)
                pb_parsed_man.increment()
        elif type(tag_dc.content) is ManifestTag:
            man_dc = self.create_tagged_manifest(tag_dc, content_data)
            await self.put(man_dc)
            pb_parsed_m_tags.increment()
            tag_dc.extra_data['man_relation'] = man_dc
            self.handle_blobs(man_dc, content_data, total_blobs)
        await self.put(tag_dc)
        pb_parsed_tags.increment()

    pb_parsed_tags.state = 'completed'
    pb_parsed_tags.total = pb_parsed_tags.done
    pb_parsed_tags.save()
    pb_parsed_ml_tags.state = 'completed'
    pb_parsed_ml_tags.total = pb_parsed_ml_tags.done
    pb_parsed_ml_tags.save()
    pb_parsed_m_tags.state = 'completed'
    pb_parsed_m_tags.total = pb_parsed_m_tags.done
    pb_parsed_m_tags.save()
    pb_parsed_man.state = 'completed'
    pb_parsed_man.total = pb_parsed_man.done
    pb_parsed_man.save()

    for manifest_future in asyncio.as_completed(future_manifests):
        man = await manifest_future
        with man._artifacts.get().file.open() as content_file:
            raw = content_file.read()
        content_data = json.loads(raw)
        man_dc = man_dcs[man.digest]
        self.handle_blobs(man_dc, content_data, total_blobs)
    for blob in total_blobs:
        await self.put(blob)

    pb_parsed_blobs.state = 'completed'
    pb_parsed_blobs.total = pb_parsed_blobs.done
    pb_parsed_blobs.save()
class TestPackage(TestCase):
    """Test Package content type."""

    PACKAGE_PARAGRAPH = (
        'Package: aegir\n'
        'Version: 0.1-edda0\n'
        'Architecture: sea\n'
        'Maintainer: Utgardloki\n'
        'Description: A sea jötunn associated with the ocean.\n'
        'MD5sum: aabb\n'
        'SHA1: ccdd\n'
        'SHA256: eeff\n'
        'Filename: pool/a/aegir/aegir_0.1-edda0_sea.deb\n'
    )

    def setUp(self):
        """Setup database fixtures."""
        self.package1 = Package(
            package_name='aegir',
            version='0.1-edda0',
            architecture='sea',
            maintainer='Utgardloki',
            description='A sea jötunn associated with the ocean.',
        )
        self.package1.save()
        self.artifact1 = Artifact(
            size=42,
            md5='aabb',
            sha1='ccdd',
            sha256='eeff',
            file=SimpleUploadedFile('test_filename', b'test content'),
        )
        self.artifact1.save()
        ContentArtifact(artifact=self.artifact1, content=self.package1).save()

    def test_str(self):
        """Test package str."""
        self.assertEqual(str(self.package1), '<Package: aegir_0.1-edda0_sea>')

    def test_filename(self):
        """Test that the pool filename of a package is correct."""
        self.assertEqual(self.package1.filename(), 'pool/a/aegir/aegir_0.1-edda0_sea.deb')

    def test_filename_with_component(self):
        """Test that the pool filename of a package with component is correct."""
        self.assertEqual(
            self.package1.filename('joetunn'),
            'pool/joetunn/a/aegir/aegir_0.1-edda0_sea.deb',
        )

    def test_to822(self):
        """Test if package transforms correctly into 822dict."""
        package_dict = self.package1.to822('joetunn')
        self.assertEqual(package_dict['package'], self.package1.package_name)
        self.assertEqual(package_dict['version'], self.package1.version)
        self.assertEqual(package_dict['architecture'], self.package1.architecture)
        self.assertEqual(package_dict['maintainer'], self.package1.maintainer)
        self.assertEqual(package_dict['description'], self.package1.description)
        self.assertEqual(package_dict['md5sum'], self.artifact1.md5)
        self.assertEqual(package_dict['sha1'], self.artifact1.sha1)
        self.assertEqual(package_dict['sha256'], self.artifact1.sha256)
        self.assertEqual(package_dict['filename'], self.package1.filename('joetunn'))

    def test_to822_dump(self):
        """Test dump to package index."""
        self.assertEqual(self.package1.to822().dump(), self.PACKAGE_PARAGRAPH)
class PendingArtifact(Pending):
    """
    Represents an artifact related to content that is contained within
    the remote repository.

    Attributes:
        url (str): The URL used to download the artifact.
        relative_path (str): The relative path within the content.
        content (PendingContent): The associated pending content.
            This is the reverse relationship.

    Examples:
        >>>
        >>> from pulpcore.plugin.models import Artifact
        >>>
        >>> model = Artifact(...)  # DB model instance.
        >>> download = ...
        >>> ...
        >>> artifact = PendingArtifact(model, 'http://zoo.org/lion.rpm', 'lion.rpm')
        >>>
    """

    __slots__ = (
        'url',
        'relative_path',
        'content',
    )

    def __init__(self, model, url, relative_path, content=None):
        """
        Args:
            model (pulpcore.plugin.models.Artifact): A pending artifact model.
            url (str): The URL used to download the artifact.
            relative_path (str): The relative path within the content.
            content (PendingContent): The associated pending content.
                This is the reverse relationship.
        """
        super().__init__(model)
        self.url = url
        self.relative_path = relative_path
        self.content = content
        if content:
            content.artifacts.add(self)

    @property
    def model(self):
        """
        The model getter.

        Returns:
            pulpcore.plugin.models.Artifact: The pending model.
        """
        return self._model

    @property
    def stored_model(self):
        """
        The stored model getter.

        Returns:
            pulpcore.plugin.models.Artifact: The stored model.
        """
        return self._stored_model

    @stored_model.setter
    def stored_model(self, model):
        """
        The stored model setter.

        Args:
            model (pulpcore.plugin.models.Artifact): The stored model.
        """
        self._stored_model = model

    @property
    def changeset(self):
        """
        The changeset getter.

        Returns:
            pulpcore.plugin.changeset.Changeset: The active changeset.
        """
        return self.content.changeset

    @property
    def remote(self):
        """
        The remote getter.

        Returns:
            pulpcore.plugin.models.Remote: A remote.
        """
        return self.changeset.remote

    @property
    def downloader(self):
        """
        A downloader used to download the artifact.

        The downloader may be a NopDownloader (no-operation) when:
        - The _stored_model is set to a model fetched from the DB.
        - The download policy is deferred.

        Returns:
            asyncio.Future: A download future based on a downloader.
        """
        def done(task):
            try:
                task.result()
            except Exception:
                pass
            else:
                self.downloaded(downloader)

        if self._stored_model:
            downloader = NopDownloader()
            future = asyncio.ensure_future(downloader.run())
        else:
            downloader = self.remote.get_downloader(self.url)
            future = asyncio.ensure_future(downloader.run())
            future.add_done_callback(done)
        return future

    def downloaded(self, downloader):
        """
        The artifact (file) has been downloaded.

        A new _stored_model is created (and assigned) for the downloaded file.

        Args:
            downloader (BaseDownloader): The downloader that successfully completed.
        """
        self._stored_model = Artifact(file=downloader.path, **downloader.artifact_attributes)

    def artifact_q(self):
        """
        Get a query for the actual artifact.

        Returns:
            django.db.models.Q: A query to get the actual artifact.
        """
        q = Q(pk=None)
        for field in Artifact.RELIABLE_DIGEST_FIELDS:
            digest = getattr(self._model, field)
            if digest:
                q |= Q(**{field: digest})
        return q

    def settle(self):
        """
        Ensures that all prerequisite matters pertaining to adding the
        artifact to the DB have been settled.

        Notes:
            Called whenever an artifact has been processed.
        """
        self._settled = True

    def save(self):
        """
        Update the DB:
         - Create (or fetch) the Artifact.
         - Create (or fetch) the ContentArtifact.
         - Create (or update) the RemoteArtifact.
        """
        if self._stored_model:
            try:
                with transaction.atomic():
                    self._stored_model.save()
            except IntegrityError:
                q = self.artifact_q()
                self._stored_model = Artifact.objects.get(q)

        try:
            with transaction.atomic():
                content_artifact = ContentArtifact(
                    relative_path=self.relative_path,
                    content=self.content.stored_model,
                    artifact=self._stored_model)
                content_artifact.save()
        except IntegrityError:
            content_artifact = ContentArtifact.objects.get(
                relative_path=self.relative_path,
                content=self.content.stored_model)
            if self._stored_model:
                content_artifact.artifact = self._stored_model
                content_artifact.save()

        digests = {f: getattr(self._model, f) for f in Artifact.DIGEST_FIELDS}

        try:
            with transaction.atomic():
                remote_artifact = RemoteArtifact(
                    url=self.url,
                    remote=self.remote,
                    content_artifact=content_artifact,
                    size=self._model.size,
                    **digests)
                remote_artifact.save()
        except IntegrityError:
            q_set = RemoteArtifact.objects.filter(
                remote=self.remote,
                content_artifact=content_artifact)
            q_set.update(
                url=self.url,
                size=self._model.size,
                **digests)

    def __hash__(self):
        return hash(self.relative_path)
def _create_and_associate_content(self, group):
    """
    Saves ExampleContent and all related models to the database.

    This method saves ExampleContent, ContentArtifacts, RemoteArtifacts and
    Artifacts to the database inside a single transaction.

    Args:
        group (:class:`~pulpcore.plugin.download.asyncio.Group`): A group of
            :class:`~pulpcore.plugin.models.RemoteArtifact` objects to process.
    """
    # Save Artifacts, ContentArtifacts, RemoteArtifacts, and Content in a transaction
    content = self.content_dict.pop(group.id)
    with transaction.atomic():
        # Save content
        try:
            with transaction.atomic():
                content.save()
                log.debug(_("Created content"))
        except IntegrityError:
            key = {f: getattr(content, f) for f in content.natural_key_fields()}
            content = type(content).objects.get(**key)

        try:
            with transaction.atomic():
                # Add content to the repository
                association = RepositoryContent(
                    repository=self.repository,
                    content=content)
                association.save()
                log.debug(_("Created association with repository"))
        except IntegrityError:
            # Content is already associated with the repository
            pass

        for url in group.urls:
            if group.downloaded_files:
                # Create artifact that was downloaded and deal with race condition
                with File(open(group.downloaded_files[url].path, mode='rb')) as file:
                    try:
                        with transaction.atomic():
                            artifact = Artifact(
                                file=file,
                                **group.downloaded_files[url].artifact_attributes)
                            artifact.save()
                    except IntegrityError:
                        artifact = Artifact.objects.get(
                            sha256=group.downloaded_files[url].artifact_attributes['sha256'])
            else:
                # Try to find an artifact if one already exists
                try:
                    with transaction.atomic():
                        # try to find an artifact from information in deferred artifact
                        artifact = Artifact.objects.get(
                            sha256=group.remote_artifacts[url].sha256)
                except Artifact.DoesNotExist:
                    artifact = None

            content_artifact = group.remote_artifacts[url].content_artifact
            content_artifact.artifact = artifact
            content_artifact.content = content
            try:
                with transaction.atomic():
                    content_artifact.save()
            except IntegrityError:
                content_artifact = ContentArtifact.objects.get(
                    content=content_artifact.content,
                    relative_path=content_artifact.relative_path)
                group.remote_artifacts[url].content_artifact = content_artifact
                content_artifact.artifact = artifact
                content_artifact.save()

            try:
                with transaction.atomic():
                    group.remote_artifacts[url].save()
            except IntegrityError:
                pass
def _create_and_associate_content(self, group):
    """
    Saves ExampleContent and all related models to the database.

    This method saves ExampleContent, ContentArtifacts, RemoteArtifacts and
    Artifacts to the database and adds ExampleContent to the repository
    inside a single transaction.

    Args:
        group (:class:`~pulpcore.plugin.download.asyncio.Group`): A group of
            :class:`~pulpcore.plugin.models.RemoteArtifact` objects to process.
    """
    # Save Artifacts, ContentArtifacts, RemoteArtifacts, and Content in a transaction
    content = self.content_dict.pop(group.id)
    with transaction.atomic():
        # Save content
        try:
            with transaction.atomic():
                content.save()
                log.debug(_("Created content"))
        except IntegrityError:
            key = {f: getattr(content, f) for f in content.natural_key_fields()}
            content = type(content).objects.get(**key)

        try:
            with transaction.atomic():
                # Add content to the repository
                association = RepositoryContent(repository=self.repository, content=content)
                association.save()
                log.debug(_("Created association with repository"))
        except IntegrityError:
            # Content is already associated with the repository
            pass

        for url in group.urls:
            if group.downloaded_files:
                downloaded_file = group.downloaded_files[url]
                # Create artifact that was downloaded and deal with race condition
                try:
                    with transaction.atomic():
                        artifact = Artifact(
                            file=downloaded_file.path,
                            **downloaded_file.artifact_attributes)
                        artifact.save()
                except IntegrityError:
                    artifact = Artifact.objects.get(
                        sha256=downloaded_file.artifact_attributes['sha256'])
            else:
                # Try to find an artifact if one already exists
                try:
                    with transaction.atomic():
                        # try to find an artifact from information in deferred artifact
                        artifact = Artifact.objects.get(
                            sha256=group.remote_artifacts[url].sha256)
                except Artifact.DoesNotExist:
                    artifact = None

            content_artifact = group.remote_artifacts[url].content_artifact
            content_artifact.artifact = artifact
            content_artifact.content = content
            try:
                with transaction.atomic():
                    content_artifact.save()
            except IntegrityError:
                content_artifact = ContentArtifact.objects.get(
                    content=content_artifact.content,
                    relative_path=content_artifact.relative_path)
                group.remote_artifacts[url].content_artifact = content_artifact
                content_artifact.artifact = artifact
                content_artifact.save()

            try:
                with transaction.atomic():
                    group.remote_artifacts[url].save()
            except IntegrityError:
                pass
def _create_and_associate_content(self, content, group_result):
    """
    Saves ExampleContent and all related models to the database.

    This method saves ExampleContent, ContentArtifacts, RemoteArtifacts and
    Artifacts to the database inside a single transaction.

    Args:
        content (:class:`pulp_example.app.models.ExampleContent`): An instance
            of ExampleContent to be saved to the database.
        group_result (dict): A dictionary where keys are instances of
            :class:`pulpcore.plugin.models.RemoteArtifact` and values are
            dictionaries that contain information about files downloaded using
            the RemoteArtifacts.
    """
    # Save Artifacts, ContentArtifacts, RemoteArtifacts, and Content in a transaction
    with transaction.atomic():
        # Save content
        try:
            with transaction.atomic():
                content.save()
                log.debug(_("Created content"))
        except IntegrityError:
            key = {f: getattr(content, f) for f in content.natural_key_fields()}
            content = type(content).objects.get(**key)

        try:
            with transaction.atomic():
                # Add content to the repository
                association = RepositoryContent(repository=self.repository, content=content)
                association.save()
                log.debug(_("Created association with repository"))
        except IntegrityError:
            # Content unit is already associated with the repository
            pass

        for remote_artifact, download_result in group_result.items():
            if download_result:
                # Create artifact that was downloaded and deal with race condition
                path = download_result.pop('path')
                try:
                    with transaction.atomic():
                        artifact = Artifact(file=path, **download_result)
                        artifact.save()
                except IntegrityError:
                    artifact = Artifact.objects.get(sha256=download_result['sha256'])
            else:
                # Try to find an artifact if one already exists
                try:
                    with transaction.atomic():
                        # try to find an artifact from information in remote artifact
                        artifact = Artifact.objects.get(sha256=remote_artifact.sha256)
                except Artifact.DoesNotExist:
                    artifact = None

            content_artifact = remote_artifact.content_artifact
            content_artifact.artifact = artifact
            content_artifact.content = content
            try:
                with transaction.atomic():
                    content_artifact.save()
            except IntegrityError:
                content_artifact = ContentArtifact.objects.get(
                    content=content_artifact.content,
                    relative_path=content_artifact.relative_path)
                remote_artifact.content_artifact = content_artifact
                content_artifact.artifact = artifact
                content_artifact.save()

            try:
                with transaction.atomic():
                    remote_artifact.save()
            except IntegrityError:
                pass