def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing; it should arrange for the artifacts of the supplied build
    to land in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # Wire the build to its projectbuilds before archiving.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as mock_urllib2:
        mock_urllib2.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)
    items = list(archive.get_archived_artifacts_for_build(build))
    self.assertEqual(2, len(items))
    # Both archived copies must contain the content fetched from Jenkins.
    for item in items:
        path = os.path.join(self.basedir, item.archived_path)
        self.assertEqual(file(path).read(), "Artifact from Jenkins")
def test_process_build_artifacts_with_no_default_archive(self):
    """
    If we have no default archive, we should log the fact that we can't
    automatically archive artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)
    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
        # The task must hand back the build.pk so chained calls keep working.
        self.assertEqual(build.pk, result.get())
        expected_calls = [
            mock.call.info(
                "Processing build artifacts from build %s %d",
                build, build.number),
            mock.call.info(
                "No default archiver - build not automatically archived.")
        ]
        mock_logging.assert_has_calls(expected_calls)
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_artifact_get_url(self):
    """
    ArchiveArtifact.get_url should return a valid URL for an artifact
    within the archive.
    """
    project, dependency = self.create_dependencies()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="file1.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    update_projectbuilds(build)
    create_projectbuilds_for_autotracking(build)
    archive.add_build(build)
    items = list(archive.get_archived_artifacts_for_build(build))
    self.assertEqual(2, len(items))
    # Every archived copy resolves to a URL under the archive host.
    for item in items:
        self.assertEqual(
            "http://example.com/projects/%s" % item.archived_path,
            item.get_url())
def test_archive_build_projectbuild(self):
    """
    The archiver can handle archiving a build from a projectbuild.
    """
    project, dependency1, dependency2 = self.create_dependencies(2)
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    archive = ArchiveFactory.create(policy="cdimage")

    # First dependency build: one artifact, archived twice
    # (dependency copy + projectbuild copy).
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(build=build1, filename="artifact1.gz")
    update_projectbuilds(build1)
    items_for_build1 = archive.add_build(build1)
    self.assertEqual(2, len(items_for_build1[artifact1]))

    # Second dependency build behaves the same way.
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    artifact2 = ArtifactFactory.create(build=build2, filename="artifact2.gz")
    update_projectbuilds(build2)
    items_for_build2 = archive.add_build(build2)
    self.assertEqual(2, len(items_for_build2[artifact2]))

    self.assertEqual(4, archive.items.count())
    self.assertEqual(
        2,
        ArchiveArtifact.objects.filter(
            projectbuild_dependency__projectbuild=projectbuild).count())
def test_process_build_artifacts_with_no_default_archive(self):
    """
    If we have no default archive, we should log the fact that we can't
    automatically archive artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)
    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
        # Chained tasks rely on the build.pk being passed through.
        self.assertEqual(build.pk, result.get())
        mock_logging.assert_has_calls([
            mock.call.info(
                "Processing build artifacts from build %s %d",
                build, build.number),
            mock.call.info(
                "No default archiver - build not automatically archived.")
        ])
    # Nothing should have been archived.
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_artifact_get_url(self):
    """
    ArchiveArtifact.get_url should return a valid URL for an artifact
    within the archive.
    """
    project, dependency = self.create_dependencies()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="file1.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    update_projectbuilds(build)
    create_projectbuilds_for_autotracking(build)
    archive.add_build(build)
    # Exactly two archived copies are expected.
    [first, second] = list(archive.get_archived_artifacts_for_build(build))
    self.assertEqual(
        "http://example.com/projects/%s" % first.archived_path,
        first.get_url())
    self.assertEqual(
        "http://example.com/projects/%s" % second.archived_path,
        second.get_url())
def test_archive_projectbuild_with_prearchived_artifact(self):
    """
    If we archive a project build with several artifacts, it should
    return only the newly added artifacts.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build1, filename="artifact1.gz")
    prearchived = ArtifactFactory.create(build=build2, filename="artifact2.gz")
    archive = ArchiveFactory.create()
    # Archive one artifact ahead of time; archiving the projectbuild
    # should then report only the remaining one as new.
    archive.add_artifact(prearchived, projectbuild=projectbuild)
    result = archive.archive_projectbuild(projectbuild)
    self.assertEqual(2, archive.items.count())
    self.assertEqual(1, len(result))
def test_cdimage_archiver_policy(self):
    """
    If we use the cdimage policy, then the file path but should delegate
    to the default policy for builds without a projectbuild.
    """
    project, dependency = self.create_dependencies()
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)
    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    policy = CdimageArchivePolicy()
    # Recompute the expected path for each archived item; entries without
    # a projectbuild dependency fall back to projectbuild=None.
    expected = []
    for entry in archived:
        pb_dep = entry.projectbuild_dependency
        expected.append(policy.get_path_for_artifact(
            entry.artifact, build=build, dependency=entry.dependency,
            projectbuild=pb_dep.projectbuild if pb_dep else None))
    self.assertEqual(
        "\n".join(expected),
        "\n".join(archived.values_list("archived_path", flat=True)))
def test_archive_projectbuild(self):
    """
    The archiver can handle archiving an entire project build.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # One build with one artifact per dependency.
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build1, filename="artifact1.gz")
    ArtifactFactory.create(build=build2, filename="artifact2.gz")
    archive = ArchiveFactory.create()
    result = archive.archive_projectbuild(projectbuild)
    self.assertEqual(2, archive.items.count())
    self.assertEqual(2, len(result))
def test_cdimage_archiver_policy(self):
    """
    If we use the cdimage policy, then the file path but should delegate
    to the default policy for builds without a projectbuild.
    """
    project, dependency = self.create_dependencies()
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)
    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    policy = CdimageArchivePolicy()

    def expected_path(entry):
        # Items without a projectbuild dependency get projectbuild=None.
        linked = (entry.projectbuild_dependency and
                  entry.projectbuild_dependency.projectbuild or None)
        return policy.get_path_for_artifact(
            entry.artifact, build=build, dependency=entry.dependency,
            projectbuild=linked)

    paths = [expected_path(entry) for entry in archived]
    self.assertEqual(
        "\n".join(paths),
        "\n".join(archived.values_list("archived_path", flat=True)))
def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing, it should arrange for the artifacts for the provided build
    to be archived in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # Ensure that the artifacts are all connected up first.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)
    [first, second] = list(archive.get_archived_artifacts_for_build(build))
    first_path = os.path.join(self.basedir, first.archived_path)
    self.assertEqual(file(first_path).read(), "Artifact from Jenkins")
    second_path = os.path.join(self.basedir, second.archived_path)
    self.assertEqual(file(second_path).read(), "Artifact from Jenkins")
def test_generate_checksums_no_transport(self):
    """
    generate_checksums should call the generate_checksums method on the
    transport from the archive with the build to generate the checksums
    for. If there is no default archive, a checksum cannot be calculated
    and there should be an early exit.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # No archive defined.
    transport = LoggingTransport(None)
    # Capture logging to confirm the early-exit message.
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        return_value = generate_checksums(build.pk)
        self.assertEqual([], transport.log)
        self.assertEqual(build.pk, return_value)
        mock_log.assert_called_once_with(
            "No default archiver - no checksum to generate")
def test_generate_checksums_no_transport(self):
    """
    generate_checksums should call the generate_checksums method on the
    transport from the archive with the build to generate the checksums
    for. If there is no default archive, a checksum cannot be calculated
    and there should be an early exit.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # There is deliberately no archive behind this transport.
    transport = LoggingTransport(None)
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        result = generate_checksums(build.pk)
        # Nothing was transported, the pk is passed through, and the
        # early exit is logged.
        self.assertEqual([], transport.log)
        self.assertEqual(build.pk, result)
        mock_log.assert_called_once_with(
            "No default archiver - no checksum to generate")
def test_get_archived_artifact_artifact_not_in_archive(self):
    """
    If the specified build is not recorded in the archive then we should
    get an empty set back.
    """
    dependency = DependencyFactory.create()
    unarchived_build = BuildFactory.create(job=dependency.job)
    ArtifactFactory.create(build=unarchived_build)
    archive = ArchiveFactory.create()
    # The build was never added, so the archive knows nothing about it.
    self.assertEqual(
        0, archive.get_archived_artifacts_for_build(unarchived_build).count())
def test_archive_artifact_from_finalized_dependency_build(self):
    """
    archive_artifact_from_jenkins should get a transport, and then call
    start, end and archive_artifact on the transport. the correct storage.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    [item] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    # The transport must be started, used for the copy, then ended.
    expected_log = [
        "START",
        "%s -> %s root:testing" % (artifact.url, item.archived_path),
        "Make %s current" % item.archived_path,
        "END",
    ]
    self.assertEqual(expected_log, transport.log)
def test_archive_artifact_from_finalized_dependency_build(self):
    """
    archive_artifact_from_jenkins should get a transport, and then call
    start, end and archive_artifact on the transport. the correct storage.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    [archived_item] = archive.add_build(artifact.build)[artifact]
    recording_transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=recording_transport):
        archive_artifact_from_jenkins(archived_item.pk)
    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (
             artifact.url, archived_item.archived_path),
         "Make %s current" % archived_item.archived_path,
         "END"],
        recording_transport.log)
def test_archive_artifact_from_jenkins(self):
    """
    archive_artifact_from_jenkins should get a transport, and then call
    start, end and archive_artifact on the transport. the correct storage.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    items = archive.add_build(artifact.build)
    # Serve a known payload in place of the Jenkins download.
    fake_response = StringIO(u"Artifact from Jenkins")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.return_value = fake_response
        archive_artifact_from_jenkins(items[artifact][0].pk)
    [item] = list(archive.get_archived_artifacts_for_build(build))
    stored_path = os.path.join(self.basedir, item.archived_path)
    self.assertEqual(file(stored_path).read(), "Artifact from Jenkins")
    # The recorded size matches the payload length.
    self.assertEqual(21, item.archived_size)
def test_link_artifact_in_archive(self):
    """
    The link_artifact_in_archive task should use the transport to link
    the specified artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    build = BuildFactory.create(job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # Ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [source_item, target_item] = archive.add_build(artifact.build)[artifact]
    source_item.archived_size = 1000
    source_item.save()
    transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        link_artifact_in_archive(source_item.pk, target_item.pk)
    transport.link_filename_to_filename.assert_called_once_with(
        source_item.archived_path, target_item.archived_path)
    transport.link_to_current.assert_called_once_with(
        target_item.archived_path)
    # The size recorded on the source must survive the linking.
    source_item = ArchiveArtifact.objects.get(pk=source_item.pk)
    self.assertEqual(1000, source_item.archived_size)
def test_archive_artifact_from_jenkins_transport_lifecycle(self):
    """
    archive_artifact_from_jenkins should get a transport, and copy the
    file to the correct storage.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive.add_build(artifact.build)
    [item] = list(archive.get_archived_artifacts_for_build(build))
    # Not archived yet.
    self.assertIsNone(item.archived_at)
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    # Re-fetch to observe the state written by the task.
    [item] = list(archive.get_archived_artifacts_for_build(build))
    expected_log = [
        "START",
        "%s -> %s root:testing" % (artifact.url, item.archived_path),
        "Make %s current" % item.archived_path,
        "END",
    ]
    self.assertEqual(expected_log, transport.log)
    self.assertIsNotNone(item.archived_at)
def test_project_build_detail_view_with_archived_artifacts(self):
    """
    If we have archived artifacts for this build, we should provide the
    list of archived items in the response context.
    """
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency)
    projectbuild = build_project(self.project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="file1.gz")
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(policy="cdimage", default=True)
    # Only the copies tied to a projectbuild dependency are shown.
    expected_items = [
        entry for entry in archive.add_build(build)[artifact]
        if entry.projectbuild_dependency]
    url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": self.project.pk, "build_pk": projectbuild.pk})
    response = self.app.get(url, user="******")
    self.assertEqual(
        expected_items, list(response.context["archived_items"]))
def test_archive_artifact_from_jenkins_transport_lifecycle(self):
    """
    archive_artifact_from_jenkins should get a transport, and copy the
    file to the correct storage.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive.add_build(artifact.build)
    [pending] = list(archive.get_archived_artifacts_for_build(build))
    self.assertIsNone(pending.archived_at)
    logging_transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=logging_transport):
        archive_artifact_from_jenkins(pending.pk)
    [archived] = list(archive.get_archived_artifacts_for_build(build))
    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, archived.archived_path),
         "Make %s current" % archived.archived_path,
         "END"],
        logging_transport.log)
    # The timestamp is set once the copy has completed.
    self.assertIsNotNone(archived.archived_at)
def test_generate_checksums(self):
    """
    generate_checksums should call the generate_checksums method on the
    transport from the archive with the build to generate the checksums
    for.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        generate_checksums(build.pk)
    # The transport sees a start/checksum/end sequence.
    self.assertEqual(
        ["START",
         "Checksums generated for %s" % archived_artifact,
         "END"],
        transport.log)
def test_link_artifact_in_archive(self):
    """
    The link_artifact_in_archive task should use the transport to link
    the specified artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    build = BuildFactory.create(job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    item1.archived_size = 1000
    item1.save()
    mock_transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(
            Archive, "get_transport", return_value=mock_transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    mock_transport.link_filename_to_filename.assert_called_once_with(
        item1.archived_path, item2.archived_path)
    mock_transport.link_to_current.assert_called_once_with(
        item2.archived_path)
    # Linking must not clobber the recorded size.
    item1 = ArchiveArtifact.objects.get(pk=item1.pk)
    self.assertEqual(1000, item1.archived_size)
def test_cdimage_archiver_policy(self):
    """
    If we use the cdimage policy, then the file path is quite different.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_artifact(artifact, projectbuild=projectbuild)
    archived = archive.get_archived_artifact(artifact)
    self.assertEqual(artifact, archived.artifact)
    # cdimage paths are <project-slug>/<build id>/<filename>.
    expected_path = "%s/%s/testing.gz" % (
        slugify(project.name), projectbuild.build_id)
    self.assertEqual(expected_path, archived.archived_path)
    self.assertIsNone(archived.archived_at)
def test_archive_artifact_from_jenkins(self):
    """
    archive_artifact_from_jenkins should get a transport, and then call
    start, end and archive_artifact on the transport. the correct storage.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archived_items = archive.add_build(artifact.build)
    fakefile = StringIO(u"Artifact from Jenkins")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.return_value = fakefile
        archive_artifact_from_jenkins(archived_items[artifact][0].pk)
    [item] = list(archive.get_archived_artifacts_for_build(build))
    filename = os.path.join(self.basedir, item.archived_path)
    self.assertEqual(file(filename).read(), "Artifact from Jenkins")
    self.assertEqual(21, item.archived_size)
def test_generate_checksums(self):
    """
    generate_checksums should send commands to the ssh client to generate
    an sha256sum for the passed in archived artifact.
    """
    # A project with a build and an archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)
    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived_artifact)
    # One remote command: cd to the directory, append to SHA256SUMS.
    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_generate_checksums(self):
    """
    generate_checksums should send commands to the ssh client to generate
    an sha256sum for the passed in archived artifact.
    """
    # Build up a project whose build has one archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    pb_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=pb_dependency)
    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived)
    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_get_current_artifacts(self):
    """
    Project.get_current_artifacts returns the current set of artifacts
    for this project.
    """
    project = ProjectFactory.create()
    job = JobFactory.create()
    dependency = DependencyFactory.create(job=job)
    ProjectDependency.objects.create(project=project, dependency=dependency)
    older_build = BuildFactory.create(job=job)
    newer_build = BuildFactory.create(job=job)
    # Only the artifact from the most recent build counts as current.
    ArtifactFactory.create(build=older_build)
    current_artifact = ArtifactFactory.create(build=newer_build)
    self.assertEqual(
        [current_artifact], list(project.get_current_artifacts()))
def test_process_build_artifacts_with_multiple_artifacts(self):
    """
    All the artifacts should be individually linked.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing1.txt")
    ArtifactFactory.create(build=build, filename="testing/testing2.txt")
    # Ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock, \
            mock.patch(
                "archives.tasks.archive_artifact_from_jenkins"
            ) as archive_task, \
            mock.patch(
                "archives.tasks.link_artifact_in_archive"
            ) as link_task:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact %s")
        process_build_artifacts(build.pk)
    [item1, item2, item3, item4] = list(
        archive.get_archived_artifacts_for_build(build).order_by(
            "artifact"))
    # One archive subtask per artifact, and one link per duplicate copy.
    self.assertEqual(
        [mock.call(item4.pk), mock.call(item2.pk)],
        archive_task.si.call_args_list)
    self.assertEqual(
        [mock.call(item4.pk, item3.pk), mock.call(item2.pk, item1.pk)],
        link_task.si.call_args_list)
def test_add_artifact(self):
    """
    An archive records the artifacts that get added.
    """
    new_artifact = ArtifactFactory.create()
    archive = ArchiveFactory.create()
    archive.add_artifact(new_artifact)
    # Exactly one item is recorded for the added artifact.
    self.assertEqual(1, archive.items.count())
def test_get_path_for_artifact(self):
    """
    The default archive policy should return the path from the artifact
    url.
    """
    artifact = ArtifactFactory.create(filename="testing.img")
    default_policy = DefaultPolicy()
    # The bare filename is used as the archive path.
    self.assertEqual(
        "testing.img", default_policy.get_path_for_artifact(artifact))
def test_get_archived_artifact_artifact_not_in_archive(self):
    """
    If the specified artifact is not in the archive, we shold get None
    back.
    """
    missing_artifact = ArtifactFactory.create()
    archive = ArchiveFactory.create()
    # Never added, so the lookup yields None.
    self.assertIsNone(archive.get_archived_artifact(missing_artifact))
def test_archive_build_several_projectbuild_dependencies(self):
    """
    If we archive a build that is used in several projectbuilds, then we
    should get multiple copies of the artifact.
    """
    project1, dependency1, dependency2 = self.create_dependencies(2)
    project2 = ProjectFactory.create(name="Project 2")
    # dependency1 is shared between both projects.
    ProjectDependency.objects.create(project=project2, dependency=dependency1)
    projectbuild = build_project(project1, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(build=build1, filename="file1.gz")
    artifact2 = ArtifactFactory.create(build=build2, filename="file2.gz")
    archive = ArchiveFactory.create(policy="cdimage")

    # The shared dependency's build yields three copies: one per
    # projectbuild plus the dependency copy.
    update_projectbuilds(build1)
    create_projectbuilds_for_autotracking(build1)
    archive.add_build(build1)
    self.assertEqual(3, archive.items.count())

    update_projectbuilds(build2)
    create_projectbuilds_for_autotracking(build2)
    archive.add_build(build2)
    self.assertEqual(5, archive.items.count())

    artifacts = ArchiveArtifact.objects.all().order_by("archived_path")
    policy = CdimageArchivePolicy()
    self.assertEqual(
        "{dependency1}\n{dependency2}\n"
        "project-1/{build}/file1.gz\nproject-1/{build}/file2.gz\n"
        "project-2/{build}/file1.gz".format(
            dependency1=policy.get_path_for_artifact(
                artifact=artifact1, build=build1, dependency=dependency1),
            dependency2=policy.get_path_for_artifact(
                artifact=artifact2, build=build2, dependency=dependency2),
            build=projectbuild.build_id),
        "\n".join(artifacts.values_list("archived_path", flat=True)))
def test_archive_build_several_projectbuild_dependencies(self):
    """
    If we archive a build that is used in several projectbuilds, then we
    should get multiple copies of the artifact.
    """
    project1, dependency1, dependency2 = self.create_dependencies(2)
    project2 = ProjectFactory.create(name="Project 2")
    ProjectDependency.objects.create(
        project=project2, dependency=dependency1)
    projectbuild = build_project(project1, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(build=build1, filename="file1.gz")
    artifact2 = ArtifactFactory.create(build=build2, filename="file2.gz")
    archive = ArchiveFactory.create(policy="cdimage")

    for a_build, expected_count in [(build1, 3), (build2, 5)]:
        update_projectbuilds(a_build)
        create_projectbuilds_for_autotracking(a_build)
        archive.add_build(a_build)
        # Item counts accumulate: 3 copies for the shared dependency's
        # build, then 2 more for the second build.
        self.assertEqual(expected_count, archive.items.count())

    artifacts = ArchiveArtifact.objects.all().order_by("archived_path")
    policy = CdimageArchivePolicy()
    self.assertEqual(
        "{dependency1}\n{dependency2}\n"
        "project-1/{build}/file1.gz\nproject-1/{build}/file2.gz\n"
        "project-2/{build}/file1.gz".format(
            dependency1=policy.get_path_for_artifact(
                artifact=artifact1, build=build1, dependency=dependency1),
            dependency2=policy.get_path_for_artifact(
                artifact=artifact2, build=build2, dependency=dependency2),
            build=projectbuild.build_id),
        "\n".join(artifacts.values_list("archived_path", flat=True)))
def test_get_current_artifacts(self):
    """
    Project.get_current_artifacts returns the current set of artifacts
    for this project.
    """
    # Two builds of the same job: only the newest build's artifact
    # should be reported as current.
    project = ProjectFactory.create()
    job = JobFactory.create()
    dependency = DependencyFactory.create(job=job)
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    older_build = BuildFactory.create(job=job)
    newer_build = BuildFactory.create(job=job)
    ArtifactFactory.create(build=older_build)
    current_artifact = ArtifactFactory.create(build=newer_build)
    process_build_dependencies(newer_build.pk)
    self.assertEqual(
        [current_artifact], list(project.get_current_artifacts()))
def test_process_build_artifacts_with_multiple_artifacts(self):
    """
    All the artifacts should be individually linked.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing1.txt")
    ArtifactFactory.create(build=build, filename="testing/testing2.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact %s")
        # Mock out the subtasks so we can assert on exactly which
        # archive items get archived and linked, without doing any
        # real transfers.
        with mock.patch("archives.tasks.archive_artifact_from_jenkins"
                        ) as archive_task:
            with mock.patch("archives.tasks.link_artifact_in_archive"
                            ) as link_task:
                process_build_artifacts(build.pk)
    [item1, item2, item3, item4] = list(
        archive.get_archived_artifacts_for_build(build).order_by(
            "artifact"))
    # Each artifact is archived once and then linked to its other
    # archive item: archive items 4 and 2, then link 4->3 and 2->1.
    self.assertEqual(
        [mock.call(item4.pk), mock.call(item2.pk)],
        archive_task.si.call_args_list)
    self.assertEqual(
        [mock.call(item4.pk, item3.pk), mock.call(item2.pk, item1.pk)],
        link_task.si.call_args_list)
def test_add_artifact_repeatedly(self):
    """
    Adding the same artifact to an archive more than once is not an
    error, and no duplicate archive items are created.
    """
    new_artifact = ArtifactFactory.create()
    target_archive = ArchiveFactory.create()
    for _ in range(2):
        target_archive.add_artifact(new_artifact)
    self.assertEqual(1, target_archive.items.count())
def create_build_data(self, use_requested_by=True, email=None):
    """
    Create the test data for a build.

    Returns a (projectbuild, build) pair wired up with a project, a
    dependency, a ProjectBuildDependency and a single artifact.  When
    use_requested_by is True, the build is attributed to a newly
    created "testing" user (with the given email); otherwise
    requested_by is None.
    """
    if use_requested_by:
        user = User.objects.create_user("testing", email=email)
    else:
        user = None
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key,
        requested_by=user)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    return projectbuild, build
def test_get_archived_artifact(self):
    """
    We can fetch the artifacts that get added.
    """
    source_artifact = ArtifactFactory.create(filename="artifact.gz")
    archive = ArchiveFactory.create()
    archive.add_artifact(source_artifact)
    record = archive.get_archived_artifact(source_artifact)
    # The record points back at the artifact, uses its filename as the
    # path, and has not yet been archived.
    self.assertEqual(source_artifact, record.artifact)
    self.assertEqual("artifact.gz", record.archived_path)
    self.assertIsNone(record.archived_at)
def create_build_data(self, use_requested_by=True, email=None):
    """
    Create the test data for a build.

    Builds a project/dependency pair, a projectbuild, a build tied to
    it via ProjectBuildDependency, and one artifact; returns the
    (projectbuild, build) pair.
    """
    # NOTE(review): a helper with this same name appears elsewhere in
    # this file -- confirm the two copies belong to different classes.
    requester = None
    if use_requested_by:
        requester = User.objects.create_user("testing", email=email)
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key,
        requested_by=requester)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    return projectbuild, build
def test_can_be_archived(self):
    """
    A ProjectBuild knows whether or not it's ready to be archived.
    """
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency2)
    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)
    # Phase is still UNKNOWN, so we can't archive this build yet.
    self.assertEqual("UNKNOWN", projectbuild.phase)
    self.assertFalse(projectbuild.can_be_archived)
    builds = []
    for job in [dependency1.job, dependency2.job]:
        build = BuildFactory.create(
            job=job, build_id=projectbuild.build_key,
            phase=Build.FINALIZED)
        builds.append(build)
        process_build_dependencies(build.pk)
    # Reload to pick up the phase change made by the processing above.
    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual(Build.FINALIZED, projectbuild.phase)
    # Finalized but artifact-less: still not archivable.
    self.assertFalse(
        projectbuild.can_be_archived,
        "Build with no artifacts can be archived")
    for build in builds:
        ArtifactFactory.create(build=build)
    self.assertTrue(
        projectbuild.can_be_archived,
        "Build with artifacts can't be archived")
    # Already-archived builds can't be archived again.
    projectbuild.archived = timezone.now()
    self.assertFalse(projectbuild.can_be_archived)
def test_can_be_archived(self):
    """
    A ProjectBuild knows whether or not it's ready to be archived.
    """
    # NOTE(review): a test with this exact name appears elsewhere in
    # this file -- confirm the two copies are in different classes.
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency2)
    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)
    # Phase is still UNKNOWN, so we can't archive this build yet.
    self.assertEqual("UNKNOWN", projectbuild.phase)
    self.assertFalse(projectbuild.can_be_archived)
    builds = []
    for job in [dependency1.job, dependency2.job]:
        build = BuildFactory.create(
            job=job, build_id=projectbuild.build_key,
            phase=Build.FINALIZED)
        builds.append(build)
        process_build_dependencies(build.pk)
    # Reload to pick up the phase change made by the processing above.
    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual(Build.FINALIZED, projectbuild.phase)
    # Finalized but artifact-less: still not archivable.
    self.assertFalse(
        projectbuild.can_be_archived,
        "Build with no artifacts can be archived")
    for build in builds:
        ArtifactFactory.create(build=build)
    self.assertTrue(
        projectbuild.can_be_archived,
        "Build with artifacts can't be archived")
    # Already-archived builds can't be archived again.
    projectbuild.archived = timezone.now()
    self.assertFalse(projectbuild.can_be_archived)
def test_archive_build_projectbuild(self):
    """
    The archiver can handle archiving a build from a projectbuild.
    """
    project, dependency1, dependency2 = self.create_dependencies(2)
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    archive = ArchiveFactory.create(policy="cdimage")
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(
        build=build1, filename="artifact1.gz")
    update_projectbuilds(build1)
    build1_items = archive.add_build(build1)
    # Each artifact gets two archive items -- the final count and the
    # projectbuild_dependency filter below bear this out.
    self.assertEqual(2, len(build1_items[artifact1]))
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    artifact2 = ArtifactFactory.create(
        build=build2, filename="artifact2.gz")
    update_projectbuilds(build2)
    build2_items = archive.add_build(build2)
    self.assertEqual(2, len(build2_items[artifact2]))
    self.assertEqual(4, archive.items.count())
    # Two of the four items are tied to the projectbuild.
    self.assertEqual(
        2, ArchiveArtifact.objects.filter(
            projectbuild_dependency__projectbuild=projectbuild).count())
def test_get_archived_artifacts_for_build(self):
    """
    We can fetch the artifacts that get added from a build.
    """
    dependency = DependencyFactory.create()
    source_build = BuildFactory.create(job=dependency.job)
    expected_artifact = ArtifactFactory.create(
        build=source_build, filename="testing.gz")
    archive = ArchiveFactory.create()
    archive.add_build(source_build)
    records = list(archive.get_archived_artifacts_for_build(source_build))
    self.assertEqual(
        [(expected_artifact, source_build)],
        [(record.artifact, record.build) for record in records])
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    If the build is complete, and the item being archived is in a
    FINALIZED ProjectBuild, it should use the transport to set the
    current directory correctly.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # Both dependencies have FINALIZED builds, so the projectbuild is
    # complete.
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build2, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # Both builds are complete, we expect this to be made the current
    # build.
    self.assertEqual(
        ["START",
         "Link %s to %s" % (item1.archived_path, item2.archived_path),
         "Make %s current" % item2.archived_path,
         "END"],
        transport.log)
def test_get_archived_artifacts_for_build(self):
    """
    We can fetch the artifacts that get added from a build.
    """
    dep = DependencyFactory.create()
    build_under_test = BuildFactory.create(job=dep.job)
    stored = ArtifactFactory.create(
        build=build_under_test, filename="testing.gz")
    target = ArchiveFactory.create()
    target.add_build(build_under_test)
    pairs = [
        (entry.artifact, entry.build)
        for entry in target.get_archived_artifacts_for_build(
            build_under_test)]
    self.assertEqual([(stored, build_under_test)], pairs)
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    If the build is complete, and the item being archived is in a
    FINALIZED ProjectBuild, it should use the transport to set the
    current directory correctly.
    """
    # NOTE(review): a test with this exact name appears elsewhere in
    # this file -- confirm the two copies are in different classes.
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # Both dependencies have FINALIZED builds, so the projectbuild is
    # complete.
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build2, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # Both builds are complete, we expect this to be made the current
    # build.
    self.assertEqual(
        ["START",
         "Link %s to %s" % (item1.archived_path, item2.archived_path),
         "Make %s current" % item2.archived_path,
         "END"],
        transport.log)
def test_archive_artifact_from_non_finalized_projectbuild(self):
    """
    If the item being archived belongs to a ProjectBuild that is not
    finalized (here only one of the project's two dependencies has a
    build), archiving should only transfer the file -- the transport
    must not make it the "current" build.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # Only dependency1 gets a build, so the projectbuild cannot be
    # complete.
    build = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency1)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    # Pick the archive item tied to the projectbuild dependency.
    item = [
        x for x in archive.add_build(artifact.build)[artifact]
        if x.projectbuild_dependency
    ][0]
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    # No "Make ... current" entry in the log: the projectbuild is not
    # finalized.
    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, item.archived_path),
         "END"],
        transport.log)
def test_add_build_from_dependency(self):
    """
    Adding a dependency build (i.e. not a ProjectBuild) should result
    in a single item in the archive.
    """
    project, dependency = self.create_dependencies()
    dependency_build = BuildFactory.create(job=dependency.job)
    the_artifact = ArtifactFactory.create(
        build=dependency_build, filename="testing.gz")
    archive = ArchiveFactory.create()
    added = archive.add_build(dependency_build)
    self.assertEqual(1, archive.items.count())
    self.assertEqual(
        [the_artifact],
        [item.artifact for item in added[the_artifact]])
    # The archived path should come from the archive's policy.
    expected_path = archive.get_policy().get_path_for_artifact(
        the_artifact, build=dependency_build, dependency=dependency)
    self.assertEqual(expected_path, archive.items.first().archived_path)
def test_get_path_for_artifact(self):
    """
    The default archive policy should return the path from the
    artifact url.
    """
    dependency = DependencyFactory.create(name="My Dependency")
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(filename="testing.img", build=build)
    # The path embeds a slugified dependency name plus a
    # date-and-epoch timestamp derived from the build's creation time.
    day = build.created_at.strftime("%Y-%m-%d")
    epoch = time.mktime(build.created_at.timetuple())
    expected = "my-dependency/%s-%s/testing.img" % (day, epoch)
    self.assertEqual(
        expected,
        DefaultPolicy().get_path_for_artifact(
            artifact, dependency=dependency, build=build))
def test_get_path_for_artifact(self):
    """
    The default archive policy should return the path from the
    artifact url.
    """
    dependency = DependencyFactory.create(name="My Dependency")
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(filename="testing.img", build=build)
    stamp = "%s-%s" % (
        build.created_at.strftime("%Y-%m-%d"),
        time.mktime(build.created_at.timetuple()))
    result = DefaultPolicy().get_path_for_artifact(
        artifact, dependency=dependency, build=build)
    self.assertEqual("my-dependency/%s/testing.img" % stamp, result)
def test_item_from_artifact_and_archived_artifact(self):
    """
    Return an artifact or archived artifact in a standard format for
    display.

    Both item_from_artifact and item_from_archived_artifact should
    produce a dict carrying the keys the display layer relies on.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    display_keys = ["build_name", "filename", "url", "archived"]
    # Create the artifact and check the display format
    artifact = ArtifactFactory.create(build=build, filename="file1.gz")
    artifact_item = ProjectDetailView.item_from_artifact(artifact)
    self.assertIsNotNone(artifact_item)
    self.assertTrue(isinstance(artifact_item, dict))
    # assertIn gives a clearer failure message than assertTrue(x in y).
    for key in display_keys:
        self.assertIn(key, artifact_item)
    # Archive the artifact and check the display format
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(policy="cdimage", default=True)
    items = [x for x in archive.add_build(build)[artifact]
             if x.projectbuild_dependency]
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(1, len(items))
    archived_item = ProjectDetailView.item_from_archived_artifact(items[0])
    self.assertIsNotNone(archived_item)
    self.assertTrue(isinstance(archived_item, dict))
    for key in display_keys:
        self.assertIn(key, archived_item)
def test_generate_checksums(self):
    """
    generate_checksums should call the generate_checksums method on the
    transport from the archive with the build to generate the checksums
    for.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    # Create the archive record directly so we control archived_path.
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        generate_checksums(build.pk)
    self.assertEqual(
        ["START",
         "Checksums generated for %s" % archived_artifact,
         "END"],
        transport.log)
def test_project_detail_artifacts(self):
    """
    The project detail should return artifacts with the URL from
    Jenkins or from the archive (for archived artifacts).
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(build=build, filename="file1.gz")
    process_build_dependencies(build.pk)
    # The project detail should link to the artifact from Jenkins
    project_url = reverse("project_detail", kwargs={"pk": project.pk})
    response = self.app.get(project_url, user="******")
    self.assertEqual(200, response.status_code)
    self.assertIsNotNone(
        response.context["project"].get_current_projectbuild())
    self.assertEqual(
        [ProjectDetailView.item_from_artifact(artifact)],
        response.context["current_artifacts"])
    # Archive the artifact and the view should display the archived item
    archive = ArchiveFactory.create(policy="cdimage", default=True)
    items = []
    for x in archive.add_build(build)[artifact]:
        if x.projectbuild_dependency:
            items.append(ProjectDetailView.item_from_archived_artifact(x))
    project_url = reverse("project_detail", kwargs={"pk": project.pk})
    response = self.app.get(project_url, user="******")
    self.assertEqual(200, response.status_code)
    self.assertEqual(items, response.context["current_artifacts"])
def test_get_path_for_artifact_with_dependency(self):
    """
    If the cdimage archiver gets a dependency with no projectbuild, it
    should delegate to the DefaultPolicy to generate the archive
    filename.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(filename="thing.txt", build=build)
    expected = "my-test-project/%s/thing.txt" % build.build_id
    self.assertEqual(
        expected,
        CdimageArchivePolicy().get_path_for_artifact(
            artifact, dependency=dependency, projectbuild=projectbuild))
def test_cdimage_archiver_policy_with_only_dependency_build(self):
    """
    If we only build a dependency with no project builds, then the
    cdimage archiver should delegate to the default policy for the name
    when generating the archive name for the dependency's artifacts.
    """
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)
    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    # The expected path comes from DefaultPolicy, not the cdimage
    # policy that the archive is configured with.
    name = DefaultPolicy().get_path_for_artifact(
        artifact, build=build, dependency=dependency)
    self.assertEqual(
        name,
        "\n".join(archived.values_list("archived_path", flat=True)))
    # archived_at is still None -- no archiving task has run.
    self.assertEqual(
        [None],
        list(archived.values_list("archived_at", flat=True)))
def test_get_path_for_artifact(self):
    """
    The CdimageArchivePolicy should calculate a cdimage-like path
    using the project name and build id.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(filename="thing.txt", build=build)
    result = CdimageArchivePolicy().get_path_for_artifact(
        artifact, dependency=dependency, projectbuild=projectbuild)
    self.assertEqual(
        "%s/%s/thing.txt" % ("my-test-project", build.build_id), result)