def test_archive_projectbuild(self):
    """
    The archiver can handle archiving an entire project build.
    """
    project = ProjectFactory.create()
    first_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=first_dep)
    second_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=second_dep)
    projectbuild = build_project(project, queue_build=False)
    # One build per dependency, each tied to this projectbuild.
    first_build = BuildFactory.create(
        job=first_dep.job, build_id=projectbuild.build_key)
    second_build = BuildFactory.create(
        job=second_dep.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=first_build, filename="artifact1.gz")
    ArtifactFactory.create(build=second_build, filename="artifact2.gz")
    archive = ArchiveFactory.create()
    result = archive.archive_projectbuild(projectbuild)
    # Both artifacts end up in the archive and both are reported as new.
    self.assertEqual(2, archive.items.count())
    self.assertEqual(2, len(result))
def test_archive_build_projectbuild(self):
    """
    The archiver can handle archiving a build from a projectbuild.
    """
    # create_dependencies(2) yields the project and two dependencies.
    project, dependency1, dependency2 = self.create_dependencies(2)
    # NOTE(review): dependency2 is attached to the project again here,
    # creating a second ProjectDependency row — confirm this duplicate
    # is intentional (the asserted counts below depend on it).
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    archive = ArchiveFactory.create(policy="cdimage")
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(
        build=build1, filename="artifact1.gz")
    update_projectbuilds(build1)
    # add_build returns a mapping of artifact -> archive items; each
    # artifact here yields two items.
    build1_items = archive.add_build(build1)
    self.assertEqual(2, len(build1_items[artifact1]))
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    artifact2 = ArtifactFactory.create(
        build=build2, filename="artifact2.gz")
    update_projectbuilds(build2)
    build2_items = archive.add_build(build2)
    self.assertEqual(2, len(build2_items[artifact2]))
    # Four archive items in total; two of them are linked to this
    # projectbuild via their projectbuild dependency.
    self.assertEqual(4, archive.items.count())
    self.assertEqual(
        2,
        ArchiveArtifact.objects.filter(
            projectbuild_dependency__projectbuild=projectbuild).count())
def test_project_build_detail_view(self):
    """
    Project build detail should show the build.
    """
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency)
    projectbuild = build_project(self.project, queue_build=False)
    BuildFactory.create(job=dependency.job, build_id=projectbuild.build_key)
    detail_url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": self.project.pk, "build_pk": projectbuild.pk})
    response = self.app.get(detail_url, user="******")
    expected_dependencies = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(projectbuild, response.context["projectbuild"])
    self.assertEqual(
        list(expected_dependencies), list(response.context["dependencies"]))
    # Nothing was archived, so no archived items should reach the context.
    self.assertTrue(
        "archived_items" not in response.context,
        "Project Build has archive items.")
def test_auto_track_dependency_triggers_project_build_creation(self):
    """
    If we record a build of a project dependency that is auto-tracked,
    then this should trigger the creation of a new ProjectBuild for that
    project.
    """
    tracked_dep = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=tracked_dep)
    other_dep = DependencyFactory.create()
    existing_build = BuildFactory.create(job=other_dep.job, phase="FINISHED")
    ProjectDependency.objects.create(
        project=self.project, dependency=other_dep,
        current_build=existing_build)
    # No projectbuilds exist before the new dependency build arrives.
    self.assertEqual(
        0, ProjectBuild.objects.filter(project=self.project).count())
    new_build = BuildFactory.create(job=tracked_dep.job, phase="FINISHED")
    process_build_dependencies(new_build.pk)
    self.assertEqual(
        1, ProjectBuild.objects.filter(project=self.project).count())
    projectbuild = ProjectBuild.objects.get(project=self.project)
    self.assertEqual(
        2,
        ProjectBuildDependency.objects.filter(
            projectbuild=projectbuild).count())
    # The tracked dependency records the new build...
    tracked_entry = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=tracked_dep)
    self.assertEqual(new_build, tracked_entry.build)
    # ...while the other dependency keeps its pre-existing build.
    other_entry = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=other_dep)
    self.assertEqual(existing_build, other_entry.build)
def test_build_project_automated_autotracked_dependencies(self):
    """
    For autotracked dependencies, we should use the most recent
    projectbuild to find the builds for associating with the new
    projectbuild.
    """
    project = ProjectFactory.create()
    dep_a = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dep_a,
        current_build=BuildFactory.create())
    dep_b = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dep_b,
        current_build=BuildFactory.create())
    first_projectbuild = build_project(project, automated=True)
    # Remember dep_b's build from the first projectbuild, since the
    # second projectbuild only rebuilds dep_a.
    previous_entry = ProjectBuildDependency.objects.get(
        projectbuild=first_projectbuild, dependency=dep_b)
    second_projectbuild = build_project(
        project, dependencies=[dep_a], automated=True)
    current_entry = ProjectBuildDependency.objects.get(
        projectbuild=second_projectbuild, dependency=dep_b)
    # dep_b's build should be carried over from the previous projectbuild.
    self.assertEqual(current_entry.build, previous_entry.build)
def test_build_project_automated_non_autotracked(self):
    """
    For automated, non-autotracked builds, each of the projectbuild
    dependencies created should use the current build of the dependency.
    """
    project = ProjectFactory.create()
    first_dependency = DependencyFactory.create()
    first_project_dep = ProjectDependency.objects.create(
        project=project, dependency=first_dependency,
        current_build=BuildFactory.create())
    second_dependency = DependencyFactory.create()
    second_project_dep = ProjectDependency.objects.create(
        project=project, dependency=second_dependency,
        current_build=BuildFactory.create())
    projectbuild = build_project(project, automated=True)
    recorded = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, recorded.count())
    self.assertEqual(
        [first_dependency, second_dependency],
        [entry.dependency for entry in recorded])
    # Each entry points at the dependency's current build.
    self.assertEqual(
        [first_project_dep.current_build, second_project_dep.current_build],
        [entry.build for entry in recorded])
def test_project_build_sends_finished_signal(self):
    """
    When we set the projectbuild status to finished, we should signal this.
    """
    # Record the projectbuild delivered with the finished signal.
    @receiver(projectbuild_finished, sender=ProjectBuild)
    def handle_signal(sender, projectbuild, **kwargs):
        self.projectbuild = projectbuild
    project = ProjectFactory.create()
    first_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=first_dep)
    second_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=second_dep)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    # NOTE(review): sibling tests pass projectbuild.build_key as the
    # build_id — confirm projectbuild.build_id is intended here.
    for job in [first_dep.job, second_dep.job]:
        BuildFactory.create(
            job=job, build_id=projectbuild.build_id, phase="FINISHED")
    self.assertEqual(projectbuild, self.projectbuild)
def test_project_build_status_when_all_dependencies_have_builds(self):
    """
    When we have FINISHED builds for all the dependencies, the
    projectbuild state should be FINISHED.
    """
    project = ProjectFactory.create()
    dep_one = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dep_one)
    dep_two = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dep_two)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    # NOTE(review): sibling tests pass projectbuild.build_key as the
    # build_id — confirm projectbuild.build_id is intended here.
    for job in [dep_one.job, dep_two.job]:
        BuildFactory.create(
            job=job, build_id=projectbuild.build_id, phase="FINISHED")
    # Re-fetch to observe the state transition.
    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual("SUCCESS", projectbuild.status)
    self.assertEqual("FINISHED", projectbuild.phase)
    self.assertIsNotNone(projectbuild.ended_at)
def test_archive_projectbuild_with_prearchived_artifact(self):
    """
    If we archive a project build with several artifacts, it should
    return only the newly added artifacts.
    """
    project = ProjectFactory.create()
    dep_one = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dep_one)
    dep_two = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dep_two)
    projectbuild = build_project(project, queue_build=False)
    build_one = BuildFactory.create(
        job=dep_one.job, build_id=projectbuild.build_key)
    build_two = BuildFactory.create(
        job=dep_two.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build_one, filename="artifact1.gz")
    prearchived = ArtifactFactory.create(
        build=build_two, filename="artifact2.gz")
    archive = ArchiveFactory.create()
    # Archive one artifact ahead of time; archiving the whole
    # projectbuild should then report only the other one as new.
    archive.add_artifact(prearchived, projectbuild=projectbuild)
    result = archive.archive_projectbuild(projectbuild)
    self.assertEqual(2, archive.items.count())
    self.assertEqual(1, len(result))
def test_build_project_automated_non_autotracked(self):
    """
    For automated, non-autotracked builds, each of the projectbuild
    dependencies created should use the current build of the dependency.
    """
    project = ProjectFactory.create()
    # Two dependencies, each with a pinned current build.
    project_deps = []
    for _ in range(2):
        dependency = DependencyFactory.create()
        project_deps.append(ProjectDependency.objects.create(
            project=project, dependency=dependency,
            current_build=BuildFactory.create()))
    new_build = build_project(project, automated=True)
    build_dependencies = ProjectBuildDependency.objects.filter(
        projectbuild=new_build)
    self.assertEqual(2, build_dependencies.count())
    self.assertEqual(
        [pd.dependency for pd in project_deps],
        [bd.dependency for bd in build_dependencies])
    # Each projectbuild dependency uses the dependency's current build.
    self.assertEqual(
        [pd.current_build for pd in project_deps],
        [bd.build for bd in build_dependencies])
def test_is_dependency_building(self):
    """
    is_building should return True if we have an active build for this
    dependency in the works.
    """
    dep = DependencyFactory.create()
    # No builds exist yet, so nothing can be in progress.
    self.assertFalse(dep.is_building)
    BuildFactory.create(job=dep.job)
    # A fresh build on the dependency's job counts as building.
    self.assertTrue(dep.is_building)
def test_get_current_build(self):
    """
    Dependency.get_current_build should return the most recent build
    that has completed and was SUCCESSful.
    """
    incomplete_build = BuildFactory.create()
    successful_build = BuildFactory.create(
        phase=Build.FINALIZED, status="SUCCESS", job=incomplete_build.job)
    dependency = DependencyFactory.create(job=incomplete_build.job)
    # Only the finalized, successful build qualifies as current.
    self.assertEqual(successful_build, dependency.get_current_build())
def test_dependency_detail_with_currently_building(self):
    """
    If the Dependency is currently building, we should get an info
    message with this in the page.
    """
    dependency = DependencyFactory.create()
    # An in-progress build on the dependency's job.
    BuildFactory.create(job=dependency.job, status="STARTED")
    detail_url = reverse("dependency_detail", kwargs={"pk": dependency.pk})
    response = self.app.get(detail_url, user="******")
    self.assertContains(response, "Dependency currently building")
def test_get_current_build(self):
    """
    Dependency.get_current_build should return the most recent build
    that has completed and was SUCCESSful.
    """
    # First build never completes; the second is finalized and green.
    pending_build = BuildFactory.create()
    finished_build = BuildFactory.create(
        phase=Build.FINALIZED, status="SUCCESS", job=pending_build.job)
    dependency = DependencyFactory.create(job=pending_build.job)
    self.assertEqual(finished_build, dependency.get_current_build())
def test_build_dependency_already_building(self):
    """
    If the Dependency appears to be already building, then we should
    accept the request without queueing another build of the job.

    (The original docstring was truncated mid-sentence; completed from
    the assertions below: 202 Accepted and no build_job dispatch.)
    """
    self.client.force_authenticate(user=self.user)
    dependency = DependencyFactory.create()
    # An existing build makes the dependency look "already building".
    BuildFactory.create(job=dependency.job)
    url = reverse(
        "dependency-build-dependency", kwargs={"pk": dependency.pk})
    with mock.patch("projects.helpers.build_job") as build_job_mock:
        response = self.client.post(url)
        self.assertEqual(status.HTTP_202_ACCEPTED, response.status_code)
        # No new build should have been queued for Jenkins.
        self.assertFalse(build_job_mock.delay.called)
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    If the build is complete, and the item being archived is in a
    FINALIZED ProjectBuild, it should use the transport to set the
    current directory correctly.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # Both dependency builds are FINALIZED, so the projectbuild as a
    # whole counts as complete.
    build1 = BuildFactory.create(job=dependency1.job,
                                 build_id=projectbuild.build_key,
                                 phase=Build.FINALIZED)
    build2 = BuildFactory.create(job=dependency2.job,
                                 build_id=projectbuild.build_key,
                                 phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(build=build2,
                                      filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir,
                                    default=True)
    # add_build maps artifact -> archive items; two items exist for this
    # artifact (presumably the dependency copy and the projectbuild
    # copy — confirm against Archive.add_build).
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # Both builds are complete, we expect this to be made the current
    # build.
    self.assertEqual([
        "START",
        "Link %s to %s" % (item1.archived_path, item2.archived_path),
        "Make %s current" % item2.archived_path,
        "END"
    ], transport.log)
def test_project_build_status_when_all_dependencies_have_builds(self):
    """
    When we have FINALIZED builds for all the dependencies, the
    projectbuild state should be FINALIZED.
    """
    dep_one = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dep_one)
    dep_two = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dep_two)
    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)
    # Finalize a build for each dependency and process it.
    for job in [dep_one.job, dep_two.job]:
        finalized_build = BuildFactory.create(
            job=job, build_id=projectbuild.build_key,
            phase=Build.FINALIZED)
        process_build_dependencies(finalized_build.pk)
    # Re-fetch to pick up the state changes.
    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual("SUCCESS", projectbuild.status)
    self.assertEqual(Build.FINALIZED, projectbuild.phase)
    self.assertIsNotNone(projectbuild.ended_at)
def test_build_with_several_projectbuild_dependencies(self):
    """
    A build of dependency that's autotracked by several projects should
    trigger creation of all projectbuilds correctly.
    """
    project1, dependency = self.create_dependencies()
    project2 = ProjectFactory.create()
    ProjectDependency.objects.create(
        project=project2, dependency=dependency)
    projectbuild = build_project(project1, queue_build=False)
    # Bug fix: the original read "projectbuild.phase == Build.FINALIZED",
    # a no-op comparison whose result was discarded before save() — the
    # intent (given the following save) is to mark the phase.
    projectbuild.phase = Build.FINALIZED
    projectbuild.save()
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    process_build_dependencies(build.pk)
    # One projectbuild dependency per tracking project, both pointing at
    # the same dependency and build.
    self.assertEqual([dependency, dependency], sorted([
        b.dependency for b in ProjectBuildDependency.objects.all()
    ]))
    self.assertEqual(
        [build, build],
        sorted([b.build for b in ProjectBuildDependency.objects.all()]))
def test_build_with_several_projectbuild_dependencies(self):
    """
    A build of dependency that's autotracked by several projects should
    trigger creation of all projectbuilds correctly.
    """
    first_project, dependency = self.create_dependencies()
    second_project = ProjectFactory.create()
    ProjectDependency.objects.create(
        project=second_project, dependency=dependency)
    projectbuild = build_project(first_project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    process_build_dependencies(build.pk)
    # One projectbuild dependency per tracking project, both pointing
    # at the same dependency and the same build.
    recorded = ProjectBuildDependency.objects.all()
    self.assertEqual(
        [dependency, dependency],
        sorted([entry.dependency for entry in recorded]))
    self.assertEqual(
        [build, build], sorted([entry.build for entry in recorded]))
def test_archive_artifact_from_jenkins(self):
    """
    archive_artifact_from_jenkins should get a transport, and then call
    start, end and archive_artifact on the transport.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archived_items = archive.add_build(artifact.build)
    # Serve the artifact body from a fake Jenkins download.
    jenkins_content = StringIO(u"Artifact from Jenkins")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.return_value = jenkins_content
        archive_artifact_from_jenkins(archived_items[artifact][0].pk)
    [item] = list(archive.get_archived_artifacts_for_build(build))
    stored_file = os.path.join(self.basedir, item.archived_path)
    # The downloaded content was written out and its size recorded.
    self.assertEqual(file(stored_file).read(), "Artifact from Jenkins")
    self.assertEqual(21, item.archived_size)
def test_generate_checksums(self):
    """
    generate_checksums should send commands to the ssh client to
    generate an sha256sum for the passed in archived artifact.
    """
    # Set up a project with a build and an archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    build_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=build_dependency)
    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived)
    # The checksum is generated next to the artifact on the remote host.
    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_cdimage_archiver_policy(self):
    """
    If we use the cdimage policy, then the file path is quite different.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_artifact(artifact, projectbuild=projectbuild)
    archived = archive.get_archived_artifact(artifact)
    self.assertEqual(artifact, archived.artifact)
    # cdimage paths take the form <project-slug>/<projectbuild-id>/<file>.
    expected_path = "%s/%s/testing.gz" % (
        slugify(project.name), projectbuild.build_id)
    self.assertEqual(expected_path, archived.archived_path)
    # Nothing has been physically copied yet.
    self.assertIsNone(archived.archived_at)
def test_get_build_table_with_current_build_outside_recent(self):
    """
    If we have a current build outside the most recent 5, then we should
    extend the dependencies list for that row to illustrate the current
    build.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    # Pin the oldest build as current, then add five newer builds so the
    # current one falls outside the most recent five.
    pinned_build = BuildFactory.create(job=dependency.job)
    recent_builds = BuildFactory.create_batch(5, job=dependency.job)
    ProjectDependency.objects.create(
        project=project, dependency=dependency, auto_track=False,
        current_build=pinned_build)
    header, table = get_build_table_for_project(project)
    self.assertEqual([dependency], header)
    # Rows list the five recent builds newest-first, with the pinned
    # current build appended as an extra row.
    expected = [[{"build": recent, "current": False}]
                for recent in reversed(recent_builds)]
    expected.append([{"build": pinned_build, "current": True}])
    self.assertEqual(expected, table)
def test_archive_artifact_from_finalized_dependency_build(self):
    """
    archive_artifact_from_jenkins should get a transport, then copy the
    artifact and make it the current version.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    [item] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    # The transport log records copy + "make current" inside the
    # start/end lifecycle.
    expected_log = [
        "START",
        "%s -> %s root:testing" % (artifact.url, item.archived_path),
        "Make %s current" % item.archived_path,
        "END"]
    self.assertEqual(expected_log, transport.log)
def test_generate_checksums_no_transport(self):
    """
    generate_checksums should exit early, logging the fact, when there
    is no default archive from which to obtain a transport.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # No default archive is defined, so this transport must stay unused.
    transport = LoggingTransport(None)
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        return_value = generate_checksums(build.pk)
    self.assertEqual([], transport.log)
    # The build pk must be passed along for further chained tasks.
    self.assertEqual(build.pk, return_value)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_build_url_with_non_projectbuild(self):
    """
    build_url should return an empty string for non-project builds.
    # TODO: This should link to a Build Detail page in the jenkins app.
    """
    plain_build = BuildFactory.create()
    # NOTE(review): a sibling copy of this test passes the Build object
    # itself rather than build_id — confirm which argument build_url
    # actually expects.
    self.assertEqual("", build_url(plain_build.build_id))
def test_projectbuild_updates_when_build_created(self):
    """
    If we have a ProjectBuild with a dependency, which is associated
    with a job, and we get a build from that job, then if the build_id
    is correct, we should associate the build dependency with that build.
    """
    dep_one = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dep_one)
    dep_two = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dep_two)
    projectbuild = build_project(self.project, queue_build=False)
    new_build = BuildFactory.create(
        job=dep_one.job, build_id=projectbuild.build_key)
    process_build_dependencies(new_build.pk)
    recorded = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, recorded.count())
    # dep_one's entry now references the new build...
    self.assertEqual(new_build, recorded.get(dependency=dep_one).build)
    # ...while dep_two has no build associated yet.
    self.assertIsNone(recorded.get(dependency=dep_two).build)
def test_cdimage_archiver_policy(self):
    """
    The cdimage policy computes archived paths via the policy object,
    delegating to the default policy for builds without a projectbuild.
    """
    project, dependency = self.create_dependencies()
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)
    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    policy = CdimageArchivePolicy()
    paths = []
    for item in archived:
        # Mirror the archiver's choice of projectbuild (or None) for
        # each archived item.
        dep_link = item.projectbuild_dependency
        if dep_link and dep_link.projectbuild:
            related_projectbuild = dep_link.projectbuild
        else:
            related_projectbuild = None
        paths.append(policy.get_path_for_artifact(
            item.artifact, build=build, dependency=item.dependency,
            projectbuild=related_projectbuild))
    # Recomputed paths must match what the archive stored.
    self.assertEqual(
        "\n".join(paths),
        "\n".join(archived.values_list("archived_path", flat=True)))
def test_artifact_get_url(self):
    """
    ArchiveArtifact.get_url should return a valid URL for an artifact
    within the archive.
    """
    project, dependency = self.create_dependencies()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="file1.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    update_projectbuilds(build)
    create_projectbuilds_for_autotracking(build)
    archive.add_build(build)
    # Two archive items are created for the build; each must expose a
    # URL rooted at the archive's base.
    [first_item, second_item] = list(
        archive.get_archived_artifacts_for_build(build))
    for item in (first_item, second_item):
        self.assertEqual(
            "http://example.com/projects/%s" % item.archived_path,
            item.get_url())
def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing, it should arrange for the artifacts for the provided
    build to be archived in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(job=dependency.job,
                                build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir,
                                    default=True,
                                    policy="cdimage")
    # Serve the artifact body from a fake Jenkins download.
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)
    # Two archive items exist for the build; both files on disk should
    # contain the downloaded content.
    [item1, item2] = list(archive.get_archived_artifacts_for_build(build))
    filename = os.path.join(self.basedir, item1.archived_path)
    self.assertEqual(file(filename).read(), "Artifact from Jenkins")
    filename = os.path.join(self.basedir, item2.archived_path)
    self.assertEqual(file(filename).read(), "Artifact from Jenkins")
def test_archive_artifact_from_jenkins_transport_lifecycle(self):
    """
    archive_artifact_from_jenkins should get a transport, and copy the
    file to the correct storage.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive.add_build(artifact.build)
    [item] = list(archive.get_archived_artifacts_for_build(build))
    # Not archived yet.
    self.assertIsNone(item.archived_at)
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    # Re-fetch to observe the changes made by the task.
    [item] = list(archive.get_archived_artifacts_for_build(build))
    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, item.archived_path),
         "Make %s current" % item.archived_path,
         "END"],
        transport.log)
    # The archived timestamp is set once the copy completes.
    self.assertIsNotNone(item.archived_at)
def test_cdimage_archiver_policy(self):
    """
    Archived paths under the cdimage policy are computed by the policy
    object, falling back to the default policy for builds that have no
    projectbuild.
    """
    project, dependency = self.create_dependencies()
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)
    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    policy = CdimageArchivePolicy()

    def expected_path(item):
        # Mirror the archiver's projectbuild selection (or None).
        dep_link = item.projectbuild_dependency
        if dep_link and dep_link.projectbuild:
            related = dep_link.projectbuild
        else:
            related = None
        return policy.get_path_for_artifact(
            item.artifact, build=build, dependency=item.dependency,
            projectbuild=related)

    paths = [expected_path(item) for item in archived]
    self.assertEqual(
        "\n".join(paths),
        "\n".join(archived.values_list("archived_path", flat=True)))
def test_projectbuild_updates_when_build_created(self):
    """
    If we have a ProjectBuild with a dependency, which is associated
    with a job, and we get a build from that job, then if the build_id
    is correct, we should associate the build dependency with that build.
    """
    first_dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=first_dependency)
    second_dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=second_dependency)
    projectbuild = build_project(self.project, queue_build=False)
    # Only the first dependency's job produces a build.
    incoming_build = BuildFactory.create(
        job=first_dependency.job, build_id=projectbuild.build_key)
    process_build_dependencies(incoming_build.pk)
    build_dependencies = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, build_dependencies.count())
    first_entry = build_dependencies.get(dependency=first_dependency)
    self.assertEqual(incoming_build, first_entry.build)
    # The second dependency remains without a build.
    second_entry = build_dependencies.get(dependency=second_dependency)
    self.assertIsNone(second_entry.build)
def test_generate_checksums(self):
    """
    generate_checksums should send commands to the ssh client to
    generate an sha256sum for the passed in archived artifact.
    """
    # Assemble a project whose build has one archived artifact.
    project = ProjectFactory.create()
    dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dep)
    pbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(job=dep.job, build_id=pbuild.build_key)
    pbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=pbuild, dependency=dep)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=pbuild_dependency)
    ssh_transport = SshTransport(self.archive)
    with mock.patch.object(ssh_transport, "_run_command") as run_command:
        ssh_transport.generate_checksums(archived_artifact)
    # The SHA256SUMS file is appended next to the remote artifact.
    run_command.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_build_url_with_non_projectbuild(self):
    """
    build_url should return an empty string for non-project builds.
    # TODO: This should link to a Build Detail page in the jenkins app.
    """
    standalone_build = BuildFactory.create()
    # NOTE(review): a sibling copy of this test passes build.build_id
    # instead of the Build object — confirm which argument build_url
    # actually expects.
    self.assertEqual("", build_url(standalone_build))
def test_process_build_artifacts_with_no_default_archive(self):
    """
    If we have no default archive, we should log the fact that we can't
    automatically archive artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # The only archive available is explicitly non-default.
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)
    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
    # We must return the build.pk for further chained calls to work.
    self.assertEqual(build.pk, result.get())
    mock_logging.assert_has_calls([
        mock.call.info(
            "Processing build artifacts from build %s %d",
            build, build.number),
        mock.call.info(
            "No default archiver - build not automatically archived.")
    ])
    # Nothing was archived.
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_link_artifact_in_archive(self):
    """
    The link_artifact_in_archive task should use the transport to link
    the specified artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    build = BuildFactory.create(job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    # add_build yields two archive items for this artifact; item1 is the
    # link source and item2 the link target.
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    # Give the source item a size so we can verify it is preserved.
    item1.archived_size = 1000
    item1.save()
    transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # The task links item1's file to item2's path and makes item2 the
    # current version.
    transport.link_filename_to_filename.assert_called_once_with(
        item1.archived_path, item2.archived_path)
    transport.link_to_current.assert_called_once_with(item2.archived_path)
    item1 = ArchiveArtifact.objects.get(pk=item1.pk)
    self.assertEqual(1000, item1.archived_size)
def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing, it should arrange for the artifacts for the provided
    build to be archived in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    # default=True: this archive should automatically receive artifacts.
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    # Stub the Jenkins download so the transport reads a fixed payload
    # instead of hitting the network.
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)
    # Two archived copies are expected here -- presumably one per
    # dependency/projectbuild path under the cdimage policy; confirm
    # against the policy implementation.
    [item1, item2] = list(archive.get_archived_artifacts_for_build(build))
    filename = os.path.join(self.basedir, item1.archived_path)
    self.assertEqual(file(filename).read(), "Artifact from Jenkins")
    filename = os.path.join(self.basedir, item2.archived_path)
    self.assertEqual(file(filename).read(), "Artifact from Jenkins")
def test_generate_checksums_no_transport(self):
    """
    When there is no default archive, generate_checksums should log the
    early exit, leave the transport untouched, and still return the
    build pk so chained tasks keep working.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # No archive defined for this transport.
    idle_transport = LoggingTransport(None)
    # Intercept the module-level logger to capture the early-exit notice.
    with mock.patch.object(
            logging, "info", return_value=None) as mock_log:
        result = generate_checksums(build.pk)
    self.assertEqual([], idle_transport.log)
    self.assertEqual(build.pk, result)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_generate_checksums(self):
    """
    generate_checksums should ask the archive's transport to produce
    checksums for the archived artifacts of the supplied build.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    build_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    archived = ArchiveArtifact.objects.create(
        build=build, archive=archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=build_dependency)
    recording_transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=recording_transport):
        generate_checksums(build.pk)
    expected_log = [
        "START",
        "Checksums generated for %s" % archived,
        "END",
    ]
    self.assertEqual(expected_log, recording_transport.log)
def test_link_artifact_in_archive(self):
    """
    The link_artifact_in_archive task should use the transport to link
    the specified artifacts.

    NOTE(review): a test with this exact name also appears earlier in
    this module; if both live in the same test class, this later
    definition silently shadows the earlier one -- confirm intended.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency)
    build = BuildFactory.create(job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(build=build,
                                      filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir, default=True)
    # add_build returns two archive items for this artifact.
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    item1.archived_size = 1000
    item1.save()
    transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # The linking and "current" promotion are delegated to the transport.
    transport.link_filename_to_filename.assert_called_once_with(
        item1.archived_path, item2.archived_path)
    transport.link_to_current.assert_called_once_with(item2.archived_path)
    # Reload to confirm the size assigned above was persisted.
    item1 = ArchiveArtifact.objects.get(pk=item1.pk)
    self.assertEqual(1000, item1.archived_size)
def test_archive_artifact_from_jenkins_transport_lifecycle(self):
    """
    archive_artifact_from_jenkins should get a transport, copy the file
    into the archive's storage, and stamp the item as archived.
    """
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive.add_build(artifact.build)
    [archived_item] = list(
        archive.get_archived_artifacts_for_build(build))
    # Nothing has been copied yet.
    self.assertIsNone(archived_item.archived_at)
    log_transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=log_transport):
        archive_artifact_from_jenkins(archived_item.pk)
    [archived_item] = list(
        archive.get_archived_artifacts_for_build(build))
    expected_log = [
        "START",
        "%s -> %s root:testing" % (
            artifact.url, archived_item.archived_path),
        "Make %s current" % archived_item.archived_path,
        "END"
    ]
    self.assertEqual(expected_log, log_transport.log)
    self.assertIsNotNone(archived_item.archived_at)
def test_project_build_detail_view_with_archived_artifacts(self):
    """
    If we have archived artifacts for this build, the detail view's
    response context should include the list of archived items.
    """
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency)
    projectbuild = build_project(self.project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="file1.gz")
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(policy="cdimage", default=True)
    # Only items tied to a projectbuild dependency are shown on the
    # projectbuild detail page.
    expected_items = [
        item for item in archive.add_build(build)[artifact]
        if item.projectbuild_dependency]
    url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": self.project.pk,
                "build_pk": projectbuild.pk})
    response = self.app.get(url, user="******")
    self.assertEqual(
        expected_items, list(response.context["archived_items"]))
def test_archive_artifact_from_jenkins(self):
    """
    archive_artifact_from_jenkins should get a transport and copy the
    artifact from Jenkins into the correct storage, recording the
    archived size.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(build=build, filename="testing/testing.txt")
    items = archive.add_build(artifact.build)
    # Replace the Jenkins download with a fixed in-memory payload.
    fakefile = StringIO(u"Artifact from Jenkins")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.return_value = fakefile
        archive_artifact_from_jenkins(items[artifact][0].pk)
    [item] = list(archive.get_archived_artifacts_for_build(build))
    filename = os.path.join(self.basedir, item.archived_path)
    # The stubbed payload should have been written into the archive.
    self.assertEqual(file(filename).read(), "Artifact from Jenkins")
    # 21 == len("Artifact from Jenkins").
    self.assertEqual(21, item.archived_size)
def test_process_build_artifacts_with_no_default_archive(self):
    """
    If we have no default archive, we should log the fact that we can't
    automatically archive artifacts.

    NOTE(review): a test with this exact name also appears earlier in
    this module; if both live in the same test class, this later
    definition silently shadows the earlier one -- confirm intended.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(job=dependency.job,
                                build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # default=False: there is no default archive to pick this build up.
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir, default=False)
    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
    # We must return the build.pk for further chained calls to work.
    self.assertEqual(build.pk, result.get())
    mock_logging.assert_has_calls([
        mock.call.info("Processing build artifacts from build %s %d",
                       build, build.number),
        mock.call.info(
            "No default archiver - build not automatically archived.")
    ])
    # Nothing was archived.
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_archive_artifact_from_finalized_dependency_build(self):
    """
    Archiving an artifact from a dependency build should drive the
    transport through its full lifecycle: start, copy the file, make it
    current, end.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(build=build,
                                      filename="testing/testing.txt")
    [item] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    # The transport log records the full lifecycle in order.
    self.assertEqual([
        "START",
        "%s -> %s root:testing" % (artifact.url, item.archived_path),
        "Make %s current" % item.archived_path,
        "END"
    ], transport.log)
def test_artifact_get_url(self):
    """
    ArchiveArtifact.get_url should return a valid URL for an artifact
    within the archive.
    """
    project, dependency = self.create_dependencies()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="file1.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    update_projectbuilds(build)
    create_projectbuilds_for_autotracking(build)
    archive.add_build(build)
    archived = list(archive.get_archived_artifacts_for_build(build))
    # The build yields exactly two archived items, each of which must
    # resolve to a URL under the archive's base.
    self.assertEqual(2, len(archived))
    for entry in archived:
        self.assertEqual(
            "http://example.com/projects/%s" % entry.archived_path,
            entry.get_url())
def test_archive_build_several_projectbuild_dependencies(self):
    """
    If we archive a build that is used in several projectbuilds, then
    we should get multiple copies of the artifact.
    """
    project1, dependency1, dependency2 = self.create_dependencies(2)
    project2 = ProjectFactory.create(name="Project 2")
    # project2 shares dependency1, so builds of dependency1 belong to
    # two projects.
    ProjectDependency.objects.create(project=project2,
                                     dependency=dependency1)
    projectbuild = build_project(project1, queue_build=False)
    build1 = BuildFactory.create(job=dependency1.job,
                                 build_id=projectbuild.build_key)
    build2 = BuildFactory.create(job=dependency2.job,
                                 build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(build=build1, filename="file1.gz")
    artifact2 = ArtifactFactory.create(build=build2, filename="file2.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    update_projectbuilds(build1)
    create_projectbuilds_for_autotracking(build1)
    archive.add_build(build1)
    # build1: one dependency-path copy plus one copy per project using
    # dependency1 (see the archived_path assertion below).
    self.assertEqual(3, archive.items.count())
    update_projectbuilds(build2)
    create_projectbuilds_for_autotracking(build2)
    archive.add_build(build2)
    # build2 adds its dependency-path copy and one for project1's build.
    self.assertEqual(5, archive.items.count())
    artifacts = ArchiveArtifact.objects.all().order_by("archived_path")
    policy = CdimageArchivePolicy()
    # All five archived paths, sorted: the two dependency paths from
    # the policy, then the per-project copies.
    self.assertEqual(
        "{dependency1}\n{dependency2}\n"
        "project-1/{build}/file1.gz\nproject-1/{build}/file2.gz\n"
        "project-2/{build}/file1.gz".format(
            dependency1=policy.get_path_for_artifact(
                artifact=artifact1, build=build1, dependency=dependency1),
            dependency2=policy.get_path_for_artifact(
                artifact=artifact2, build=build2, dependency=dependency2),
            build=projectbuild.build_id),
        "\n".join(artifacts.values_list("archived_path", flat=True)))
def test_get_current_artifacts(self):
    """
    Project.get_current_artifacts returns only the artifacts that
    belong to the dependency's current build.
    """
    project = ProjectFactory.create()
    job = JobFactory.create()
    dependency = DependencyFactory.create(job=job)
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    older_build = BuildFactory.create(job=job)
    newer_build = BuildFactory.create(job=job)
    # The older build's artifact must not appear in the result.
    ArtifactFactory.create(build=older_build)
    current_artifact = ArtifactFactory.create(build=newer_build)
    process_build_dependencies(newer_build.pk)
    self.assertEqual(
        [current_artifact], list(project.get_current_artifacts()))
def test_can_be_archived_with_no_artifacts(self):
    """
    A projectbuild with no artifacts can't be archived.

    Both dependency builds finish (FINALIZED), but since neither
    registers an artifact, can_be_archived must remain False.
    """
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency2)
    # The redundant function-local "from projects.helpers import
    # build_project" was dropped: the module-level import used by the
    # sibling tests already provides build_project.
    projectbuild = build_project(self.project, queue_build=False)
    for job in [dependency1.job, dependency2.job]:
        BuildFactory.create(
            job=job, build_id=projectbuild.build_id,
            phase=Build.FINALIZED)
    # Reload to pick up any state changed by the finalized builds.
    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertFalse(projectbuild.can_be_archived)
def test_projectbuild_list_view(self):
    """
    The list view should provide the project's builds and the project
    itself in the response context.
    """
    project = ProjectFactory.create()
    job = JobFactory.create()
    BuildFactory.create_batch(5, job=job)
    dependency = DependencyFactory.create(job=job)
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = ProjectBuildFactory.create(project=project)
    BuildFactory.create(job=job, build_id=projectbuild.build_id)
    url = reverse("project_projectbuild_list", kwargs={"pk": project.pk})
    response = self.app.get(url, user="******")
    self.assertEqual(200, response.status_code)
    # Only the projectbuild created above should be listed.
    self.assertEqual(
        set([projectbuild]), set(response.context["projectbuilds"]))
    self.assertEqual(project, response.context["project"])
def test_new_build_with_no_auto_track_build(self):
    """
    A new build for a dependency whose ProjectDependency has
    auto_track=False must not move the current_build pointer.
    """
    first_build = BuildFactory.create()
    dependency = DependencyFactory.create(job=first_build.job)
    tracked = ProjectDependency.objects.create(
        project=self.project, dependency=dependency, auto_track=False)
    tracked.current_build = first_build
    tracked.save()
    second_build = BuildFactory.create(job=first_build.job)
    process_build_dependencies(second_build.pk)
    # Reload the project dependency: it must still point at the first
    # build because auto-tracking is off.
    tracked = ProjectDependency.objects.get(pk=tracked.pk)
    self.assertEqual(first_build, tracked.current_build)
def test_auto_track_dependency_triggers_project_build_creation(self):
    """
    If we record a build of a project dependency that is auto-tracked,
    then this should trigger the creation of a new ProjectBuild for
    that project.
    """
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=self.project,
                                     dependency=dependency1)
    dependency2 = DependencyFactory.create()
    existing_build = BuildFactory.create(job=dependency2.job,
                                         phase=Build.FINALIZED)
    ProjectDependency.objects.create(project=self.project,
                                     dependency=dependency2,
                                     current_build=existing_build)
    # Sanity check: no projectbuilds exist for this project yet.
    self.assertEqual(
        0, ProjectBuild.objects.filter(project=self.project).count())
    build = BuildFactory.create(job=dependency1.job, phase=Build.FINALIZED)
    process_build_dependencies(build.pk)
    # Processing the new build created exactly one ProjectBuild...
    self.assertEqual(
        1, ProjectBuild.objects.filter(project=self.project).count())
    projectbuild = ProjectBuild.objects.get(project=self.project)
    # ...with one dependency row per project dependency.
    self.assertEqual(
        2,
        ProjectBuildDependency.objects.filter(
            projectbuild=projectbuild).count())
    build_dependency1 = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=dependency1)
    # The auto-tracked dependency points at the newly-recorded build.
    self.assertEqual(build, build_dependency1.build)
    build_dependency2 = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=dependency2)
    # The other dependency keeps its existing current build.
    self.assertEqual(existing_build, build_dependency2.build)