def test_build_project_automated_autotracked_dependencies(self):
    """
    An automated build of autotracked dependencies should consult the
    most recent projectbuild to find the builds to associate with the
    newly created projectbuild.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1,
        current_build=BuildFactory.create())
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2,
        current_build=BuildFactory.create())

    first_projectbuild = build_project(project, automated=True)
    # Record the build used for dependency2 by the first projectbuild;
    # the second projectbuild only rebuilds dependency1, so this build
    # should carry over.
    previous = ProjectBuildDependency.objects.get(
        projectbuild=first_projectbuild, dependency=dependency2)

    second_projectbuild = build_project(
        project, dependencies=[dependency1], automated=True)
    carried_over = ProjectBuildDependency.objects.get(
        projectbuild=second_projectbuild, dependency=dependency2)
    self.assertEqual(carried_over.build, previous.build)
def test_get_current_projectbuild(self):
    """
    Project.get_current_projectbuild returns the most recent
    ProjectBuild for the project.
    """
    project = ProjectFactory.create()
    # Two tracked dependencies; only their existence matters here.
    for _ in range(2):
        ProjectDependency.objects.create(
            project=project, dependency=DependencyFactory.create())

    from projects.helpers import build_project
    build1 = build_project(project, queue_build=False)
    build2 = build_project(project, queue_build=False)

    now = timezone.now()
    an_hour_ago = now - timedelta(hours=1)
    cases = [
        (Build.FINALIZED, Build.STARTED, an_hour_ago, None, build1),
        (Build.FINALIZED, Build.FINALIZED, an_hour_ago, now, build2),
    ]
    for phase1, phase2, end1, end2, expected in cases:
        build1.phase = phase1
        build1.ended_at = end1
        build1.save()
        build2.phase = phase2
        build2.ended_at = end2
        build2.save()
        self.assertEqual(expected, project.get_current_projectbuild())
def test_get_current_projectbuild(self):
    """
    The most recent ProjectBuild for the project is what
    Project.get_current_projectbuild should hand back.
    """
    project = ProjectFactory.create()
    first_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=first_dep)
    second_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=second_dep)

    from projects.helpers import build_project
    first = build_project(project, queue_build=False)
    second = build_project(project, queue_build=False)

    now = timezone.now()
    earlier = now - timedelta(hours=1)
    scenarios = (
        (Build.FINALIZED, Build.STARTED, earlier, None, first),
        (Build.FINALIZED, Build.FINALIZED, earlier, now, second),
    )
    for phase_a, phase_b, ended_a, ended_b, expected in scenarios:
        first.phase = phase_a
        first.ended_at = ended_a
        first.save()
        second.phase = phase_b
        second.ended_at = ended_b
        second.save()
        self.assertEqual(expected, project.get_current_projectbuild())
def test_build_project_with_no_queue_build(self):
    """
    If we pass queue_build=False to build_project, then no builds
    should happen.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)

    with mock.patch("projects.helpers.build_job") as mock_build_job:
        # Bug fix: previously this called build_project(project) without
        # queue_build=False, so the documented behaviour was never
        # actually exercised.
        build_project(project, queue_build=False)
    self.assertItemsEqual([], mock_build_job.call_args_list)
def test_process_build_artifacts_with_no_default_archive(self):
    """
    Without a default archive we cannot automatically archive
    artifacts, and we should log that fact.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)

    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
        # The task must return build.pk so further chained calls work.
        self.assertEqual(build.pk, result.get())
        mock_logging.assert_has_calls([
            mock.call.info(
                "Processing build artifacts from build %s %d",
                build, build.number),
            mock.call.info(
                "No default archiver - build not automatically archived.")
        ])
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing; it should arrange for the artifacts of the supplied
    build to land in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # Make sure the artifacts are all connected up first.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")

    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)

    [item1, item2] = list(archive.get_archived_artifacts_for_build(build))
    for item in (item1, item2):
        path = os.path.join(self.basedir, item.archived_path)
        self.assertEqual(file(path).read(), "Artifact from Jenkins")
def test_generate_checksums_no_transport(self):
    """
    generate_checksums should exit early when there is no default
    archive, since without one no checksum can be calculated.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")

    # No archive defined.
    quiet_transport = LoggingTransport(None)
    # Mock the logger to capture the early-exit message.
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        outcome = generate_checksums(build.pk)

    self.assertEqual([], quiet_transport.log)
    self.assertEqual(build.pk, outcome)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_build_project_with_specified_dependencies(self):
    """
    When an explicit dependency list is supplied, only those
    dependencies should be built.
    """
    [dep1, dep2, dep3] = DependencyFactory.create_batch(3)
    project = ProjectFactory.create()
    for dep in (dep1, dep2, dep3):
        ProjectDependency.objects.create(
            project=project, dependency=dep, auto_track=True)
    build = BuildFactory.create(job=dep1.job)

    # Reload from the database to check the auto-tracked build took.
    refreshed = ProjectDependency.objects.get(
        project=project, dependency=dep1)
    self.assertEqual(build, refreshed.current_build)

    with mock.patch("projects.helpers.build_job") as mock_build_job:
        new_build = build_project(project, dependencies=[dep1, dep2])

    recorded = ProjectBuildDependency.objects.filter(projectbuild=new_build)
    # All three dependencies get projectbuild dependency records...
    self.assertEqual(3, recorded.all().count())
    self.assertEqual(
        set([dep1, dep2, dep3]),
        set([entry.dependency for entry in recorded.all()]))
    # ...but only the requested two are scheduled for building.
    mock_build_job.delay.assert_has_calls(
        [mock.call(dep1.job.pk, build_id=new_build.build_id),
         mock.call(dep2.job.pk, build_id=new_build.build_id)])
def test_project_build_sends_finished_signal(self):
    """
    When we set the projectbuild status to finished, we should signal
    this via projectbuild_finished.
    """
    # Connect a listener for the signal; it stashes the received
    # projectbuild on the test instance so we can assert on it below.
    @receiver(projectbuild_finished, sender=ProjectBuild)
    def handle_signal(sender, projectbuild, **kwargs):
        self.projectbuild = projectbuild

    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)

    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    # Creating a FINISHED build for every dependency's job should drive
    # the projectbuild to completion and fire the signal.
    # NOTE(review): sibling tests pass build_id=projectbuild.build_key
    # and phase=Build.FINALIZED here — confirm that build_id and the
    # "FINISHED" literal are intentional in this test.
    for job in [dependency1.job, dependency2.job]:
        BuildFactory.create(
            job=job, build_id=projectbuild.build_id, phase="FINISHED")
    self.assertEqual(projectbuild, self.projectbuild)
def test_project_build_detail_view(self):
    """
    The project build detail page should show the build.
    """
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency)
    projectbuild = build_project(self.project, queue_build=False)
    BuildFactory.create(job=dependency.job, build_id=projectbuild.build_key)

    url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": self.project.pk,
                "build_pk": projectbuild.pk})
    response = self.app.get(url, user="******")

    expected_dependencies = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(projectbuild, response.context["projectbuild"])
    self.assertEqual(
        list(expected_dependencies),
        list(response.context["dependencies"]))
    self.assertTrue(
        "archived_items" not in response.context,
        "Project Build has archive items.")
def test_archive_projectbuild_with_prearchived_artifact(self):
    """
    Archiving a project build where one artifact is already archived
    should return only the newly added artifacts.
    """
    project = ProjectFactory.create()
    dep_one = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dep_one)
    dep_two = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dep_two)
    projectbuild = build_project(project, queue_build=False)

    first_build = BuildFactory.create(
        job=dep_one.job, build_id=projectbuild.build_key)
    second_build = BuildFactory.create(
        job=dep_two.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=first_build, filename="artifact1.gz")
    artifact = ArtifactFactory.create(
        build=second_build, filename="artifact2.gz")

    archive = ArchiveFactory.create()
    # Pre-archive the second artifact so only the first counts as new.
    archive.add_artifact(artifact, projectbuild=projectbuild)
    result = archive.archive_projectbuild(projectbuild)

    self.assertEqual(2, archive.items.count())
    self.assertEqual(1, len(result))
def test_cdimage_archiver_policy(self):
    """
    With the cdimage policy the archived file path is quite different.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")

    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_artifact(artifact, projectbuild=projectbuild)
    archived = archive.get_archived_artifact(artifact)

    self.assertEqual(artifact, archived.artifact)
    expected_path = "%s/%s/testing.gz" % (
        slugify(project.name), projectbuild.build_id)
    self.assertEqual(expected_path, archived.archived_path)
    self.assertIsNone(archived.archived_at)
def test_archive_projectbuild(self):
    """
    The archiver can archive an entire project build in one go.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)

    # One build with one artifact per dependency.
    for dep, name in ((dependency1, "artifact1.gz"),
                      (dependency2, "artifact2.gz")):
        dep_build = BuildFactory.create(
            job=dep.job, build_id=projectbuild.build_key)
        ArtifactFactory.create(build=dep_build, filename=name)

    archive = ArchiveFactory.create()
    result = archive.archive_projectbuild(projectbuild)
    self.assertEqual(2, archive.items.count())
    self.assertEqual(2, len(result))
def test_artifact_get_url(self):
    """
    ArchiveArtifact.get_url should produce a valid URL for an artifact
    inside the archive.
    """
    project, dependency = self.create_dependencies()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="file1.gz")
    archive = ArchiveFactory.create(policy="cdimage")

    update_projectbuilds(build)
    create_projectbuilds_for_autotracking(build)
    archive.add_build(build)

    archived = list(archive.get_archived_artifacts_for_build(build))
    self.assertEqual(2, len(archived))
    for entry in archived:
        self.assertEqual(
            "http://example.com/projects/%s" % entry.archived_path,
            entry.get_url())
def test_cdimage_archiver_policy(self):
    """
    The cdimage policy should delegate to the default policy for builds
    that have no projectbuild.
    """
    project, dependency = self.create_dependencies()
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)

    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    policy = CdimageArchivePolicy()
    expected = []
    for entry in archived:
        # Entries without a projectbuild dependency fall back to the
        # default policy path (projectbuild=None).
        if entry.projectbuild_dependency:
            related = entry.projectbuild_dependency.projectbuild
        else:
            related = None
        expected.append(policy.get_path_for_artifact(
            entry.artifact, build=build, dependency=entry.dependency,
            projectbuild=related))
    self.assertEqual(
        "\n".join(expected),
        "\n".join(archived.values_list("archived_path", flat=True)))
def test_build_project_automated_non_autotracked(self):
    """
    For automated, non-autotracked builds each projectbuild dependency
    created should point at the dependency's current build.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    tracked1 = ProjectDependency.objects.create(
        project=project, dependency=dependency1,
        current_build=BuildFactory.create())
    dependency2 = DependencyFactory.create()
    tracked2 = ProjectDependency.objects.create(
        project=project, dependency=dependency2,
        current_build=BuildFactory.create())

    projectbuild = build_project(project, automated=True)
    created = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, created.count())
    self.assertEqual(
        [dependency1, dependency2],
        [entry.dependency for entry in created])
    self.assertEqual(
        [tracked1.current_build, tracked2.current_build],
        [entry.build for entry in created])
def test_project_build_detail_view_with_archived_artifacts(self):
    """
    When archived artifacts exist for the build, the response context
    should include the list of archived items.
    """
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency)
    projectbuild = build_project(self.project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="file1.gz")
    process_build_dependencies(build.pk)

    archive = ArchiveFactory.create(policy="cdimage", default=True)
    # Only the items tied to a projectbuild dependency should show up.
    expected_items = [
        item for item in archive.add_build(build)[artifact]
        if item.projectbuild_dependency]

    url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": self.project.pk,
                "build_pk": projectbuild.pk})
    response = self.app.get(url, user="******")
    self.assertEqual(
        expected_items, list(response.context["archived_items"]))
def test_generate_checksums(self):
    """
    generate_checksums should drive the ssh client to produce an
    sha256sum for the passed-in archived artifact.
    """
    # A project with a build and an archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    build_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=build_dependency)

    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived)
    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_archive_build_projectbuild(self):
    """
    The archiver can archive a build that belongs to a projectbuild.
    """
    project, dependency1, dependency2 = self.create_dependencies(2)
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    archive = ArchiveFactory.create(policy="cdimage")

    first_build = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    first_artifact = ArtifactFactory.create(
        build=first_build, filename="artifact1.gz")
    update_projectbuilds(first_build)
    first_items = archive.add_build(first_build)
    self.assertEqual(2, len(first_items[first_artifact]))

    second_build = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    second_artifact = ArtifactFactory.create(
        build=second_build, filename="artifact2.gz")
    update_projectbuilds(second_build)
    second_items = archive.add_build(second_build)
    self.assertEqual(2, len(second_items[second_artifact]))

    self.assertEqual(4, archive.items.count())
    self.assertEqual(
        2,
        ArchiveArtifact.objects.filter(
            projectbuild_dependency__projectbuild=projectbuild).count())
def test_artifact_get_url(self): """ ArchiveArtifact.get_url should return a valid URL for an artifact within the archive. """ project, dependency = self.create_dependencies() ProjectDependency.objects.create( project=project, dependency=dependency) projectbuild = build_project(project, queue_build=False) build = BuildFactory.create( job=dependency.job, build_id=projectbuild.build_key) artifact = ArtifactFactory.create(build=build, filename="file1.gz") archive = ArchiveFactory.create(policy="cdimage") update_projectbuilds(build) create_projectbuilds_for_autotracking(build) archive.add_build(build) [item1, item2] = list(archive.get_archived_artifacts_for_build(build)) self.assertEqual( "http://example.com/projects/%s" % item1.archived_path, item1.get_url()) self.assertEqual( "http://example.com/projects/%s" % item2.archived_path, item2.get_url())
def create_projectbuilds_for_autotracking(build):
    """
    If we have projects that are autotracking the dependency associated
    with this build, then we should create project builds for them.
    """
    logging.debug("Autocreating projectbuilds for build %s", build)
    build_dependency = get_projectbuild_dependency_for_build(build)
    # At this point, we need to identify Projects which have this
    # dependency and create ProjectBuilds for them.
    for dependency in build.job.dependency_set.all():
        logging.debug("Processing dependency %s", dependency)
        for project_dependency in dependency.projectdependency_set.filter(
                auto_track=True):
            logging.debug("Processing %s", project_dependency)
            # Skip the project whose projectbuild already owns this
            # build; it does not need another projectbuild created.
            if (build_dependency is not None
                and build_dependency.projectbuild.project ==
                    project_dependency.project):
                continue
            logging.debug(" autocreating projectbuild")
            # We have a Project with an auto-tracked element: create an
            # automated projectbuild for it without queueing any jobs.
            projectbuild = build_project(
                project_dependency.project, dependencies=None,
                queue_build=False, automated=True)
            # Point the new projectbuild's dependency record at the
            # build that triggered the autocreation.
            projectbuild_dependency = projectbuild.dependencies.get(
                dependency=dependency)
            projectbuild_dependency.build = build
            projectbuild_dependency.save()
def test_build_project(self):
    """
    build_project creates a build dependency for each project
    dependency and schedules a build of each.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)

    with mock.patch("projects.helpers.build_job") as mock_build_job:
        new_build = build_project(project)

    self.assertIsInstance(new_build, ProjectBuild)
    created = ProjectBuildDependency.objects.filter(projectbuild=new_build)
    self.assertEqual(2, created.count())
    self.assertEqual(
        [dependency1.pk, dependency2.pk],
        list(created.values_list("dependency", flat=True)))
    mock_build_job.delay.assert_has_calls([
        mock.call(dependency1.job.pk, build_id=new_build.build_key),
        mock.call(dependency2.job.pk, build_id=new_build.build_key)])
def test_build_with_several_projectbuild_dependencies(self):
    """
    A build of a dependency that's autotracked by several projects
    should trigger creation of all projectbuilds correctly.
    """
    project1, dependency = self.create_dependencies()
    project2 = ProjectFactory.create()
    ProjectDependency.objects.create(project=project2, dependency=dependency)
    projectbuild = build_project(project1, queue_build=False)
    # Bug fix: this previously read "projectbuild.phase == Build.FINALIZED"
    # — a no-op comparison — so the phase was never actually set before
    # saving. It must be an assignment.
    projectbuild.phase = Build.FINALIZED
    projectbuild.save()

    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    process_build_dependencies(build.pk)

    self.assertEqual(
        [dependency, dependency],
        sorted([b.dependency for b in ProjectBuildDependency.objects.all()]))
    self.assertEqual(
        [build, build],
        sorted([b.build for b in ProjectBuildDependency.objects.all()]))
def test_build_project(self):
    """
    build_project should set up a build dependency per project
    dependency and queue a build for every one of them.
    """
    project = ProjectFactory.create()
    first_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=first_dep)
    second_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=second_dep)

    with mock.patch("projects.helpers.build_job") as mock_build_job:
        result = build_project(project)

    self.assertIsInstance(result, ProjectBuild)
    recorded = ProjectBuildDependency.objects.filter(projectbuild=result)
    self.assertEqual(2, recorded.count())
    self.assertEqual(
        [first_dep.pk, second_dep.pk],
        list(recorded.values_list("dependency", flat=True)))
    expected_calls = [
        mock.call(first_dep.job.pk, build_id=result.build_key),
        mock.call(second_dep.job.pk, build_id=result.build_key)]
    mock_build_job.delay.assert_has_calls(expected_calls)
def test_project_build_status_when_all_dependencies_have_builds(self):
    """
    Once FINALIZED builds exist for every dependency, the projectbuild
    phase should be FINALIZED.
    """
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency2)

    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)
    for job in (dependency1.job, dependency2.job):
        build = BuildFactory.create(
            job=job, build_id=projectbuild.build_key,
            phase=Build.FINALIZED)
        process_build_dependencies(build.pk)

    # Re-fetch to pick up state written by the processing task.
    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual("SUCCESS", projectbuild.status)
    self.assertEqual(Build.FINALIZED, projectbuild.phase)
    self.assertIsNotNone(projectbuild.ended_at)
def test_build_project_automated_non_autotracked(self):
    """
    Automated, non-autotracked builds should record each dependency's
    current build on the created projectbuild dependencies.
    """
    project = ProjectFactory.create()
    first_dep = DependencyFactory.create()
    first_tracked = ProjectDependency.objects.create(
        project=project, dependency=first_dep,
        current_build=BuildFactory.create())
    second_dep = DependencyFactory.create()
    second_tracked = ProjectDependency.objects.create(
        project=project, dependency=second_dep,
        current_build=BuildFactory.create())

    result = build_project(project, automated=True)

    recorded = ProjectBuildDependency.objects.filter(projectbuild=result)
    self.assertEqual(2, recorded.count())
    self.assertEqual(
        [first_dep, second_dep],
        [entry.dependency for entry in recorded])
    self.assertEqual(
        [first_tracked.current_build, second_tracked.current_build],
        [entry.build for entry in recorded])
def test_build_with_several_projectbuild_dependencies(self):
    """
    A dependency autotracked by several projects should get all of its
    projectbuilds created correctly when it is built.
    """
    project1, dependency = self.create_dependencies()
    project2 = ProjectFactory.create()
    ProjectDependency.objects.create(project=project2, dependency=dependency)
    projectbuild = build_project(project1, queue_build=False)

    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    process_build_dependencies(build.pk)

    # Both projects should now have a projectbuild dependency pointing
    # at the same dependency and the same build.
    self.assertEqual(
        [dependency, dependency],
        sorted([entry.dependency
                for entry in ProjectBuildDependency.objects.all()]))
    self.assertEqual(
        [build, build],
        sorted([entry.build
                for entry in ProjectBuildDependency.objects.all()]))
def test_project_build_status_when_all_dependencies_have_builds(self):
    """
    When every dependency has a FINALIZED build, the projectbuild
    should end up FINALIZED with a SUCCESS status.
    """
    first_dep = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=first_dep)
    second_dep = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=second_dep)

    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)
    jobs = [first_dep.job, second_dep.job]
    for job in jobs:
        finished = BuildFactory.create(
            job=job, build_id=projectbuild.build_key,
            phase=Build.FINALIZED)
        process_build_dependencies(finished.pk)

    refreshed = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual("SUCCESS", refreshed.status)
    self.assertEqual(Build.FINALIZED, refreshed.phase)
    self.assertIsNotNone(refreshed.ended_at)
def test_generate_checksums(self):
    """
    generate_checksums should issue the ssh commands that produce an
    sha256sum for the supplied archived artifact.
    """
    # Build up a project with a build and an archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    dependency_record = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=dependency_record)

    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived_artifact)
    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_projectbuild_updates_when_build_created(self):
    """
    When a build arrives from a job tied to a ProjectBuild dependency
    and its build_id matches, the build dependency should be associated
    with that build.
    """
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency2)
    projectbuild = build_project(self.project, queue_build=False)

    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    process_build_dependencies(build1.pk)

    recorded = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, recorded.count())
    # Only dependency1 was built, so only its record gains a build.
    self.assertEqual(build1, recorded.get(dependency=dependency1).build)
    self.assertIsNone(recorded.get(dependency=dependency2).build)
def test_projectbuild_updates_when_build_created(self):
    """
    A build arriving from a dependency's job with a matching build_id
    should get attached to that dependency's projectbuild record.
    """
    first_dep = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=first_dep)
    second_dep = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=second_dep)
    projectbuild = build_project(self.project, queue_build=False)

    incoming = BuildFactory.create(
        job=first_dep.job, build_id=projectbuild.build_key)
    process_build_dependencies(incoming.pk)

    records = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, records.count())
    attached = records.get(dependency=first_dep)
    self.assertEqual(incoming, attached.build)
    untouched = records.get(dependency=second_dep)
    self.assertIsNone(untouched.build)
def test_cdimage_archiver_policy(self):
    """
    For builds without a projectbuild, the cdimage policy should fall
    back to the default policy's path.
    """
    project, dependency = self.create_dependencies()
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)

    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    policy = CdimageArchivePolicy()
    expected_paths = []
    for item in archived:
        related = (item.projectbuild_dependency.projectbuild
                   if item.projectbuild_dependency else None)
        expected_paths.append(policy.get_path_for_artifact(
            item.artifact, build=build, dependency=item.dependency,
            projectbuild=related))
    self.assertEqual(
        "\n".join(expected_paths),
        "\n".join(archived.values_list("archived_path", flat=True)))
def test_generate_checksums(self):
    """
    generate_checksums should hand the build's archived artifact to the
    archive transport's generate_checksums method.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    dependency_record = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=dependency_record)

    # Substitute a logging transport so we can observe the calls.
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        generate_checksums(build.pk)
    self.assertEqual(
        ["START", "Checksums generated for %s" % archived_artifact, "END"],
        transport.log)
def test_process_build_artifacts_with_no_default_archive(self):
    """
    If we have no default archive, we should log the fact that we can't
    automatically archive artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # Deliberately non-default: the task should then skip archiving.
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)
    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
    # We must return the build.pk for further chained calls to work.
    self.assertEqual(build.pk, result.get())
    mock_logging.assert_has_calls([
        mock.call.info(
            "Processing build artifacts from build %s %d",
            build, build.number),
        mock.call.info(
            "No default archiver - build not automatically archived.")
    ])
    # Nothing should have been archived.
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_generate_checksums_no_transport(self):
    """
    If there is no default archive, a checksum cannot be calculated and
    generate_checksums should exit early, logging the reason and still
    returning the build pk for any chained tasks.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # No archive defined
    transport = LoggingTransport(None)
    # Mock the logger
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        return_value = generate_checksums(build.pk)
    # The transport was never used, and the build pk is passed through.
    self.assertEqual([], transport.log)
    self.assertEqual(build.pk, return_value)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing, it should arrange for the artifacts for the provided
    build to be archived in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    # Fake the Jenkins download so the "archived" content is known.
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)
    [item1, item2] = list(archive.get_archived_artifacts_for_build(build))
    # Use open() in a context manager rather than the deprecated file()
    # builtin, which leaked the file handle and is removed in Python 3.
    for item in (item1, item2):
        filename = os.path.join(self.basedir, item.archived_path)
        with open(filename) as archived_file:
            self.assertEqual(archived_file.read(), "Artifact from Jenkins")
def form_valid(self, form):
    """
    Queue a ProjectBuild for the selected project and dependencies,
    notify the requesting user, and redirect to the build's detail page.
    """
    chosen_project = form.cleaned_data["project"]
    new_projectbuild = build_project(
        chosen_project,
        user=self.request.user,
        dependencies=form.cleaned_data["dependencies"])
    messages.add_message(
        self.request, messages.INFO,
        "Build '%s' queued." % new_projectbuild.build_id)
    detail_url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": chosen_project.pk,
                "build_pk": new_projectbuild.pk})
    return HttpResponseRedirect(detail_url)
def test_build_project_assigns_user_correctly(self):
    """
    If we pass a user to build_project, the user is assigned as the user
    for the projectbuild.
    """
    requester = User.objects.create_user("testing")
    project = ProjectFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=DependencyFactory.create())
    projectbuild = build_project(project, user=requester, queue_build=False)
    self.assertEqual(requester, projectbuild.requested_by)
def process_project_dependency(build, dependency, project_dependency):
    """
    Create a new projectbuild, without building the dependencies, and
    associate the projectbuild_dependency for the dependency associated
    with the build we're processing.
    """
    logging.debug(" autocreating projectbuild")
    # We have a Project with an auto-tracked element: create an unqueued,
    # automated projectbuild and record this build against it.
    projectbuild = build_project(
        project_dependency.project, dependencies=None, queue_build=False,
        automated=True)
    projectbuild_dependency = projectbuild.dependencies.get(
        dependency=dependency)
    projectbuild_dependency.build = build
    projectbuild_dependency.save()
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    If the build is complete, and the item being archived is in a
    FINALIZED ProjectBuild, it should use the transport to set the
    current directory correctly.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # Both dependencies are FINALIZED so the projectbuild is complete.
    build1 = BuildFactory.create(job=dependency1.job,
                                 build_id=projectbuild.build_key,
                                 phase=Build.FINALIZED)
    build2 = BuildFactory.create(job=dependency2.job,
                                 build_id=projectbuild.build_key,
                                 phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(build=build2,
                                      filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir,
                                    default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    # Substitute a logging transport so we can assert on the operations
    # performed instead of touching the filesystem.
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # Both builds are complete, we expect this to be made the current
    # build.
    self.assertEqual([
        "START",
        "Link %s to %s" % (item1.archived_path, item2.archived_path),
        "Make %s current" % item2.archived_path,
        "END"
    ], transport.log)
def test_build_project_with_dependency_with_parameters(self):
    """
    build_project should pass the parameters for a dependency to the
    build_job request.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create(parameters="THISVALUE=mako")
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency)
    with mock.patch("projects.helpers.build_job") as mock_build_job:
        new_build = build_project(project)
        self.assertIsInstance(new_build, ProjectBuild)
    # The "THISVALUE=mako" parameter string is parsed into a params dict
    # for the queued job.
    mock_build_job.delay.assert_called_once_with(
        dependency.job.pk, build_id=new_build.build_key,
        params={"THISVALUE": "mako"}
    )
def process_project_dependency(build, dependency, project_dependency):
    """
    Create a new projectbuild, without building the dependencies, and
    associate the projectbuild_dependency for the dependency associated
    with the build we're processing.
    """
    logging.debug(" autocreating projectbuild")
    # The project auto-tracks this dependency: create an unqueued,
    # automated projectbuild and record this build against it.
    new_projectbuild = build_project(
        project_dependency.project,
        dependencies=None,
        queue_build=False,
        automated=True)
    tracked_dependency = new_projectbuild.dependencies.get(
        dependency=dependency)
    tracked_dependency.build = build
    tracked_dependency.save()
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    If the build is complete, and the item being archived is in a
    FINALIZED ProjectBuild, it should use the transport to set the
    current directory correctly.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # Both dependencies are FINALIZED so the projectbuild is complete.
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build2, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    # Substitute a logging transport so we can assert on the operations
    # performed instead of touching the filesystem.
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # Both builds are complete, we expect this to be made the current
    # build.
    self.assertEqual(
        ["START",
         "Link %s to %s" % (item1.archived_path, item2.archived_path),
         "Make %s current" % item2.archived_path,
         "END"],
        transport.log)
def test_archive_artifact_from_non_finalized_projectbuild(self):
    """
    If the item being archived belongs to a ProjectBuild that is NOT yet
    finalized (here only one of the two dependencies has a build), the
    archiving should copy the artifact but must NOT make the archived
    path current -- the transport log has no "Make ... current" entry.

    NOTE(review): the original docstring was copy-pasted from the
    finalized-projectbuild test and described the opposite behaviour.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    # Only dependency1 gets a build; dependency2 remains unbuilt, so the
    # projectbuild as a whole is not finalized.
    build = BuildFactory.create(job=dependency1.job,
                                build_id=projectbuild.build_key,
                                phase=Build.FINALIZED)
    ProjectBuildDependency.objects.create(build=build,
                                          projectbuild=projectbuild,
                                          dependency=dependency1)
    artifact = ArtifactFactory.create(build=build,
                                      filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir,
                                    default=True)
    item = [
        x for x in archive.add_build(artifact.build)[artifact]
        if x.projectbuild_dependency
    ][0]
    # Substitute a logging transport so we can assert on the operations
    # performed instead of touching the filesystem.
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    self.assertEqual([
        "START",
        "%s -> %s root:testing" % (artifact.url, item.archived_path),
        "END"
    ], transport.log)
def test_process_build_artifacts_with_multiple_artifacts(self):
    """
    All the artifacts should be individually linked.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(
        build=build, filename="testing/testing1.txt")
    ArtifactFactory.create(
        build=build, filename="testing/testing2.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    # Mock the network fetch and the chained subtasks so we only assert
    # on how the subtasks are scheduled, not on their effects.
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact %s")
        with mock.patch(
                "archives.tasks.archive_artifact_from_jenkins"
                ) as archive_task:
            with mock.patch(
                    "archives.tasks.link_artifact_in_archive"
                    ) as link_task:
                process_build_artifacts(build.pk)
    # Two artifacts x (dependency + projectbuild dependency) = 4 items.
    [item1, item2, item3, item4] = list(
        archive.get_archived_artifacts_for_build(build).order_by(
            "artifact"))
    self.assertEqual(
        [mock.call(item4.pk), mock.call(item2.pk)],
        archive_task.si.call_args_list)
    self.assertEqual(
        [mock.call(item4.pk, item3.pk), mock.call(item2.pk, item1.pk)],
        link_task.si.call_args_list)
def test_build_project_with_dependency_with_parameters(self):
    """
    build_project should pass the parameters for a dependency to the
    build_job request.
    """
    project = ProjectFactory.create()
    parameterised_dependency = DependencyFactory.create(
        parameters="THISVALUE=mako")
    ProjectDependency.objects.create(
        project=project, dependency=parameterised_dependency)
    with mock.patch("projects.helpers.build_job") as mock_build_job:
        result = build_project(project)
        self.assertIsInstance(result, ProjectBuild)
    # The parameter string is parsed into a params dict for the job.
    mock_build_job.delay.assert_called_once_with(
        parameterised_dependency.job.pk,
        build_id=result.build_key,
        params={"THISVALUE": "mako"})
def test_build_url_with_projectbuild(self):
    """
    build_url should return the url for a project build if the build_id
    corresponds to a ProjectBuild.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    jenkins_build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    expected_url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": project.pk, "build_pk": projectbuild.pk})
    self.assertEqual(expected_url, build_url(jenkins_build.build_id))
def test_archive_build_several_projectbuild_dependencies(self):
    """
    If we archive a build that is used in several projectbuilds, then we
    should get multiple copies of the artifact.
    """
    project1, dependency1, dependency2 = self.create_dependencies(2)
    # dependency1 is shared by a second project, so its artifact must be
    # archived once per projectbuild that uses it.
    project2 = ProjectFactory.create(name="Project 2")
    ProjectDependency.objects.create(project=project2,
                                     dependency=dependency1)
    projectbuild = build_project(project1, queue_build=False)
    build1 = BuildFactory.create(job=dependency1.job,
                                 build_id=projectbuild.build_key)
    build2 = BuildFactory.create(job=dependency2.job,
                                 build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(build=build1, filename="file1.gz")
    artifact2 = ArtifactFactory.create(build=build2, filename="file2.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    update_projectbuilds(build1)
    create_projectbuilds_for_autotracking(build1)
    archive.add_build(build1)
    # build1's artifact: dependency copy + one per projectbuild (2) = 3.
    self.assertEqual(3, archive.items.count())
    update_projectbuilds(build2)
    create_projectbuilds_for_autotracking(build2)
    archive.add_build(build2)
    # build2's artifact adds a dependency copy and one projectbuild copy.
    self.assertEqual(5, archive.items.count())
    artifacts = ArchiveArtifact.objects.all().order_by("archived_path")
    policy = CdimageArchivePolicy()
    self.assertEqual(
        "{dependency1}\n{dependency2}\n"
        "project-1/{build}/file1.gz\nproject-1/{build}/file2.gz\n"
        "project-2/{build}/file1.gz".format(
            dependency1=policy.get_path_for_artifact(
                artifact=artifact1, build=build1, dependency=dependency1),
            dependency2=policy.get_path_for_artifact(
                artifact=artifact2, build=build2, dependency=dependency2),
            build=projectbuild.build_id),
        "\n".join(artifacts.values_list("archived_path", flat=True)))
def test_process_build_artifacts_with_multiple_artifacts(self):
    """
    All the artifacts should be individually linked.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(job=dependency.job,
                                build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing1.txt")
    ArtifactFactory.create(build=build, filename="testing/testing2.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir,
                                    default=True,
                                    policy="cdimage")
    # Mock the network fetch and the chained subtasks so the assertions
    # only cover how the subtasks are scheduled.
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact %s")
        with mock.patch("archives.tasks.archive_artifact_from_jenkins"
                        ) as archive_task:
            with mock.patch("archives.tasks.link_artifact_in_archive"
                            ) as link_task:
                process_build_artifacts(build.pk)
    # Two artifacts x (dependency + projectbuild dependency) = 4 items.
    [item1, item2, item3, item4] = list(
        archive.get_archived_artifacts_for_build(build).order_by(
            "artifact"))
    self.assertEqual(
        [mock.call(item4.pk), mock.call(item2.pk)],
        archive_task.si.call_args_list)
    self.assertEqual(
        [mock.call(item4.pk, item3.pk), mock.call(item2.pk, item1.pk)],
        link_task.si.call_args_list)
def test_build_with_projectbuild_dependencies(self):
    """
    ProjectBuildDependencies should be tied to the newly created build.
    """
    project1, dependency1, dependency2 = self.create_dependencies(2)
    project2 = ProjectFactory.create()
    ProjectDependency.objects.create(project=project2,
                                     dependency=dependency2)
    projectbuild = build_project(project1, queue_build=False)
    # Only dependency1 is built; dependency2's record should exist but
    # have no build attached yet.
    build1 = BuildFactory.create(job=dependency1.job,
                                 build_id=projectbuild.build_key)
    process_build_dependencies(build1.pk)
    dependencies = ProjectBuildDependency.objects.all().order_by(
        "dependency__name")
    self.assertEqual(
        sorted([dependency1, dependency2], key=lambda x: x.name),
        [b.dependency for b in dependencies])
    self.assertEqual([None, build1],
                     sorted([b.build for b in dependencies]))
def create_build_data(self, use_requested_by=True, email=None):
    """
    Create the test data for a build.

    Returns a (projectbuild, build) pair; the build is attributed to a
    newly created user unless use_requested_by is False.
    """
    user = None
    if use_requested_by:
        user = User.objects.create_user("testing", email=email)
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job,
        build_id=projectbuild.build_key,
        requested_by=user)
    ProjectBuildDependency.objects.create(
        build=build,
        projectbuild=projectbuild,
        dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    return projectbuild, build
def test_project_detail(self):
    """
    The detail view should render the project.
    """
    project = ProjectFactory.create()
    # TODO: Work out how to configure DjangoFactory to setup m2m through
    dependency = ProjectDependency.objects.create(
        project=project, dependency=DependencyFactory.create())
    # TODO: It'd be nice if this was driven by ProjectBuildFactory.
    projectbuilds = [
        build_project(project, queue_build=False) for x in range(6)]
    project_url = reverse("project_detail", kwargs={"pk": project.pk})
    response = self.app.get(project_url, user="******")
    self.assertEqual(200, response.status_code)
    self.assertEqual(project, response.context["project"])
    self.assertEqual([dependency], list(response.context["dependencies"]))
    # NOTE(review): only projectbuilds[1:] (the 5 most recent of the 6
    # created) are expected, newest first -- presumably the view limits
    # the list to 5 items; confirm against the view implementation.
    self.assertEqual(
        sorted(projectbuilds[1:], key=lambda x: x.build_id, reverse=True),
        list(response.context["projectbuilds"]))
def test_can_be_archived_with_no_artifacts(self):
    """
    A projectbuild with no artifacts can't be archived.
    """
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=self.project,
                                     dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=self.project,
                                     dependency=dependency2)
    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)
    # NOTE(review): sibling tests pass projectbuild.build_key as
    # build_id; this one passes projectbuild.build_id -- confirm whether
    # that is intentional for this scenario or a copy-paste slip.
    for job in [dependency1.job, dependency2.job]:
        BuildFactory.create(job=job,
                            build_id=projectbuild.build_id,
                            phase=Build.FINALIZED)
    # Re-fetch to pick up any state changed by the build signals.
    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertFalse(projectbuild.can_be_archived)
def test_item_from_artifact_and_archived_artifact(self):
    """
    Return an artifact or archived artifact in a standard format for
    display: a dict exposing build_name, filename, url and archived.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    # Create the artifact and check the display format
    artifact = ArtifactFactory.create(build=build, filename="file1.gz")
    artifact_item = ProjectDetailView.item_from_artifact(artifact)
    self.assertIsNotNone(artifact_item)
    self.assertTrue(isinstance(artifact_item, dict))
    self.assertTrue("build_name" in artifact_item)
    self.assertTrue("filename" in artifact_item)
    self.assertTrue("url" in artifact_item)
    self.assertTrue("archived" in artifact_item)
    # Archive the artifact and check the display format
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(policy="cdimage", default=True)
    # Only the item tied to a projectbuild dependency is of interest.
    items = [x for x in archive.add_build(build)[artifact]
             if x.projectbuild_dependency]
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(len(items), 1)
    archived_item = ProjectDetailView.item_from_archived_artifact(items[0])
    self.assertIsNotNone(archived_item)
    self.assertTrue(isinstance(archived_item, dict))
    self.assertTrue("build_name" in archived_item)
    self.assertTrue("filename" in archived_item)
    self.assertTrue("url" in archived_item)
    self.assertTrue("archived" in archived_item)