def test_project_build_status_when_all_dependencies_have_builds(self):
    """
    A projectbuild whose dependencies all have FINALIZED builds should
    end up FINALIZED itself, with a SUCCESS status and an ended_at stamp.
    """
    first_dependency = DependencyFactory.create()
    second_dependency = DependencyFactory.create()
    for dependency in (first_dependency, second_dependency):
        ProjectDependency.objects.create(
            project=self.project, dependency=dependency)

    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)

    # Record a finalized build for every dependency job.
    for job in (first_dependency.job, second_dependency.job):
        finalized = BuildFactory.create(
            job=job, build_id=projectbuild.build_key, phase=Build.FINALIZED)
        process_build_dependencies(finalized.pk)

    projectbuild = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual("SUCCESS", projectbuild.status)
    self.assertEqual(Build.FINALIZED, projectbuild.phase)
    self.assertIsNotNone(projectbuild.ended_at)
def test_archive_projectbuild(self):
    """
    archive_projectbuild should archive every artifact belonging to a
    project build and report each item it added.
    """
    project = ProjectFactory.create()
    dependencies = []
    for _ in range(2):
        dependency = DependencyFactory.create()
        ProjectDependency.objects.create(
            project=project, dependency=dependency)
        dependencies.append(dependency)

    projectbuild = build_project(project, queue_build=False)
    for index, dependency in enumerate(dependencies, start=1):
        dependency_build = BuildFactory.create(
            job=dependency.job, build_id=projectbuild.build_key)
        ArtifactFactory.create(
            build=dependency_build, filename="artifact%d.gz" % index)

    archive = ArchiveFactory.create()
    result = archive.archive_projectbuild(projectbuild)

    # Both artifacts were archived, and both are reported as new.
    self.assertEqual(2, archive.items.count())
    self.assertEqual(2, len(result))
def test_project_build_status_when_all_dependencies_have_builds(self):
    """
    When FINALIZED builds exist for every dependency, the projectbuild
    phase becomes FINALIZED, its status SUCCESS, and ended_at is set.
    """
    dep_a = DependencyFactory.create()
    ProjectDependency.objects.create(project=self.project, dependency=dep_a)
    dep_b = DependencyFactory.create()
    ProjectDependency.objects.create(project=self.project, dependency=dep_b)

    from projects.helpers import build_project
    projectbuild = build_project(self.project, queue_build=False)

    for job in [dep_a.job, dep_b.job]:
        new_build = BuildFactory.create(
            job=job, build_id=projectbuild.build_key, phase=Build.FINALIZED)
        process_build_dependencies(new_build.pk)

    refreshed = ProjectBuild.objects.get(pk=projectbuild.pk)
    self.assertEqual("SUCCESS", refreshed.status)
    self.assertEqual(Build.FINALIZED, refreshed.phase)
    self.assertIsNotNone(refreshed.ended_at)
def test_projectbuild_updates_when_build_created(self):
    """
    A build arriving with the projectbuild's build_key should be attached
    to the matching ProjectBuildDependency, while the other dependency
    stays without a build.
    """
    tracked = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=tracked)
    untracked = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=untracked)

    projectbuild = build_project(self.project, queue_build=False)
    incoming = BuildFactory.create(
        job=tracked.job, build_id=projectbuild.build_key)
    process_build_dependencies(incoming.pk)

    build_dependencies = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, build_dependencies.count())
    self.assertEqual(
        incoming, build_dependencies.get(dependency=tracked).build)
    self.assertIsNone(build_dependencies.get(dependency=untracked).build)
def test_auto_track_dependency_triggers_project_build_creation(self):
    """
    Recording a build of an auto-tracked project dependency should
    trigger the creation of a new ProjectBuild for that project, with
    the new build attached to the triggering dependency and the
    pre-existing build attached to the other.
    """
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency1)

    dependency2 = DependencyFactory.create()
    # Use the Build.FINALIZED constant rather than the raw string
    # "FINISHED": the duplicate of this test elsewhere in the file uses
    # Build.FINALIZED, and a mismatched phase string would not match the
    # phase processed by the dependency-tracking code.
    existing_build = BuildFactory.create(
        job=dependency2.job, phase=Build.FINALIZED)
    ProjectDependency.objects.create(
        project=self.project, dependency=dependency2,
        current_build=existing_build)

    self.assertEqual(
        0, ProjectBuild.objects.filter(project=self.project).count())

    build = BuildFactory.create(job=dependency1.job, phase=Build.FINALIZED)
    process_build_dependencies(build.pk)

    self.assertEqual(
        1, ProjectBuild.objects.filter(project=self.project).count())
    projectbuild = ProjectBuild.objects.get(project=self.project)
    self.assertEqual(
        2, ProjectBuildDependency.objects.filter(
            projectbuild=projectbuild).count())

    build_dependency1 = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=dependency1)
    self.assertEqual(build, build_dependency1.build)
    build_dependency2 = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=dependency2)
    self.assertEqual(existing_build, build_dependency2.build)
def test_projectbuild_updates_when_build_created(self):
    """
    When a build carrying the projectbuild's build_key is processed,
    only the dependency owning that job gains a build reference.
    """
    dependency1 = DependencyFactory.create()
    dependency2 = DependencyFactory.create()
    for dependency in (dependency1, dependency2):
        ProjectDependency.objects.create(
            project=self.project, dependency=dependency)

    projectbuild = build_project(self.project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    process_build_dependencies(build1.pk)

    recorded = ProjectBuildDependency.objects.filter(
        projectbuild=projectbuild)
    self.assertEqual(2, recorded.count())
    self.assertEqual(build1, recorded.get(dependency=dependency1).build)
    self.assertIsNone(recorded.get(dependency=dependency2).build)
def test_archive_projectbuild_with_prearchived_artifact(self):
    """
    If one of a projectbuild's artifacts is already in the archive,
    archiving the projectbuild records everything but returns only the
    artifacts that were newly added.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)

    projectbuild = build_project(project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build1, filename="artifact1.gz")
    already_archived = ArtifactFactory.create(
        build=build2, filename="artifact2.gz")

    archive = ArchiveFactory.create()
    # Pre-archive one artifact before archiving the whole projectbuild.
    archive.add_artifact(already_archived, projectbuild=projectbuild)
    result = archive.archive_projectbuild(projectbuild)

    self.assertEqual(2, archive.items.count())
    self.assertEqual(1, len(result))
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    Linking an archived artifact whose projectbuild is fully FINALIZED
    should also make the linked copy the "current" build via the
    transport.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)

    projectbuild = build_project(project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build2, filename="testing/testing.txt")

    # Wire both builds up to the projectbuild's dependency records.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)

    # Every dependency build is finalized, so the linked item is also
    # promoted to the current build.
    self.assertEqual(
        ["START",
         "Link %s to %s" % (item1.archived_path, item2.archived_path),
         "Make %s current" % item2.archived_path,
         "END"],
        transport.log)
def test_archive_artifact_from_finalized_dependency_build(self):
    """
    Archiving an artifact from a plain dependency build should copy it
    via the transport and then make it the current build.
    """
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    dependency_build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=dependency_build, filename="testing/testing.txt")
    [item] = archive.add_build(artifact.build)[artifact]

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)

    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, item.archived_path),
         "Make %s current" % item.archived_path,
         "END"],
        transport.log)
def test_archive_artifact_from_jenkins(self):
    """
    archive_artifact_from_jenkins should download the artifact content
    from Jenkins, store it via the transport, and record the number of
    bytes archived.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    items = archive.add_build(artifact.build)

    fakefile = StringIO(u"Artifact from Jenkins")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.return_value = fakefile
        archive_artifact_from_jenkins(items[artifact][0].pk)

    [item] = list(archive.get_archived_artifacts_for_build(build))
    filename = os.path.join(self.basedir, item.archived_path)
    self.assertEqual(file(filename).read(), "Artifact from Jenkins")
    # 21 == len("Artifact from Jenkins")
    self.assertEqual(21, item.archived_size)
def test_link_artifact_in_archive(self):
    """
    link_artifact_in_archive should link the two archived items through
    the transport, make the target current, and leave the recorded
    archived size untouched.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    build = BuildFactory.create(job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # Wire the build up to its projectbuild dependency records.
    process_build_dependencies(build.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    item1.archived_size = 1000
    item1.save()

    transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)

    transport.link_filename_to_filename.assert_called_once_with(
        item1.archived_path, item2.archived_path)
    transport.link_to_current.assert_called_once_with(item2.archived_path)
    item1 = ArchiveArtifact.objects.get(pk=item1.pk)
    self.assertEqual(1000, item1.archived_size)
def test_process_build_artifacts_with_no_default_archive(self):
    """
    Without a default archive, process_build_artifacts logs that the
    build cannot be automatically archived and archives nothing.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)

    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)

    # The task must hand build.pk on for further chained calls.
    self.assertEqual(build.pk, result.get())
    mock_logging.assert_has_calls([
        mock.call.info(
            "Processing build artifacts from build %s %d",
            build, build.number),
        mock.call.info(
            "No default archiver - build not automatically archived.")
    ])
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_process_build_artifacts(self):
    """
    process_build_artifacts (chained from the Jenkins postbuild
    processing) should archive the build's artifacts into the default
    archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # Connect the build to its projectbuild dependency records first.
    process_build_dependencies(build.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)

    [item1, item2] = list(archive.get_archived_artifacts_for_build(build))
    # Both archived copies should contain the downloaded payload.
    for item in (item1, item2):
        filename = os.path.join(self.basedir, item.archived_path)
        self.assertEqual(file(filename).read(), "Artifact from Jenkins")
def test_generate_checksums_no_transport(self):
    """
    generate_checksums should exit early — logging the fact — when no
    default archive exists, since no checksum can then be generated.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")

    # Deliberately no archive defined.
    transport = LoggingTransport(None)
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        return_value = generate_checksums(build.pk)

    self.assertEqual([], transport.log)
    self.assertEqual(build.pk, return_value)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_archive_artifact_from_jenkins_transport_lifecycle(self):
    """
    archive_artifact_from_jenkins should drive the transport through its
    full lifecycle (start, copy, make current, end) and stamp the item's
    archived_at afterwards.
    """
    archive = ArchiveFactory.create(transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive.add_build(artifact.build)

    [item] = list(archive.get_archived_artifacts_for_build(build))
    self.assertIsNone(item.archived_at)

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)

    [item] = list(archive.get_archived_artifacts_for_build(build))
    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, item.archived_path),
         "Make %s current" % item.archived_path,
         "END"],
        transport.log)
    self.assertIsNotNone(item.archived_at)
def test_cdimage_archiver_policy(self):
    """
    The cdimage policy should archive artifacts under
    "<project-slug>/<projectbuild build_id>/<filename>".
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")

    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_artifact(artifact, projectbuild=projectbuild)

    archived = archive.get_archived_artifact(artifact)
    self.assertEqual(artifact, archived.artifact)
    expected_path = "%s/%s/testing.gz" % (
        slugify(project.name), projectbuild.build_id)
    self.assertEqual(expected_path, archived.archived_path)
    self.assertIsNone(archived.archived_at)
def test_link_artifact_in_archive(self):
    """
    The link_artifact_in_archive task links the source item to the
    target through the transport, promotes the target to current, and
    preserves the stored archive size.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    finalized_build = BuildFactory.create(
        job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=finalized_build, filename="testing/testing.txt")
    # Connect artifacts to the projectbuild dependency records.
    process_build_dependencies(finalized_build.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [source, target] = archive.add_build(artifact.build)[artifact]
    source.archived_size = 1000
    source.save()

    fake_transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(
            Archive, "get_transport", return_value=fake_transport):
        link_artifact_in_archive(source.pk, target.pk)

    fake_transport.link_filename_to_filename.assert_called_once_with(
        source.archived_path, target.archived_path)
    fake_transport.link_to_current.assert_called_once_with(
        target.archived_path)
    source = ArchiveArtifact.objects.get(pk=source.pk)
    self.assertEqual(1000, source.archived_size)
def test_generate_checksums(self):
    """
    SshTransport.generate_checksums should run a single remote command
    that cd's into the artifact's directory and appends its sha256sum
    to SHA256SUMS.
    """
    # A project with a build and an archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)

    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived_artifact)

    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_generate_checksums(self):
    """
    generate_checksums issues one ssh command: change into the archived
    artifact's directory and append its sha256 digest to SHA256SUMS.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    dependency_build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    pb_dependency = ProjectBuildDependency.objects.create(
        build=dependency_build, projectbuild=projectbuild,
        dependency=dependency)
    artifact = ArtifactFactory.create(
        build=dependency_build, filename="artifact_filename")
    archived = ArchiveArtifact.objects.create(
        build=dependency_build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=pb_dependency)

    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived)

    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_process_build_artifacts_with_no_default_archive(self):
    """
    When no archive is flagged as default, the task logs that the build
    won't be auto-archived, archives nothing, and still returns the
    build pk for the rest of the task chain.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    jenkins_build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(
        build=jenkins_build, filename="testing/testing.txt")
    non_default_archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)

    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(jenkins_build.pk)

    # Chained tasks rely on the pk being passed through.
    self.assertEqual(jenkins_build.pk, result.get())
    mock_logging.assert_has_calls([
        mock.call.info(
            "Processing build artifacts from build %s %d",
            jenkins_build, jenkins_build.number),
        mock.call.info(
            "No default archiver - build not automatically archived.")
    ])
    self.assertEqual(
        [],
        list(non_default_archive.get_archived_artifacts_for_build(
            jenkins_build)))
def test_process_build_artifacts(self):
    """
    Chained from the Jenkins postbuild flow, process_build_artifacts
    copies each of the build's artifacts into the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    jenkins_build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(
        build=jenkins_build, filename="testing/testing.txt")
    # Attach the build to its projectbuild dependency records first.
    process_build_dependencies(jenkins_build.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(jenkins_build.pk)

    [first, second] = list(
        archive.get_archived_artifacts_for_build(jenkins_build))
    for archived in (first, second):
        path = os.path.join(self.basedir, archived.archived_path)
        self.assertEqual(file(path).read(), "Artifact from Jenkins")
def test_generate_checksums(self):
    """
    The generate_checksums task should hand the build's archived
    artifact to the default archive's transport for checksumming.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        generate_checksums(build.pk)

    self.assertEqual(
        ["START",
         "Checksums generated for %s" % archived_artifact,
         "END"],
        transport.log)
def test_generate_checksums_no_transport(self):
    """
    With no default archive configured there is no transport to
    checksum with: generate_checksums logs this and hands the build pk
    back untouched.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    jenkins_build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=jenkins_build, projectbuild=projectbuild,
        dependency=dependency)
    ArtifactFactory.create(
        build=jenkins_build, filename="testing/testing.txt")

    # No archive was created, so the transport should never be used.
    transport = LoggingTransport(None)
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        returned = generate_checksums(jenkins_build.pk)

    self.assertEqual([], transport.log)
    self.assertEqual(jenkins_build.pk, returned)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_archive_artifact_from_jenkins(self):
    """
    The task fetches the artifact's content from Jenkins, writes it via
    the transport, and records how many bytes were archived.
    """
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    jenkins_build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=jenkins_build, filename="testing/testing.txt")
    added = archive.add_build(artifact.build)

    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.return_value = StringIO(
            u"Artifact from Jenkins")
        archive_artifact_from_jenkins(added[artifact][0].pk)

    [archived] = list(
        archive.get_archived_artifacts_for_build(jenkins_build))
    on_disk = os.path.join(self.basedir, archived.archived_path)
    self.assertEqual(file(on_disk).read(), "Artifact from Jenkins")
    # The fake payload is 21 bytes long.
    self.assertEqual(21, archived.archived_size)
def test_archive_artifact_from_jenkins_transport_lifecycle(self):
    """
    The task must bracket the copy with START/END on the transport,
    promote the new path to current, and set archived_at afterwards.
    """
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    jenkins_build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=jenkins_build, filename="testing/testing.txt")
    archive.add_build(artifact.build)

    [pending] = list(
        archive.get_archived_artifacts_for_build(jenkins_build))
    self.assertIsNone(pending.archived_at)

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(pending.pk)

    [archived] = list(
        archive.get_archived_artifacts_for_build(jenkins_build))
    expected_log = [
        "START",
        "%s -> %s root:testing" % (artifact.url, archived.archived_path),
        "Make %s current" % archived.archived_path,
        "END",
    ]
    self.assertEqual(expected_log, transport.log)
    self.assertIsNotNone(archived.archived_at)
def test_archive_artifact_from_finalized_dependency_build(self):
    """
    Archiving the only artifact of a stand-alone dependency build logs
    the copy and then promotes the new path to "current".
    """
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir)
    dependency = DependencyFactory.create()
    jenkins_build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        build=jenkins_build, filename="testing/testing.txt")
    [pending] = archive.add_build(artifact.build)[artifact]

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(pending.pk)

    expected_log = [
        "START",
        "%s -> %s root:testing" % (artifact.url, pending.archived_path),
        "Make %s current" % pending.archived_path,
        "END",
    ]
    self.assertEqual(expected_log, transport.log)
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    With every dependency build FINALIZED, linking the archived item
    should also promote the link target to the current build.
    """
    project = ProjectFactory.create()
    first_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=first_dep)
    second_dep = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=second_dep)

    projectbuild = build_project(project, queue_build=False)
    first_build = BuildFactory.create(
        job=first_dep.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    second_build = BuildFactory.create(
        job=second_dep.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=second_build, filename="testing/testing.txt")
    # Connect both builds to the projectbuild's dependency records.
    process_build_dependencies(first_build.pk)
    process_build_dependencies(second_build.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [source, target] = archive.add_build(artifact.build)[artifact]

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(source.pk, target.pk)

    expected_log = [
        "START",
        "Link %s to %s" % (source.archived_path, target.archived_path),
        "Make %s current" % target.archived_path,
        "END",
    ]
    self.assertEqual(expected_log, transport.log)
def test_archive_artifact_from_non_finalized_projectbuild(self):
    """
    When the projectbuild is not yet finalized (the second dependency
    has no build), archiving the artifact copies it into the archive
    but does NOT make it the current build.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)

    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency1)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # Wire the artifacts up to the projectbuild dependency records.
    process_build_dependencies(build.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    candidates = archive.add_build(artifact.build)[artifact]
    item = [x for x in candidates if x.projectbuild_dependency][0]

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)

    # No "Make ... current" entry: the projectbuild isn't finalized.
    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, item.archived_path),
         "END"],
        transport.log)
def test_archive_artifact_from_non_finalized_projectbuild(self):
    """
    Archiving an artifact whose projectbuild still has an unbuilt
    dependency copies the file but skips the "make current" step.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)

    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency1)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # Connect the build's artifacts to the projectbuild records.
    process_build_dependencies(build.pk)

    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    item = next(
        candidate
        for candidate in archive.add_build(artifact.build)[artifact]
        if candidate.projectbuild_dependency)

    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)

    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, item.archived_path),
         "END"],
        transport.log)
def create_dependencies(self, count=1, name="Project 1"):
    """
    Create a project with `count` linked dependencies.

    Returns a list whose first element is the project, followed by the
    created dependencies in creation order.
    """
    project = ProjectFactory.create(name=name)
    created = [project]
    for _ in range(count):
        dependency = DependencyFactory.create()
        ProjectDependency.objects.create(
            project=project, dependency=dependency)
        created.append(dependency)
    return created
def create_dependencies(self, count=1, name="Project 1"):
    """
    Utility function to create projects and dependencies.

    The returned list starts with the project itself, followed by
    each dependency that was linked to it.
    """
    project = ProjectFactory.create(name=name)
    result = [project]
    index = 0
    while index < count:
        dependency = DependencyFactory.create()
        ProjectDependency.objects.create(
            project=project, dependency=dependency)
        result.append(dependency)
        index += 1
    return result
def test_auto_track_dependency_triggers_project_build_creation(self):
    """
    If we record a build of a project dependency that is auto-tracked,
    then this should trigger the creation of a new ProjectBuild for
    that project.
    """
    dep_one = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=self.project, dependency=dep_one)
    dep_two = DependencyFactory.create()
    prior_build = BuildFactory.create(
        job=dep_two.job, phase=Build.FINALIZED)
    ProjectDependency.objects.create(
        project=self.project, dependency=dep_two,
        current_build=prior_build)
    # No project builds exist yet.
    project_builds = ProjectBuild.objects.filter(project=self.project)
    self.assertEqual(0, project_builds.count())
    new_build = BuildFactory.create(job=dep_one.job, phase=Build.FINALIZED)
    process_build_dependencies(new_build.pk)
    # Processing the new build should have created exactly one.
    self.assertEqual(1, project_builds.count())
    projectbuild = ProjectBuild.objects.get(project=self.project)
    self.assertEqual(
        2,
        ProjectBuildDependency.objects.filter(
            projectbuild=projectbuild).count())
    recorded_one = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=dep_one)
    self.assertEqual(new_build, recorded_one.build)
    recorded_two = ProjectBuildDependency.objects.get(
        projectbuild=projectbuild, dependency=dep_two)
    self.assertEqual(prior_build, recorded_two.build)
def test_get_archived_artifact_artifact_not_in_archive(self):
    """
    If the specified build is not recorded in the archive then we
    should get an empty set back.
    """
    build = BuildFactory.create(job=DependencyFactory.create().job)
    ArtifactFactory.create(build=build)
    archive = ArchiveFactory.create()
    # The build was never added to the archive, so nothing comes back.
    archived = archive.get_archived_artifacts_for_build(build)
    self.assertEqual(0, archived.count())
def test_build_dependency(self):
    """
    We can build a dependency through the API.
    """
    self.client.force_authenticate(user=self.user)
    dependency = DependencyFactory.create()
    endpoint = reverse(
        "dependency-build-dependency", kwargs={"pk": dependency.pk})
    with mock.patch("projects.helpers.build_job") as build_job_mock:
        response = self.client.post(endpoint)
    self.assertEqual(status.HTTP_202_ACCEPTED, response.status_code)
    # The build task must have been queued for the dependency's job.
    build_job_mock.delay.assert_called_once_with(dependency.job.pk)
def test_build_dependency_already_building(self):
    """
    If the Dependency appears to be already building, then we should
    still respond with 202 ACCEPTED but not queue another build of the
    job.
    """
    self.client.force_authenticate(user=self.user)
    dependency = DependencyFactory.create()
    # An existing Build for the job makes the dependency look like it is
    # already building.
    BuildFactory.create(job=dependency.job)
    url = reverse("dependency-build-dependency",
                  kwargs={"pk": dependency.pk})
    with mock.patch("projects.helpers.build_job") as build_job_mock:
        response = self.client.post(url)
        self.assertEqual(status.HTTP_202_ACCEPTED, response.status_code)
        # No new build should have been scheduled.
        self.assertFalse(build_job_mock.delay.called)
def test_get_archived_artifacts_for_build(self):
    """
    We can fetch the artifacts that get added from a build.
    """
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")
    archive = ArchiveFactory.create()
    archive.add_build(build)
    pairs = [
        (entry.artifact, entry.build)
        for entry in archive.get_archived_artifacts_for_build(build)]
    self.assertEqual([(artifact, build)], pairs)
def test_build_dependency(self):
    """
    We can build a dependency through the API.
    """
    self.client.force_authenticate(user=self.user)
    dependency = DependencyFactory.create()
    url = reverse(
        "dependency-build-dependency", kwargs={"pk": dependency.pk})
    patcher = mock.patch("projects.helpers.build_job")
    with patcher as build_job_mock:
        response = self.client.post(url)
        # The request is accepted and the job build is queued once.
        self.assertEqual(status.HTTP_202_ACCEPTED, response.status_code)
        build_job_mock.delay.assert_called_once_with(dependency.job.pk)
def test_get_archived_artifacts_for_build(self):
    """
    We can fetch the artifacts that get added from a build.
    """
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")
    archive = ArchiveFactory.create()
    archive.add_build(build)
    recorded = list(archive.get_archived_artifacts_for_build(build))
    observed = [(item.artifact, item.build) for item in recorded]
    expected = [(artifact, build)]
    self.assertEqual(expected, observed)
def test_process_build_artifacts_with_multiple_artifacts(self):
    """
    All the artifacts should be individually linked.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    # Two artifacts on the same build: each should get its own archive
    # item and its own link task.
    ArtifactFactory.create(
        build=build, filename="testing/testing1.txt")
    ArtifactFactory.create(
        build=build, filename="testing/testing2.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    # Stub out network access and the downstream Celery tasks so we only
    # observe how process_build_artifacts schedules them.
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact %s")
        with mock.patch(
            "archives.tasks.archive_artifact_from_jenkins"
        ) as archive_task:
            with mock.patch(
                "archives.tasks.link_artifact_in_archive"
            ) as link_task:
                process_build_artifacts(build.pk)
    # Four archive items: a dependency-level and a projectbuild-level
    # entry per artifact, ordered by artifact.
    [item1, item2, item3, item4] = list(
        archive.get_archived_artifacts_for_build(build).order_by(
            "artifact"))
    self.assertEqual(
        [mock.call(item4.pk), mock.call(item2.pk)],
        archive_task.si.call_args_list)
    self.assertEqual(
        [mock.call(item4.pk, item3.pk), mock.call(item2.pk, item1.pk)],
        link_task.si.call_args_list)
def test_build_url_with_projectbuild(self):
    """
    build_url should return the url for a project build if the build_id
    corresponds to a ProjectBuild.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    expected = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": project.pk, "build_pk": projectbuild.pk})
    self.assertEqual(expected, build_url(build.build_id))
def test_build_url_with_projectbuild(self):
    """
    build_url should return the url for a project build if the build_id
    corresponds to a ProjectBuild.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    # The Build must carry the projectbuild's build_key (not build_id) so
    # that build_url can resolve it back to the ProjectBuild — this
    # mirrors the sibling copy of this test and every other test in the
    # file that links a Build to a ProjectBuild.
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    expected_url = reverse(
        "project_projectbuild_detail",
        kwargs={"project_pk": project.pk, "build_pk": projectbuild.pk})
    # build_url takes the build identifier string, not the Build object.
    self.assertEqual(expected_url, build_url(build.build_id))
def test_get_path_for_artifact(self):
    """
    The default archive policy should return the path from the
    artifact url.
    """
    dependency = DependencyFactory.create(name="My Dependency")
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(filename="testing.img", build=build)
    policy = DefaultPolicy()
    # Path embeds the build date and its epoch seconds.
    day = build.created_at.strftime("%Y-%m-%d")
    seconds = time.mktime(build.created_at.timetuple())
    timestamp = "%s-%s" % (day, seconds)
    expected = "my-dependency/%s/testing.img" % timestamp
    self.assertEqual(
        expected,
        policy.get_path_for_artifact(
            artifact, dependency=dependency, build=build))
def test_get_path_for_artifact(self):
    """
    The default archive policy should return the path from the
    artifact url.
    """
    dependency = DependencyFactory.create(name="My Dependency")
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(
        filename="testing.img", build=build)
    policy = DefaultPolicy()
    stamp = "%s-%s" % (
        build.created_at.strftime("%Y-%m-%d"),
        time.mktime(build.created_at.timetuple()))
    generated = policy.get_path_for_artifact(
        artifact, dependency=dependency, build=build)
    self.assertEqual("my-dependency/%s/testing.img" % stamp, generated)
def test_process_build_artifacts_with_multiple_artifacts(self):
    """
    All the artifacts should be individually linked.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project,
                                     dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(job=dependency.job,
                                build_id=projectbuild.build_key)
    # Two artifacts on one build: each should be archived and linked
    # separately.
    ArtifactFactory.create(build=build, filename="testing/testing1.txt")
    ArtifactFactory.create(build=build, filename="testing/testing2.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(transport="local",
                                    basedir=self.basedir, default=True,
                                    policy="cdimage")
    # Stub network access and the Celery tasks so only the scheduling
    # behaviour of process_build_artifacts is observed.
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact %s")
        with mock.patch("archives.tasks.archive_artifact_from_jenkins"
                        ) as archive_task:
            with mock.patch("archives.tasks.link_artifact_in_archive"
                            ) as link_task:
                process_build_artifacts(build.pk)
    # Four archive items exist (two per artifact), ordered by artifact;
    # the tasks are scheduled against items 4/2 with links to 3/1.
    [item1, item2, item3, item4] = list(
        archive.get_archived_artifacts_for_build(build).order_by(
            "artifact"))
    self.assertEqual(
        [mock.call(item4.pk), mock.call(item2.pk)],
        archive_task.si.call_args_list)
    self.assertEqual(
        [mock.call(item4.pk, item3.pk), mock.call(item2.pk, item1.pk)],
        link_task.si.call_args_list)
def create_build_data(self, use_requested_by=True, email=None):
    """
    Create the test data for a build.

    Returns a (projectbuild, build) pair wired together through a
    ProjectBuildDependency, with one artifact on the build.
    """
    user = None
    if use_requested_by:
        user = User.objects.create_user("testing", email=email)
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key,
        requested_by=user)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    return projectbuild, build
def test_new_build_with_no_auto_track_build(self):
    """
    If we create a new build for a dependency of a Project, and the
    ProjectDependency is not set to auto_track then the current_build
    should not be updated.
    """
    first_build = BuildFactory.create()
    dependency = DependencyFactory.create(job=first_build.job)
    tracked = ProjectDependency.objects.create(
        project=self.project, dependency=dependency, auto_track=False)
    tracked.current_build = first_build
    tracked.save()
    second_build = BuildFactory.create(job=first_build.job)
    process_build_dependencies(second_build.pk)
    # Reload from the database to observe any change.
    tracked = ProjectDependency.objects.get(pk=tracked.pk)
    self.assertEqual(first_build, tracked.current_build)
def test_auto_track_build(self):
    """
    If we create a new build for a dependency of a Project, and the
    ProjectDependency is set to auto_track then the current_build
    should be updated to reflect the new build.
    """
    first_build = BuildFactory.create()
    dependency = DependencyFactory.create(job=first_build.job)
    tracked = ProjectDependency.objects.create(
        project=self.project, dependency=dependency)
    tracked.current_build = first_build
    tracked.save()
    second_build = BuildFactory.create(job=first_build.job)
    result = process_build_dependencies(second_build.pk)
    # Reload from the database to see the tracked build advance.
    tracked = ProjectDependency.objects.get(pk=tracked.pk)
    self.assertEqual(second_build, tracked.current_build)
    self.assertEqual(second_build.pk, result)
def test_new_build_with_no_auto_track_build(self):
    """
    If we create a new build for a dependency of a Project, and the
    ProjectDependency is not set to auto_track then the current_build
    should not be updated.
    """
    original_build = BuildFactory.create()
    dependency = DependencyFactory.create(job=original_build.job)
    link = ProjectDependency.objects.create(
        project=self.project,
        dependency=dependency,
        auto_track=False,
    )
    link.current_build = original_build
    link.save()
    newer_build = BuildFactory.create(job=original_build.job)
    process_build_dependencies(newer_build.pk)
    # Re-fetch the row: with auto_track off it must still point at the
    # original build.
    refreshed = ProjectDependency.objects.get(pk=link.pk)
    self.assertEqual(original_build, refreshed.current_build)
def create_build_data(self, use_requested_by=True, email=None):
    """
    Create the test data for a build.

    Builds a project with a single dependency, a project build keyed to
    it, and one artifact; returns (projectbuild, build).
    """
    if use_requested_by:
        requester = User.objects.create_user("testing", email=email)
    else:
        requester = None
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job,
        build_id=projectbuild.build_key,
        requested_by=requester)
    ProjectBuildDependency.objects.create(
        build=build,
        projectbuild=projectbuild,
        dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    return projectbuild, build
def test_generate_checksums(self):
    """
    generate_checksums should call the generate_checksums method on the
    transport from the archive with the build to generate the checksums
    for.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    pb_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=pb_dependency)
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        generate_checksums(build.pk)
    expected_log = [
        "START",
        "Checksums generated for %s" % archived_artifact,
        "END",
    ]
    self.assertEqual(expected_log, transport.log)
def test_auto_track_build(self):
    """
    If we create a new build for a dependency of a Project, and the
    ProjectDependency is set to auto_track then the current_build
    should be updated to reflect the new build.
    """
    original_build = BuildFactory.create()
    dependency = DependencyFactory.create(job=original_build.job)
    link = ProjectDependency.objects.create(
        project=self.project, dependency=dependency)
    link.current_build = original_build
    link.save()
    newer_build = BuildFactory.create(job=original_build.job)
    returned_pk = process_build_dependencies(newer_build.pk)
    # Re-fetch the row: auto-tracking must have advanced current_build.
    refreshed = ProjectDependency.objects.get(pk=link.pk)
    self.assertEqual(newer_build, refreshed.current_build)
    self.assertEqual(newer_build.pk, returned_pk)
def test_get_path_for_artifact(self):
    """
    The CdimageArchivePolicy should calculate a cdimage-like path using
    the project name and build id.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(filename="thing.txt", build=build)
    policy = CdimageArchivePolicy()
    expected = "%s/%s/thing.txt" % ("my-test-project", build.build_id)
    self.assertEqual(
        expected,
        policy.get_path_for_artifact(artifact, projectbuild=projectbuild))
def test_get_path_for_artifact(self):
    """
    The CdimageArchivePolicy should calculate a cdimage-like path using
    the project name and build id.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(filename="thing.txt", build=build)
    generated = CdimageArchivePolicy().get_path_for_artifact(
        artifact, dependency=dependency, projectbuild=projectbuild)
    self.assertEqual(
        "%s/%s/thing.txt" % ("my-test-project", build.build_id),
        generated)
def test_get_path_for_artifact_with_dependency(self):
    """
    If the cdimage archiver gets a dependency with no projectbuild, it
    should delegate to the DefaultPolicy to generate the archive
    filename.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(filename="thing.txt", build=build)
    policy = CdimageArchivePolicy()
    generated = policy.get_path_for_artifact(
        artifact, dependency=dependency, projectbuild=projectbuild)
    expected = "%s/%s/thing.txt" % ("my-test-project", build.build_id)
    self.assertEqual(expected, generated)
def test_cdimage_archiver_policy_with_only_dependency_build(self):
    """
    If we only build a dependency with no project builds, then the
    cdimage archiver should delegate to the default policy for the name
    when generating the archive name for the dependency's artifacts.
    """
    dependency = DependencyFactory.create()
    build = BuildFactory.create(job=dependency.job)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")
    update_projectbuilds(build)
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_build(build)
    archived = archive.get_archived_artifacts_for_build(build).order_by(
        "archived_path")
    # The name the DefaultPolicy would produce is what cdimage should
    # have recorded for a dependency-only build.
    expected_name = DefaultPolicy().get_path_for_artifact(
        artifact, build=build, dependency=dependency)
    self.assertEqual(
        expected_name,
        "\n".join(archived.values_list("archived_path", flat=True)))
    # Nothing has been archived yet, only registered.
    self.assertEqual(
        [None], list(archived.values_list("archived_at", flat=True)))
def test_get_path_for_artifact_with_dependency(self):
    """
    If the cdimage archiver gets a dependency with no projectbuild, it
    should delegate to the DefaultPolicy to generate the archive
    filename.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(filename="thing.txt", build=build)
    expected_path = "%s/%s/thing.txt" % (
        "my-test-project", build.build_id)
    self.assertEqual(
        expected_path,
        CdimageArchivePolicy().get_path_for_artifact(
            artifact, dependency=dependency, projectbuild=projectbuild))