def test_new_project_no_permissions():
    """Creating a project is forbidden even for a project manager."""
    user, organization, client = setup()
    assert login(client, user)
    assert len(user.get_projects()) == 0

    # An empty payload from a user with no rights is rejected outright.
    response = client.post(
        path=reverse('projects:new_project'),
        data={},
    )
    assert response.status_code == HTTP_403_FORBIDDEN

    # Granting manage rights on an existing project does not allow
    # creating new ones.
    project = ProjectFactory(organization=organization)
    project.add_manage(user)
    assert len(user.get_projects()) == 1
    response = client.post(
        path=reverse('projects:new_project'),
        data={
            'name': 'Test Project',
            'description': 'Test Description',
            'organization': organization.pk,
        },
    )
    assert response.status_code == HTTP_403_FORBIDDEN
    assert len(user.get_projects()) == 1
def setUp(self):
    """Create three titled projects and fetch the project listing page."""
    self.test_instances = [
        ProjectFactory(title="Boring Project"),
        ProjectFactory(title="Exciting Project"),
        ProjectFactory(title="Other Project"),
    ]
    self.response = self.client.get('/proj/')
def setUp(self):
    """Build a project with users in every role plus endorsed nominations.

    Creates an admin, an outsider, a nominator and a blacklisted user on
    the project, then one nomination per URL with endorsements from the
    listed usernames, and finally fetches the project page logged out.
    """
    self.test_instance = ProjectFactory()
    self.fields = ['title', 'description']
    self.templates = ['base.html', 'projects/project.html']
    # Add some users
    self.admin_user = UserFactory()
    self.test_instance.administrators.add(self.admin_user)
    self.outside_user = UserFactory()
    self.nominator = UserFactory()
    self.test_instance.nominators.add(self.nominator)
    self.blacklisted_user = UserFactory()
    self.test_instance.nominator_blacklist.add(self.blacklisted_user)
    # Add some nominations.
    # BUG FIX: 'myspace.com' previously mapped to the bare string
    # ('user3') instead of the one-tuple ('user3',). Iterating a string
    # yields its characters, so endorsements would have been created for
    # users named 'u', 's', 'e', 'r' and '3'.
    self.user_nominations = {
        'facebook.com': ('user1', 'user3'),
        'linkedin.com': ('user2', 'user3'),
        'myspace.com': ('user3',),
        'twitter.com': ('user1', 'user2', 'user3'),
        'ucla.edu': ('user1', 'user3'),
    }
    for url, users in self.user_nominations.items():
        nom = NominationFactory(
            project=self.test_instance,
            resource=ResourceFactory(url=url),
        )
        for username in users:
            nom.endorsements.add(UserFactory(username=username))
    self.client.logout()
    self.test_response = self.client.get(
        self.test_instance.get_absolute_url()
    )
def setUp(self):
    """Create a project with an administrator and GET its edit page."""
    self.project = ProjectFactory()
    self.project.save()
    # Make some users.
    self.admin_user = UserFactory()
    self.project.administrators.add(self.admin_user)
    # Get a default response as the administrator.
    self.url = self.project.get_edit_url()
    self.client.force_login(self.admin_user)
    self.response = self.client.get(self.url)
def test_create_new_project(self):
    """POSTing valid form data creates a project and redirects."""
    project = ProjectFactory()
    new_project_data = {
        'name': project.name,
        'description': project.description,
        'start_date': project.start_date,
        'end_date': project.end_date,
        'price': project.price,
        'company': project.company.pk,
    }
    # Remove the factory instance so the view must recreate it from
    # the submitted form data.
    project.delete()
    response = self.client.post(reverse('project:create'), new_project_data)
    self.assertEqual(response.status_code, 302)
    self.assertEqual(new_project_data['name'], Project.objects.last().name)
def test_archive_projectbuild_with_prearchived_artifact(self):
    """
    Archiving a project build whose artifacts are partially archived
    already should report only the newly added artifacts.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build1, filename="artifact1.gz")
    artifact = ArtifactFactory.create(build=build2, filename="artifact2.gz")
    archive = ArchiveFactory.create()
    # Pre-archive one of the two artifacts.
    archive.add_artifact(artifact, projectbuild=projectbuild)
    result = archive.archive_projectbuild(projectbuild)
    # Both artifacts end up in the archive, but only one was newly added.
    self.assertEqual(2, archive.items.count())
    self.assertEqual(1, len(result))
def test_build_with_several_projectbuild_dependencies(self):
    """
    A build of dependency that's autotracked by several projects
    should trigger creation of all projectbuilds correctly.
    """
    project1, dependency = self.create_dependencies()
    project2 = ProjectFactory.create()
    ProjectDependency.objects.create(project=project2, dependency=dependency)
    projectbuild = build_project(project1, queue_build=False)
    # BUG FIX: this line was `projectbuild.phase == Build.FINALIZED`,
    # a no-op comparison whose result was discarded; the save() on the
    # next line shows an assignment was intended.
    projectbuild.phase = Build.FINALIZED
    projectbuild.save()
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    process_build_dependencies(build.pk)
    self.assertEqual(
        [dependency, dependency],
        sorted([b.dependency for b in ProjectBuildDependency.objects.all()]))
    self.assertEqual(
        [build, build],
        sorted([b.build for b in ProjectBuildDependency.objects.all()]))
def test_link_artifact_in_archive(self):
    """
    The link_artifact_in_archive task should delegate linking of the
    specified artifacts to the archive's transport.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    build = BuildFactory.create(job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # Ensure the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    item1.archived_size = 1000
    item1.save()
    fake_transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(
            Archive, "get_transport", return_value=fake_transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    fake_transport.link_filename_to_filename.assert_called_once_with(
        item1.archived_path, item2.archived_path)
    fake_transport.link_to_current.assert_called_once_with(
        item2.archived_path)
    # The archived size must be preserved across the link.
    item1 = ArchiveArtifact.objects.get(pk=item1.pk)
    self.assertEqual(1000, item1.archived_size)
def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing, it should arrange for the artifacts for the provided
    build to be archived in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)
    [item1, item2] = list(archive.get_archived_artifacts_for_build(build))
    # IDIOM FIX: use open() instead of the deprecated py2 file() builtin
    # (identical behavior; open is an alias for file on Python 2).
    filename = os.path.join(self.basedir, item1.archived_path)
    self.assertEqual(open(filename).read(), "Artifact from Jenkins")
    filename = os.path.join(self.basedir, item2.archived_path)
    self.assertEqual(open(filename).read(), "Artifact from Jenkins")
def test_interaction_with_filter_by_company(self):
    """The interaction list can be narrowed to a single company."""
    company_1 = CompanyFactory()
    company_2 = CompanyFactory()
    # Two interactions for company_1, a single one for company_2
    # (assertions below expect counts of 2 and 1).
    for _ in range(2):
        InteractionFactory(project=ProjectFactory(company=company_1))
    InteractionFactory(project=ProjectFactory(company=company_2))
    response = self.client.get(
        f"{reverse('interaction:list')}?company={company_1.pk}")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.context['interaction_list'].count(), 2)
    response = self.client.get(
        f"{reverse('interaction:list')}?company={company_2.pk}")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.context['interaction_list'].count(), 1)
def test_link_artifact_in_archive(self):
    """
    The link_artifact_in_archive task should use the transport to link
    the specified artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    build = BuildFactory.create(job=dependency.job, phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # Wire the artifacts up to their project builds first.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    item1.archived_size = 1000
    item1.save()
    transport = mock.Mock(spec=LocalTransport)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    transport.link_filename_to_filename.assert_called_once_with(
        item1.archived_path, item2.archived_path)
    transport.link_to_current.assert_called_once_with(item2.archived_path)
    # Reload to confirm the archived size survived.
    item1 = ArchiveArtifact.objects.get(pk=item1.pk)
    self.assertEqual(1000, item1.archived_size)
def test_process_build_artifacts_with_no_default_archive(self):
    """
    If there is no default archive the task should log that artifacts
    cannot be archived automatically, and archive nothing.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)
    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
    # We must return the build.pk for further chained calls to work.
    self.assertEqual(build.pk, result.get())
    mock_logging.assert_has_calls([
        mock.call.info(
            "Processing build artifacts from build %s %d",
            build, build.number),
        mock.call.info(
            "No default archiver - build not automatically archived.")
    ])
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_process_build_artifacts(self):
    """
    process_build_artifacts is chained from the Jenkins postbuild
    processing, it should arrange for the artifacts for the provided
    build to be archived in the default archive.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact from Jenkins")
        process_build_artifacts(build.pk)
    [item1, item2] = list(archive.get_archived_artifacts_for_build(build))
    # IDIOM FIX: open() instead of the deprecated py2 file() builtin
    # (same behavior on Python 2, where open is an alias for file).
    filename = os.path.join(self.basedir, item1.archived_path)
    self.assertEqual(open(filename).read(), "Artifact from Jenkins")
    filename = os.path.join(self.basedir, item2.archived_path)
    self.assertEqual(open(filename).read(), "Artifact from Jenkins")
def test_generate_checksums_no_transport(self):
    """
    generate_checksums should exit early, logging the fact, when there
    is no default archive to compute checksums against.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # No archive defined.
    transport = LoggingTransport(None)
    # Mock the logger so the early-exit message can be asserted.
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        return_value = generate_checksums(build.pk)
    self.assertEqual([], transport.log)
    self.assertEqual(build.pk, return_value)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_interaction_with_filter_by_project(self):
    """The interaction list can be narrowed to a single project."""
    project_1 = ProjectFactory()
    project_2 = ProjectFactory()
    # Two interactions for project_1, one for project_2
    # (assertions below expect counts of 2 and 1).
    for _ in range(2):
        InteractionFactory(project=project_1)
    InteractionFactory(project=project_2)
    response = self.client.get(
        f"{reverse('interaction:list')}?project={project_1.pk}")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.context['interaction_list'].count(), 2)
    response = self.client.get(
        f"{reverse('interaction:list')}?project={project_2.pk}")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.context['interaction_list'].count(), 1)
def test_cdimage_archiver_policy(self):
    """The cdimage policy stores artifacts under project/build-id paths."""
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    artifact = ArtifactFactory.create(build=build, filename="testing.gz")
    archive = ArchiveFactory.create(policy="cdimage")
    archive.add_artifact(artifact, projectbuild=projectbuild)
    archived = archive.get_archived_artifact(artifact)
    self.assertEqual(artifact, archived.artifact)
    self.assertEqual(
        "%s/%s/testing.gz" % (slugify(project.name), projectbuild.build_id),
        archived.archived_path)
    # Not yet physically archived.
    self.assertIsNone(archived.archived_at)
def test_process_build_artifacts_with_no_default_archive(self):
    """
    If we have no default archive, we should log the fact that we
    can't automatically archive artifacts.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=False)
    with mock.patch("archives.tasks.logging") as mock_logging:
        result = process_build_artifacts.delay(build.pk)
    # The task must hand build.pk on for further chained calls to work.
    self.assertEqual(build.pk, result.get())
    mock_logging.assert_has_calls([
        mock.call.info(
            "Processing build artifacts from build %s %d",
            build, build.number),
        mock.call.info(
            "No default archiver - build not automatically archived.")
    ])
    # Nothing was archived.
    self.assertEqual(
        [], list(archive.get_archived_artifacts_for_build(build)))
def test_archive_projectbuild(self):
    """The archiver can handle archiving an entire project build."""
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(build=build1, filename="artifact1.gz")
    ArtifactFactory.create(build=build2, filename="artifact2.gz")
    archive = ArchiveFactory.create()
    result = archive.archive_projectbuild(projectbuild)
    # Both artifacts are newly archived.
    self.assertEqual(2, archive.items.count())
    self.assertEqual(2, len(result))
def setup():
    """Return a fresh (user, organization, project, document, client) tuple."""
    user = UserFactory.create()
    organization = OrganizationFactory.create()
    project = ProjectFactory.create(organization=organization)
    document = DocumentFactory.create(project=project)
    return user, organization, project, document, Client()
def test_build_with_several_projectbuild_dependencies(self):
    """
    A build of a dependency that is autotracked by several projects
    should trigger creation of all projectbuilds correctly.
    """
    project1, dependency = self.create_dependencies()
    project2 = ProjectFactory.create()
    ProjectDependency.objects.create(project=project2, dependency=dependency)
    projectbuild = build_project(project1, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    process_build_dependencies(build.pk)
    # One ProjectBuildDependency per tracking project, all pointing at
    # the same dependency and build.
    self.assertEqual(
        [dependency, dependency],
        sorted([b.dependency for b in ProjectBuildDependency.objects.all()]))
    self.assertEqual(
        [build, build],
        sorted([b.build for b in ProjectBuildDependency.objects.all()]))
def test_generate_checksums(self):
    """
    generate_checksums should send commands to the ssh client to
    generate an sha256sum for the passed-in archived artifact.
    """
    # A project with a build and an archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="artifact_filename")
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=self.archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)
    transport = SshTransport(self.archive)
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived_artifact)
    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_generate_checksums(self):
    """
    generate_checksums should hand the build's archived artifact to the
    archive's transport for checksum generation.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    archived_artifact = ArchiveArtifact.objects.create(
        build=build, archive=archive, artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        generate_checksums(build.pk)
    self.assertEqual(
        ["START", "Checksums generated for %s" % archived_artifact, "END"],
        transport.log)
def test_generate_checksums(self):
    """
    generate_checksums should send commands to the ssh client to
    generate an sha256sum for the passed in archived artifact.
    """
    # A project with a build and an archived artifact.
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job,
        build_id=projectbuild.build_key)
    projectbuild_dependency = ProjectBuildDependency.objects.create(
        build=build,
        projectbuild=projectbuild,
        dependency=dependency)
    artifact = ArtifactFactory.create(
        build=build,
        filename="artifact_filename")
    archived_artifact = ArchiveArtifact.objects.create(
        build=build,
        archive=self.archive,
        artifact=artifact,
        archived_path="/srv/builds/200101.01/artifact_filename",
        projectbuild_dependency=projectbuild_dependency)
    transport = SshTransport(self.archive)
    # Intercept the remote command rather than actually running ssh.
    with mock.patch.object(transport, "_run_command") as mock_run:
        transport.generate_checksums(archived_artifact)
    mock_run.assert_called_once_with(
        "cd `dirname /var/tmp/srv/builds/200101.01/artifact_filename` "
        "&& sha256sum artifact_filename >> SHA256SUMS")
def test_user_has_document_permissions():
    """Document-level grants confer exactly the granted permissions.

    Walks a user through create -> invite -> manage grants on a document
    and checks that each level implies the lower ones but nothing leaks
    up to the project or organization.
    """
    user = UserFactory.create()
    organization = OrganizationFactory.create()
    project = ProjectFactory.create(organization=organization)
    document = DocumentFactory.create(project=project)
    document.add_create(user)
    assert len(user.get_organizations()) == 1
    assert len(user.get_projects()) == 1
    assert len(user.get_documents()) == 1
    # IDIOM FIX: assert truthiness directly instead of `== True/False`
    # (PEP 8 E712); the permission checks return booleans.
    assert not organization.can_manage(user)
    assert not organization.can_invite(user)
    assert not organization.can_create(user)
    assert not project.can_invite(user)
    assert not project.can_create(user)
    assert not project.can_manage(user)
    assert document.can_create(user)
    assert not document.can_invite(user)
    assert not document.can_manage(user)
    document.delete_create(user)
    document.add_invite(user)
    assert document.can_create(user)
    assert document.can_invite(user)
    assert not document.can_manage(user)
    document.delete_invite(user)
    document.add_manage(user)
    assert document.can_create(user)
    assert document.can_invite(user)
    assert document.can_manage(user)
def test_generate_checksums_no_transport(self):
    """
    generate_checksums should call the generate_checksums method on the
    transport from the archive with the build to generate the checksums
    for. If there is no default archive, a checksum cannot be calculated
    and there should be an early exit.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job,
        build_id=projectbuild.build_key)
    ProjectBuildDependency.objects.create(
        build=build,
        projectbuild=projectbuild,
        dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    # No archive defined.
    transport = LoggingTransport(None)
    # Capture the logger to assert the early-exit message.
    with mock.patch.object(logging, "info", return_value=None) as mock_log:
        return_value = generate_checksums(build.pk)
    # Nothing touched the transport, and the task still returned build.pk.
    self.assertEqual([], transport.log)
    self.assertEqual(build.pk, return_value)
    mock_log.assert_called_once_with(
        "No default archiver - no checksum to generate")
def test_owner_access_private(self):
    """An owner can see all of their own private tasks."""
    user = self._create_user()
    ProjectFactory(owner=user, is_private=True, name='test')
    self._create_tasks(is_private=True, allowed_users=[user.id])
    response = self.api_client.get(self.url)
    data = self.deserialize(response)
    data['meta']['total_count'].should.be.equal(20)
def test_projects_list_is_not_empty(self):
    """A populated project list renders all projects, not the empty note."""
    for _ in range(3):
        ProjectFactory()
    response = self.client.get(reverse('project:list'))
    self.assertEqual(response.status_code, 200)
    self.assertNotContains(response, "Список проектов пуст.")
    self.assertEqual(response.context['project_list'].count(), 3)
def test_remove_hook_if_need(self):
    """Saving a project that no longer runs here deletes its webhook."""
    project = ProjectFactory.create(run_here=False)
    site = Site.objects.get_current()
    url = 'https://{}/api/v1/nodes/hook/'.format(site.domain)
    hook = MagicMock(config={'url': url})
    project.repo.get_hooks.return_value = [hook]
    project.save()
    hook.delete.call_count.should.be.equal(1)
def test_create_task(self):
    """POSTing node data for an enabled project creates one NodeTask."""
    ProjectFactory(
        name='octokitty/testing',
        run_here=True,
        is_enabled=True,
    )
    response = self.api_client.post(self._url, data=self._data)
    response.status_code.should.be.equal(201)
    NodeTask.objects.count().should.be.equal(1)
def test_other_user_cant_access_private(self):
    """Private tasks stay hidden from users without access."""
    user = self._create_user()
    ProjectFactory(
        owner=user,
        is_private=True,
        name='test',
    )
    self._create_tasks(is_private=True)
    response = self.api_client.get(self.url)
    data = self.deserialize(response)
    data['meta']['total_count'].should.be.equal(0)
def test_user_has_superuser():
    """A superuser sees every organization, project, document and user."""
    user = UserFactory.create(is_superuser=True)
    organization = OrganizationFactory.create()
    project = ProjectFactory.create(organization=organization)
    DocumentFactory.create(project=project)
    UserFactory.create()
    assert len(user.get_organizations()) == 1
    assert len(user.get_projects()) == 1
    assert len(user.get_documents()) == 1
    # The superuser plus the extra user created above.
    assert len(user.get_users()) == 2
def create_dependencies(self, count=1, name="Project 1"):
    """Create a project plus `count` dependencies.

    Returns a list whose first element is the project followed by the
    created dependencies, so callers can unpack it positionally.
    """
    project = ProjectFactory.create(name=name)
    created = [project]
    for _ in range(count):
        dependency = DependencyFactory.create()
        ProjectDependency.objects.create(
            project=project, dependency=dependency)
        created.append(dependency)
    return created
def create_dependencies(self, count=1, name="Project 1"):
    """
    Utility: create a project and `count` dependencies tracked by it;
    return [project, dependency1, ...] for positional unpacking.
    """
    project = ProjectFactory.create(name=name)
    results = [project]
    for _ in range(count):
        dep = DependencyFactory.create()
        ProjectDependency.objects.create(project=project, dependency=dep)
        results.append(dep)
    return results
def test_organization_member_access_private(self):
    """Members of the owning organization can see its private tasks."""
    user = self._create_user()
    project = ProjectFactory(
        owner=user,
        is_private=True,
        name='test',
        organization__users=[user],
    )
    self._create_tasks(
        is_private=True,
        organization=project.organization.id,
    )
    response = self.api_client.get(self.url)
    data = self.deserialize(response)
    data['meta']['total_count'].should.be.equal(20)
def test_user_has_no_permissions():
    """A fresh user sees none of the objects created by others."""
    user = UserFactory.create()
    # Create some objects that the user should not be able to see.
    organization = OrganizationFactory.create()
    project = ProjectFactory.create(organization=organization)
    DocumentFactory.create(project=project)
    UserFactory.create()
    assert len(user.get_organizations()) == 0
    assert len(user.get_projects()) == 0
    assert len(user.get_documents()) == 0
    assert len(user.get_users()) == 0
def setUp(self):
    """Authenticate a token-bearing superuser and seed ten projects."""
    self.factory = APIRequestFactory()
    self.user = User.objects.create_user(
        username='******', email='*****@*****.**', password='******')
    self.user.is_superuser = True
    self.user.save()
    self.token = Token.objects.create(user=self.user)
    self.token.save()
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)
    for _ in range(10):
        ProjectFactory()
def test_fail_with_wrong_project(self):
    """A token for a different project logs an error and drops the task."""
    task_id = Tasks.save({
        'project': 'test',
        'service': {
            'name': 'token',
            'token': ProjectFactory().token,
        },
    })
    data = Tasks.find_one(task_id)
    with LogCapture() as log_capture:
        token_service(data).should.be.none
    list(log_capture.actual())[0].should.contain('ERROR')
    Tasks.find({}).count().should.be.equal(0)
def _create_task(self):
    """Persist and enqueue a dummy violation task for the 'test' project."""
    ProjectFactory(name='test')
    data = {
        'service': {
            'name': 'dummy',
        },
        'project': 'test',
        'branch': 'develop',
        'commit': 'asdfg',
        'violations': [
            {'name': 'dummy', 'raw': '1'},
        ],
    }
    jobs.create_task(models.Tasks.save(data))
def test_new_project_no_permissions():
    """No user — not even a project manager — may create new projects."""
    user, organization, client = setup()
    assert login(client, user)
    assert len(user.get_projects()) == 0
    url = reverse('projects:new_project')
    # An empty submission is rejected.
    response = client.post(path=url, data={})
    assert response.status_code == HTTP_403_FORBIDDEN
    # Manage rights on an existing project do not grant creation rights.
    project = ProjectFactory(organization=organization)
    project.add_manage(user)
    assert len(user.get_projects()) == 1
    payload = {
        'name': 'Test Project',
        'description': 'Test Description',
        'organization': organization.pk,
    }
    response = client.post(path=url, data=payload)
    assert response.status_code == HTTP_403_FORBIDDEN
    assert len(user.get_projects()) == 1
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    If the build is complete, and the item being archived is in a
    FINALIZED ProjectBuild, it should use the transport to set the
    current directory correctly.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build2, filename="testing/testing.txt")
    # Wire every build's artifacts up first.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(Archive, "get_transport",
                           return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # Both builds are complete, so the linked item is made current.
    self.assertEqual([
        "START",
        "Link %s to %s" % (item1.archived_path, item2.archived_path),
        "Make %s current" % item2.archived_path,
        "END"
    ], transport.log)
def test_comment(self):
    """Commenting a pull request with mixed violations must not raise."""
    ProjectFactory(name='test')
    task = {
        'project': 'test',
        'pull_request_id': 2,
        'violations': [
            {'name': 'dummy', 'preview': 'rew'},
            {'name': 'dummy!!!', 'raw': 'rwww'},
            {'name': 'dummy', 'raw': 'row'},
        ],
        'commit': {'hash': 'test'},
    }
    jobs.comment_pull_request.when\
        .called_with(models.Tasks.save(task))\
        .should_not.throw(Exception)
def test_archive_artifact_from_finalized_projectbuild(self):
    """
    If the build is complete, and the item being archived is in a
    FINALIZED ProjectBuild, it should use the transport to set the
    current directory correctly.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job,
        build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    build2 = BuildFactory.create(
        job=dependency2.job,
        build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    artifact = ArtifactFactory.create(
        build=build2, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build1.pk)
    process_build_dependencies(build2.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    [item1, item2] = archive.add_build(artifact.build)[artifact]
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        link_artifact_in_archive(item1.pk, item2.pk)
    # Both builds are complete, we expect this to be made the current
    # build.
    expected = [
        "START",
        "Link %s to %s" % (item1.archived_path, item2.archived_path),
        "Make %s current" % item2.archived_path,
        "END",
    ]
    self.assertEqual(expected, transport.log)
def test_archive_artifact_from_non_finalized_projectbuild(self):
    """
    Archiving an artifact whose project build is not finalized should
    copy it via the transport without promoting it to "current".

    NOTE(review): the previous docstring was a copy-paste from the
    finalized-projectbuild test above and contradicted this test's name;
    the assertions here only cover the archive step (no "Make ... current"
    entry in the transport log).
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency1)
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency1)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    item = [
        x for x in archive.add_build(artifact.build)[artifact]
        if x.projectbuild_dependency
    ][0]
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    self.assertEqual([
        "START",
        "%s -> %s root:testing" % (artifact.url, item.archived_path),
        "END"
    ], transport.log)
def test_process_build_artifacts_with_multiple_artifacts(self):
    """
    All the artifacts should be individually linked.

    Mocks out the Jenkins fetch (urllib2) and the two celery tasks, then
    verifies that each archived artifact gets its own archive/link task
    invocation.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_key)
    ArtifactFactory.create(
        build=build, filename="testing/testing1.txt")
    ArtifactFactory.create(
        build=build, filename="testing/testing2.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True,
        policy="cdimage")
    with mock.patch("archives.transports.urllib2") as urllib2_mock:
        # BUGFIX: the original side_effect returned the literal string
        # u"Artifact %s" without interpolating the fetched URL; the
        # placeholder was clearly intended to be filled in.
        urllib2_mock.urlopen.side_effect = lambda x: StringIO(
            u"Artifact %s" % x)
        with mock.patch(
            "archives.tasks.archive_artifact_from_jenkins"
        ) as archive_task:
            with mock.patch(
                "archives.tasks.link_artifact_in_archive"
            ) as link_task:
                process_build_artifacts(build.pk)
    [item1, item2, item3, item4] = list(
        archive.get_archived_artifacts_for_build(build).order_by(
            "artifact"))
    self.assertEqual(
        [mock.call(item4.pk), mock.call(item2.pk)],
        archive_task.si.call_args_list)
    self.assertEqual(
        [mock.call(item4.pk, item3.pk), mock.call(item2.pk, item1.pk)],
        link_task.si.call_args_list)
def test_build_url_with_projectbuild(self):
    """
    build_url should return the url for a project build if the build_id
    corresponds to a ProjectBuild.
    """
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    # The detail view of the owning ProjectBuild is the expected target.
    url_kwargs = {"project_pk": project.pk, "build_pk": projectbuild.pk}
    expected_url = reverse("project_projectbuild_detail", kwargs=url_kwargs)
    self.assertEqual(expected_url, build_url(build))
def test_archive_build_several_projectbuild_dependencies(self):
    """
    If we archive a build that is used in several projectbuilds, then we
    should get multiple copies of the artifact.
    """
    # project1 depends on dependency1 and dependency2; project2 shares
    # dependency1, so builds of dependency1 belong to both projects.
    project1, dependency1, dependency2 = self.create_dependencies(
        2)
    project2 = ProjectFactory.create(name="Project 2")
    ProjectDependency.objects.create(
        project=project2, dependency=dependency1)
    projectbuild = build_project(project1, queue_build=False)
    build1 = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    build2 = BuildFactory.create(
        job=dependency2.job, build_id=projectbuild.build_key)
    artifact1 = ArtifactFactory.create(build=build1, filename="file1.gz")
    artifact2 = ArtifactFactory.create(build=build2, filename="file2.gz")
    archive = ArchiveFactory.create(policy="cdimage")

    # Archiving build1 yields 3 items: the dependency-level copy plus one
    # copy per projectbuild that uses dependency1 (project1 and,
    # via autotracking, project2).
    update_projectbuilds(build1)
    create_projectbuilds_for_autotracking(build1)
    archive.add_build(build1)
    self.assertEqual(3, archive.items.count())

    # build2 only belongs to project1, so it adds 2 more items
    # (dependency-level + project1's projectbuild).
    update_projectbuilds(build2)
    create_projectbuilds_for_autotracking(build2)
    archive.add_build(build2)
    self.assertEqual(5, archive.items.count())

    artifacts = ArchiveArtifact.objects.all().order_by("archived_path")
    policy = CdimageArchivePolicy()
    # Expected paths, sorted: the two dependency-level paths (as computed
    # by the policy), then the per-projectbuild cdimage paths
    # "<project-slug>/<build-id>/<filename>".
    self.assertEqual(
        "{dependency1}\n{dependency2}\n"
        "project-1/{build}/file1.gz\nproject-1/{build}/file2.gz\n"
        "project-2/{build}/file1.gz".format(
            dependency1=policy.get_path_for_artifact(
                artifact=artifact1, build=build1, dependency=dependency1),
            dependency2=policy.get_path_for_artifact(
                artifact=artifact2, build=build2, dependency=dependency2),
            build=projectbuild.build_id),
        "\n".join(artifacts.values_list("archived_path", flat=True)))
def test_archive_artifact_from_non_finalized_projectbuild(self):
    """
    Archiving an item whose ProjectBuild is not yet finalized (a second
    dependency still has no build) pushes the file through the transport
    but does not make it the current build.

    NOTE(review): the original docstring here was copy-pasted from the
    finalized-projectbuild test; the assertions below contain no
    "Make ... current" step, so it is rewritten to match.
    """
    project = ProjectFactory.create()
    dependency1 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency1)
    # dependency2 never gets a build, which keeps the projectbuild
    # incomplete.
    dependency2 = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency2)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key,
        phase=Build.FINALIZED)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency1)
    artifact = ArtifactFactory.create(
        build=build, filename="testing/testing.txt")
    # We need to ensure that the artifacts are all connected up.
    process_build_dependencies(build.pk)
    archive = ArchiveFactory.create(
        transport="local", basedir=self.basedir, default=True)
    # Pick the archived item tied to the projectbuild (add_build returns
    # one item per place the artifact is archived).
    item = [x for x in archive.add_build(artifact.build)[artifact]
            if x.projectbuild_dependency][0]
    transport = LoggingTransport(archive)
    with mock.patch.object(
            Archive, "get_transport", return_value=transport):
        archive_artifact_from_jenkins(item.pk)
    # Only an upload is logged -- no "Make ... current" entry.
    self.assertEqual(
        ["START",
         "%s -> %s root:testing" % (artifact.url, item.archived_path),
         "END"],
        transport.log)
def create_build_data(self, use_requested_by=True, email=None):
    """
    Create the test data for a build.

    Returns a (projectbuild, build) pair; the build optionally carries a
    requesting user (created with the given email).
    """
    requester = (
        User.objects.create_user("testing", email=email)
        if use_requested_by else None)
    project = ProjectFactory.create()
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job,
        build_id=projectbuild.build_key,
        requested_by=requester)
    ProjectBuildDependency.objects.create(
        build=build, projectbuild=projectbuild, dependency=dependency)
    ArtifactFactory.create(build=build, filename="testing/testing.txt")
    return projectbuild, build
def test_build_with_projectbuild_dependencies(self):
    """
    ProjectBuildDependencies should be tied to the newly created build.
    """
    project1, dependency1, dependency2 = self.create_dependencies(2)
    # A second project sharing dependency2.
    project2 = ProjectFactory.create()
    ProjectDependency.objects.create(
        project=project2, dependency=dependency2)
    projectbuild = build_project(project1, queue_build=False)
    new_build = BuildFactory.create(
        job=dependency1.job, build_id=projectbuild.build_key)
    process_build_dependencies(new_build.pk)

    dependencies = ProjectBuildDependency.objects.all().order_by(
        "dependency__name")
    expected_deps = sorted(
        [dependency1, dependency2], key=lambda x: x.name)
    self.assertEqual(
        expected_deps, [b.dependency for b in dependencies])
    # dependency2 has no build yet, hence the None entry.
    self.assertEqual(
        [None, new_build], sorted([b.build for b in dependencies]))
def test_get_path_for_artifact(self):
    """
    The CdimageArchivePolicy should calculate a cdimage-like path using
    the project name and build id. url.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(
        filename="thing.txt", build=build)
    policy = CdimageArchivePolicy()
    # Expect "<slugified-project-name>/<build-id>/<filename>".
    expected_path = "%s/%s/thing.txt" % ("my-test-project", build.build_id)
    actual_path = policy.get_path_for_artifact(
        artifact, projectbuild=projectbuild)
    self.assertEqual(expected_path, actual_path)
def test_get_path_for_artifact_with_dependency(self):
    """
    If the cdimage archiver gets a dependency with no projectbuild, it
    should delegate to the DefaultPolicy to generate the archive
    filename.

    NOTE(review): this docstring does not match the code below -- the
    call passes projectbuild=projectbuild (and the expected path is the
    cdimage-style one), so either the docstring is stale or the test no
    longer exercises the delegation path. Verify against the policy
    implementation.
    """
    project = ProjectFactory.create(name="My Test Project")
    dependency = DependencyFactory.create()
    ProjectDependency.objects.create(
        project=project, dependency=dependency)
    from projects.helpers import build_project
    projectbuild = build_project(project, queue_build=False)
    build = BuildFactory.create(
        job=dependency.job, build_id=projectbuild.build_id)
    artifact = ArtifactFactory.create(
        filename="thing.txt", build=build)
    policy = CdimageArchivePolicy()
    # Expect "<slugified-project-name>/<build-id>/<filename>".
    self.assertEqual(
        "%s/%s/thing.txt" % ("my-test-project", build.build_id),
        policy.get_path_for_artifact(
            artifact, dependency=dependency, projectbuild=projectbuild))
def setUp(self):
    """Create one python-tagged account and two dual-tagged projects."""
    account_tags = ('python',)
    project_tags = ('python', 'golang')
    AccountFactory.create(tags=account_tags)
    ProjectFactory.create_batch(2, tags=project_tags)
def test_not_create_duplicated_keys_on_save(self):
    """Test not create duplicated keys on save"""
    project = ProjectFactory.create(run_here=True, is_enabled=True)
    # A second save must not create another ProjectKeys row.
    project.save()
    key_count = models.ProjectKeys.objects.filter(project=project).count()
    key_count.should.be.equal(1)
def test_add_hook_if_need(self):
    """Test add hook if need"""
    project = ProjectFactory.create(run_here=True, is_enabled=True)
    # Enabling a run-here project should register exactly one repo hook.
    hook_call_count = project.repo.create_hook.call_count
    hook_call_count.should.be.equal(1)
def setUp(self):
    """Provision a fresh Project fixture for each test."""
    self.project = ProjectFactory.create()