def test_api_documents_exist(self):
  """Test /api/document/documents_exist

  The endpoint reports, per requested gdrive id, whether a matching
  Document already exists, and returns the object when it does.
  """
  with factories.single_commit():
    doc1 = factories.DocumentFileFactory(gdrive_id="123")
    doc1_id = doc1.id
    factories.DocumentFileFactory(gdrive_id="456")
  endpoint_uri = "/api/document/documents_exist"

  # Case 1: both requested gdrive ids exist.
  request_data1 = json.dumps(dict(gdrive_ids=["123", "456"]))
  response1 = self.api.client.post(endpoint_uri,
                                   data=request_data1,
                                   content_type="application/json")
  # assertEqual: assertEquals is a deprecated unittest alias.
  self.assertEqual(len(response1.json), 2)
  self.assertTrue(all(r["exists"] for r in response1.json))

  # Case 2: one requested gdrive id ("999") does not exist.
  request_data2 = json.dumps(dict(gdrive_ids=["123", "999"]))
  response2 = self.api.client.post(endpoint_uri,
                                   data=request_data2,
                                   content_type="application/json")
  self.assertEqual(len(response2.json), 2)
  existing = [obj for obj in response2.json if obj["exists"]]
  not_existing = [obj for obj in response2.json if not obj["exists"]]
  self.assertEqual(len(existing), 1)
  self.assertEqual(len(not_existing), 1)
  # The existing entry must carry the stored document back to the caller.
  self.assertEqual(existing[0]["object"]["id"], doc1_id)
def test_add_to_parent_folder_relationship(self):
  """If parent has folder => add document to that folder mapped via rel"""
  patch_target = 'ggrc.gdrive.file_actions.add_gdrive_file_folder'
  with mock.patch(patch_target) as folder_adder:
    folder_adder.return_value = 'http://mega.doc'
    with factories.single_commit():
      control = factories.ControlFactory(folder="gdrive_folder_id")
      control_id = control.id
      doc = factories.DocumentFileFactory(
          source_gdrive_id="source_gdrive_id",
          link='some link',
      )
      doc_id = doc.id
    # Mapping the document to a folder-bearing control via a Relationship
    # should copy the file into the control's gdrive folder.
    relationship_payload = {
        "relationship": {
            "source": {"id": control_id, "type": control.type},
            "destination": {"id": doc_id, "type": doc.type},
            "context": None,
        },
    }
    response = self.api.post(all_models.Relationship, relationship_payload)
    self.assertStatus(response, 201)
    folder_adder.assert_called_with("source_gdrive_id", "gdrive_folder_id")
def test_document_make_admin_endpoint(self):
  """Test /api/document/make_admin endpoint

  should add current user as document admin
  """
  _, editor = self.gen.generate_person(user_role="Creator")
  doc = factories.DocumentFileFactory(gdrive_id="123")
  doc_id = doc.id
  self.api.set_user(editor)
  # "123" exists, "456" does not -> exactly one object can be updated.
  request_data = json.dumps(dict(gdrive_ids=["123", "456"]))
  response = self.api.client.post("/api/document/make_admin",
                                  data=request_data,
                                  content_type="application/json")
  updated = [obj for obj in response.json if obj["updated"]]
  not_updated = [obj for obj in response.json if not obj["updated"]]
  # assertEqual: assertEquals is a deprecated unittest alias.
  self.assertEqual(len(updated), 1)
  self.assertEqual(updated[0]["object"]["id"], doc_id)
  self.assertEqual(len(not_updated), 1)
  # Reload from the DB and check the ACL now holds exactly the editor.
  doc = all_models.Document.query.filter_by(id=doc_id).one()
  self.assertEqual(len(doc.access_control_list), 1)
  control_user = all_models.Person.query.get(editor.id)
  self.assertIn(control_user.id,
                [person.id for person, acr in doc.access_control_list])
def test_filter_document_by_type(self, kind):
  """Test filter documents by document type."""
  # One document of each kind; remember which id belongs to which kind.
  doc_ids_by_kind = {
      all_models.Document.FILE:
          factories.DocumentFileFactory().id,
      all_models.Document.REFERENCE_URL:
          factories.DocumentReferenceUrlFactory().id,
  }
  query_request_data = [{
      u'object_name': u'Document',
      u'type': u'values',
      u'permissions': u'read',
      u'limit': [0, 5],
      u'fields': [],
      u'filters': {
          u'expression': {
              u'left': u'kind',
              u'op': {u'name': u'='},
              u'right': kind,
          }
      },
  }]
  resp = self.api.send_request(
      self.api.client.post, data=query_request_data, api_link="/query")
  documents = resp.json[0]["Document"]
  # Only the document of the requested kind may match the filter.
  self.assertEqual(1, documents["count"])
  self.assertEqual(doc_ids_by_kind[kind], documents["values"][0]["id"])
def test_parent_obj_validation_is_id_presents(self):
  """Validation parent_obj id should present."""
  # parent_obj carries a type but no id -> must be rejected.
  parent_without_id = {'type': 'Control'}
  with self.assertRaises(exceptions.ValidationError):
    factories.DocumentFileFactory(parent_obj=parent_without_id)
def test_parent_obj_validation_is_type_presents(self):
  """Validation parent_obj type should present."""
  control = factories.ControlFactory()
  # parent_obj carries an id but no type -> must be rejected.
  parent_without_type = {'id': control.id}
  with self.assertRaises(exceptions.ValidationError):
    factories.DocumentFileFactory(parent_obj=parent_without_type)
def test_get_parent_obj_control_type(self):
  """Test mapping parent of Control type"""
  control = factories.ControlFactory()
  doc = factories.DocumentFileFactory(
      parent_obj={'id': control.id, 'type': 'Control'})
  # Creating the document with parent_obj should have mapped it to the
  # control, so the control is reachable through related_objects.
  mapped_parent = doc.related_objects(_types=[control.type]).pop()
  self.assertEqual(mapped_parent, control)
def test_import_assessment_with_doc_file_blank_multiple(self):
  """No warnings in Document Files mapping

  NOTE(review): despite the method name mentioning "assessment", the
  test imports a Control row; name kept for backward compatibility.
  """
  # Blank entries between the urls should be ignored by the importer.
  doc_file = "test_gdrive_url \n \n test_gdrive_url_2"
  with factories.single_commit():
    control = factories.ControlFactory()
    control_slug = control.slug
    doc1 = factories.DocumentFileFactory(link="test_gdrive_url")
    factories.RelationshipFactory(source=control, destination=doc1)
    doc2 = factories.DocumentFileFactory(link="test_gdrive_url_2")
    factories.RelationshipFactory(source=control, destination=doc2)
  response = self.import_data(
      collections.OrderedDict([
          ("object_type", "Control"),
          ("Code*", control_slug),
          ("Document File", doc_file),
      ]))
  # Both urls are already mapped, so no warnings are expected.
  # assertEqual: assertEquals is a deprecated unittest alias.
  self.assertEqual([], response[0]['row_warnings'])
def test_parent_obj_validation_wrong_type(self):
  """Validation parent_obj type.

  Type should be Documentable.
  """
  audit = factories.AuditFactory()
  # Audit is not a Documentable type, so the factory must refuse it.
  non_documentable_parent = {'id': audit.id, 'type': 'Audit'}
  with self.assertRaises(exceptions.ValidationError):
    factories.DocumentFileFactory(parent_obj=non_documentable_parent)
def test_parent_obj_validation_not_allowed_type(self):
  """Validation parent_obj type.

  Type should be in 'Control', 'Issue', 'RiskAssessment'.

  Renamed from ``test_parent_obj_validation_wrong_type``: the module
  defined two methods with that name, so this later definition silently
  shadowed the earlier one and only one of the two tests ever ran.
  """
  control = factories.ControlFactory()
  # 'Program' is not in the allowed parent types -> must be rejected.
  with self.assertRaises(exceptions.ValidationError):
    factories.DocumentFileFactory(parent_obj={
        'id': control.id,
        'type': 'Program'
    })
def test_add_file_to_gdrive_folder(self, factory_name):
  """Test add document file to {0:22}gdrive folder"""
  patch_target = "ggrc.gdrive.file_actions.add_gdrive_file_folder"
  with mock.patch(patch_target) as add_file_mock:
    add_file_mock.return_value = "magic_response"
    with factories.single_commit():
      # Build the parametrized parent type and give it a gdrive folder.
      parent_factory = factories.get_model_factory(factory_name)
      parent = parent_factory(folder="correct_folder")
      factories.DocumentFileFactory(
          source_gdrive_id="correct_file",
          parent_obj={
              "id": parent.id,
              "type": parent.type,
          })
    # The file must have been copied into the parent's folder.
    add_file_mock.assert_called_with("correct_file", "correct_folder")
def test_add_to_parent_folder_not_specified(self):
  """If parent has not folder => just save gdrive link"""
  patch_target = 'ggrc.gdrive.file_actions.get_gdrive_file_link'
  with mock.patch(patch_target) as link_getter:
    link_getter.return_value = 'http://mega.doc'
    with factories.single_commit():
      # Control without a folder: no copy should happen, only a lookup
      # of the existing file's link.
      control = factories.ControlFactory()
      factories.DocumentFileFactory(
          source_gdrive_id="source_gdrive_id",
          parent_obj={
              "id": control.id,
              "type": "Control",
          })
    link_getter.assert_called_with("source_gdrive_id")
def test_add_to_parent_folder(self):
  """If parent has folder => add document to that folder"""
  patch_target = 'ggrc.gdrive.file_actions.add_gdrive_file_folder'
  with mock.patch(patch_target) as folder_adder:
    folder_adder.return_value = 'http://mega.doc'
    with factories.single_commit():
      # Control WITH a folder: the new file must be copied into it.
      control = factories.ControlFactory(folder="gdrive_folder_id")
      factories.DocumentFileFactory(
          source_gdrive_id="source_gdrive_id",
          parent_obj={
              "id": control.id,
              "type": "Control",
          })
    folder_adder.assert_called_with("source_gdrive_id", "gdrive_folder_id")
def test_import_control_with_doc_file_multiple(self):
  """Show warning if at least one of Document Files not mapped"""
  doc_url = "test_gdrive_url"
  with factories.single_commit():
    control = factories.ControlFactory()
    control_slug = control.slug
    doc1 = factories.DocumentFileFactory(link=doc_url)
    factories.RelationshipFactory(source=control, destination=doc1)
    doc2 = factories.DocumentFileFactory(link="test_gdrive_url_2")
    factories.RelationshipFactory(source=control, destination=doc2)
  # "another_gdrive_url" is not mapped to the control, so the column
  # must be skipped with a warning.
  response = self.import_data(
      collections.OrderedDict([
          ("object_type", "Control"),
          ("Code*", control_slug),
          ("Document File", doc_url + "\n another_gdrive_url"),
      ]))
  expected_warning = (
      u"Line 3: 'Document File' can't be changed via import."
      u" Please go on {} page and make changes"
      u" manually. The column will be "
      u"skipped".format(control.type))
  # assertEqual: assertEquals is a deprecated unittest alias.
  self.assertEqual([expected_warning], response[0]['row_warnings'])
def test_import_control_with_doc_file_existing(self):
  """If file already mapped to control not show warning to user"""
  doc_url = "test_gdrive_url"
  with factories.single_commit():
    control = factories.ControlFactory()
    control_slug = control.slug
    doc = factories.DocumentFileFactory(link=doc_url)
    factories.RelationshipFactory(source=control, destination=doc)
  response = self.import_data(
      collections.OrderedDict([
          ("object_type", "Control"),
          ("Code*", control_slug),
          ("Document File", doc_url),
      ]))
  # The url is already mapped, so the import must pass warning-free.
  # assertEqual: assertEquals is a deprecated unittest alias.
  self.assertEqual([], response[0]['row_warnings'])
class TestFullFilledModelExport(TestCase):
  """Test for export a full filled model

  Builds an instance of every exportable model, fills every importable
  column via FIELDS_FILLED_RULES, exports it to CSV and asserts that no
  exported column is empty.
  """
  # The lambdas below reference the class name before it is bound.
  # pylint: disable=undefined-variable

  # Maps a column alias to a callable that fills the corresponding field.
  # Every callable receives ``obj`` (the model instance under test) and
  # ``user`` keyword arguments; see ``_set_field``.
  # ``set_attribute`` is a helper defined elsewhere in this module
  # (presumably sets the attribute on the instance -- confirm there).
  FIELDS_FILLED_RULES = {
      'archived': lambda **kwargs: set_attribute(
          kwargs['obj'], 'archived', False
      ),
      'assertions': lambda **kwargs: set_attribute(
          kwargs['obj'], 'assertions', 'assertions'
      ),
      'assessment_template': lambda **kwargs: set_attribute(
          kwargs['obj'], 'assessment_template', 'assessment_template'
      ),
      'assessment_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'assessment_type', 'Control'
      ),
      'audit': lambda **kwargs: TestFullFilledModelExport._map_object(
          source=kwargs['obj'], destination=kwargs['obj'].audit
      ),
      'categories': lambda **kwargs: set_attribute(
          kwargs['obj'], 'categories', '["categories"]'
      ),
      'comments': lambda **kwargs: TestFullFilledModelExport._map_object(
          source=kwargs['obj'],
          destination=factories.CommentFactory(
              description='description', assignee_type='Admin'
          )
      ),
      'company': lambda **kwargs: set_attribute(
          kwargs['obj'], 'company', 'company'
      ),
      # A single issue-tracker factory fills all issue-tracker columns.
      'component_id': lambda **kwargs: factories.IssueTrackerIssueFactory(
          enabled=True,
          issue_tracked_obj=kwargs['obj'],
          issue_id=123,
          issue_type="PROCESS",
          component_id=12345,
          hotlist_id=12345,
          issue_priority="P2",
          issue_severity="S2",
          issue_url="somelink",
      ),
      'contact': lambda **kwargs: set_attribute(
          kwargs['obj'], 'contact', kwargs['user']
      ),
      'created_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'created_at', datetime.date(2019, 9, 24)
      ),
      'created_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'created_by_id', kwargs['user'].id
      ),
      'cycle': lambda **kwargs: set_attribute(
          kwargs['obj'], 'cycle', 'cycle'
      ),
      'cycle_task_group': lambda **kwargs: set_attribute(
          kwargs['obj'], 'cycle_task_group', 'cycle_task_group'
      ),
      'cycle_workflow': lambda **kwargs: set_attribute(
          kwargs['obj'], 'cycle_workflow', 'cycle_workflow'
      ),
      'default_assignees': lambda **kwargs: set_attribute(
          kwargs['obj'], 'default_assignees', kwargs['user']
      ),
      'default_verifier': lambda **kwargs: set_attribute(
          kwargs['obj'], 'default_verifier', kwargs['user']
      ),
      'directive': lambda **kwargs: set_attribute(
          kwargs['obj'], 'directive', 'directive'
      ),
      'delete': lambda **kwargs: set_attribute(
          kwargs['obj'], 'delete', 'delete'
      ),
      'description': lambda **kwargs: set_attribute(
          kwargs['obj'], 'description', 'description'
      ),
      'design': lambda **kwargs: set_attribute(
          kwargs['obj'], 'design', 'Effective'
      ),
      'documents_file': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.DocumentFileFactory(link='link'),
              destination=kwargs['obj'],
          ),
      'documents_reference_url': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.DocumentReferenceUrlFactory(link='link'),
              destination=kwargs['obj'],
          ),
      'due_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'due_date', datetime.date(2019, 9, 24)
      ),
      'email': lambda **kwargs: set_attribute(
          kwargs['obj'], 'email', '*****@*****.**'
      ),
      'enabled': lambda **kwargs: set_attribute(
          kwargs['obj'], 'enabled', True
      ),
      'end_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'end_date', datetime.date(2019, 9, 24)
      ),
      'evidences_file': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.EvidenceFileFactory(),
              destination=kwargs['obj'],
          ),
      'evidences_url': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.EvidenceUrlFactory(),
              destination=kwargs['obj'],
          ),
      'finished_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'finished_date', datetime.date(2019, 9, 24)
      ),
      'folder': lambda **kwargs: set_attribute(
          kwargs['obj'], 'folder', 'folder'
      ),
      'fraud_related': lambda **kwargs: set_attribute(
          kwargs['obj'], 'fraud_related', True
      ),
      'hotlist_id': lambda **kwargs: set_attribute(
          kwargs['obj'], 'hotlist_id', 'hotlist_id'
      ),
      'is_verification_needed': lambda **kwargs: set_attribute(
          kwargs['obj'], 'is_verification_needed', True
      ),
      'issue_priority': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_priority', 'issue_priority'
      ),
      'issue_severity': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_severity', 'issue_severity'
      ),
      'issue_title': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_title', 'issue_title'
      ),
      'issue_tracker': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_tracker', 'issue_tracker'
      ),
      'issue_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_type', 'issue_type'
      ),
      'key_control': lambda **kwargs: set_attribute(
          kwargs['obj'], 'key_control', True
      ),
      # NOTE(review): ``all_models.Option == 'product_type'`` compares the
      # Option CLASS itself with a string; presumably a column such as
      # ``all_models.Option.role`` was intended -- confirm against the
      # Option model.  As written the filter likely matches nothing and
      # ``.first()`` returns None.
      'kind': lambda **kwargs: set_attribute(
          kwargs['obj'], 'kind',
          all_models.Option.query.filter(
              all_models.Option == 'product_type'
          ).first()
      ),
      'labels': lambda **kwargs: factories.ObjectLabelFactory(
          labeled_object=kwargs['obj'],
          label=factories.LabelFactory(
              object_type=kwargs['obj'].__tablename__
          ),
      ),
      # Computed columns backed by Attributes rows; the template ids (1, 3)
      # select which attribute the row represents.
      'last_assessment_date': lambda **kwargs:
          TestFullFilledModelExport._create_attributes(
              kwargs['obj'], 1
          ),
      'last_comment': lambda **kwargs:
          TestFullFilledModelExport._create_attributes(
              kwargs['obj'], 3
          ),
      'last_deprecated_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_deprecated_date', datetime.date(2019, 9, 24)
      ),
      'last_submitted_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_submitted_at', datetime.date(2019, 9, 24)
      ),
      'last_submitted_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_submitted_by_id', kwargs['user'].id
      ),
      'last_verified_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_verified_at', datetime.date(2019, 9, 24)
      ),
      'last_verified_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_verified_by_id', kwargs['user'].id
      ),
      'means': lambda **kwargs: set_attribute(kwargs['obj'], 'means', 'means'),
      'modified_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'modified_by', kwargs['user']
      ),
      'name': lambda **kwargs: set_attribute(kwargs['obj'], 'name', 'name'),
      # NOTE(review): same class-vs-string comparison as 'kind' above.
      'network_zone': lambda **kwargs: set_attribute(
          kwargs['obj'], 'network_zone',
          all_models.Option.query.filter(
              all_models.Option == 'network_zone'
          ).first()
      ),
      'notes': lambda **kwargs: set_attribute(kwargs['obj'], 'notes', 'notes'),
      'notify_custom_message': lambda **kwargs: set_attribute(
          kwargs['obj'], 'notify_custom_message', 'notify_custom_message'
      ),
      'notify_on_change': lambda **kwargs: set_attribute(
          kwargs['obj'], 'notify_on_change', True
      ),
      'operationally': lambda **kwargs: set_attribute(
          kwargs['obj'], 'operationally', 'Effective'
      ),
      'people_sync_enabled': lambda **kwargs: set_attribute(
          kwargs['obj'], 'people_sync_enabled', True
      ),
      'procedure_description': lambda **kwargs: set_attribute(
          kwargs['obj'], 'procedure_description', 'procedure description'
      ),
      'program': lambda **kwargs: set_attribute(
          kwargs['obj'], 'program', 'program'
      ),
      'readonly': lambda **kwargs: set_attribute(
          kwargs['obj'], 'readonly', True
      ),
      'recipients': lambda **kwargs: set_attribute(
          kwargs['obj'], 'recipients', 'recipients'
      ),
      'repeat_every': lambda **kwargs: set_attribute(
          kwargs['obj'], 'repeat_every', 1
      ),
      'report_end_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'report_end_date', datetime.date(2019, 9, 24)
      ),
      'report_start_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'report_start_date', datetime.date(2019, 8, 20)
      ),
      'review_status': lambda **kwargs: set_attribute(
          kwargs['obj'], 'review_status', 'review status'
      ),
      'review_status_display_name': lambda **kwargs: set_attribute(
          kwargs['obj'], 'review_status_display_name',
          'review status display name'
      ),
      'reviewers': lambda **kwargs: TestFullFilledModelExport._create_acl(
          'Reviewers',
          factories.ReviewFactory(reviewable=kwargs['obj']),
          kwargs['user'],
      ),
      'risk_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'risk_type', 'risk_type'
      ),
      'secondary_contact': lambda **kwargs: set_attribute(
          kwargs['obj'], 'secondary_contact', kwargs['user']
      ),
      'send_by_default': lambda **kwargs: set_attribute(
          kwargs['obj'], 'send_by_default', True
      ),
      'slug': lambda **kwargs: set_attribute(kwargs['obj'], 'slug', 'slug'),
      'sox_302_enabled': lambda **kwargs: set_attribute(
          kwargs['obj'], 'sox_302_enabled', True
      ),
      'start_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'start_date', datetime.date(2019, 8, 20)
      ),
      'status': lambda **kwargs: set_attribute(
          kwargs['obj'], 'status', 'In Progress'
      ),
      'task_group': lambda **kwargs: set_attribute(
          kwargs['obj'], 'task_group', 'task_group'
      ),
      'task_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'task_type', 'text'
      ),
      'template_custom_attributes': lambda **kwargs: set_attribute(
          kwargs['obj'], 'template_custom_attributes', 'adsasd'
      ),
      'template_object_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'template_object_type', 'Objective'
      ),
      'test_plan': lambda **kwargs: set_attribute(
          kwargs['obj'], 'test_plan', 'test_plan'
      ),
      'test_plan_procedure': lambda **kwargs: set_attribute(
          kwargs['obj'], 'test_plan_procedure', True
      ),
      'threat_event': lambda **kwargs: set_attribute(
          kwargs['obj'], 'threat_event', 'threat event'
      ),
      'threat_source': lambda **kwargs: set_attribute(
          kwargs['obj'], 'threat_source', 'threat source'
      ),
      'title': lambda **kwargs: set_attribute(kwargs['obj'], 'title', 'title'),
      'unit': lambda **kwargs: set_attribute(
          kwargs['obj'], 'unit', all_models.Workflow.DAY_UNIT
      ),
      'updated_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'updated_at', datetime.date(2019, 8, 20)
      ),
      'user_role': lambda **kwargs: set_attribute(
          kwargs['obj'], 'user_role', 'user_role'
      ),
      'verified_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'verified_date', datetime.date(2019, 9, 24)
      ),
      'verify_frequency': lambda **kwargs: set_attribute(
          kwargs['obj'], 'verify_frequency', 'verify frequency'
      ),
      'vulnerability': lambda **kwargs: set_attribute(
          kwargs['obj'], 'vulnerability', 'vulnerability'
      ),
      'workflow': lambda **kwargs: set_attribute(
          kwargs['obj'], 'workflow', 'workflow'
      ),
  }

  @staticmethod
  def _create_attributes(obj, attribute_template_id):
    """Create attribute for object

    Inserts an ``Attributes`` row for ``obj`` and commits it; used to back
    computed columns such as 'last_assessment_date' and 'last_comment'.
    """
    attr = all_models.Attributes(
        object_id=obj.id,
        object_type=obj.__class__.__name__,
        value_datetime=datetime.datetime(2019, 9, 26),
        value_string="last comment",
        attribute_template_id=attribute_template_id,
        updated_at=datetime.datetime.now(),
        created_at=datetime.datetime.now(),
    )
    db.session.add(attr)
    db.session.commit()

  @staticmethod
  def _map_object(source, destination):
    """Create relationship between two objects"""
    from ggrc.models import exceptions
    try:
      factories.RelationshipFactory(source=source, destination=destination)
    except exceptions.ValidationError:
      # Some object pairs cannot be mapped directly; a rejected mapping
      # is deliberately skipped instead of failing the test.
      return

  def _map_snapshot(self, obj, destination):
    """Create relationship between object and his snapshot

    Snapshots ``obj``'s latest revision under ``destination``'s audit
    (or under ``destination`` itself when it is an Audit) and maps the
    snapshot to ``destination``.
    """
    revision = self._get_latest_object_revisions([obj])[0]
    parent = destination
    if not isinstance(parent, all_models.Audit):
      parent = destination.audit
    snapshot = factories.SnapshotFactory(
        child_id=revision.resource_id,
        child_type=revision.resource_type,
        revision=revision,
        parent=parent,
        parent_id=parent.id,
    )
    factories.RelationshipFactory(source=snapshot, destination=destination)

  @staticmethod
  def _create_acl(role, obj, user):
    """Propagate acl for obj

    Assigns ``user`` to the access-control role named ``role`` on ``obj``.
    """
    ac_role = all_models.AccessControlRole.query.filter(
        all_models.AccessControlRole.name == role,
        all_models.AccessControlRole.object_type == obj.__class__.__name__,
    ).one()
    factories.AccessControlPersonFactory(
        ac_list=obj.acr_acl_map[ac_role],
        person=user,
    )

  @staticmethod
  def _set_field(alias, obj, user):
    """Set field for model

    Dispatches to FIELDS_FILLED_RULES; raises KeyError for an alias
    without a rule (handled by ``build_object``).
    """
    kwargs = {'obj': obj, 'user': user}
    set_attr = TestFullFilledModelExport.FIELDS_FILLED_RULES[alias]
    set_attr(**kwargs)

  @staticmethod
  def _get_aliases(model):
    """Get aliases for provided model"""
    return [c for c in import_helper.get_object_column_definitions(model)]

  def assert_full_filled_model(self, data):
    """Assert that all columns are filled for export

    Args:
      data: list of rows from csv table
    Raises:
      AssertionError: if not all require columns are filled for model
    """
    errors = []
    # these columns only for import
    ignore = ['Delete', 'Comments',
              'Policy / Regulation / Standard / Contract',
              'Template', 'Custom Attributes']
    rows = csv.reader(StringIO(data))
    # Only the first three CSV rows matter: section header, column names
    # and the single exported object's values.
    rows = [r for r in rows][:3]
    for top, column_name, field_value in zip(*rows):
      if column_name.startswith('unmap:') or column_name in ignore:
        continue
      elif top == 'Object type':
        # Remember the model title for the failure message.
        title = column_name
        continue
      if field_value == '':
        errors.append(column_name)
    self.assertEqual(errors, [],
                     'These columns for {0} are not exported: {1}'.format(
                         title, ', '.join(errors)))

  def build_object(self, model):
    """Fill all fields in model"""
    errors = []
    obj = factories.get_model_factory(model.__name__)()
    aliases = sorted(self._get_aliases(model))
    for alias in aliases:
      if alias.startswith('__mapping__'):
        # e.g. '__mapping__:some model' -> map a fresh SomeModel instance.
        title = alias.split(':')[1]
        mapped_model = ''.join([part.title() for part in title.split()])
        destination_obj = factories.get_model_factory(mapped_model)()
        self._map_object(source=obj, destination=destination_obj)
      elif alias.startswith('__snapshot_mapping__'):
        title = alias.split(':')[1]
        mapped_model = ''.join([part.title() for part in title.split()])
        destination_obj = factories.get_model_factory(mapped_model)()
        self._map_snapshot(destination_obj, obj)
      elif alias.startswith('__acl__'):
        # e.g. '__acl__:Reviewers' -> assign self.user to that role.
        role = alias.split(':')[1]
        self._create_acl(role, obj, self.user)
      elif not alias.startswith('__'):
        try:
          self._set_field(alias, obj, self.user)
        except KeyError:
          # No fill rule for this alias -> report it all at once below.
          errors.append(alias)
      else:
        continue
    db.session.commit()
    self.assertEqual(errors, [],
                     'These columns are not filled for model: {}. '
                     'Need to add rule for these into '
                     'FIELDS_FILLED_RULES'.format(
                         ', '.join(errors)))
    return obj

  def setUp(self):
    super(TestFullFilledModelExport, self).setUp()
    self.api = api_helper.Api()
    self.client.get('/login')
    self.user = all_models.Person.query.filter(
        all_models.Person.email == "*****@*****.**"
    ).one()

  @ddt.data(
      *get_exportables()
  )
  def test_full_filled_model_export(self, model):
    """Test export of {0.__name__} with all filled columns

    We defined dict with fillable fields from all models. So since we get
    some new column in model we need define it in `attrs_dict`

    Raises:
      AssertionError:
        1. Raised when column isn't present in `attrs_dict`. So we need
           to add rules for this column to dict.
        2. Raised when some columns are missed for export. So we need
           fix rules for columns in `attrs_dict`.
    """
    model_name = model.__name__
    obj = self.build_object(model)
    # Export exactly the object we just built, with all fields.
    data = [{
        "object_name": model_name,
        "filters": {
            "expression": {
                'left': 'id',
                'op': {'name': '='},
                'right': obj.id,
            }
        },
        "fields": "all",
    }]
    response = self.export_csv(data)
    response_data = response.data
    self.assert_full_filled_model(response_data)
    self.assert200(response)