def _setup_document(self):
  """Create a reference-URL document and map it to the stored parent.

  Returns:
    int: id of the newly created document.
  """
  with factories.single_commit():
    ref_doc = factories.DocumentReferenceUrlFactory()
    parent_obj = db.session.query(self.parent.__class__).get(self.parent_id)
    factories.RelationshipFactory(source=parent_obj, destination=ref_doc)
  return ref_doc.id
def test_filter_document_by_type(self, kind):
  """Documents can be filtered by their ``kind`` through the query API."""
  # One document of each kind; only the one matching ``kind`` must be found.
  expected_ids = {
      all_models.Document.FILE: factories.DocumentFileFactory().id,
      all_models.Document.REFERENCE_URL:
          factories.DocumentReferenceUrlFactory().id,
  }
  request_payload = [{
      u'object_name': u'Document',
      u'type': u'values',
      u'permissions': u'read',
      u'fields': [],
      u'limit': [0, 5],
      u'filters': {
          u'expression': {
              u'left': u'kind',
              u'op': {u'name': u'='},
              u'right': kind,
          }
      },
  }]
  resp = self.api.send_request(
      self.api.client.post, data=request_payload, api_link="/query")
  document_block = resp.json[0]["Document"]
  self.assertEqual(1, document_block["count"])
  self.assertEqual(expected_ids[kind], document_block["values"][0]["id"])
def map(self, document=None):
  """Map a document to the stored parent object.

  Args:
    document: document to map; when falsy, a fresh reference-URL
      document is created instead.

  Returns:
    The created Relationship object.
  """
  parent_obj = self.parent.__class__.query.get(self.parent_id)
  if not document:
    document = factories.DocumentReferenceUrlFactory()
  return self.objgen.generate_relationship(
      source=parent_obj, destination=document)[0]
def setup_models(self, parent_name):
  """Create the document, the parent object and the relationship between them.

  Also assigns the test user to the parent with the role stored in
  ``self.acr``.
  """
  with factories.single_commit():
    ref_doc = factories.DocumentReferenceUrlFactory()
    self.document_id = ref_doc.id
    self.parent = self.build_parent(parent_name)
    self.parent_id = self.parent.id
    factories.RelationshipFactory(source=self.parent, destination=ref_doc)
    self.assign_person(self.parent, self.acr, self.user_id)
def test_document_ref_url_type_with_parent(self):
  """REFERENCE_URL document is mapped when created with ``parent_obj``."""
  control = factories.ControlFactory()
  ref_doc = factories.DocumentReferenceUrlFactory(
      description='mega description',
      parent_obj={'id': control.id, 'type': 'Control'},
  )
  related = control.related_objects(_types=[ref_doc.type])
  self.assertEqual(ref_doc, related.pop())
def test_reference_url(self):
  """Updating a reference URL must not reset the review state."""
  with factories.single_commit():
    control = factories.ControlFactory()
    ref_doc = factories.DocumentReferenceUrlFactory(
        title="Simple title",
        link="some_url.com",
        description="mega description",
        parent_obj={"id": control.id, "type": "Control"},
    )
    review = factories.ReviewFactory(
        status=all_models.Review.STATES.REVIEWED, reviewable=control)
    review_id = review.id

  self.api.modify_object(ref_doc, {"link": "new_link.com"})

  # The review of the parent control must still be in REVIEWED state.
  refreshed_review = all_models.Review.query.get(review_id)
  self.assertEqual(all_models.Review.STATES.REVIEWED, refreshed_review.status)
def test_import_control_with_doc_url_existing(self):
  """Importing an already-mapped reference URL is ignored without warnings.

  No duplicate document may be created, and the import response must not
  contain row warnings.
  """
  doc_reference_url = "test_reference_url"
  with factories.single_commit():
    control = factories.ControlFactory()
    control_slug = control.slug
    doc = factories.DocumentReferenceUrlFactory(link=doc_reference_url)
    factories.RelationshipFactory(source=control, destination=doc)

  response = self.import_data(collections.OrderedDict([
      ("object_type", "Control"),
      ("Code*", control_slug),
      ("Reference Url", doc_reference_url),
  ]))

  documents = all_models.Document.query.filter_by(
      link=doc_reference_url).all()
  # assertEquals is a deprecated unittest alias; use assertEqual.
  self.assertEqual(1, len(documents))
  self.assertEqual([], response[0]['row_warnings'])
def test_changing_log_on_doc_change(self):
  """Mapping/unmapping a reference URL creates new revisions of the object."""
  url_link = u"www.foo.com"
  with factories.single_commit():
    control = factories.ControlFactory()
    url = factories.DocumentReferenceUrlFactory(link=url_link)

  def control_revisions():
    """Return all revisions of the control, newest first."""
    return all_models.Revision.query.filter(
        all_models.Revision.resource_id == control.id,
        all_models.Revision.resource_type == control.type,
    ).order_by(all_models.Revision.id.desc()).all()

  initial_count = len(control_revisions())

  # Attach the url document to the control.
  response = self.client.post(
      self.REL_URL,
      data=self.build_relationship_json(control, url),
      headers=self.HEADERS)
  self.assert200(response)
  relationship = all_models.Relationship.query.get(
      response.json[0][-1]["relationship"]["id"])

  # Mapping must add exactly one revision whose content lists the url.
  revisions = control_revisions()
  self.assertEqual(initial_count + 1, len(revisions))
  url_list = revisions[0].content.get("documents_reference_url") or []
  self.assertEqual(1, len(url_list))
  self.assertIn("link", url_list[0])
  self.assertEqual(url_link, url_list[0]["link"])

  # Unmapping must add another revision with an empty url list.
  self.assert200(self.api.delete(relationship))
  revisions = control_revisions()
  self.assertEqual(initial_count + 2, len(revisions))
  url_list = revisions[0].content.get("documents_reference_url") or []
  self.assertEqual(url_list, [])
def test_update_reference_url(self):
  """Reference Url is updated properly via import.

  After importing a new url for control1, it must end up with a single
  reference-url document carrying the new link.
  """
  doc_url = "test_gdrive_url"
  with factories.single_commit():
    control1 = factories.ControlFactory()
    control1_slug = control1.slug
    control2 = factories.ControlFactory()
    # One document shared by two controls.
    doc = factories.DocumentReferenceUrlFactory(link=doc_url)
    factories.RelationshipFactory(source=control1, destination=doc)
    factories.RelationshipFactory(source=control2, destination=doc)

  self.import_data(collections.OrderedDict([
      ("object_type", "Control"),
      ("Code*", control1_slug),
      ("Reference Url", "new_gdrive_url"),
  ]))

  control1 = all_models.Control.query.filter_by(slug=control1_slug).one()
  # assertEquals is a deprecated unittest alias; use assertEqual.
  self.assertEqual(1, len(control1.documents_reference_url))
  self.assertEqual("new_gdrive_url",
                   control1.documents_reference_url[0].link)
def test_search_by_reference_url(self):
  """Search audit-related snapshots of Control type by Reference URL."""
  expected_ref_url = "xxx"
  with factories.single_commit():
    audit = factories.AuditFactory()
    audit_id = audit.id
    doc1 = factories.DocumentReferenceUrlFactory(
        link=expected_ref_url, title=expected_ref_url)
    doc_id1 = doc1.id
    doc2 = factories.DocumentReferenceUrlFactory(link="yyy", title="yyy")
    doc_id2 = doc2.id
    control = factories.ControlFactory()
    control_id = control.id

  # Map both documents and the audit to the control.  The three POSTs
  # were identical copy-pasted blocks; a loop keeps them in one place.
  for dest_id, dest_type in ((doc_id1, doc1.type),
                             (doc_id2, doc2.type),
                             (audit_id, audit.type)):
    response = self.api.post(all_models.Relationship, {
        "relationship": {
            "source": {"id": control_id, "type": control.type},
            "destination": {"id": dest_id, "type": dest_type},
            "context": None,
        },
    })
    self.assertStatus(response, 201)

  # Snapshots of Control, relevant to the audit, whose Reference URL
  # contains the expected value and whose status is in the given list.
  query_request_data = [{
      "object_name": "Snapshot",
      "filters": {
          "expression": {
              "left": {
                  "left": "child_type",
                  "op": {"name": "="},
                  "right": "Control",
              },
              "op": {"name": "AND"},
              "right": {
                  "left": {
                      "object_name": "Audit",
                      "op": {"name": "relevant"},
                      "ids": [audit_id],
                  },
                  "op": {"name": "AND"},
                  "right": {
                      "left": {
                          "left": "Reference URL",
                          "op": {"name": "~"},
                          "right": expected_ref_url,
                      },
                      "op": {"name": "AND"},
                      "right": {
                          "left": "Status",
                          "op": {"name": "IN"},
                          "right": ["Active", "Draft", "Deprecated"],
                      },
                  },
              },
          }
      },
  }]
  response = self.api.send_request(
      self.api.client.post, data=query_request_data, api_link="/query")
  self.assert200(response)
  # assertEquals is a deprecated unittest alias; use assertEqual.
  self.assertEqual(1, response.json[0]["Snapshot"]["count"])
class TestFullFilledModelExport(TestCase):
  """Test export of models with every exportable column filled.

  ``FIELDS_FILLED_RULES`` maps a column alias to a callable that fills the
  corresponding attribute (or creates the corresponding mapping) on the
  object under test; ``build_object`` walks a model's aliases and applies
  the matching rule to each one.
  """
  # pylint: disable=undefined-variable

  # Alias -> filler. Each lambda receives obj/user via **kwargs.
  FIELDS_FILLED_RULES = {
      'archived': lambda **kwargs: set_attribute(
          kwargs['obj'], 'archived', False
      ),
      'assertions': lambda **kwargs: set_attribute(
          kwargs['obj'], 'assertions', 'assertions'
      ),
      'assessment_template': lambda **kwargs: set_attribute(
          kwargs['obj'], 'assessment_template', 'assessment_template'
      ),
      'assessment_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'assessment_type', 'Control'
      ),
      'audit': lambda **kwargs: TestFullFilledModelExport._map_object(
          source=kwargs['obj'], destination=kwargs['obj'].audit
      ),
      'categories': lambda **kwargs: set_attribute(
          kwargs['obj'], 'categories', '["categories"]'
      ),
      'comments': lambda **kwargs: TestFullFilledModelExport._map_object(
          source=kwargs['obj'], destination=factories.CommentFactory(
              description='description', assignee_type='Admin'
          )
      ),
      'company': lambda **kwargs: set_attribute(
          kwargs['obj'], 'company', 'company'
      ),
      # One issue-tracker row fills all the issue-tracker-related columns.
      'component_id': lambda **kwargs: factories.IssueTrackerIssueFactory(
          enabled=True,
          issue_tracked_obj=kwargs['obj'],
          issue_id=123,
          issue_type="PROCESS",
          component_id=12345,
          hotlist_id=12345,
          issue_priority="P2",
          issue_severity="S2",
          issue_url="somelink",
      ),
      'contact': lambda **kwargs: set_attribute(
          kwargs['obj'], 'contact', kwargs['user']
      ),
      'created_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'created_at', datetime.date(2019, 9, 24)
      ),
      'created_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'created_by_id', kwargs['user'].id
      ),
      'cycle': lambda **kwargs: set_attribute(
          kwargs['obj'], 'cycle', 'cycle'
      ),
      'cycle_task_group': lambda **kwargs: set_attribute(
          kwargs['obj'], 'cycle_task_group', 'cycle_task_group'
      ),
      'cycle_workflow': lambda **kwargs: set_attribute(
          kwargs['obj'], 'cycle_workflow', 'cycle_workflow'
      ),
      'default_assignees': lambda **kwargs: set_attribute(
          kwargs['obj'], 'default_assignees', kwargs['user']
      ),
      'default_verifier': lambda **kwargs: set_attribute(
          kwargs['obj'], 'default_verifier', kwargs['user']
      ),
      'directive': lambda **kwargs: set_attribute(
          kwargs['obj'], 'directive', 'directive'
      ),
      'delete': lambda **kwargs: set_attribute(
          kwargs['obj'], 'delete', 'delete'
      ),
      'description': lambda **kwargs: set_attribute(
          kwargs['obj'], 'description', 'description'
      ),
      'design': lambda **kwargs: set_attribute(
          kwargs['obj'], 'design', 'Effective'
      ),
      'documents_file': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.DocumentFileFactory(link='link'),
              destination=kwargs['obj'],
          ),
      'documents_reference_url': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.DocumentReferenceUrlFactory(link='link'),
              destination=kwargs['obj'],
          ),
      'due_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'due_date', datetime.date(2019, 9, 24)
      ),
      'email': lambda **kwargs: set_attribute(
          kwargs['obj'], 'email', '*****@*****.**'
      ),
      'enabled': lambda **kwargs: set_attribute(
          kwargs['obj'], 'enabled', True
      ),
      'end_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'end_date', datetime.date(2019, 9, 24)
      ),
      'evidences_file': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.EvidenceFileFactory(),
              destination=kwargs['obj'],
          ),
      'evidences_url': lambda **kwargs:
          TestFullFilledModelExport._map_object(
              source=factories.EvidenceUrlFactory(),
              destination=kwargs['obj'],
          ),
      'finished_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'finished_date', datetime.date(2019, 9, 24)
      ),
      'folder': lambda **kwargs: set_attribute(
          kwargs['obj'], 'folder', 'folder'
      ),
      'fraud_related': lambda **kwargs: set_attribute(
          kwargs['obj'], 'fraud_related', True
      ),
      'hotlist_id': lambda **kwargs: set_attribute(
          kwargs['obj'], 'hotlist_id', 'hotlist_id'
      ),
      'is_verification_needed': lambda **kwargs: set_attribute(
          kwargs['obj'], 'is_verification_needed', True
      ),
      'issue_priority': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_priority', 'issue_priority'
      ),
      'issue_severity': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_severity', 'issue_severity'
      ),
      'issue_title': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_title', 'issue_title'
      ),
      'issue_tracker': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_tracker', 'issue_tracker'
      ),
      'issue_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'issue_type', 'issue_type'
      ),
      'key_control': lambda **kwargs: set_attribute(
          kwargs['obj'], 'key_control', True
      ),
      # NOTE(review): the filter compares the Option *class* to a string
      # ('all_models.Option == ...'), which likely never matches and makes
      # .first() return None -- presumably a column such as Option.role was
      # intended. TODO confirm against the Option model.
      'kind': lambda **kwargs: set_attribute(
          kwargs['obj'], 'kind',
          all_models.Option.query.filter(
              all_models.Option == 'product_type'
          ).first()
      ),
      'labels': lambda **kwargs: factories.ObjectLabelFactory(
          labeled_object=kwargs['obj'],
          label=factories.LabelFactory(
              object_type=kwargs['obj'].__tablename__
          ),
      ),
      'last_assessment_date': lambda **kwargs:
          TestFullFilledModelExport._create_attributes(
              kwargs['obj'], 1
          ),
      'last_comment': lambda **kwargs:
          TestFullFilledModelExport._create_attributes(
              kwargs['obj'], 3
          ),
      'last_deprecated_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_deprecated_date', datetime.date(2019, 9, 24)
      ),
      'last_submitted_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_submitted_at', datetime.date(2019, 9, 24)
      ),
      'last_submitted_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_submitted_by_id', kwargs['user'].id
      ),
      'last_verified_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_verified_at', datetime.date(2019, 9, 24)
      ),
      'last_verified_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'last_verified_by_id', kwargs['user'].id
      ),
      'means': lambda **kwargs: set_attribute(kwargs['obj'], 'means', 'means'),
      'modified_by': lambda **kwargs: set_attribute(
          kwargs['obj'], 'modified_by', kwargs['user']
      ),
      'name': lambda **kwargs: set_attribute(kwargs['obj'], 'name', 'name'),
      # NOTE(review): same class-vs-string comparison as 'kind' above --
      # TODO confirm the intended Option column.
      'network_zone': lambda **kwargs: set_attribute(
          kwargs['obj'], 'network_zone',
          all_models.Option.query.filter(
              all_models.Option == 'network_zone'
          ).first()
      ),
      'notes': lambda **kwargs: set_attribute(kwargs['obj'], 'notes', 'notes'),
      'notify_custom_message': lambda **kwargs: set_attribute(
          kwargs['obj'], 'notify_custom_message', 'notify_custom_message'
      ),
      'notify_on_change': lambda **kwargs: set_attribute(
          kwargs['obj'], 'notify_on_change', True
      ),
      'operationally': lambda **kwargs: set_attribute(
          kwargs['obj'], 'operationally', 'Effective'
      ),
      'people_sync_enabled': lambda **kwargs: set_attribute(
          kwargs['obj'], 'people_sync_enabled', True
      ),
      'procedure_description': lambda **kwargs: set_attribute(
          kwargs['obj'], 'procedure_description', 'procedure description'
      ),
      'program': lambda **kwargs: set_attribute(
          kwargs['obj'], 'program', 'program'
      ),
      'readonly': lambda **kwargs: set_attribute(
          kwargs['obj'], 'readonly', True
      ),
      'recipients': lambda **kwargs: set_attribute(
          kwargs['obj'], 'recipients', 'recipients'
      ),
      'repeat_every': lambda **kwargs: set_attribute(
          kwargs['obj'], 'repeat_every', 1
      ),
      'report_end_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'report_end_date', datetime.date(2019, 9, 24)),
      'report_start_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'report_start_date', datetime.date(2019, 8, 20)
      ),
      'review_status': lambda **kwargs: set_attribute(
          kwargs['obj'], 'review_status', 'review status'
      ),
      'review_status_display_name': lambda **kwargs: set_attribute(
          kwargs['obj'], 'review_status_display_name',
          'review status display name'
      ),
      'reviewers': lambda **kwargs: TestFullFilledModelExport._create_acl(
          'Reviewers',
          factories.ReviewFactory(reviewable=kwargs['obj']),
          kwargs['user'],
      ),
      'risk_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'risk_type', 'risk_type'
      ),
      'secondary_contact': lambda **kwargs: set_attribute(
          kwargs['obj'], 'secondary_contact', kwargs['user']
      ),
      'send_by_default': lambda **kwargs: set_attribute(
          kwargs['obj'], 'send_by_default', True
      ),
      'slug': lambda **kwargs: set_attribute(kwargs['obj'], 'slug', 'slug'),
      'sox_302_enabled': lambda **kwargs: set_attribute(
          kwargs['obj'], 'sox_302_enabled', True
      ),
      'start_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'start_date', datetime.date(2019, 8, 20)
      ),
      'status': lambda **kwargs: set_attribute(
          kwargs['obj'], 'status', 'In Progress'
      ),
      'task_group': lambda **kwargs: set_attribute(
          kwargs['obj'], 'task_group', 'task_group'
      ),
      'task_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'task_type', 'text'
      ),
      'template_custom_attributes': lambda **kwargs: set_attribute(
          kwargs['obj'], 'template_custom_attributes', 'adsasd'
      ),
      'template_object_type': lambda **kwargs: set_attribute(
          kwargs['obj'], 'template_object_type', 'Objective'
      ),
      'test_plan': lambda **kwargs: set_attribute(
          kwargs['obj'], 'test_plan', 'test_plan'
      ),
      'test_plan_procedure': lambda **kwargs: set_attribute(
          kwargs['obj'], 'test_plan_procedure', True
      ),
      'threat_event': lambda **kwargs: set_attribute(
          kwargs['obj'], 'threat_event', 'threat event'
      ),
      'threat_source': lambda **kwargs: set_attribute(
          kwargs['obj'], 'threat_source', 'threat source'
      ),
      'title': lambda **kwargs: set_attribute(kwargs['obj'], 'title', 'title'),
      'unit': lambda **kwargs: set_attribute(
          kwargs['obj'], 'unit', all_models.Workflow.DAY_UNIT
      ),
      'updated_at': lambda **kwargs: set_attribute(
          kwargs['obj'], 'updated_at', datetime.date(2019, 8, 20)
      ),
      'user_role': lambda **kwargs: set_attribute(
          kwargs['obj'], 'user_role', 'user_role'
      ),
      'verified_date': lambda **kwargs: set_attribute(
          kwargs['obj'], 'verified_date', datetime.date(2019, 9, 24)
      ),
      'verify_frequency': lambda **kwargs: set_attribute(
          kwargs['obj'], 'verify_frequency', 'verify frequency'
      ),
      'vulnerability': lambda **kwargs: set_attribute(
          kwargs['obj'], 'vulnerability', 'vulnerability'
      ),
      'workflow': lambda **kwargs: set_attribute(
          kwargs['obj'], 'workflow', 'workflow'
      ),
  }

  @staticmethod
  def _create_attributes(obj, attribute_template_id):
    """Create an Attributes row for *obj* and commit it to the DB."""
    attr = all_models.Attributes(
        object_id=obj.id,
        object_type=obj.__class__.__name__,
        value_datetime=datetime.datetime(2019, 9, 26),
        value_string="last comment",
        attribute_template_id=attribute_template_id,
        updated_at=datetime.datetime.now(),
        created_at=datetime.datetime.now(),
    )
    db.session.add(attr)
    db.session.commit()

  @staticmethod
  def _map_object(source, destination):
    """Create a relationship between two objects.

    A ValidationError (e.g. an invalid/duplicate mapping for this model
    pair) is deliberately swallowed so filling can continue best-effort.
    """
    from ggrc.models import exceptions
    try:
      factories.RelationshipFactory(source=source, destination=destination)
    except exceptions.ValidationError:
      return

  def _map_snapshot(self, obj, destination):
    """Create a relationship between *destination* and a snapshot of *obj*.

    The snapshot's parent is *destination* itself when it is an Audit,
    otherwise ``destination.audit``.
    """
    revision = self._get_latest_object_revisions([obj])[0]
    parent = destination
    if not isinstance(parent, all_models.Audit):
      parent = destination.audit
    snapshot = factories.SnapshotFactory(
        child_id=revision.resource_id,
        child_type=revision.resource_type,
        revision=revision,
        parent=parent,
        parent_id=parent.id,
    )
    factories.RelationshipFactory(source=snapshot, destination=destination)

  @staticmethod
  def _create_acl(role, obj, user):
    """Assign *user* to *obj* under the access-control role named *role*."""
    ac_role = all_models.AccessControlRole.query.filter(
        all_models.AccessControlRole.name == role,
        all_models.AccessControlRole.object_type == obj.__class__.__name__,
    ).one()
    factories.AccessControlPersonFactory(
        ac_list=obj.acr_acl_map[ac_role],
        person=user,
    )

  @staticmethod
  def _set_field(alias, obj, user):
    """Fill one field of *obj* using its FIELDS_FILLED_RULES entry.

    Raises:
      KeyError: if no rule is defined for *alias*.
    """
    kwargs = {'obj': obj, 'user': user}
    set_attr = TestFullFilledModelExport.FIELDS_FILLED_RULES[alias]
    set_attr(**kwargs)

  @staticmethod
  def _get_aliases(model):
    """Return the column-definition aliases for the provided model."""
    return [c for c in import_helper.get_object_column_definitions(model)]

  def assert_full_filled_model(self, data):
    """Assert that all exported columns carry a value.

    Args:
      data: raw CSV export response body.

    Raises:
      AssertionError: if any required column of the model is empty.
    """
    errors = []
    # These columns are import-only and never carry an exported value.
    ignore = ['Delete', 'Comments',
              'Policy / Regulation / Standard / Contract',
              'Template', 'Custom Attributes']
    rows = csv.reader(StringIO(data))
    # Only the first three rows matter: section header, column names, values.
    rows = [r for r in rows][:3]
    # NOTE(review): *title* is only bound inside the 'Object type' branch;
    # if that column never appears the final assert would raise NameError
    # instead of a clean failure -- TODO confirm export layout guarantees it.
    for top, column_name, field_value in zip(*rows):
      if column_name.startswith('unmap:') or column_name in ignore:
        continue
      elif top == 'Object type':
        title = column_name
        continue
      if field_value == '':
        errors.append(column_name)
    self.assertEqual(errors, [],
                     'These columns for {0} are not exported: {1}'.format(
                         title, ', '.join(errors)))

  def build_object(self, model):
    """Create a *model* instance and fill every exportable field.

    Mapping aliases create relationships, snapshot aliases create snapshot
    mappings, ACL aliases assign the test user; everything else goes through
    FIELDS_FILLED_RULES. Aliases with no rule are collected and reported.
    """
    errors = []
    obj = factories.get_model_factory(model.__name__)()
    aliases = sorted(self._get_aliases(model))
    for alias in aliases:
      if alias.startswith('__mapping__'):
        title = alias.split(':')[1]
        mapped_model = ''.join([part.title() for part in title.split()])
        destination_obj = factories.get_model_factory(mapped_model)()
        self._map_object(source=obj, destination=destination_obj)
      elif alias.startswith('__snapshot_mapping__'):
        title = alias.split(':')[1]
        mapped_model = ''.join([part.title() for part in title.split()])
        destination_obj = factories.get_model_factory(mapped_model)()
        self._map_snapshot(destination_obj, obj)
      elif alias.startswith('__acl__'):
        role = alias.split(':')[1]
        self._create_acl(role, obj, self.user)
      elif not alias.startswith('__'):
        try:
          self._set_field(alias, obj, self.user)
        except KeyError:
          # No filler rule defined for this alias -- report it below.
          errors.append(alias)
      else:
        continue
    db.session.commit()
    self.assertEqual(errors, [],
                     'These columns are not filled for model: {}. '
                     'Need to add rule for these into '
                     'FIELDS_FILLED_RULES'.format(
                         ', '.join(errors)))
    return obj

  def setUp(self):
    """Log in and cache the default test user."""
    super(TestFullFilledModelExport, self).setUp()
    self.api = api_helper.Api()
    self.client.get('/login')
    self.user = all_models.Person.query.filter(
        all_models.Person.email == "*****@*****.**"
    ).one()

  @ddt.data(
      *get_exportables()
  )
  def test_full_filled_model_export(self, model):
    """Test export of {0.__name__} with all filled columns.

    We defined a dict with fillable fields for all models, so whenever a
    model gains a new column it must be defined in FIELDS_FILLED_RULES.

    Raises:
      AssertionError:
        1. Raised when a column isn't present in FIELDS_FILLED_RULES --
           a rule for that column must be added.
        2. Raised when some columns are missing from the export --
           the rules for those columns must be fixed.
    """
    model_name = model.__name__
    obj = self.build_object(model)
    data = [{
        "object_name": model_name,
        "filters": {
            "expression": {
                'left': 'id',
                'op': {'name': '='},
                'right': obj.id,
            }
        },
        "fields": "all",
    }]
    response = self.export_csv(data)
    response_data = response.data
    self.assert_full_filled_model(response_data)
    self.assert200(response)