def construct_mapping(self, node, deep=False):
  """Overrides default mapper with a version that detects duplicate keys.

  Args:
    node: yaml MappingNode to convert into a dict.
    deep: if True, child objects are constructed eagerly.

  Returns:
    dict built from the mapping node.

  Raises:
    yaml.constructor.ConstructorError: if node is not a mapping node.
    ParseError: wrapping BadKeyError for an unhashable key or
      DuplicateKeyError for a repeated key.
  """
  if not isinstance(node, yaml.nodes.MappingNode):
    raise yaml.constructor.ConstructorError(
        None, None, 'Expected a mapping node, but found %s' % node.id,
        node.start_mark)

  def _key_context(key_node):
    # Context points at the mapping's file/first line plus the key's own line.
    m1 = node.start_mark
    m2 = key_node.start_mark
    return findings_lib.FileContext(m1.name, m1.line, m2.line)

  mapping = {}
  for key_node, value_node in node.value:
    key = self.construct_object(key_node, deep=deep)
    try:
      hash(key)
    except TypeError:
      raise ParseError(
          findings_lib.BadKeyError(key, _key_context(key_node)))
    # check for duplicate keys
    if key in mapping:
      raise ParseError(
          findings_lib.DuplicateKeyError(key, _key_context(key_node)))
    mapping[key] = self.construct_object(value_node, deep=deep)
  return mapping
def testFindingsHandlesDeduplication(self):
  """Deduplication keeps the higher-priority finding of each pair."""

  def make(message, *args):
    # Each finding gets its own FileContext, as in real usage.
    return findings_lib.Finding(message, findings_lib.FileContext('f'), *args)

  high = make('high', 2, 2, 2, 'key', False)
  med = make('med', 3, 2, 2, 'key', False)
  low = make('low', 3, 3, 2, 'key', False)
  bottom = make('bottom', 3, 3, 3, 'key', False)
  master = make('master', 3, 3, 3, 'key', True)

  # Check each paired sort, working from lowest to highest
  for winner, loser in ((master, bottom), (low, bottom), (med, low),
                        (high, med)):
    self.findings_class.AddFinding(winner)
    self.findings_class.AddFinding(loser)
    findings = self.findings_class.GetFindings()
    self.assertLen(findings, 1)
    self.assertEqual(winner.message, findings[0].message)
def testOrganizeFindingsByFile(self):
  """Findings are grouped into one list per source filepath."""
  paths = ('path1', 'path1', 'path2', 'path3')
  findings_list = [
      findings_lib.Finding('error{0}'.format(i + 1),
                           findings_lib.FileContext(filepath=path))
      for i, path in enumerate(paths)
  ]
  finding1, finding2, finding3, finding4 = findings_list

  findings_map = presubmit_validate_types_lib.OrganizeFindingsByFile(
      findings_list)

  self.assertLen(findings_map, 3)
  path1_list = findings_map.get('path1')
  self.assertLen(path1_list, 2)
  self.assertIn(finding1, path1_list)
  self.assertIn(finding2, path1_list)
  path2_list = findings_map.get('path2')
  self.assertLen(path2_list, 1)
  self.assertIn(finding3, path2_list)
  path3_list = findings_map.get('path3')
  self.assertLen(path3_list, 1)
  self.assertIn(finding4, path3_list)
def testFindingsAddMultiple(self):
  """Adding an error and a warning stores both and marks the set invalid."""
  error = findings_lib.ValidationError(
      'message', findings_lib.FileContext('filepath'))
  warning = findings_lib.ValidationWarning(
      'message', findings_lib.FileContext('filepath'))

  self.findings_class.AddFinding(error)
  self.findings_class.AddFinding(warning)

  self.assertLen(self.findings_class.GetFindings(), 2)
  self.assertFalse(self.findings_class.IsValid())
def testFindingsAddBadList(self):
  """AddFindings rejects a list containing a non-Finding element."""
  mixed_items = [
      findings_lib.ValidationError(
          'message', findings_lib.FileContext('filepath')),
      'some string',
      findings_lib.ValidationWarning(
          'message', findings_lib.FileContext('filepath')),
  ]
  with self.assertRaises(TypeError):
    self.findings_class.AddFindings(mixed_items)
def testFindingsAddGoodList(self):
  """AddFindings accepts a homogeneous list of findings."""
  valid_items = [
      findings_lib.ValidationError(
          'message', findings_lib.FileContext('filepath')),
      findings_lib.ValidationWarning(
          'message', findings_lib.FileContext('filepath')),
  ]
  self.findings_class.AddFindings(valid_items)

  self.assertLen(self.findings_class.GetFindings(), 2)
  self.assertFalse(self.findings_class.IsValid())
def testCreateSubfieldNoDescription(self):
  """An empty description produces MissingSubfieldDescriptionWarning."""
  file_context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  subfield = subfield_lib.Subfield(
      'good', subfield_lib.SubfieldCategory.DESCRIPTOR, '', file_context)
  self.assertIsInstance(subfield.GetFindings()[0],
                        findings_lib.MissingSubfieldDescriptionWarning)
def testConnectionUniverseGetFindings(self):
  """ConnectionUniverse surfaces findings from folder, namespace and item."""
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  folder = connection_lib.ConnectionFolder(_GOOD_PATH)
  folder.AddFinding(findings_lib.InconsistentFileLocationError('', context))
  namespace = folder.local_namespace
  namespace.AddFinding(
      findings_lib.DuplicateConnectionDefinitionError(
          connection_lib.Connection('FEEDS'), base_lib.GLOBAL_NAMESPACE))
  connection = connection_lib.Connection('FEEDS', 'description')
  connection.AddFinding(
      findings_lib.MissingConnectionDescriptionWarning(connection))
  namespace.InsertConnection(connection)

  universe = connection_lib.ConnectionUniverse([folder])

  self.assertLen(universe.GetFindings(), 3)
  self.assertTrue(
      universe.HasFindingTypes([
          findings_lib.InconsistentFileLocationError,
          findings_lib.DuplicateConnectionDefinitionError,
          findings_lib.MissingConnectionDescriptionWarning,
      ]))
  self.assertFalse(universe.IsValid())
def testAddValidSubfield(self):
  """A well-formed subfield ends up in the folder's local namespace."""
  folder = subfield_lib.SubfieldFolder(_GOOD_PATH)
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  subfield = subfield_lib.Subfield(
      'good', subfield_lib.SubfieldCategory.DESCRIPTOR, 'hi', context)
  folder.AddSubfield(subfield)
  self.assertIn('good', folder.local_namespace.subfields)
def testFindingsAddValidationWarning(self):
  """A lone warning is recorded but does not invalidate the container."""
  warning = findings_lib.ValidationWarning(
      'message', findings_lib.FileContext('filepath'))
  self.findings_class.AddFinding(warning)

  self.assertLen(self.findings_class.GetFindings(), 1)
  self.assertTrue(self.findings_class.IsValid())
def testCreateSubfieldIllegalName(self):
  """An illegal subfield name produces InvalidSubfieldNameError."""
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  subfield = subfield_lib.Subfield(
      'goo-1d', subfield_lib.SubfieldCategory.DESCRIPTOR, 'hi', context)
  self.assertIsInstance(subfield.GetFindings()[0],
                        findings_lib.InvalidSubfieldNameError)
def testFieldsUniverseGetFindings(self):
  """FieldUniverse surfaces findings from folder, namespace and field."""
  context = findings_lib.FileContext('{0}/file.yaml'.format(_GOOD_PATH))
  folder = field_lib.FieldFolder(_GOOD_PATH)
  folder.AddFinding(findings_lib.InconsistentFileLocationError('', context))
  namespace = folder.local_namespace
  namespace.AddFinding(
      findings_lib.UnrecognizedSubfieldError(
          ['any'], field_lib.Field('two', file_context=context)))
  field = field_lib.Field('one', file_context=context)
  # Currently there are no warnings for fields, so using a subfield warning
  field.AddFinding(
      findings_lib.MissingSubfieldDescriptionWarning('one', context))
  namespace.InsertField(field)

  universe = field_lib.FieldUniverse([folder])

  self.assertLen(universe.GetFindings(), 3)
  self.assertTrue(
      universe.HasFindingTypes([
          findings_lib.InconsistentFileLocationError,
          findings_lib.UnrecognizedSubfieldError,
          findings_lib.MissingSubfieldDescriptionWarning,
      ]))
  self.assertFalse(universe.IsValid())
def testSubfieldUniverseGetFindings(self):
  """SubfieldUniverse surfaces findings from folder, namespace and item."""
  context = findings_lib.FileContext('{0}/file.yaml'.format(_GOOD_PATH))
  folder = subfield_lib.SubfieldFolder(_GOOD_PATH)
  folder.AddFinding(findings_lib.InconsistentFileLocationError('', context))
  namespace = folder.local_namespace
  namespace.AddFinding(
      findings_lib.DuplicateSubfieldDefinitionError(
          namespace,
          subfield_lib.Subfield('two',
                                subfield_lib.SubfieldCategory.POINT_TYPE),
          context))
  subfield = subfield_lib.Subfield(
      'one', subfield_lib.SubfieldCategory.POINT_TYPE, 'thing')
  subfield.AddFinding(
      findings_lib.MissingSubfieldDescriptionWarning('one', context))
  namespace.InsertSubfield(subfield)

  universe = subfield_lib.SubfieldUniverse([folder])

  self.assertLen(universe.GetFindings(), 3)
  self.assertTrue(
      universe.HasFindingTypes([
          findings_lib.InconsistentFileLocationError,
          findings_lib.DuplicateSubfieldDefinitionError,
          findings_lib.MissingSubfieldDescriptionWarning,
      ]))
  self.assertFalse(universe.IsValid())
def testEntityTypeUniverseGetFindings(self):
  """EntityTypeUniverse surfaces findings from folder, namespace and type."""
  filepath = _GOOD_PATH + '/file.yaml'
  context = findings_lib.FileContext(filepath)
  folder = entity_type_lib.EntityTypeFolder(_GOOD_PATH)
  folder.AddFinding(findings_lib.InconsistentFileLocationError('', context))
  namespace = folder.local_namespace
  namespace.AddFinding(findings_lib.IllegalCharacterError('two', context))
  # This will generate a MissingDescriptionWarning on itself
  entity_type = entity_type_lib.EntityType(typename='one', filepath=filepath)
  namespace.InsertType(entity_type)

  universe = entity_type_lib.EntityTypeUniverse([folder])

  self.assertLen(universe.GetFindings(), 3)
  self.assertTrue(
      universe.HasFindingTypes([
          findings_lib.InconsistentFileLocationError,
          findings_lib.IllegalCharacterError,
          findings_lib.MissingDescriptionWarning,
      ]))
  self.assertFalse(universe.IsValid())
def AddFromConfig(self, documents, config_filename):
  """Reads the list of extracted yaml documents and adds all ontology items
  found.

  Method checks that config_filename is a path in the correct folder.
  Valid items are added to the appropriate namespace objects.
  Findings are saved on objects if found. Errors do not halt processing.

  Args:
    documents: output of a yaml safe_load_all()
    config_filename: relative path to the yaml file from config root

  Raises:
    RuntimeError: if the path is not valid for the component type
  """
  context = findings_lib.FileContext(config_filename)
  if documents is not None:
    # Validate that the ontology item is from the correct folder
    if self._IsYamlUnderThisFolder(config_filename):
      for document in documents:
        self._AddFromConfigHelper(document, context)
    else:
      self.AddFinding(
          findings_lib.InconsistentFileLocationError(
              self._folderpath + r'/*.yaml', context))
  else:
    # An empty file is only a warning, not an error.
    self.AddFinding(findings_lib.EmptyFileWarning(context))
def testUnitUniverseGetFindings(self):
  """UnitUniverse surfaces findings from folder, namespace and unit."""
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  folder = unit_lib.UnitFolder(_GOOD_PATH)
  folder.AddFinding(findings_lib.InconsistentFileLocationError('', context))
  namespace = folder.local_namespace
  namespace.AddFinding(
      findings_lib.DuplicateUnitDefinitionError(
          unit_lib.Unit('unit', 'measurement'), 'namespace'))
  unit = unit_lib.Unit('unit', 'measurement')
  unit.AddFinding(findings_lib.UnknownUnitTagError(unit.name, 'tag', context))
  namespace.InsertUnit(unit)

  universe = unit_lib.UnitUniverse([folder])

  self.assertLen(universe.GetFindings(), 3)
  self.assertTrue(
      universe.HasFindingTypes([
          findings_lib.InconsistentFileLocationError,
          findings_lib.DuplicateUnitDefinitionError,
          findings_lib.UnknownUnitTagError,
      ]))
  self.assertFalse(universe.IsValid())
def testConfigUniverse(self):
  """ConfigUniverse rolls up findings from each component universe."""
  context = findings_lib.FileContext('')

  type_universe = entity_type_lib.EntityTypeUniverse([])
  type_universe.AddFinding(
      findings_lib.IllegalCharacterError('stuff', context))
  field_universe = field_lib.FieldUniverse([])
  field_universe.AddFinding(
      findings_lib.InconsistentFileLocationError('', context))
  subfield_universe = subfield_lib.SubfieldUniverse([])
  subfield_universe.AddFinding(
      findings_lib.CapitalizationError('Hi', context))
  connection_universe = connection_lib.ConnectionUniverse([])
  connection_universe.AddFinding(
      findings_lib.InvalidConnectionNamespaceError('notglobal', context))

  config_universe = presubmit_validate_types_lib.ConfigUniverse(
      subfield_universe=subfield_universe,
      field_universe=field_universe,
      entity_type_universe=type_universe,
      state_universe=state_lib.StateUniverse([]),
      connection_universe=connection_universe,
      unit_universe=unit_lib.UnitUniverse([]))

  self.assertLen(config_universe.GetFindings(), 4)
  self.assertTrue(
      config_universe.HasFindingTypes([
          findings_lib.InconsistentFileLocationError,
          findings_lib.IllegalCharacterError,
          findings_lib.CapitalizationError,
          findings_lib.InvalidConnectionNamespaceError,
      ]))
  self.assertFalse(config_universe.IsValid())
def testConfigUniverseGetEntityType(self):
  """GetEntityType returns None for an unknown namespace/typename pair."""
  context = findings_lib.FileContext('')

  type_universe = entity_type_lib.EntityTypeUniverse([])
  type_universe.AddFinding(
      findings_lib.IllegalCharacterError('stuff', context))
  field_universe = field_lib.FieldUniverse([])
  field_universe.AddFinding(
      findings_lib.InconsistentFileLocationError('', context))
  subfield_universe = subfield_lib.SubfieldUniverse([])
  subfield_universe.AddFinding(
      findings_lib.CapitalizationError('Hi', context))

  config_universe = presubmit_validate_types_lib.ConfigUniverse(
      subfield_universe=subfield_universe,
      field_universe=field_universe,
      entity_type_universe=type_universe,
      state_universe=state_lib.StateUniverse([]),
      connection_universe=connection_lib.ConnectionUniverse([]),
      unit_universe=unit_lib.UnitUniverse([]))

  self.assertIsNone(
      config_universe.GetEntityType('NONEXISTENT', 'NONEXISTENT'))
def testAddInvalidSubfieldFails(self):
  """A name with illegal characters is rejected with IllegalCharacterError."""
  folder = subfield_lib.SubfieldFolder(_GOOD_PATH)
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  folder.AddSubfield(
      subfield_lib.Subfield('1-bad', subfield_lib.SubfieldCategory.DESCRIPTOR,
                            'hi', context))
  self.assertIsInstance(folder.GetFindings()[0],
                        findings_lib.IllegalCharacterError)
def testAddSubfieldWithUpperFails(self):
  """A name containing an uppercase letter yields InvalidSubfieldNameError."""
  folder = subfield_lib.SubfieldFolder(_GOOD_PATH)
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  subfield = subfield_lib.Subfield(
      'gOod', subfield_lib.SubfieldCategory.DESCRIPTOR, 'hi', context)
  folder.AddSubfield(subfield)
  self.assertIsInstance(folder.GetFindings()[0],
                        findings_lib.InvalidSubfieldNameError)
def testAddDuplicateSubfieldFails(self):
  """Inserting a second subfield with the same name records a finding."""
  folder = subfield_lib.SubfieldFolder(_GOOD_PATH)
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  first = subfield_lib.Subfield(
      'good', subfield_lib.SubfieldCategory.DESCRIPTOR, 'hi', context)
  duplicate = subfield_lib.Subfield(
      'good', subfield_lib.SubfieldCategory.POINT_TYPE, 'hi2', context)

  folder.AddSubfield(first)
  self.assertEmpty(folder.local_namespace.GetFindings())

  folder.AddSubfield(duplicate)
  self.assertIsInstance(folder.local_namespace.GetFindings()[0],
                        findings_lib.DuplicateSubfieldDefinitionError)
def testAddFieldRejectsDuplicateSubfields(self):
  """A field name that repeats a subfield is rejected with a finding."""
  folder = field_lib.FieldFolder('/fields')
  context = findings_lib.FileContext('/fields/f.yaml')

  folder.AddField(field_lib.Field('field_name', file_context=context))
  self.assertEmpty(folder.GetFindings())

  # 'field_field_name' contains the subfield 'field' twice.
  folder.AddField(field_lib.Field('field_field_name', file_context=context))
  self.assertLen(folder.GetFindings(), 1)
  self.assertIsInstance(folder.GetFindings()[0],
                        findings_lib.DuplicateSubfieldError)
def __init__(self, folderpath: str):
  """Creates a ConnectionFolder.

  Args:
    folderpath: required str with full path to folder containing connections.
  """
  super(ConnectionFolder, self).__init__(folderpath,
                                         base_lib.ComponentType.CONNECTION)
  self.local_namespace = ConnectionNamespace(self._namespace_name)
  # Any namespace other than the global one is flagged as invalid.
  namespace_name = self.local_namespace.namespace
  if namespace_name != base_lib.GLOBAL_NAMESPACE:
    self.local_namespace.AddFinding(
        findings_lib.InvalidConnectionNamespaceError(
            namespace_name, findings_lib.FileContext(folderpath)))
def __init__(self,
             begin_line_number=0,
             filepath='',
             typename='',
             description='',
             parents=None,
             local_field_tuples=None,
             is_abstract=False,
             inherited_fields_expanded=False,
             is_canonical=False,
             uid=None):
  """Init.

  Args:
    begin_line_number: int. Starting line number for the type definition.
    filepath: string. Path to the file defining the type.
    typename: required string.
    description: required string.
    parents: list of parent typename strings.
    local_field_tuples: list of OptWrapper tuples.
    is_abstract: boolean indicating if this is an abstract type.
    inherited_fields_expanded: boolean. Should be false at init.
    is_canonical: boolean indicating if this is a curated canonical type.
    uid: the database ID string of this type if uploaded.
  """
  super(EntityType, self).__init__()
  self.file_context = findings_lib.FileContext(
      begin_line_number=begin_line_number, filepath=filepath)
  self.typename = typename
  self.description = description

  # Map qualified field name -> OptWrapper tuple; the parallel list keeps
  # the original order (and any duplicates) for validation below.
  self.local_field_names = {}
  local_field_names = []
  if local_field_tuples:
    for field_tuple in local_field_tuples:
      qualified_name = _BuildQualifiedField(field_tuple)
      local_field_names.append(qualified_name)
      self.local_field_names[qualified_name] = field_tuple

  self.inherited_field_names = {}
  self.inherited_fields_expanded = inherited_fields_expanded

  self.parent_names = None
  self.parent_name_tuples = None
  self.unqualified_parent_names = parents if parents is not None else []

  self._all_fields = None
  self._has_optional_fields = None

  self.is_abstract = is_abstract
  self.is_canonical = is_canonical
  self.uid = uid

  # TODO(berkoben) update this method to use tuples if possible
  self._ValidateType(local_field_names)
def testFindingsHasFindingTypes(self):
  """HasFindingTypes matches when any listed type has been recorded."""
  self.assertFalse(
      self.findings_class.HasFindingTypes([findings_lib.ValidationError]))

  self.findings_class.AddFinding(
      findings_lib.ValidationError(
          'message', findings_lib.FileContext('filepath')))

  # Exact type present.
  self.assertTrue(
      self.findings_class.HasFindingTypes([findings_lib.ValidationError]))
  # An absent type alone does not match.
  self.assertFalse(
      self.findings_class.HasFindingTypes([findings_lib.ValidationWarning]))
  # A list matches if at least one member type is present.
  self.assertTrue(
      self.findings_class.HasFindingTypes(
          [findings_lib.ValidationError, findings_lib.ValidationWarning]))
def testStateUniverseGetFindings(self):
  """StateUniverse surfaces findings from folder, namespace and state."""
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  folder = state_lib.StateFolder(_GOOD_PATH)
  folder.AddFinding(findings_lib.InconsistentFileLocationError('', context))
  namespace = folder.local_namespace
  namespace.AddFinding(
      findings_lib.DuplicateStateDefinitionError(
          state_lib.State('STATE'), 'namespace'))
  state = state_lib.State('STATE', 'description')
  state.AddFinding(findings_lib.MissingStateDescriptionWarning(state))
  namespace.InsertState(state)

  universe = state_lib.StateUniverse([folder])

  self.assertLen(universe.GetFindings(), 3)
  self.assertTrue(
      universe.HasFindingTypes([
          findings_lib.InconsistentFileLocationError,
          findings_lib.DuplicateStateDefinitionError,
          findings_lib.MissingStateDescriptionWarning,
      ]))
  self.assertFalse(universe.IsValid())
def __init__(self,
             begin_line_number=0,
             filepath='',
             typename='',
             description='',
             parents=None,
             local_field_tuples=None,
             is_abstract=False,
             inherited_fields_expanded=False,
             is_canonical=False,
             uid=None,
             namespace=None):
  """Init.

  Args:
    begin_line_number: int. Starting line number for the entity type
      definition.
    filepath: string. google3 path to the file defining the type.
    typename: required string.
    description: required string.
    parents: list of parent typename strings.
    local_field_tuples: list of OptWrapper tuples
    is_abstract: boolean indicating if this is an abstract type.
    inherited_fields_expanded: boolean. Should be false at init.
    is_canonical: boolean indicating if this is a curated canonical type.
    uid: the database ID string of this type if uploaded
    namespace: a reference to the namespace object the entity belongs to
  """
  super(EntityType, self).__init__()
  self.file_context = findings_lib.FileContext(
      begin_line_number=begin_line_number, filepath=filepath)
  self.typename = typename
  self.description = description
  self.namespace = namespace

  # Map qualified field name -> OptWrapper tuple; the parallel list keeps
  # the original order (and any duplicates) for validation below.
  self.local_field_names = {}
  local_field_names = []
  if local_field_tuples:
    for field_tuple in local_field_tuples:
      qualified_name = BuildQualifiedField(field_tuple)
      local_field_names.append(qualified_name)
      self.local_field_names[qualified_name] = field_tuple

  self.inherited_field_names = {}
  self.inherited_fields_expanded = inherited_fields_expanded

  self.parent_names = None
  self.parent_name_tuples = None
  self.unqualified_parent_names = parents if parents is not None else []

  self._all_fields = None
  self._has_optional_fields = None

  self.is_abstract = is_abstract
  self.is_canonical = is_canonical
  self.uid = uid

  # TODO(berkoben) update this method to use tuples if possible
  self._ValidateType(local_field_names)
def CheckBackwardsCompatibility(new_universe, old_universe):
  """Checks that non-abstract types are not removed or changed in new configs.

  Method expects types in passed universe to have inherited_fields_expanded.

  Method has the side effect of setting is_changed field on everything in this
  universe that has changes except folders at the entity type level.

  Args:
    new_universe: EntityTypeUniverse object for the new config
    old_universe: EntityTypeUniverse object for the old config

  Returns:
    A list of findings generated by the compatibility check.

  Raises:
    RuntimeError: if fields are not expanded for any types
  """
  # for every non-abstract type in the old universe, there should be a
  # corresponding type with the same fields in the new universe.
  old_ns_map = old_universe.type_namespaces_map
  new_ns_map = new_universe.type_namespaces_map.copy()
  findings = []
  for ns_name in old_ns_map:
    old_ns = old_ns_map[ns_name]
    if ns_name not in new_ns_map:
      # The entire namespace is missing from the new config.  Emit a single
      # RemovedNamespaceWarning if it contained any non-abstract type.
      old_types = list(old_ns.valid_types_map.keys())
      for old_type_name in old_types:
        if old_ns.valid_types_map[old_type_name].is_abstract:
          continue
        context = findings_lib.FileContext(
            old_universe.namespace_folder_map[ns_name].GetFolderpath())
        finding = findings_lib.RemovedNamespaceWarning(context, ns_name,
                                                       list(old_types))
        new_universe.AddFinding(finding)
        findings.append(finding)
        break
      continue

    # Remove namespace from new ns map so when we're done we'll only have newly
    # created namespaces left in it.
    new_ns = new_ns_map.pop(ns_name)
    new_ns_types = new_ns.valid_types_map.copy()
    for type_name in old_ns.valid_types_map:
      old_type = old_ns.valid_types_map[type_name]
      if old_type.uid:
        # Match by database uid first, since the type may have been renamed.
        new_type_uid_entry = new_universe.type_ids_map.get(old_type.uid)
        if new_type_uid_entry:
          if (new_type_uid_entry.namespace == ns_name and
              new_type_uid_entry.typename == type_name):
            new_type = new_ns_types.pop(type_name)
          else:
            new_type = new_universe.GetEntityType(new_type_uid_entry.namespace,
                                                  new_type_uid_entry.typename)
        else:
          # type has been removed
          if not old_type.is_abstract:
            finding = findings_lib.RemovedTypeWarning(old_type)
            new_ns.AddFinding(finding)
            findings.append(finding)
          continue
      elif type_name not in new_ns_types:
        # No uid to follow and no new type of the same name: removed.
        if not old_type.is_abstract:
          finding = findings_lib.RemovedTypeWarning(old_type)
          new_ns.AddFinding(finding)
          findings.append(finding)
        continue
      else:
        new_type = new_ns_types.pop(type_name)

      # Check to appease python type static analyzer
      if new_type is None:
        raise RuntimeError('new_type should never be None at this point.')

      old_fields = old_type.GetAllFields()
      new_fields = new_type.GetAllFields()
      if old_fields == new_fields:
        # Fields unchanged; metadata-only edits still set the changed flags.
        if (new_type.description != old_type.description or
            new_type.typename != old_type.typename or
            new_type.is_abstract != old_type.is_abstract or
            new_type.is_canonical != old_type.is_canonical):
          new_type.SetChanged()
          new_ns.SetChanged()
        continue

      new_type.SetChanged()
      new_ns.SetChanged()
      new_universe.namespace_folder_map[new_ns.namespace].SetChanged()
      if old_type.is_abstract:
        # Field changes on abstract types generate no findings.
        continue
      # Check added fields
      for field in old_fields:
        if field in new_fields:
          # Shared field; drop it so only additions remain in new_fields.
          new_fields.pop(field)
          continue
        finding = findings_lib.RemovedFieldWarning(new_type, field)
        new_type.AddFinding(finding)
        findings.append(finding)
      for field in new_fields:
        # Optional additions are not breaking; required additions are.
        if new_fields[field].optional:
          continue
        finding = findings_lib.AddedFieldWarning(new_type, field)
        new_type.AddFinding(finding)
        findings.append(finding)

    # Types left in new_ns_types were never matched to an old type: new types.
    for new_type in new_ns_types.values():
      new_type.SetChanged()

  # Mark anything new as changed
  for ns_name in new_ns_map:
    new_ns = new_ns_map[ns_name]
    new_ns.SetChanged()
    new_universe.namespace_folder_map[new_ns.namespace].SetChanged()
    for new_type in new_ns.valid_types_map.values():
      new_type.SetChanged()

  return findings
def testCreateSubfield(self):
  """A fully specified subfield is created without findings."""
  context = findings_lib.FileContext(_GOOD_PATH + '/file.yaml')
  subfield = subfield_lib.Subfield(
      'good', subfield_lib.SubfieldCategory.DESCRIPTOR, 'hi', context)
  self.assertEmpty(subfield.GetFindings())
def setUp(self):
  """Builds a fresh findings container and file context for each test."""
  self.file_context = findings_lib.FileContext(filepath='some_path')
  self.findings_class = findings_lib.Findings()