def test_parse_bad_children(self):
    """
    Tests the correct error is raised when a type definition has undefined
    child IDs referenced.
    """
    # Test
    try:
        parser.parse([BAD_CHILD_TYPES_DESCRIPTOR])
        self.fail('Bad children did not raise an exception')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.UndefinedReferencedIds as e:
        self.assertEqual(1, len(e.missing_referenced_ids))
        self.assertTrue('not_there' in e.missing_referenced_ids)
def test_parse_duplicate_type(self):
    """
    Tests two types with the same ID throw the correct error.
    """
    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, VALID_DESCRIPTOR_1])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.DuplicateType as e:
        self.assertEqual(1, len(e.type_ids))
        self.assertEqual('rpm', e.type_ids[0])
def test_parse_bad_children(self):
    """
    Tests the correct error is raised when a type definition has undefined
    child IDs referenced.
    """
    # Test
    try:
        parser.parse([BAD_CHILD_TYPES_DESCRIPTOR])
        self.fail('Bad children did not raise an exception')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.UndefinedReferencedIds as e:
        self.assertEqual(1, len(e.missing_referenced_ids))
        self.assertTrue('not_there' in e.missing_referenced_ids)
def test_parse_duplicate_type(self):
    """
    Tests two types with the same ID throw the correct error.
    """
    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, VALID_DESCRIPTOR_1])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.DuplicateType as e:
        self.assertEqual(1, len(e.type_ids))
        self.assertEqual('rpm', e.type_ids[0])
def _check_content_definitions(descriptors):
    """
    Check whether the given content definitions exist in the database.

    This method makes no changes to the stored content definitions or to any
    indexes; it only reports what would change.

    :param descriptors: A list of content descriptors
    :type  descriptors: list of TypeDescriptor

    :return: A list of content types that would have been created or updated
             by _load_type_definitions
    :rtype:  list of TypeDefinition
    """
    parsed_definitions = parser.parse(descriptors)
    outdated = []
    for parsed in parsed_definitions:
        stored = database.type_definition(parsed.id)
        # A type that is absent from the database, or whose stored record
        # differs from the parsed definition in any attribute, needs updating.
        if stored is None or any(
                attr not in stored or stored[attr] != value
                for attr, value in parsed.__dict__.items()):
            outdated.append(parsed)
    return outdated
def test_parse_single_descriptor_single_type(self):
    """
    Tests the simple success case of loading multiple descriptors with
    valid data.
    """
    # Test
    definitions = parser.parse([VALID_DESCRIPTOR_1])

    # Verify
    self.assertTrue(definitions is not None)
    self.assertEqual(1, len(definitions))

    parsed = definitions[0]
    self.assertEqual('rpm', parsed.id)
    self.assertEqual('RPM', parsed.display_name)
    expected_unit_key = ["name", "version", "release", "arch", "filename",
                         "checksum"]
    self.assertEqual(expected_unit_key, parsed.unit_key)
    self.assertEqual(2, len(parsed.search_indexes))
    self.assertEqual(["name", "epoch", "version", "release", "arch"],
                     parsed.search_indexes[0])
    self.assertEqual("filename", parsed.search_indexes[1])
def test_parse_single_descriptor_single_type(self):
    """
    Tests the simple success case of loading multiple descriptors with
    valid data.
    """
    # Test
    definitions = parser.parse([VALID_DESCRIPTOR_1])

    # Verify
    self.assertTrue(definitions is not None)
    self.assertEqual(1, len(definitions))

    parsed = definitions[0]
    self.assertEqual('rpm', parsed.id)
    self.assertEqual('RPM', parsed.display_name)
    expected_unit_key = ["name", "version", "release", "arch", "filename",
                         "checksum"]
    self.assertEqual(expected_unit_key, parsed.unit_key)
    self.assertEqual(2, len(parsed.search_indexes))
    self.assertEqual(["name", "epoch", "version", "release", "arch"],
                     parsed.search_indexes[0])
    self.assertEqual("filename", parsed.search_indexes[1])
def init_types(self):
    """
    Reset the types database and load the definitions found at
    self.TYPES_PATH into it.
    """
    database.clean()
    # Context manager guarantees the descriptor file is closed even if
    # read() raises; the original open()/close() pair leaked on error.
    with open(self.TYPES_PATH) as fp:
        td = TypeDescriptor(os.path.basename(self.TYPES_PATH), fp.read())
    definitions = parser.parse([td])
    database.update_database(definitions)
def _check_content_definitions(descriptors):
    """
    Check whether the given content definitions exist in the database.

    This method makes no changes to the stored content definitions or to any
    indexes; it only reports what would change.

    :param descriptors: A list of content descriptors
    :type  descriptors: list of TypeDescriptor

    :return: A list of content types that would have been created or updated
             by _load_type_definitions
    :rtype:  list of TypeDefinition
    """
    parsed_definitions = parser.parse(descriptors)
    outdated = []
    for parsed in parsed_definitions:
        stored = database.type_definition(parsed.id)
        # A type that is absent from the database, or whose stored record
        # differs from the parsed definition in any attribute, needs updating.
        if stored is None or any(
                attr not in stored or stored[attr] != value
                for attr, value in parsed.__dict__.items()):
            outdated.append(parsed)
    return outdated
def load_content_types(types_dir=_TYPES_DIR, dry_run=False, drop_indices=False):
    """
    Check or update database with content unit types information.

    :param types_dir: path to content unit type JSON files, currently used
                      only for node.json
    :type  types_dir: str
    :param dry_run: if True, no modifications to database will be made,
                    defaults to False
    :type  dry_run: bool
    :param drop_indices: if True, indices for the collections of modified
                         unit types will be dropped, defaults to False
    :type  drop_indices: bool

    :return: None if dry_run is set to False, list of content unit types to
             be created or updated, if dry_run is set to True
    :rtype:  None or list of TypeDefinition
    """
    if not os.access(types_dir, os.F_OK | os.R_OK):
        msg = _('Cannot load types: path does not exist or cannot be read: %(p)s')
        _logger.critical(msg % {'p': types_dir})
        raise IOError(msg % {'p': types_dir})

    # Legacy JSON-file based definitions (to handle node.json only)
    file_definitions = parser.parse(_load_type_descriptors(types_dir))
    # Definitions advertised by plugins through entry points
    plugin_definitions = _generate_plugin_definitions()

    if dry_run:
        return _check_content_definitions(file_definitions + plugin_definitions)

    database.update_database(file_definitions, drop_indices=drop_indices,
                             create_indexes=True)
    database.update_database(plugin_definitions, drop_indices=drop_indices,
                             create_indexes=False)
def test_parse_invalid_root(self):
    """
    Tests that a parsable but ill-formed descriptor throws the correct
    error. A valid descriptor is passed to show that at least one failed
    descriptor causes the parse to fail.
    """
    # Setup
    incorrect = model.TypeDescriptor('incorrect', '{"not-types" : "foo"}')

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, incorrect])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.MissingRoot as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('incorrect', e.error_filenames()[0])
def test_parse_invalid_root(self):
    """
    Tests that a parsable but ill-formed descriptor throws the correct
    error. A valid descriptor is passed to show that at least one failed
    descriptor causes the parse to fail.
    """
    # Setup
    incorrect = model.TypeDescriptor('incorrect', '{"not-types" : "foo"}')

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, incorrect])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.MissingRoot as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('incorrect', e.error_filenames()[0])
def test_parse_invalid_descriptor(self):
    """
    Tests the proper exception is thrown when a descriptor cannot be
    parsed. A valid descriptor is passed to show that at least one failed
    descriptor causes the parse to fail.
    """
    # Setup
    invalid = model.TypeDescriptor('invalid', 'foo')

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, invalid])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.Unparsable as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('invalid', e.error_filenames()[0])
        str(e)  # included just for coverage
def test_parse_invalid_descriptor(self):
    """
    Tests the proper exception is thrown when a descriptor cannot be
    parsed. A valid descriptor is passed to show that at least one failed
    descriptor causes the parse to fail.
    """
    # Setup
    invalid = model.TypeDescriptor('invalid', 'foo')

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, invalid])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.Unparsable as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('invalid', e.error_filenames()[0])
        self.assertEqual(e.__str__(),
                         'Exception [Unparsable] for files [invalid]')
def test_parse_missing_attribute(self):
    """
    Tests a type definition with a missing attribute cannot be parsed.
    """
    # Setup
    no_id = model.TypeDescriptor(
        'no_id',
        """{"types": [
            {"display_name" : "RPM", "description" : "RPM",
             "unit_key" : "name", "search_indexes" : "name"}
        ]}""")

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, no_id])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.MissingAttribute as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('no_id', e.error_filenames()[0])
def test_parse_extra_attribute(self):
    """
    Tests a type definition with unexpected attributes cannot be parsed.
    """
    # Setup
    extra = model.TypeDescriptor(
        'extra',
        """{"types": [
            {"id" : "rpm", "display_name" : "RPM", "description" : "RPM",
             "unit_key" : "name", "search_indexes" : "name",
             "unexpected_attribute" : "foo"}
        ]}""")

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, extra])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.InvalidAttribute as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('extra', e.error_filenames()[0])
def test_parse_missing_attribute(self):
    """
    Tests a type definition with a missing attribute cannot be parsed.
    """
    # Setup
    no_id = model.TypeDescriptor(
        'no_id',
        """{"types": [
            {"display_name" : "RPM", "description" : "RPM",
             "unit_key" : "name", "search_indexes" : "name"}
        ]}""")

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, no_id])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.MissingAttribute as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('no_id', e.error_filenames()[0])
def test_parse_invalid_type_id(self):
    """
    Tests that a type definition with a malformed ID throws the correct
    error.
    """
    # Setup
    bad_id = model.TypeDescriptor(
        'bad_id',
        """{"types": [
            {"id" : "bad-id", "display_name" : "RPM", "description" : "RPM",
             "unit_key" : "name", "search_indexes" : "name"}
        ]}""")

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, bad_id])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.InvalidTypeId as e:
        self.assertEqual(1, len(e.type_ids))
        self.assertEqual('bad-id', e.type_ids[0])
def test_parse_invalid_type_id(self):
    """
    Tests that a type definition with a malformed ID throws the correct
    error.
    """
    # Setup
    bad_id = model.TypeDescriptor(
        'bad_id',
        """{"types": [
            {"id" : "bad-id", "display_name" : "RPM", "description" : "RPM",
             "unit_key" : "name", "search_indexes" : "name"}
        ]}""")

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, bad_id])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.InvalidTypeId as e:
        self.assertEqual(1, len(e.type_ids))
        self.assertEqual('bad-id', e.type_ids[0])
def test_parse_extra_attribute(self):
    """
    Tests a type definition with unexpected attributes cannot be parsed.
    """
    # Setup
    extra = model.TypeDescriptor(
        'extra',
        """{"types": [
            {"id" : "rpm", "display_name" : "RPM", "description" : "RPM",
             "unit_key" : "name", "search_indexes" : "name",
             "unexpected_attribute" : "foo"}
        ]}""")

    # Test
    try:
        parser.parse([VALID_DESCRIPTOR_1, extra])
        self.fail('Exception not correctly thrown')
    # 'except Exc as e' replaces the Python 2-only 'except Exc, e' form,
    # which is a syntax error on Python 3.
    except parser.InvalidAttribute as e:
        self.assertEqual(1, len(e.error_filenames()))
        self.assertEqual('extra', e.error_filenames()[0])
def test_parse_multiple_types(self):
    """
    Tests parsing a descriptor that contains multiple type definitions.
    """
    # Test
    definitions = parser.parse([MULTI_TYPE_DESCRIPTOR])

    # Verify both definitions came back, in declaration order
    self.assertTrue(definitions is not None)
    self.assertEqual(2, len(definitions))
    parsed_ids = [definition.id for definition in definitions]
    self.assertEqual('rpm', parsed_ids[0])
    self.assertEqual('deb', parsed_ids[1])
def test_parse_multiple_descriptors(self):
    """
    Tests parsing multiple descriptors.
    """
    # Test
    definitions = parser.parse([VALID_DESCRIPTOR_1, VALID_DESCRIPTOR_2])

    # Verify one definition per descriptor, in the order supplied
    self.assertTrue(definitions is not None)
    self.assertEqual(2, len(definitions))
    parsed_ids = [definition.id for definition in definitions]
    self.assertEqual('rpm', parsed_ids[0])
    self.assertEqual('deb', parsed_ids[1])
def test_parse_multiple_descriptors(self):
    """
    Tests parsing multiple descriptors.
    """
    # Test
    definitions = parser.parse([VALID_DESCRIPTOR_1, VALID_DESCRIPTOR_2])

    # Verify one definition per descriptor, in the order supplied
    self.assertTrue(definitions is not None)
    self.assertEqual(2, len(definitions))
    parsed_ids = [definition.id for definition in definitions]
    self.assertEqual('rpm', parsed_ids[0])
    self.assertEqual('deb', parsed_ids[1])
def test_parse_multiple_types(self):
    """
    Tests parsing a descriptor that contains multiple type definitions.
    """
    # Test
    definitions = parser.parse([MULTI_TYPE_DESCRIPTOR])

    # Verify both definitions came back, in declaration order
    self.assertTrue(definitions is not None)
    self.assertEqual(2, len(definitions))
    parsed_ids = [definition.id for definition in definitions]
    self.assertEqual('rpm', parsed_ids[0])
    self.assertEqual('deb', parsed_ids[1])
def load_content_types(types_dir=_TYPES_DIR, dry_run=False, drop_indices=False):
    """
    Check or update database with content unit types information.

    :param types_dir: path to content unit type JSON files, currently used
                      only for node.json
    :type  types_dir: str
    :param dry_run: if True, no modifications to database will be made,
                    defaults to False
    :type  dry_run: bool
    :param drop_indices: if True, indices for the collections of modified
                         unit types will be dropped, defaults to False
    :type  drop_indices: bool

    :return: None if dry_run is set to False, list of content unit types to
             be created or updated, if dry_run is set to True
    :rtype:  None or list of TypeDefinition
    """
    if not os.access(types_dir, os.F_OK | os.R_OK):
        msg = _(
            'Cannot load types: path does not exist or cannot be read: %(p)s')
        _logger.critical(msg % {'p': types_dir})
        raise IOError(msg % {'p': types_dir})

    # Legacy JSON-file based definitions (to handle node.json only)
    file_definitions = parser.parse(_load_type_descriptors(types_dir))
    # Definitions advertised by plugins through entry points
    plugin_definitions = _generate_plugin_definitions()

    if dry_run:
        return _check_content_definitions(file_definitions + plugin_definitions)

    database.update_database(file_definitions, drop_indices=drop_indices,
                             create_indexes=True)
    database.update_database(plugin_definitions, drop_indices=drop_indices,
                             create_indexes=False)
def test_parse_with_children(self):
    """
    Tests parsing a descriptor with valid children definitions.
    """
    # Test
    definitions = parser.parse([CHILD_TYPES_DESCRIPTOR])

    # Verify
    self.assertTrue(definitions is not None)
    self.assertEqual(3, len(definitions))

    by_id = dict((d.id, d) for d in definitions)

    # aaa and bbb each reference ccc; ccc references nothing
    self.assertEqual(1, len(by_id['aaa'].referenced_types))
    self.assertTrue('ccc' in by_id['aaa'].referenced_types)

    self.assertEqual(1, len(by_id['bbb'].referenced_types))
    self.assertTrue('ccc' in by_id['bbb'].referenced_types)

    self.assertEqual(0, len(by_id['ccc'].referenced_types))
def test_parse_with_children(self):
    """
    Tests parsing a descriptor with valid children definitions.
    """
    # Test
    definitions = parser.parse([CHILD_TYPES_DESCRIPTOR])

    # Verify
    self.assertTrue(definitions is not None)
    self.assertEqual(3, len(definitions))

    by_id = dict((d.id, d) for d in definitions)

    # aaa and bbb each reference ccc; ccc references nothing
    self.assertEqual(1, len(by_id['aaa'].referenced_types))
    self.assertTrue('ccc' in by_id['aaa'].referenced_types)

    self.assertEqual(1, len(by_id['bbb'].referenced_types))
    self.assertTrue('ccc' in by_id['bbb'].referenced_types)

    self.assertEqual(0, len(by_id['ccc'].referenced_types))
def _load_type_definitions(descriptors):
    """
    Parse the given descriptors and persist the resulting definitions.

    :type descriptors: list [TypeDescriptor, ...]
    """
    parsed = parser.parse(descriptors)
    database.update_database(parsed)
def _load_type_definitions(descriptors):
    """
    Parse the given descriptors and persist the resulting definitions.

    @type descriptors: list [L{TypeDescriptor}, ...]
    """
    parsed = parser.parse(descriptors)
    database.update_database(parsed)
def _load_type_definitions(descriptors, drop_indices=False):
    """
    Parse the given descriptors and persist the resulting definitions,
    optionally dropping existing indices first.

    :type descriptors: list [TypeDescriptor, ...]
    """
    parsed = parser.parse(descriptors)
    database.update_database(parsed, drop_indices=drop_indices)
def _load_type_definitions(descriptors, drop_indices=False):
    """
    Parse the given descriptors and persist the resulting definitions,
    optionally dropping existing indices first.

    :type descriptors: list [TypeDescriptor, ...]
    """
    parsed = parser.parse(descriptors)
    database.update_database(parsed, drop_indices=drop_indices)