def test_dynamic_schema_refreshed_on_modify_model_source(self):
    portal_type = 'testtype'
    fti = self.mocker.proxy(DexterityFTI(portal_type))

    class INew(Interface):
        title = zope.schema.TextLine(title='title')

    model_dummy = Model({'': INew})
    self.expect(fti.lookupModel()).result(model_dummy)
    self.create_dummy()

    site_dummy = self.create_dummy(getPhysicalPath=lambda: ('', 'siteid'))
    self.mock_utility(site_dummy, Interface)

    class IBlank(Interface):
        pass

    self.replay()

    # Set source interface
    schemaName = utils.portalTypeToSchemaName(fti.getId())
    setattr(plone.dexterity.schema.generated, schemaName, IBlank)

    # Sync this with schema
    ftiModified(
        fti,
        ObjectModifiedEvent(
            fti,
            DexterityFTIModificationDescription('model_source', '')))

    self.assertTrue('title' in IBlank)
    self.assertTrue(IBlank['title'].title == 'title')
def test_dynamic_schema_refreshed_on_modify_model_source(self): portal_type = u"testtype" fti = DexterityFTI(portal_type) class INew(Interface): title = zope.schema.TextLine(title=u"title") model_dummy = Model({u"": INew}) fti.lookupModel = Mock(return_value=model_dummy) self.create_dummy() site_dummy = self.create_dummy(getPhysicalPath=lambda: ('', 'siteid')) self.mock_utility(site_dummy, ISiteRoot) class IBlank(Interface): pass # Set source interface schemaName = utils.portalTypeToSchemaName(fti.getId()) setattr(plone.dexterity.schema.generated, schemaName, IBlank) # Sync this with schema ftiModified( fti, ObjectModifiedEvent( fti, DexterityFTIModificationDescription('model_source', ''))) self.assertTrue('title' in IBlank) self.assertTrue(IBlank['title'].title == u"title")
def test_concrete_schema_not_refreshed_on_modify_schema(self):
    portal_type = 'testtype'
    fti = self.mocker.proxy(DexterityFTI(portal_type))

    class IBlank(Interface):
        pass

    class INew(Interface):
        title = zope.schema.TextLine(title='title')

    model_dummy = Model({'': INew})
    self.expect(fti.lookupModel()).result(model_dummy).count(0, None)
    self.create_dummy()

    site_dummy = self.create_dummy(getPhysicalPath=lambda: ('', 'siteid'))
    self.mock_utility(site_dummy, Interface)

    self.replay()

    # Set schema to something so that hasDynamicSchema is false
    fti.schema = IBlank.__identifier__
    assert not fti.hasDynamicSchema

    # Set source for dynamic FTI - should not be used
    schemaName = utils.portalTypeToSchemaName(fti.getId())
    setattr(plone.dexterity.schema.generated, schemaName, IBlank)

    # Sync should not happen now
    ftiModified(
        fti,
        ObjectModifiedEvent(
            fti,
            DexterityFTIModificationDescription('schema', '')))

    self.assertFalse('title' in IBlank)
def test_named_schema(self):
    # Mock schema model
    class IDummy(Interface):
        dummy = zope.schema.TextLine(title=u"Dummy")

    class INamedDummy(Interface):
        named = zope.schema.TextLine(title=u"Named")

    mock_model = Model({u"": IDummy, u"named": INamedDummy})

    # Mock FTI
    fti_mock = self.mocker.mock(DexterityFTI)
    fti_mock.lookupModel()
    self.mocker.result(mock_model)
    self.mock_utility(fti_mock, IDexterityFTI, u'testtype')

    self.mocker.replay()

    factory = schema.SchemaModuleFactory()

    schemaName = schema.portalTypeToSchemaName(
        'testtype', schema=u"named", prefix='site')
    klass = factory(schemaName, schema.generated)

    self.assertTrue(isinstance(klass, InterfaceClass))
    # only default schema gets this:
    self.assertFalse(klass.isOrExtends(IDexteritySchema))
    self.assertFalse(IContentType.providedBy(klass))
    self.assertEqual(schemaName, klass.__name__)
    self.assertEqual('plone.dexterity.schema.generated', klass.__module__)
    self.assertEqual(('named', ), tuple(zope.schema.getFieldNames(klass)))
def test_concrete_default_schema(self):
    # Mock schema model
    class IDummy(Interface):
        dummy = zope.schema.TextLine(title=u"Dummy")

    mock_model = Model({u"": IDummy})

    # Mock FTI
    fti_mock = self.mocker.mock(DexterityFTI)
    fti_mock.lookupModel()
    self.mocker.result(mock_model)
    self.mock_utility(fti_mock, IDexterityFTI, u'testtype')

    self.mocker.replay()

    factory = schema.SchemaModuleFactory()

    schemaName = utils.portalTypeToSchemaName('testtype', prefix='site')
    klass = factory(schemaName, schema.generated)

    self.failUnless(isinstance(klass, InterfaceClass))
    self.failUnless(klass.isOrExtends(IDexteritySchema))
    self.failUnless(IContentType.providedBy(klass))
    self.assertEquals(schemaName, klass.__name__)
    self.assertEquals('plone.dexterity.schema.generated', klass.__module__)
    self.assertEquals(('dummy', ), tuple(zope.schema.getFieldNames(klass)))
def test_lookupModel_from_string(self): fti = DexterityFTI(u"testtype") fti.schema = None fti.model_source = "<model />" fti.model_file = None model_dummy = Model() from plone.supermodel import loadString self.patch_global(loadString, return_value=model_dummy) model = fti.lookupModel() self.assertIs(model_dummy, model)
def serialize_ttw_schema(schema=None):
    if not schema:
        schema = get_ttw_edited_schema()
    bfields = [a for a in IUserDataSchema]
    attrs = {}
    for name in schema:
        f = schema[name]
        if is_serialisable_field(f) and name not in bfields:
            attrs[name] = f
    smember = SchemaClass(SCHEMATA_KEY, attrs=attrs)
    finalizeSchemas(smember)
    model = Model({SCHEMATA_KEY: smember})
    sschema = serialize(model)
    return sschema
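# A hedged usage sketch (assumption, not from the source): the XML returned by
# serialize_ttw_schema() should load back into an equivalent Model via
# plone.supermodel.loadString. IExampleSchema is a hypothetical schema interface.
def example_serialize_roundtrip(IExampleSchema):
    from plone.supermodel import loadString
    xml = serialize_ttw_schema(IExampleSchema)
    model = loadString(xml)
    # model.schemata maps schema names (here SCHEMATA_KEY) to interfaces
    return model.schemata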
def test_transient_schema_made_concrete(self):
    factory = schema.SchemaModuleFactory()
    schemaName = schema.portalTypeToSchemaName('testtype', prefix='site')

    # No IDexterityFTI registered
    klass = factory(schemaName, schema.generated)
    self.assertTrue(isinstance(klass, InterfaceClass))
    self.assertTrue(klass.isOrExtends(IDexteritySchema))
    self.assertTrue(IContentType.providedBy(klass))
    self.assertEqual(schemaName, klass.__name__)
    self.assertEqual('plone.dexterity.schema.generated', klass.__module__)
    self.assertEqual((), tuple(zope.schema.getFields(klass)))

    # Calling it again gives the same result
    klass = factory(schemaName, schema.generated)
    self.assertTrue(isinstance(klass, InterfaceClass))
    self.assertTrue(klass.isOrExtends(IDexteritySchema))
    self.assertTrue(IContentType.providedBy(klass))
    self.assertEqual(schemaName, klass.__name__)
    self.assertEqual('plone.dexterity.schema.generated', klass.__module__)
    self.assertEqual((), tuple(zope.schema.getFields(klass)))

    # Now register a mock FTI and try again
    class IDummy(Interface):
        dummy = zope.schema.TextLine(title=u"Dummy")

    mock_model = Model({u"": IDummy})

    fti_mock = self.mocker.mock(DexterityFTI)
    fti_mock.lookupModel()
    self.mocker.result(mock_model)
    self.mock_utility(fti_mock, IDexterityFTI, u'testtype')

    self.mocker.replay()

    klass = factory(schemaName, schema.generated)
    self.assertTrue(isinstance(klass, InterfaceClass))
    self.assertTrue(klass.isOrExtends(IDexteritySchema))
    self.assertTrue(IContentType.providedBy(klass))
    self.assertEqual(schemaName, klass.__name__)
    self.assertEqual('plone.dexterity.schema.generated', klass.__module__)

    # Now we get the fields from the FTI's model
    self.assertEqual(('dummy', ), tuple(zope.schema.getFieldNames(klass)))
def lookupModel(self):
    if self.model_source:
        return loadString(self.model_source, policy=self.schema_policy)
    elif self.model_file:
        model_file = self._absModelFile()
        return loadFile(model_file, reload=True, policy=self.schema_policy)
    elif self.schema:
        schema = self.lookupSchema()
        return Model({u"": schema})
    raise ValueError(
        "Neither model source, nor model file, nor schema is specified in FTI %s"
        % self.getId())
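# A minimal precedence sketch (assumption, not from the source): lookupModel()
# prefers model_source over model_file, and model_file over schema. The portal
# type name and inline XML below are hypothetical examples.
def example_lookupModel_precedence():
    from plone.dexterity.fti import DexterityFTI
    fti = DexterityFTI(u"example_type")
    fti.model_source = (
        u'<model xmlns="http://namespaces.plone.org/supermodel/schema">'
        u'<schema /></model>'
    )
    fti.model_file = u"plone.dexterity.tests:test.xml"  # ignored: model_source wins
    fti.schema = None
    return fti.lookupModel()  # parsed from model_source via loadString()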
def test_lookupModel_from_string(self):
    fti = DexterityFTI('testtype')
    fti.schema = None
    fti.model_source = '<model />'
    fti.model_file = None

    model_dummy = Model()

    loadString_mock = self.mocker.replace('plone.supermodel.loadString')
    self.expect(
        loadString_mock(fti.model_source, policy='dexterity')
    ).result(model_dummy)

    self.replay()

    model = fti.lookupModel()
    self.assertIs(model_dummy, model)
def test_lookupModel_from_string_with_schema(self): fti = DexterityFTI(u"testtype") fti.schema = u"plone.dexterity.tests.schemata.ITestSchema" # effectively ignored fti.model_source = "<model />" fti.model_file = None model_dummy = Model() loadString_mock = self.mocker.replace("plone.supermodel.loadString") self.expect(loadString_mock(fti.model_source, policy=u"dexterity")).result(model_dummy) self.replay() model = fti.lookupModel() self.assertIs(model_dummy, model) self.assertIs(ITestSchema, fti.lookupSchema())
def test_lookupModel_from_string_with_schema(self):
    fti = DexterityFTI('testtype')
    # effectively ignored:
    fti.schema = 'plone.dexterity.tests.schemata.ITestSchema'
    fti.model_source = '<model />'
    fti.model_file = None

    model_dummy = Model()

    loadString_mock = self.mocker.replace('plone.supermodel.loadString')
    self.expect(
        loadString_mock(fti.model_source, policy='dexterity')
    ).result(model_dummy)

    self.replay()

    model = fti.lookupModel()
    self.assertIs(model_dummy, model)
    self.assertIs(ITestSchema, fti.lookupSchema())
def test_lookupModel_from_string_with_schema(self): fti = DexterityFTI(u"testtype") # effectively ignored: fti.schema = u"plone.dexterity.tests.schemata.ITestSchema" fti.model_source = "<model />" fti.model_file = None model_dummy = Model() from plone.supermodel import loadString loadString_mock = self.patch_global(loadString, return_value=model_dummy) model = fti.lookupModel() self.assertIs(model_dummy, model) self.assertIs(ITestSchema, fti.lookupSchema()) loadString_mock.assert_called_once_with(fti.model_source, policy=u'dexterity')
def test_lookupModel_from_file_with_win32_absolute_path(self): fti = DexterityFTI(u"testtype") fti.schema = None fti.model_source = None fti.model_file = r"C:\models\testmodel.xml" model_dummy = Model() from os.path import isabs, isfile self.patch_global(isabs, return_value=True) self.patch_global(isfile, return_value=True) from plone.supermodel import loadFile loadFile_mock = self.patch_global(loadFile, return_value=model_dummy) model = fti.lookupModel() self.assertIs(model_dummy, model) loadFile_mock.assert_called_once_with( fti.model_file, reload=True, policy=u"dexterity")
def test_lookupModel_from_file_with_absolute_path(self):
    import plone.dexterity.tests
    abs_file = os.path.join(
        os.path.split(plone.dexterity.tests.__file__)[0], 'test.xml')

    fti = DexterityFTI('testtype')
    fti.schema = None
    fti.model_source = None
    fti.model_file = abs_file

    model_dummy = Model()

    loadFile_mock = self.mocker.replace('plone.supermodel.loadFile')
    self.expect(
        loadFile_mock(abs_file, reload=True, policy='dexterity')
    ).result(model_dummy)

    self.replay()

    model = fti.lookupModel()
    self.assertIs(model_dummy, model)
def test_lookupModel_from_file_with_package(self): fti = DexterityFTI(u"testtype") fti.schema = None fti.model_source = None fti.model_file = u"plone.dexterity.tests:test.xml" model_dummy = Model() import plone.dexterity.tests abs_file = os.path.join( os.path.split(plone.dexterity.tests.__file__)[0], "test.xml") loadFile_mock = self.mocker.replace("plone.supermodel.loadFile") self.expect(loadFile_mock(abs_file, reload=True, policy=u"dexterity")).result(model_dummy) self.replay() model = fti.lookupModel() self.assertIs(model_dummy, model)
def test_lookupModel_from_file_with_absolute_path(self):
    import plone.dexterity.tests
    abs_file = os.path.join(
        os.path.split(plone.dexterity.tests.__file__)[0], "test.xml")

    fti = DexterityFTI(u"testtype")
    fti.schema = None
    fti.model_source = None
    fti.model_file = abs_file

    model_dummy = Model()

    from plone.supermodel import loadFile
    loadFile_mock = self.patch_global(loadFile, return_value=model_dummy)

    model = fti.lookupModel()
    self.assertIs(model_dummy, model)
    loadFile_mock.assert_called_once_with(
        abs_file, reload=True, policy=u"dexterity")
def test_dynamic_schema_refreshed_on_modify_model_source(self): portal_type = u"testtype" fti = DexterityFTI(portal_type) class INew(Interface): title = zope.schema.TextLine(title=u"title") model_dummy = Model({u"": INew}) fti.lookupModel = Mock(return_value=model_dummy) self.create_dummy() site_dummy = self.create_dummy( getPhysicalPath=lambda: ('', 'siteid') ) self.mock_utility(site_dummy, ISiteRoot) # b/c of zope.interface does not support hashing of the same class multiple times # we need to postfix with a unique number # see https://github.com/zopefoundation/zope.interface/issues/216#issuecomment-701332380 class IBlank2(Interface): pass # Set source interface schemaName = portalTypeToSchemaName(fti.getId()) setattr(plone.dexterity.schema.generated, schemaName, IBlank2) # Sync this with schema ftiModified( fti, ObjectModifiedEvent( fti, DexterityFTIModificationDescription('model_source', '') ) ) self.assertTrue('title' in IBlank2) self.assertTrue(IBlank2['title'].title == u"title")
def test_concrete_schema_not_refreshed_on_modify_schema(self): portal_type = u"testtype" fti = DexterityFTI(portal_type) class IBlank4(Interface): pass class INew(Interface): title = zope.schema.TextLine(title=u"title") model_dummy = Model({u"": INew}) fti.lookupModel = Mock(return_value=model_dummy) site_dummy = self.create_dummy( getPhysicalPath=lambda: ('', 'siteid') ) self.mock_utility(site_dummy, ISiteRoot) # Set schema to something so that hasDynamicSchema is false fti.schema = IBlank4.__identifier__ assert not fti.hasDynamicSchema # Set source for dynamic FTI - should not be used schemaName = portalTypeToSchemaName(fti.getId()) setattr(plone.dexterity.schema.generated, schemaName, IBlank4) # Sync should not happen now ftiModified( fti, ObjectModifiedEvent( fti, DexterityFTIModificationDescription('schema', '') ) ) self.assertFalse('title' in IBlank4)
def test_lookupModel_from_file_with_win32_absolute_path(self):
    fti = DexterityFTI('testtype')
    fti.schema = None
    fti.model_source = None
    fti.model_file = r'C:\models\testmodel.xml'

    model_dummy = Model()

    isabs_mock = self.mocker.replace('os.path.isabs')
    self.expect(isabs_mock(fti.model_file)).result(True)

    isfile_mock = self.mocker.replace('os.path.isfile')
    self.expect(isfile_mock(fti.model_file)).result(True)

    loadFile_mock = self.mocker.replace('plone.supermodel.loadFile')
    self.expect(
        loadFile_mock(fti.model_file, reload=True, policy='dexterity')
    ).result(model_dummy)

    self.replay()

    model = fti.lookupModel()
    self.assertIs(model_dummy, model)
def _parse(source, policy):
    tree = etree.parse(source)
    root = tree.getroot()

    parseinfo.i18n_domain = root.attrib.get(
        ns('domain', prefix=I18N_NAMESPACE)
    )

    model = Model()
    handlers = {}
    schema_metadata_handlers = tuple(getUtilitiesFor(ISchemaMetadataHandler))
    field_metadata_handlers = tuple(getUtilitiesFor(IFieldMetadataHandler))
    policy_util = getUtility(ISchemaPolicy, name=policy)

    def readField(fieldElement, schemaAttributes, fieldElements, baseFields):

        # Parse field attributes
        fieldName = fieldElement.get('name')
        fieldType = fieldElement.get('type')

        if fieldName is None or fieldType is None:
            raise ValueError(
                'The attributes \'name\' and \'type\' are required for each '
                '<field /> element'
            )

        handler = handlers.get(fieldType, None)
        if handler is None:
            handler = handlers[fieldType] = queryUtility(
                IFieldExportImportHandler,
                name=fieldType
            )
            if handler is None:
                raise ValueError(
                    'Field type {0} specified for field {1} is not '
                    'supported'.format(fieldType, fieldName)
                )

        field = handler.read(fieldElement)

        # Preserve order from base interfaces if this field is an override
        # of a field with the same name in a base interface
        base_field = baseFields.get(fieldName, None)
        if base_field is not None:
            field.order = base_field.order

        # Save for the schema
        schemaAttributes[fieldName] = field
        fieldElements[fieldName] = fieldElement

        return fieldName

    for schema_element in root.findall(ns('schema')):
        parseinfo.stack.append(schema_element)
        schemaAttributes = {}

        schemaName = schema_element.get('name')
        if schemaName is None:
            schemaName = u""

        bases = ()
        baseFields = {}
        based_on = schema_element.get('based-on')
        if based_on is not None:
            bases = tuple([resolve(dotted) for dotted in based_on.split()])
            for base_schema in bases:
                baseFields.update(getFields(base_schema))

        fieldElements = {}

        # Read global fields
        for fieldElement in schema_element.findall(ns('field')):
            parseinfo.stack.append(fieldElement)
            readField(
                fieldElement, schemaAttributes, fieldElements, baseFields
            )
            parseinfo.stack.pop()

        # Read invariants, fieldsets and their fields
        invariants = []
        fieldsets = []
        fieldsets_by_name = {}

        for subelement in schema_element:
            parseinfo.stack.append(subelement)

            if subelement.tag == ns('field'):
                readField(
                    subelement, schemaAttributes, fieldElements, baseFields
                )
            elif subelement.tag == ns('fieldset'):
                fieldset_name = subelement.get('name')
                if fieldset_name is None:
                    raise ValueError(
                        u'Fieldset in schema {0} has no name'.format(
                            schemaName
                        )
                    )

                fieldset = fieldsets_by_name.get(fieldset_name, None)
                if fieldset is None:
                    fieldset_label = subelement.get('label')
                    fieldset_description = subelement.get('description')
                    fieldset_order = subelement.get('order')
                    if fieldset_order is None:
                        fieldset_order = DEFAULT_ORDER
                    elif isinstance(fieldset_order, basestring):
                        fieldset_order = int(fieldset_order)
                    fieldset = fieldsets_by_name[fieldset_name] = Fieldset(
                        fieldset_name,
                        label=fieldset_label,
                        description=fieldset_description,
                        order=fieldset_order,
                    )
                    fieldsets_by_name[fieldset_name] = fieldset
                    fieldsets.append(fieldset)

                for fieldElement in subelement.findall(ns('field')):
                    parseinfo.stack.append(fieldElement)
                    parsed_fieldName = readField(
                        fieldElement,
                        schemaAttributes,
                        fieldElements,
                        baseFields
                    )
                    if parsed_fieldName:
                        fieldset.fields.append(parsed_fieldName)
                    parseinfo.stack.pop()

            elif subelement.tag == ns('invariant'):
                dotted = subelement.text
                invariant = resolve(dotted)
                if not IInvariant.providedBy(invariant):
                    raise ImportError(
                        u'Invariant functions must provide '
                        u'plone.supermodel.interfaces.IInvariant'
                    )
                invariants.append(invariant)

            parseinfo.stack.pop()

        schema = SchemaClass(
            name=policy_util.name(schemaName, tree),
            bases=bases + policy_util.bases(schemaName, tree) + (Schema,),
            __module__=policy_util.module(schemaName, tree),
            attrs=schemaAttributes
        )

        # add invariants to schema as tagged values
        if invariants:
            schema_invariants = schema.queryTaggedValue('invariants', [])
            schema.setTaggedValue('invariants', schema_invariants + invariants)

        # Save fieldsets
        schema.setTaggedValue(FIELDSETS_KEY, fieldsets)

        # Let metadata handlers write metadata
        for handler_name, metadata_handler in field_metadata_handlers:
            for fieldName in schema:
                if fieldName in fieldElements:
                    metadata_handler.read(
                        fieldElements[fieldName],
                        schema,
                        schema[fieldName]
                    )

        for handler_name, metadata_handler in schema_metadata_handlers:
            metadata_handler.read(schema_element, schema)

        model.schemata[schemaName] = schema
        parseinfo.stack.pop()

    parseinfo.i18n_domain = None
    return model
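# For reference, a minimal example (hypothetical, not taken from the source) of
# the kind of supermodel document _parse() consumes: a default schema with one
# top-level field plus a named fieldset containing a second field.
EXAMPLE_SUPERMODEL_XML = """\
<model xmlns="http://namespaces.plone.org/supermodel/schema">
  <schema>
    <field name="title" type="zope.schema.TextLine">
      <title>Title</title>
    </field>
    <fieldset name="details" label="Details">
      <field name="body" type="zope.schema.Text">
        <title>Body</title>
        <required>False</required>
      </field>
    </fieldset>
  </schema>
</model>
"""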
def _parse(source, policy):
    tree = etree.parse(source)
    root = tree.getroot()

    parseinfo.i18n_domain = root.attrib.get(ns('domain', prefix=I18N_NAMESPACE))

    model = Model()
    handlers = {}
    schema_metadata_handlers = tuple(getUtilitiesFor(ISchemaMetadataHandler))
    field_metadata_handlers = tuple(getUtilitiesFor(IFieldMetadataHandler))
    policy_util = getUtility(ISchemaPolicy, name=policy)

    def readField(fieldElement, schemaAttributes, fieldElements, baseFields):

        # Parse field attributes
        fieldName = fieldElement.get('name')
        fieldType = fieldElement.get('type')

        if fieldName is None or fieldType is None:
            raise ValueError(
                'The attributes \'name\' and \'type\' are required for each '
                '<field /> element')

        handler = handlers.get(fieldType, None)
        if handler is None:
            handler = handlers[fieldType] = queryUtility(
                IFieldExportImportHandler, name=fieldType)
            if handler is None:
                raise ValueError(
                    'Field type {0} specified for field {1} is not '
                    'supported'.format(fieldType, fieldName))

        field = handler.read(fieldElement)

        # Preserve order from base interfaces if this field is an override
        # of a field with the same name in a base interface
        base_field = baseFields.get(fieldName, None)
        if base_field is not None:
            field.order = base_field.order

        # Save for the schema
        schemaAttributes[fieldName] = field
        fieldElements[fieldName] = fieldElement

        return fieldName

    for schema_element in root.findall(ns('schema')):
        parseinfo.stack.append(schema_element)
        schemaAttributes = {}

        schemaName = schema_element.get('name')
        if schemaName is None:
            schemaName = u""

        bases = ()
        baseFields = {}
        based_on = schema_element.get('based-on')
        if based_on is not None:
            bases = tuple([resolve(dotted) for dotted in based_on.split()])
            for base_schema in bases:
                baseFields.update(getFields(base_schema))

        fieldElements = {}

        # Read global fields
        for fieldElement in schema_element.findall(ns('field')):
            parseinfo.stack.append(fieldElement)
            readField(fieldElement, schemaAttributes, fieldElements, baseFields)
            parseinfo.stack.pop()

        # Read invariants, fieldsets and their fields
        invariants = []
        fieldsets = []
        fieldsets_by_name = {}

        for subelement in schema_element:
            parseinfo.stack.append(subelement)

            if subelement.tag == ns('field'):
                readField(subelement, schemaAttributes, fieldElements,
                          baseFields)
            elif subelement.tag == ns('fieldset'):
                fieldset_name = subelement.get('name')
                if fieldset_name is None:
                    raise ValueError(
                        u'Fieldset in schema {0} has no name'.format(
                            schemaName))

                fieldset = fieldsets_by_name.get(fieldset_name, None)
                if fieldset is None:
                    fieldset_label = subelement.get('label')
                    fieldset_description = subelement.get('description')
                    fieldset_order = subelement.get('order')
                    if fieldset_order is None:
                        fieldset_order = DEFAULT_ORDER
                    elif isinstance(fieldset_order, six.string_types):
                        fieldset_order = int(fieldset_order)
                    fieldset = fieldsets_by_name[fieldset_name] = Fieldset(
                        fieldset_name,
                        label=fieldset_label,
                        description=fieldset_description,
                        order=fieldset_order,
                    )
                    fieldsets_by_name[fieldset_name] = fieldset
                    fieldsets.append(fieldset)

                for fieldElement in subelement.findall(ns('field')):
                    parseinfo.stack.append(fieldElement)
                    parsed_fieldName = readField(fieldElement,
                                                 schemaAttributes,
                                                 fieldElements,
                                                 baseFields)
                    if parsed_fieldName:
                        fieldset.fields.append(parsed_fieldName)
                    parseinfo.stack.pop()

            elif subelement.tag == ns('invariant'):
                dotted = subelement.text
                invariant = resolve(dotted)
                if not IInvariant.providedBy(invariant):
                    raise ImportError(
                        u'Invariant functions must provide '
                        u'plone.supermodel.interfaces.IInvariant')
                invariants.append(invariant)

            parseinfo.stack.pop()

        schema = SchemaClass(
            name=policy_util.name(schemaName, tree),
            bases=bases + policy_util.bases(schemaName, tree) + (Schema, ),
            __module__=policy_util.module(schemaName, tree),
            attrs=schemaAttributes)

        # add invariants to schema as tagged values
        if invariants:
            schema_invariants = schema.queryTaggedValue('invariants', [])
            schema.setTaggedValue('invariants', schema_invariants + invariants)

        # Save fieldsets
        schema.setTaggedValue(FIELDSETS_KEY, fieldsets)

        # Let metadata handlers write metadata
        for handler_name, metadata_handler in field_metadata_handlers:
            for fieldName in schema:
                if fieldName in fieldElements:
                    metadata_handler.read(fieldElements[fieldName], schema,
                                          schema[fieldName])

        for handler_name, metadata_handler in schema_metadata_handlers:
            metadata_handler.read(schema_element, schema)

        model.schemata[schemaName] = schema
        parseinfo.stack.pop()

    parseinfo.i18n_domain = None
    return model
def _parse(source, policy):
    tree = etree.parse(source)
    root = tree.getroot()

    parseinfo.i18n_domain = root.attrib.get(ns('domain', prefix=I18N_NAMESPACE))

    model = Model()
    handlers = {}
    schema_metadata_handlers = tuple(getUtilitiesFor(ISchemaMetadataHandler))
    field_metadata_handlers = tuple(getUtilitiesFor(IFieldMetadataHandler))
    policy_util = getUtility(ISchemaPolicy, name=policy)

    def readField(fieldElement, schemaAttributes, fieldElements, baseFields):

        # Parse field attributes
        fieldName = fieldElement.get('name')
        fieldType = fieldElement.get('type')

        if fieldName is None or fieldType is None:
            raise ValueError("The attributes 'name' and 'type' are required "
                             "for each <field /> element")

        handler = handlers.get(fieldType, None)
        if handler is None:
            handler = handlers[fieldType] = queryUtility(
                IFieldExportImportHandler, name=fieldType)
            if handler is None:
                raise ValueError(
                    "Field type %s specified for field %s is not supported"
                    % (fieldType, fieldName, ))

        field = handler.read(fieldElement)

        # Preserve order from base interfaces if this field is an override
        # of a field with the same name in a base interface
        base_field = baseFields.get(fieldName, None)
        if base_field is not None:
            field.order = base_field.order

        # Save for the schema
        schemaAttributes[fieldName] = field
        fieldElements[fieldName] = fieldElement

        return fieldName

    for schema_element in root.findall(ns('schema')):
        parseinfo.stack.append(schema_element)
        schemaAttributes = {}

        schemaName = schema_element.get('name')
        if schemaName is None:
            schemaName = u""

        bases = ()
        baseFields = {}
        based_on = schema_element.get('based-on')
        if based_on is not None:
            bases = tuple([resolve(dotted) for dotted in based_on.split()])
            for base_schema in bases:
                baseFields.update(getFields(base_schema))

        fieldElements = {}

        # Read global fields
        for fieldElement in schema_element.findall(ns('field')):
            parseinfo.stack.append(fieldElement)
            readField(fieldElement, schemaAttributes, fieldElements, baseFields)
            parseinfo.stack.pop()

        # Read fieldsets and their fields
        fieldsets = []
        fieldsets_by_name = {}

        for subelement in schema_element:
            parseinfo.stack.append(subelement)

            if subelement.tag == ns('field'):
                readField(subelement, schemaAttributes, fieldElements,
                          baseFields)
            elif subelement.tag == ns('fieldset'):
                fieldset_name = subelement.get('name')
                if fieldset_name is None:
                    raise ValueError(u"Fieldset in schema %s has no name"
                                     % (schemaName))

                fieldset = fieldsets_by_name.get(fieldset_name, None)
                if fieldset is None:
                    fieldset_label = subelement.get('label')
                    fieldset_description = subelement.get('description')
                    fieldset = fieldsets_by_name[fieldset_name] = Fieldset(
                        fieldset_name,
                        label=fieldset_label,
                        description=fieldset_description)
                    fieldsets_by_name[fieldset_name] = fieldset
                    fieldsets.append(fieldset)

                for fieldElement in subelement.findall(ns('field')):
                    parseinfo.stack.append(fieldElement)
                    parsed_fieldName = readField(fieldElement,
                                                 schemaAttributes,
                                                 fieldElements,
                                                 baseFields)
                    if parsed_fieldName:
                        fieldset.fields.append(parsed_fieldName)
                    parseinfo.stack.pop()

            parseinfo.stack.pop()

        schema = SchemaClass(
            name=policy_util.name(schemaName, tree),
            bases=bases + policy_util.bases(schemaName, tree) + (Schema,),
            __module__=policy_util.module(schemaName, tree),
            attrs=schemaAttributes)

        # Save fieldsets
        schema.setTaggedValue(FIELDSETS_KEY, fieldsets)

        # Let metadata handlers write metadata
        for handler_name, metadata_handler in field_metadata_handlers:
            for fieldName in schema:
                if fieldName in fieldElements:
                    metadata_handler.read(fieldElements[fieldName], schema,
                                          schema[fieldName])

        for handler_name, metadata_handler in schema_metadata_handlers:
            metadata_handler.read(schema_element, schema)

        model.schemata[schemaName] = schema
        parseinfo.stack.pop()

    parseinfo.i18n_domain = None
    return model
def parse(source, policy=u""): tree = ElementTree.parse(source) root = tree.getroot() model = Model() handlers = {} schema_metadata_handlers = tuple(getUtilitiesFor(ISchemaMetadataHandler)) field_metadata_handlers = tuple(getUtilitiesFor(IFieldMetadataHandler)) policy_util = getUtility(ISchemaPolicy, name=policy) def readField(fieldElement, schemaAttributes, fieldElements, baseFields): # Parse field attributes fieldName = fieldElement.get('name') fieldType = fieldElement.get('type') if fieldName is None or fieldType is None: raise ValueError("The attributes 'name' and 'type' are required for each <field /> element") handler = handlers.get(fieldType, None) if handler is None: handler = handlers[fieldType] = queryUtility(IFieldExportImportHandler, name=fieldType) if handler is None: raise ValueError("Field type %s specified for field %s is not supported" % (fieldType, fieldName, )) field = handler.read(fieldElement) # Preserve order from base interfaces if this field is an override # of a field with the same name in a base interface base_field = baseFields.get(fieldName, None) if base_field is not None: field.order = base_field.order # Save for the schema schemaAttributes[fieldName] = field fieldElements[fieldName] = fieldElement return fieldName for schema_element in root.findall(ns('schema')): schemaAttributes = {} schema_metadata = {} schemaName = schema_element.get('name') if schemaName is None: schemaName = u"" bases = () baseFields = {} based_on = schema_element.get('based-on') if based_on is not None: bases = tuple([resolve(dotted) for dotted in based_on.split()]) for base_schema in bases: baseFields.update(getFields(base_schema)) fieldElements = {} # Read global fields for fieldElement in schema_element.findall(ns('field')): readField(fieldElement, schemaAttributes, fieldElements, baseFields) # Read fieldsets and their fields fieldsets = [] fieldsets_by_name = {} for subelement in schema_element: if subelement.tag == ns('field'): readField(subelement, schemaAttributes, fieldElements, baseFields) elif subelement.tag == ns('fieldset'): fieldset_name = subelement.get('name') if fieldset_name is None: raise ValueError(u"Fieldset in schema %s has no name" % (schemaName)) fieldset = fieldsets_by_name.get(fieldset_name, None) if fieldset is None: fieldset_label = subelement.get('label') fieldset_description = subelement.get('description') fieldset = fieldsets_by_name[fieldset_name] = Fieldset(fieldset_name, label=fieldset_label, description=fieldset_description) fieldsets_by_name[fieldset_name] = fieldset fieldsets.append(fieldset) for fieldElement in subelement.findall(ns('field')): parsed_fieldName = readField(fieldElement, schemaAttributes, fieldElements, baseFields) if parsed_fieldName: fieldset.fields.append(parsed_fieldName) schema = InterfaceClass(name=policy_util.name(schemaName, tree), bases=bases + policy_util.bases(schemaName, tree), __module__=policy_util.module(schemaName, tree), attrs=schemaAttributes) schema.setTaggedValue(FIELDSETS_KEY, fieldsets) # Save fieldsets # Let metadata handlers write metadata for handler_name, metadata_handler in field_metadata_handlers: for fieldName in schema: if fieldName in fieldElements: metadata_handler.read(fieldElements[fieldName], schema, schema[fieldName]) for handler_name, metadata_handler in schema_metadata_handlers: metadata_handler.read(schema_element, schema) model.schemata[schemaName] = schema return model