def test_handler(self):
    """ """
    with open(os.path.join(FHIR_FIXTURE_PATH, 'Organization.json'), 'r') as f:
        fhir_str = f.read()

    fhir_field = getFields(ITestToken)['resource']

    # Test: available as adapter
    resource_handler = queryMultiAdapter((fhir_field, ), IToUnicode)
    self.assertIsNotNone(resource_handler)
    self.assertIsInstance(resource_handler, handler.JSONToUnicode)
    self.assertIsInstance(resource_handler.context, JSON)

    fhir_value = fhir_field.fromUnicode(fhir_str)
    self.assertIsInstance(resource_handler.toUnicode(fhir_value), unicode)

    # Test: available as utility
    fhir_handler_util = queryUtility(
        IFieldExportImportHandler,
        name='plone.app.jsonfield.field.JSON')
    self.assertIsNotNone(fhir_handler_util)
    self.assertEqual(fhir_handler_util, handler.JSONHandler)

    class ITestPatient(model.Schema):
        model.load(os.path.join(BASE_TEST_PATH, 'schema', 'patient.xml'))

    fhir_field2 = getFields(ITestToken)['resource']
    self.assertEqual(fhir_field2.__class__, fhir_field.__class__)

    xml_schema = serializeSchema(ITestToken)
    self.assertIn(
        '<field name="resource" type="plone.app.jsonfield.field.JSON">',
        xml_schema)
def _validate(self, value):
    # XXX HACK: Can't call the super, since it'll check to
    # XXX see if we provide DictRow.
    # We're only a dict, so we can't.
    # super(DictRow, self)._validate(value)

    # Validate the dict against the schema
    # Pass 1 - ensure fields are present
    if value is NO_VALUE:
        return

    # Treat readonly fields
    for field_name in getFields(self.schema).keys():
        field = self.schema[field_name]
        if field.readonly:
            value[field_name] = field.default

    errors = []
    for field_name in getFields(self.schema).keys():
        if field_name not in value:
            errors.append(AttributeNotFoundError(field_name, self.schema))

    if errors:
        raise WrongContainedType(errors, self.__name__)

    # Pass 2 - Ensure fields are valid
    for field_name, field_type in getFields(self.schema).items():
        if IChoice.providedBy(field_type):
            # Choice must be bound before validation otherwise
            # IContextSourceBinder is not iterable in validation
            bound = field_type.bind(value)
            bound.validate(value[field_name])
        else:
            field_type.validate(value[field_name])
def get_fields(fti):
    schema = fti.lookupSchema()
    fields = getFields(schema)
    for behavior_id in fti.behaviors:
        schema = getUtility(IBehavior, behavior_id).interface
        if not IFormFieldProvider.providedBy(schema):
            continue
        fields.update(getFields(schema))
    return fields
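All of these snippets build on the same zope.schema.getFields() call. As a reference point, here is a minimal standalone sketch (plain zope.schema, no Plone required; IPerson is a made-up example schema) of what getFields returns for a simple interface:

from zope.interface import Interface
from zope.schema import Int, TextLine, getFields


class IPerson(Interface):
    name = TextLine(title=u"Name")
    age = Int(title=u"Age", required=False)


# getFields returns a plain dict mapping field names to field instances,
# e.g. {'name': <TextLine ...>, 'age': <Int ...>}
fields = getFields(IPerson)
for field_name, field in sorted(fields.items()):
    print(field_name, field.__class__.__name__, field.required)

Because the result is an ordinary dict of unbound field objects, the surrounding examples can freely call .items(), .keys(), index it by field name, or merge several schemas' fields into one mapping.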
def get_obj_schema(obj):
    for iface in providedBy(obj).flattened():
        for name, field in getFields(iface).items():
            yield name, field
    assignable = IBehaviorAssignable(obj, None)
    if assignable:
        for behavior in assignable.enumerateBehaviors():
            for name, field in getFields(behavior.interface).items():
                yield name, field
def test_color_select_widget_render(self):
    """
    Test if the color select widget renders a correct input type
    """
    header_color_field = getFields(IInstitution)["header_color"]
    color_select_render = ColorSelectFieldWidget(
        header_color_field, self.portal.REQUEST).render()
    self.assertIn(
        """<input type="color" id="header_color""", color_select_render)
def __init__(self, oid=''):
    self.oid = oid

    # update defaults
    sch = self.__schema__
    for field in getFields(sch):
        setattr(self, field, copy.copy(sch[field].default))
def PreferenceGroupChecker(instance):
    """A function that generates a custom security checker.

    The attributes available in a preference group are dynamically generated
    based on the group schema and the available sub-groups. Thus, the
    permission dictionaries have to be generated at runtime and are unique
    for each preference group instance.
    """
    read_perm_dict = {}
    write_perm_dict = {}

    # Make sure that the attributes from IPreferenceGroup and IReadContainer
    # are public.
    for attrName in ('__id__', '__schema__', '__title__', '__description__',
                     'get', 'items', 'keys', 'values', '__getitem__',
                     '__contains__', '__iter__', '__len__'):
        read_perm_dict[attrName] = CheckerPublic

    # Make the attributes generated from the schema available as well.
    if instance.__schema__ is not None:
        for name in getFields(instance.__schema__):
            read_perm_dict[name] = CheckerPublic
            write_perm_dict[name] = CheckerPublic

    # Make all sub-groups available as well.
    for name in instance.keys():
        read_perm_dict[name] = CheckerPublic
        write_perm_dict[name] = CheckerPublic

    return Checker(read_perm_dict, write_perm_dict)
def Portlet(name, class_=None, title='', description='',
            template=None, schema=None, **kw):
    cdict = {}
    cdict.update(kw)
    cdict['__name__'] = name
    cdict['title'] = title
    cdict['description'] = description

    if template:
        cdict['template'] = ViewPageTemplateFile(template)

    if class_ is not None:
        class_name = 'Portlet<%s:%s>' % (class_.__name__, name)
    else:
        class_name = 'Portlet<%s>' % name

    if class_ is None:
        bases = (PortletBase,)
    else:
        bases = (class_, PortletBase)

    PortletClass = type(str(class_name), bases, cdict)

    if schema is not None:
        for f_id in getFields(schema):
            if not hasattr(PortletClass, f_id):
                setattr(PortletClass, f_id,
                        ConfigurationProperty(schema[f_id]))

        PortletClass.__schema__ = schema
        interface.classImplements(PortletClass, schema)

    return PortletClass
def get_subsite_info(self):
    subsite = None
    for item in self.context.aq_chain:
        if ISubsite.providedBy(item):
            subsite = item

    if not subsite:
        return {}

    serializer = queryMultiAdapter(
        (subsite, self.request), ISerializeToJsonSummary
    )
    data = serializer()

    for schema in iterSchemata(subsite):
        for name, field in getFields(schema).items():
            if name not in [
                "subsite_header",
                "subsite_footer",
                "subsite_css_class",
                "image",
            ]:
                continue
            serializer = queryMultiAdapter(
                (field, subsite, self.request), IFieldSerializer
            )
            value = serializer()
            data[json_compatible(name)] = value
    return data
def _potential_relations(obj):
    """Given an object return tuples of name, index, relation value.

    Returns both IRelationValue attributes as well as ITemporaryRelationValue
    attributes.

    If this is a IRelationList attribute, index will contain the index in
    the list. If it's a IRelation attribute, index will be None.
    """
    for iface in providedBy(obj).flattened():
        for name, field in getFields(iface).items():
            frel = name + '_rel'
            _rel = iface.queryTaggedValue(frel)
            if _rel is not None:
                field = _rel
                name = frel
            if IRelation.providedBy(field):
                try:
                    relation = getattr(obj, name)
                except AttributeError:
                    # can't find this relation on the object
                    continue
                yield name, None, relation
            if IRelationList.providedBy(field):
                try:
                    l = getattr(obj, name)
                except AttributeError:
                    # can't find the relation list on this object
                    continue
                if l is not None:
                    for i, relation in enumerate(l):
                        yield name, i, relation
def serializeToJSON(context, request):
    result = {}
    permission_cache = {}
    for schema in iterFlowSchemata(context):
        if not schema.providedBy(context):
            continue
        read_permissions = mergedTaggedValueDict(schema, READ_PERMISSIONS_KEY)
        for name, field in getFields(schema).items():
            if IRichTextLabel.providedBy(field):
                continue
            if not check_permission(
                read_permissions.get(name),
                context,
                permission_cache,
            ):
                continue
            serializer = queryMultiAdapter((field, context, request),
                                           IFieldSerializer)
            value = serializer()

            # Clean FlowSubmissionData values
            if value == removable:
                value = None
            elif isinstance(value, list):
                value = [v for v in value if v != removable]
            elif isinstance(value, tuple):
                value = tuple([v for v in value if v != removable])
            elif isinstance(value, set):
                value = set([v for v in value if v != removable])

            result[json_compatible(name)] = value
    return result
def import_people(request, container, portal_type, format, data):
    dataset = get_dataset(format, data)
    attribute_map = get_attribute_map(request, dataset.headers, portal_type)
    schema = tools.get_schema_from_portal_type(portal_type)

    for ix, record in enumerate(dataset.dict):
        log.info('processing row number {}'.format(ix+1))
        try:
            values = get_attribute_values(request, record, attribute_map)

            # add None for missing values
            for field in getFields(schema).keys():
                if field not in values:
                    values[field] = None

            validate_attribute_values(schema, values)

            api.content.create(
                container=container,
                type=portal_type,
                id='',
                **values
            )
        except ContentImportError, e:
            raise ContentImportError(
                e.message, rownumber=ix+1, colname=e.colname
            )
        except Exception, e:
            log.exception('error importing people')
            raise ContentImportError(e.message, rownumber=ix+1)
def registerPreference(name, schema, klass=None,
                       title='', description='', configContext=None):
    if '.' in name:
        category, name = name.split('.', 1)
    else:
        category = ''

    PreferenceClass = preferencetype.PreferenceType(
        str(name), category, schema, klass, title, description)

    # register storage schema
    if getFields(schema):
        storage.registerSchema(
            'memphis.preferences-%s.%s' % (category, name), schema)

    # instance
    inst = PreferenceClass()

    # register preference in preferences
    def _register(preference):
        root.Preferences.addPreference(preference)

    config.addAction(
        configContext,
        discriminator=('memphis.preferences:preference', category, name),
        callable=_register,
        args=(inst,))

    return inst
def getCASPlugin():
    """Return CAS Plugin within acl_users, creating one if required."""
    portal = getSite()
    acl_users = getToolByName(portal, 'acl_users')
    cas_auth_helpers = acl_users.objectValues(['CAS Auth Helper'])
    if not cas_auth_helpers:
        cas4pas = acl_users.manage_addProduct['CAS4PAS']
        cas4pas.addCASAuthHelper('cas', 'CAS Auth Helper')
        cas = acl_users['cas']

        registry = getUtility(IRegistry)
        casSettings = registry.forInterface(ICAS4PASPluginSchema)

        # Load defaults from fields
        fields = getFields(ICAS4PASPluginSchema)
        for field in fields:
            # Only set attributes the PAS plugin knows about
            if hasattr(CASAuthHelper, field):
                # Set from registry settings, which will pick up defaults
                value = getattr(casSettings, field)
                setattr(cas, field, value)

        out = StringIO()
        activatePluginInterfaces(portal, 'cas', out)
        msg = 'Created CAS plugin. %s' % out.getvalue()
        IStatusMessage(portal.REQUEST).addStatusMessage(msg, 'info')
    else:
        cas = cas_auth_helpers[0]
    return cas
def _include_fields(self, obj):
    """ """
    result = {}
    # Compute fields if include_all or metadata_fields
    if self.include_all or self.metadata_fields:
        for schema in iterSchemata(self.context):
            read_permissions = mergedTaggedValueDict(
                schema, READ_PERMISSIONS_KEY)

            for name, field in getFields(schema).items():
                # only keep relevant fields
                if (self.include_all and not self.metadata_fields
                        ) or name in self.metadata_fields:
                    if not self.check_permission(
                            read_permissions.get(name), obj):
                        continue

                    # serialize the field
                    serializer = queryMultiAdapter(
                        (field, obj, self.request), IFieldSerializer)
                    value = serializer()
                    result[json_compatible(name)] = value
    return result
def __call__(self, value, filestore, extra=None):
    """Create a new dict with all the contents serialized"""
    rv = {}
    for field_name, field_type in getFields(self.field.schema).items():
        rv[field_name] = self._serializer(field_type)(
            value.get(field_name), filestore, field_name)
    return rv
def get(self):
    data = dict(self.annotations.get(self.key, {}))
    if self.tileType is not None and self.tileType.schema is not None:
        for name, field in getFields(self.tileType.schema).items():
            if name not in data:
                data[name] = field.missing_value
    return data
def __call__(self):
    result = {
        '@context': 'http://www.w3.org/ns/hydra/context.jsonld',
        '@id': self.context.absolute_url(),
        '@type': self.context.portal_type,
        'parent': {
            '@id': aq_parent(aq_inner(self.context)).absolute_url(),
            'title': aq_parent(aq_inner(self.context)).title,
            'description': aq_parent(aq_inner(self.context)).description
        },
        'created': json_compatible(self.context.created()),
        'modified': json_compatible(self.context.modified()),
        'UID': self.context.UID(),
    }

    for schema in iterSchemata(self.context):
        read_permissions = mergedTaggedValueDict(
            schema, READ_PERMISSIONS_KEY)

        for name, field in getFields(schema).items():
            if not self.check_permission(read_permissions.get(name)):
                continue
            serializer = queryMultiAdapter(
                (field, self.context, self.request), IFieldSerializer)
            value = serializer()
            result[json_compatible(name)] = value

    return result
def test_available_adapter(self):
    """ """
    with open(os.path.join(FHIR_FIXTURE_PATH, 'Organization.json'), 'r') as f:
        json_dict = json.load(f)

    context = api.content.create(
        container=self.portal,
        type='TestToken',
        id=None,
        title='Test Organization xxx',
    )
    fhir_field = getFields(ITestToken)['resource']
    fhir_value = fhir_field.from_iterable(json_dict)
    fhir_field.set(context, fhir_value)

    serializer = queryMultiAdapter(
        (fhir_field, context, self.request), IFieldSerializer)

    # Test if adapter is available
    self.assertIsNotNone(serializer)

    value = serializer()
    self.assertEqual(json_dict['resourceType'], value['resourceType'])

    # Test with None value
    serializer.context.resource = None
    self.assertIsNone(serializer())
def geo_settings(self):
    settings = {}
    fields = [i for i in getFields(IGeoSettings)]
    manager = utils.geo_settings(self.context)
    for name in fields:
        settings[name] = getattr(manager, name, None)
    return settings
def ViewModelType(name, class_=None, provides=(),
                  title='', description='', schema=None, **kw):
    cdict = {}
    cdict.update(kw)
    cdict['__id__'] = name
    cdict['__name__'] = name
    cdict['__schema__'] = schema
    cdict['__title__'] = title
    cdict['__description__'] = description

    class_name = 'ViewModel<%s>' % name

    if class_ is None:
        bases = (ViewModel,)
    else:
        bases = (class_, ViewModel)

    ViewModelClass = type(str(class_name), bases, cdict)

    if provides:
        interface.classImplements(ViewModelClass, *provides)

    if schema is not None:
        for f_id in getFields(schema):
            if not hasattr(ViewModelClass, f_id) and \
                    f_id not in ('context', 'request'):
                setattr(ViewModelClass, f_id, StorageProperty(schema[f_id]))

        interface.classImplements(ViewModelClass, schema)

    return ViewModelClass
def get(self):
    # use explicitly set data (saved as annotation on the request)
    if self.key in self.annotations:
        data = dict(self.annotations[self.key])
        if self.tileType is not None and self.tileType.schema is not None:
            for name, field in getFields(self.tileType.schema).items():
                if name not in data:
                    data[name] = field.missing_value

    # try to use a '_tiledata' parameter in the request
    elif '_tiledata' in self.tile.request.form:
        data = json.loads(self.tile.request.form['_tiledata'])

    # fall back to the copy of request.form object itself
    else:
        # If we don't have a schema, just take the request
        if self.tileType is None or self.tileType.schema is None:
            data = self.tile.request.form.copy()
        else:
            # Try to decode the form data properly if we can
            try:
                data = decode(self.tile.request.form,
                              self.tileType.schema, missing=True)
            except (ValueError, UnicodeDecodeError,):
                LOGGER.exception(u'Could not convert form data to schema')
                return self.data.copy()

    return data
def test_integration_mail_events(self):
    """ Trigger every event of a mail at least once and check
    the journal entries.
    """
    portal = self.layer['portal']
    dossier = createContentInContainer(
        portal, 'opengever.dossier.businesscasedossier', 'd1')

    fti = getUtility(IDexterityFTI, name='ftw.mail.mail')
    schema = fti.lookupSchema()
    field_type = getFields(schema)['message']._type

    msgtxt = 'Subject: mail-test\n'
    mail = createContentInContainer(dossier, 'ftw.mail.mail',
                                    message=field_type(
                                        data=msgtxt,
                                        contentType=u'message/rfc822',
                                        filename=u'attachment.txt'))

    # The journal of a mail is always on the parent dossier and not
    # on the mail
    self.check_annotation(
        dossier,
        action_type='Mail added',
        action_title='Mail added: %s' % mail.title_or_id(),
        check_entry=-2,
    )
def testDataGridFields(self):
    """ The DataGridFields should have at least one entry, as they
    are required.  We get problems when adding an Issue if we are
    not careful.  See http://plone.org/products/poi/issues/139
    """
    # This is what a real entry looks like:
    real_entry = {
        'description': u'Something nice.',
        'short_name': u'something',
        'orderindex_': u'1',
        'title': u'Something'
    }
    fields = getFields(ITracker)

    # Test the availableAreas field.
    field = fields['available_areas']
    input = [real_entry]
    self.assertEqual(field.validate(input), None)

    # Test the availableIssueTypes field.
    field = fields['available_issue_types']
    input = [real_entry]
    self.assertEqual(field.validate(input), None)
def evolve(context):
    root = getRootFolder(context)

    for content in findObjectsMatching(root, IRevisions.providedBy):
        content._p_activate()
        schema = content.__contentclass__.__schema__

        changed = False
        if not hasattr(content, '_revisions'):
            changed = True
            content._revisions = IOBTree()
            content._revisions_length = Length(0)

        for name, field in getFields(schema).items():
            if name in content.__dict__:
                setattr(content.workingRevision, name, content.__dict__[name])
                del content.__dict__[name]
                changed = True
            elif name in ('title', 'description'):
                dc = ICMFDublinCore(content)
                val = getattr(dc, name, '')
                if val:
                    setattr(content.workingRevision, name, val)

        if changed:
            content.publishWorkingRevision()
            content._p_changed = True
def _validate(self, value):
    if value is NO_VALUE:
        return

    errors = []
    for field_name, field_ in schema.getFields(self.schema).items():
        if field_name not in value and field_.required:
            errors.append(AttributeNotFoundError(field_name, self.schema))

        ftype = getattr(field_, '_type', None)
        fvalue = value.get(field_name, NO_VALUE)
        if isinstance(ftype, tuple):
            ftype = ftype[0]

        # Perform a naive type coercion
        if (fvalue is not NO_VALUE and ftype is not None
                and not isinstance(fvalue, ftype)):
            if ftype is bool and fvalue.lower() == 'false':
                fvalue = False
            try:
                value[field_name] = ftype(fvalue)
            except (ValueError, TypeError):
                pass

        if fvalue is not NO_VALUE:
            field_.validate(fvalue)

    if errors:
        raise schema.interfaces.WrongContainedType(errors, self.__name__)
def extract_data_as_object(request, schema):
    obj = Object()
    data = extract_data(request, schema)
    for key, field in zschema.getFields(schema).items():
        setattr(obj, key, data[key])
    directlyProvides(obj, schema)
    return obj
def __load__(self, datasheet):
    if self.__schema__ is not datasheet.__schema__:
        raise DatasheetException(
            "Can't load data from incompatible datasheet")

    for field in getFields(self.__schema__):
        setattr(self, field, getattr(datasheet, field))
def applyTileConfigurations(self):
    conf = self.tile.get_tile_configuration()
    fields = getFields(self.tileType.schema)
    for field_name, field_conf in conf.items():
        if 'order' in field_conf and field_conf['order']:
            fields[field_name].order = int(field_conf['order'])
def test_handleRelease(self):
    ztm = self.layer.txn
    logging.basicConfig(level=logging.CRITICAL)
    prf = ProductReleaseFinder(ztm, logging.getLogger())
    alt_file_name = 'evolution-42.0.orig.tar.bz2'
    file_path, file_name = self.create_tarball(
        'evolution-42.0.orig.tar.gz')
    file_names = set()
    prf.handleRelease('evolution', 'trunk', file_path, file_names)
    self.assertTrue(file_name in file_names)
    self.assertFalse(alt_file_name in file_names)

    # check to see that the release has been created
    evo = getUtility(IProductSet).getByName('evolution')
    trunk = evo.getSeries('trunk')
    release = trunk.getRelease('42.0')
    self.assertNotEqual(release, None)
    self.assertEqual(release.files.count(), 1)
    fileinfo = release.files[0]
    self.assertEqual(fileinfo.filetype, UpstreamFileType.CODETARBALL)
    self.assertEqual(fileinfo.libraryfile.filename, file_name)

    # verify that the fileinfo object is sane
    self.failUnless(verifyObject(IProductReleaseFile, fileinfo))
    for field in getFields(IProductReleaseFile).values():
        # XXX: BradCrittenden 2008-09-04 bug=264829:
        # Several interfaces have uploaded files as `Bytes` attributes but
        # then the values get converted to LibraryFileAlias objects.  The
        # Bytes._validate() method then fails.  As a work-around the
        # validate test is being disabled here for those fields.
        from zope.schema import Bytes
        if isinstance(field, Bytes):
            continue
        bound = field.bind(fileinfo)
        bound.validate(bound.get(fileinfo))
def get_modulistica_data(self, context=None):
    if context is None:
        context = self.context
    res = []
    for child in context.listFolderContents():
        if child.portal_type == "Document" and child.getId() == "multimedia":
            continue
        serializer = queryMultiAdapter(
            (child, self.request), ISerializeToJsonSummary)
        data = serializer()
        if child.portal_type == "Document":
            for schema in iterSchemata(context):
                for name, field in getFields(schema).items():
                    if name not in ["blocks", "blocks_layout"]:
                        continue
                    # serialize the field
                    serializer = queryMultiAdapter(
                        (field, child, self.request), IFieldSerializer)
                    value = serializer()
                    data[json_compatible(name)] = value
        if IFolderish.providedBy(child):
            children = [
                x for x in self.get_modulistica_data(context=child)
                if x.get("@type", "")
                not in ["Document", "CartellaModulistica"]
            ]
            if children:
                data["items"] = children
        res.append(data)
    return res
def _potential_relations(obj):
    """Given an object return tuples of name, index, relation value.

    Returns both IRelationValue attributes as well as ITemporaryRelationValue
    attributes.

    If this is a IRelationList attribute, index will contain the index in
    the list. If it's a IRelation attribute, index will be None.
    """
    for iface in providedBy(obj).flattened():
        for name, field in getFields(iface).items():
            if IRelation.providedBy(field):
                try:
                    relation = getattr(obj, name)
                except AttributeError:
                    # can't find this relation on the object
                    continue
                yield name, None, relation
            if IRelationList.providedBy(field):
                try:
                    l = getattr(obj, name)
                except AttributeError:
                    # can't find the relation list on this object
                    continue
                if l is not None:
                    for i, relation in enumerate(l):
                        yield name, i, relation
def no_longer_searchable(iface, field_name):
    """Removes a "searchable" mark from a previously marked field.
    """
    if schema.getFields(iface).get(field_name) is None:
        dottedname = '.'.join((iface.__module__, iface.__name__))
        raise AttributeError(
            '{0} has no field "{1}"'.format(
                dottedname, field_name
            )
        )

    store = iface.queryTaggedValue(SEARCHABLE_KEY)
    if store is None:
        return False

    key = (iface, field_name, 'true')
    if key not in store:
        return False

    store.remove(key)
    iface.setTaggedValue(SEARCHABLE_KEY, store)
    return True
def update(self):
    self.query = self.data.get('query')
    self.sort_on = self.data.get('sort_on')

    if self.query is None or self.sort_on is None:
        # Get defaults
        tileType = queryUtility(ITileType, name=self.__name__)
        fields = getFields(tileType.schema)
        if self.query is None:
            self.query = getMultiAdapter((
                self.context, self.request, None, fields['query'], None
            ), name='default').get()
        if self.sort_on is None:
            self.sort_on = getMultiAdapter((
                self.context, self.request, None, fields['sort_on'], None
            ), name='default').get()

    self.limit = self.data.get('limit')
    if self.data.get('sort_reversed'):
        self.sort_order = 'reverse'
    else:
        self.sort_order = 'ascending'
    self.view_template = self.data.get('view_template')
async def __call__(self):
    """ data input : { 'interface': 'INTERFACE' }"""
    if not hasattr(self.request, 'site_settings'):
        return ErrorResponse(
            'BadRequest',
            _("Not in a site request"))
    data = await self.request.json()
    interface = data.get('interface', None)
    initial_values = data.get('initial_values', {})
    if interface is None:
        return ErrorResponse(
            'InvalidRequest',
            'Non existent Interface')

    registry = self.request.site_settings
    iObject = import_class(interface)
    registry.register_interface(iObject)
    config = registry.for_interface(iObject)

    # Initialize values
    # If it's defined on the zope.schema, the default will not be
    # overwritten; you will need to PATCH
    for key, field in getFields(iObject).items():
        if key in initial_values and not getattr(config, key, False):
            # We don't have a value
            setattr(config, key, initial_values[key])

    return Response(response={}, status=201)
def patch_entry_explicit_version(interface, version):
    """Make it look as though an entry definition used as_of.

    This function should be phased out in favor of actually using
    as_of. This function patches the entry's fields as well as the
    entry itself.

    Fields that are explicitly published as of a given version (even
    though the entry is not) are ignored.
    """
    tagged = interface.getTaggedValue(LAZR_WEBSERVICE_EXPORTED)
    versioned = tagged.dict_for_name(version) or tagged.dict_for_name(None)
    versioned['_as_of_was_used'] = True

    # Now tag the fields.
    for name, field in getFields(interface).items():
        tagged = field.queryTaggedValue(LAZR_WEBSERVICE_EXPORTED)
        if tagged is None:
            continue
        versioned = (tagged.dict_for_name(version)
                     or tagged.dict_for_name(None))
        if versioned is None:
            # This field is explicitly published in some other version.
            # Just ignore it.
            continue
        else:
            versioned['_as_of_was_used'] = True
def get_valid_value(self, field_name):
    schema = self.schema
    value = self.get_field(field_name)
    if schema:
        field = getFields(schema).get(field_name, None)
        if field:
            values = []
            if hasattr(field, 'vocabularyName'):
                vocabulary_name = field.vocabularyName
                if vocabulary_name:
                    values = getVocabularyTerms(
                        self.context, vocabulary_name=vocabulary_name)
            elif hasattr(field, 'vocabulary'):
                vocabulary = field.vocabulary
                if vocabulary:
                    values = getVocabularyTerms(self.context,
                                                vocabulary=vocabulary)
            if values:
                if value in values:
                    return value
                return field.default
    return value
def __call__(self):
    parent = aq_parent(aq_inner(self.context))
    parent_summary = getMultiAdapter(
        (parent, self.request), ISerializeToJsonSummary)()
    result = {
        # '@context': 'http://www.w3.org/ns/hydra/context.jsonld',
        '@id': self.context.absolute_url(),
        'id': self.context.id,
        '@type': self.context.portal_type,
        'parent': parent_summary,
        'created': json_compatible(self.context.created()),
        'modified': json_compatible(self.context.modified()),
        'review_state': self._get_workflow_state(),
        'UID': self.context.UID(),
    }

    for schema in iterSchemata(self.context):
        read_permissions = mergedTaggedValueDict(
            schema, READ_PERMISSIONS_KEY)

        for name, field in getFields(schema).items():
            if not self.check_permission(read_permissions.get(name)):
                continue
            serializer = queryMultiAdapter(
                (field, self.context, self.request), IFieldSerializer)
            value = serializer()
            result[json_compatible(name)] = value

    return result
def contents(self):
    self.query = self.data.get('query')
    self.sort_on = self.data.get('sort_on')

    if self.query is None or self.sort_on is None:
        # Get defaults
        tileType = queryUtility(ITileType, name=self.__name__)
        fields = getFields(tileType.schema)
        if self.query is None:
            self.query = getMultiAdapter(
                (self.context, self.request, None, fields['query'], None),
                name="default").get()
        if self.sort_on is None:
            self.sort_on = getMultiAdapter(
                (self.context, self.request, None, fields['sort_on'], None),
                name="default").get()

    self.limit = self.data.get('limit')
    if self.data.get('sort_reversed'):
        self.sort_order = 'reverse'
    else:
        self.sort_order = 'ascending'

    # Search results
    builder = getMultiAdapter(
        (self.context, self.request), name='querybuilderresults')
    accessor = builder(
        query=self.query,
        sort_on=self.sort_on or 'getObjPositionInParent',
        sort_order=self.sort_order,
        limit=self.limit)
    return accessor
def has_company_info(self):
    fields = schema.getFields(self.company_schema)
    for field_name, field in fields.iteritems():
        if field.required:
            if getattr(self.company, field_name, None) is None:
                return False
    return True
def PortletManager(
        name, class_=None, provides=(), title='', description='',
        schema=None, portlettype=IPortlet, **kw):
    # configuration schema
    if schema is None:
        schema = IPortletManagerConfiguration

    cdict = {}
    cdict.update(kw)
    cdict['__name__'] = name
    cdict['__schema__'] = schema
    cdict['title'] = title
    cdict['description'] = description
    cdict['portlettype'] = portlettype

    class_name = 'PortletManager<%s>' % name

    if class_ is None:
        bases = (PortletManagerBase,)
    else:
        bases = (class_, PortletManagerBase)

    ManagerClass = type(str(class_name), bases, cdict)

    if provides:
        interface.classImplements(ManagerClass, *provides)

    for f_id in getFields(schema):
        if not hasattr(ManagerClass, f_id):
            setattr(ManagerClass, f_id, ConfigurationProperty(schema[f_id]))

    interface.classImplements(ManagerClass, schema)

    return ManagerClass
def __iter__(self):
    for item in self.previous:
        filename = resolvePackageReferenceOrFile(item[self.key])
        file_ = open(filename, 'r')

        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        typekey = self.typekey(*keys)[0]

        # Get the file object by path
        path = item[pathkey]
        obj = self.context.unrestrictedTraverse(path.lstrip('/'), None)
        if obj is None:  # path doesn't exist
            yield item
            continue
        if not file_:
            yield item
            continue

        # Set file field
        fti = getUtility(IDexterityFTI, name=item[typekey])
        schema = fti.lookupSchema()
        field = getFields(schema)[self.field]
        fileobj = field._type(
            file_,
            filename=file_.name[file_.name.rfind('/') + 1:].decode('utf-8'))
        field.set(field.interface(obj), fileobj)

        yield item
def __call__(self):
    """ """
    local_registry = self.context.get('local_registry')

    # If registry is not present, then create it
    if local_registry is None:
        enableChildRegistry(self.context, None)

    registry = queryUtility(IRegistry)
    if registry != self.context.get('local_registry'):
        return

    settings = registry.forInterface(IThemeSettings, False)
    themes = getAvailableThemes()

    if self.context.theme:
        settings.enabled = self.context.theme != DISABLE_PLONE_APP_THEMING
        for theme in themes:
            if theme.rules == self.context.theme:
                settings.currentTheme = theme.__name__.decode()
                settings.rules = theme.rules
                settings.absolutePrefix = theme.absolutePrefix
                settings.parameterExpressions = theme.parameterExpressions
                settings.doctype = theme.doctype
    else:
        return

    fields = getFields(IThemeSettings)
    settings_fields = ('currentTheme', 'rules', 'absolutePrefix',
                       'parameterExpressions', 'doctype',)
    for settings_field in settings_fields:
        setattr(settings, settings_field, fields[settings_field].default)
def __call__(self, version=None, include_items=True):
    version = "current" if version is None else version

    obj = self.getVersion(version)
    parent = aq_parent(aq_inner(obj))
    parent_summary = getMultiAdapter(
        (parent, self.request), ISerializeToJsonSummary
    )()
    result = {
        # '@context': 'http://www.w3.org/ns/hydra/context.jsonld',
        "@id": obj.absolute_url(),
        "id": obj.id,
        "@type": obj.portal_type,
        "parent": parent_summary,
        "created": json_compatible(obj.created()),
        "modified": json_compatible(obj.modified()),
        "review_state": self._get_workflow_state(obj),
        "UID": obj.UID(),
        "version": version,
        "layout": self.context.getLayout(),
        "is_folderish": False,
    }

    # Insert next/prev information
    nextprevious = NextPrevious(obj)
    result.update(
        {"previous_item": nextprevious.previous,
         "next_item": nextprevious.next}
    )

    # Insert expandable elements
    result.update(expandable_elements(self.context, self.request))

    # Insert field values
    for schema in iterSchemata(self.context):
        read_permissions = mergedTaggedValueDict(schema, READ_PERMISSIONS_KEY)

        for name, field in getFields(schema).items():
            if not self.check_permission(read_permissions.get(name), obj):
                continue

            # serialize the field
            serializer = queryMultiAdapter(
                (field, obj, self.request), IFieldSerializer
            )
            value = serializer()
            result[json_compatible(name)] = value

    target_url = getMultiAdapter(
        (self.context, self.request), IObjectPrimaryFieldTarget
    )()
    if target_url:
        result["targetUrl"] = target_url

    result["allow_discussion"] = getMultiAdapter(
        (self.context, self.request), name="conversation_view"
    ).enabled()

    return result
def update_languages(context):
    reload_gs_profile(context)
    view = UpdateLanguages(context, getattr(context, 'REQUEST', None))
    view()
    transaction.commit()

    cts = [
        CT_DCAT_CATALOG,
        CT_DCAT_COLLECTION_CATALOG,
        CT_DCAT_DATASET,
        CT_DCAT_DISTRIBUTION,
        CT_DCT_LICENSEDOCUMENT,
        CT_DCT_LOCATION,
        CT_DCT_MEDIATYPEOREXTENT,
        CT_DCT_RIGHTSSTATEMENT,
        CT_DCT_STANDARD,
        CT_FOAF_AGENT,
        CT_SKOS_CONCEPTSCHEME,
        CT_RDFS_LITERAL,
        CT_SKOS_CONCEPT,
    ]

    portal = api.portal.get()
    if portal is None:
        return None

    changed_obj = []
    for ct in cts:
        results = api.content.find({'portal_type': ct})
        if not results:
            continue
        fti = getUtility(IDexterityFTI, name=ct)
        schema = fti.lookupSchema()
        fields = getFields(schema)
        for field_name in fields:
            field = fields[field_name]
            if isinstance(field, I18NText) or isinstance(field, I18NTextLine):
                for res in results:
                    obj = res.getObject()
                    field_value = getattr(obj, field_name, {})
                    if not field_value:
                        continue
                    new_value = clean_value(field_value)
                    if not new_value:
                        continue
                    setattr(obj, field_name, new_value)
                    if obj not in changed_obj:
                        changed_obj.append(obj)

    for obj in changed_obj:
        obj.reindexObject()
    transaction.commit()