def processXml(self, context, data_node):
    """Claim cf:compound / cf:array nodes by attaching their schema attribute.

    Returns True when the node was annotated with a matching attribute,
    False when the parser should ignore it.
    """
    tag, ns = utils.fixtag(data_node.tag, context.ns_map)
    # The metadata container itself carries no field data.
    if tag == 'metadata':
        return False
    if tag not in ('compound', 'array'):
        return False
    # Prefer the 'name' attribute; fall back to the deprecated 'id'.
    field_name = data_node.attrib.get('name', None)
    if field_name is None:
        field_name = data_node.attrib.get('id')
        log("field %s: 'id' attribute is deprecated, use 'name' instead"
            % field_name)
    assert field_name, "No field name specified in cf:[array|compound] element"
    self.last_schema_id = field_name
    found = self.getAttributeByName(field_name, context)
    if found is None:
        # No matching schema attribute -- leave the node unclaimed.
        return False
    data_node.attribute = found
    return True
def getObject(self, REQUEST=None):
    """ Used to resolve UIDs into real objects.
    This also must be annotation aware. The protocol is:
    We have the path to an object. We get this object. If its UID is not
    the UID in the brains then we need to pull it from the reference
    annotation and return that object

    Thus annotation objects store the path to the source object
    """
    obj = None
    try:
        path = self.getPath()  # NOTE(review): computed but unused; getPath() is re-called below
        try:
            # First attempt: unrestricted traversal from the portal root.
            portal = getToolByName(self, 'portal_url').getPortalObject()
            obj = portal.unrestrictedTraverse(self.getPath())
            obj = aq_inner( obj )
        except (ConflictError, KeyboardInterrupt):
            # never swallow transaction conflicts or user interrupts
            raise
        except: #NotFound # XXX bare exception
            pass
        if obj is None:
            # Traversal failed -- fall back to the catalog's URL resolver.
            if REQUEST is None:
                REQUEST = self.REQUEST
            obj = self.aq_parent.resolve_url(self.getPath(), REQUEST)
        return obj
    except (ConflictError, KeyboardInterrupt):
        raise
    except:
        # Log with full traceback and fall through (implicitly returns None).
        log('UIDCatalogBrains getObject raised an error:\n %s' %
            '\n'.join(traceback.format_exception(*sys.exc_info())))
        pass
def processXml(self, context, data_node):
    """Annotate cf:compound / cf:array data nodes with their schema attribute.

    Returns True when the node was claimed, False when the surrounding
    parser should ignore it.
    """
    tagname, namespace = utils.fixtag(data_node.tag, context.ns_map)
    if tagname == 'metadata':
        # ignore the container
        return False
    elif tagname in ('compound', 'array'):
        # basic at field specified, find the matching attribute
        # and annotate the data node with it
        schema_name = data_node.attrib.get('name', None)
        if schema_name is None:
            # legacy documents use 'id' instead of 'name'
            schema_name = data_node.attrib.get('id', )
            log("field %s: 'id' attribute is deprecated, use 'name' instead"
                % schema_name)
        assert schema_name, "No field name specified in cf:[array|compound] element"
        #print "field", schema_name
        self.last_schema_id = schema_name
        attribute = self.getAttributeByName(schema_name, context)
        if attribute is None:
            #print "na", schema_name
            return False
        data_node.attribute = attribute
        return True
    return False
def _subfieldValidationLayer(self, subfield):
    """ for the individual subfields """
    # Normalize whatever was declared in subfield_validators (a ready
    # chain, a single validator, or a tuple/list/string of validator
    # names) into a named ValidationChain.
    chainname = 'Validator_%s_%s' % (self.getName(), subfield)
    current_validators = self.subfield_validators.get(subfield, ())
    if type(current_validators) is DictType:
        # dict-based configuration predates validation chains
        raise NotImplementedError, 'Please use the new syntax with validation chains'
    elif providedBy(IValidationChain, current_validators):
        # already a ready-made chain -- use as is
        validators = current_validators
    elif providedBy(IValidator, current_validators):
        # single validator -- wrap it into a chain
        validators = ValidationChain(chainname, validators=current_validators)
    elif type(current_validators) in (TupleType, ListType, StringType):
        if len(current_validators):
            # got a non empty list or string - create a chain
            try:
                validators = ValidationChain(
                    chainname, validators=current_validators)
            except (UnknowValidatorError, FalseValidatorError), msg:
                # unknown validator names disable validation entirely
                log("WARNING: Disabling validation for %s/%s: %s" %
                    (self.getName(), subfield, msg))
                validators = ()
        else:
            validators = ()
    # NOTE(review): no return statement is visible in this copy -- the
    # required_subfields / isEmpty handling appears truncated here;
    # confirm against the complete variant of this method.
def registerType(klass, package=None):
    """Register an Archetypes content class with the type registry.

    Runs classgen over ``klass`` (generating accessors/mutators and sane
    meta/portal_type) and records its metadata in the module-level
    ``_types`` mapping under '<package>.<meta_type>'.  Re-registering an
    existing key overrides the previous entry and logs a notice.
    """
    if not package:
        # Registering without a package is deprecated; guess it from the
        # class' module path for backward compatibility.
        deprecated("registerType without a package name is deprecated. "
                   "Please apply a package name for class %s" % repr(klass),
                   level=2)
        package = _guessPackage(klass.__module__)

    # Registering a class results in classgen doing its thing
    # Set up accessor/mutators and sane meta/portal_type
    generateClass(klass)

    data = {
        'klass': klass,
        'name': klass.__name__,
        'identifier': klass.meta_type.capitalize().replace(' ', '_'),
        'meta_type': klass.meta_type,
        'portal_type': klass.portal_type,
        'package': package,
        'module': sys.modules[klass.__module__],
        'schema': klass.schema,
        'signature': klass.schema.signature(),
        # backward compatibility, remove later
        'type': klass.schema,
        }

    key = '%s.%s' % (package, data['meta_type'])
    # FIX: direct membership test instead of materializing .keys()
    if key in _types:
        existing = _types[key]
        existing_name = '%s.%s' % (existing['module'].__name__,
                                   existing['name'])
        override_name = '%s.%s' % (data['module'].__name__, data['name'])
        log('ArchetypesTool: Trying to register "%s" which '
            'has already been registered. The new type %s '
            'is going to override %s' % (key, override_name, existing_name))
    _types[key] = data
def instructor_to_instructors(obj, val, **kwargs):
    """Migration shim: ensure the instructor value is a list.

    A bare string becomes a one-element list; anything else is passed
    through unchanged.
    """
    log('%s: instructor_to_instructors called for %s'
        % (ECL_NAME, obj.TitleOrId()))
    if not isinstance(val, StringTypes):
        return val
    return [val, ]
def manage_inspect(self, UID, REQUEST=None):
    """Dump some things about an object hook in the debugger for now.

    Logs the object, its schema and its attribute names; when invoked
    through the web, redirects back to the UID management screen.
    """
    object = self.getObject(UID)
    log(object, object.Schema(), dir(object))
    # FIX: only redirect when a request is present -- calling this from
    # trusted code with the default REQUEST=None used to raise
    # AttributeError on None.RESPONSE.
    if REQUEST is not None:
        return REQUEST.RESPONSE.redirect(self.absolute_url() +
                                         '/manage_uids')
def enum(self, callback, *args, **kwargs):
    """Invoke ``callback(obj, *args, **kwargs)`` for every cataloged UID.

    UIDs that no longer resolve to an object are logged and skipped.
    """
    uid_catalog = getToolByName(self, UID_CATALOG)
    for uid in uid_catalog.uniqueValuesFor('UID'):
        obj = self.getObject(uid)
        if not obj:
            log('No object for %s' % uid)
            continue
        callback(obj, *args, **kwargs)
def wrap_method(klass, name, method, pattern='__at_wrapped_%s__'):
    """Replace ``klass.name`` with ``method``, stashing the original.

    The original implementation is stored under ``pattern % name`` and
    the replacement is marked so a second wrapping attempt is detected
    and skipped.
    """
    original = getattr(klass, name)
    if isWrapperMethod(original):
        # Guard: never wrap the same method twice.
        log('Already wrapped method %s.%s. Skipping.' %
            (klass.__name__, name))
        return
    stash_name = pattern % name
    setattr(klass, stash_name, original)
    setattr(method, ORIG_NAME, stash_name)
    setattr(method, WRAPPER, True)
    setattr(klass, name, method)
def parseCompound(self, context, node):
    """Parse the children of a compound node into a {name: value} dict."""
    parsed = {}
    for child in node:
        key = child.attrib.get('name', None)
        if key is None:
            # Accept the legacy 'id' attribute but warn about it.
            key = child.attrib.get('id', None)
            log("compoundfield %s: 'id' attribute is deprecated, use 'name' instead" % key)
        parsed[key] = self.parseXmlNode(context, child)
    return parsed
def initializeInstance(self, instance, item=None, container=None):
    """Create the SQL backing row for ``instance`` on first use.

    Builds (if needed) a table matching the instance's portal_type,
    inserts a row keyed by the instance UID, and replays any field
    values stashed by a previous move.
    """
    if self.is_initialized(instance) or \
       getattr(instance, "_at_is_fake_instance", None):
        # duh, we don't need to be initialized twice
        return
    factory = getToolByName(instance, "portal_factory")
    if factory.isTemporary(instance):
        # objects still under portal_factory have no storage yet
        return
    fields = instance.Schema().fields()
    # only object fields whose storage is exactly this storage class
    fields = [f for f in fields
              if IObjectField.isImplementedBy(f) and
              f.getStorage().__class__ is self.__class__]
    columns = []
    args = {}
    for field in fields:
        # map the Archetypes field type onto the backend column type
        type = self.db_type_map.get(field.type, field.type)
        name = field.getName()
        # MySQL supports escape for columns names!
        if self.__class__.__name__ == "MySQLSQLStorage":
            columns.append("`%s` %s" % (name, type))
        else:
            columns.append("%s %s" % (name, type))
    parent = container or aq_parent(aq_inner(instance))
    # parent UID, or None when the parent is not UID-aware
    args["PARENTUID"] = getattr(aq_base(parent), "UID", lambda: None)()
    args["table"] = instance.portal_type
    args["UID"] = instance.UID()
    # args['db_encoding']=kwargs.get('db_encoding',None)
    args["columns"] = ", " + ", ".join(columns)
    if not self.table_exists(instance):
        self._query(instance, self.query_create, args)
        log("created table %s\n" % args["table"])
    try:
        self._query(instance, self.query_insert, args)
    except ConflictError:
        raise
    except:
        # usually, duplicate key
        # raise SQLInitException(msg)
        raise
    # NOTE(review): '__initialized' is name-mangled to the enclosing
    # class -- confirm readers use the same mangled attribute name.
    try:
        instance.__initialized += (self.getName(),)
    except AttributeError:
        instance.__initialized = (self.getName(),)
    # now, if we find an attribute called _v_$classname_temps, it
    # means the object was moved and we can initialize the fields
    # with those values
    temps_var = "_v_%s_temps" % self.getName()
    if hasattr(aq_base(instance), temps_var):
        temps = getattr(instance, temps_var)
        for key, value in temps.items():
            instance.Schema()[key].set(instance, value)
        delattr(instance, temps_var)
    try:
        del instance.__cleaned
    except (AttributeError, KeyError):
        pass
def parseCompound(self, context, node):
    """Parse a compound node's children into a name -> value mapping."""
    def _key_for(child):
        # 'name' is preferred; 'id' is the deprecated spelling.
        key = child.attrib.get('name', None)
        if key is not None:
            return key
        key = child.attrib.get('id', None)
        log("compoundfield %s: 'id' attribute is deprecated, use 'name' instead" % key)
        return key

    result = {}
    for child in node:
        result[_key_for(child)] = self.parseXmlNode(context, child)
    return result
def _rawEnum(self, callback, *args, **kwargs):
    """Finds all object to check if they are 'referenceable'.

    Resolves every brain in portal_catalog; IBaseObject implementers are
    passed to ``callback``, unresolvable brains are logged.
    """
    catalog = getToolByName(self, 'portal_catalog')
    for brain in catalog(id=[]):
        obj = brain.getObject()
        if obj is None:
            log('no object for brain: %s:%s' % (brain, brain.getURL()))
            continue
        if IBaseObject.isImplementedBy(obj):
            callback(obj, *args, **kwargs)
def isBinary(self):
    """Return true if this contains a binary value, else false.
    """
    marker = []
    value = getattr(self, 'binary', marker)
    if value is not marker:
        return value
    # XXX workaround for "[ 1040514 ] AttributeError on some types after
    # migration 1.2.4rc5->1.3.0"
    # Somehow and sometimes the binary attribute gets lost magically.
    # Recompute it from the mimetype: anything not text/* is binary.
    self.binary = not str(self.mimetype).startswith('text')
    log("BaseUnit: Failed to access attribute binary for mimetype %s. "
        "Setting binary to %s" % (self.mimetype, self.binary), level=ERROR)
    return self.binary
def reindexObjectSecurity(self, skip_self=False):
    """update security information in all registered catalogs.

    Recatalogs only the security-relevant indexes
    (``_cmf_security_indexes``) for this object and everything below it,
    in every catalog registered for this meta_type.
    """
    at = getToolByName(self, TOOL_NAME, None)
    if at is None:
        return
    catalogs = [c for c in at.getCatalogsByType(self.meta_type)
                if c is not None]
    path = '/'.join(self.getPhysicalPath())

    for catalog in catalogs:
        # Prefer unrestricted search; fall back for catalogs without it.
        unrestrictedSearchResults = getattr(
            catalog, 'unrestrictedSearchResults',
            catalog.searchResults)  # PATCH

        for brain in unrestrictedSearchResults(path=path):
            brain_path = brain.getPath()
            if brain_path == path and skip_self:
                continue
            # Get the object
            if hasattr(aq_base(brain), '_unrestrictedGetObject'):
                ob = brain._unrestrictedGetObject()
            else:
                # BBB: Zope 2.7
                ob = self.unrestrictedTraverse(brain_path, None)
            if ob is None:
                # BBB: Ignore old references to deleted objects.
                # Can happen only in Zope 2.7, or when using
                # catalog-getObject-raises off in Zope 2.8
                log("reindexObjectSecurity: Cannot get %s from catalog" %
                    brain_path, level=WARNING)
                continue

            # Recatalog with the same catalog uid.
            # PATCH: restrict to indexes valid for *this* catalog.
            # FIX: the original referenced the leaked list-comp variable
            # 'c' (always the last catalog) instead of 'catalog'.
            lst = [i for i in self._cmf_security_indexes
                   if i in catalog.indexes()]  # only valid indexes
            if lst:
                catalog.catalog_object(ob, uid=brain_path, idxs=lst,
                                       update_metadata=0)
def parseContext(self, instance, data):
    """Parse ATXML ``data`` into a ParseContext bound to ``instance``."""
    #parser = XmlParser( instance, data, use_validation=self.use_validation)
    root = ElementTree.fromstring(data)
    ns_map = self.getNamespaceURIMap()
    context = ParseContext(instance, root, ns_map)
    # keep the raw source around for later error reporting / debugging
    context.xmlsource = data
    self.parseXml( root, context, ns_map )
    if self.use_validation: # and not reader.IsValid():
        # NOTE(review): errors come from a module-level ``error_callback``
        # accumulator -- confirm the RelaxNG validation hooks populate it
        # before relying on this message.
        errors = error_callback.get(clear=True)
        log(errors)
        raise MarshallingException, ("Input failed to validate against "
                                     "the ATXML RelaxNG schema.\n"
                                     "%s" % errors)
    return context
def getCatalogsByType(self, portal_type): """Return the catalog objects assoicated with a given type. """ catalogs = [] catalog_map = getattr(self, 'catalog_map', None) if catalog_map is not None: names = self.catalog_map.get(portal_type, ['portal_catalog']) else: names = ['portal_catalog'] portal = getToolByName(self, 'portal_url').getPortalObject() for name in names: try: catalogs.append(getToolByName(portal, name)) except (ConflictError, KeyboardInterrupt): raise except Exception, E: log('No tool', name, E) pass
def marshall(self, instance, **kwargs):
    """Marshall the instance's primary field.

    Returns a ``(content_type, length, data)`` triple for the primary
    field's value, handling IBaseUnit values, OFS File values, and a
    best-effort fallback for everything else.
    """
    p = instance.getPrimaryField()
    if not p:
        raise TypeError, 'Primary Field could not be found.'
    data = p and instance[p.getName()] or ''
    content_type = length = None
    # Gather/Guess content type
    if IBaseUnit.isImplementedBy(data):
        content_type = data.getContentType()
        length = data.get_size()
        data = data.getRaw()
    elif isinstance(data, File):
        content_type = data.content_type
        length = data.get_size()
        data = data.data
    else:
        log('WARNING: PrimaryFieldMarshaller(%r): '
            'field %r does not return a IBaseUnit '
            'instance.' % (instance, p.getName()))
        if hasattr(p, 'getContentType'):
            content_type = p.getContentType(instance) or 'text/plain'
        else:
            content_type = (data and guess_content_type(data)
                            or 'text/plain')
        # DM 2004-12-01: "FileField"s represent a major field class
        # that does not use "IBaseUnit" yet.
        # Ensure, the used "File" objects get the correct length.
        if hasattr(p, 'get_size'):
            length = p.get_size(instance)
        else:
            # DM: this almost surely is stupid!
            length = len(data)
        # ObjectField without IBaseUnit?
        if shasattr(data, 'data'):
            data = data.data
        else:
            data = str(data)
            # DM 2004-12-01: recompute 'length' as we now know it
            # definitely
            length = len(data)
    return (content_type, length, data)
def delegate(self, method, obj, data=None, file=None, **kw):
    """Dispatch ``marshall``/``demarshall`` to the best marshaller.

    Queries the marshaller tool (when reachable from ``obj``) for
    registered components, falling back to ``self.fallback`` when no
    component matches; raises MarshallingException when neither exists.
    """
    if file is not None:
        kw['file'] = file
    __traceback_info__ = (method, obj, kw)
    context = getContext(obj, kw.get('REQUEST'))
    if context is not obj:
        # If the object is being created by means of a PUT
        # then it has no context, and some of the stuff
        # we are doing here may require a context.
        # Wrapping it in an ImplicitAcquisitionWrapper should
        # be safe as long as nothing tries to persist
        # a reference to the wrapped object.
        obj = ImplicitAcquisitionWrapper(obj, context)
    tool = getToolByName(obj, TOOL_ID, None)
    components = None
    if tool is not None:
        info = kw.copy()
        info['data'] = data
        info['mode'] = method
        components = tool.getMarshallersFor(obj, **info)
    else:
        # Couldn't find a context to get
        # hold of the tool or the tool is not installed.
        log('Could not find the marshaller tool. '
            'It might not be installed or you might not '
            'be providing enough context to find it.')
    # We just use the first component, if one is returned.
    if components:
        marshaller = getComponent(components[0])
    else:
        # If no default marshaller was provided then we complain.
        if self.fallback is None:
            raise MarshallingException(
                "Couldn't get a marshaller for %r, %r" % (obj, kw))
        # Otherwise, use the default marshaller provided. Note it
        # must be an instance, not a factory.
        marshaller = self.fallback
    __traceback_info__ = (marshaller, method, obj, kw)
    args = (obj,)
    if method == 'demarshall':
        # demarshall additionally receives the raw payload
        args += (data,)
    method = getattr(marshaller, method)
    return method(*args, **kw)
def fixUpSMIGlobs(portal, out=None):
    """Remove glob patterns wrongly registered as file extensions.

    Re-initializes the shared-mime-info types, then, for every glob
    known to the mimetypes registry, unregisters any extension entry
    that is really a glob and re-registers the cleaned mimetype item.
    """
    from Products.MimetypesRegistry.mime_types import smi_mimetypes
    from Products.Archetypes.debug import log
    mtr = getToolByName(portal, 'mimetypes_registry')
    smi_mimetypes.initialize(mtr)

    # Now comes the fun part. For every glob, lookup a extension
    # matching the glob and unregister it.
    for glob in mtr.globs.keys():
        # FIX: idiomatic membership test instead of deprecated has_key()
        if glob in mtr.extensions:
            log('Found glob %s in extensions registry, removing.' % glob)
            mti = mtr.extensions[glob]
            del mtr.extensions[glob]
            if glob in mti.extensions:
                log('Found glob %s in mimetype %s extensions, '
                    'removing.' % (glob, mti))
                exts = list(mti.extensions)
                exts.remove(glob)
                mti.extensions = tuple(exts)
                mtr.register(mti)
def _subfieldValidationLayer(self, subfield):
    """ for the individual subfields """
    # Normalize the configured subfield validators (a ready chain, a
    # single validator, or a tuple/list/string of names) into a named
    # ValidationChain; bad configuration disables validation.
    chainname = 'Validator_%s_%s' % (self.getName(), subfield)
    current_validators = self.subfield_validators.get(subfield, ())
    if type(current_validators) is DictType:
        # dict-based configuration predates validation chains
        raise NotImplementedError, 'Please use the new syntax with validation chains'
    elif IValidationChain.isImplementedBy(current_validators):
        # already a ready-made chain
        validators = current_validators
    elif IValidator.isImplementedBy(current_validators):
        # single validator -- wrap it into a chain
        validators = ValidationChain(chainname, validators=current_validators)
    elif type(current_validators) in (TupleType, ListType, StringType):
        if len(current_validators):
            # got a non empty list or string - create a chain
            try:
                validators = ValidationChain(chainname,
                                             validators=current_validators)
            except (UnknowValidatorError, FalseValidatorError), msg:
                log("WARNING: Disabling validation for %s/%s: %s" %
                    (self.getName(), subfield, msg))
                validators = ()
        else:
            validators = ()
    # NOTE(review): this copy ends without a return statement -- the
    # required_subfields / isEmpty handling appears truncated here.
def reindexObjectSecurity(self, skip_self=False):
    """update security information in all registered catalogs.

    For every catalog registered for this meta_type, re-catalogs the
    security-relevant indexes (``_cmf_security_indexes``) of this object
    and all objects beneath its path.
    """
    at = getToolByName(self, TOOL_NAME, None)
    if at is None:
        return
    catalogs = [c for c in at.getCatalogsByType(self.meta_type)
                if c is not None]
    path = '/'.join(self.getPhysicalPath())

    for catalog in catalogs:
        unrestrictedSearchResults = getattr(
            catalog, 'unrestrictedSearchResults',
            catalog.searchResults)  # PATCH

        for brain in unrestrictedSearchResults(path=path):
            brain_path = brain.getPath()
            if brain_path == path and skip_self:
                continue
            # Get the object
            if hasattr(aq_base(brain), '_unrestrictedGetObject'):
                ob = brain._unrestrictedGetObject()
            else:
                # BBB: Zope 2.7
                ob = self.unrestrictedTraverse(brain_path, None)
            if ob is None:
                # BBB: Ignore old references to deleted objects.
                # Can happen only in Zope 2.7, or when using
                # catalog-getObject-raises off in Zope 2.8
                log("reindexObjectSecurity: Cannot get %s from catalog" %
                    brain_path, level=WARNING)
                continue

            # Recatalog with the same catalog uid.
            # PATCH: only valid indexes, per catalog.  FIX: use the loop
            # variable 'catalog' -- the original read the leaked
            # comprehension variable 'c', i.e. always the last catalog.
            lst = [i for i in self._cmf_security_indexes
                   if i in catalog.indexes()]
            if lst:
                catalog.catalog_object(ob, uid=brain_path, idxs=lst,
                                       update_metadata=0)
class RecordField(ObjectField):
    """A field that stores a 'record' (dictionary-like) construct"""
    _properties = ObjectField._properties.copy()
    _properties.update({
        'type': 'record',
        'default': {},
        'subfields': (),
        'subfield_types': {},
        'subfield_vocabularies': {},
        'subfield_labels': {},
        'subfield_sizes': {},
        'subfield_maxlength': {},
        'required_subfields': (),
        'subfield_validators': {},
        'subfield_conditions': {},
        'innerJoin': ', ',
        'outerJoin': ', ',
        'widget': RecordWidget,
        })

    security = ClassSecurityInfo()

    security.declarePublic('getSubfields')
    def getSubfields(self):
        """the tuple of sub-fields"""
        return self.subfields

    security.declarePublic('getSubfieldType')
    def getSubfieldType(self, subfield):
        """ optional type declaration
        default: string
        """
        return self.subfield_types.get(subfield, 'string')

    security.declarePublic('getSubfieldLabel')
    def getSubfieldLabel(self, subfield):
        """ optional custom label for the subfield
        default: the id of the subfield
        """
        return self.subfield_labels.get(subfield, subfield.capitalize())

    def getSubfieldSize(self, subfield, default=40):
        """ optional custom size for the subfield
        default: 40
        only effective for string type subfields
        """
        return self.subfield_sizes.get(subfield, default)

    def getSubfieldMaxlength(self, subfield):
        """ otional custom maxlength size for the subfield
        only effective for string type subfields
        """
        return self.subfield_maxlength.get(subfield, 40)

    def isRequired(self, subfield):
        """ looks whether subfield is included in the list of required
        subfields """
        return subfield in self.required_subfields

    def isSelection(self, subfield):
        """select box needed?"""
        return self.subfield_vocabularies.has_key(subfield)

    security.declarePublic('testSubfieldCondition')
    def testSubfieldCondition(self, subfield, folder, portal, object):
        """Test the subfield condition."""
        # Missing/broken conditions default to True (show the subfield).
        try:
            condition = self.subfield_conditions.get(subfield, None)
            if condition is not None:
                __traceback_info__ = (folder, portal, object, condition)
                ec = createExprContext(folder, portal, object)
                return Expression(condition)(ec)
            else:
                return True
        except AttributeError:
            return True

    def getVocabularyFor(self, subfield, instance=None):
        """the vocabulary (DisplayList) for the subfield"""
        ## XXX rr: couldn't we just rely on the field's
        ## Vocabulary method here?
        value = None
        vocab = self.subfield_vocabularies.get(subfield, None)
        if not vocab:
            raise AttributeError, 'no vocabulary found for %s' % subfield
        if isinstance(vocab, DisplayList):
            return vocab
        if type(vocab) in StringTypes:
            # a string names a method, first on the field, then on the
            # instance, that must return a DisplayList
            value = None
            method = getattr(self, vocab, None)
            if method and callable(method):
                value = method(instance)
            else:
                if instance is not None:
                    method = getattr(instance, vocab, None)
                    if method and callable(method):
                        value = method()
            if not isinstance(value, DisplayList):
                raise TypeError, '%s is not a DisplayList %s' % (
                    value, subfield)
            return value
        raise TypeError, '%s niether a StringType or a DisplayList for %s' % (
            vocab, subfield)

    def getViewFor(self, instance, subfield, joinWith=', '):
        """ formatted value of the subfield for display """
        raw = self.getRaw(instance).get(subfield, '')
        if type(raw) in (type(()), type([])):
            raw = joinWith.join(raw)
        # Prevent XSS attacks by quoting all user input
        raw = html_quote(str(raw))
        # this is now very specific
        if subfield == 'email':
            return self.hideEmail(raw, instance)
        if subfield == 'phone':
            return self.labelPhone(raw)
        if subfield == 'fax':
            return self.labelFax(raw)
        if subfield == 'homepage':
            return '<a href="%s">%s</a>' % (raw, raw)
        return raw

    def getSubfieldViews(self, instance, joinWith=', '):
        """ list of subfield views for non-empty subfields """
        result = []
        for subfield in self.getSubfields():
            view = self.getViewFor(instance, subfield, joinWith)
            if view:
                result.append(view)
        return result

    # this is really special purpose and in no ways generic
    def hideEmail(self, email='', instance=None):
        # anonymous visitors only see an obfuscated address
        masked = 'email: ' + \
                 email.replace('@', ' (at) ').replace('.', ' (dot) ')
        membertool = getToolByName(instance, 'portal_membership', None)
        if membertool is None or membertool.isAnonymousUser():
            return masked
        return "<a href='mailto:%s'>%s</a>" % (email, email)

    def labelPhone(self, phone=''):
        return 'phone: ' + phone

    def labelFax(self, fax=''):
        return 'fax: ' + fax

    # enable also a string representation of a dictionary
    # to be passed in (external edit may need this)
    # store string values as unicode
    def set(self, instance, value, **kwargs):
        if type(value) in StringTypes:
            # NOTE(review): eval of externally supplied text -- trusted
            # editor input only; confirm callers never pass untrusted data.
            try:
                value = eval(value)
                # more checks to add?
            except: # what to catch here?
                pass
        value = self._to_dict(value)
        value = self._decode_strings(value, instance, **kwargs)
        ObjectField.set(self, instance, value, **kwargs)

    def _to_dict(self, value):
        # copy dict-like objects (e.g. Records) into a real dict
        if type(value) != type({}) and hasattr(value, 'keys'):
            new_value = {}
            new_value.update(value)
            return new_value
        return value

    def _decode_strings(self, value, instance, **kwargs):
        new_value = value
        for k, v in value.items():
            if type(v) is type(''):
                nv = decode(v, instance, **kwargs)
                try:
                    new_value[k] = nv
                except AttributeError:
                    # Records don't provide __setitem__
                    setattr(new_value, k, nv)
            # convert datetimes
            if self.subfield_types.get(k, None) == 'datetime':
                try:
                    val = DateTime(v)
                except:
                    val = None
                new_value[k] = val
        return new_value

    # Return strings using the site's encoding
    def get(self, instance, **kwargs):
        value = ObjectField.get(self, instance, **kwargs)
        return self._encode_strings(value, instance, **kwargs)

    def _encode_strings(self, value, instance, **kwargs):
        new_value = value
        for k, v in value.items():
            if type(v) is type(u''):
                nv = encode(v, instance, **kwargs)
                try:
                    new_value[k] = nv
                except AttributeError:
                    # Records don't provide __setitem__
                    setattr(new_value, k, nv)
        return new_value

    if HAS_VALIDATION_CHAIN:
        def _validationLayer(self):
            """ Resolve that each validator is in the service. If
            validator is not, log a warning.

            We could replace strings with class refs and keep things
            impl the ivalidator in the list.

            Note: XXX this is not compat with aq_ things like scripts
            with __call__
            """
            for subfield in self.getSubfields():
                self.subfield_validators[subfield] = \
                    self._subfieldValidationLayer(subfield)

        def _subfieldValidationLayer(self, subfield):
            """ for the individual subfields """
            chainname = 'Validator_%s_%s' % (self.getName(), subfield)
            current_validators = self.subfield_validators.get(subfield, ())
            if type(current_validators) is DictType:
                raise NotImplementedError, 'Please use the new syntax with validation chains'
            elif providedBy(IValidationChain, current_validators):
                # already a ready-made chain
                validators = current_validators
            elif providedBy(IValidator, current_validators):
                # single validator -- wrap it into a chain
                validators = ValidationChain(chainname,
                                             validators=current_validators)
            elif type(current_validators) in (TupleType, ListType,
                                              StringType):
                if len(current_validators):
                    # got a non empty list or string - create a chain
                    try:
                        validators = ValidationChain(
                            chainname, validators=current_validators)
                    except (UnknowValidatorError, FalseValidatorError), msg:
                        log("WARNING: Disabling validation for %s/%s: %s" %
                            (self.getName(), subfield, msg))
                        validators = ()
                else:
                    validators = ()
            else:
                log('WARNING: Unknow validation %s. Disabling!' %
                    current_validators)
                validators = ()
            if not subfield in self.required_subfields:
                if validators == ():
                    validators = ValidationChain(chainname)
                if len(validators):
                    # insert isEmpty validator at position 0 if first validator
                    # is not isEmpty
                    if not validators[0][0].name == 'isEmpty':
                        validators.insertSufficient('isEmpty')
                else:
                    validators.insertSufficient('isEmpty')
            return validators
def unset(self, name, instance, **kwargs):
    """Remove ``name`` from the instance's metadata mapping (``_md``)."""
    if shasattr(instance, "_md"):
        del instance._md[name]
        # tell the persistence machinery about the in-place mutation
        instance._p_changed = 1
    else:
        # nothing to unset; the annotation mapping is missing entirely
        log("Broken instance %s, no _md" % instance)
from collective.contentfiles2aws.awsfile import AWSFile
from collective.contentfiles2aws.widgets import AWSFileWidget
from collective.contentfiles2aws.widgets import AWSImageWidget
from collective.contentfiles2aws.interfaces import IAWSFileField
from collective.contentfiles2aws.interfaces import IAWSImageField
from collective.contentfiles2aws.config import AWSCONF_SHEET, \
    ACTIVE_STORAGE_PNAME, STORAGES

# sentinel used to distinguish "not passed" from None
_marker = []

# Image scaling is optional: it needs PIL at import time.
try:
    import PIL.Image
except ImportError:
    # no PIL, no scaled versions!
    log("Warning: no Python Imaging Libraries (PIL) found." +
        "Archetypes based ImageField's don't scale if neccessary.")
    HAS_PIL = False
    PIL_ALGO = None
else:
    HAS_PIL = True
    PIL_ALGO = PIL.Image.ANTIALIAS


class AWSFileField(FileField):
    """Something that may be a file, but is not an image and doesn't
    want text format conversion"""

    implements(IAWSFileField)

    _properties = FileField._properties.copy()
    # same behavior as FileField, rendered through the AWS-aware widget
    _properties.update({'widget': AWSFileWidget})
def processXml(self, context, data_node):
    """Claim at: namespace nodes for the AT marshaller.

    Handles the metadata container, reference sub-documents, plain
    at:field elements and pseudo fields (e.g. uid).  Returns True when
    the node was annotated with an attribute, False otherwise.
    """
    tagname, namespace = utils.fixtag(data_node.tag, context.ns_map)
    if tagname == 'metadata':
        # ignore the container
        return False
    elif tagname == 'reference':
        # switch to reference mode, we tell the parser that we want
        # to explictly recieve all new node parse events, so we
        # can introspect the nested metadata that can be used
        # in reference specification.
        self.in_reference_mode = True
        self.new_reference_p = True
        assert self.last_schema_id
        context.setNamespaceDelegate(self)
        return False
    elif tagname == 'field':
        # basic at field specified, find the matching attribute
        # and annotate the data node with it
        schema_name = data_node.attrib.get('name', None)
        if schema_name is None:
            log("'id' attribute for at:field is deprecated, "
                "use 'name' instead")
            schema_name = data_node.attrib.get('id')
        assert schema_name, "No field name specified in at:field element"
        #print "field", schema_name
        self.last_schema_id = schema_name
        attribute = self.getAttributeByName(schema_name, context)
        if attribute is None:
            #print "na", schema_name
            return False
        # NOTE(review): ElementTree's .set expects string values --
        # storing an attribute object relies on a custom node type.
        data_node.set('attribute', attribute)
        return True
    elif self.in_reference_mode:
        # if we get new metadata elements while in references, they
        # are stored as additional data for resolving the reference
        # latter.
        data = context.getDataFor(self.xmlns)
        srefs = data.setdefault(self.last_schema_id, [])
        # if we've already added a reference to the node data,
        # put additional reference specification data onto the
        # existing reference.
        if self.new_reference_p:
            ref = Reference()
            srefs.append(ref)
            self.new_reference_p = False
        else:
            ref = srefs[-1]
        attribute = ReferenceAttribute(data_node.name, ref)
        data_node.set('attribute', attribute)
        return True
    elif tagname in self.at_fields:
        # pseudo fields like uid which are specified in a custom manner
        attribute = self.getAttributeByName(tagname)
        if attribute is None:
            return False
        data_node.set('attribute', attribute)
        return True
    return False
def processXml(self, context, data_node):
    """Annotate at: namespace nodes with their schema attributes.

    Variant that stores the attribute directly on the node object
    (``data_node.attribute``) rather than via ``.set``.  Returns True
    when the node was claimed, False otherwise.
    """
    tagname, namespace = utils.fixtag(data_node.tag, context.ns_map)
    if tagname == 'metadata':
        # ignore the container
        return False
    elif tagname == 'reference':
        # switch to reference mode, we tell the parser that we want
        # to explictly recieve all new node parse events, so we
        # can introspect the nested metadata that can be used
        # in reference specification.
        self.in_reference_mode = True
        self.new_reference_p = True
        assert self.last_schema_id
        context.setNamespaceDelegate( self )
        return False
    elif tagname == 'field':
        # basic at field specified, find the matching attribute
        # and annotate the data node with it
        schema_name = data_node.attrib.get('name', None)
        if schema_name is None:
            log("'id' attribute for at:field is deprecated, use 'name' instead")
            schema_name = data_node.attrib.get('id')
##             while context.reader.MoveToNextAttribute():
##                 if context.reader.LocalName() == 'id':
##                     schema_name = context.reader.Value()
##                     break
        assert schema_name, "No field name specified in at:field element"
        #print "field", schema_name
        self.last_schema_id = schema_name
        attribute = self.getAttributeByName(schema_name, context)
        if attribute is None:
            #print "na", schema_name
            return False
        data_node.attribute = attribute
        return True
    elif self.in_reference_mode:
        # if we get new metadata elements while in references, they
        # are stored as additional data for resolving the reference
        # latter.
        data = context.getDataFor(self.xmlns)
        srefs = data.setdefault( self.last_schema_id, [])
        # if we've already added a reference to the node data,
        # put additional reference specification data onto the
        # existing reference.
        if self.new_reference_p:
            ref = Reference()
            srefs.append( ref )
            self.new_reference_p = False
        else:
            ref = srefs[-1]
        attribute = ReferenceAttribute( data_node.name, ref )
        data_node.attribute = attribute
        return True
    elif tagname in self.at_fields:
        # pseudo fields like uid which are specified in a custom manner
        attribute = self.getAttributeByName( tagname )
        if attribute is None:
            return False
        data_node.attribute = attribute
        return True
    return False
def addableTypes(self, instance, field):
    """ Returns a list of dictionaries which maps portal_type to
    a human readable form.

    Each entry has 'id' (the portal_type), 'name' (its title) and
    'destinations' (folder URLs where the type may be added), derived
    from ``self.destination`` which may be None, a dict keyed by type,
    a list, '.', or the name of an attribute/method on the instance.
    """
    tool = getToolByName(instance, 'portal_types')
    purl = getToolByName(instance, 'portal_url')
    lookupDestinationsFor = self.lookupDestinationsFor
    getRelativeContentURL = purl.getRelativeContentURL
    # if destination_types is None (by default) it will do
    # N-portal_types queries to the catalog which is horribly inefficient
    destination_types = getattr(self, 'destination_types', None)
    destination = self.destination
    types = []
    options = {}
    for typeid in field.allowed_types:
        _info = tool.getTypeInfo(typeid)
        if _info is None:
            # The portal_type asked for was not
            # installed/has been removed.
            log("Warning: in Archetypes.Widget.lookupDestinationsFor: " \
                "portal type %s not found" % typeid )
            continue
        if destination == None:
            # no destination configured: placeholder triggers catalog lookup
            options[typeid]=[None]
        elif isinstance(destination, DictType):
            options[typeid]=destination.get(typeid, [None])
            if not isinstance(options[typeid], ListType):
                options[typeid] = [options[typeid]]
        elif isinstance(destination, ListType):
            options[typeid]=destination
        else:
            # a string: attribute or method name on the instance
            place = getattr(aq_base(instance), destination, destination)
            if callable(place):
                #restore acq.wrapper
                place = getattr(instance, destination)
                place = place()
            if isinstance(place, ListType):
                options[typeid] = place
            else:
                options[typeid] = [place]
        value = {}
        value['id'] = typeid
        value['name'] = _info.Title()
        value['destinations'] = []
        for option in options.get(typeid):
            if option == None:
                # None means: search the catalog for allowed containers
                value['destinations'] = value['destinations'] + \
                    lookupDestinationsFor(_info, tool, purl,
                                          destination_types=destination_types)
            elif option == '.':
                # '.' means the current object's location
                value['destinations'].append(getRelativeContentURL(instance))
            else:
                try:
                    place = getattr(aq_base(instance), option, option)
                except TypeError:
                    place = option
                if callable(place):
                    #restore acq.wrapper
                    place = getattr(instance, option)
                    place = place()
                if isinstance(place, ListType):
                    value['destinations'] = place + value['destinations']
                else:
                    #TODO Might as well check for type, doing it everywhere else
                    value['destinations'].append(place)
        if value['destinations']:
            types.append(value)
    return types