def filter_batch_on_properties(self, batch):
    reverse = True if (self.sort_dir == "desc") else False
    if self.sort_on in self.filter_property_fields:
        sort_on = self.sort_on[5:]
        if sort_on not in self.utk:
            batch.sort(key=lambda x: getattr(x, sort_on), reverse=reverse)
    for field_name in self.filter_property_fields:
        md_field = self.domain_annotation.get(field_name)
        ff_name = "filter_%s" % (field_name)
        ff = self.request.get(ff_name, None)
        if ff and md_field:
            if (IDate.providedBy(md_field.property) or
                    IDatetime.providedBy(md_field.property)):
                start_date_str, end_date_str = get_date_strings(ff)
                start_date = string_to_date(start_date_str)
                end_date = string_to_date(end_date_str)
                if start_date:
                    batch = [x for x in batch if (
                        getattr(x, field_name) and
                        getattr(x, field_name).date() >= start_date)]
                if end_date:
                    batch = [x for x in batch if (
                        getattr(x, field_name) and
                        getattr(x, field_name).date() <= end_date)]
            elif IText.providedBy(md_field.property):
                batch = [x for x in batch if ff in getattr(x, field_name)]
    return batch
def _set_default_configuration(self):
    defaults = {}
    tile_type = getUtility(ITileType, name=self.tile.__name__)
    fields = getFieldNamesInOrder(tile_type.schema)
    for name, field in getFieldsInOrder(tile_type.schema):
        order = unicode(fields.index(name))
        # default configuration attributes for all fields
        defaults[name] = {'order': order, 'visibility': u'on'}
        if name == 'css_class':
            # css_class, set default
            defaults[name] = field.default
        if ITextLine.providedBy(field):
            # field is TextLine, we should add 'htmltag'
            defaults[name]['htmltag'] = u'h2'
        elif INamedBlobImageField.providedBy(field):
            # field is an image, we should add 'position' and 'imgsize'
            defaults[name]['position'] = u'left'
            defaults[name]['imgsize'] = u'mini 200:200'
        elif IInt.providedBy(field):
            defaults[name][name] = field.default
        elif IDatetime.providedBy(field):
            # field is Datetime, we should add 'format'
            defaults[name]['format'] = 'datetime'
    return defaults
def get_collection_schema_from_interface_schema(self, schema):
    collection = {}
    for name in schema:
        if IDate.providedBy(schema[name]) or \
                IDatetime.providedBy(schema[name]):
            collection['field.' + name] = 'time'
        elif IDecimal.providedBy(schema[name]) or \
                IFloat.providedBy(schema[name]) or \
                IInt.providedBy(schema[name]):
            collection['field.' + name] = 'number'
        elif IBool.providedBy(schema[name]):
            collection['field.' + name] = 'bool'
        elif ICollection.providedBy(schema[name]):
            if not ICollection.providedBy(schema[name].value_type) and not \
                    IDict.providedBy(schema[name].value_type):
                collection['field.' + name] = 'array'
        elif IDict.providedBy(schema[name]):
            if IText.providedBy(schema[name].key_type) and \
                    IText.providedBy(schema[name].value_type):
                collection['field.' + name] = 'array'
        # this is a pretty weak check for an IP address field. We might want
        # to update this to look for a field validator based on the ipaddress
        # package, or mark the field with a special interface indicating it
        # is an IP address
        elif IDottedName.providedBy(schema[name]) and \
                (schema[name].min_dots == schema[name].max_dots == 3):
            collection['field.' + name] = 'cidr'
        elif IText.providedBy(schema[name]) or \
                INativeString.providedBy(schema[name]):
            collection['field.' + name] = 'string'
    return collection
def migrate_saved_data(ploneformgen, easyform):
    for data_adapter in ploneformgen.objectValues('FormSaveDataAdapter'):
        actions = get_actions(easyform)
        action = actions.get(data_adapter.getId())
        schema = get_schema(easyform)
        if ISaveData.providedBy(action):
            cols = data_adapter.getColumnNames()
            for idx, row in enumerate(data_adapter.getSavedFormInput()):
                if len(row) != len(cols):
                    logger.warning(
                        'Number of columns does not match. Skipping row %s in '
                        'data adapter %s/%s',
                        idx,
                        '/'.join(easyform.getPhysicalPath()),
                        data_adapter.getId())
                    continue
                data = {}
                for key, value in zip(cols, row):
                    field = schema.get(key)
                    value = value.decode('utf8')
                    if IFromUnicode.providedBy(field):
                        value = field.fromUnicode(value)
                    elif IDatetime.providedBy(field) and value:
                        value = DateTime(value).asdatetime()
                    elif IDate.providedBy(field) and value:
                        value = DateTime(value).asdatetime().date()
                    elif ISet.providedBy(field):
                        try:
                            value = set(literal_eval(value))
                        except ValueError:
                            pass
                    elif INamedBlobFileField.providedBy(field):
                        value = None
                    data[key] = value
                action.addDataRow(data)
def __init__(self, field, index=None, dialect='csv', choices=None):
    self.field = field
    self.index = index
    self.multiple = self._is_multiple()
    if self.multiple:
        self.sortspec = [t.value for t in field.value_type.vocabulary]
        if choices:
            self.sortspec = sorted(choices, key=self.sortspec.index)
    self.name = self._name()
    self.title = self._title()
    self.dialect = dialect
    self.isdate = IDate.providedBy(field) or IDatetime.providedBy(field)
def __init__(self, field, index=None, dialect='csv', choices=None):
    self.field = field
    self.index = index
    self.multiple = self._is_multiple()
    if self.multiple:
        self.sortspec = [t.value for t in field.value_type.vocabulary]
        if choices:
            self.sortspec = sorted(
                choices, key=lambda v: safeindex(v, self.sortspec)
            )
    self.name = self._name()
    self.title = self._title()
    self.dialect = dialect
    self.isdate = IDate.providedBy(field) or IDatetime.providedBy(field)
def update(self):
    super(TaskModifiedTemplate, self).update()
    task = self.context
    ev = self.context0
    request = self.request
    data = {}
    attributes = dict([(attr.interface, list(attr.attributes))
                       for attr in ev.descriptions])
    for iface, fields in attributes.items():
        ob = iface(task)
        for fieldId in fields:
            field = iface[fieldId].bind(ob)
            value = field.get(ob)
            if IChoice.providedBy(field):
                try:
                    value = field.vocabulary.getTerm(value).title
                except LookupError:
                    pass
            if ICollection.providedBy(field) and \
                    IChoice.providedBy(field.value_type):
                voc = field.value_type.vocabulary
                value = u", ".join([voc.getTerm(v).title for v in value])
            if IDate.providedBy(field):
                value = getFormatter(request, "date", "full").format(value)
            if IDatetime.providedBy(field):
                value = getFormatter(request, "dateTime", "medium").format(value)
            data[field.title] = value
    data = data.items()
    data.sort()
    self.data = data
def get_attribute_values(request, record, attribute_map):
    values = {}
    vocabularies = get_vocabularies(request, attribute_map)
    for header, field in attribute_map.items():
        downloaded = download_field_from_url(field, record[header])
        if downloaded is not False:
            values[field.__name__] = downloaded
            continue
        if IDate.providedBy(field):
            if not record[header]:
                values[field.__name__] = None
            else:
                values[field.__name__] = parse_date(record[header])
            continue
        if IDatetime.providedBy(field):
            if not record[header]:
                values[field.__name__] = None
            else:
                values[field.__name__] = parse_datetime(record[header])
            continue
        if IURI.providedBy(field):
            if not record[header].strip():
                values[field.__name__] = None
                continue
        if IList.providedBy(field):
            if ITextLine.providedBy(field.value_type):
                values[field.__name__] = convert_to_list(record[header])
                continue
        if ISet.providedBy(field):
            if IChoice.providedBy(field.value_type):
                values[field.__name__] = set(convert_to_list(record[header]))
                continue
        if IChoice.providedBy(field):
            if not record[header].strip():
                values[field.__name__] = None
            else:
                vocabulary = vocabularies[header]
                if record[header].lower() not in vocabulary:
                    raise ContentImportError(
                        _(
                            u'The ${name} column contains the '
                            u'unknown value ${value}',
                            mapping=dict(name=header, value=record[header])
                        )
                    )
                values[field.__name__] = vocabulary[record[header].lower()]
            continue
        assert IFromUnicode.providedBy(field), """
            {} does not support fromUnicode
        """.format(field)
        try:
            values[field.__name__] = field.fromUnicode(record[header])
            if isinstance(values[field.__name__], basestring):
                values[field.__name__] = values[field.__name__].strip()
            if isinstance(field, Text):
                values[field.__name__] = values[field.__name__].replace(
                    '<br />', '\n'
                )
        except ValidationError, e:
            raise ContentImportError(e.doc(), colname=header)
        except ValueError, e:
            raise ContentImportError(e.message, colname=header)
    return values
def get_export_data(
    self,
    portal_type,
    blob_format,
    richtext_format,
    blacklist,
    whitelist,
    query,
):
    """Return a list of dicts, one dict per object.

    Each key is a field name and each value is that field's value.
    """
    all_fields = get_schema_info(portal_type, blacklist, whitelist)
    results = []
    catalog = api.portal.get_tool('portal_catalog')
    if not query:
        query = dict()
    query['portal_type'] = portal_type
    if 'Language' not in query and HAS_MULTILINGUAL and \
            'Language' in catalog.indexes():
        query['Language'] = 'all'
    brains = catalog.unrestrictedSearchResults(query)
    for brain in brains:
        obj = brain.getObject()
        item_dict = dict()
        for fieldname, field in all_fields:
            if fieldname in self.ADDITIONAL_MAPPING:
                # The way to access the value of these fields is
                # overridden in ADDITIONAL_MAPPING
                continue
            try:
                value = field.get(field.interface(obj))
            except:
                print("Skipping object at {0}".format(obj.absolute_url()))
                break
            if not value:
                # set a value anyway to keep the dimensions of all rows
                value = ''
                # make sure we do no more transforms
                field = None
            if IRichTextValue.providedBy(value):
                value = transform_richtext(value, mimetype=richtext_format)
            if IRelationList.providedBy(field):
                rel_val = []
                for relation in value:
                    rel_val.append(get_url_for_relation(relation))
                value = pretty_join(rel_val)
            if IRelationChoice.providedBy(field):
                value = get_url_for_relation(value)
            if INamed.providedBy(value):
                value = get_blob_url(value, brain, blob_format, fieldname)
            if ICollection.providedBy(field):
                r = []
                for v in value:
                    if INamed.providedBy(v):
                        r.append(u'{0}/@@download/{1}'.format(
                            obj.absolute_url(), fieldname))
                        # r.append(base64.b64encode(v.data))
                    else:
                        r.append(v)
                value = r
            if IDatetime.providedBy(field) or IDate.providedBy(field):
                if value.year < 1000:
                    if value.year < 16:
                        year = value.year + 2000
                    else:
                        year = value.year + 1900
                    if IDate.providedBy(field):
                        value = datetime.date(
                            month=value.month, day=value.day, year=year)
                    elif IDatetime.providedBy(field):
                        value = datetime.datetime(
                            month=value.month, day=value.day, year=year,
                            hour=value.hour, minute=value.minute,
                            second=value.second)
                value = api.portal.get_localized_time(value, long_format=True)
            if safe_callable(value):
                value = value()
            if isinstance(value, list) or isinstance(value, tuple):
                value = pretty_join(value)
            item_dict[fieldname] = value
        else:
            # Update the data with additional info or overridden getters
            item_dict.update(self.additional_data(obj, blacklist))
            results.append(item_dict)
            continue  # executed if the loop ended normally (no break)
        break  # executed if 'continue' was skipped (break)
    return results
def __getattribute__(self, name):
    if name.startswith('_') or name.startswith('portal_') or \
            name.startswith('@@') or name == 'sql_id':
        return super(SQLDexterityItem, self).__getattribute__(name)
    connection = queryUtility(ISQLConnectionsUtility, name=self.portal_type,
                              default=None)
    if connection == None and self.portal_type:
        fti = queryUtility(IDexterityFTI, name=self.portal_type, default=None)
        if not fti:
            return None
        updateConnectionsForFti(fti)
        connection = queryUtility(ISQLConnectionsUtility,
                                  name=self.portal_type, default=None)
    if name == 'view':
        # be sure session and sqlitem are up to date
        self._v_sql_item = None
        connection.session.close()
    if not connection:
        return super(SQLDexterityItem, self).__getattribute__(name)
    if name == 'UID' and self.sql_virtual:
        return self.portal_type + '-' + connection.sql_table + '-' + str(self.sql_id)
    if name == 'id' and 'id' not in connection.fieldnames.keys():
        if not self.sql_virtual:
            return super(SQLDexterityItem, self).__getattribute__(name)
        fti = ISQLTypeSettings(getUtility(IDexterityFTI, name=self.portal_type))
        nameFromTitle = INameFromTitle(self, None)
        if nameFromTitle is not None and nameFromTitle.title:
            sql_folder_id = getattr(fti, 'sql_folder_id',
                                    'data-' + self.portal_type)
            title = nameFromTitle.title
            folder = None
            if IRelationValue.providedBy(sql_folder_id):
                folder = sql_folder_id.to_object
            elif sql_folder_id and sql_folder_id.startswith('/'):
                portal = getToolByName(getSite(), 'portal_url').getPortalObject()
                folder = portal.restrictedTraverse(sql_folder_id)
            if folder:
                name = INameChooser(folder).chooseName(title, self)
                return name
            # return INameChooser(getSite()).chooseName(title, self)
            # return getUtility(IURLNormalizer).normalize(title)
        return self.sql_id
    if name in connection.fieldnames.keys():
        sql_column = connection.fieldnames[name]
        sql_item = self.getSQLItem()
        try:
            sql_id = getattr(sql_item, connection.sql_id_column, None)
        except orm_exc.DetachedInstanceError:
            self._v_sql_item = None
            sql_item = self.getSQLItem()
            sql_id = getattr(sql_item, connection.sql_id_column, None)
        fieldname = 'name'
        if sql_item and sql_column:
            while '.' in sql_column:
                sql_key = sql_column.split('.')[0]
                sql_item = getattr(sql_item, sql_key, None)
                if isinstance(sql_item, list):
                    value = sql_item
                    fieldname = sql_column.split('.')[-1]
                    break
                sql_column = '.'.join(sql_column.split('.')[1:])
            else:
                if not isinstance(sql_item, list):
                    value = getattr(sql_item, sql_column, None)
            if not value and (isinstance(value, list) or
                              hasattr(value, '_sa_instance_state')):
                value = ''
            elif (isinstance(value, list) or
                  hasattr(value, '_sa_instance_state')):
                sqlftis = [
                    a for a in getAllUtilitiesRegisteredFor(IDexterityFTI)
                    if 'collective.behavior.sql.behavior.behaviors.ISQLContent'
                    in a.behaviors and getattr(a, 'sql_table', None)]
                if name == 'subject':
                    return tuple([getattr(a, fieldname, '') for a in value])
                tableftis = []
                for iface in iterSchemataForType(self.portal_type):
                    if name in iface.names():
                        field = iface[name]
                        if IRelationChoice.providedBy(field) or \
                                IRelationList.providedBy(field):
                            if IRelationChoice.providedBy(field):
                                allowed_types = field.source.query.get(
                                    'portal_type', [])
                            else:
                                allowed_types = field.value_type.source.query.get(
                                    'portal_type', [])
                            tableftis = []
                            for sqlfti in sqlftis:
                                adapted = ISQLTypeSettings(sqlfti, None)
                                if isinstance(value, list):
                                    classname = value[0].__class__.__name__
                                else:
                                    classname = value.__class__.__name__
                                if adapted and getattr(
                                        adapted, 'sql_table', None) == classname:
                                    if not allowed_types or \
                                            sqlfti.id in allowed_types:
                                        tableftis.append(adapted)
                            catalog = getToolByName(getSite(), 'portal_catalog')
                            relations = []
                            for tablefti in tableftis:
                                sql_id_column = getattr(
                                    tablefti, 'sql_id_column', 'id')
                                valueids = []
                                if isinstance(value, list):
                                    valueids = [
                                        getattr(a, sql_id_column, None)
                                        for a in value
                                        if getattr(a, sql_id_column, None)]
                                else:
                                    valueids = getattr(value, sql_id_column, None)
                                valueids = [str(a) for a in valueids]
                                brains = catalog.unrestrictedSearchResults(
                                    portal_type=tablefti.id, sql_id=valueids)
                                for brain in brains:
                                    relations.append(SQLRelationValue(
                                        brain.portal_type, brain.UID, self))
                            if IRelationChoice.providedBy(field) and relations:
                                return relations[0]
                            elif IRelationList.providedBy(field) and relations:
                                return relations
                        elif ITuple.providedBy(field):
                            return tuple(
                                [getattr(a, fieldname, '') for a in value])
                        elif IList.providedBy(field):
                            return [getattr(a, fieldname, '') for a in value]
                        elif value and isinstance(value, list):
                            value = getattr(value[0], fieldname, '')
            for iface in iterSchemataForType(self.portal_type):
                if name == 'subject':
                    try:
                        return tuple(
                            [a.decode('utf-8') for a in literal_eval(value)])
                    except:
                        return tuple([a.strip() for a in value.split(',')])
                if name in iface.names():
                    field = iface[name]
                    if IRichText.providedBy(field):
                        if not value:
                            return ''
                        if not '<p' in value or not '<br' in value:
                            value = '<p>' + '</p><p>'.join(
                                [a for a in value.split('\n') if a.strip()]) + '</p>'
                        # try:
                        #     value = str(value)
                        # except:
                        #     try:
                        #         value = value.decode('utf-8')
                        #     except:
                        #         try:
                        #             value = value.encode('utf-8')
                        #         except:
                        #             pass
                        return RichTextValue(unidecode(value))
                    elif INamedBlobImage.providedBy(field):
                        return NamedBlobImage(
                            base64.b64decode(value),
                            filename=unicode(self.portal_type + self.id + ".jpg"))
                    elif ITuple.providedBy(field):
                        if not value:
                            return tuple([])
                        try:
                            return tuple(
                                [a.decode('utf-8') for a in literal_eval(value)])
                        except:
                            return tuple([a.strip() for a in value.split(',')])
                    elif IList.providedBy(field):
                        if not value:
                            return []
                        try:
                            return [a.decode('utf-8') for a in literal_eval(value)]
                        except:
                            return [a.strip() for a in value.split(',')]
                    elif IDatetime.providedBy(field) and \
                            hasattr(value, 'day') and not hasattr(value, 'hour'):
                        value = datetime.datetime.combine(
                            value, datetime.datetime.min.time())
            if name in ['expiration_date', 'effective_date', 'effective',
                        'expires'] and hasattr(value, 'day') and \
                    not hasattr(value, 'hour'):
                value = datetime.datetime.combine(
                    value, datetime.datetime.min.time())
            if isinstance(value, unicode) or name == 'id':
                try:
                    value = str(value)
                except:
                    pass
            return value
    return super(SQLDexterityItem, self).__getattribute__(name)
def __call__(self, validate_all=False):  # noqa: ignore=C901
    data = json_body(self.request)
    modified = False
    schema_data = {}
    errors = []
    for schema in iterSchemata(self.context):
        write_permissions = mergedTaggedValueDict(
            schema, WRITE_PERMISSIONS_KEY)
        for name, field in getFields(schema).items():
            field_data = schema_data.setdefault(schema, {})
            if field.readonly:
                continue
            if name in data:
                dm = queryMultiAdapter((self.context, field), IDataManager)
                if not dm.canWrite():
                    continue
                if not self.check_permission(write_permissions.get(name)):
                    continue
                # Deserialize to field value
                deserializer = queryMultiAdapter(
                    (field, self.context, self.request), IFieldDeserializer)
                if deserializer is None:
                    continue
                try:
                    value = deserializer(data[name])
                except ValueError as e:
                    errors.append({
                        'message': e.message, 'field': name, 'error': e})
                except ValidationError as e:
                    errors.append({
                        'message': e.doc(), 'field': name, 'error': e})
                else:
                    field_data[name] = value
                    if IDatetime.providedBy(dm.field):
                        # Do not compare both, just set the value. See
                        # https://github.com/plone/plone.restapi/issues/253
                        dm.set(value)
                        modified = True
                    else:
                        if value != dm.get():
                            dm.set(value)
                            modified = True
            elif validate_all:
                # Never validate the changeNote of p.a.versioningbehavior.
                # The Versionable adapter always returns an empty string,
                # which is the wrong type. It should be unicode and should be
                # fixed in p.a.versioningbehavior
                if name == 'changeNote':
                    continue
                dm = queryMultiAdapter((self.context, field), IDataManager)
                bound = field.bind(self.context)
                try:
                    bound.validate(dm.get())
                except ValidationError as e:
                    errors.append({
                        'message': e.doc(), 'field': name, 'error': e})

    # Validate schemata
    for schema, field_data in schema_data.items():
        validator = queryMultiAdapter(
            (self.context, self.request, None, schema, None),
            IManagerValidator)
        for error in validator.validate(field_data):
            errors.append({'error': error, 'message': error.message})

    if errors:
        raise BadRequest(errors)

    # We'll set the layout after the validation, even if there
    # are no other changes.
    if 'layout' in data:
        layout = data['layout']
        self.context.setLayout(layout)

    # OrderingMixin
    self.handle_ordering(data)

    if modified:
        notify(ObjectModifiedEvent(self.context))
    return self.context
def get_export_data(
    self,
    portal_type,
    blob_format,
    richtext_format,
    blacklist,
    whitelist,
    query,
):
    """Return a list of dicts, one dict per object.

    Each key is a field name and each value is that field's value.
    """
    all_fields = get_schema_info(portal_type, blacklist, whitelist)
    results = []
    catalog = api.portal.get_tool('portal_catalog')
    if not query:
        query = dict()
    query['portal_type'] = portal_type
    if 'Language' not in query and HAS_MULTILINGUAL and \
            'Language' in catalog.indexes():
        query['Language'] = 'all'
    if 'path' not in query:
        query['path'] = {}
        query['path']['query'] = '/'.join(self.context.getPhysicalPath())
    brains = catalog(query)
    for brain in brains:
        obj = brain.getObject()
        item_dict = dict()
        for fieldname, field in all_fields:
            if fieldname in self.ADDITIONAL_MAPPING:
                # The way to access the value of these fields is
                # overridden in ADDITIONAL_MAPPING
                continue
            value = field.get(field.interface(obj))
            if not value:
                # set a value anyway to keep the dimensions of all rows
                value = ''
                # make sure we do no more transforms
                field = None
            if IRichTextValue.providedBy(value):
                value = transform_richtext(value, mimetype=richtext_format)
            if IRelationList.providedBy(field):
                rel_val = []
                for relation in value:
                    rel_val.append(get_url_for_relation(relation))
                value = pretty_join(rel_val)
            if IRelationChoice.providedBy(field):
                value = get_url_for_relation(value)
            if INamed.providedBy(value):
                value = get_blob_url(value, brain, blob_format, fieldname)
            if IDatetime.providedBy(field) or IDate.providedBy(field):
                value = api.portal.get_localized_time(value, long_format=True)
            if safe_callable(value):
                value = value()
            if isinstance(value, list) or isinstance(value, tuple):
                value = pretty_join(value)
            if HAS_GEOLOCATION and isinstance(value, Geolocation):
                value = value.__dict__
            item_dict[fieldname] = value
        # Update the data with additional info or overridden getters
        item_dict.update(self.additional_data(obj, blacklist))
        results.append(item_dict)
    return results
def get_export_data(
    self,
    portal_type,
    blob_format,
    richtext_format,
    blacklist,
    whitelist,
    query,
):
    """Return a list of dicts, one dict per object.

    Each key is a field name and each value is that field's value.
    """
    all_fields = get_schema_info(portal_type, blacklist, whitelist)
    results = []
    catalog = api.portal.get_tool('portal_catalog')
    if not query:
        query = dict()
    query['portal_type'] = portal_type
    if 'Language' not in query and HAS_MULTILINGUAL and \
            'Language' in catalog.indexes():
        query['Language'] = 'all'
    brains = catalog(query)
    for brain in brains:
        obj = brain.getObject()
        item_dict = dict()
        for fieldname, field in all_fields:
            if fieldname in self.ADDITIONAL_MAPPING:
                # The way to access the value of these fields is
                # overridden in ADDITIONAL_MAPPING
                continue
            value = field.get(field.interface(obj))
            if not value:
                # set a value anyway to keep the dimensions of all rows
                value = ''
                # make sure we do no more transforms
                field = None
            if IRichTextValue.providedBy(value):
                value = transform_richtext(value, mimetype=richtext_format)
            if IRelationList.providedBy(field):
                rel_val = []
                for relation in value:
                    rel_val.append(get_url_for_relation(relation))
                value = pretty_join(rel_val)
            if IRelationChoice.providedBy(field):
                value = get_url_for_relation(value)
            if INamed.providedBy(value):
                value = get_blob_url(value, brain, blob_format, fieldname)
            if IDatetime.providedBy(field) or IDate.providedBy(field):
                value = api.portal.get_localized_time(
                    value, long_format=True)
            if safe_callable(value):
                value = value()
            if isinstance(value, list) or isinstance(value, tuple):
                value = pretty_join(value)
            item_dict[fieldname] = value
        # Update the data with additional info or overridden getters
        item_dict.update(self.additional_data(obj, blacklist))
        results.append(item_dict)
    return results