def __iter__(self):
    """Yield skeleton-folder items for any missing parent containers.

    For each incoming item, every ancestor path that does not yet exist
    in the site is emitted first as a minimal folder item so later
    blueprints can construct it.
    """
    for item in self.previous:
        keys = item.keys()
        pathKey = self.pathKey(*keys)[0]
        if not pathKey:
            # not enough info
            yield item
            continue
        newPathKey = self.newPathKey or pathKey
        newTypeKey = self.newTypeKey
        path = item[pathKey]
        parts = path.strip('/').rsplit('/', 1)
        container, id = (len(parts) == 1 and ('', parts[0]) or parts)
        # This may be a new container
        if container not in self.seen:
            containerPathItems = list(pathsplit(container))
            if containerPathItems:
                checkedElements = []
                # Walk down each possible parent folder
                obj = self.context
                for element in containerPathItems:
                    checkedElements.append(element)
                    currentPath = '/'.join(checkedElements)
                    if currentPath and currentPath not in self.seen:
                        if element and traverse(obj, element) is None:
                            # We don't have this path - yield to create a
                            # skeleton folder
                            yield {newPathKey: '/' + currentPath,
                                   newTypeKey: self.folderType}
                        if self.cache:
                            self.seen.add(currentPath)
                    obj = traverse(obj, element)
        if self.cache:
            self.seen.add("%s/%s" % (container, id,))
        yield item
def __iter__(self):
    """Ensure parent folders exist by yielding skeleton folder items."""
    for item in self.previous:
        item_keys = item.keys()
        pathKey = self.pathKey(*item_keys)[0]
        if not pathKey:
            # not enough info to locate the item
            yield item
            continue
        newPathKey = self.newPathKey or pathKey
        newTypeKey = self.newTypeKey
        path = item[pathKey]
        elems = path.strip('/').rsplit('/', 1)
        container, id = (len(elems) == 1 and ('', elems[0]) or elems)
        # Container not seen before - it may need to be created
        if container not in self.seen:
            containerPathItems = list(pathsplit(container))
            if containerPathItems:
                checkedElements = []
                obj = self.context
                # Inspect every candidate parent folder along the path
                for element in containerPathItems:
                    checkedElements.append(element)
                    currentPath = '/'.join(checkedElements)
                    if currentPath and currentPath not in self.seen:
                        if element and traverse(obj, element) is None:
                            # Missing path: emit a skeleton folder item
                            yield {newPathKey: '/' + currentPath,
                                   newTypeKey: self.folderType}
                        if self.cache:
                            self.seen.add(currentPath)
                    obj = traverse(obj, element)
        if self.cache:
            self.seen.add("%s/%s" % (container, id,))
        yield item
def __iter__(self):
    """Rewrite portal_type criteria on collection-like objects.

    Replaces the legacy RTRemoteVideo/RTInternalVideo types with
    WildcardVideo in each query criterion.
    """
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            yield item
            continue
        path = item[pathkey]
        ob = traverse(self.context, str(path).lstrip('/'), None)
        if ob is None:
            # object not found
            yield item
            continue
        query = item.get('query', None)
        if query and hasattr(ob, 'query'):
            for criterion in query:
                if 'portal_type' in criterion.values():
                    # Map the deprecated video types onto WildcardVideo
                    criterion['v'] = [
                        'WildcardVideo'
                        if ctype in ('RTRemoteVideo', 'RTInternalVideo')
                        else ctype
                        for ctype in criterion['v']
                    ]
        yield item
def __iter__(self):
    """Restore workflow_history on objects and update role mappings."""
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        workflowhistorykey = self.workflowhistorykey(*keys)[0]
        if not pathkey or not workflowhistorykey or \
                workflowhistorykey not in item:
            # not enough info
            yield item
            continue
        # traverse() available in version 1.5+ of collective.transmogrifier
        obj = traverse(self.context, item[pathkey].lstrip('/'), None)
        if obj is None or not getattr(obj, 'workflow_history', False):
            yield item
            continue
        if (IBaseObject.providedBy(obj) or
                (dexterity_available and IDexterityContent.providedBy(obj))):
            history = item[workflowhistorykey]
            # Convert serialized timestamps back into DateTime objects
            # (mutates the entry dicts in place, as before)
            for wf_id in history:
                for entry in history[wf_id]:
                    if 'time' in entry:
                        entry['time'] = DateTime(entry['time'])
            obj.workflow_history.data = history
            # update security
            workflows = self.wftool.getWorkflowsFor(obj)
            if workflows:
                workflows[0].updateRoleMappingsFor(obj)
        yield item
def __iter__(self):
    """Apply stored permission settings onto the traversed objects."""
    for item in self.previous:
        keys = list(item.keys())
        pathkey = self.pathkey(*keys)[0]
        permskey = self.permskey(*keys)[0]
        if not pathkey or not permskey or \
                permskey not in item:
            # not enough info
            yield item
            continue
        path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
        obj = traverse(self.context, path, None)
        if obj is None:
            yield item
            continue
        if IRoleManager.providedBy(obj):
            for perm, perm_dict in list(item[permskey].items()):
                try:
                    obj.manage_permission(perm,
                                          roles=perm_dict['roles'],
                                          acquire=perm_dict['acquire'])
                except ValueError:
                    # Log and keep going; a single bad permission should
                    # not abort the whole import.
                    logger.error('Error setting the perm "%s" on %s'
                                 % (perm, item[pathkey]))
        yield item
def __iter__(self):
    """Construct objects in their parent container and set their path.

    Items with an unresolvable parent or failing construction are
    dropped from the pipeline (logged, not yielded).
    """
    for item in self.previous:
        portal_type = item[u'_type']
        parent_guid = item.get(u'parent_guid')
        if parent_guid:
            path = self.bundle.item_by_guid[parent_guid][u'_path']
            context = traverse(self.site, path, None)
        else:
            # No parent pointer: construct directly in the site root
            context = self.site
        parent_path = '/'.join(context.getPhysicalPath())
        try:
            obj = self._construct_object(context, portal_type, item)
            logger.info("Constructed %r" % obj)
        except ValueError as e:
            # Fix: `e.message` was removed in Python 3 and raises
            # AttributeError there; str(e) carries the same text.
            logger.warning(
                u'Could not create object at {} with guid {}. {}'.format(
                    parent_path, item['guid'], str(e)))
            continue
        # build path relative to plone site
        item[u'_path'] = '/'.join(obj.getPhysicalPath()[2:])
        yield item
def __iter__(self):
    """Set layout and default page on browser-default-aware objects."""
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            yield item
            continue
        layoutkey = self.layoutkey(*item.keys())[0]
        defaultpagekey = self.defaultpagekey(*item.keys())[0]
        obj = traverse(self.context, str(item[pathkey]).lstrip('/'), None)
        if obj is None:
            yield item
            continue
        if not ISelectableBrowserDefault.providedBy(obj):
            yield item
            continue
        if layoutkey:
            layout = item[layoutkey]
            if layout:
                obj.setLayout(layout)
        if defaultpagekey:
            defaultpage = item[defaultpagekey]
            if defaultpage:
                obj.setDefaultPage(defaultpage)
        yield item
def resolve_parent_pointer(self, item):
    """Resolves an item's parent pointer to a container obj and its path.

    Returns a (container, parent_path) tuple, or None when the parent
    cannot be determined (item should then be skipped).
    """
    parent_guid = item.get('parent_guid')
    formatted_parent_refnum = item.get('_formatted_parent_refnum')
    if parent_guid is not None:
        parent_path = self.path_from_guid(parent_guid)
    elif formatted_parent_refnum is not None:
        parent_path = self.path_from_refnum(formatted_parent_refnum)
    elif item['_type'] == 'opengever.repository.repositoryroot':
        # Repo roots are the only type that don't require a parent
        # pointer, and get constructed directly in the Plone site.
        # NOTE(review): container is recomputed by the traverse() below
        # from parent_path '/'; this assignment appears redundant - confirm
        container = self.site
        parent_path = '/'
    else:
        # Should never happen - schema requires a parent pointer
        logger.warning(u'Item with GUID %s is missing a parent pointer, '
                       u'skipping.' % item['guid'])
        return
    if not parent_path:
        logger.warning(
            u'Could not determine parent container for item with '
            u'GUID %s, skipping.' % item['guid'])
        return
    container = traverse(self.site, parent_path, None)
    return container, parent_path
def __iter__(self):
    """Add criteria to ATTopic objects and rewrite the item path."""
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            yield item
            continue
        criterionkey = self.criterionkey(*item.keys())[0]
        if not criterionkey:
            yield item
            continue
        fieldkey = self.fieldkey(*item.keys())[0]
        if not fieldkey:
            yield item
            continue
        path = item[pathkey]
        obj = traverse(self.context, str(path).lstrip('/'), None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        criterion = item[criterionkey]
        field = item[fieldkey]
        if IATTopic.providedBy(obj):
            critid = 'crit__%s_%s' % (field, criterion)
            # Only add the criterion if it is not present yet
            if getattr(aq_base(obj), critid, None) is None:
                obj.addCriterion(field, criterion)
            # Point the item at the criterion sub-object
            item[pathkey] = '%s/%s' % (path, critid)
        yield item
def __iter__(self):
    """Copy date metadata from the item onto the matching object."""
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            yield item
            continue
        path = item[pathkey]
        ob = traverse(self.context, str(path).lstrip('/'), None)
        if ob is None:
            # object not found
            yield item
            continue
        # (item key, object attribute) pairs handled identically
        for key, attr in ((self.creationkey, 'creation_date'),
                          (self.modificationkey, 'modification_date'),
                          (self.effectivekey, 'effective_date'),
                          (self.expirationkey, 'expiration_date')):
            value = item.get(key, None)
            if value and hasattr(ob, attr):
                setattr(ob, attr, DateTime(value))
        yield item
def process_item(self, item, defer=None):
    """Replace paths with objects

    Manipulates item in-place, returns success (True or False).
    Success is defined as 'all paths resolved' if defer is true, and
    otherwise means 'all existing paths resolved'. If defer is None,
    self.defer is used instead.
    """
    if defer is None:
        defer = self.defer
    context = self.context
    resolved = {}
    for key in item.keys():
        match = self.keys(key)[1]
        if match:
            single, paths = assequence(item[key])
            result = [traverse(context, p.lstrip('/'), None)
                      for p in paths]
            if defer and None in result:
                return False
            if single:
                result = result[0]
            else:
                # Strip unresolved items. Fix: filter() returns a lazy
                # iterator on Python 3, which would be stored into the
                # item (and exhausted on first use) instead of a list;
                # a list comprehension behaves identically on Python 2.
                result = [x for x in result if x is not None]
            resolved[key] = result
    item.update(resolved)
    return True
def __iter__(self):
    """Force the stored UID onto objects found by path."""
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        uidkey = self.uidkey(*keys)[0]
        if not (pathkey and uidkey):
            # not enough info
            yield item
            continue
        obj = traverse(self.context, str(item[pathkey]).lstrip('/'), None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        uid = item[uidkey]
        if IReferenceable.providedBy(obj):
            oldUID = obj.UID()
            if oldUID != uid:
                if oldUID:
                    obj._setUID(uid)
                else:
                    # No UID yet: set the attribute directly
                    setattr(obj, UUID_ATTR, uid)
        if IAttributeUUID.providedBy(obj):
            IMutableUUID(obj).set(uid)
        yield item
def __iter__(self):
    """Reindex catalog-aware objects found by path."""
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            yield item
            continue
        path = item[pathkey]
        ob = traverse(self.context, str(path).lstrip('/'), None)
        if ob is None:
            # object not found
            yield item
            continue
        if not isinstance(ob, CatalogAware):
            # can't notify portal_catalog
            yield item
            continue
        if self.verbose:
            # add a log to display reindexation progess
            self.counter += 1
            logger.info('Reindex object %s (%s)', path, self.counter)
        # update catalog, restricted to configured indexes when given
        if self.indexes:
            self.portal_catalog.reindexObject(ob, idxs=self.indexes)
        else:
            self.portal_catalog.reindexObject(ob)
        yield item
def __iter__(self):
    """Add a criterion to each ATTopic and retarget the item path."""
    for item in self.previous:
        item_keys = item.keys()
        pathkey = self.pathkey(*item_keys)[0]
        if not pathkey:
            yield item
            continue
        criterionkey = self.criterionkey(*item_keys)[0]
        if not criterionkey:
            yield item
            continue
        fieldkey = self.fieldkey(*item_keys)[0]
        if not fieldkey:
            yield item
            continue
        path = item[pathkey]
        obj = traverse(self.context, str(path).lstrip('/'), None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        criterion = item[criterionkey]
        field = item[fieldkey]
        if IATTopic.providedBy(obj):
            critid = 'crit__%s_%s' % (field, criterion)
            # Create the criterion only when missing
            if getattr(aq_base(obj), critid, None) is None:
                obj.addCriterion(field, criterion)
            item[pathkey] = '%s/%s' % (path, critid)
        yield item
def process_item(self, item, defer=None):
    """Replace paths with objects

    Manipulates item in-place, returns success (True or False).
    Success is defined as 'all paths resolved' if defer is true, and
    otherwise means 'all existing paths resolved'. If defer is None,
    self.defer is used instead.
    """
    if defer is None:
        defer = self.defer
    context = self.context
    resolved = {}
    for key in item.keys():
        match = self.keys(key)[1]
        if match:
            single, paths = assequence(item[key])
            result = [
                traverse(context, p.lstrip('/'), None) for p in paths
            ]
            if defer and None in result:
                return False
            if single:
                result = result[0]
            else:
                # Strip unresolved items. Fix: filter() is a lazy
                # iterator on Python 3 and would be stored into the
                # item instead of a list; the comprehension is the
                # same list on Python 2.
                result = [x for x in result if x is not None]
            resolved[key] = result
    item.update(resolved)
    return True
def __iter__(self):
    """Pass items through, then move single-referenced attachments.

    A teed copy of the pipeline is post-processed after all items have
    been yielded, so every object already exists when moving.
    """
    # keep a copy of previous generator to post-process items
    self.previous, self.postprocess = itertools.tee(self.previous)
    for item in self.previous:
        yield item
    for item in self.postprocess:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            continue
        obj = traverse(self.context, str(item[pathkey]).lstrip('/'), None)
        if obj is None:
            # object not found
            continue
        # XXX: is there an interface provided by both?
        if obj.portal_type not in ('Image', 'File'):
            continue
        references = obj.getBackReferences()
        if len(references) != 1:
            continue
        # Move attachment into its single referencing container
        if references[0].portal_type == self.portal_type:
            api.content.move(source=obj, target=references[0])
def resolve_uid(self, x):
    """Parse HTML and update with URLs pointing to Plone objects.

    ex. url: "http://worpress.com/wp-content/uploads/2010/04/image.jpg"
    becomes: "resolveuid/c82a53270c904cfbbfd1a0d4cef90676"

    :param x: [required] Parsed Regex
    :type x: type Regex Match object
    :returns: the tag with an internal url
    :rtype: str
    """
    start = x.group(1)  # Start of tag ex.: '<img src="'
    end = x.group(3)    # End of tag ex.: '" />'
    url = fix_id(x.group(2))
    parsed = urlparse(url)
    internal_url = parsed.netloc == self.domain
    is_site_root = parsed.path in ('', '/')
    # links to external URL or to site root are ignored
    if not internal_url or is_site_root:
        return x.group(0)  # return unchanged
    path = str(parsed.path).strip(' ').lstrip('/')
    obj = traverse(self.context, path, None)
    if obj is None:
        # object not found
        logger.warn('Could not resolve UUID: {0}'.format(url))
        return x.group(0)  # return unchanged
    # Create internal URL
    return '{0}resolveuid/{1}{2}'.format(start, obj.UID(), end)
def __iter__(self):
    """Apply layout and default-page settings from the item."""
    for item in self.previous:
        item_keys = item.keys()
        pathkey = self.pathkey(*item_keys)[0]
        if not pathkey:
            yield item
            continue
        layoutkey = self.layoutkey(*item_keys)[0]
        defaultpagekey = self.defaultpagekey(*item_keys)[0]
        obj = traverse(self.context, str(item[pathkey]).lstrip('/'), None)
        if obj is None:
            yield item
            continue
        if not ISelectableBrowserDefault.providedBy(obj):
            yield item
            continue
        if layoutkey:
            layout = item[layoutkey]
            if layout:
                # cast to str as in the original (unicode-safe ids)
                obj.setLayout(str(layout))
        if defaultpagekey:
            defaultpage = item[defaultpagekey]
            if defaultpage:
                obj.setDefaultPage(str(defaultpage))
        yield item
def __iter__(self):
    """Reindex each catalog-aware object referenced by an item path."""
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            yield item
            continue
        path = item[pathkey]
        ob = traverse(self.context, str(path).lstrip('/'), None)
        if ob is None:
            # object not found
            yield item
            continue
        if not isinstance(ob, CatalogAware):
            # can't notify portal_catalog
            yield item
            continue
        if self.verbose:
            # add a log to display reindexation progess
            self.counter += 1
            logger.info("Reindex object %s (%s)", path, self.counter)
        # update catalog (optionally restricted to specific indexes)
        if self.indexes:
            self.portal_catalog.reindexObject(ob, idxs=self.indexes)
        else:
            self.portal_catalog.reindexObject(ob)
        yield item
def __iter__(self):
    """Grant stored local roles on each object and reindex security."""
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        roleskey = self.roleskey(*keys)[0]
        if not pathkey or not roleskey or \
                roleskey not in item:
            # not enough info
            yield item
            continue
        path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
        obj = traverse(self.context, path, None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        if IRoleManager.providedBy(obj):
            for principal, roles in item[roleskey].items():
                if roles:
                    obj.manage_addLocalRoles(principal, roles)
                    obj.reindexObjectSecurity()
        yield item
def __iter__(self):
    """Assign local roles from the item mapping (py3-safe variant)."""
    for item in self.previous:
        keys = list(item.keys())
        pathkey = self.pathkey(*keys)[0]
        roleskey = self.roleskey(*keys)[0]
        if not pathkey or not roleskey or \
                roleskey not in item:
            # not enough info
            yield item
            continue
        path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
        obj = traverse(self.context, path, None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        if IRoleManager.providedBy(obj):
            for principal, roles in list(item[roleskey].items()):
                if roles:
                    obj.manage_addLocalRoles(principal, roles)
                    obj.reindexObjectSecurity()
        yield item
def resolve_parent_pointer(self, item):
    """Resolves an item's parent pointer to a container obj and its path.

    Returns (container, parent_path) or None when the parent cannot be
    determined and the item should be skipped.
    """
    parent_guid = item.get('parent_guid')
    formatted_parent_refnum = item.get('_formatted_parent_refnum')
    if parent_guid is not None:
        parent_path = self.path_from_guid(parent_guid)
    elif formatted_parent_refnum is not None:
        parent_path = self.path_from_refnum(formatted_parent_refnum)
    elif item['_type'] == 'opengever.repository.repositoryroot':
        # Repo roots are the only type that don't require a parent
        # pointer, and get constructed directly in the Plone site.
        # NOTE(review): container gets recomputed by traverse() below
        container = self.site
        parent_path = '/'
    else:
        # Should never happen - schema requires a parent pointer
        logger.warning(
            u'Item with GUID %s is missing a parent pointer, '
            u'skipping.' % item['guid'])
        return
    if not parent_path:
        logger.warning(
            u'Could not determine parent container for item with '
            u'GUID %s, skipping.' % item['guid'])
        return
    container = traverse(self.site, parent_path, None)
    return container, parent_path
def _get_repo_root_id(self, item, portal):
    """Return the repository-root id for an item.

    Classic og.setup items carry it directly in '_repo_root_id';
    OGGBundle imports look the root object up by path instead.
    """
    if '_repo_root_id' in item:
        # Classic og.setup
        return item['_repo_root_id']
    # OGGBundle import: resolve the object and read its id
    return traverse(portal, item['_path']).id
def __iter__(self):
    """Iterate over items, setting and fixing known date fields.

    Fixes: removed leftover debug print() calls and commented-out
    pdb/dead code from the original; logic is otherwise unchanged.
    """
    default_timezone = self.options.get('default_timezone') or 'UTC'
    if HAS_PAC:
        # Prefer the portal-configured timezone, fall back to the option
        try:
            tz = api.portal.get_registry_record('plone.portal_timezone')
        except InvalidParameterError:
            tz = None
        if tz is not None:
            tz = timezone(tz)
        else:
            tz = timezone(default_timezone)
    for item in self.previous:
        if self.condition(item):
            pathkey = self.pathkey(*item.keys())[0]
            if not pathkey:
                # not enough info
                yield item
                continue
            path = item[pathkey]
            obj = traverse(self.context, str(path).lstrip('/'), None)
            if obj is None:
                # object not found
                yield item
                continue
            if 'creation_date' in item:
                # works for both Archetypes and Dexterity content
                obj.creation_date = item['creation_date']
            if 'modification_date' in item:
                obj.setModificationDate(item['modification_date'])
            effectiveDate = item.get('effectiveDate', None)
            if effectiveDate and effectiveDate != u"None":
                obj.setEffectiveDate(effectiveDate)
            else:
                obj.setEffectiveDate(item.get('modification_date', None))
            if 'expirationDate' in item:
                obj.setExpirationDate(item['expirationDate'])
            # Fix issue where expiration date was before effective date
            effective = obj.effective()
            expires = obj.expires()
            if effective and expires and expires < effective:
                obj.setExpirationDate(effective)
            if HAS_PAC and item.get('_type') == 'Event':
                # Normalize event start/end into the resolved timezone
                obj.start = pydt(DateTime(obj.start)).astimezone(tz)
                obj.end = pydt(DateTime(obj.end)).astimezone(tz)
        yield item
def __iter__(self):
    """Collect item positions per parent, then reorder each parent.

    Positions are stored as {parent_path: {item_id: item_pos}} while
    items stream through, and applied once the pipeline is exhausted.
    """
    positions_mapping = {}
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        poskey = self.poskey(*keys)[0]
        if not (pathkey and poskey):
            yield item
            continue
        item_id = item[pathkey].split('/')[-1]
        parent_path = '/'.join(item[pathkey].split('/')[:-1])
        positions_mapping.setdefault(parent_path, {})[item_id] = item[poskey]
        yield item
    # Set positions on every parent
    for path, positions in positions_mapping.items():
        # Normalize positions to a dense 0..n-1 range
        ordered_keys = sorted(positions.keys(), key=lambda x: positions[x])
        normalized_positions = {}
        for pos, key in enumerate(ordered_keys):
            normalized_positions[key] = pos
        # TODO: After the new collective.transmogrifier release (>1.4), the
        # utils.py provides a traverse method.
        from collective.transmogrifier.utils import traverse
        parent = traverse(self.context, path)
        if not parent:
            continue
        # Reorder Collage items
        if parent.portal_type in ('Collage', 'CollageColumn', 'CollageRow'):
            for key in normalized_positions.keys():
                parent.moveObjectToPosition(key, normalized_positions[key])
        parent_base = aq_base(parent)
        if hasattr(parent_base, 'getOrdering'):
            ordering = parent.getOrdering()
            # Only DefaultOrdering of p.folder is supported
            if (not hasattr(ordering, '_order')
                    and not hasattr(ordering, '_pos')):
                continue
            order = ordering._order()
            pos = ordering._pos()
            order.sort(key=lambda x: normalized_positions.get(
                x, pos.get(x, self.default_pos)))
            for i, id_ in enumerate(order):
                pos[id_] = i
            notifyContainerModified(parent)
def __iter__(self):
    """Record item positions per parent path, then reorder each parent."""
    # {parent_path: {item_id: item_pos}}
    positions_mapping = {}
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        poskey = self.poskey(*keys)[0]
        if not (pathkey and poskey):
            yield item
            continue
        item_id = item[pathkey].split('/')[-1]
        parent_path = '/'.join(item[pathkey].split('/')[:-1])
        positions_mapping.setdefault(parent_path, {})[item_id] = item[poskey]
        yield item
    # Apply collected positions on every parent
    for path, positions in positions_mapping.items():
        # Normalize positions to a dense sequence
        ordered_keys = sorted(positions.keys(), key=lambda x: positions[x])
        normalized_positions = {}
        for pos, key in enumerate(ordered_keys):
            normalized_positions[key] = pos
        # TODO: After the new collective.transmogrifier release (>1.4), the
        # utils.py provides a traverse method.
        from collective.transmogrifier.utils import traverse
        parent = traverse(self.context, path)
        if not parent:
            continue
        parent_base = aq_base(parent)
        if hasattr(parent_base, 'getOrdering'):
            ordering = parent.getOrdering()
            # Only DefaultOrdering of p.folder is supported
            if (not hasattr(ordering, '_order')
                    and not hasattr(ordering, '_pos')):
                continue
            order = ordering._order()
            pos = ordering._pos()
            order.sort(key=lambda x: normalized_positions.get(
                x, pos.get(x, self.default_pos)))
            for i, id_ in enumerate(order):
                pos[id_] = i
            notifyContainerModified(parent)
def __iter__(self):
    """Construct content objects of the item's type at the item's path."""
    for item in self.previous:
        keys = item.keys()
        typekey = self.typekey(*keys)[0]
        pathkey = self.pathkey(*keys)[0]
        if not (typekey and pathkey):
            logger.warn('Not enough info for item: %s' % item)
            yield item
            continue
        type_, path = item[typekey], item[pathkey]
        fti = self.ttool.getTypeInfo(type_)
        if fti is None:
            logger.warn('Not an existing type: %s' % type_)
            yield item
            continue
        path = path.encode('ASCII')
        container, id = posixpath.split(path.strip('/'))
        context = traverse(self.context, container, None)
        if context is None:
            error = 'Container %s does not exist for item %s' % (container, path)
            if self.required:
                raise KeyError(error)
            logger.warn(error)
            yield item
            continue
        if getattr(aq_base(context), id, None) is not None:
            # item exists
            yield item
            continue
        try:
            obj = fti._constructInstance(context, id)
        except (BadRequest, ValueError):
            logger.warn('Could not create type %s with id %s at %s' % (
                type_, id, path))
            yield item
            continue
        # For CMF <= 2.1 (aka Plone 3)
        if hasattr(fti, '_finishConstruction'):
            obj = fti._finishConstruction(obj)
        # The id may have been rewritten during construction
        if obj.getId() != id:
            item[pathkey] = posixpath.join(container, obj.getId())
        yield item
def __iter__(self):
    """Gather positions per parent, then apply them (py3-safe variant)."""
    # {parent_path: {item_id: item_pos}}
    positions_mapping = {}
    for item in self.previous:
        keys = list(item.keys())
        pathkey = self.pathkey(*keys)[0]
        poskey = self.poskey(*keys)[0]
        if not (pathkey and poskey):
            yield item
            continue
        item_id = item[pathkey].split('/')[-1]
        parent_path = '/'.join(item[pathkey].split('/')[:-1])
        positions_mapping.setdefault(parent_path, {})[item_id] = item[poskey]
        yield item
    # Set positions on every parent
    for path, positions in list(positions_mapping.items()):
        # Normalize positions into a dense 0..n-1 sequence
        ordered_keys = sorted(list(positions.keys()),
                              key=lambda x: positions[x])
        normalized_positions = {}
        for pos, key in enumerate(ordered_keys):
            normalized_positions[key] = pos
        path = safe_unicode(path.lstrip('/')).encode('ascii')
        parent = traverse(self.context, path, None)
        if not parent:
            continue
        parent_base = aq_base(parent)
        if hasattr(parent_base, 'getOrdering'):
            ordering = parent.getOrdering()
            # Only DefaultOrdering of p.folder is supported
            if (not hasattr(ordering, '_order')
                    and not hasattr(ordering, '_pos')):
                continue
            order = ordering._order()
            pos = ordering._pos()
            order.sort(key=lambda x: normalized_positions.get(
                x, pos.get(x, self.default_pos)))
            for i, id_ in enumerate(order):
                pos[id_] = i
            notifyContainerModified(parent)
def __iter__(self):
    """Accumulate positions per parent path and reorder each parent."""
    # Mapping of {parent_path: {item_id: item_pos}}
    positions_mapping = {}
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        poskey = self.poskey(*keys)[0]
        if not (pathkey and poskey):
            yield item
            continue
        item_id = item[pathkey].split('/')[-1]
        parent_path = '/'.join(item[pathkey].split('/')[:-1])
        positions_mapping.setdefault(parent_path, {})[item_id] = item[poskey]
        yield item
    # Apply positions on every parent
    for path, positions in positions_mapping.items():
        # Normalize positions
        ordered_keys = sorted(positions.keys(), key=lambda x: positions[x])
        normalized_positions = {}
        for pos, key in enumerate(ordered_keys):
            normalized_positions[key] = pos
        path = safe_unicode(path.lstrip('/')).encode('ascii')
        parent = traverse(self.context, path, None)
        if not parent:
            continue
        parent_base = aq_base(parent)
        if hasattr(parent_base, 'getOrdering'):
            ordering = parent.getOrdering()
            # Only DefaultOrdering of p.folder is supported
            if (not hasattr(ordering, '_order')
                    and not hasattr(ordering, '_pos')):
                continue
            order = ordering._order()
            pos = ordering._pos()
            order.sort(key=lambda x: normalized_positions.get(
                x, pos.get(x, self.default_pos)))
            for i, id_ in enumerate(order):
                pos[id_] = i
            notifyContainerModified(parent)
def __iter__(self):
    """Restore ownership info on Archetypes objects (py3-safe variant).

    NOTE(review): non-Archetypes objects are dropped from the pipeline
    (no yield), as in the original.
    """
    for item in self.previous:
        keys = list(item.keys())
        pathkey = self.pathkey(*keys)[0]
        ownerkey = self.ownerkey(*keys)[0]
        if not pathkey or not ownerkey or \
                ownerkey not in item:
            # not enough info
            yield item
            continue
        owner = item[ownerkey]
        if owner is None or len(owner) != 2:
            # owner is None or something else went wrong
            yield item
            continue
        path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
        obj = traverse(self.context, path, None)
        if obj is None:
            yield item
            continue
        # So there's no archetypes, so there will be no more
        # code execution after this point, so why not just continue
        if not IBaseObject.providedBy(obj):
            continue
        if owner[0] and owner[1]:
            try:
                obj.changeOwnership(self.memtool.getMemberById(owner[1]))
            except Exception as e:
                raise Exception('ERROR: %s SETTING OWNERSHIP TO %s'
                                % (str(e), item[pathkey]))
            try:
                obj.manage_setLocalRoles(owner[1], ['Owner'])
            except Exception as e:
                raise Exception('ERROR: %s SETTING OWNERSHIP2 TO %s'
                                % (str(e), item[pathkey]))
        elif not owner[0] and owner[1]:
            try:
                obj._owner = owner[1]
            except Exception as e:
                raise Exception('ERROR: %s SETTING __OWNERSHIP TO %s'
                                % (str(e), item[pathkey]))
        yield item
def __iter__(self):
    """Create content objects from (type, path) pairs in the pipeline."""
    for item in self.previous:
        item_keys = item.keys()
        typekey = self.typekey(*item_keys)[0]
        pathkey = self.pathkey(*item_keys)[0]
        if not (typekey and pathkey):
            logger.warn('Not enough info for item: %s' % item)
            yield item
            continue
        type_, path = item[typekey], item[pathkey]
        fti = self.ttool.getTypeInfo(type_)
        if fti is None:
            logger.warn('Not an existing type: %s' % type_)
            yield item
            continue
        path = path.encode('ASCII')
        container, id = posixpath.split(path.strip('/'))
        context = traverse(self.context, container, None)
        if context is None:
            error = 'Container %s does not exist for item %s' % (
                container, path)
            if self.required:
                raise KeyError(error)
            logger.warn(error)
            yield item
            continue
        if getattr(aq_base(context), id, None) is not None:
            # item exists
            yield item
            continue
        try:
            obj = fti._constructInstance(context, id)
        except (BadRequest, ValueError):
            logger.warn('Could not create type %s with id %s at %s' % (
                type_, id, path))
            yield item
            continue
        # For CMF <= 2.1 (aka Plone 3)
        if hasattr(fti, '_finishConstruction'):
            obj = fti._finishConstruction(obj)
        # Record the actual id if construction changed it
        if obj.getId() != id:
            item[pathkey] = posixpath.join(container, obj.getId())
        yield item
def __iter__(self):
    """Set the stored review_state on each object via the workflow tool.

    Fix: removed the dead `if not (pathkey and statekey)` guard - both
    are hard-coded non-empty string literals, so it could never trigger.
    """
    for item in self.previous:
        pathkey = '_path'
        statekey = 'review_state'
        if statekey not in item:
            # no WF state given
            yield item
            continue
        path, state = item[pathkey], item[statekey]
        obj = traverse(self.context, path, None)
        if obj is None:
            log.warning(
                "Cannot set workflow state for %s. "
                "Object doesn't exist" % path)
            yield item
            continue
        try:
            wf_ids = self.wftool.getChainFor(obj)
            if wf_ids:
                wf_id = wf_ids[0]
                comment = 'Set workflow state upon import.'
                self.wftool.setStatusOf(
                    wf_id, obj, {'review_state': state,
                                 'action': state,
                                 'actor': 'GEVER migration',
                                 'time': DateTime(),
                                 'comments': comment})
                wfs = {wf_id: self.wftool.getWorkflowById(wf_id)}
                self.wftool._recursiveUpdateRoleMappings(obj, wfs)
                # Since the `View` permissions isn't affected, there's
                # no need for obj.reindexObjectSecurity() here
        except WorkflowException as exc:
            log.warning(
                "Cannot set workflow state for %s. "
                "An exception occured: %r" % (path, exc))
        yield item
def __iter__(self):
    """Apply the item's review_state to the matching object.

    Fix: dropped the always-false `if not (pathkey and statekey)` check;
    both names are constant non-empty strings, so the branch was dead.
    """
    for item in self.previous:
        pathkey = '_path'
        statekey = 'review_state'
        if statekey not in item:
            # no WF state given
            yield item
            continue
        path, state = item[pathkey], item[statekey]
        obj = traverse(self.context, path, None)
        if obj is None:
            log.warning("Cannot set workflow state for %s. "
                        "Object doesn't exist" % path)
            yield item
            continue
        try:
            wf_ids = self.wftool.getChainFor(obj)
            if wf_ids:
                wf_id = wf_ids[0]
                comment = 'Set workflow state upon import.'
                self.wftool.setStatusOf(
                    wf_id, obj, {
                        'review_state': state,
                        'action': state,
                        'actor': 'GEVER migration',
                        'time': DateTime(),
                        'comments': comment
                    })
                wfs = {wf_id: self.wftool.getWorkflowById(wf_id)}
                self.wftool._recursiveUpdateRoleMappings(obj, wfs)
                # Since the `View` permissions isn't affected, there's
                # no need for obj.reindexObjectSecurity() here
        except WorkflowException as exc:
            log.warning("Cannot set workflow state for %s. "
                        "An exception occured: %r" % (path, exc))
        yield item
def __iter__(self):
    """Restore ownership data on Archetypes objects.

    NOTE(review): non-Archetypes objects are dropped (no yield), as in
    the original implementation.
    """
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        ownerkey = self.ownerkey(*item.keys())[0]
        if not pathkey or not ownerkey or \
                ownerkey not in item:
            # not enough info
            yield item
            continue
        owner = item[ownerkey]
        if owner is None or len(owner) != 2:
            # owner is None or something else went wrong
            yield item
            continue
        path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
        obj = traverse(self.context, path, None)
        if obj is None:
            yield item
            continue
        if not IBaseObject.providedBy(obj):
            continue
        if owner[0] and owner[1]:
            try:
                obj.changeOwnership(self.memtool.getMemberById(owner[1]))
            except Exception as e:
                raise Exception('ERROR: %s SETTING OWNERSHIP TO %s'
                                % (str(e), item[pathkey]))
            try:
                obj.manage_setLocalRoles(owner[1], ['Owner'])
            except Exception as e:
                raise Exception('ERROR: %s SETTING OWNERSHIP2 TO %s'
                                % (str(e), item[pathkey]))
        elif not owner[0] and owner[1]:
            try:
                obj._owner = owner[1]
            except Exception as e:
                raise Exception('ERROR: %s SETTING __OWNERSHIP TO %s'
                                % (str(e), item[pathkey]))
        yield item
def add_related_content(self, obj, item):
    """Look into WordPress list of related content and create Plone
    related content list.

    :param obj: [required] object to add related content
    :type obj: type constructor parameter
    :param item: [required] transmogrify item
    :type item: dict
    """
    pinged = item.get('_pinged', '')
    if pinged == '':
        # No related content
        return
    # The URLs come concatenated without a separator; split on 'http'
    # and reconstruct each URL.
    # TODO: handle HTTPS scheme
    related_urls = set('http{0}'.format(url.rstrip('/'))
                       for url in pinged.split('http')[1:])
    related_items = []
    for url in related_urls:
        url = fix_id(url)
        parsed = urlparse(url)
        # Skip URLs outside our domain
        if parsed.netloc != self.domain:
            continue
        path = str(parsed.path).strip(' ').lstrip('/')
        related_obj = traverse(self.context, path, None)
        if related_obj is None:
            # object not found
            logger.warn('Broken link: {0}'.format(url))
            continue
        # Register the relation by intid
        intids = getUtility(IIntIds)
        related_items.append(RelationValue(intids.getId(related_obj)))
    if len(related_items) == 0:
        # No related content resolved
        return
    obj.relatedItems = related_items
def __iter__(self):
    """Load file data from disk into the named-blob fields of each
    item's already-created object."""
    for item in self.previous:
        guid = item['guid']
        pathkey = self.pathkey(*item.keys())[0]
        path = item.get(pathkey)
        if not path:
            logger.warning("Missing path key for file %s" % guid)
            yield item
            continue
        obj = traverse(self.site, path, None)
        if obj is None:
            logger.warning(
                "Cannot set file. Document %s doesn't exist." % path)
            yield item
            continue
        primary_added = False
        for field, field_pathkey in self.get_supported_fields(item):
            try:
                filepath = self.get_abs_filepath(
                    item, field_pathkey, path)
                self.validate_filepath(item, filepath, path)
            except FileLoadingFailed:
                continue
            try:
                self.add_namedblob_file(filepath, field, obj)
                # Only the primary 'filepath' field counts as a
                # successful file addition.
                if field_pathkey == 'filepath':
                    primary_added = True
            except EnvironmentError as e:
                # TODO: Check for this in OGGBundle validation
                logger.warning(
                    "Can't open file %s. %s." % (filepath, str(e)))
                self.bundle.errors['files_io_errors'].append(
                    (guid, filepath, str(e), path))
        if primary_added:
            self.run_after_creation_jobs(item, obj)
        yield item
def __iter__(self):
    """Attach files from disk to the blob fields of existing objects."""
    for item in self.previous:
        guid = item['guid']
        pathkey = self.pathkey(*item.keys())[0]
        path = item.get(pathkey)
        if not path:
            logger.warning("Missing path key for file %s" % guid)
            yield item
            continue
        document = traverse(self.site, path, None)
        if document is None:
            logger.warning("Cannot set file. Document %s doesn't exist." % path)
            yield item
            continue
        file_added = False
        for blob_field, blob_pathkey in self.get_supported_fields(item):
            try:
                source = self.get_abs_filepath(item, blob_pathkey, path)
                self.validate_filepath(item, source, path)
            except FileLoadingFailed:
                # skip fields whose file cannot be loaded
                continue
            try:
                self.add_namedblob_file(source, blob_field, document)
                # Mark file_added as successful only for the primary
                # 'filepath' field.
                if blob_pathkey == 'filepath':
                    file_added = True
            except EnvironmentError as e:
                # TODO: Check for this in OGGBundle validation
                logger.warning("Can't open file %s. %s." % (source, str(e)))
                error = (guid, source, str(e), path)
                self.bundle.errors['files_io_errors'].append(error)
        if file_added:
            self.run_after_creation_jobs(item, document)
        yield item
def __iter__(self):
    """Apply the configured layout (``self.view``) to every traversable
    object whose item matches ``self.condition``."""
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info to locate the object
            yield item
            continue
        target = traverse(
            self.context, str(item[pathkey]).lstrip('/'), None)
        if target is not None and self.condition(item):
            target.setLayout(self.view)
        yield item
def __iter__(self):
    """Assign navigation portlets for repository roots.

    For the first repository root seen, the primary portlets are also
    assigned on the Plone site itself.
    """
    site = self.context
    for item in self.previous:
        if item.get('_type') == 'opengever.repository.repositoryroot':
            repo_id = self._get_repo_root_id(item, site)
            if repo_id is not None:
                # Plone Site: only once, triggered by the first root
                if not self.has_assigned_primary_portlet:
                    assign_repo_root_portlets(site, repo_id)
                    self.has_assigned_primary_portlet = True
                # Repo root itself gets a tree portlet
                repo_obj = traverse(site, item['_path'])
                assign_tree_portlet(
                    context=repo_obj,
                    root_path=repo_id,
                    remove_nav=True,
                    block_inheritance=True,
                )
        yield item
def __iter__(self):
    """Pass all items through unchanged, then post-process each one by
    wiring up related content on the corresponding object."""
    # Duplicate the upstream generator so the items can be revisited
    # after every other section has consumed them.
    self.previous, self.postprocess = itertools.tee(self.previous)
    for item in self.previous:
        yield item
    for item in self.postprocess:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            continue
        obj = traverse(
            self.context, str(item[pathkey]).lstrip('/'), None)
        if obj is not None:
            self.add_related_content(obj, item)
def __iter__(self):
    """Create a portlet on each object selected by ``self.condition``.

    Items that do not match the condition, or whose path cannot be
    traversed, are passed through untouched; matched items get
    ``self.portlet_handler`` applied with properties derived from the
    item.
    """
    for item in self.previous:
        if not self.condition(item):
            yield item
            continue
        obj = traverse(self.context, item.get(self.pathkey, ''))
        if not obj:
            # BUG FIX: the original subscripted the bound method
            # (``item.get[self.pathkey]``), which raises TypeError.
            self.logger.warn(
                "Context does not exist at %s" % item.get(self.pathkey))
            yield item
            continue
        self.portlet_handler(obj, self.portlet_properties(item))
        self.logger.info(
            "Added portlet at %s" % (item[self.pathkey]))
        # BUG FIX: yield the item so it is not dropped from the
        # pipeline after a successful portlet assignment.
        yield item
def __iter__(self):  # noqa: C901
    """Replay workflow transitions recorded on each pipeline item.

    The transitions value may be a single transition id, a sequence of
    transition ids, or a sequence of dicts with a target
    ``review_state``, a ``time`` and an optional ``action``.
    """
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        transitionskey = self.transitionskey(*keys)[0]
        if not (pathkey and transitionskey):
            # not enough info
            yield item
            continue
        path, transitions = item[pathkey], item[transitionskey]
        if isinstance(transitions, basestring):
            # normalize a single transition id to a one-tuple
            transitions = (transitions, )
        obj = traverse(self.context, str(path).lstrip('/'), None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        for transition in transitions:
            if not isinstance(transition, basestring):
                # dict form: {'review_state', 'time', optional 'action'}
                state = transition['review_state']
                time = transition['time']
                action = transition.get('action')
                # no action if initial state
                if action:
                    try:
                        self.wftool.doActionFor(obj, action)
                    except WorkflowException:
                        # best effort: skip transitions the workflow
                        # tool rejects
                        pass
                # Rewrite the recorded time on matching history entries
                # so migrated content keeps its original timestamps.
                history = getattr(obj, 'workflow_history', None)
                if history:
                    for wf in history:
                        for wf_state in history[wf]:
                            if wf_state['review_state'] == state:
                                wf_state['time'] = time
                    # reassign to persist the mutated mapping
                    obj.workflow_history = history
            else:
                # plain transition id: just fire it, best effort
                try:
                    self.wftool.doActionFor(obj, transition)
                except WorkflowException:
                    pass
        yield item
def __iter__(self):  # noqa: C901
    """Replay workflow transitions recorded on each pipeline item.

    A transitions value may be one transition id, a sequence of ids, or
    a sequence of dicts describing a target ``review_state``, a
    ``time`` and an optional ``action``.
    """
    for item in self.previous:
        keys = item.keys()
        pathkey = self.pathkey(*keys)[0]
        transitionskey = self.transitionskey(*keys)[0]
        if not (pathkey and transitionskey):
            # not enough info
            yield item
            continue
        path, transitions = item[pathkey], item[transitionskey]
        if isinstance(transitions, basestring):
            # normalize a single transition id to a one-tuple
            transitions = (transitions,)
        obj = traverse(self.context, str(path).lstrip('/'), None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        for transition in transitions:
            if not isinstance(transition, basestring):
                # dict form: {'review_state', 'time', optional 'action'}
                state = transition['review_state']
                time = transition['time']
                action = transition.get('action')
                # no action if initial state
                if action:
                    try:
                        self.wftool.doActionFor(obj, action)
                    except WorkflowException:
                        # best effort: invalid transitions are skipped
                        pass
                # Patch the recorded time onto matching workflow
                # history entries so original timestamps survive.
                history = getattr(obj, 'workflow_history', None)
                if history:
                    for wf in history:
                        for wf_state in history[wf]:
                            if wf_state['review_state'] == state:
                                wf_state['time'] = time
                    # reassign to persist the mutated mapping
                    obj.workflow_history = history
            else:
                # plain transition id: just fire it, best effort
                try:
                    self.wftool.doActionFor(obj, transition)
                except WorkflowException:
                    pass
        yield item
def __iter__(self):
    """Restore Zope properties recorded on items onto their objects."""
    for item in self.previous:
        item_keys = item.keys()
        pathkey = self.pathkey(*item_keys)[0]
        propertieskey = self.propertieskey(*item_keys)[0]
        if (not pathkey or not propertieskey
                or propertieskey not in item):
            # not enough info
            yield item
            continue
        path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
        obj = traverse(self.context, path, None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        if not getattr(aq_base(obj), '_setProperty', False):
            # object has no property machinery at all
            yield item
            continue
        for pid, pvalue, ptype in item[propertieskey]:
            if getattr(aq_base(obj), pid, None) is not None:
                # if object have a attribute equal to property, do nothing
                continue
            if ptype == 'string':
                pvalue = safe_unicode(pvalue).encode('utf-8')
            try:
                if obj.hasProperty(pid):
                    obj._updateProperty(pid, pvalue)
                else:
                    obj._setProperty(pid, pvalue, ptype)
            except ConflictError:
                # never swallow ZODB conflict errors
                raise
            except Exception as e:
                raise Exception('Failed to set property "%s" type "%s"'
                                ' to "%s" at object %s. ERROR: %s' %
                                (pid, ptype, pvalue, str(obj), str(e)))
        yield item
def __iter__(self):
    """Assign portlets to repository roots (and, once, to the site)."""
    for item in self.previous:
        # Guard clauses: pass through everything that is not a
        # repository root with a resolvable id.
        if item.get('_type') != 'opengever.repository.repositoryroot':
            yield item
            continue
        repository_id = self._get_repo_root_id(item, self.context)
        if repository_id is None:
            yield item
            continue
        # Plone Site: assign primary portlets exactly once
        if not self.has_assigned_primary_portlet:
            assign_repo_root_portlets(self.context, repository_id)
            self.has_assigned_primary_portlet = True
        # Repo root
        root_obj = traverse(self.context, item['_path'])
        assign_tree_portlet(context=root_obj, root_path=repository_id,
                            remove_nav=True, block_inheritance=True)
        yield item
def __iter__(self):
    """Reindex each traversed object in portal_catalog and, when
    enabled, in Solr."""
    for item in self.previous:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            yield item
            continue
        path = item[pathkey]
        obj = traverse(self.context, str(path).lstrip('/'), None)
        if obj is None:
            # object not found
            yield item
            continue
        if not isinstance(obj, CatalogAware):
            # can't notify portal_catalog
            yield item
            continue
        if self.verbose:
            # add a log to display reindexation progress
            self.counter += 1
            logger.info('Reindex object %s (%s)', path, self.counter)
        # update catalog, restricted to selected indexes if configured
        if self.indexes:
            self.portal_catalog.reindexObject(obj, idxs=self.indexes)
        else:
            self.portal_catalog.reindexObject(obj)
        if self.solr_enabled:
            # Register collective.indexing hook, to make sure solr
            # changes are really sent to solr.  See
            # collective.indexing.queue.IndexQueue.hook.
            getQueue().hook()
            # The catalog's reindexObject does not trigger the
            # corresponding solr reindexing, so do it manually.
            processor = getUtility(IIndexQueueProcessor, name='ftw.solr')
            processor.index(obj)
        yield item
def __iter__(self):
    """Create a content object for each item at its target path."""
    for item in self.previous:
        item_keys = item.keys()
        typekey = self.typekey(*item_keys)[0]
        pathkey = self.pathkey(*item_keys)[0]
        if not (typekey and pathkey):
            LOG.warn('Not enough info for item: %s' % item)
            yield item
            continue
        type_, path = item[typekey], item[pathkey]
        path = path.encode('ASCII')
        container, obj_id = posixpath.split(path.strip('/'))
        parent = traverse(self.context, container, None)
        if parent is None:
            error = 'Container %s does not exist for item %s' % (container, path)
            if self.required:
                raise KeyError(error)
            LOG.warn(error)
            yield item
            continue
        if (self.update
                and getattr(aq_base(parent), obj_id, None) is not None):
            # item exists and update flag has been set
            yield item
            continue
        # tell our event stats event handler that we collect stats later
        IAnnotations(parent.REQUEST)['org.bccvl.site.stats.delay'] = True
        # item does not exist or update flag is false, so we create a new
        # object in any case
        obj = createContentInContainer(parent, type_, id=obj_id)
        if obj.getId() != obj_id:
            # the id was changed on creation; record the effective path
            item[pathkey] = posixpath.join(container, obj.getId())
        yield item
def __iter__(self):
    """Yield all items, then run OGGBundle post-processing.

    Post-processing creates initial versions for versionable types and
    periodically commits, helps garbage collection along and logs
    progress/memory usage, finishing with a final commit and a timing
    stat.
    """
    # Yield all items and collect them, so we can apply post-processing
    # steps *after* all the other sections have been executed
    items_to_post_process = []
    for item in self.previous:
        items_to_post_process.append(item)
        yield item

    # Any operations performed here will be applied after all the
    # previous sections have been run for all the items
    total = len(items_to_post_process)
    # BUG FIX: initialize count so the final commit message below does
    # not raise NameError when there were no items to post-process.
    count = 0
    for count, item in enumerate(items_to_post_process, start=1):
        if item['_type'] in VERSIONABLE_TYPES:
            log.info("Creating initial version: %s" % item['_path'])
            obj = traverse(self.site, item['_path'], None)
            create_initial_version(obj)

        if count % INTERMEDIATE_COMMIT_INTERVAL == 0:
            self.commit_and_log(
                "Intermediate commit during OGGBundle post-processing. "
                "%s of %s items." % (count, total))

        if count % GARBAGE_COLLECT_INTERVAL == 0:
            # Periodically help garbage collection along
            garbage_collect(self.transmogrifier)

        if count % PROGRESS_INTERVAL == 0:
            percentage = 100 * float(count) / float(total)
            log.info(
                "Post-processing: %s of %s items (%.2f%%) processed."
                % (count, total, percentage))
            rss = get_rss() / 1024.0
            log.info("Current memory usage (RSS): %0.2f MB" % rss)

    self.commit_and_log("Final commit after OGGBundle post-processing. "
                        "%s of %s items." % (count, total))
    self.bundle.stats['timings']['done_post_processing'] = datetime.now()
def __iter__(self):
    """Write base64-encoded datafield payloads onto Archetypes
    fields of already-created objects."""
    for item in self.previous:
        if '_path' not in item:
            # not enough info
            yield item
            continue
        path = safe_unicode(item['_path'].lstrip('/')).encode('ascii')
        obj = traverse(self.context, path, None)
        if obj is None:
            # path doesn't exist
            yield item
            continue
        if IBaseObject and IBaseObject.providedBy(obj):
            prefix = self.datafield_prefix
            for key in item.keys():
                if not key.startswith(prefix):
                    continue
                fieldname = key[len(prefix):]
                field = obj.getField(fieldname)
                if field is None:
                    continue
                value = base64.b64decode(item[key]['data'])
                # XXX: handle other data field implementations
                current = field.get(obj)
                # only write when the stored payload differs
                if not hasattr(current, 'data') or value != current.data:
                    field.set(obj, value)
                    obj.setFilename(item[key]['filename'], fieldname)
                    obj.setContentType(
                        item[key]['content_type'], fieldname)
        yield item
def normalize(self, item, max_length, parent_path):
    """Return a unique, URL-safe id for *item* within *parent_path*.

    The item's ``effective_title`` is transliterated and lowercased;
    if the parent already contains that name, the parent's
    ``INameChooser`` picks a free variant.

    :param item: transmogrifier item with an ``effective_title`` key
    :param max_length: maximum length of the generated id
    :param parent_path: path (relative to ``self.context``) of the
        container the id must be unique in
    :returns: normalized id string
    """
    title = item['effective_title']
    if not isinstance(title, unicode):
        title = title.decode('utf-8')
    # Use URLNormalizer to get locale-dependent transcriptions,
    # and IDNormalizer to get lowercased, ID-safe values.
    normalized_id = self.normalizer.normalize(title, max_length=max_length)
    normalized_id = self.id_normalizer.normalize(
        normalized_id, max_length=max_length)

    # Avoid id conflicts
    # NOTE(review): checkName tests the raw encoded title, not the
    # normalized id — confirm this is intentional.
    parent = traverse(self.context, parent_path)
    chooser = INameChooser(parent)
    if not chooser.checkName(title.encode('utf-8'), parent):
        # namechooser expects the object itself as second parameter, but
        # it's only used for getting the request, therefore we use the
        # parent.  FIX: reuse the already-adapted chooser instead of
        # adapting the parent a second time.
        normalized_id = chooser.chooseName(normalized_id, parent)
    return normalized_id
def __iter__(self):
    """Decode base64 datafield payloads and store them on the matching
    Archetypes fields."""
    for item in self.previous:
        if '_path' not in item:
            # not enough info
            yield item
            continue
        path = safe_unicode(item['_path'].lstrip('/')).encode('ascii')
        target = traverse(self.context, path, None)
        if target is None:
            # path doesn't exist
            yield item
            continue
        if IBaseObject and IBaseObject.providedBy(target):
            for key in list(item.keys()):
                if not key.startswith(self.datafield_prefix):
                    continue
                fieldname = key[len(self.datafield_prefix):]
                field = target.getField(fieldname)
                if field is None:
                    continue
                decoded = base64.b64decode(item[key]['data'])
                # XXX: handle other data field implementations
                stored = field.get(target)
                changed = (not hasattr(stored, 'data')
                           or decoded != stored.data)
                if changed:
                    field.set(target, decoded)
                    target.setFilename(item[key]['filename'], fieldname)
                    target.setContentType(
                        item[key]['content_type'], fieldname)
        yield item
def __iter__(self):
    """Create a content object for each item at its target path."""
    for item in self.previous:
        keys = item.keys()
        typekey = self.typekey(*keys)[0]
        pathkey = self.pathkey(*keys)[0]
        if not (typekey and pathkey):
            LOG.warn('Not enough info for item: %s' % item)
            yield item
            continue
        type_, path = item[typekey], item[pathkey]
        path = path.encode('ASCII')
        container, obj_id = posixpath.split(path.strip('/'))
        parent = traverse(self.context, container, None)
        if parent is None:
            error = 'Container %s does not exist for item %s' % (
                container, path)
            if self.required:
                raise KeyError(error)
            LOG.warn(error)
            yield item
            continue
        if (self.update
                and getattr(aq_base(parent), obj_id, None) is not None):
            # item exists and update flag has been set
            yield item
            continue
        # item does not exist or update flag is false, so we create a new
        # object in any case
        obj = createContentInContainer(parent, type_, id=obj_id)
        if obj.getId() != obj_id:
            # the id was changed on creation; record the effective path
            item[pathkey] = posixpath.join(container, obj.getId())
        yield item
def __iter__(self):
    """Pass items through, then rewrite URLs inside a rich-text field
    into resolveuid links on each corresponding object."""
    # Duplicate the upstream generator so items can be post-processed
    # after every other section has run.
    self.previous, self.postprocess = itertools.tee(self.previous)
    for item in self.previous:
        yield item
    for item in self.postprocess:
        pathkey = self.pathkey(*item.keys())[0]
        if not pathkey:
            # not enough info
            continue
        obj = traverse(
            self.context, str(item[pathkey]).lstrip('/'), None)
        if obj is None:
            # object not found
            continue
        if self.key not in item:
            # not enough info
            continue
        # Rewrite every matched URL through resolve_uid and store the
        # result back as a rich text value on the object.
        fixed_html = URL_RE.sub(self.resolve_uid, item[self.key])
        setattr(obj, self.key,
                RichTextValue(fixed_html, 'text/html', 'text/html'))
        # XXX: this seems to be very expensive
        # Update linkintegrity references
        if IBaseObject.providedBy(obj):
            modifiedArchetype(obj, event=None)
        elif IDexterityContent.providedBy(obj):
            modifiedDexterity(obj, event=None)