def create_object_slug(self, container, data, *args, **kwargs):
    """Create a content object slug for the given data

    :param container: parent object the new object is created in
    :param data: mapping with at least "id" and "portal_type"
    :returns: the created object (possibly renamed by an event handler)
    """
    # NOTE: named "oid" to avoid shadowing the builtin `id`
    oid = data.get("id")
    portal_type = data.get("portal_type")
    types_tool = api.get_tool("portal_types")
    fti = types_tool.getTypeInfo(portal_type)
    logger.info("Creating {} with ID {} in parent path {}".format(
        portal_type, oid, api.get_path(container)))

    if fti.product:
        # oldstyle Archetypes factory
        obj = _createObjectByType(portal_type, container, oid)
    else:
        # newstyle factory
        factory = getUtility(IFactory, fti.factory)
        obj = factory(oid, *args, **kwargs)
        if hasattr(obj, '_setPortalTypeName'):
            obj._setPortalTypeName(fti.getId())

    # notifies ObjectWillBeAddedEvent, ObjectAddedEvent and
    # ContainerModifiedEvent
    container._setObject(oid, obj)
    # we get the object here with the current object id, as it might be
    # renamed already by an event handler
    obj = container._getOb(obj.getId())
    return obj
def setupitems(self):
    """Lookup the setup items located directly below the setup folder

    :returns: catalog brains sorted ascending by sortable_title
    """
    setup_path = api.get_path(self.setup)
    query = dict(
        path=dict(query=setup_path, depth=1),
        sort_on="sortable_title",
        sort_order="ascending",
    )
    return api.search(query, "portal_catalog")
def setupitems(self):
    """Lookup the setup items located directly below the setup folder

    Items flagged with `exclude_from_nav` are skipped.

    :returns: catalog brains sorted ascending by sortable_title
    """
    query = {
        "path": {
            "query": api.get_path(self.setup),
            "depth": 1,
        },
        "sort_on": "sortable_title",
        "sort_order": "ascending"
    }
    brains = api.search(query, "portal_catalog")
    # drop items that are excluded from the navigation
    return [brain for brain in brains if not brain.exclude_from_nav]
def translate_path(self, remote_path):
    """ Translates a remote physical path into local path taking into
    account the prefix. If prefix is not enabled, then just the Remote
    Site ID will be replaced by the Local one. In case prefixes are
    enabled, then walk through all parents and add prefixes if necessary.
    :param remote_path: a path in a remote instance
    :return string: the translated path
    :raises SyncError: if the path is malformed or unknown to the soup
    """
    # a physical path must contain at least one "/"
    if not remote_path or "/" not in remote_path:
        raise SyncError("error", "Invalid remote path: '{}'".format(remote_path))
    # the remote portal root always maps to the local portal root
    if self.is_portal_path(remote_path):
        return api.get_path(self.portal)
    portal_id = self.portal.getId()
    # physical paths start with "/", so index 1 is the site id
    remote_portal_id = remote_path.split("/")[1]
    # no prefixes configured -> a plain site-id replacement is enough
    if not self.remote_prefix and not self.local_prefix:
        return str(remote_path.replace(remote_portal_id, portal_id))
    # look up the path in the soup table (presumably populated during
    # fetch -- every known remote object should have a record here)
    rec = self.sh.find_unique(REMOTE_PATH, remote_path)
    if rec is None:
        raise SyncError(
            "error",
            "Missing Remote path in Soup table: {}".format(remote_path))
    # Check if previously translated and saved (memoized result)
    if rec[LOCAL_PATH]:
        return str(rec[LOCAL_PATH])
    # Get parent's local path -- recurses up to the portal root, which
    # terminates via the is_portal_path() check above
    remote_parent_path = utils.get_parent_path(remote_path)
    parent_path = self.translate_path(remote_parent_path)
    # Will check whether prefix needed by portal type
    portal_type = rec[PORTAL_TYPE]
    prefix = self.get_prefix(portal_type)
    # Remove Local Prefix from the id before applying the new one
    rem_id = utils.get_id_from_path(remote_path)
    local_id = self.trim_local_prefix(rem_id)
    res = "{0}/{1}{2}".format(parent_path, prefix, local_id)
    res = res.replace(remote_portal_id, portal_id)
    # Save the local path in the Souper to use in the future
    self.sh.update_by_remote_path(remote_path, LOCAL_PATH=res)
    return str(res)
def is_portal_path(self, path):
    """ Check if the given path is the path of any portal object.
    :param path: a physical path (local or remote)
    :return: True if the path points at a portal root, False otherwise
    """
    # empty/missing path can never be a portal path
    if not path:
        return False
    # exact match with the local portal path
    if path == api.get_path(self.portal):
        return True
    # Can be portal path in remote: a portal root path has fewer than
    # three "/"-separated segments, e.g. "/site_id"
    return len(path.split("/")) < 3
def spotlight_search_route(context, request):
    """The spotlight search route

    Searches all configured catalogs and returns a mapping with the
    matched items and their count.
    """
    catalogs = [
        "portal_catalog",
        "bika_setup_catalog",
        "bika_catalog",
        # "bika_analysis_catalog"
    ]

    # collect the brains of all catalogs
    brains = []
    for catalog in catalogs:
        brains.extend(search(catalog=catalog))

    def get_state(brain):
        # not every brain provides a (string) review state
        state = getattr(brain, "review_state", "")
        if not isinstance(state, basestring):
            return ""
        return state

    def make_item(brain):
        # build the result record for a single brain
        icon = api.get_icon(brain)
        # avoid 404 errors with these guys
        if "document_icon.gif" in icon:
            icon = ""
        oid = api.get_id(brain)
        title = api.get_title(brain)
        return {
            "id": oid,
            "title": title,
            "title_or_id": title or oid,
            "description": api.get_description(brain),
            "uid": api.get_uid(brain),
            "path": api.get_path(brain),
            "url": api.get_url(brain),
            "state": get_state(brain),
            "icon": icon,
        }

    items = [make_item(brain) for brain in brains]

    return {
        "count": len(items),
        "items": items,
    }
def setupitems(self):
    """Lookup the setup items located directly below the setup folder

    Items flagged with `exclude_from_nav` are skipped; the rest are
    sorted by their translated title.

    :returns: catalog brains
    """
    query = {
        "path": {
            "query": api.get_path(self.setup),
            "depth": 1,
        },
    }
    brains = api.search(query, "portal_catalog")
    # filter out items excluded from the navigation
    visible = [brain for brain in brains if not brain.exclude_from_nav]
    # sort by the (translated) title
    return sorted(visible, key=lambda brain: t(api.get_title(brain)))
def _create_object_slug(self, container, data, *args, **kwargs):
    """Create a content object slug for the given data

    :param container: parent object the new object is created in
    :param data: mapping with at least "id", "remote_path" and
                 "portal_type"
    :returns: the created object (possibly renamed by an event handler),
              or None if no type info was found for the portal type
    """
    # NOTE: named "oid" to avoid shadowing the builtin `id`
    oid = data.get("id")
    remote_path = data.get("remote_path")
    portal_type = data.get("portal_type")
    types_tool = api.get_tool("portal_types")
    fti = types_tool.getTypeInfo(portal_type)
    if not fti:
        # remember the item so it can be reported/retried later
        self.skipped.append(remote_path)
        logger.error("Type Info not found for {}".format(portal_type))
        return None
    logger.debug("Creating {} with ID {} in parent path {}".format(
        portal_type, oid, api.get_path(container)))

    if fti.product:
        # old style Archetypes factory
        obj = _createObjectByType(portal_type, container, oid)
    else:
        # new style factory
        factory = getUtility(IFactory, fti.factory)
        obj = factory(oid, *args, **kwargs)
        if hasattr(obj, '_setPortalTypeName'):
            obj._setPortalTypeName(fti.getId())

    # notifies ObjectWillBeAddedEvent, ObjectAddedEvent and
    # ContainerModifiedEvent
    container._setObject(oid, obj)
    # we get the object here with the current object id, as it
    # might be renamed
    # already by an event handler
    obj = container._getOb(obj.getId())

    # Be sure that Creation Flag is Cleared.
    if obj.checkCreationFlag():
        obj.unmarkCreationFlag()

    return obj
def get_path(brain_or_object):
    """Proxy to senaite.api.get_path

    :param brain_or_object: a catalog brain or content object
    :returns: the physical path of the given object
    """
    return api.get_path(brain_or_object)
def import_data(self, domain):
    """Import the data from the storage identified by domain

    Runs in three phases: (1) create object slugs top-down by parent
    path, (2) commit, (3) update the created/existing objects bottom-up
    with the fetched data.

    :param domain: key identifying the storage to import from
    """
    logger.info("*** IMPORT DATA {} ***".format(domain))

    storage = self.get_storage(domain=domain)
    datastore = storage["data"]
    indexstore = storage["index"]
    uidmap = storage["uidmap"]
    credentials = storage["credentials"]

    # At some points api cannot retrieve objects by UID in the end of
    # creation process. Thus we keep them in an dictionary to access easily.
    objmap = {}

    # We will create objects from top to bottom, but will update from bottom
    # to up.
    ordered_uids = []

    # initialize a new session with the stored credentials for later requests
    username = credentials.get("username")
    password = credentials.get("password")
    self.session = self.get_session(username, password)
    logger.info("Initialized a new session for user {}".format(username))

    # Get UIDs grouped by their parent path
    ppaths = indexstore.get("by_parent_path")
    if ppaths is None:
        message = _(
            "No parent path info found in the import data. "
            "Please install senaite.jsonapi>=1.1.1 on the source instance "
            "and clear&refetch this storage")
        self.add_status_message(message, "warning")
        return

    # Import by paths from top to bottom (sorting the paths guarantees
    # parents are handled before their children)
    for ppath in sorted(ppaths):
        # nothing to do
        if not ppath:
            continue
        logger.info("Importing items for parent path {}".format(ppath))
        uids = ppaths[ppath]
        for uid in uids:
            ordered_uids.append(uid)
            # get the data for this uid
            data = datastore[uid]
            # check if the object exists in this instance
            remote_path = data.get("path")
            local_path = self.translate_path(remote_path)
            existing = self.portal.unrestrictedTraverse(
                str(local_path), None)

            if existing:
                # remember the UID -> object UID mapping for the update step
                uidmap[uid] = api.get_uid(existing)
                objmap[uid] = existing
            else:
                # get the container object by path
                container_path = self.translate_path(ppath)
                container = self.portal.unrestrictedTraverse(
                    str(container_path), None)
                # create an object slug in this container
                obj = self.create_object_slug(container, data)
                # remember the UID -> object UID mapping for the update step
                uidmap[uid] = api.get_uid(obj)
                objmap[uid] = obj

    # When creation process is done, commit the transaction to avoid
    # ReferenceField relation problems.
    transaction.commit()

    # UIDs were added from up to bottom. Reverse the list to update objects
    # from bottom to up.
    ordered_uids.reverse()

    # Update all objects with the given data
    for uid in ordered_uids:
        obj = objmap.get(uid, None)
        if obj is None:
            logger.warn("Object not found: {} ".format(uid))
            continue
        logger.info("Update object {} with import data".format(
            api.get_path(obj)))
        self.update_object_with_data(obj, datastore[uid], domain)

    self.reindex_updated_objects()