class BookingManagerConferenceIndex(Persistent):
    """Persistent index mapping a conference id to its CSBookingManager.

    Backed by an OOBTree so it can live inside the ZODB.
    """

    def __init__(self):
        self._tree = OOBTree()
        self._name = "bookingManagerByConf"

    def initialize(self, dbi=None):
        """No-op: this index variant is populated incrementally via index()."""
        pass

    def getName(self):
        """Return the canonical name of this index."""
        return self._name

    def index(self, conf, csbm):
        """Register `csbm` under `conf`, keeping any existing entry.

        Uses the `in` operator instead of the deprecated BTree `has_key`.
        """
        if conf not in self._tree:
            self._tree[conf] = csbm
            # Explicitly flag the BTree as dirty for ZODB persistence.
            self._tree._p_changed = 1

    def unindex(self, conf):
        """Remove the entry for `conf` (raises KeyError if absent)."""
        del self._tree[conf]
        self._tree._p_changed = 1

    def get(self, conf):
        """Return the CSBookingManager for `conf`, or None if not indexed."""
        return self._tree.get(conf, None)

    def dump(self):
        """Return the whole index as a list of (conference, manager) pairs."""
        return [(k, s) for k, s in self._tree.iteritems()]
class BookingManagerConferenceIndex(Persistent):
    """Persistent index mapping a conference id to its CSBookingManager.

    Backed by an OOBTree so it can live inside the ZODB.
    """

    def __init__(self):
        self._tree = OOBTree()
        self._name = "bookingManagerByConf"

    def initialize(self, dbi=None):
        """Populate the index from every conference in the ConferenceHolder.

        Reuses a conference's existing `_CSBookingManager` when present,
        otherwise creates a fresh CSBookingManager. Commits in batches of
        1000 when a database interface `dbi` is provided, to bound the
        transaction size.
        """
        for i, conf in enumerate(ConferenceHolder()._getIdx().itervalues()):
            # Store CSBookingManager in the index
            csbm = getattr(conf, "_CSBookingManager", None)
            if csbm is None:
                csbm = CSBookingManager(conf)
            self.index(conf.getId(), csbm)
            if dbi and i % 1000 == 999:
                dbi.commit()
        if dbi:
            dbi.commit()

    def getName(self):
        """Return the canonical name of this index."""
        return self._name

    def index(self, conf, csbm):
        """Register `csbm` under `conf`, keeping any existing entry.

        Uses the `in` operator instead of the deprecated BTree `has_key`.
        """
        if conf not in self._tree:
            self._tree[conf] = csbm
            # Explicitly flag the BTree as dirty for ZODB persistence.
            self._tree._p_changed = 1

    def unindex(self, conf):
        """Remove the entry for `conf` (raises KeyError if absent)."""
        del self._tree[conf]
        self._tree._p_changed = 1

    def get(self, conf):
        """Return the CSBookingManager for `conf`, or None if not indexed."""
        return self._tree.get(conf, None)

    def dump(self):
        """Return the whole index as a list of (conference, manager) pairs."""
        return [(k, s) for k, s in self._tree.iteritems()]
class BookingManagerConferenceIndex(Persistent):
    """Conference-id -> CSBookingManager lookup, stored in an OOBTree."""

    def __init__(self):
        self._name = "bookingManagerByConf"
        self._tree = OOBTree()

    def initialize(self, dbi=None):
        """Fill the index with one CSBookingManager per known conference,
        committing every 1000 conferences when `dbi` is given."""
        processed = 0
        for conf in ConferenceHolder()._getIdx().itervalues():
            # Prefer the manager already attached to the conference.
            manager = getattr(conf, "_CSBookingManager", None)
            if manager is None:
                manager = CSBookingManager(conf)
            self.index(conf.getId(), manager)
            processed += 1
            if dbi and processed % 1000 == 0:
                dbi.commit()
        if dbi:
            dbi.commit()

    def getName(self):
        """Name under which this index is registered."""
        return self._name

    def index(self, conf, csbm):
        """Store `csbm` for `conf`; an existing entry is never overwritten."""
        if conf in self._tree:
            return
        self._tree[conf] = csbm
        # Mark the tree dirty so ZODB persists the change.
        self._tree._p_changed = 1

    def unindex(self, conf):
        """Drop the entry for `conf`."""
        del self._tree[conf]
        self._tree._p_changed = 1

    def get(self, conf):
        """Manager registered for `conf`, or None."""
        return self._tree.get(conf, None)

    def dump(self):
        """All (conference, manager) pairs as a list."""
        return list(self._tree.iteritems())
class DeviceNetworksCache(object):
    """
    Data structure used to store the networks devices belong to.

    OOBTree
        Key:   Device.id
        Value: OOBTree
            Key:   Network
            Value: number of ip addresses that belong to that network
    """

    def __init__(self):
        self.cache = OOBTree()

    def add_device_network(self, device_id, network_id):
        """
        Increment the address count for (device, network), creating the
        per-device tree on first use.

        device_id  = Device.getId()
        network_id = IpNetwork.getPrimaryUrlPath()
        """
        device_dict = self.cache.get(device_id)
        if device_dict is None:
            device_dict = OOBTree()
            self.cache[device_id] = device_dict
        device_dict[network_id] = device_dict.get(network_id, 0) + 1

    def remove_device_network(self, device_id, network_id):
        """
        Decrement the address count for (device, network); the network entry
        is removed once its count reaches zero.

        device_id  = Device.getId()
        network_id = IpNetwork.getPrimaryUrlPath()
        """
        device_dict = self.cache.get(device_id)
        # `in` replaces the deprecated has_key; single lookup per key.
        if device_dict and network_id in device_dict:
            network_value = device_dict[network_id] - 1
            if network_value > 0:
                device_dict[network_id] = network_value
            else:
                del device_dict[network_id]

    def remove_device(self, device_id):
        """Drop the whole per-device tree (no-op when absent or empty)."""
        if self.cache.get(device_id):
            del self.cache[device_id]

    def get_device_networks(self, device_id):
        """Return the set of network ids the device belongs to."""
        device_dict = self.cache.get(device_id)
        if device_dict:
            return set(device_dict.keys())
        return set()

    def __str__(self):
        # Build with join instead of repeated += (quadratic string growth).
        parts = []
        for dev, nets in self.cache.iteritems():
            parts.append("{0} => {1}\n".format(dev, len(nets.keys())))
            for net in nets.keys():
                parts.append("\t{0}\n".format(net))
        return "".join(parts)
class CategoryDayIndex(CategoryDateIndex):
    """Per-category, per-day index of conferences.

    Each category id maps to a CalendarDayIndex; a conference is indexed
    under every category of its owner path, optionally limited by its
    "full visibility" level.
    """

    def __init__(self, visibility=True):
        super(CategoryDayIndex, self).__init__()
        # When True, indexing stops once the conference's visibility level
        # is exceeded while walking up the owner path.
        self._useVisibility = visibility

    def _indexConf(self, categid, conf):
        # only the more restrictive setup is taken into account
        if categid in self._idxCategItem:
            res = self._idxCategItem[categid]
        else:
            res = CalendarDayIndex()
        res.indexConf(conf)
        # Reassign so the containing OOBTree registers the change.
        self._idxCategItem[categid] = res

    def reindexConf(self, conf):
        """Remove and re-add a conference (e.g. after a date change)."""
        self.unindexConf(conf)
        self.indexConf(conf)

    def indexConf(self, conf):
        """Index `conf` under each owning category allowed by visibility;
        "0" is the root category."""
        level = 0
        for categ in conf.getOwnerPath():
            if not self._useVisibility or conf.getFullVisibility() > level:
                self._indexConf(categ.getId(), conf)
            level += 1
        if not self._useVisibility or conf.getFullVisibility() > level:
            self._indexConf("0", conf)

    def buildIndex(self, dbi):
        """Rebuild the whole index from the root category downwards."""
        self._idxCategItem = OOBTree()
        # Imported here to avoid a circular import at module load time.
        from MaKaC.conference import CategoryManager
        self.indexCateg(CategoryManager().getById('0'), dbi=dbi)

    def getObjectsInDays(self, categid, sDate, eDate):
        """Objects for `categid` between the two dates, or [] when the
        category is not indexed (membership check unified with
        iterateObjectsIn; `has_key` is deprecated)."""
        if categid in self._idxCategItem:
            return self._idxCategItem[categid].getObjectsInDays(sDate, eDate)
        return []

    def iterateObjectsIn(self, categid, sDate, eDate):
        """Iterator over objects for `categid` between the two dates."""
        if categid in self._idxCategItem:
            return self._idxCategItem[categid].iterateObjectsIn(sDate, eDate)
        return []

    def _check(self, dbi=None):
        """ Performs some sanity checks """
        for categId, calDayIdx in self._idxCategItem.iteritems():
            for problem in calDayIdx._check(dbi=dbi, categId=categId):
                yield problem
class CollaborationIndex(Persistent):
    """Persistent registry of BookingsIndex objects, keyed by index name."""

    def __init__(self):
        self._indexes = OOBTree()

    def getAllBookingsIndex(self):
        """Shortcut for the catch-all "all" index."""
        return self.getIndex("all")

    def getIndex(self, name):
        """Return the index called `name`, creating it lazily on first use."""
        try:
            return self._indexes[name]
        except KeyError:
            index = BookingsIndex(name)
            self._indexes[name] = index
            return index

    def _getBookingInstancesByDate(self, index, dateFormat, fromDate=None, toDate=None):
        """Group booking instances by date and return
        ([(formatted_date, [bookings])...] sorted by date, number_of_dates)."""
        bookings = defaultdict(list)
        for dt, bkw in Catalog.getIdx("cs_booking_instance")[index].iter_bookings(fromDate, toDate):
            bookings[dt].append(bkw)
        return list((dt.strftime(dateFormat), bkws)
                    for (dt, bkws) in sorted(bookings.iteritems())), len(bookings)

    def getBookings(self, indexName, viewBy, orderBy, minKey, maxKey, tz="UTC",
                    onlyPending=False, conferenceId=None, categoryId=None,
                    pickle=False, dateFormat=None, page=None, resultsPerPage=None,
                    grouped=False):
        """Query an index and return a QueryResult.

        `viewBy` selects the grouping (conference title/start date, instance
        date, or a plain date index); `page`/`resultsPerPage` paginate the
        groups; `pickle` returns a fossilized representation instead.
        """
        # TODO: Use iterators instead of lists
        if onlyPending:
            # Pending bookings live in a parallel "<name>_pending" index.
            indexName += "_pending"
        reverse = orderBy == "descending"
        try:
            index = self.getIndex(indexName)
            totalInIndex = index.getCount()
            # Short-circuit when the requested category/conference is unknown.
            if (categoryId and not CategoryManager().hasKey(categoryId)
                    or conferenceId and not ConferenceHolder().hasKey(conferenceId)):
                finalResult = QueryResult([], 0, 0, totalInIndex, 0)
            else:
                if viewBy == "conferenceTitle":
                    items, nBookings = index.getBookingsByConfTitle(minKey, maxKey, conferenceId, categoryId)
                elif viewBy == "conferenceStartDate":
                    items, nBookings = index.getBookingsByConfDate(
                        minKey, maxKey, conferenceId, categoryId, tz, dateFormat, grouped=grouped)
                elif viewBy == "instanceDate":
                    items, nBookings = self._getBookingInstancesByDate(indexName, dateFormat, minKey, maxKey)
                else:
                    items, nBookings = index.getBookingsByDate(
                        viewBy, minKey, maxKey, tz, conferenceId, categoryId, dateFormat)
                if reverse:
                    items.reverse()
                nGroups = len(items)
                if page:
                    page = int(page)
                    if resultsPerPage:
                        resultsPerPage = int(resultsPerPage)
                    else:
                        resultsPerPage = 10
                    # Explicit floor division: `/` relied on Python 2 integer
                    # division and would produce a float page count on Python 3.
                    nPages = nGroups // resultsPerPage
                    if nGroups % resultsPerPage > 0:
                        nPages = nPages + 1
                    if page > nPages:
                        finalResult = QueryResult([], 0, 0, totalInIndex, nPages)
                    else:
                        finalResult = QueryResult(
                            items[(page - 1) * resultsPerPage:page * resultsPerPage],
                            nBookings, nGroups, totalInIndex, nPages)
                else:
                    finalResult = QueryResult(items, nBookings, nGroups, totalInIndex, 0)
        except KeyError:
            # Index never created: warn and behave as an empty result.
            Logger.get("VideoServ").warning(
                "Tried to retrieve index with name " + indexName +
                " but the index did not exist. Maybe no bookings have been added to it yet")
            finalResult = QueryResult([], 0, 0, 0, 0)
        if CollaborationTools.hasCollaborationOption("verifyIndexingResults") \
                and CollaborationTools.getCollaborationOptionValue("verifyIndexingResults"):
            finalResult.purgeNonExistingBookings()
        if pickle:
            # ATTENTION: this call silently changes the fossil map
            CollaborationTools.updateIndexingFossilsDict()
            return fossilize(finalResult, IQueryResultFossil, tz=tz)
        else:
            return finalResult

    def dump(self):
        """Return every index as a (name, dumped-contents) pair."""
        return [(k, v.dump()) for k, v in self._indexes.iteritems()]

    def cleanAll(self):
        """ Wipes out everything """
        CollaborationIndex.__init__(self)
        for pluginInfo in CollaborationTools.getCollaborationPluginType().getOption("pluginsPerIndex").getValue():
            self._indexes[pluginInfo.getName()] = BookingsIndex(pluginInfo.getName())

    def indexAll(self, index_names=None, dbi=None):
        """ Indexes all the bookings from all the conferences
            WARNING: obviously, this can potentially take a while """
        i = 0
        for cid, conf in ConferenceHolder()._getIdx().iteritems():
            csbm = Catalog.getIdx("cs_bookingmanager_conference").get(conf.getId())
            if csbm is None:
                continue
            # note: probably not the most efficient implementation since _indexBooking is getting the list
            # of indexes where each booking should be indexed on every iteration
            for booking in csbm.getBookingList():
                csbm._indexBooking(booking, index_names=index_names)
            i += 1
            # Commit in batches of 1000 conferences to bound transaction size.
            if dbi and i % 1000 == 999:
                dbi.commit()
        if dbi:
            dbi.commit()

    def reindexAll(self, dbi=None):
        """ Cleans the indexes, and then indexes all the bookings from all
            the conferences.
            WARNING: obviously, this can potentially take a while """
        self.cleanAll()
        self.indexAll(dbi=dbi)
class CollaborationIndex(Persistent):
    """Persistent registry of BookingsIndex objects, keyed by index name."""

    def __init__(self):
        self._indexes = OOBTree()

    def getAllBookingsIndex(self):
        # Shortcut for the catch-all "all" index.
        return self.getIndex("all")

    def getIndex(self, name):
        # Lazily create the index on first access (EAFP style).
        try:
            return self._indexes[name]
        except KeyError:
            index = BookingsIndex(name)
            self._indexes[name] = index
            return index

    def _getBookingInstancesByDate(self, index, dateFormat, fromDate=None, toDate=None):
        # Group booking instances by date; return the groups sorted by date
        # (with the date formatted via dateFormat) plus the number of groups.
        bookings = defaultdict(list)
        for dt, bkw in Catalog.getIdx(
                'cs_booking_instance')[index].iter_bookings(fromDate, toDate):
            bookings[dt].append(bkw)
        return list(
            (dt.strftime(dateFormat), bkws)
            for (dt, bkws) in sorted(bookings.iteritems())), len(bookings)

    def getBookings(self, indexName, viewBy, orderBy, minKey, maxKey, tz='UTC',
                    onlyPending=False, conferenceId=None, categoryId=None,
                    pickle=False, dateFormat=None, page=None,
                    resultsPerPage=None, grouped=False):
        """Query the given index and return a QueryResult.

        `viewBy` selects the grouping strategy; `page`/`resultsPerPage`
        paginate the resulting groups; `pickle=True` returns a fossilized
        representation instead of the raw QueryResult.
        """
        # TODO: Use iterators instead of lists
        if onlyPending:
            # Pending bookings live in a parallel "<name>_pending" index.
            indexName += "_pending"
        reverse = orderBy == "descending"
        try:
            index = self.getIndex(indexName)
            totalInIndex = index.getCount()
            # Short-circuit when the requested category/conference is unknown.
            if categoryId and not CategoryManager().hasKey(categoryId) or conferenceId and \
                    not ConferenceHolder().hasKey(conferenceId):
                finalResult = QueryResult([], 0, 0, totalInIndex, 0)
            else:
                if viewBy == "conferenceTitle":
                    items, nBookings = index.getBookingsByConfTitle(
                        minKey, maxKey, conferenceId, categoryId)
                elif viewBy == "conferenceStartDate":
                    items, nBookings = index.getBookingsByConfDate(
                        minKey, maxKey, conferenceId, categoryId, tz,
                        dateFormat, grouped=grouped)
                elif viewBy == "instanceDate":
                    items, nBookings = self._getBookingInstancesByDate(
                        indexName, dateFormat, minKey, maxKey)
                else:
                    items, nBookings = index.getBookingsByDate(
                        viewBy, minKey, maxKey, tz, conferenceId, categoryId,
                        dateFormat)
                if reverse:
                    items.reverse()
                nGroups = len(items)
                if page:
                    page = int(page)
                    if resultsPerPage:
                        resultsPerPage = int(resultsPerPage)
                    else:
                        resultsPerPage = 10
                    # NOTE(review): this relies on Python 2 integer division
                    # for `/`; confirm (and switch to //) before any py3 port.
                    nPages = nGroups / resultsPerPage
                    if nGroups % resultsPerPage > 0:
                        nPages = nPages + 1
                    if page > nPages:
                        finalResult = QueryResult([], 0, 0, totalInIndex, nPages)
                    else:
                        finalResult = QueryResult(
                            items[(page - 1) * resultsPerPage:page * resultsPerPage],
                            nBookings, nGroups, totalInIndex, nPages)
                else:
                    finalResult = QueryResult(items, nBookings, nGroups, totalInIndex, 0)
        except KeyError:
            # Index was never created — warn and behave as an empty result.
            Logger.get("VideoServ").warning(
                "Tried to retrieve index with name " + indexName +
                " but the index did not exist. Maybe no bookings have been added to it yet"
            )
            finalResult = QueryResult([], 0, 0, 0, 0)
        if CollaborationTools.hasCollaborationOption(
                "verifyIndexingResults"
        ) and CollaborationTools.getCollaborationOptionValue(
                "verifyIndexingResults"):
            finalResult.purgeNonExistingBookings()
        if pickle:
            # ATTENTION: this call silently changes the fossil map
            CollaborationTools.updateIndexingFossilsDict()
            return fossilize(finalResult, IQueryResultFossil, tz=tz)
        else:
            return finalResult

    def dump(self):
        # Flatten every index into (name, dumped-contents) pairs.
        return [(k, v.dump()) for k, v in self._indexes.iteritems()]

    def cleanAll(self):
        """ Wipes out everything """
        CollaborationIndex.__init__(self)
        for pluginInfo in CollaborationTools.getCollaborationPluginType(
        ).getOption("pluginsPerIndex").getValue():
            self._indexes[pluginInfo.getName()] = BookingsIndex(
                pluginInfo.getName())

    def indexAll(self, index_names=None, dbi=None):
        """ Indexes all the bookings from all the conferences
            WARNING: obviously, this can potentially take a while """
        i = 0
        for cid, conf in ConferenceHolder()._getIdx().iteritems():
            csbm = Catalog.getIdx("cs_bookingmanager_conference").get(
                conf.getId())
            if csbm is None:
                continue
            #note: probably not the most efficient implementation since _indexBooking is getting the list
            # of indexes where each booking should be indexed on every iteration
            for booking in csbm.getBookingList():
                csbm._indexBooking(booking, index_names=index_names)
            i += 1
            # Commit in batches of 1000 conferences to bound transaction size.
            if dbi and i % 1000 == 999:
                dbi.commit()
        if dbi:
            dbi.commit()

    def reindexAll(self, dbi=None):
        """ Cleans the indexes, and then indexes all the bookings from all
            the conferences.
            WARNING: obviously, this can potentially take a while """
        self.cleanAll()
        self.indexAll(dbi=dbi)
class CollaborationIndex(Persistent):
    """Persistent registry of BookingsIndex objects, keyed by index name."""

    def __init__(self):
        self._indexes = OOBTree()

    def getAllBookingsIndex(self):
        """Shortcut for the catch-all "all" index."""
        return self.getIndex("all")

    def getIndex(self, name):
        """Return the index called `name`, creating it lazily on first use."""
        try:
            return self._indexes[name]
        except KeyError:
            index = BookingsIndex(name)
            self._indexes[name] = index
            return index

    def getBookings(self, indexName, viewBy, orderBy, minKey, maxKey, tz='UTC',
                    onlyPending=False, conferenceId=None, categoryId=None,
                    pickle=False, dateFormat=None, page=None,
                    resultsPerPage=None, grouped=False):
        """Query the given index and return a QueryResult.

        `viewBy` selects the grouping strategy; `page`/`resultsPerPage`
        paginate the resulting groups; `pickle=True` returns a fossilized
        representation instead of the raw QueryResult.
        """
        # TODO: Use iterators instead of lists
        if onlyPending:
            # Pending bookings live in a parallel "<name>_pending" index.
            indexName += "_pending"
        reverse = orderBy == "descending"
        try:
            index = self.getIndex(indexName)
            totalInIndex = index.getCount()
            # Short-circuit when the requested category/conference is unknown.
            if categoryId and not CategoryManager().hasKey(categoryId) or conferenceId and \
                    not ConferenceHolder().hasKey(conferenceId):
                finalResult = QueryResult([], 0, 0, totalInIndex, 0)
            else:
                if viewBy == "conferenceTitle":
                    items, nBookings = index.getBookingsByConfTitle(minKey, maxKey, conferenceId, categoryId)
                elif viewBy == "conferenceStartDate":
                    items, nBookings = index.getBookingsByConfDate(
                        minKey, maxKey, conferenceId, categoryId, tz, dateFormat, grouped=grouped)
                else:
                    items, nBookings = index.getBookingsByDate(
                        viewBy, minKey, maxKey, tz, conferenceId, categoryId, dateFormat)
                if reverse:
                    items.reverse()
                nGroups = len(items)
                if page:
                    page = int(page)
                    if resultsPerPage:
                        resultsPerPage = int(resultsPerPage)
                    else:
                        resultsPerPage = 10
                    # Explicit floor division: `/` relied on Python 2 integer
                    # division and would produce a float page count on Python 3.
                    nPages = nGroups // resultsPerPage
                    if nGroups % resultsPerPage > 0:
                        nPages = nPages + 1
                    if page > nPages:
                        finalResult = QueryResult([], 0, 0, totalInIndex, nPages)
                    else:
                        finalResult = QueryResult(
                            items[(page - 1) * resultsPerPage:page * resultsPerPage],
                            nBookings, nGroups, totalInIndex, nPages)
                else:
                    finalResult = QueryResult(items, nBookings, nGroups, totalInIndex, 0)
        except KeyError:
            Logger.get("VideoServ").warning(
                "Tried to retrieve index with name " + indexName +
                " but the index did not exist. Maybe no bookings have been added to it yet")
            # BUGFIX: QueryResult is called with 5 arguments everywhere else
            # in this class; the missing nPages argument here would raise a
            # TypeError whenever the index lookup failed.
            finalResult = QueryResult([], 0, 0, 0, 0)
        if CollaborationTools.hasCollaborationOption("verifyIndexingResults") \
                and CollaborationTools.getCollaborationOptionValue("verifyIndexingResults"):
            finalResult.purgeNonExistingBookings()
        if pickle:
            # ATTENTION: this call silently changes the fossil map.
            CollaborationTools.updateIndexingFossilsDict()
            return fossilize(finalResult, IQueryResultFossil, tz=tz)
        else:
            return finalResult

    def dump(self):
        """Return every index as a (name, dumped-contents) pair."""
        return [(k, v.dump()) for k, v in self._indexes.iteritems()]

    def cleanAll(self):
        """ Wipes out everything """
        CollaborationIndex.__init__(self)
        for pluginInfo in CollaborationTools.getCollaborationPluginType().getOption("pluginsPerIndex").getValue():
            self._indexes[pluginInfo.getName()] = BookingsIndex(pluginInfo.getName())

    def indexAll(self):
        """ Indexes all the bookings from all the conferences
            WARNING: obviously, this can potentially take a while """
        for conf in ConferenceHolder().getList():
            csbm = conf.getCSBookingManager()
            # note: probably not the most efficient implementation since _indexBooking is getting the list
            # of indexes where each booking should be indexed on every iteration
            for booking in csbm.getBookingList():
                csbm._indexBooking(booking)

    def reindexAll(self):
        """ Cleans the indexes, and then indexes all the bookings from all
            the conferences.
            WARNING: obviously, this can potentially take a while """
        self.cleanAll()
        self.indexAll()
class CategoryIndex(Persistent):
    """Maps a category id (str) to the list of conference ids visible under it."""

    def __init__(self):
        self._idxCategItem = OOBTree()

    def dump(self):
        """Return the whole index as a list of (category id, conf ids) pairs."""
        return list(self._idxCategItem.items())

    def _indexConfById(self, categid, confid):
        # only the more restrictive setup is taken into account
        categid = str(categid)
        res = self._idxCategItem.get(categid, [])
        res.append(confid)
        # Reassign so the OOBTree registers the change (plain lists are not
        # persistence-aware).
        self._idxCategItem[categid] = res

    def unindexConf(self, conf):
        """Remove a conference object from every category bucket."""
        confid = str(conf.getId())
        self.unindexConfById(confid)

    def unindexConfById(self, confid):
        """Remove a conference id from every category bucket containing it."""
        for categid in self._idxCategItem.keys():
            if confid in self._idxCategItem[categid]:
                res = self._idxCategItem[categid]
                res.remove(confid)
                # Reassign to flag the change to the OOBTree.
                self._idxCategItem[categid] = res

    def reindexCateg(self, categ):
        """Recursively reindex every conference below `categ`."""
        for subcat in categ.getSubCategoryList():
            self.reindexCateg(subcat)
        for conf in categ.getConferenceList():
            self.reindexConf(conf)

    def reindexConf(self, conf):
        """Remove and re-add a conference."""
        self.unindexConf(conf)
        self.indexConf(conf)

    def indexConf(self, conf):
        """Index `conf` under each category of its owner path, up to the
        level allowed by its visibility; "0" is the root category."""
        level = 0
        for categ in conf.getOwnerPath():
            if conf.getFullVisibility() > level:
                self._indexConfById(categ.getId(), conf.getId())
            level += 1
        if conf.getFullVisibility() > level:
            self._indexConfById("0", conf.getId())

    def getItems(self, categid):
        """Return the conference ids indexed under `categid`, or []."""
        categid = str(categid)
        if categid in self._idxCategItem:
            return self._idxCategItem[categid]
        return []

    def _check(self, dbi=None):
        """ Performs some sanity checks """
        i = 0
        # Imported here to avoid a circular import at module load time.
        from MaKaC.conference import ConferenceHolder
        confIdx = ConferenceHolder()._getIdx()
        for cid, confs in self._idxCategItem.iteritems():
            for confId in confs:
                # it has to be in the conference holder
                if confId not in confIdx:
                    yield "[%s] '%s' not in ConferenceHolder" % (cid, confId)
                # the category has to be one of the owners
                elif cid not in ([x.id for x in ConferenceHolder().getById(confId).getOwnerPath()] + ['0']):
                    yield "[%s] Conference '%s' is not owned" % (cid, confId)
                # Periodically sync to keep the connection cache small.
                if dbi and i % 100 == 99:
                    dbi.sync()
                i += 1
class MultiPointerTrack(Persistent):
    """
    A MultiPointerTrack is a kind of structure that is based on an IOBTree, where
    each entry contains an ordered set (or list, depending on the implementation)
    of elements. Then, several "pointers" can be created, which point to different
    positions of the track (very much like runners in a race track).
    This class is abstract, implementations should be derived.
    """

    def __init__(self, elemContainer):
        self._container = OOBTree()
        self._pointers = PersistentMapping()
        self._elemContainer = elemContainer
        # initialize first entry
        #self._container[timestamp(0)] = elemContainer()

    def addPointer(self, pid, startPos=None):
        """ Registers a new pointer, optionally moving it to `startPos` """
        self._pointers[pid] = None
        if startPos:
            self.movePointer(pid, startPos)

    def removePointer(self, pid):
        """ Removes a pointer from the list """
        del self._pointers[pid]

    def prepareEntry(self, ts):
        """ Creates an empty sub-structure (elemContainer) for a given timestamp """
        self._container[timestamp(ts)] = self._elemContainer()

    def getCurrentPosition(self, pid):
        """ Returns the current entry (set/list) for a given pointer id """
        currentPos = self._pointers[pid]
        # TODO: assertion? check?
        return self._container[timestamp(currentPos)]

    def getPointerTimestamp(self, pid):
        """ Gets the current 'position' of a pointer (id) """
        return self._pointers[pid]

    def __getitem__(self, ts):
        """ Implements __getitem__, so that mpt[timestamp] works """
        if isinstance(ts, slice):
            return self._getSlice(ts)
        return self._container[timestamp(ts)]

    def _getSlice(self, s):
        """ Calculates a slice of the structure (timestamp-wise) """
        # `is not None` instead of `!= None` (identity check for None).
        if s.step is not None:
            raise TypeError('Extended slices are not accepted here')
        return self._container.values(s.start, s.stop)

    def values(self, *args):
        """ Return values or ranges (timestamps) of the structure """
        # Convert boundary arguments, leaving None (open bound) untouched.
        fargs = [None if a is None else timestamp(a) for a in args]
        return self._container.values(*fargs)

    def _append(self, ts, val):
        """ Should be overloaded by concrete implementations. """
        # NotImplementedError (an Exception subclass) marks the abstract hook
        # explicitly; existing `except Exception` handlers still catch it.
        raise NotImplementedError("Unimplemented method")

    def add(self, intTS, value):
        """ Adds a value to the container corresponding to a specific timestamp """
        ts = timestamp(intTS)
        if ts not in self._container:
            self.prepareEntry(intTS)
        self._append(ts, value)

    def _pointerIterator(self, pid, func, till=None):
        """ Iterates over the positions that are left (till the end of the
            track) for a given pointer (id). Takes a function that is applied
            to yielded values """
        return self.iterate(self._pointers[pid], till, func)

    def iterate(self, fromPos=None, till=None, func=(lambda x: x)):
        """ Generator that iterates through the data structure """
        if till is not None:
            till = timestamp(till)
            # negative numbers mean "last but one", "last but two", etc...
            if till == timestamp(-1):  # most common case
                till = self._container.maxKey() - timestamp(1)
        if fromPos is not None:
            fromPos = timestamp(fromPos)
        for ts, entry in self._container.iteritems(till, fromPos):
            if fromPos and ts == fromPos:
                # stop immediately if we're past fromPos
                # BUGFIX: `return`, not `raise StopIteration` — PEP 479 turns
                # StopIteration raised inside a generator into a RuntimeError.
                return
            for elem in entry:
                yield func((int(ts), elem))

    def mostRecentTS(self, maximum=None):
        """ Returns most recent timestamp in track (minimum key)
            If 'maximum' is provided, return it if less recent """
        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()
        mr = self._container.minKey()
        if maximum:
            maximum = timestamp(maximum)
            # in timestamp logic, max() returns the oldest
            return max(mr, maximum)
        return mr

    def oldestTS(self):
        """ Returns least recent timestamp in track (maximum key) """
        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()
        return self._container.maxKey()

    def pointerIterValues(self, pid, till=None):
        """ Iterates over the positions that are left (till the end of the
            track) for a given pointer (id) - iterates over values """
        return self._pointerIterator(pid, lambda x: x[1], till=till)

    def pointerIterItems(self, pid, till=None):
        """ Iterates over the positions that are left (till the end of the
            track) for a given pointer (id) - iterates over key-value pairs
            (iteritems) """
        return self._pointerIterator(pid, lambda x: x, till=till)

    def movePointer(self, pid, pos):
        """ Moves a given pointer (id) to a given timestamp """
        if pid not in self._pointers:
            raise KeyError("Pointer '%s' doesn't seem to exist!" % pid)
        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()
        self._pointers[pid] = pos

    def __len__(self):
        """ Returns the number of timestamp entries """
        return len(self._container)

    def __delitem__(self, item):
        """ Deletes a given timestamp entry (or range) """
        self._container.__delitem__(item)

    def __iter__(self):
        """ Iterates over the whole structure, element by element (goes
            inside containers) """
        return self.iterate()

    def __contains__(self, ts):
        return timestamp(ts) in self._container
class Folder(Item):
    """A container Item holding child Items in an OOBTree keyed by name."""

    key_type = 'string'

    def __init__(self, parent=None, name=None, title=None, description=None, **kwargs):
        super(Folder, self).__init__(parent=parent, name=name, title=title,
                                     description=description, **kwargs)
        self.__items = OOBTree()
        self.__items_len = Length()

    @property
    def is_folder(self):
        return True

    def allow_delete_item(self, name):
        # Default policy: any child may be deleted; subclasses may override.
        return True

    def _item_attach(self, item):
        """
        internal method to attach item to this folder items collection
        @param item: Item
        @return: boolean
        """
        # ensure that this item has no parent
        if isinstance(item, Item) and item.parent is None:
            # after this the parent of item will be my self
            self.__items[item.name] = item
            item_attached = item.inform_parent_attached(self)
            if item_attached:
                self.__items_len.change(1)
                return True
            # roll back the insertion if the item refused the attachment
            del self.__items[item.name]
            return False
        return False

    def _item_detach(self, item):
        """
        internal method to detach item from this folder items collection
        @param item: Item
        @return: boolean
        """
        # ensure that i am the parent of this item
        if isinstance(item, Item) and item.parent == self:
            # after this the item will have no parent
            del self.__items[item.name]
            item_detached = item.inform_parent_detached()
            if item_detached:
                self.__items_len.change(-1)
                return True
            # roll back the removal if the item refused the detachment
            self.__items[item.name] = item
            return False
        return False

    def allow_add_item(self, item, overwrite=False, raise_err=False):
        """
        Check whether i can add item to this folder items collection
        @param item: Item , any object instance of Item
        @param overwrite: boolean , whether i have to overwrite any existing item with item.name
        @param raise_err: boolean , whether i have to raise Exception in case of not acceptance
        @return: boolean
        """
        log_context = 'allow_add_item'
        if not isinstance(item, Item):
            message = 'expected type: Item received: %s' % type(item)
            if raise_err:
                raise TypeError(message)
            log_debug(message, log_context)
            return False
        item_name = item.name
        existing_item = self.get_item(item_name)
        if existing_item:
            # verify that i can delete the existing item before overwriting
            if not overwrite:
                message = 'item: %s already exist' % item_name
                if raise_err:
                    raise ErrorAlreadyExist(message)
                log_debug(message, log_context)
                return False
            if not self.allow_delete_item(item_name):
                message = 'folder "%s" do not allow item:"%s" to be deleted' % (self, item.name)
                if raise_err:
                    raise ErrorDeleteNotAllowed(message)
                log_debug(message, log_context)
                return False
            # BUGFIX: allow_delete is called as a method in delete_item();
            # testing the bare attribute was always truthy, so this guard
            # never fired.
            if not existing_item.allow_delete():
                if raise_err:
                    raise ErrorItemNotDeleteAble('old item:"%s" is not delete-able' % item.name)
                log_debug('old item:"%s" is not delete-able' % item.name, log_context)
                return False
        return True

    def add_item(self, item, overwrite=False):
        """
        Add item to this folder items collection
        @param item: Item, any object instance of Item
        @param overwrite: boolean, whether i have to overwrite any existing item
        @return: boolean
        """
        if self.allow_add_item(item, overwrite=overwrite, raise_err=True):
            item_name = item.name
            if self.has_item(item_name):
                old_item = self.get_item(item_name)
                self._item_detach(old_item)
            if self.__items is None:
                self.__items = OOBTree()
            self._item_attach(item)
            self.mark_changed()
            return True

    def has_item(self, name):
        """
        Check whether this folder has an item by this name
        @param name: str
        @return: boolean
        """
        return name in self.__items

    def has_items(self):
        """
        check whether the folder has any item
        @return: boolean
        """
        return bool(self.__items_len())

    def __contains__(self, name):
        """
        the same as has_item, so that `name in folder` /
        `name not in folder` work
        @param name: string
        @return: boolean
        """
        return self.has_item(name)

    def __iter__(self):
        return iter(self.get_items())

    def get_item(self, name):
        """
        Return the item associated with this name in this folder collection,
        or None when it does not exist
        @param name: string
        @return: Item
        """
        item = self.__items.get(name, None)
        if item is None:
            log_debug('item %s do not exist' % name, 'get_item')
        return item

    def delete_item(self, name):
        """
        Detach and return the named item, or None when absent.
        @param name: str
        @return: Item
        """
        if self.has_item(name):
            if not self.allow_delete_item(name):
                raise ErrorDeleteNotAllowed('folder "%s" do not allow item:"%s" to be deleted' % (self, name))
            item = self.get_item(name)
            if not item.allow_delete():
                raise ErrorItemNotDeleteAble('item:"%s" do not allow to be deleted' % name)
            self._item_detach(item)
            self.mark_changed()
            return item
        return None

    def delete_items(self, names):
        """Delete several items; return (deleted_items, names_not_deleted)."""
        if not isinstance(names, list):
            raise TypeError('expected list get type %s' % type(names))
        deleted_items = []
        names_not_deleted = []
        # NOTE(review): deleted_items holds Item objects while `name` is a
        # string, so the first membership test looks intended to deduplicate
        # names but compares across types — confirm the intent.
        for name in names:
            if name not in deleted_items and name not in names_not_deleted:
                try:
                    item = self.delete_item(name)
                except Exception as exp:
                    # will not raise exception here
                    log_debug(exp.message, 'delete_items')
                    item = None
                if item:
                    deleted_items.append(item)
                else:
                    names_not_deleted.append(name)
        return deleted_items, names_not_deleted

    def rename_item(self, name, new_name):
        """Rename a child item, restoring it under the old name on failure."""
        item = self.get_item(name)
        if not item:
            raise ErrorDoNotExist('item:%s do not exist' % name)
        if self.has_item(new_name):
            raise ErrorAlreadyExist('item %s already exist' % new_name)
        self._item_detach(item)
        try:
            item.name = new_name
        except Exception as exp:
            log_debug(str(exp))
            # re-attach under the old name before propagating the error
            self._item_attach(item)
            raise exp
        else:
            self._item_attach(item)
            self.mark_changed()
            return True

    def move_item(self, name, target_folder):
        """Move a child item into `target_folder`, rolling back on failure."""
        assert isinstance(target_folder, Folder)
        item = self.get_item(name)
        if not item:
            raise ErrorDoNotExist()
        # must check that target_folder is not the item it self
        if item == target_folder:
            raise Exception('can not move item:"%s" to it self' % name)
        # item is a folder: the target folder must not be inside item's own
        # hierarchy (one of the parent folders)
        if isinstance(item, Folder) and target_folder.is_folder_in_parents(item):
            raise Exception('target_folder %s is in the hierarchy (is one of the parents) of "%s"' % (target_folder, name))
        if target_folder.has_item(name):
            raise ErrorAlreadyExist()
        if not item.allow_move(target_folder):
            raise ErrorMoveNotAllowed('item:"%s" not allowed to move to folder:"%s"' % (name, target_folder.url()))
        if not target_folder.allow_add_item(item):
            raise ErrorAddItemNotAllowed('target folder:"%s" do not allow item to be added' % target_folder)
        # delete item from collection, and detach item
        self._item_detach(item)
        try:
            target_folder.add_item(item)
        except Exception as exp:
            # add item to collection and attach it again
            self._item_attach(item)
            raise exp
        else:
            self.mark_changed()
            return True

    def list_items(self, **kwargs):
        """Return the child item NAMES, optionally filtered by a `validator`
        keyword (a zope Interface or a callable taking the item)."""
        if self.__items is None:
            return []
        items_list = list(self.__items)
        validator = kwargs.get('validator', None)
        if validator:
            if isinstance(validator, InterfaceClass):
                # BUGFIX: the old filter predicates took two arguments, but
                # filter() supplies only one; close over the validator instead.
                items_list = [n for n in items_list
                              if validator.providedBy(self.get_item(n))]
            elif callable(validator):
                items_list = [n for n in items_list
                              if validator(self.get_item(n))]
            else:
                raise TypeError('validator must be Interface or callable')
        return items_list

    def get_items(self, **kwargs):
        """Return the child item OBJECTS, optionally filtered by a `validator`
        keyword (a zope Interface or a callable taking the item)."""
        items = [item for _key, item in self.__items.iteritems()]
        validator = kwargs.get('validator', None)
        if validator:
            if isinstance(validator, InterfaceClass):
                # BUGFIX: `providedBy` (as used in list_items); `provided_by`
                # does not exist and the predicate took two arguments.
                items = [it for it in items if validator.providedBy(it)]
            elif callable(validator):
                items = [it for it in items if validator(it)]
            else:
                raise TypeError('validator must be Interface or callable')
        return items

    def len_items(self):
        """Number of child items (maintained by the Length counter)."""
        return self.__items_len()
class MultiPointerTrack(Persistent):
    """
    A MultiPointerTrack is a kind of structure that is based on an IOBTree, where
    each entry contains an ordered set (or list, depending on the implementation)
    of elements. Then, several "pointers" can be created, which point to different
    positions of the track (very much like runners in a race track).
    This class is abstract, implementations should be derived.
    """

    def __init__(self, elemContainer):
        # elemContainer is a factory (callable) that produces the per-entry
        # container used by prepareEntry()
        self._container = OOBTree()
        self._pointers = PersistentMapping()
        self._elemContainer = elemContainer

    def addPointer(self, pid, startPos=None):
        """ Registers a new pointer """
        self._pointers[pid] = None
        if startPos:
            self.movePointer(pid, startPos)

    def removePointer(self, pid):
        """ Removes a pointer from the list """
        del self._pointers[pid]

    def prepareEntry(self, ts):
        """ Creates an empty sub-structure (elemContainer) for a given timestamp """
        self._container[timestamp(ts)] = self._elemContainer()

    def getCurrentPosition(self, pid):
        """ Returns the current entry (set/list) for a given pointer id """
        currentPos = self._pointers[pid]
        # TODO: assertion? check?
        return self._container[timestamp(currentPos)]

    def getPointerTimestamp(self, pid):
        """ Gets the current 'position' of a pointer (id) """
        return self._pointers[pid]

    def __getitem__(self, ts):
        """ Implements __getitem__, so that mpt[timestamp] works """
        if isinstance(ts, slice):
            return self._getSlice(ts)
        else:
            return self._container[timestamp(ts)]

    def _getSlice(self, s):
        """ Calculates a slice of the structure (timestamp-wise) """
        # identity test instead of `!= None` (fix: equality comparison with None)
        if s.step is not None:
            raise TypeError('Extended slices are not accepted here')
        return self._container.values(s.start, s.stop)

    def values(self, *args):
        """ Return values or ranges (timestamps) of the structure """
        fargs = [None if a is None else timestamp(a) for a in args]
        return self._container.values(*fargs)

    def _append(self, ts, val):
        """ Should be overloaded. """
        # NotImplementedError is the idiomatic marker for an abstract method;
        # it is an Exception subclass, so existing `except Exception` handlers
        # keep working.
        raise NotImplementedError("Unimplemented method")

    def add(self, intTS, value):
        """ Adds a value to the container corresponding to a specific timestamp """
        ts = timestamp(intTS)
        if ts not in self._container:
            self.prepareEntry(intTS)
        self._append(ts, value)

    def _pointerIterator(self, pid, func, till=None):
        """ Iterates over the positions that are left (till the end of the
            track) for a given pointer (id). Takes a function that is applied to
            yielded values """
        return self.iterate(self._pointers[pid], till, func)

    def iterate(self, fromPos=None, till=None, func=(lambda x: x)):
        """ Generator that iterates through the data structure """
        if till is not None:
            till = timestamp(till)
            # negative numbers mean "last but one", "last but two", etc...
            if till == timestamp(-1):  # most common case
                till = self._container.maxKey() - timestamp(1)
        if fromPos is not None:
            fromPos = timestamp(fromPos)
        for ts, entry in self._container.iteritems(till, fromPos):
            if fromPos and ts == fromPos:
                # stop immediately if we're past fromPos
                # (fix: PEP 479 — a generator must `return` to finish;
                # `raise StopIteration` becomes RuntimeError on Python 3.7+)
                return
            for elem in entry:
                yield func((int(ts), elem))

    def mostRecentTS(self, maximum=None):
        """ Returns most recent timestamp in track (minimum key)
            If 'maximum' is provided, return it if less recent """
        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()
        mr = self._container.minKey()
        if maximum:
            maximum = timestamp(maximum)
            # in timestamp logic, max() returns the oldest
            return max(mr, maximum)
        else:
            return mr

    def oldestTS(self):
        """ Returns least recent timestamp in track (maximum key) """
        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()
        return self._container.maxKey()

    def pointerIterValues(self, pid, till=None):
        """ Iterates over the positions that are left (till the end of the
            track) for a given pointer (id) - iterates over values """
        return self._pointerIterator(pid, lambda x: x[1], till=till)

    def pointerIterItems(self, pid, till=None):
        """ Iterates over the positions that are left (till the end of the
            track) for a given pointer (id) - iterates over key-value pairs
            (iteritems) """
        return self._pointerIterator(pid, lambda x: x, till=till)

    def is_empty(self):
        # cheap emptiness probe: bail out at the first item found
        for __, ___ in self._container.iteritems():
            return False
        return True

    def movePointer(self, pid, pos):
        """ Moves a given pointer (id) to a given timestamp """
        if pid not in self._pointers:
            raise KeyError("Pointer '%s' doesn't seem to exist!" % pid)
        # check that the tree has something
        if self.is_empty():
            raise EmptyTrackException()
        self._pointers[pid] = pos

    def __len__(self):
        """ Returns the number of timestamp entries """
        return len(self._container)

    def __delitem__(self, item):
        """ Deletes a given timestamp entry (or range) """
        self._container.__delitem__(item)

    def __iter__(self):
        """ Iterates over the whole structure, element by element
            (goes inside containers) """
        return self.iterate()

    def __contains__(self, ts):
        return timestamp(ts) in self._container
class fsIndex(object):
    # Maps fixed-width string keys to integer file positions.  The [:6]/[6:]
    # slicing below splits each key into a 6-byte prefix and a suffix: the
    # outer OOBTree maps prefixes to fsBuckets, and each fsBucket maps
    # suffixes to num2str-encoded position strings.
    # NOTE(review): the 8-byte key width is inferred from the slicing —
    # confirm against callers.

    def __init__(self):
        self._data = OOBTree()

    def __getitem__(self, key):
        # KeyError propagates if either the prefix or the suffix is absent
        return str2num(self._data[key[:6]][key[6:]])

    def get(self, key, default=None):
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # first entry for this prefix: create the per-prefix bucket
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            raise KeyError, key
        del tree[key[6:]]
        if not tree:
            # drop empty buckets so minKey()/maxKey() never see an empty
            # subtree (see the invariant comment before minKey below)
            del self._data[treekey]

    def __len__(self):
        # total number of keys across all per-prefix buckets
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # `self` is used as a sentinel so that stored falsy values still
        # count as present
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        self._data.clear()

    def __iter__(self):
        # yields full keys, prefix + suffix recombined
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        # yields (full key, decoded integer position) pairs
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure: what if `tree` is actually empty? We're relying here on
    # the invariant that nothing in _data.values() is ever empty.
    # NOTE(review): this class *does* implement __delitem__ (above), but it
    # preserves the invariant by deleting a bucket as soon as it becomes
    # empty, so the reliance still holds.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey() methods are
    # very efficient.

    def minKey(self, key=None):
        # smallest full key >= `key` (or the overall smallest when key is None)
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])

        tree = self._data[smallest_prefix]

        assert tree

        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix >= arg)
                # no suffix in this bucket qualifies: advance to the next
                # prefix and take its smallest entry
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()

        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        # largest full key <= `key` (or the overall largest when key is None)
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])

        tree = self._data[biggest_prefix]

        assert tree

        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix <= arg)
                # no suffix in this bucket qualifies: step back to the
                # previous prefix and take its largest entry
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()

        return biggest_prefix + biggest_suffix
class CategoryEventStartDateIndex(Index):
    """Per-category index of events, keyed by start date/time.

    Every category id maps to an IOIndex; events whose category has
    disappeared (legacy data) are silently skipped on (un)indexing.
    """

    def __init__(self):
        self._container = OOBTree()
        # the home category ("0") is always present
        self.add_category("0")

    def __getitem__(self, key):
        return self._container[key]

    def __setitem__(self, key, value):
        self._container[key] = value

    def getCategory(self, categId, create=False):
        """Return the sub-index for `categId`; optionally create a missing one."""
        missing = categId not in self._container
        if missing and not create:
            raise KeyError(categId)
        if missing:
            self.add_category(categId)
        return self._container[categId]

    def add_category(self, categId):
        self._container[categId] = IOIndex(IIndexableByStartDateTime)

    def index_obj(self, obj):
        categ_id = obj.getOwner().getId()
        try:
            target = self.getCategory(categ_id)
        except KeyError:
            # some legacy events are in categories that don't exist anymore...
            return
        target.index_obj(obj)

    def unindex_obj(self, obj):
        categ_id = obj.getOwner().getId()
        try:
            target = self.getCategory(categ_id)
        except KeyError:
            # some legacy events are in categories that don't exist anymore...
            return
        try:
            target.unindex_obj(obj)
        except ElementNotFoundException:
            # some legacy events are not in this index...
            pass

    def remove_category(self, categId):
        del self._container[categId]

    def _initializeSubIndex(self, cset):
        # build a fresh per-category sub-index from an event collection
        sub_index = IOIndex(IIndexableByStartDateTime)
        for event in cset:
            sub_index.index_obj(event)
        return sub_index

    def initialize(self, dbi=None):
        """Rebuild the whole index from CategoryManager's contents."""
        from MaKaC.conference import CategoryManager
        for cid, categ in CategoryManager()._getIdx().iteritems():
            self[cid] = self._initializeSubIndex(categ.conferences)
        if dbi:
            dbi.commit()

    def _check(self, dbi=None):
        """Yield human-readable consistency problems (generator)."""
        from MaKaC.conference import CategoryManager, ConferenceHolder
        confIdx = ConferenceHolder()._getIdx()
        categIdx = CategoryManager()._getIdx()
        for pos, (cid, sub_index) in enumerate(self._container.iteritems()):
            # simple data structure check
            for problem in sub_index._check():
                yield problem
            # consistency with CategoryManager
            if cid not in categIdx:
                yield "Category '%s' not in CategoryManager" % cid
            # consistency with ConferenceHolder
            for ts, conf in sub_index.iteritems():
                if conf.getId() not in confIdx:
                    yield "[%s] Conference '%s'(%s) not in ConferenceHolder" % (cid, conf.getId(), ts)
            if dbi and pos % 100 == 99:
                dbi.abort()
class SingleValueIndex(Index):
    """An `index <Index>` where each key may only point to a single value.

    Two mirrored OOBTrees are kept: `__items` in ascending key order and
    `__descending_items` keyed by `Descending(key)` so that reverse-order
    range iteration and `max_key` are cheap.
    """

    accepts_multiple_values = False

    @overrides(Index.__init__)
    def __init__(self, pairs=None):
        self.__items = OOBTree()
        self.__descending_items = OOBTree()
        Index.__init__(self, pairs)

    @overrides(Index.add)
    def add(self, key, value):
        if key is None:
            raise ValueError("Can't use None as a key for a SingleValueIndex")
        self.__items[key] = value
        self.__descending_items[Descending(key)] = value

    @overrides(Index.remove)
    def remove(self, key, value=undefined):
        # If a specific value is requested, only remove the entry when it
        # currently maps to that exact value.
        if value is not undefined and self.get(key, undefined) != value:
            return
        # Fix 1: deletion used to be gated on `value is undefined`, so
        # remove(key, matching_value) silently removed nothing.
        # Fix 2: the descending tree is keyed by Descending(key), not the raw
        # key; deleting with the raw key always raised KeyError (swallowed
        # below), leaving stale entries behind and corrupting max_key()/items().
        try:
            del self.__items[key]
            del self.__descending_items[Descending(key)]
        except KeyError:
            pass

    @overrides(Index.items)
    def items(self,
              min=undefined,
              max=undefined,
              exclude_min=False,
              exclude_max=False,
              descending=False):
        """Iterate over (key, value) pairs, optionally bounded and reversed."""
        min = self.__boundary(min, descending)
        max = self.__boundary(max, descending)
        if descending:
            # the descending tree stores Descending(key), so bounds and
            # exclusion flags swap roles
            min, max = max, min
            exclude_min, exclude_max = exclude_max, exclude_min
            for desc_key, value in self.__descending_items.iteritems(
                min=min, max=max,
                excludemin=exclude_min, excludemax=exclude_max
            ):
                yield (desc_key.value, value)
        else:
            for pair in self.__items.iteritems(
                min=min, max=max,
                excludemin=exclude_min, excludemax=exclude_max
            ):
                yield pair

    def __boundary(self, boundary, descending):
        # Translate an API boundary into a BTree boundary (None = unbounded).
        if boundary is undefined:
            return None
        return Descending(boundary) if descending else boundary

    @overrides(Index.min_key)
    def min_key(self, exclude_none=False):
        if exclude_none:
            # scan in ascending order for the first non-None key
            for key in list(self.keys()):
                if key is not None:
                    return key
        else:
            return self.__items.minKey()

    @overrides(Index.max_key)
    def max_key(self):
        # the descending tree's minimum is the ascending maximum
        return self.__descending_items.minKey().value

    @overrides(Index.__len__)
    def __len__(self):
        return len(self.__items)

    @overrides(Index.__bool__)
    def __bool__(self):
        return bool(self.__items)

    @overrides(Index.__contains__)
    def __contains__(self, key):
        return key in self.__items

    def __getitem__(self, key):
        """Get the value for the specified key.

        :param key: The key to retrieve the value for.
        :return: The value for the specified key.
        :raise KeyError: Raised if the indicated key isn't present in the
            index.
        """
        if isinstance(key, slice):
            raise ValueError(
                "Slicing an index is not supported; use keys()/values() "
                "instead")
        else:
            return self.__items[key]

    def get(self, key, default=None):
        """Get the value for the specified key, returning `default` if the
        key is undefined.

        :param key: The key to retrieve the value for.
        :param default: The value that should be returned if the key is not
            defined by the index.
        :return: The value for the specified key.
        """
        return self.__items.get(key, default)
class HBTreeFolder2Base (Persistent):
    """Base for BTree-based folders.

    Item ids are hashed by splitting on H_SEPARATOR ('-'): each segment but
    the last selects a nested OOBTree bucket, the full id is the leaf key.

    BUG: Due to wrong design, we can't store 2 objects <A> and <A>-<B>
    where <A> does not contain '-'. We detect conflicts at the root level
    using 'type(ob) is OOBTree'
    """

    security = ClassSecurityInfo()

    manage_options = (
        ({'label': 'Contents', 'action': 'manage_main', },
         ) + Folder.manage_options[1:]
        )

    security.declareProtected(view_management_screens, 'manage_main')
    manage_main = DTMLFile('contents', globals())

    _htree = None      # OOBTree: { id -> object }
    _count = None      # A BTrees.Length
    _v_nextid = 0      # The integer component of the next generated ID
    title = ''

    def __init__(self, id=None):
        if id is not None:
            self.id = id
        self._initBTrees()

    def _initBTrees(self):
        self._htree = OOBTree()
        self._count = Length()

    def _populateFromFolder(self, source):
        """Fill this folder with the contents of another folder.
        """
        for name, value in source.objectItems():
            self._setOb(name, aq_base(value))

    security.declareProtected(view_management_screens, 'manage_fixCount')
    def manage_fixCount(self, dry_run=0):
        """Calls self._fixCount() and reports the result as text.
        """
        old, new = self._fixCount(dry_run)
        path = '/'.join(self.getPhysicalPath())
        if old == new:
            return "No count mismatch detected in HBTreeFolder2 at %s." % path
        else:
            return ("Fixed count mismatch in HBTreeFolder2 at %s. "
                    "Count was %d; corrected to %d" % (path, old, new))

    def _fixCount(self, dry_run=0):
        """Checks if the value of self._count disagrees with the content of
        the htree. If so, corrects self._count. Returns the old and new count
        values. If old==new, no correction was performed.
        """
        old = self._count()
        new = sum(1 for x in self._htree_iteritems())
        if old != new and not dry_run:
            self._count.set(new)
        return old, new

    def hashId(self, id):
        # split an id into its bucket path segments
        return id.split(H_SEPARATOR)

    def _htree_get(self, id):
        # Raw lookup: returns the stored (unwrapped) object or raises KeyError.
        id_list = self.hashId(id)
        if len(id_list) == 1:
            ob = self._htree[id]
            # a bucket at the root means `id` is a prefix, not a leaf
            if type(ob) is OOBTree:
                raise KeyError
        else:
            ob = self._htree[id_list.pop(0)]
            if type(ob) is not OOBTree:
                raise KeyError
            # leaf entries are keyed by the *full* id, not the last segment
            id_list[-1] = id
            for sub_id in id_list:
                ob = ob[sub_id]
        return ob

    def _getOb(self, id, default=_marker):
        """Return the named object from the folder
        """
        try:
            return self._htree_get(id).__of__(self)
        except KeyError:
            if default is _marker:
                raise KeyError(id)
            return default

    def __getitem__(self, id):
        try:
            return self._htree_get(id).__of__(self)
        except KeyError:
            raise KeyError(id)

    def _setOb(self, id, object):
        """Store the named object in the folder.
        """
        if type(object) is OOBTree:
            raise ValueError('HBTreeFolder2 can not store OOBTree objects')
        htree = self._htree
        for sub_id in self.hashId(id)[:-1]:
            try:
                htree = htree[sub_id]
            except KeyError:
                # create intermediate buckets on demand
                htree[sub_id] = htree = OOBTree()
                continue
            if type(htree) is not OOBTree:
                # a leaf already occupies this segment (see class BUG note)
                assert self._htree[sub_id] is htree, (htree, id)
                raise KeyError('There is already an item whose id is %r' % sub_id)
        if htree.has_key(id):
            raise KeyError('There is already an item named %r.' % id)
        htree[id] = object
        self._count.change(1)

    def _delOb(self, id):
        """Remove the named object from the folder.
        """
        htree = self._htree
        h = []
        for sub_id in self.hashId(id)[:-1]:
            # remember the path so empty buckets can be pruned afterwards
            h.append((htree, sub_id))
            htree = htree.get(sub_id)
            if type(htree) is not OOBTree:
                raise KeyError(id)
        if type(htree[id]) is OOBTree:
            raise KeyError(id)
        del htree[id]
        self._count.change(-1)
        # prune now-empty intermediate buckets bottom-up
        while h and not htree:
            htree, sub_id = h.pop()
            del htree[sub_id]

    security.declareProtected(view_management_screens, 'getBatchObjectListing')
    def getBatchObjectListing(self, REQUEST=None):
        """Return a structure for a page template to show the list of objects.
        """
        if REQUEST is None:
            REQUEST = {}
        pref_rows = int(REQUEST.get('dtpref_rows', 20))
        b_start = int(REQUEST.get('b_start', 1))
        b_count = int(REQUEST.get('b_count', 1000))
        b_end = b_start + b_count - 1
        url = self.absolute_url() + '/manage_main'
        count = self.objectCount()
        if b_end < count:
            next_url = url + '?b_start=%d' % (b_start + b_count)
        else:
            b_end = count
            next_url = ''
        if b_start > 1:
            prev_url = url + '?b_start=%d' % max(b_start - b_count, 1)
        else:
            prev_url = ''
        formatted = [listtext0 % pref_rows]
        for optID in islice(self.objectIds(), b_start - 1, b_end):
            optID = escape(optID)
            formatted.append(listtext1 % (escape(optID, quote=1), optID))
        formatted.append(listtext2)
        return {'b_start': b_start, 'b_end': b_end,
                'prev_batch_url': prev_url,
                'next_batch_url': next_url,
                'formatted_list': ''.join(formatted)}

    security.declareProtected(view_management_screens, 'manage_object_workspace')
    def manage_object_workspace(self, ids=(), REQUEST=None):
        '''Redirects to the workspace of the first object in the list.'''
        if ids and REQUEST is not None:
            REQUEST.RESPONSE.redirect(
                '%s/%s/manage_workspace' % (
                self.absolute_url(), quote(ids[0])))
        else:
            return self.manage_main(self, REQUEST)

    security.declareProtected(access_contents_information, 'tpValues')
    def tpValues(self):
        """Ensures the items don't show up in the left pane.
        """
        return ()

    security.declareProtected(access_contents_information, 'objectCount')
    def objectCount(self):
        """Returns the number of items in the folder."""
        return self._count()

    security.declareProtected(access_contents_information, 'has_key')
    def has_key(self, id):
        """Indicates whether the folder has an item by ID.
        """
        try:
            self._htree_get(id)
        except KeyError:
            return 0
        return 1

    # Work around for the performance regression introduced in Zope 2.12.23.
    # Otherwise, we use superclass' __contains__ implementation, which uses
    # objectIds, which is inefficient in HBTreeFolder2 to lookup a single key.
    __contains__ = has_key

    def _htree_iteritems(self, min=None):
        # Generator over (full id, leaf object), optionally starting at `min`.
        # BUG: Due to bad design of HBTreeFolder2, buckets other than the root
        # one must not contain both buckets & leafs. Otherwise, this method
        # fails.
        h = self._htree
        recurse_stack = []
        try:
            # descend to the starting bucket, building an iterator per level
            for sub_id in self.hashId(min) if min else ('',):
                if recurse_stack:
                    i.next()
                    if type(h) is not OOBTree:
                        break
                    id += H_SEPARATOR + sub_id
                    if type(h.itervalues().next()) is not OOBTree:
                        # leaf level: keys are full ids
                        sub_id = id
                else:
                    id = sub_id
                i = h.iteritems(sub_id)
                recurse_stack.append(i)
                h = h[sub_id]
        except (KeyError, StopIteration):
            pass
        # depth-first walk of the remaining (id, value) pairs
        while recurse_stack:
            i = recurse_stack.pop()
            try:
                while 1:
                    id, h = i.next()
                    if type(h) is OOBTree:
                        recurse_stack.append(i)
                        i = h.iteritems()
                    else:
                        yield id, h
            except StopIteration:
                pass

    security.declareProtected(access_contents_information, 'getTreeIdList')
    def getTreeIdList(self, htree=None):
        """ Return list of all tree ids
        """
        r = []
        s = [(None, self._htree.iteritems())]
        while s:
            base_id, items = s.pop()
            if base_id:
                for k, v in items:
                    if type(v) is not OOBTree:
                        r.append(base_id)
                        # As an optimization, and because _htree_iteritems does not
                        # support mixed buckets except at the root, we consider that
                        # this one only contains leafs.
                        break
                    s.append((base_id + H_SEPARATOR + k, v.iteritems()))
            else:
                # root bucket may mix leafs and sub-buckets
                for k, v in items:
                    if type(v) is not OOBTree:
                        r.append(base_id)
                        for k, v in items:
                            if type(v) is OOBTree:
                                s.append((k, v.iteritems()))
                        break
                    s.append((k, v.iteritems()))
        r.sort()
        return r

    security.declareProtected(access_contents_information, 'objectValues')
    def objectValues(self, base_id=_marker):
        return HBTreeObjectValues(self, base_id)

    security.declareProtected(access_contents_information, 'objectIds')
    def objectIds(self, base_id=_marker):
        return HBTreeObjectIds(self, base_id)

    security.declareProtected(access_contents_information, 'objectItems')
    def objectItems(self, base_id=_marker):
        # Returns a list of (id, subobject) tuples of the current object.
        return HBTreeObjectItems(self, base_id)

    # superValues() looks for the _objects attribute, but the implementation
    # would be inefficient, so superValues() support is disabled.
    _objects = ()

    security.declareProtected(access_contents_information, 'objectIds_d')
    def objectIds_d(self, t=None):
        return dict.fromkeys(self.objectIds(t), 1)

    def _checkId(self, id, allow_dup=0):
        if not allow_dup and self.has_key(id):
            raise BadRequestException, ('The id "%s" is invalid--'
                                        'it is already in use.' % id)

    def _setObject(self, id, object, roles=None, user=None, set_owner=1):
        v = self._checkId(id)
        if v is not None:
            id = v

        # If an object by the given id already exists, remove it.
        if self.has_key(id):
            self._delObject(id)

        self._setOb(id, object)
        object = self._getOb(id)

        if set_owner:
            object.manage_fixupOwnershipAfterAdd()

            # Try to give user the local role "Owner", but only if
            # no local roles have been set on the object yet.
            if hasattr(object, '__ac_local_roles__'):
                if object.__ac_local_roles__ is None:
                    user = getSecurityManager().getUser()
                    if user is not None:
                        userid = user.getId()
                        if userid is not None:
                            object.manage_setLocalRoles(userid, ['Owner'])

        object.manage_afterAdd(object, self)
        return id

    def _delObject(self, id, dp=1):
        object = self._getOb(id)
        try:
            object.manage_beforeDelete(object, self)
        except BeforeDeleteException, ob:
            raise
        except ConflictError:
            raise
class fsIndex(object):
    # Maps fixed-width string keys (6-byte prefix + suffix, see the
    # [:6]/[6:] slicing) to integer file positions.  The outer OOBTree maps
    # prefixes to fsBuckets, which hold num2str-encoded values per suffix.

    def __init__(self, data=None):
        self._data = OOBTree()
        if data:
            self.update(data)

    def __getstate__(self):
        # versioned pickle state; buckets are flattened to strings
        return dict(state_version=1,
                    _data=[(k, v.toString())
                           for (k, v) in self._data.iteritems()])

    def __setstate__(self, state):
        # dispatch to _setstate_<version>; version 0 is the legacy layout
        version = state.pop('state_version', 0)
        getattr(self, '_setstate_%s' % version)(state)

    def _setstate_0(self, state):
        # legacy (unversioned) pickles stored the raw instance __dict__
        self.__dict__.clear()
        self.__dict__.update(state)

    def _setstate_1(self, state):
        self._data = OOBTree([
            (k, fsBucket().fromString(v))
            for (k, v) in state['_data']
            ])

    def __getitem__(self, key):
        return str2num(self._data[key[:6]][key[6:]])

    def save(self, pos, fname):
        # streamed pickle: `pos` first, then (prefix, bucket-string)
        # pairs, finally a None terminator
        with open(fname, 'wb') as f:
            pickler = cPickle.Pickler(f, 1)
            pickler.fast = True
            pickler.dump(pos)
            for k, v in self._data.iteritems():
                pickler.dump((k, v.toString()))
            pickler.dump(None)

    @classmethod
    def load(class_, fname):
        # counterpart of save(); returns dict(pos=..., index=...)
        with open(fname, 'rb') as f:
            unpickler = cPickle.Unpickler(f)
            pos = unpickler.load()
            if not isinstance(pos, (int, long)):
                return pos  # Old format
            index = class_()
            data = index._data
            while 1:
                v = unpickler.load()
                if not v:
                    break
                k, v = v
                data[k] = fsBucket().fromString(v)
            return dict(pos=pos, index=index)

    def get(self, key, default=None):
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # first entry for this prefix: create the per-prefix bucket
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            raise KeyError, key
        del tree[key[6:]]
        if not tree:
            # drop empty buckets so minKey()/maxKey() never see one
            del self._data[treekey]

    def __len__(self):
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # `self` as sentinel so stored falsy values still count as present
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        self._data.clear()

    def __iter__(self):
        # yields full keys, prefix + suffix recombined
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure: what if `tree` is actually empty? We're relying here on
    # the invariant that nothing in _data.values() is ever empty:
    # __delitem__ above preserves it by removing a bucket as soon as it
    # becomes empty.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey() methods are
    # very efficient.

    def minKey(self, key=None):
        # smallest full key >= `key` (or the overall smallest when key is None)
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])

        tree = self._data[smallest_prefix]

        assert tree

        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix >= arg)
                # no qualifying suffix in this bucket: advance to the next
                # prefix and take its smallest entry
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()

        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        # largest full key <= `key` (or the overall largest when key is None)
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])

        tree = self._data[biggest_prefix]

        assert tree

        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix <= arg)
                # no qualifying suffix in this bucket: step back to the
                # previous prefix and take its largest entry
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()

        return biggest_prefix + biggest_suffix
class CategoryIndex(Persistent): def __init__(self): self._idxCategItem = OOBTree() def dump(self): return list(self._idxCategItem.items()) def _indexConfById(self, categid, confid): # only the more restrictive setup is taken into account categid = str(categid) if self._idxCategItem.has_key(categid): res = self._idxCategItem[categid] else: res = [] res.append(confid) self._idxCategItem[categid] = res def unindexConf(self, conf): confid = str(conf.getId()) self.unindexConfById(confid) def unindexConfById(self, confid): for categid in self._idxCategItem.keys(): if confid in self._idxCategItem[categid]: res = self._idxCategItem[categid] res.remove(confid) self._idxCategItem[categid] = res def reindexCateg(self, categ): for subcat in categ.getSubCategoryList(): self.reindexCateg(subcat) for conf in categ.getConferenceList(): self.reindexConf(conf) def reindexConf(self, conf): self.unindexConf(conf) self.indexConf(conf) def indexConf(self, conf): categs = conf.getOwnerPath() level = 0 for categ in conf.getOwnerPath(): if conf.getFullVisibility() > level: self._indexConfById(categ.getId(), conf.getId()) level += 1 if conf.getFullVisibility() > level: self._indexConfById("0", conf.getId()) def getItems(self, categid): categid = str(categid) if self._idxCategItem.has_key(categid): return self._idxCategItem[categid] else: return [] def _check(self, dbi=None): """ Performs some sanity checks """ i = 0 from MaKaC.conference import ConferenceHolder confIdx = ConferenceHolder()._getIdx() for cid, confs in self._idxCategItem.iteritems(): for confId in confs: # it has to be in the conference holder if confId not in confIdx: yield "[%s] '%s' not in ConferenceHolder" % (cid, confId) # the category has to be one of the owners elif cid not in (map( lambda x: x.id, ConferenceHolder().getById(confId).getOwnerPath()) + ['0']): yield "[%s] Conference '%s' is not owned" % (cid, confId) if dbi and i % 100 == 99: dbi.sync() i += 1
class CategoryEventStartDateIndex(Index): def __init__(self): self._container = OOBTree() # add home category by default self.add_category('0') def __getitem__(self, key): return self._container[key] def __setitem__(self, key, value): self._container[key] = value def getCategory(self, categId, create=False): if categId not in self._container: if create: self.add_category(categId) else: raise KeyError(categId) return self._container[categId] def add_category(self, categId): self._container[categId] = IOIndex(IIndexableByStartDateTime) def index_obj(self, obj): self.getCategory(obj.getOwner().getId()).index_obj(obj) def unindex_obj(self, obj): self.getCategory(obj.getOwner().getId()).unindex_obj(obj) def remove_category(self, categId): del self._container[categId] def _initializeSubIndex(self, cset): tsIndex = IOIndex(IIndexableByStartDateTime) for conf in cset: tsIndex.index_obj(conf) return tsIndex def initialize(self, dbi=None): from MaKaC.conference import CategoryManager for cid, categ in CategoryManager()._getIdx().iteritems(): self[cid] = self._initializeSubIndex(categ.conferences) if dbi: dbi.commit() def _check(self, dbi=None): from MaKaC.conference import CategoryManager, ConferenceHolder confIdx = ConferenceHolder()._getIdx() categIdx = CategoryManager()._getIdx() i = 0 for cid, index in self._container.iteritems(): # simple data structure check for problem in index._check(): yield problem # consistency with CategoryManager if cid not in categIdx: yield "Category '%s' not in CategoryManager" % cid # consistency with ConferenceHolder for ts, conf in index.iteritems(): if conf.getId() not in confIdx: yield "[%s] Conference '%s'(%s) not in ConferenceHolder" \ % (cid, conf.getId(), ts) if dbi and i % 100 == 99: dbi.abort() i += 1
class ObjectRegistry(Persistent):
    """See the objectregistry module docstring, this is deprecated and should
    not be used by new code.

    Objects get an integer '_obr_unique_key' attribute on first registration;
    the registry tracks, per non-unique key, (object, owner) pairs and keeps
    a reverse map from each object key to the non-unique keys referencing it.
    """

    def __init__(self):
        # non-unique key -> Set of (obj_key, owner_key) pairs
        self.__non_unique_key_registry = BTree()
        # unique integer key -> registered object
        self.__obr_registry = IOBTree()
        # high-water mark so keys are never reused after deregistration
        self.__obr_largest_key_ever = -1
        # unique key -> Set of non-unique keys referencing that object
        self.__non_unique_keys_for_obj = IOBTree()

    def get_keys_for_object(self, obj):
        # () for objects that were never registered (or already removed)
        if (not hasattr(obj, '_obr_unique_key') or
            not self.__non_unique_keys_for_obj.has_key(obj._obr_unique_key)
            ):
            return ()
        else:
            return self.__non_unique_keys_for_obj[obj._obr_unique_key]

    def __register_object(self, obj):
        """Should not be called directly, call __get_object_registered
        """
        # important because code below resets things like the non-unique
        # key set
        # assert( not hasattr(obj, '_obr_unique_key') )
        obr_unique_key = get_and_establish_attribute(
            obj, '_obr_unique_key',
            lambda: (0 if len(self.__obr_registry) == 0
                     else max(self.__obr_registry.maxKey() + 1,
                              self.__obr_largest_key_ever + 1)
                     )
            )
        self.__obr_registry[obr_unique_key] = obj
        self.__obr_largest_key_ever = max(obr_unique_key,
                                          self.__obr_largest_key_ever)
        self.__non_unique_keys_for_obj[obr_unique_key] = Set()
        return obr_unique_key

    def __get_object_registered(self, obj):
        # the assumption here about order of operations on ternary expressions
        # is that self.__register_object(obj) won't be called unless
        # the ternary condition fails
        result = (obj._obr_unique_key
                  if hasattr(obj, '_obr_unique_key')
                  else self.__register_object(obj)
                  )
        assert( self.__obr_registry.has_key(result) )
        assert( self.__non_unique_keys_for_obj.has_key(result) )
        return result

    def __deregister_object(self, obj):
        # only valid once the object is referenced by no non-unique key
        obj_key = obj._obr_unique_key
        if self.__non_unique_keys_for_obj.has_key(obj_key):
            assert( len(self.__non_unique_keys_for_obj[obj_key]) == 0 )
            del self.__non_unique_keys_for_obj[obj_key]
        del self.__obr_registry[obj_key]
        delattr(obj, '_obr_unique_key')

    def register_interest_by_non_unique_key(self, key, obj, owner):
        # registers both obj and owner (assigning unique keys on demand) and
        # records the association under `key` in both directions
        obj_key, owner_key = (self.__get_object_registered(obj),
                              self.__get_object_registered(owner)
                              )
        set_for_key = self.__non_unique_key_registry.setdefault(key, Set())
        set_for_key.insert( (obj_key, owner_key) )
        for obr_unique_key in obj_key, owner_key:
            self.__non_unique_keys_for_obj[obr_unique_key].insert( key )

    def registered_obj_and_owner_per_unique_key(self, key):
        # lazy (object, owner) pairs registered under `key`
        return ( (self.__obr_registry[obj_key], self.__obr_registry[owner_key])
                 for obj_key, owner_key in
                 self.__non_unique_key_registry.get(key, () )
                 ) # end generator expression

    def registered_obj_and_owner_per_unique_key_range(self, key_min, key_max):
        # lazy (key, (object, owner)) pairs over a key range
        return ( (key, (self.__obr_registry[obj_key],
                        self.__obr_registry[owner_key]) ) # end tuple
                 for (key, da_set) in
                 self.__non_unique_key_registry.iteritems(key_min, key_max)
                 for (obj_key, owner_key) in da_set
                 ) # end generator expression

    def deregister_interest_by_non_unique_key(self, key, obj, owner):
        obj_key = obj._obr_unique_key
        owner_key = owner._obr_unique_key
        self.__non_unique_key_registry.get(key).remove( (obj_key, owner_key) )
        # this is sort of ridiculous, counting up all the references for
        # obj and owner and making a deletion decision on that,
        # when we could have just kept count of them all along
        #
        # And remember, this isn't the count for all references for obj and
        # owner, just all references related to key, very important at the
        # end of this function
        obj_count = 0
        owner_count = 0
        for obj_search, owner_search in \
                self.registered_obj_and_owner_per_unique_key(key):
            obj_search_key = obj_search._obr_unique_key
            owner_search_key = owner_search._obr_unique_key
            if obj_search_key == obj_key:
                obj_count += 1
            if owner_search_key == owner_key:
                owner_count += 1
        for count, da_obj_key, da_obj in \
                ( (obj_count, obj_key, obj),
                  (owner_count, owner_key, owner) ):
            if count == 0:
                self.__non_unique_keys_for_obj[da_obj_key].remove(key)
                # very important, the association of da_obj with
                # the key had to have been the only key for us to
                # completely deregister it
                if len(self.__non_unique_keys_for_obj[da_obj_key]) == 0:
                    self.__deregister_object(da_obj)

    def final_deregister_interest_for_obj_non_unique_key(self, key, obj, owner):
        # Deregister only when this (obj, owner) pair is the last entry for
        # `obj` under `key`; returns False when other entries remain.
        obj_key = obj._obr_unique_key
        owner_key = owner._obr_unique_key
        # this is a ridiculous linear implementation, the structure clearly
        # needs to be altered to nested sets instead
        da_set = self.__non_unique_key_registry.get(key)
        # filter by entries with the same object
        da_list = [ (search_obj_key, search_owner_key)
                    for search_obj_key, search_owner_key in da_set
                    if search_obj_key == obj_key ]
        assert( len(da_list) >= 1 )
        if len(da_list) > 1:
            return False
        else:
            assert( len(da_list) == 1 )
            assert( da_list[0] == (obj_key, owner_key) )
            self.deregister_interest_by_non_unique_key(key, obj, owner)
            return True
class fsIndex(object):
    """Two-level mapping from fixed-width string keys to integers.

    Each key is split after its sixth byte: the 6-byte prefix keys an
    OOBTree whose values are fsBucket objects, and the remaining suffix
    keys the bucket.  Integer values are packed to strings with num2str
    on the way in and unpacked with str2num on the way out.  (Keys are
    presumably 8-byte oids mapped to file positions -- TODO confirm
    against callers.)
    """

    def __init__(self, data=None):
        # data: optional mapping of initial key -> integer entries.
        self._data = OOBTree()
        if data:
            self.update(data)

    def __getstate__(self):
        # Pickle as version-1 state: buckets flattened to strings so the
        # pickle does not depend on fsBucket's own pickle support.
        return dict(state_version=1,
                    _data=[(k, v.toString()) for (k, v) in self._data.iteritems()])

    def __setstate__(self, state):
        # Dispatch on the recorded state version (absent => version 0).
        version = state.pop("state_version", 0)
        getattr(self, "_setstate_%s" % version)(state)

    def _setstate_0(self, state):
        # Version 0: the raw instance __dict__ was pickled directly.
        self.__dict__.clear()
        self.__dict__.update(state)

    def _setstate_1(self, state):
        # Version 1: rebuild each bucket from its string form.
        self._data = OOBTree([(k, fsBucket().fromString(v)) for (k, v) in state["_data"]])

    def __getitem__(self, key):
        """Return the integer stored under *key*; raise KeyError if absent."""
        return str2num(self._data[key[:6]][key[6:]])

    def save(self, pos, fname):
        """Write *pos* followed by all (prefix, bucket-string) pairs to *fname*.

        A trailing None marks the end of the stream; load() reads it back.
        """
        with open(fname, "wb") as f:
            pickler = cPickle.Pickler(f, 1)
            # fast mode: no memo needed, each entry is dumped exactly once
            pickler.fast = True
            pickler.dump(pos)
            for k, v in self._data.iteritems():
                pickler.dump((k, v.toString()))
            pickler.dump(None)

    @classmethod
    def load(class_, fname):
        """Read a file written by save(); return dict(pos=..., index=...).

        If the first pickled object is not an integer the file is in the
        old format and that object is returned unchanged.
        """
        with open(fname, "rb") as f:
            unpickler = cPickle.Unpickler(f)
            pos = unpickler.load()
            if not isinstance(pos, (int, long)):
                return pos # Old format
            index = class_()
            data = index._data
            while 1:
                v = unpickler.load()
                if not v:
                    # None sentinel written by save() -- end of entries.
                    break
                k, v = v
                data[k] = fsBucket().fromString(v)
            return dict(pos=pos, index=index)

    def get(self, key, default=None):
        """Return the integer for *key*, or *default* when absent."""
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        """Store integer *value* under *key*, creating the bucket on demand."""
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        """Remove *key*; drop its bucket too once the bucket is empty.

        Dropping empty buckets preserves the invariant -- relied on by
        minKey()/maxKey() -- that every bucket in _data is non-empty.
        """
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            raise KeyError, key
        del tree[key[6:]]
        if not tree:
            del self._data[treekey]

    def __len__(self):
        # Total number of keys across all buckets.
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        """Copy every (key, integer) pair from *mapping* into the index."""
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # Uses self as a sentinel default so a stored falsy value still
        # counts as present.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        """Remove every entry."""
        self._data.clear()

    def __iter__(self):
        # Yield full keys (prefix + suffix) in sorted order.
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        # Yield (full key, integer) pairs in sorted key order.
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure: what if `tree` is actually empty?  These methods rely on
    # every bucket in _data being non-empty.  __setitem__ only creates a
    # bucket when it is about to insert into it, and __delitem__ removes
    # a bucket as soon as its last key is deleted, so the invariant
    # holds.  (An earlier version of this comment argued from the absence
    # of __delitem__; that reasoning is stale now that deletion exists.)
    #
    # Note that because each bucket is an fsBucket, its minKey()/maxKey()
    # methods are very efficient.

    def minKey(self, key=None):
        """Smallest key, or the smallest key >= *key* when one is given."""
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])
        tree = self._data[smallest_prefix]
        assert tree
        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError: # 'empty tree' (no suffix >= arg)
                # Fall through to the first entry of the next prefix.
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()
        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        """Largest key, or the largest key <= *key* when one is given."""
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])
        tree = self._data[biggest_prefix]
        assert tree
        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError: # 'empty tree' (no suffix <= arg)
                # Fall back to the last entry of the preceding prefix.
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()
        return biggest_prefix + biggest_suffix
class fsIndex(object):
    """Map fixed-width string keys to integers, stored two levels deep.

    A key is split after its sixth byte: the 6-byte prefix selects an
    fsBucket held in an OOBTree, and the remaining suffix indexes into
    that bucket.  Values are packed to strings with num2str on the way
    in and unpacked with str2num on the way out.
    """

    def __init__(self):
        self._data = OOBTree()

    def __getitem__(self, key):
        """Return the integer stored under *key*; raise KeyError if absent."""
        return str2num(self._data[key[:6]][key[6:]])

    def get(self, key, default=None):
        """Return the integer for *key*, or *default* when absent."""
        bucket = self._data.get(key[:6], default)
        if bucket is not default:
            raw = bucket.get(key[6:], default)
            if raw is not default:
                return str2num(raw)
        return default

    def __setitem__(self, key, value):
        """Store integer *value* under *key*, creating the bucket on demand."""
        packed = num2str(value)
        prefix = key[:6]
        bucket = self._data.get(prefix)
        if bucket is None:
            bucket = self._data[prefix] = fsBucket()
        bucket[key[6:]] = packed

    def __len__(self):
        # Total number of keys across all buckets.
        return sum(len(bucket) for bucket in self._data.itervalues())

    def update(self, mapping):
        """Copy every (key, integer) pair from *mapping* into the index."""
        for full_key, num in mapping.items():
            self[full_key] = num

    def has_key(self, key):
        # self doubles as a sentinel default so stored falsy values still
        # count as present.
        sentinel = self
        return self.get(key, sentinel) is not sentinel

    def __contains__(self, key):
        bucket = self._data.get(key[:6])
        return bucket is not None and bucket.get(key[6:], None) is not None

    def clear(self):
        """Remove every entry."""
        self._data.clear()

    def __iter__(self):
        # Yield full keys (prefix + suffix) in sorted order.
        for prefix, bucket in self._data.iteritems():
            for suffix in bucket:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        # Yield (full key, integer) pairs in sorted key order.
        for prefix, bucket in self._data.iteritems():
            for suffix, raw in bucket.iteritems():
                yield (prefix + suffix, str2num(raw))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for bucket in self._data.itervalues():
            for raw in bucket.itervalues():
                yield str2num(raw)

    def values(self):
        return list(self.itervalues())

    # minKey/maxKey below rely on every bucket in _data being non-empty.
    # This class offers no __delitem__: once a key is added, the only way
    # to drop it is clear(), and __setitem__ only creates a bucket when it
    # is about to insert into it -- so the invariant holds.  Because each
    # bucket is an fsBucket, its minKey()/maxKey() calls are cheap, which
    # keeps the fall-through to a neighbouring prefix inexpensive.

    def minKey(self, key=None):
        """Smallest key, or the smallest key >= *key* when one is given."""
        if key is None:
            prefix = self._data.minKey()
            bucket = self._data[prefix]
            assert bucket
            return prefix + bucket.minKey()
        prefix = self._data.minKey(key[:6])
        bucket = self._data[prefix]
        assert bucket
        try:
            suffix = bucket.minKey(key[6:])
        except ValueError:
            # Nothing >= the requested suffix under this prefix; take the
            # first entry of the next prefix instead.
            prefix = self._data.minKey(prefix_plus_one(prefix))
            bucket = self._data[prefix]
            assert bucket
            suffix = bucket.minKey()
        return prefix + suffix

    def maxKey(self, key=None):
        """Largest key, or the largest key <= *key* when one is given."""
        if key is None:
            prefix = self._data.maxKey()
            bucket = self._data[prefix]
            assert bucket
            return prefix + bucket.maxKey()
        prefix = self._data.maxKey(key[:6])
        bucket = self._data[prefix]
        assert bucket
        try:
            suffix = bucket.maxKey(key[6:])
        except ValueError:
            # Nothing <= the requested suffix under this prefix; take the
            # last entry of the preceding prefix instead.
            prefix = self._data.maxKey(prefix_minus_one(prefix))
            bucket = self._data[prefix]
            assert bucket
            suffix = bucket.maxKey()
        return prefix + suffix