def add_many(self, activities, batch_interface=None, trim=True, *args, **kwargs):
    '''
    Add many activities to the feed

    :param activities: a list of activities
    :param batch_interface: the batch interface
    :param trim: when True, probabilistically trim the feed after writing
    :returns: the number of activities added by the timeline storage
    '''
    allowed_types = (self.activity_class, FakeActivity)
    validate_list_of_strict(activities, allowed_types)

    added = self.timeline_storage.add_many(
        self.key, activities, batch_interface=batch_interface, *args, **kwargs)

    # trim the feed sometimes (trim_chance keeps it amortized cheap)
    if trim and random.random() <= self.trim_chance:
        self.trim()

    self.on_update_feed(new=activities, deleted=[])
    return added
def remove_many_aggregated(self, aggregated, *args, **kwargs):
    """
    Removes the list of aggregated activities

    :param aggregated: the list of aggregated activities to remove
    """
    accepted = (self.aggregated_activity_class, FakeAggregatedActivity)
    validate_list_of_strict(aggregated, accepted)
    self.timeline_storage.remove_many(self.key, aggregated, *args, **kwargs)
def add_many_aggregated(self, aggregated, *args, **kwargs):
    '''
    Adds the list of aggregated activities

    :param aggregated: the list of aggregated activities to add
    '''
    valid_classes = (self.aggregated_activity_class, FakeAggregatedActivity)
    validate_list_of_strict(aggregated, valid_classes)
    self.timeline_storage.add_many(self.key, aggregated, *args, **kwargs)
def add_many_aggregated(self, aggregated, *args, **kwargs):
    '''
    Adds the list of aggregated activities

    :param aggregated: the list of aggregated activities to add
    '''
    validate_list_of_strict(
        aggregated,
        (self.aggregated_activity_class, FakeAggregatedActivity),
    )
    # delegate the actual write to the timeline storage backend
    self.timeline_storage.add_many(self.key, aggregated, *args, **kwargs)
def add_many(self, activities, batch_interface=None, trim=True, *args, **kwargs):
    '''
    Add many activities

    :param activities: a list of activities
    :param batch_interface: the batch interface
    :param trim: when True, the feed may be trimmed after the write
    :returns: the count reported by the timeline storage
    '''
    validate_list_of_strict(
        activities, (self.activity_class, FakeActivity))

    add_count = self.timeline_storage.add_many(
        self.key,
        activities,
        batch_interface=batch_interface,
        *args,
        **kwargs
    )

    # only trim on a random fraction of writes to keep writes fast
    should_trim = trim and random.random() <= self.trim_chance
    if should_trim:
        self.trim()

    self.on_update_feed(new=activities, deleted=[])
    return add_count
def add_many(self, activities, trim=True, current_activities=None, *args, **kwargs):
    '''
    Adds many activities to the feed

    Unfortunately we can't support the batch interface.
    The writes depend on the reads.
    Also subsequent writes will depend on these writes.
    So no batching is possible at all.

    :param activities: the list of activities
    :param trim: when True, probabilistically trim the feed after writing
    :param current_activities: optionally supply the current aggregated
        activities to merge against (saves the read from storage)
    :returns: the ranked list of new aggregated activities
    '''
    validate_list_of_strict(
        activities, (self.activity_class, FakeActivity))

    # start by getting the aggregator
    aggregator = self.get_aggregator()

    t = timer()
    # get the current aggregated activities
    if current_activities is None:
        current_activities = self[:self.merge_max_length]
    msg_format = 'reading %s items took %s'
    # use next(t) rather than t.next(): t.next() is Python 2-only syntax,
    # while the next() builtin works on both Python 2.6+ and Python 3
    logger.debug(msg_format, self.merge_max_length, next(t))

    # merge the current activities with the new ones
    new, changed, deleted = aggregator.merge(current_activities, activities)
    logger.debug('merge took %s', next(t))

    # new ones we insert, changed we do a delete and insert
    new_aggregated = self._update_from_diff(new, changed, deleted)
    new_aggregated = aggregator.rank(new_aggregated)

    # trim every now and then
    if trim and random.random() <= self.trim_chance:
        self.timeline_storage.trim(self.key, self.max_length)

    return new_aggregated
def add_many(self, activities, trim=True, current_activities=None, *args, **kwargs):
    '''
    Adds many activities to the feed

    Unfortunately we can't support the batch interface.
    The writes depend on the reads.
    Also subsequent writes will depend on these writes.
    So no batching is possible at all.

    :param activities: the list of activities
    :param trim: when True, probabilistically trim the feed after writing
    :param current_activities: optionally supply the current aggregated
        activities to merge against (saves the read from storage)
    :returns: the ranked list of new aggregated activities
    '''
    validate_list_of_strict(
        activities, (self.activity_class, FakeActivity))

    # start by getting the aggregator
    aggregator = self.get_aggregator()

    t = timer()
    # get the current aggregated activities
    if current_activities is None:
        current_activities = self[:self.merge_max_length]
    msg_format = 'reading %s items took %s'
    # next(t) instead of t.next(): the .next() method only exists on
    # Python 2 iterators, while the next() builtin is portable (2.6+/3.x)
    logger.debug(msg_format, self.merge_max_length, next(t))

    # merge the current activities with the new ones
    new, changed, deleted = aggregator.merge(
        current_activities, activities)
    logger.debug('merge took %s', next(t))

    # new ones we insert, changed we do a delete and insert
    new_aggregated = self._update_from_diff(new, changed, deleted)
    new_aggregated = aggregator.rank(new_aggregated)

    # trim every now and then
    if trim and random.random() <= self.trim_chance:
        self.timeline_storage.trim(self.key, self.max_length)

    return new_aggregated