Example #1
0
def delete_all_tags(i_am_sure=False):
    if not i_am_sure:
        return

    # globally
    del get_tags_database().tags
    get_tags_database().tags = OOBTree.OOBTree()

    # global reverse mapping
    del get_tagged_item_database().root
    get_tagged_item_database().root = OOBTree.OOBTree()

    # for each group, ensure tags are empty
    for group_id, group in get_group_database().root.iteritems():
        del group.tags
        group.tags = OOBTree.OOBTree()
    #

    for user_id, user in get_user_database().root.iteritems():
        if hasattr(user, "tags"):
            if len(user.tags) > 0:
                del user.tags
                user.tags = OOBTree.OOBTree()
    #
    get_transaction().commit()
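The reset above has to touch both directions of the tag index: the forward mapping in the tags database, the reverse mapping in the tagged-item database, and the per-group and per-user copies. A minimal self-contained sketch of why the two sides are rebuilt together, using plain dicts in place of the OOBTree containers (the names forward and reverse are illustrative, not part of the qon API):

# Illustrative only: plain dicts standing in for the OOBTree-backed databases.
forward = {"python": {"oid-1", "oid-2"}}                    # tag -> tagged items (tags database)
reverse = {"oid-1": {"python"}, "oid-2": {"python"}}        # item -> its tags (tagged-item database)

def delete_all_tags_sketch(forward, reverse, i_am_sure=False):
    """Rebind both containers at once so neither side can point at stale data."""
    if not i_am_sure:
        return forward, reverse
    # Clearing only one mapping would leave dangling entries in the other.
    return {}, {}

forward, reverse = delete_all_tags_sketch(forward, reverse, i_am_sure=True)
print(forward)    # {}
print(reverse)    # {}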
Example #2
0
    def _generate_document_for_wiki_page(self, wikipage, existing):
        latest_version = wikipage.versions[-1]
        group = wikipage.wiki.group
        
        fields = []    

        # fields are in the form of [name, value, isStored, isIndexed, isTokenized]
        fields.append(['title', latest_version.title, False, True, True])
        fields.append(['karma', str(wikipage.get_karma_score()).zfill(6), False, True, False])
        fields.append(['u_name', latest_version.author.display_name(), False, True, True])    
        fields.append(['date', str(get_unix_timestamp(latest_version.date)), False, True, False])    # index it so that we can sort by it
        fields.append(['oid', self._encode_oid(wikipage._p_oid), True, True, False])
        fields.append(['type', 'Wikipage', True, True, True])                       
        fields.append(['g_name', group.display_name(), False, True, True])

        # create the main text to index (title + raw text + comments + commenting authors' names + page name + last editor name + tags)
        tidb = get_tagged_item_database()
        tags = " ".join(tidb.get_tags(wikipage._p_oid))
        comments = wikipage.get_comments()
        text = "%s %s %s %s %s %s" % (latest_version.title, latest_version.get_raw(), ''.join(["%s %s " % (x.get_summary(), x.author.display_name()) for x in comments]), wikipage.name, latest_version.author.display_name(), tags)
        fields.append(['text', text, False, True, True])    

        # create the preview text    
        preview = self._generate_preview_text(latest_version.get_raw())
        fields.append(['preview', preview, True, False, False])

        # send the document for indexing        
        self._queue_document(fields, existing)
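Each of the document generators in these examples builds the same five-element field convention: [name, value, isStored, isIndexed, isTokenized]. A small self-contained sketch of what a consumer of that list might look like; make_document is hypothetical and only illustrates the flags, while the real _queue_document presumably hands the fields to the Lucene indexer:

def make_document(fields):
    # Hypothetical consumer: turn [name, value, isStored, isIndexed, isTokenized]
    # rows into a dict of per-field flags.
    doc = {}
    for name, value, is_stored, is_indexed, is_tokenized in fields:
        doc[name] = {'value': value, 'stored': is_stored,
                     'indexed': is_indexed, 'tokenized': is_tokenized}
    return doc

fields = [
    ['title', 'My first post', False, True, True],
    ['karma', '000042', False, True, False],    # zero-padded so a lexical sort matches numeric order
    ['oid', 'AAAB12', True, True, False],
]
print(make_document(fields)['karma']['indexed'])    # True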
Example #3
0
def index_all_tagged():
    tidb = get_tagged_item_database()

    i = 1
    for oid in tidb:
        item = get_oid(oid)
        index_tagged_item(item)
        _commit(i)
        i += 1
    _commit(0)
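index_all_tagged leans on a _commit helper that takes a running counter and is called once more with 0 at the end, which suggests a commit-every-N-items pattern to keep each transaction small. A hedged sketch of that pattern; the interval of 100 and the helper body are assumptions, not the real _commit:

def _commit_sketch(i, interval=100):
    # Assumption: commit the open transaction every `interval` items,
    # and unconditionally when called with 0 after the loop.
    if i % interval == 0:
        print('commit after item %d' % i)    # stands in for get_transaction().commit()

for i in range(1, 251):
    _commit_sketch(i)
_commit_sketch(0)    # final commit for the leftover items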
Example #4
0
    def tag_item(self, user_id, item_oid, tags, comment):
        if not tags:
            return

        # update this tag container to hold these tags for this item
        HasTags.tag_item(self, user_id, item_oid, tags, comment)

        # update the reverse mapping with the new tag attributes
        # for any tag, get the new attributes object
        new_attributes = self.tags[tags[0]][item_oid][user_id]
        tidb = get_tagged_item_database()
        tidb.add_item(item_oid, user_id, new_attributes)
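The new_attributes lookup above reads from a three-level mapping keyed by tag, then item oid, then user id, and pushes that attributes object into the reverse mapping. A rough sketch of that shape with plain dicts; TagAttributes is a hypothetical stand-in for whatever attribute object HasTags actually stores:

class TagAttributes(object):
    # Hypothetical stand-in for the stored per-(tag, item, user) attributes.
    def __init__(self, comment=None):
        self.comment = comment

# self.tags[tag][item_oid][user_id] -> attributes
tags = {}
tags.setdefault('python', {}).setdefault('oid-1', {})['user-7'] = TagAttributes('nice intro')

# Any one tag's entry (tags[0] in tag_item) is enough to refresh the
# reverse mapping for that (item, user) pair.
new_attributes = tags['python']['oid-1']['user-7']
print(new_attributes.comment)    # nice intro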
Example #5
0
def tag_item(tags, user, item_oid, group, comment=None, is_user=False):
    """All tags are applied through this function, which keeps
    the various databases consistent with each other."""
    user_id = user.get_user_id()

    tags = qon.tags.standardize_tags(tags)

    # clear out any removed tags
    tags_db = get_tags_database()
    tidb = get_tagged_item_database()
    if is_user:
        user_db = get_user_database()

    # what gets removed? what gets added?
    old_tags = tidb.get_tags(item_oid, user_id)
    tags_to_remove = [tag for tag in old_tags if tag not in tags]
    tags_to_add = [tag for tag in tags if tag not in old_tags]

    if tags_to_remove:
        # remove user from removed tags
        tags_db.remove_tags(tags_to_remove, item_oid, user_id)
    
        if group:
            group.remove_tags(tags_to_remove, item_oid, user_id)
    
        # remove the tag from the user's list too.
        user.remove_tags(tags_to_remove, item_oid)

        if is_user:
            user_db.remove_tags(tags_to_remove, item_oid, user_id)
        #
    #

    if tags_to_add:
        # add to the global database
        tags_db.tag_item(user_id, item_oid, tags, comment)
    
        # group gets its tag information
        if group:
            group.tag_item(user_id, item_oid, tags_to_add, comment)
    
        # update the user's tag cloud
        user.tag_item(tags, item_oid)

        if is_user:
            user_db.tag_item(user_id, item_oid, tags_to_add, comment)
        #
    #
    get_transaction().commit()
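The consistency promise in the docstring comes down to the delta between the item's old tags and the new set: every database is given the same removals and additions. A self-contained sketch of just that computation, mirroring the two list comprehensions above with no database access:

def tag_delta(old_tags, new_tags):
    # Return (to_remove, to_add) so every database sees the same change.
    to_remove = [tag for tag in old_tags if tag not in new_tags]
    to_add = [tag for tag in new_tags if tag not in old_tags]
    return to_remove, to_add

print(tag_delta(['python', 'zodb'], ['python', 'lucene']))
# (['zodb'], ['lucene'])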
Example #6
0
    def remove_tags(self, tags, item_oid, user_id):
        if not tags:
            return

        tidb = get_tagged_item_database()

        # what tags does this item,user have before removal?
        item_tags = tidb.get_tags(item_oid, user_id)

        HasTags.remove_tags(self, tags, item_oid, user_id)

        remaining_tags = [tag for tag in item_tags if tag not in tags]
        if remaining_tags:
            atag = remaining_tags[0]  # an arbitrary remaining tag
            if atag in self.tags and item_oid in self.tags[atag] and user_id in self.tags[atag][item_oid]:
                new_attributes = self.tags[atag][item_oid][user_id]
                tidb.add_item(item_oid, user_id, new_attributes)
        else:
            # if all attributes are gone, remove this item,user from the tidb
            tidb.remove_item_user(item_oid, user_id)
Example #7
0
    def _generate_document_for_blog_item(self, blogitem, existing, t):
        fields = []

        # don't use watchable_modified_date() now that comments live separately from blog items
        last_edited_date = blogitem.modified
        if not last_edited_date:
            last_edited_date = blogitem.date

        # gather fields that are consistent across all types of blogs
        # fields are in the form of [name, value, isStored, isIndexed, isTokenized]
        fields.append(['title', blogitem.title, False, True, True])
        fields.append(['karma', str(blogitem.get_karma_score()).zfill(6), False, True, False])
        fields.append(['u_name', blogitem.author.display_name(), False, True, True])
        fields.append(['date', str(get_unix_timestamp(last_edited_date)), False, True, False])   # index it so that we can sort by it.  add T to fix Lucene range query bug
        fields.append(['oid', self._encode_oid(blogitem._p_oid), True, True, False])

        # create the main text for indexing
        # (title + summary + author name)
        text = "%s %s %s" % (blogitem.title, blogitem.get_summary(), blogitem.author.display_name())
        fields.append(['text', text, False, True, True])

        # create the preview text    
        preview = self._generate_preview_text(blogitem.get_summary())
        fields.append(['preview', preview, True, False, False])

        # gather fields that differ depending on the type of blog    
        if t is qon.group.Group:
            group = blogitem.blog.ihb
            fields.append(['type', 'Discussion', True, True, True])
            fields.append(['g_name', group.display_name(), False, True, True])
            
        if t is qon.user.User:
            fields.append(['type', 'Usernews', True, True, True])             

        # index on tags
        tidb = get_tagged_item_database()
        tags = " ".join(tidb.get_tags(blogitem._p_oid))
        fields.append(['tags', tags, False, True, False])

        # send the document for indexing        
        self._queue_document(fields, existing)
Example #8
0
    def get_related_tags_n_counts(self, current_tags, limit=100):
        """ Consider the items tagged with all of the current tags,
        and return the tags that also tag that small set.

        The counts returned reflect how many items share each additional
        tag (not popularity in the sense of how many people applied it).
        """
        results = []
        tidb = get_tagged_item_database()

        items_intersection = None
        for tag in current_tags:
            # get the items tagged with this tag
            items = self.get_tagged_items(tag)

            if items_intersection is None:
                # start with _only_ tags that are applied by nice taggers
                items_intersection = [item for item in items if tidb.get_item_popularity(item, tag) > 0 ]
            else:
                # iterate over the shrinking intersection to save time
                items_intersection = [item for item in items_intersection if item in items]
            #
        #

        # {tag: count} where count is how many times the tag shows up
        # looking through the item's tags
        related_tags = {}
        for item_oid in items_intersection:
            for tag in tidb.get_tags(item_oid):
                if tag not in current_tags:
                    related_tags[tag] = related_tags.get(tag, 0) + 1
        #

        counts_n_tags = [(ct,tag) for tag, ct in related_tags.iteritems()]

        counts_n_tags.sort()
        # now the popular tags are at the end of counts_n_tags
        tags_n_counts = [(tag, count) for count, tag in counts_n_tags[-limit:]]
        tags_n_counts.sort()
        # now we have the most popular, alphabetically
        return tags_n_counts
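A self-contained sketch of the same intersect-then-count idea with plain dicts and sets. Here tagged_items stands in for get_tagged_items, item_tags for tidb.get_tags, and the nice-tagger popularity filter is omitted:

tagged_items = {
    'python': {'oid-1', 'oid-2', 'oid-3'},
    'zodb':   {'oid-2', 'oid-3'},
    'lucene': {'oid-3'},
}
item_tags = {
    'oid-1': {'python'},
    'oid-2': {'python', 'zodb'},
    'oid-3': {'python', 'zodb', 'lucene'},
}

def related_tags_sketch(current_tags, limit=100):
    # intersect the items that carry every one of the current tags
    items = None
    for tag in current_tags:
        items = tagged_items[tag] if items is None else items & tagged_items[tag]
    # count how many of those items carry each other tag
    counts = {}
    for oid in items or ():
        for tag in item_tags[oid]:
            if tag not in current_tags:
                counts[tag] = counts.get(tag, 0) + 1
    # keep the most frequent, then present them alphabetically
    top = sorted(counts.items(), key=lambda tc: tc[1])[-limit:]
    return sorted(top)

print(related_tags_sketch(['python', 'zodb']))    # [('lucene', 1)]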
Example #9
0
    def _generate_document_for_user(self, user, existing):
        fields = []    

        # fields are in the form of [name, value, isStored, isIndexed, isTokenized]
        fields.append(['karma', str(user.get_karma_score()).zfill(6), False, True, False])
        fields.append(['u_name', user.display_name(), False, True, True])
        fields.append(['date', str(get_unix_timestamp(user.get_user_data().member_since())), False, True, False])    # index it so that we can sort by it            
        fields.append(['oid', self._encode_oid(user._p_oid), True, True, False])
        fields.append(['type', 'User', True, True, True])                   

        # create the main text to index (bio + email addresses + name)
        tidb = get_tagged_item_database()
        tags = " ".join(tidb.get_tags(user._p_oid))
        text = "%s %s %s %s %s" % (user.bio, user.location, " ".join(user.email_list()), user.display_name(), tags)
        fields.append(['text', text, False, True, True])    

        # create the preview text    
        preview = self._generate_preview_text(user.bio)
        fields.append(['preview', preview, True, False, False])

        # send the document for indexing        
        self._queue_document(fields, existing)
Example #10
0
def publish_stats():
    """Call after update_stats to publish to a wiki page."""
    
    _stats_group = 'community-general'
    _stats_page = 'user_statistics'
    _stats_author = 'admin'
    
    from datetime import datetime
    from qon.ui.blocks.util import format_datetime_utc_ymd
    
    group_db = get_group_database()
    group = group_db.get_group(_stats_group)
    if group:
        wiki = group.get_wiki()
        try:
            page = wiki.pages[_stats_page]
        except KeyError:
            return
        
        # build new text to publish stats
        # date, num_users, num_groups, num_topics, num_comments, num_pages, num_revisions,
        # total_bank, total_user_pos, total_user_neg, total_topic_pos, total_topic_neg,
        # total_comment_pos, total_comment_neg, total_page_pos, total_page_neg
        # jimc: added total PMs, group PMs, and group PM recipients; total increases from 15 to 18
        # added total tags, total tagged items for 20 total fields
        stats_fmt = ['%d' for i in range(20)]       # fields
        stats_fmt = ','.join(stats_fmt)             # comma-separated
        
        # indent and date is first field
        stats_fmt = '    %s,' % format_datetime_utc_ymd(datetime.utcnow()) + stats_fmt + '\n'
        
        # fill in stats
        list_db = get_list_database()
        group_stats = list_db.group_stats(force=True, ignore_out_of_date=False)
        #group_stats = list_db.group_stats(ignore_out_of_date=True)

        tidb = get_tagged_item_database()
        tags_db = get_tags_database()
        total_items_tagged = len(tidb)
        total_tags = len(tags_db.tags)

        stats = stats_fmt % (
            group_stats['users'],
            group_stats['groups'],
            group_stats['topics'],
            group_stats['comments'],
            group_stats['pages'],
            group_stats['revisions'],
            list_db.karma_total_bank(),
            list_db.karma_total_user()[0],
            list_db.karma_total_user()[1],
            list_db.karma_total_topic()[0],
            list_db.karma_total_topic()[1],
            list_db.karma_total_comment()[0],
            list_db.karma_total_comment()[1],
            list_db.karma_total_page()[0],
            list_db.karma_total_page()[1],
            # total pms, for groups, and number of group pm recipients
            list_db.total_users_pms(),
            group_stats['total_group_pms'],
            group_stats['total_group_pm_recipients'],
            total_tags,
            total_items_tagged,
            )
        
        # author is admin user
        author = get_user_database().get_user(_stats_author)
        
        # get current revision
        raw = page.versions[-1].get_raw()
        
        # append stats line
        raw += stats
        
        # set new revision - will commit
        qon.api.wiki_edit_page(wiki, page, page.name, author, page.versions[-1].title, raw)
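The stats line is built as a comma-separated run of twenty %d slots with the indented UTC date as the first field, then appended to the page's current raw text. A tiny sketch of the same construction with three slots and a literal date standing in for format_datetime_utc_ymd:

stats_fmt = ','.join(['%d' for i in range(3)])              # '%d,%d,%d'
stats_fmt = '    %s,' % '2006-01-31' + stats_fmt + '\n'     # indented, date is the first field
print(repr(stats_fmt % (10, 20, 30)))                       # '    2006-01-31,10,20,30\n'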