# Example #1
def rebuild_all_caches():
    """Rebuild the page caches for every wiki in the farm.

    Opens a dummy request, walks the full wiki list, rebuilds each
    wiki's caches (printing progress), then disconnects from the DB.
    """
    req = request.RequestDummy()
    for wiki_name in wikiutil.getWikiList(req):
        # Point the request at this wiki before gathering its pages.
        req.switch_wiki(wiki_name)
        plist = wikiutil.getPageList(req)
        maintenance.buildCaches(wiki_name, plist, doprint=True)
    req.db_disconnect()
# Example #2
def rename_old_user_pages(request):
    from Sycamore import user
    for wikiname in wikiutil.getWikiList(request):
        request.switch_wiki(wikiname)
        user_pages = _get_user_pages(request) 
        num_user_pages = len(user_pages)
        n = 0
        for page in user_pages:
            new_user_pagename = config.user_page_prefix + page.proper_name()
            new_user_page = Page(new_user_pagename, request)
            if new_user_page.exists():
                # something crazzzzy is going on
                continue 
            old_pagename_propercased = page.proper_name()
            d = {'new_name': new_user_pagename.lower(), 'new_propercased_name': new_user_pagename,
                'old_pagename': page.page_name, 'wiki_id': request.config.wiki_id,
                'wiki_name': request.config.wiki_name, 'latest_ed_time': page.mtime(),
                'old_propercased_name': page.proper_name()}

            print page.page_name, '->', new_user_pagename
            _user_page_move(request, copy(d))
            _user_page_redirect(request, d)
            n += 1
# Example #3
    def process(self):
        # Runs a regexp search over page titles and page text, filling
        # self.title_results and self.text_results with searchResult
        # objects scored by match density, then sorts, windows and
        # normalizes the scores.
        wiki_name = self.request.config.wiki_name
        if not self.wiki_global:
            # Local search: only the current wiki.
            wikis = [wiki_name]
        else:
            # Global search: every wiki in the farm.
            wikis = wikiutil.getWikiList(self.request)

        for wiki_name in wikis: 
            # NOTE(review): getPageList() is called without switching
            # self.request to wiki_name first, so in the global case every
            # iteration may scan the same page list -- confirm the switch
            # happens elsewhere (e.g. inside Page via wiki_name=).
            pagelist = wikiutil.getPageList(self.request)
            matches = []
            for pagename in pagelist:
                page = Page(pagename, self.request, wiki_name=wiki_name)
                text = page.get_raw_body()
                text_matches = find_num_matches(self.regexp, text)
                if text_matches:
                    # Score = matches per word of body text, as a percent.
                    percentage = (text_matches*1.0/len(text.split()))*100
                    self.text_results.append(searchResult(page.page_name, text,
                                                          percentage,
                                                          page.page_name,
                                                          wiki_name)) 
              
                title = page.page_name
                title_matches = find_num_matches(self.regexp, title)
                if title_matches:
                      # Score = matches per word of the title, as a percent.
                      percentage = (title_matches*1.0/len(title.split()))*100
                      self.title_results.append(searchResult(title, title,
                                                             percentage,
                                                             page.page_name,
                                                           wiki_name))
            # sort the title and text results by relevancy
            # (descending percentage; Python 2 cmp-style comparator).
            self.title_results.sort(lambda x,y: cmp(y.percentage,
                                                    x.percentage))
            self.text_results.sort(lambda x,y: cmp(y.percentage,
                                                   x.percentage))

            # normalize the percentages.
            # still gives shit, but what can you expect from regexp?
            # install xapian!
            # NOTE(review): this slicing/normalizing runs INSIDE the wiki
            # loop, so for a global search the accumulated result lists are
            # re-sliced and re-divided on every iteration -- results from
            # earlier wikis can be truncated and scaled repeatedly.  Looks
            # like it was written for the single-wiki case; verify.
            if self.title_results:
                i = 0
                max_title_percentage = self.title_results[0].percentage
                # Keep only the current display window of results.
                self.title_results = self.title_results[
                    self.t_start_loc:self.t_start_loc+self.num_results+1]
                for title in self.title_results:
                    if i > self.num_results:
                        break
                    # Rescale so the top hit is 100.
                    title.percentage = title.percentage/max_title_percentage
                    title.percentage = title.percentage*100
                    i += 1

            if self.text_results: 
                i = 0 
                max_text_percentage = self.text_results[0].percentage
                # Keep only the current display window of results.
                self.text_results = self.text_results[
                    self.p_start_loc:self.p_start_loc+self.num_results+1]
                for text in self.text_results:
                    if i > self.num_results:
                        break
                    # Rescale so the top hit is 100.
                    text.percentage = text.percentage/max_text_percentage
                    text.percentage = text.percentage*100
                    i += 1
# Example #4
__directory__ = os.path.dirname(__file__)
sys.path.extend([os.path.abspath(os.path.join(__directory__, '..', '..', '..'))])

from Sycamore import wikiutil, config, request, caching, wikidb, maintenance, buildDB, wikiacl
from Sycamore.Page import Page
from Sycamore.buildDB import FlatPage

# Open a dummy request and collect the (id, name) row for every
# registered user; the list is reused for each wiki below.
req = request.RequestDummy()

req.cursor.execute("SELECT id, name from users")
result = req.cursor.fetchall()
userlist = list(result)

for wikiname in wikiutil.getWikiList(req):
   req.switch_wiki(wikiname)
   print wikiname
   for thisuser_id, thisuser_name in userlist:
      print "  ", thisuser_name.encode(config.charset)
      req.cursor.execute("SELECT usersEdits.editTime from (SELECT allPages.editTime from allPages where userEdited=%(thisuser)s and wiki_id=%(thiswiki)s) as usersEdits order by usersEdits.editTime asc limit 1;", {'thisuser': thisuser_id, 'thiswiki': req.config.wiki_id})
      result = req.cursor.fetchone()
      if result:
         edit_time = result[0]
      else:
         edit_time = None
      
      if edit_time:
          req.cursor.execute("SELECT user_name from userWikiInfo where user_name=%(thisuser_name)s and wiki_id=%(thiswiki)s", {'thisuser_name':thisuser_name, 'thiswiki': req.config.wiki_id})
          has_user_info_on_wiki = req.cursor.fetchone()
          if has_user_info_on_wiki:
# Example #5
# Merge the shared ('global') base pages into the full page map.
all_pages.update(init_basic_pages('global'))

if __name__ == '__main__':
    from Sycamore import request
    # building for first time..don't try to load config from db
    req = request.RequestDummy(process_config=False)  
    cursor = req.cursor
    init_db(cursor)
    create_tables(cursor)
    create_views(cursor)
    create_config(req)
    create_other_stuff(req)
    print "inserting basic pages..."
    insert_pages(req)
    build_search_index()
    setup_admin(req)
    
    req.db_disconnect()  # commit before building caches
    
    req = request.RequestDummy(process_config=True)
    wiki_list = wikiutil.getWikiList(req)
    for wiki_name in wiki_list:
        req.config = config.Config(wiki_name, req, process_config=True)
        plist = wikiutil.getPageList(req)
        maintenance.buildCaches(wiki_name, plist, doprint=True)
    req.db_disconnect()

    print "-------------"
    print ("All done!  Now, start up the sycamore server and "
          "create the admin account!")