def process(self):
    import socket, cPickle
    # Pickle the search needle and quote it so it survives the
    # line-oriented wire protocol (the encoding is newline-free).
    encoded_terms = wikiutil.quoteFilename(
        cPickle.dumps(self.needle_as_entered, True))
    server_address, server_port = config.remote_search
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((server_address, server_port))
    output = s.makefile('w', 0)  # unbuffered
    output.write('F\n')
    if self.wiki_global:
        # '*' asks the search server to search across all wikis.
        output.write('*\n\n')
    else:
        output.write('%s\n\n' % self.request.config.wiki_name)
    # 'S' = search command, followed by the paging start offsets.
    output.write('S\n%s\n%s\n' % (self.p_start_loc, self.t_start_loc))
    output.write('%s\n' % encoded_terms)
    output.write('\n')
    output.write('E\n\n')  # end of request
    output.close()

    # The server replies with a single line holding the quoted,
    # pickled results tuple.
    input = s.makefile('r', 0)
    for line in input:
        results_encoded = line.strip()
        break
    title_results, text_results, spelling_suggest = cPickle.loads(
        wikiutil.unquoteFilename(results_encoded))
    s.close()

    self.title_results = title_results
    self.text_results = text_results
    self._spelling_suggestion = spelling_suggest
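# A hedged, standalone sketch of the request framing process() writes.
# wikiutil.quoteFilename is assumed to be a reversible, newline-free
# ASCII encoding; base64 stands in for it here so the sketch runs
# outside the wiki codebase (frame_search_request is illustrative, not
# part of the real API).
import cPickle, base64

def frame_search_request(needle, wiki_name, p_start, t_start):
    wiki_field = wiki_name or '*'   # '*' = global, all-wikis search
    encoded = base64.urlsafe_b64encode(cPickle.dumps(needle, True))
    return ('F\n%s\n\n'             # F: select wiki
            'S\n%s\n%s\n'           # S: search, with paging offsets
            '%s\n'
            '\n'
            'E\n\n'                 # E: end of request
            % (wiki_field, p_start, t_start, encoded))

# For example, frame_search_request('apple pie', None, 0, 0) yields the
# byte sequence process() would send for a global search.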
def init_basic_pages(prefix='common'):
    """Initializes basic pages from the share/initial_pages directory."""
    pages = {}
    # We do the basic database population here.  Directory entries are
    # stored quoted on disk, so they are unquoted to page names (and
    # re-quoted when building paths).
    page_list = map(unquoteFilename,
                    filter(lambda x: not x.startswith('.'),
                           os.listdir(os.path.join(share_directory,
                                                   'initial_pages',
                                                   prefix))))
    for pagename in page_list:
        page_loc = os.path.join(share_directory, 'initial_pages', prefix,
                                quoteFilename(pagename))
        # Required: the page's wiki text.
        page_text_file = open(os.path.join(page_loc, "text"))
        page_text = page_text_file.read()
        page_text_file.close()
        pages[pagename] = FlatPage(text=page_text)

        # Optional: attached files, one file per attachment.
        if os.path.exists(os.path.join(page_loc, "files")):
            file_list = map(unquoteFilename,
                            filter(lambda x: not x.startswith('.'),
                                   os.listdir(os.path.join(page_loc,
                                                           "files"))))
            for filename in file_list:
                file = open(os.path.join(page_loc, "files",
                                         quoteFilename(filename)))
                file_content = file.read()
                file.close()
                pages[pagename].files.append((filename, file_content))

        # Optional: an access control list for the page.
        if os.path.exists(os.path.join(page_loc, "acl")):
            file = open(os.path.join(page_loc, "acl"), "r")
            text = file.read()
            file.close()
            pages[pagename].acl = parseACL(text)

    return pages
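# Hedged usage sketch for init_basic_pages().  The on-disk layout it
# reads appears to be share/initial_pages/<prefix>/<quoted page name>/
# with a required "text" file and optional "files/" and "acl" entries.
# FlatPage is assumed to be a simple container with .text, .files and
# .acl attributes, as used above.
pages = init_basic_pages(prefix='common')
for name, page in pages.items():
    print '%s: %d bytes of text, %d attached file(s)' % (
        name, len(page.text), len(page.files))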
def remove_from_remote_index(page):
    # Ask the remote search server to drop this page from its index,
    # using the same framing as the search client: 'F' selects the
    # wiki, 'D' deletes a page, 'E' ends the request.
    server_address, server_port = config.remote_search
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((server_address, server_port))
    output = s.makefile('w', 0)  # unbuffered
    output.write('F\n')
    output.write('%s\n\n' % page.request.config.wiki_name)
    output.write('D\n')
    output.write('%s\n' % wikiutil.quoteFilename(page.page_name))
    output.write('\n')
    output.write('E\n\n')  # end of request
    output.close()
    s.close()
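# The index server appears to speak a small line protocol: 'F' selects
# a wiki, then a command letter ('S' searches, 'D' deletes), then 'E'
# terminates the request.  A hedged generic framing helper along the
# lines of the two clients above might look like this (send_command is
# illustrative, not part of the real codebase):
import socket

def send_command(address, wiki_name, command, fields):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(address)
    out = s.makefile('w', 0)        # unbuffered: writes hit the wire now
    out.write('F\n%s\n\n' % wiki_name)
    out.write('%s\n' % command)
    for field in fields:
        out.write('%s\n' % field)
    out.write('\nE\n\n')
    out.close()
    s.close()

# e.g. send_command(config.remote_search, 'mywiki', 'D', [quoted_name])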
def update(self, content, links):
    links = self._consider_talk_link(links)
    cached_time = time.time()
    # Store the rendered page in the database cache.
    self.request.cursor.execute(
        """UPDATE curPages SET cachedText=%(cached_content)s,
                  cachedTime=%(cached_time)s
           WHERE name=%(key)s AND wiki_id=%(wiki_id)s""",
        {'cached_content': wikidb.dbapi.Binary(content),
         'cached_time': cached_time,
         'key': self.key,
         'wiki_id': self.request.config.wiki_id}, isWrite=True)
    # Rebuild the page's outgoing-link rows from scratch.
    self.request.cursor.execute(
        """DELETE FROM links
           WHERE source_pagename=%(key)s AND wiki_id=%(wiki_id)s""",
        {'key': self.key, 'wiki_id': self.request.config.wiki_id},
        isWrite=True)
    for link in links:
        self.request.cursor.execute(
            """INSERT INTO links (source_pagename, destination_pagename,
                                  destination_pagename_propercased, wiki_id)
               VALUES (%(key)s, %(link)s, %(link_propercased)s,
                       %(wiki_id)s)""",
            {'key': self.key, 'link': link.lower(),
             'link_propercased': link,
             'wiki_id': self.request.config.wiki_id}, isWrite=True)

    # Refresh the cached pageInfo object in memcache and in the
    # per-request cache.
    page_info = pageInfo(Page(self.key, self.request), get_from_cache=False,
                         cached_content=content, cached_time=cached_time)
    text = wikidb.binaryToString(content)
    page_info.cached_text = (text, cached_time)
    if config.memcache:
        if self.request.set_cache:
            # set() overwrites unconditionally.
            self.request.mc.set("page_info:%s" % wikiutil.mc_quote(self.key),
                                page_info)
        else:
            # add() only stores if the key is absent, so we never
            # clobber an entry written by a concurrent request.
            self.request.mc.add("page_info:%s" % wikiutil.mc_quote(self.key),
                                page_info)
    self.request.req_cache['page_info'][
        (wikiutil.quoteFilename(self.key),
         self.request.config.wiki_id)] = page_info
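# Hedged illustration of the set()/add() distinction update() relies
# on, using the python-memcached client (assumes a memcached daemon on
# localhost; the key and values are illustrative):
import memcache

mc = memcache.Client(['127.0.0.1:11211'])
mc.set('page_info:Front_Page', 'v1')   # unconditional overwrite
mc.add('page_info:Front_Page', 'v2')   # no-op here: key already exists
print mc.get('page_info:Front_Page')   # -> 'v1'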
def process_search(needles, wiki_name, client, p_start_loc, t_start_loc):
    global db_location, db_files_moving
    req = request.RequestDummy()
    if wiki_name:
        req.switch_wiki(wiki_name)
        wiki_global = False
    else:
        wiki_global = True

    # If the index files are being swapped out, wait up to 30 seconds
    # for the move to finish; past that, send back an empty response.
    i = 0
    while db_files_moving:
        i += 1
        time.sleep(1)
        if i > 30:
            output = client.makefile('w', 0)
            output.write("\n\nE\n\n")
            req.db_disconnect()
            return

    # We're sent the raw needle, so we must prepare it.
    prepared_needles = search.prepare_search_needle(needles)
    thesearch = search.XapianSearch(prepared_needles, req,
                                    db_location=db_location,
                                    wiki_global=wiki_global,
                                    p_start_loc=p_start_loc,
                                    t_start_loc=t_start_loc)
    thesearch.process()
    spelling_suggestion = thesearch.spelling_suggestion(needles)
    results = (thesearch.title_results, thesearch.text_results,
               spelling_suggestion)

    # Ship the pickled results back as a single quoted line, followed
    # by the 'E' end marker.
    thesearch_encoded = wikiutil.quoteFilename(cPickle.dumps(results))
    output = client.makefile('w', 0)
    output.write(thesearch_encoded)
    output.write("\n\nE\n\n")
    req.db_disconnect()
    del req
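# Hedged sketch of the dispatcher side that would feed process_search():
# read the 'F' wiki selector and the command letter off the client
# socket, then route 'S' requests.  The real server's parsing loop is
# not shown in this file, so every name here is illustrative.
def handle_connection(client):
    inp = client.makefile('r', 0)
    assert inp.readline().strip() == 'F'
    wiki_field = inp.readline().strip()
    inp.readline()                      # blank separator line
    wiki_name = ''
    if wiki_field != '*':               # '*' means a global search
        wiki_name = wiki_field
    command = inp.readline().strip()
    if command == 'S':
        p_start = int(inp.readline())
        t_start = int(inp.readline())
        needles = cPickle.loads(
            wikiutil.unquoteFilename(inp.readline().strip()))
        process_search(needles, wiki_name, client, p_start, t_start)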