Example #1
def execute(macro, needle):
    request = macro.request
    _ = request.getText

    # With whitespace argument, show error message like the one used in the search box
    # TODO: search should implement those error messages for clients
    if needle.isspace():
        err = _(
            'Please use a more selective search term instead of '
            '{{{"%s"}}}',
            wiki=True) % needle
        return '<span class="error">%s</span>' % err

    needle = needle.strip()

    # Search the pages and return the results
    if needle == 'all':
        results = search.searchPages(request,
                                     'CategoryStack -StackList -StackTemplate',
                                     sort='page_name')
    else:
        lookfor = needle + ' CategoryStack -StackList -StackTemplate'
        results = search.searchPages(request, lookfor, sort='page_name')
    return pageListWithContext(results,
                               macro,
                               request,
                               macro.formatter,
                               paging=False)

    ret = []
    for result in results:
        pass

    return string.join(ret)
Example #2
def execute(macro, needle):
    request = macro.request
    _ = request.getText

    # With whitespace argument, show error message like the one used in the search box
    # TODO: search should implement those error messages for clients
    if needle.isspace():
        err = _('Please use a more selective search term instead of '
                '{{{"%s"}}}', wiki=True) % needle
        return '<span class="error">%s</span>' % err

    needle = needle.strip()
 
    # Search the pages and return the results
    if needle == 'all':
        results = search.searchPages(request, 'CategoryStack -StackList -StackTemplate', sort='page_name')
    else:
        lookfor = needle + ' CategoryStack -StackList -StackTemplate'
        results = search.searchPages(request, lookfor, sort='page_name')
    return pageListWithContext(results, macro, request, macro.formatter, paging=False)


    ret = []
    for result in results:
      pass

    return string.join(ret)
Example #3
def macro_AllTutorialsSorted(macro, needle):
    request = macro.request
    _ = request.getText

    # if no args given, invoke "classic" behavior
    if needle is None:
        return search_box("fullsearch", macro)

    # With empty arguments, simulate title click (backlinks to page)
    elif needle == '':
        needle = '"%s"' % macro.formatter.page.page_name

    # With whitespace argument, show error message like the one used in the search box
    # TODO: search should implement those error messages for clients
    elif needle.isspace():
        err = _('Please use a more selective search term instead of '
                '{{{"%s"}}}', wiki=True) % needle
        return '<span class="error">%s</span>' % err

    needle = needle.strip()

    # Search the pages for stacks and return the results
    results = search.searchPages(request, 'CategoryStack -StackList -StackTemplate', sort='page_name')
    getStackInfo(results, macro, request, macro.formatter, paging=False)

    # Search the pages for templates and return the results
    results = search.searchPages(request, needle, sort='page_name')
    return pageListWithContext(results, macro, request, macro.formatter, paging=False)


    ret = []
    for result in results:
      pass

    return string.join(ret)
Example #4
def getPageListFromSearch(request, args):
    # If called with empty or no argument, default to regex search for .+, the full page list.
    needle = wikiutil.get_unicode(request, args, 'needle', u'regex:.+')

    # With whitespace argument, return same error message as FullSearch
    #if not needle.strip():
    #    err = 'Please use a more selective search term instead of {{{"%s"}}}' % needle
    #    return '<span class="error">%s</span>' % err

    # Return a title search for needle, sorted by name.
    try:
        results = search.searchPages(request, needle,
                                     titlesearch=1, case=0,
                                     sort='page_name')
        pages = results.hits  # pageList(macro.request, macro.formatter, paging=False)
        pages = [hit.page_name for hit in pages]
    except ValueError:
        # same error as in MoinMoin/action/fullsearch.py, keep it that way!
        """
        ret = ''.join([macro.formatter.text('<<PageList('),
                      _('Your search query {{{"%s"}}} is invalid. Please refer to '
                        'HelpOnSearching for more information.', wiki=True,
                        percent=True) % wikiutil.escape(needle),
                      macro.formatter.text(')>>')])
        """
        pages = []
    return pages
Example #5
def execute(macro, needle):
    request = macro.request
    _ = request.getText

    # if no args given, invoke "classic" behavior
    if needle is None:
        return search_box("fullsearch", macro)

    # With empty arguments, simulate title click (backlinks to page)
    elif needle == '':
        needle = '"%s"' % macro.formatter.page.page_name

    # With whitespace argument, show error message like the one used in the search box
    # TODO: search should implement those error messages for clients
    elif needle.isspace():
        err = _(
            'Please use a more selective search term instead of '
            '{{{"%s"}}}',
            wiki=True) % needle
        return '<span class="error">%s</span>' % err

    needle = needle.strip()

    # Search the pages and return the results
    results = search.searchPages(request, needle, sort='page_name')

    return results.pageList(request, macro.formatter, paging=False)
Example #6
    def xmlrpc_searchPagesEx(self, query_string, search_type, length, case, mtime, regexp):
        """
        Searches pages for query_string - extended version for compatibility

        This function, in contrast to searchPages(), doesn't return HTML-formatted data.

        @param query_string: term to search for
        @param search_type: "text" or "title" search
        @param length: length of context preview (in characters)
        @param case: should the search be case sensitive?
        @param mtime: only output pages modified after mtime
        @param regexp: should the query_string be treated as a regular expression?
        @return: (page name, context preview, page url)
        """
        from MoinMoin import search
        from MoinMoin.formatter.text_plain import Formatter

        kwargs = {"sort": "page_name", "case": case, "regex": regexp}
        if search_type == "title":
            kwargs["titlesearch"] = True

        results = search.searchPages(self.request, query_string, **kwargs)
        results.formatter = Formatter(self.request)
        results.request = self.request

        return [(self._outstr(hit.page_name),
                 self._outstr(results.formatContext(hit, length, 1)),
                 self.request.getQualifiedURL(hit.page.url(self.request, {})))
                for hit in results.hits]
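
A minimal client-side sketch of calling the extended method above over XML-RPC with the standard library. The wiki URL, the query, and the argument values are placeholders; the ?action=xmlrpc2 endpoint and the searchPagesEx method name follow the usual MoinMoin conventions.

import xmlrpclib  # xmlrpc.client on Python 3

# Placeholder endpoint; point this at the target wiki.
wiki = xmlrpclib.ServerProxy("http://example.org/mywiki?action=xmlrpc2")

# searchPagesEx(query_string, search_type, length, case, mtime, regexp);
# 0 is used as a placeholder for mtime here.
hits = wiki.searchPagesEx(u"HelpOn", "title", 60, False, 0, False)
for page_name, context, url in hits:
    print page_name, url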
Example #7
def execute(macro, args):
    _ = macro._
    case = 0

    # If called with empty or no argument, default to regex search for .+, the full page list.
    needle = wikiutil.get_unicode(macro.request, args, "needle", u"regex:.+")

    # With whitespace argument, return same error message as FullSearch
    if not needle.strip():
        err = _('Please use a more selective search term instead of {{{"%s"}}}', wiki=True) % needle
        return '<span class="error">%s</span>' % err

    # Return a title search for needle, sorted by name.
    try:
        results = search.searchPages(macro.request, needle, titlesearch=1, case=case, sort="page_name")
        ret = results.pageList(macro.request, macro.formatter, paging=False)
    except ValueError:
        # same error as in MoinMoin/action/fullsearch.py, keep it that way!
        ret = "".join(
            [
                macro.formatter.text("<<PageList("),
                _(
                    'Your search query {{{"%s"}}} is invalid. Please refer to ' "HelpOnSearching for more information.",
                    wiki=True,
                    percent=True,
                )
                % wikiutil.escape(needle),
                macro.formatter.text(")>>"),
            ]
        )
    return ret
Example #8
def execute(macro, args):
    _ = macro._
    case = 0

    # If called with empty or no argument, default to regex search for .+, the full page list.
    needle = wikiutil.get_unicode(macro.request, args, 'needle', u'regex:.+')

    # With whitespace argument, return same error message as FullSearch
    if not needle.strip():
        err = _(
            'Please use a more selective search term instead of {{{"%s"}}}',
            wiki=True) % needle
        return '<span class="error">%s</span>' % err

    # Return a title search for needle, sorted by name.
    try:
        results = search.searchPages(macro.request,
                                     needle,
                                     titlesearch=1,
                                     case=case,
                                     sort='page_name')
        ret = results.pageList(macro.request, macro.formatter, paging=False)
    except ValueError:
        # same error as in MoinMoin/action/fullsearch.py, keep it that way!
        ret = ''.join([
            macro.formatter.text('<<PageList('),
            _(
                'Your search query {{{"%s"}}} is invalid. Please refer to '
                'HelpOnSearching for more information.',
                wiki=True,
                percent=True) % wikiutil.escape(needle),
            macro.formatter.text(')>>')
        ])
    return ret
Example #9
def execute(macro, args):
    _ = macro._
    case = 0

    # If called with empty or no argument, default to regex search for .+, the full page list.
    needle = wikiutil.get_unicode(macro.request, args, 'needle', u'regex:.+')

    # With whitespace argument, return same error message as FullSearch
    if not needle.strip():
        err = _('Please use a more selective search term instead of {{{"%s"}}}', wiki=True) % needle
        return '<span class="error">%s</span>' % err

    needle = needle.strip()

    # Return a title search for needle, sorted by name.
    try:
        results = search.searchPages(macro.request, needle,
                                     titlesearch=1, case=case,
                                     sort='page_name')
        fmt = macro.formatter.__class__(macro.request, is_included=True)
        fmt._base_depth = macro.formatter._base_depth
        ret = results.pageList(macro.request, fmt, paging=False,
                               excluded=macro.formatter.page.getPageLinks(macro.request))
    except ValueError:
        # same error as in MoinMoin/action/fullsearch.py, keep it that way!
        ret = ''.join([macro.formatter.text('<<PageList('),
                      _('Your search query {{{"%s"}}} is invalid. Please refer to '
                        'HelpOnSearching for more information.', wiki=True,
                        percent=True) % wikiutil.escape(needle),
                      macro.formatter.text(')>>')])
    return ret
Example #10
def execute(macro, needle):
    request = macro.request
    _ = request.getText

    # if no args given, invoke "classic" behavior
    if needle is None:
        return search_box("fullsearch", macro)

    # With empty arguments, simulate title click (backlinks to page)
    elif needle == '':
        needle = '"%s"' % macro.formatter.page.page_name

    # With whitespace argument, show error message like the one used in the search box
    # TODO: search should implement those error messages for clients
    elif needle.isspace():
        err = _('Please use a more selective search term instead of '
                '{{{"%s"}}}', wiki=True) % needle
        return '<span class="error">%s</span>' % err

    needle = needle.strip()

    # Search the pages and return the results
    results = search.searchPages(request, needle, sort='page_name')

    fmt = macro.formatter.__class__(request, is_included=True)
    fmt._base_depth = macro.formatter._base_depth
    return results.pageList(request, fmt, paging=False,
                            excluded=macro.formatter.page.getPageLinks(request))
Example #11
def execute(macro, needle):
    request = macro.request
    _ = request.getText

    # if no args given, invoke "classic" behavior
    if needle is None:
        return macro._m_search("fullsearch")

    # With empty arguments, simulate title click (backlinks to page)
    elif needle == '':
        needle = '"%s"' % macro.formatter.page.page_name

    # With whitespace argument, show error message like the one used in the search box
    # TODO: search should implement those error messages for clients
    elif needle.isspace():
        err = _('Please use a more selective search term instead of '
                '{{{"%s"}}}') % needle
        return '<span class="error">%s</span>' % err

    needle = needle.strip()

    # Search the pages and return the results
    query = search.QueryParser().parse_query(needle)
    results = search.searchPages(request, query)
    results.sortByPagename()

    return results.pageList(request, macro.formatter)
Example #12
def getPages(pagename, request):

    "Return the links minus category links for 'pagename' using the 'request'."

    query = search.QueryParser().parse_query('category:%s' % pagename)
    if isMoin15():
        results = search.searchPages(request, query)
        results.sortByPagename()
    else:
        results = search.searchPages(request, query, "page_name")

    cat_pattern = getCategoryPattern(request)
    pages = []
    for page in results.hits:
        if not cat_pattern.match(page.page_name):
            pages.append(page)
    return pages
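
A brief, hypothetical usage sketch for the helper above; it assumes a live MoinMoin request object and uses a placeholder category name.

# List the member pages of a category, excluding other category pages.
member_pages = getPages(u"CategoryHomepage", request)
member_names = [p.page_name for p in member_pages]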
Example #13
 def xmlrpc_searchPages(self, query_string):
     from MoinMoin import search
     query = search.QueryParser().parse_query(query_string)
     results = search.searchPages(self.request, query)
     results.formatter = text_html.Formatter(self.request)
     results.request = self.request
     return [(self._outstr(hit.page_name),
              self._outstr(results.formatContext(hit, 180, 1)))
             for hit in results.hits]
Example #14
def _page_search(request, pagename, include_self=False):
    # XXX error handling!
    searchresult = search.searchPages(request, 't:"%s"' % pagename)

    pages = [p.page_name for p in searchresult.hits]
    pages.sort()
    if include_self:
        pages[0:0] = [pagename]
    return pages
Example #15
 def contains(what):
     if what in hits_dict:
         hits = hits_dict[what]
     else:
         parser = search.QueryParser(regex=1)
         query = parser.parse_query(what)
         results = search.searchPages(macro.request, query)
         hits = [h.page_name for h in results.hits]
         hits_dict[what] = hits
     return page in hits
Example #16
 def xmlrpc_searchPages(self, query_string):
     """ Searches pages for query_string.
         Returns a list of tuples (foundpagename, context)
     """
     from MoinMoin import search
     results = search.searchPages(self.request, query_string)
     results.formatter = self.request.html_formatter
     results.request = self.request
     return [(self._outstr(hit.page_name),
              self._outstr(results.formatContext(hit, 180, 1)))
             for hit in results.hits]
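
The matching client call, sketched with the standard-library XML-RPC client; the URL and query are placeholders, and each hit comes back as a (pagename, context) tuple as the docstring describes.

import xmlrpclib  # xmlrpc.client on Python 3

wiki = xmlrpclib.ServerProxy("http://example.org/mywiki?action=xmlrpc2")
for page_name, context in wiki.searchPages(u"CategoryHomepage"):
    print page_name, "->", context[:80]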
Example #17
 def xmlrpc_searchPages(self, query_string):
     """
     Searches pages for query_string.
     Returns a list of tuples (foundpagename, context)
     """
     from MoinMoin import search
     results = search.searchPages(self.request, query_string)
     results.formatter = self.request.html_formatter
     results.request = self.request
     return [(self._outstr(hit.page_name),
              self._outstr(results.formatContext(hit, 180, 1)))
             for hit in results.hits]
Example #18
    def searchpackage(self, request, searchkey):
        """ Search MoinMoin for the string specified and return a list of
        matching pages, provided they are not system pages and not
        present in the underlay.

        @param request: current request
        @param searchkey: string to search for
        @rtype: list
        @return: list of pages matching searchkey
        """

        pagelist = searchPages(request, searchkey)

        titles = []
        for title in pagelist.hits:
            if not wikiutil.isSystemPage(request, title.page_name) or not title.page.getPageStatus()[0]:
                titles.append(title.page_name)
        return titles
Example #20
def page_list(request):
    from MoinMoin import search
    name = request.form.get("pagename",[""])[0]
    if name:
        searchresult = search.searchPages(
            request,
            search.QueryParser().parse_query('t:"%s"' % name))
        
        pages = [p.page_name for p in searchresult.hits]
    else:
        pages = [name]
    request.http_headers()
    request.write(
        '''<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
 <head>
  <title>Insert Page Link</title>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
  <meta content="noindex,nofollow" name="robots">
 </head>
 <body scroll="no" style="OVERFLOW: hidden">
  <table height="100%%" cellSpacing="0" cellPadding="0" width="100%%" border="0">
   <tr>
    <td>
     <table cellSpacing="0" cellPadding="0" align="center" border="0">
      <tr>
       <td>
       <span fckLang="PageDlgName">Page Name</span><br>
       <select id="txtName" size="1">
       %s
       </select>
     </td>
    </tr>
   </table>
  </td>
 </tr>
</table>
</body>
</html>
''' % "".join(["<option>%s</option>\n" % p for p in pages]))
Example #21
    def _macro_PageList(self, needle):
        _ = self._
        literal = 0
        case = 0

        # If called with empty or no argument, default to regex search for .+,
        # the full page list.
        if not needle:
            needle = 'regex:.+'

        # With whitespace argument, return same error message as FullSearch
        elif needle.isspace():
            err = _('Please use a more selective search term instead of '
                    '{{{"%s"}}}') %  needle
            return '<span class="error">%s</span>' % err
            
        # Return a title search for needle, sorted by name.
        query = search.QueryParser(literal=literal, titlesearch=1,
                                   case=case).parse_query(needle)
        results = search.searchPages(self.request, query)
        results.sortByPagename()
        return results.pageList(self.request, self.formatter)
Example #22
def page_list(request):
    from MoinMoin import search
    name = request.values.get("pagename", "")
    if name:
        searchresult = search.searchPages(request, 't:"%s"' % name)
        pages = [p.page_name for p in searchresult.hits]
    else:
        pages = [name]
    request.write(
        '''<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
 <head>
  <title>Insert Page Link</title>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
  <meta content="noindex,nofollow" name="robots">
 </head>
 <body scroll="no" style="OVERFLOW: hidden">
  <table height="100%%" cellSpacing="0" cellPadding="0" width="100%%" border="0">
   <tr>
    <td>
     <table cellSpacing="0" cellPadding="0" align="center" border="0">
      <tr>
       <td>
       <span fckLang="PageDlgName">Page name</span><br>
       <select id="txtName" size="1">
       %s
       </select>
     </td>
    </tr>
   </table>
  </td>
 </tr>
</table>
</body>
</html>
''' % "".join(["<option>%s</option>\n" % p for p in pages]))
Example #23
def execute(pagename, request, fieldname='value', titlesearch=0, statistic=0):
    _ = request.getText
    titlesearch = checkTitleSearch(request)
    if titlesearch < 0:
        check_surge_protect(request, kick=True)  # get rid of spammer
        return

    advancedsearch = isAdvancedSearch(request)

    form = request.values

    # context is relevant only for full search
    if titlesearch:
        context = 0
    elif advancedsearch:
        context = 180  # XXX: hardcoded context count for advancedsearch
    else:
        context = int(form.get('context', 0))

    # Get other form parameters
    needle = form.get(fieldname, '')
    case = int(form.get('case', 0))
    regex = int(form.get('regex', 0))  # no interface currently
    hitsFrom = int(form.get('from', 0))
    mtime = None
    msg = ''
    historysearch = 0

    # if advanced search is enabled we construct our own search query
    if advancedsearch:
        and_terms = form.get('and_terms', '').strip()
        or_terms = form.get('or_terms', '').strip()
        not_terms = form.get('not_terms', '').strip()
        #xor_terms = form.get('xor_terms', '').strip()
        categories = form.getlist('categories') or ['']
        timeframe = form.get('time', '').strip()
        language = form.getlist('language') or ['']
        mimetype = form.getlist('mimetype') or [0]
        excludeunderlay = form.get('excludeunderlay', 0)
        nosystemitems = form.get('nosystemitems', 0)
        historysearch = form.get('historysearch', 0)

        mtime = form.get('mtime', '')
        if mtime:
            mtime_parsed = None

            # get mtime from known date/time formats
            for fmt in (request.user.datetime_fmt, request.cfg.datetime_fmt,
                        request.user.date_fmt, request.cfg.date_fmt):
                try:
                    mtime_parsed = time.strptime(mtime, fmt)
                except ValueError:
                    continue
                else:
                    break

            if mtime_parsed:
                mtime = time.mktime(mtime_parsed)
            else:
                # didn't work, let's try parsedatetime
                cal = Calendar()
                mtime_parsed, parsed_what = cal.parse(mtime)
                # XXX it is unclear whether the usage of localtime here and in the
                # parsedatetime module is correct. time.localtime is the SERVER's local
                # time and of no relevance to the user (who may be anywhere in the world).
                # mktime is the inverse of localtime, so this may cancel it out again!?
                if parsed_what > 0 and mtime_parsed <= time.localtime():
                    mtime = time.mktime(mtime_parsed)
                else:
                    mtime_parsed = None  # we don't use invalid stuff

            # show info
            if mtime_parsed:
                # XXX mtime_msg is not shown in some cases
                mtime_msg = _(
                    "(!) Only pages changed since '''%s''' are being displayed!",
                    wiki=True) % request.user.getFormattedDateTime(mtime)
            else:
                mtime_msg = _(
                    '/!\\ The modification date you entered was not '
                    'recognized and is therefore not considered for the '
                    'search results!',
                    wiki=True)
        else:
            mtime_msg = None

        word_re = re.compile(r'(\"[\w\s]+"|\w+)')
        needle = ''
        if categories[0]:
            needle += 'category:%s ' % ','.join(categories)
        if language[0]:
            needle += 'language:%s ' % ','.join(language)
        if mimetype[0]:
            needle += 'mimetype:%s ' % ','.join(mimetype)
        if excludeunderlay:
            needle += '-domain:underlay '
        if nosystemitems:
            needle += '-domain:system '
        if and_terms:
            needle += '(%s) ' % and_terms
        if not_terms:
            needle += '(%s) ' % ' '.join(
                ['-%s' % t for t in word_re.findall(not_terms)])
        if or_terms:
            needle += '(%s) ' % ' or '.join(word_re.findall(or_terms))

    # check for sensible search term
    stripped = needle.strip()
    if len(stripped) == 0:
        request.theme.add_msg(
            _(
                'Please use a more selective search term instead '
                'of {{{"%s"}}}',
                wiki=True) % wikiutil.escape(needle), "error")
        Page(request, pagename).send_page()
        return
    needle = stripped

    # Setup for type of search
    if titlesearch:
        title = _('Title Search: "%s"')
        sort = 'page_name'
    else:
        if advancedsearch:
            title = _('Advanced Search: "%s"')
        else:
            title = _('Full Text Search: "%s"')
        sort = 'weight'

    # search the pages
    from MoinMoin.search import searchPages, QueryParser, QueryError
    try:
        query = QueryParser(case=case, regex=regex,
                            titlesearch=titlesearch).parse_query(needle)
    except QueryError:  # catch errors in the search query
        request.theme.add_msg(
            _(
                'Your search query {{{"%s"}}} is invalid. Please refer to '
                'HelpOnSearching for more information.',
                wiki=True,
                percent=True) % wikiutil.escape(needle), "error")
        Page(request, pagename).send_page()
        return

    results = searchPages(request, query, sort, mtime, historysearch)

    # directly show a single hit for title searches
    # this is the "quick jump" functionality if you don't remember
    # the pagename exactly, but just some parts of it
    if titlesearch and len(results.hits) == 1:
        page = results.hits[0]
        if not page.attachment:  # we did not find an attachment
            page = Page(request, page.page_name)
            highlight = query.highlight_re()
            if highlight:
                querydict = {'highlight': highlight}
            else:
                querydict = {}
            url = page.url(request, querystr=querydict)
            request.http_redirect(url)
            return
    if not results.hits:  # no hits?
        f = request.formatter
        querydict = dict(wikiutil.parseQueryString(request.query_string))
        querydict.update({'titlesearch': 0})

        request.theme.add_msg(
            _(
                'Your search query {{{"%s"}}} didn\'t return any results. '
                'Please change some terms and refer to HelpOnSearching for '
                'more information.%s',
                wiki=True,
                percent=True) %
            (wikiutil.escape(needle), titlesearch and ''.join([
                '<br>',
                _('(!) Consider performing a', wiki=True),
                ' ',
                f.url(1, href=request.page.url(request, querydict, escape=0)),
                _('full-text search with your search terms'),
                f.url(0),
                '.',
            ]) or ''), "error")
        Page(request, pagename).send_page()
        return

    # This action generates data using the user language
    request.setContentLanguage(request.lang)

    request.theme.send_title(title % needle, pagename=pagename)

    # Start content (important for RTL support)
    request.write(request.formatter.startContent("content"))

    # Hints
    f = request.formatter
    hints = []

    if titlesearch:
        querydict = dict(wikiutil.parseQueryString(request.query_string))
        querydict.update({'titlesearch': 0})

        hints.append(''.join([
            _(
                "(!) You're performing a title search that might not include"
                ' all related results of your search query in this wiki. <<BR>>',
                wiki=True),
            ' ',
            f.url(1, href=request.page.url(request, querydict, escape=0)),
            f.text(
                _('Click here to perform a full-text search with your '
                  'search terms!')),
            f.url(0),
        ]))

    if advancedsearch and mtime_msg:
        hints.append(mtime_msg)

    if hints:
        request.write(searchHints(f, hints))

    # Search stats
    request.write(results.stats(request, request.formatter, hitsFrom))

    # Then search results
    info = not titlesearch
    if context:
        output = results.pageListWithContext(request,
                                             request.formatter,
                                             info=info,
                                             context=context,
                                             hitsFrom=hitsFrom,
                                             hitsInfo=1)
    else:
        output = results.pageList(request,
                                  request.formatter,
                                  info=info,
                                  hitsFrom=hitsFrom,
                                  hitsInfo=1)

    request.write(output)

    request.write(request.formatter.endContent())
    request.theme.send_footer(pagename)
    request.theme.send_closing_html()
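
The mtime handling above first tries the user's and the wiki's configured date formats and only then falls back to parsedatetime. A standalone sketch of that fallback chain; the format list is an assumption for illustration.

import time
from parsedatetime import Calendar  # import path differs in older parsedatetime releases

def parse_mtime(text, formats=('%Y-%m-%d %H:%M:%S', '%Y-%m-%d')):
    # 1) try the known strptime formats
    for fmt in formats:
        try:
            return time.mktime(time.strptime(text, fmt))
        except ValueError:
            continue
    # 2) fall back to natural-language parsing ("yesterday", "2 weeks ago", ...)
    parsed, status = Calendar().parse(text)
    if status > 0 and parsed <= time.localtime():
        return time.mktime(parsed)
    return None  # not recognized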
Example #24
def execute(macro, args):

   request = macro.request

   # get params
   if args:
      args = args.split(',')
   else:
      args = []

   kw = {}
   for arg in args :
      if '=' in arg:
         key, value = arg.split('=', 1)
         kw[str(key.strip())] = value.strip()

   try:
      maxTags = int( kw["maxTags"] )
   except (KeyError, ValueError):
      maxTags = 50

   try:
      autoCreate = kw["autoCreate"]
      if autoCreate == "true" or autoCreate == "True":
         autoCreate = True
      else:
         autoCreate = False

   except (KeyError):
      autoCreate = False

   #{level:hits , level:hits , ...}
   level = { 0 : 4 , 1 : 7 , 2 : 12 , 3 : 18 , 4 : 25 , 5 : 35 , 6 : 50 , 7 : 60 , 8 : 90 }

   args = r'regex:((\r)?\n----(\r)?\n[a-zA-Z])'

   # Search the pages
   query = search.QueryParser().parse_query(args)
   results = search.searchPages(request, query)
   pages = [hit.page_name for hit in results.hits]

   tags = []

   for page in pages:
      page = Page(request, page)
      if page.isStandardPage() and not page.isUnderlayPage():
         body = page.get_raw_body()
         match = re.search(r'----(\r)?\n(?P<tags>.*)(\r)?\n', body)
         if match is None:
            continue  # no "----"-separated tag line on this page
         match = match.group('tags')
         match = match.split(',')
         for tag in match:
            tags.insert(0, (str(tag)).strip())

   taglist = []
   taglist = list(frozenset(tags))

   def sort(t):
      return t[1]

   show = []
   for tag in taglist:
      show.append( (tag, tags.count(tag)) )
   show.sort(key=sort, reverse=True)
   show = show[0:maxTags]
   show.sort()

   html = []

   for tag in show:

      pagename = tag[0]
      hits = tag[1]

      # auto create tag page if not exist
      if autoCreate:
         page = Page(request, pagename)
         if not page.isStandardPage(includeDeleted=False) and not page.isUnderlayPage():

            from MoinMoin.security import Permissions
            class SecurityPolicy(Permissions):
               def write(*args, **kw):
                  return True
               def save(*args, **kw):
                  return True
            request.user.may = SecurityPolicy(request.user)

            PageEditor(request, pagename).saveText(ur"[[FullSearch(regex:(-{4}(\r)?\n(.*)%s))]]"%(tag[0]), 0)

      #level0
      if hits < level[0]:
         html.append(u'<span  style="font-size:0.65em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level1
      elif hits < level[1]:
         html.append(u'<span  style="font-size:0.75em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level2
      elif hits < level[2]:
         html.append(u'<span  style="font-size:0.9em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level3
      elif hits < level[3]:
         html.append(u'<span  style="font-size:1.0em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level4
      elif hits < level[4]:
         html.append(u'<span  style="font-size:1.05em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level5
      elif hits < level[5]:
         html.append(u'<span  style="font-size:1.1em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level6
      elif hits < level[6]:
         html.append(u'<span  style="font-size:1.15em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level7
      elif hits < level[7]:
         html.append(u'<span  style="font-size:1.2em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level8
      elif hits < level[8]:
         html.append(u'<span  style="font-size:1.25em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))
      #level9
      else:
         html.append(u'<span  style="font-size:1.3em;"><a href="%s"> %s</a></span>'%(pagename, tag[0]))

   return ''.join(html)
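
The level dictionary and the if/elif ladder above map a tag's hit count to a font size. A compact sketch of the same mapping, with the thresholds and sizes copied from the macro:

from bisect import bisect_right

THRESHOLDS = [4, 7, 12, 18, 25, 35, 50, 60, 90]
SIZES = ['0.65', '0.75', '0.9', '1.0', '1.05', '1.1', '1.15', '1.2', '1.25', '1.3']

def font_size(hits):
    # bisect_right counts how many thresholds the hit count has reached,
    # which is exactly the level index the if/elif ladder selects.
    return SIZES[bisect_right(THRESHOLDS, hits)]

# e.g. font_size(3) -> '0.65', font_size(40) -> '1.15', font_size(90) -> '1.3'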
Example #25
def execute(pagename, request, fieldname='value', titlesearch=0):
    _ = request.getText
    titlesearch = isTitleSearch(request)

    # context is relevant only for full search
    if titlesearch:        
        context = 0      
    else:
        context = int(request.form.get('context', [0])[0])        
    
    # Get other form parameters
    needle = request.form.get(fieldname, [''])[0]
    case = int(request.form.get('case', [0])[0])
    regex = int(request.form.get('regex', [0])[0]) # no interface currently

    max_context = 1 # only show first `max_context` contexts XXX still unused

    # check for sensible search term
    stripped = needle.strip()
    if len(stripped) == 0:
        err = _('Please use a more selective search term instead '
                'of {{{"%s"}}}') % needle
        # send http headers
        request.http_headers()
        Page(request, pagename).send_page(request, msg=err) 
        return

    # search the pages
    from MoinMoin import search
    query = search.QueryParser(case=case, regex=regex,
                               titlesearch=titlesearch).parse_query(needle)
    results = search.searchPages(request, query)

    # directly show a single hit
    # XXX won't work with attachment search
    # improve if we have one...
    if len(results.hits) == 1:
        page = Page(request, results.hits[0].page_name)
        # TODO: remove escape=0 in 1.4
        url = page.url(request, querystr={'highlight': query.highlight_re()},
                       escape=0)
        request.http_redirect(url)
        raise MoinMoinNoFooter

    # send http headers
    request.http_headers()

    # This action generates data using the user language
    request.setContentLanguage(request.lang)

    # Setup for type of search
    if titlesearch:
        title = _('Title Search: "%s"')
        results.sortByPagename()
    else:
        title = _('Full Text Search: "%s"')
        results.sortByWeight() 

    wikiutil.send_title(request, title % needle, form=request.form,
                        pagename=pagename)
    
    # Start content (important for RTL support)
    formatter = Formatter(request)
    request.write(formatter.startContent("content"))

    # First search stats
    request.write(results.stats(request, formatter))

    # Then search results
    info = not titlesearch
    if context:
        output = results.pageListWithContext(request, formatter, info=info,
                                             context=context)
    else:
        output = results.pageList(request, formatter, info=info)        
    request.write(output)

    # End content and send footer
    request.write(formatter.endContent())
    wikiutil.send_footer(request, pagename, editable=0, showactions=0,
                         form=request.form)
Example #26
 def testTitleSearchOR(self):
     """ search: title search with OR expression """
     result = search.searchPages(self.request, u"title:FrontPage or title:RecentChanges")
     assert len(result.hits) == 2
Example #27
        @param query_string: term to search for
        @param search_type: "text" or "title" search
        @param length: length of context preview (in characters)
        @param case: should the search be case sensitive?
        @param mtime: only output pages modified after mtime
        @param regexp: should the query_string be treated as a regular expression?
        @return: (page name, context preview, page url)
        """
        from MoinMoin import search
        from MoinMoin.formatter.text_plain import Formatter

        kwargs = {"sort": "page_name", "case": case, "regex": regexp}
        if search_type == "title":
            kwargs["titlesearch"] = True

        results = search.searchPages(self.request, query_string, **kwargs)
        results.formatter = Formatter(self.request)
        results.request = self.request

        return [(self._outstr(hit.page_name),
                 self._outstr(results.formatContext(hit, length, 1)),
                 self.request.getQualifiedURL(hit.page.url(self.request, {})))
                for hit in results.hits]

    def xmlrpc_getMoinVersion(self):
        """
        Returns a tuple of the MoinMoin version:
        (project, release, revision)
        """
        from MoinMoin import version
        return (version.project, version.release, version.revision)
Example #28
 def testTitleSearchNegativeTerm(self):
     """ search: title search for a AND expression with a negative term """
     helpon_count = len(search.searchPages(self.request, u"title:HelpOn").hits)
     result = search.searchPages(self.request, u"title:HelpOn -title:Acl")
     assert len(result.hits) == helpon_count - 1  # finds all HelpOn* except one
Example #29
def execute(macro, needle, titlesearch=False, case=False):
    request = macro.request
    _ = request.getText

    # if no args given, invoke "classic" behavior
    if needle is None:
        return search_box("fullsearch", macro)

    highlight_titles = getattr(request.cfg, "search_macro_highlight_titles", 1)
    highlight_pages = getattr(request.cfg, "search_macro_highlight_pages", 1)

    err = None
    # It is needed because otherwise macro instances like
    # <<FullSearch(..., highlight=1)>> (which found occurrences of "...," and
    # "highlight=1" before the change) begin behaving differently.
    if getattr(request.cfg, "search_macro_parse_args", False):
        needle_found = False

        # parse_quoted_separated() is used instead of rsplit() and such for
        # proper parsing cases like FullSearch(",") and so.
        args = wikiutil.parse_quoted_separated_ext(needle,
                                                   separator=",",
                                                   name_value_separator="=")

        # First non-tuple item in resulting list to be needle
        for arg in args:
            if isinstance(arg, tuple):
                val = arg[1].lower() in [u'1', u'true', u'y']
                if arg[0] == u"highlight_pages":
                    highlight_pages = val
                elif arg[0] == u"highlight_titles":
                    highlight_titles = val
                else:
                    err = _(u"Unknown macro parameter: %s.") % arg[0]
            elif isinstance(arg, basestring):
                if not needle_found:
                    needle_found = True
                    needle = arg
                else:
                    err = _(u"More than one needle with "
                             "search_macro_parse_args config option enabled "
                             "('%(needle)s' found already, '%(arg)s' occurred)"
                             ) % {'needle': wikiutil.escape(needle),
                                  'arg': wikiutil.escape(arg)}

        if not needle_found:
            needle = ''

    # With empty arguments, simulate title click (backlinks to page)
    if needle == '' and not titlesearch:
        needle = u'"%s"' % macro.formatter.page.page_name

    # With whitespace argument, show error message like the one used in the search box
    # TODO: search should implement those error messages for clients
    elif not needle.strip():
        err = _(u'Please use a more selective search term instead of '
                '{{{"%s"}}}', wiki=True) % needle

    if err:
        return u'<span class="error">%s</span>' % err

    needle = needle.strip()

    # Search the pages and return the results
    try:
        results = search.searchPages(request, needle, titlesearch=titlesearch,
                                     case=case, sort='page_name')

        ret = results.pageList(request, macro.formatter, paging=False,
            highlight_titles=highlight_titles, highlight_pages=highlight_pages)

    except ValueError:
        # same error as in MoinMoin/action/fullsearch.py, keep it that way!
        ret = ''.join([macro.formatter.text(u'<<%s(' % macro.name),
                      _(u'Your search query {{{"%s"}}} is invalid. Please refer '
                        'to HelpOnSearching for more information.', wiki=True,
                        percent=True) % wikiutil.escape(needle),
                      macro.formatter.text(u')>>')])

    return ret
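
A rough, standalone illustration of the argument convention the macro accepts when search_macro_parse_args is enabled: the first bare item becomes the needle and name=value items toggle options. This is a simplified stand-in for parse_quoted_separated_ext and ignores quoting.

def split_macro_args(raw):
    needle, options = None, {}
    for part in raw.split(u','):
        part = part.strip()
        if u'=' in part:
            name, value = part.split(u'=', 1)
            options[name.strip()] = value.strip().lower() in (u'1', u'true', u'y')
        elif needle is None:
            needle = part
    return needle, options

# split_macro_args(u'CategoryHomepage, highlight_titles=1')
# -> (u'CategoryHomepage', {u'highlight_titles': True})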
Example #30
 def testTitleSearchNegatedFindAll(self):
     """ search: negated title search for some pagename that does not exist results in all pagenames """
     result = search.searchPages(self.request, u"-title:%s" % self.doesnotexist)
     assert len(result.hits) > 100  # XXX should be "all"
Example #31
    # TODO: search should implement those error messages for clients
    elif not needle.strip():
        err = _(
            u'Please use a more selective search term instead of '
            '{{{"%s"}}}',
            wiki=True) % needle

    if err:
        return u'<span class="error">%s</span>' % err

    needle = needle.strip()

    # Search the pages and return the results
    try:
        results = search.searchPages(request,
                                     needle,
                                     titlesearch=titlesearch,
                                     case=case,
                                     sort='page_name')

        ret = results.pageList(request,
                               macro.formatter,
                               paging=False,
                               highlight_titles=highlight_titles,
                               highlight_pages=highlight_pages)

    except ValueError:
        # same error as in MoinMoin/action/fullsearch.py, keep it that way!
        ret = ''.join([
            macro.formatter.text(u'<<%s(' % macro.name),
            _(
                u'Your search query {{{"%s"}}} is invalid. Please refer '
Example #32
def link_dialog(request):
    request.http_headers()
    # list of wiki pages
    name = request.form.get("pagename", [""])[0]
    if name:
        from MoinMoin import search
        # XXX error handling!
        searchresult = search.searchPages(
            request,
            search.QueryParser().parse_query('t:"%s"' % name))
        
        pages = [p.page_name for p in searchresult.hits]
        pages.sort()
        pages[0:0] = [name]
        page_list ='''
         <tr>
          <td colspan=2>
           <select id="sctPagename" size="1" onchange="OnChangePagename(this.value);">
           %s
           </select>
          <td>
         </tr> 
''' % "\n".join(['<option value="%s">%s</option>' % (page, page)
                 for page in pages])
    else:
        page_list = ""
    
    # list of interwiki names
    wikiutil.resolve_wiki(request, "Self:FrontPage")
    interwiki = request.cfg._interwiki_list.keys()
    interwiki.sort()
    iwpreferred = request.cfg.interwiki_preferred[:] # make a copy of list!
    if not iwpreferred or iwpreferred and iwpreferred[-1] is not None:
        resultlist = iwpreferred
        for iw in interwiki:
            if not iw in iwpreferred:
                resultlist.append(iw)
    else:
        resultlist = iwpreferred[:-1]
    interwiki = "\n".join(
        ['<option value="%s">%s</option>' % (key, key) for key in resultlist])

    # wiki url
    url_prefix = request.cfg.url_prefix
    scriptname = request.getScriptname()
    if not scriptname or scriptname[-1] != "/":
        scriptname += "/"
    action = scriptname
    basepage = request.page.page_name.encode(config.charset)
    request.http_headers()
    request.write('''
<!--
 * FCKeditor - The text editor for internet
 * Copyright (C) 2003-2004 Frederico Caldeira Knabben
 * 
 * Licensed under the terms of the GNU Lesser General Public License:
 *   http://www.opensource.org/licenses/lgpl-license.php
 * 
 * For further information visit:
 *   http://www.fckeditor.net/
 * 
 * File Name: fck_link.html
 *  Link dialog window.
 * 
 * Version:  2.0 FC (Preview)
 * Modified: 2005-02-18 23:55:22
 * 
 * File Authors:
 *   Frederico Caldeira Knabben ([email protected])
-->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<meta name="robots" content="index,nofollow">
<html>
 <head>
  <title>Link Properties</title>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
  <meta name="robots" content="noindex,nofollow" />
  <script src="%(url_prefix)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
  <script src="%(url_prefix)s/applets/moinFCKplugins/moinlink/fck_link.js" type="text/javascript"></script>
  <script src="%(url_prefix)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
 </head>
 <body scroll="no" style="OVERFLOW: hidden">
  <div id="divInfo" style="DISPLAY: none">
   <span fckLang="DlgLnkType">Link Type</span><br />
   <select id="cmbLinkType" onchange="SetLinkType(this.value);">
    <option value="wiki" selected="selected">WikiPage</option>
    <option value="interwiki">Interwiki</option>
    <option value="url" fckLang="DlgLnkTypeURL">URL</option>
   </select>
   <br />
   <br />
   <div id="divLinkTypeWiki">
    <table height="100%%" cellSpacing="0" cellPadding="0" width="100%%" border="0">
     <tr>
      <td>
       <form action=%(action)s method="GET">
       <input type="hidden" name="action" value="fckdialog">
       <input type="hidden" name="dialog" value="link">
       <input type="hidden" id="basepage" name="basepage" value="%(basepage)s">
       <table cellSpacing="0" cellPadding="0" align="center" border="0">
        <tr>
         <td>
          <span fckLang="PageDlgName">Page Name</span><br>
          <input id="txtPagename" name="pagename" size="30" value="%(name)s">
         </td>
         <td valign="bottom">
           <input id=btnSearchpage type="submit" value="Search">
         </td>
        </tr>
        %(page_list)s
       </table>
       </form> 
      </td>
     </tr>
    </table>
   </div>
   <div id="divLinkTypeInterwiki">
    <table height="100%%" cellSpacing="0" cellPadding="0" width="100%%" border="0">
     <tr>
      <td>
       <table cellSpacing="0" cellPadding="0" align="center" border="0">
        <tr>
         <td>
          <span fckLang="WikiDlgName">Wiki:PageName</span><br>
          <select id="sctInterwiki" size="1">
          %(interwiki)s
          </select>:
          <input id="txtInterwikipagename"></input>
         </td>
        </tr>
       </table>
      </td>
     </tr>
    </table>
   </div>
   <div id="divLinkTypeUrl">
    <table cellspacing="0" cellpadding="0" width="100%%" border="0">
     <tr>
      <td nowrap="nowrap">
       <span fckLang="DlgLnkProto">Protocol</span><br />
       <select id="cmbLinkProtocol">
        <option value="http://" selected="selected">http://</option>
        <option value="https://">https://</option>
        <option value="ftp://">ftp://</option>
        <option value="file://">file://</option>
        <option value="news://">news://</option>
        <option value="mailto:">mailto:</option>
        <option value="" fckLang="DlgLnkProtoOther">&lt;other&gt;</option>
       </select>
      </td>
      <td nowrap="nowrap">&nbsp;</td>
      <td nowrap="nowrap" width="100%%">
       <span fckLang="DlgLnkURL">URL</span><br />
       <input id="txtUrl" style="WIDTH: 100%%" type="text" onkeyup="OnUrlChange();" onchange="OnUrlChange();" />
      </td>
     </tr>
    </table>
    <br />
   </div>
  </div> 
 </body>
</html>
''' % locals())
Example #33
 def testTitleSearchFrontPage(self):
     """ search: title search for FrontPage """
     result = search.searchPages(self.request, u"title:FrontPage")
     assert len(result.hits) == 1
Example #34
 def testTitleSearchAND(self):
     """ search: title search with AND expression """
     result = search.searchPages(self.request, u"title:Help title:Index")
     assert len(result.hits) == 1
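
Taken together, the tests above exercise MoinMoin's query syntax: an implicit AND by juxtaposition, an explicit "or", negation with a leading minus, and the title: prefix. A combined sketch in the same style, assuming it sits in the same test class with the same self.request fixture and search import:

def testQuerySyntaxOverview(self):
    """ search: the operators exercised by the individual tests above """
    for query in (u"title:FrontPage",                         # single title term
                  u"title:Help title:Index",                  # implicit AND
                  u"title:FrontPage or title:RecentChanges",  # OR
                  u"title:HelpOn -title:Acl"):                # negated term
        result = search.searchPages(self.request, query)
        assert result.hits is not None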
Example #35
def link_dialog(request):
    # list of wiki pages
    name = request.values.get("pagename", "")
    if name:
        from MoinMoin import search
        # XXX error handling!
        searchresult = search.searchPages(request, 't:"%s"' % name)

        pages = [p.page_name for p in searchresult.hits]
        pages.sort()
        pages[0:0] = [name]
        page_list = '''
         <tr>
          <td colspan=2>
           <select id="sctPagename" size="1" onchange="OnChangePagename(this.value);">
           %s
           </select>
          <td>
         </tr>
''' % "\n".join([
            '<option value="%s">%s</option>' % (page, page) for page in pages
        ])
    else:
        page_list = ""

    # list of interwiki names
    interwiki_list = wikiutil.load_wikimap(request)
    interwiki = interwiki_list.keys()
    interwiki.sort()
    iwpreferred = request.cfg.interwiki_preferred[:]
    if not iwpreferred or iwpreferred and iwpreferred[-1] is not None:
        resultlist = iwpreferred
        for iw in interwiki:
            if not iw in iwpreferred:
                resultlist.append(iw)
    else:
        resultlist = iwpreferred[:-1]
    interwiki = "\n".join(
        ['<option value="%s">%s</option>' % (key, key) for key in resultlist])

    # wiki url
    url_prefix_static = request.cfg.url_prefix_static
    scriptname = request.script_root + '/'
    action = scriptname
    basepage = request.page.page_name
    request.write(u'''
<!--
 * FCKeditor - The text editor for internet
 * Copyright (C) 2003-2004 Frederico Caldeira Knabben
 *
 * Licensed under the terms of the GNU Lesser General Public License:
 *   http://www.opensource.org/licenses/lgpl-license.php
 *
 * For further information visit:
 *   http://www.fckeditor.net/
 *
 * File Name: fck_link.html
 *  Link dialog window.
 *
 * Version:  2.0 FC (Preview)
 * Modified: 2005-02-18 23:55:22
 *
 * File Authors:
 *   Frederico Caldeira Knabben ([email protected])
-->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<meta name="robots" content="index,nofollow">
<html>
 <head>
  <title>Link Properties</title>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
  <meta name="robots" content="noindex,nofollow" />
  <script src="%(url_prefix_static)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinlink/fck_link.js" type="text/javascript"></script>
  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
 </head>
 <body scroll="no" style="OVERFLOW: hidden">
  <div id="divInfo" style="DISPLAY: none">
   <span fckLang="DlgLnkType">Link Type</span><br />
   <select id="cmbLinkType" onchange="SetLinkType(this.value);">
    <option value="wiki" selected="selected">WikiPage</option>
    <option value="interwiki">Interwiki</option>
    <option value="url" fckLang="DlgLnkTypeURL">URL</option>
   </select>
   <br />
   <br />
   <div id="divLinkTypeWiki">
    <table height="100%%" cellSpacing="0" cellPadding="0" width="100%%" border="0">
     <tr>
      <td>
       <form action=%(action)s method="GET">
       <input type="hidden" name="action" value="fckdialog">
       <input type="hidden" name="dialog" value="link">
       <input type="hidden" id="basepage" name="basepage" value="%(basepage)s">
       <table cellSpacing="0" cellPadding="0" align="center" border="0">
        <tr>
         <td>
          <span fckLang="PageDlgName">Page Name</span><br>
          <input id="txtPagename" name="pagename" size="30" value="%(name)s">
         </td>
         <td valign="bottom">
           <input id=btnSearchpage type="submit" value="Search">
         </td>
        </tr>
        %(page_list)s
       </table>
       </form>
      </td>
     </tr>
    </table>
   </div>
   <div id="divLinkTypeInterwiki">
    <table height="100%%" cellSpacing="0" cellPadding="0" width="100%%" border="0">
     <tr>
      <td>
       <table cellSpacing="0" cellPadding="0" align="center" border="0">
        <tr>
         <td>
          <span fckLang="WikiDlgName">Wiki:PageName</span><br>
          <select id="sctInterwiki" size="1">
          %(interwiki)s
          </select>:
          <input id="txtInterwikipagename"></input>
         </td>
        </tr>
       </table>
      </td>
     </tr>
    </table>
   </div>
   <div id="divLinkTypeUrl">
    <table cellspacing="0" cellpadding="0" width="100%%" border="0">
     <tr>
      <td nowrap="nowrap">
       <span fckLang="DlgLnkProto">Protocol</span><br />
       <select id="cmbLinkProtocol">
        <option value="http://" selected="selected">http://</option>
        <option value="https://">https://</option>
        <option value="ftp://">ftp://</option>
        <option value="file://">file://</option>
        <option value="news://">news://</option>
        <option value="mailto:">mailto:</option>
        <option value="" fckLang="DlgLnkProtoOther">&lt;other&gt;</option>
       </select>
      </td>
      <td nowrap="nowrap">&nbsp;</td>
      <td nowrap="nowrap" width="100%%">
       <span fckLang="DlgLnkURL">URL</span><br />
       <input id="txtUrl" style="WIDTH: 100%%" type="text" onkeyup="OnUrlChange();" onchange="OnUrlChange();" />
      </td>
     </tr>
    </table>
    <br />
   </div>
  </div>
 </body>
</html>
''' % locals())
Example #36
    # Want a particular category ?
    if opt_category:
        if opt_category.startswith(opt_category_word):
            opt_category = opt_category[len(opt_category_word):]
        category_regex = r"\b%s%s\b" % (opt_category_word, opt_category)
        category_rx = re.compile(category_regex)

    # Ignore categories ?
    if opt_bypages == 2:
        for p in hits:
            pages_hits[p] = set()

    parser = search.QueryParser(regex=1, case=1)  # case-sensitive
    query = parser.parse_query(content_regex)
    results = search.searchPages(macro.request, query)
    is_name_rx = re.compile(r'^\w+$')
    for hit in results.hits:

        # if particular category specified and not here, do not keep
        # this hit
        if opt_category:
            found = False
            for match in hit.get_matches():
                cat = match.re_match.group()
                if category_rx.match(cat): found = True
            if not found: continue

        # keep this hit and remember to what it belongs
        for match in hit.get_matches():
            if hit.page_name not in hits: continue
Example #37
def attachment_dialog(request):
    request.http_headers()
    # list of wiki pages
    name = request.form.get("pagename", [""])[0]
    if name:
        from MoinMoin import search
        # XXX error handling!
        searchresult = search.searchPages(
            request,
            search.QueryParser().parse_query('t:"%s"' % name))
        
        pages = [p.page_name for p in searchresult.hits]
        pages.sort()
        pages[0:0] = [name]
        page_list ='''
         <tr>
          <td colspan=2>
           <select id="sctPagename" size="1" onchange="OnChangePagename(this.value);">
           %s
           </select>
          <td>
         </tr> 
''' % "\n".join(['<option value="%s">%s</option>' % (page, page)
                 for page in pages])
    else:
        page_list = ""
    
    # wiki url
    url_prefix = request.cfg.url_prefix
    scriptname = request.getScriptname()
    if not scriptname or scriptname[-1] != "/":
        scriptname += "/"
    action = scriptname
    request.http_headers()
    request.write('''
<!--
 * FCKeditor - The text editor for internet
 * Copyright (C) 2003-2004 Frederico Caldeira Knabben
 * 
 * Licensed under the terms of the GNU Lesser General Public License:
 *   http://www.opensource.org/licenses/lgpl-license.php
 * 
 * For further information visit:
 *   http://www.fckeditor.net/
 * 
 * File Name: fck_attachment.html
 *  Attachment dialog window.
 * 
 * Version:  2.0 FC (Preview)
 * Modified: 2005-02-18 23:55:22
 * 
 * File Authors:
 *   Frederico Caldeira Knabben ([email protected])
-->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<meta name="robots" content="index,nofollow">
<html>
 <head>
  <title>Attachment Properties</title>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
  <meta name="robots" content="noindex,nofollow" />
  <script src="%(url_prefix)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
  <script src="%(url_prefix)s/applets/moinFCKplugins/moinattachment/fck_attachment.js" type="text/javascript"></script>
  <script src="%(url_prefix)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
 </head>
 <body scroll="no" style="OVERFLOW: hidden">
  <div id="divInfo">
   <div id="divLinkTypeAttachment">
    <table height="100%%" cellSpacing="0" cellPadding="0" width="100%%" border="0">
     <tr>
      <td>
        <form action="%(action)s" method="GET">
       <input type="hidden" name="action" value="fckdialog">
       <input type="hidden" name="dialog" value="attachment">
       <table cellSpacing="0" cellPadding="0" align="center" border="0">
        <tr>
         <td>
          <span fckLang="AttachmentDlgName">Attachment Name</span><br>
          <input id="txtAttachmentname" name="pagename" size="30" value="%(name)s">
         </td>
        </tr>
       </table>
       </form> 
      </td>
     </tr>
    </table>
   </div>
  </div> 
 </body>
</html>
''' % locals())
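The page_list markup above interpolates page names directly into the <option> elements, which breaks the dialog for names containing quotes or ampersands. A minimal, hedged sketch of an escaping helper follows; escape_option is an illustrative name and not part of the dialog code.

try:
    from html import escape   # Python 3
except ImportError:
    from cgi import escape    # Python 2

def escape_option(page):
    # Escape &, <, > and double quotes before embedding the name in HTML.
    value = escape(page, True)
    return '<option value="%s">%s</option>' % (value, value)

# For the page name 'A"B & C' this produces
# '<option value="A&quot;B &amp; C">A&quot;B &amp; C</option>'.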
Beispiel #38
0
 def testFullSearchNegativeTerm(self):
     """ search: full search for a AND expression with a negative term """
     helpon_count = len(search.searchPages(self.request, u"HelpOn").hits)
     result = search.searchPages(self.request, u"HelpOn -ACL")
     assert 0 < len(result.hits) < helpon_count
Beispiel #39
0
def execute(pagename, request, fieldname='value', titlesearch=0, statistic=0):
    _ = request.getText
    titlesearch = checkTitleSearch(request)
    if titlesearch < 0:
        check_surge_protect(request, kick=True) # get rid of spammer
        return

    if 'metasearch' in request.values: 
        form = MultiDict(request.values)
        form['action'] = 'MetaSearch'
        val = form.get('value', '')
        form['q'] = val
        request.values = CombinedMultiDict([MultiDict(form)])
        return ms_execute(pagename, request)

    advancedsearch = isAdvancedSearch(request)

    form = request.values

    # context is relevant only for full search
    if titlesearch:
        context = 0
    elif advancedsearch:
        context = 180 # XXX: hardcoded context count for advancedsearch
    else:
        context = int(form.get('context', 0))

    # Get other form parameters
    needle = form.get(fieldname, '')
    case = int(form.get('case', 0))
    regex = int(form.get('regex', 0)) # no interface currently
    hitsFrom = int(form.get('from', 0))
    highlight_titles = int(form.get('highlight_titles', 1))
    highlight_pages = int(form.get('highlight_pages', 1))
    mtime = None
    msg = ''
    historysearch = 0

    # if advanced search is enabled we construct our own search query
    if advancedsearch:
        and_terms = form.get('and_terms', '').strip()
        or_terms = form.get('or_terms', '').strip()
        not_terms = form.get('not_terms', '').strip()
        #xor_terms = form.get('xor_terms', '').strip()
        categories = form.getlist('categories') or ['']
        timeframe = form.get('time', '').strip()
        language = form.getlist('language') or ['']
        mimetype = form.getlist('mimetype') or [0]
        excludeunderlay = form.get('excludeunderlay', 0)
        nosystemitems = form.get('nosystemitems', 0)
        historysearch = form.get('historysearch', 0)

        mtime = form.get('mtime', '')
        if mtime:
            mtime_parsed = None

            # get mtime from known date/time formats
            for fmt in (request.user.datetime_fmt,
                    request.cfg.datetime_fmt, request.user.date_fmt,
                    request.cfg.date_fmt):
                try:
                    mtime_parsed = time.strptime(mtime, fmt)
                except ValueError:
                    continue
                else:
                    break

            if mtime_parsed:
                mtime = time.mktime(mtime_parsed)
            else:
                # didn't work, let's try parsedatetime
                cal = Calendar()
                mtime_parsed, parsed_what = cal.parse(mtime)
                # XXX it is unclear whether using localtime here and inside the
                # parsedatetime module is correct: time.localtime is the SERVER's
                # local time, which is not necessarily the user's (who may be
                # anywhere in the world). mktime is the inverse of localtime, so
                # this may cancel the discrepancy again.
                if parsed_what > 0 and mtime_parsed <= time.localtime():
                    mtime = time.mktime(mtime_parsed)
                else:
                    mtime_parsed = None # we don't use invalid stuff

            # show info
            if mtime_parsed:
                # XXX mtime_msg is not shown in some cases
                mtime_msg = _("(!) Only pages changed since '''%s''' are being displayed!",
                              wiki=True) % request.user.getFormattedDateTime(mtime)
            else:
                mtime_msg = _('/!\\ The modification date you entered was not '
                        'recognized and is therefore not considered for the '
                        'search results!', wiki=True)
        else:
            mtime_msg = None

        word_re = re.compile(r'(\"[\w\s]+"|\w+)', re.UNICODE)
        needle = ''
        if categories[0]:
            needle += 'category:%s ' % ','.join(categories)
        if language[0]:
            needle += 'language:%s ' % ','.join(language)
        if mimetype[0]:
            needle += 'mimetype:%s ' % ','.join(mimetype)
        if excludeunderlay:
            needle += '-domain:underlay '
        if nosystemitems:
            needle += '-domain:system '
        if and_terms:
            needle += '(%s) ' % and_terms
        if not_terms:
            needle += '(%s) ' % ' '.join(['-%s' % t for t in word_re.findall(not_terms)])
        if or_terms:
            needle += '(%s) ' % ' or '.join(word_re.findall(or_terms))
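        # Illustrative example (not from the original code): with categories
        # ['CategoryHomepage'], and_terms 'wiki moin' and not_terms 'spam',
        # the assembled needle would be:
        #   'category:CategoryHomepage (wiki moin) (-spam) '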

    # check for sensible search term
    stripped = needle.strip()
    if len(stripped) == 0:
        request.theme.add_msg(_('Please use a more selective search term instead '
                'of {{{"%s"}}}', wiki=True) % wikiutil.escape(needle), "error")
        Page(request, pagename).send_page()
        return
    needle = stripped

    # Setup for type of search
    if titlesearch:
        title = _('Title Search: "%s"')
        sort = 'page_name'
    else:
        if advancedsearch:
            title = _('Advanced Search: "%s"')
        else:
            title = _('Full Text Search: "%s"')
        sort = 'weight'

    # search the pages
    from MoinMoin.search import searchPages, QueryParser, QueryError
    try:
        query = QueryParser(case=case, regex=regex,
                titlesearch=titlesearch).parse_query(needle)
    except QueryError: # catch errors in the search query
        request.theme.add_msg(_('Your search query {{{"%s"}}} is invalid. Please refer to '
                'HelpOnSearching for more information.', wiki=True, percent=True) % wikiutil.escape(needle), "error")
        Page(request, pagename).send_page()
        return

    results = searchPages(request, query, sort, mtime, historysearch)

    # directly show a single hit for title searches
    # this is the "quick jump" functionality for when you remember only
    # parts of the page name, not the exact name
    if titlesearch and len(results.hits) == 1:
        page = results.hits[0]
        if not page.attachment: # we did not find an attachment
            page = Page(request, page.page_name)
            querydict = {}
            if highlight_pages:
                highlight = query.highlight_re()
                if highlight:
                    querydict.update({'highlight': highlight})
            url = page.url(request, querystr=querydict)
            request.http_redirect(url)
            return
    if not results.hits: # no hits?
        f = request.formatter
        querydict = wikiutil.parseQueryString(request.query_string).to_dict()
        querydict.update({'titlesearch': 0})

        request.theme.add_msg(_('Your search query {{{"%s"}}} didn\'t return any results. '
                'Please change some terms and refer to HelpOnSearching for '
                'more information.%s', wiki=True, percent=True) % (wikiutil.escape(needle),
                    titlesearch and ''.join([
                        '<br>',
                        _('(!) Consider performing a', wiki=True), ' ',
                        f.url(1, href=request.page.url(request, querydict, escape=0)),
                        _('full-text search with your search terms'),
                        f.url(0), '.',
                    ]) or ''), "error")
        Page(request, pagename).send_page()
        return

    # This action generates data using the user language
    request.setContentLanguage(request.lang)

    request.theme.send_title(title % needle, pagename=pagename)

    # Start content (important for RTL support)
    request.write(request.formatter.startContent("content"))

    # Hints
    f = request.formatter
    hints = []

    if titlesearch:
        querydict = wikiutil.parseQueryString(request.query_string).to_dict()
        querydict.update({'titlesearch': 0})

        hints.append(''.join([
            _("(!) You're performing a title search that might not include"
                ' all related results of your search query in this wiki. <<BR>>', wiki=True),
            ' ',
            f.url(1, href=request.page.url(request, querydict, escape=0)),
            f.text(_('Click here to perform a full-text search with your '
                'search terms!')),
            f.url(0),
        ]))

    if advancedsearch and mtime_msg:
        hints.append(mtime_msg)

    if hints:
        request.write(searchHints(f, hints))

    # Search stats
    request.write(results.stats(request, request.formatter, hitsFrom))

    # Then search results
    info = not titlesearch
    if context:
        output = results.pageListWithContext(request, request.formatter,
                info=info, context=context, hitsFrom=hitsFrom, hitsInfo=1,
                highlight_titles=highlight_titles,
                highlight_pages=highlight_pages)
    else:
        output = results.pageList(request, request.formatter, info=info,
                hitsFrom=hitsFrom, hitsInfo=1,
                highlight_titles=highlight_titles,
                highlight_pages=highlight_pages)

    request.write(output)

    request.write(request.formatter.endContent())
    request.theme.send_footer(pagename)
    request.theme.send_closing_html()
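Stripped of form handling and result rendering, the core of the action above reduces to parsing the needle and running the search. The sketch below condenses it using only the calls already shown in the example (QueryParser, parse_query, searchPages); the helper name run_fulltext_search and the hard-coded case/regex arguments are assumptions for illustration.

from MoinMoin.search import searchPages, QueryParser, QueryError

def run_fulltext_search(request, needle, titlesearch=0):
    # Parse the needle the same way the action does, then run the search.
    try:
        query = QueryParser(case=0, regex=0,
                            titlesearch=titlesearch).parse_query(needle)
    except QueryError:
        return None  # invalid query; the real action renders an error message instead
    sort = 'page_name' if titlesearch else 'weight'
    return searchPages(request, query, sort, None, 0)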
Beispiel #40
0
 def testFullSearchNegatedFindAll(self):
     """ search: negated full search for some string that does not exist results in all pages """
     result = search.searchPages(self.request, u"-%s" % self.doesnotexist)
     assert len(result.hits) > 100  # XXX should be "all"