Example #1
def run(send_stats=False):
    page = get_wiki_save_page()
    try:
        wikitext = page.get()
    except pywikibot.NoPage:
        pywikibot.output("%s not found." % page.aslink())
        wikitext = '[[%s:%s]]\n' % (pywikibot.getSite().namespace(14),
                                    pywikibot.translate(
                                        pywikibot.getSite(), reports_cat))
    final_summary = u''
    output_files = list()
    for f, section, summary in output_files_gen():
        pywikibot.output('File: \'%s\'\nSection: %s\n' % (f, section))
        output_data = read_output_file(f)
        output_files.append(f)
        entries = re.findall('=== (.*?) ===', output_data)
        if not entries:
            continue
        if append_date_to_entries:
            dt = time.strftime('%d-%m-%Y %H:%M', time.localtime())
            output_data = re.sub("(?m)^(=== \[\[.*?\]\] ===\n)",
                                 r"\1{{botdate|%s}}\n" % dt, output_data)
        m = re.search('(?m)^==\s*%s\s*==' % section, wikitext)
        if m:
            m_end = re.search(separatorC, wikitext[m.end():])
            if m_end:
                wikitext = wikitext[:m_end.start() + m.end(
                )] + output_data + wikitext[m_end.start() + m.end():]
            else:
                wikitext += '\n' + output_data
        else:
            wikitext += '\n' + output_data
        if final_summary:
            final_summary += ' '
        final_summary += u'%s: %s' % (summary, ', '.join(entries))

    if final_summary:
        pywikibot.output(final_summary + '\n')

        # If a checked page is in the 'Image' or 'Category' namespace, fix
        # the section title by prefixing ':' to avoid wiki markup side
        # effects (an embedded image or an unwanted category).
        wikitext = re.sub(u'(?i)=== \[\[%s:' % join_family_data('Image', 6),
                          ur'=== [[:\1:', wikitext)
        wikitext = re.sub(
            u'(?i)=== \[\[%s:' % join_family_data('Category', 14),
            ur'=== [[:\1:', wikitext)

        # TODO:
        # List of frequently rejected addresses to improve the upload process.
        wikitext = re.sub(r'http://(.*?)((forumcommunity|forumfree)\.net)',
                          r'<blacklist>\1\2', wikitext)

        if len(final_summary) >= 200:
            final_summary = final_summary[:200]
            final_summary = final_summary[:final_summary.rindex("[") -
                                          3] + "..."

        try:
            put(page, wikitext, comment=final_summary)
            for f in output_files:
                os.remove(f + '_pending')
                pywikibot.output("\'%s\' deleted." % f)
        except pywikibot.PageNotSaved:
            raise

        if append_date_to_entries:
            set_template(name='botdate')
        if '{{botbox' in wikitext:
            set_template(name='botbox')

    if send_stats:
        put_stats()
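A minimal, runnable sketch of the trickiest step in run() above: the fresh entries are spliced into the matching '== section ==' block, just before the separator that closes it. The separator pattern here is a placeholder; the real separatorC is defined elsewhere in the script.

import re

# hypothetical separator; the real separatorC is compiled at module level
separatorC = re.compile('{{section separator}}')

wikitext = ('== Reports ==\n'
            'older entries...\n'
            '{{section separator}}\n'
            '== Other ==\n')
output_data = '=== [[New entry]] ===\nsuspect text\n'

m = re.search(r'(?m)^==\s*%s\s*==' % 'Reports', wikitext)
if m:
    m_end = re.search(separatorC, wikitext[m.end():])
    if m_end:
        # m_end.start() is relative to the slice that begins at the end
        # of the section heading, hence the m.end() offset
        cut = m.end() + m_end.start()
        wikitext = wikitext[:cut] + output_data + wikitext[cut:]
print(wikitext)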
Example #2
def run(send_stats=False):
    page = get_wiki_save_page()
    try:
        wikitext = page.get()
    except pywikibot.NoPage:
        pywikibot.output("%s not found." % page.aslink())
        wikitext = '[[%s:%s]]\n' % (pywikibot.getSite().namespace(14),
                                    pywikibot.translate(pywikibot.getSite(),
                                                        reports_cat))
    final_summary = u''
    output_files = list()
    for f, section, summary in output_files_gen():
        pywikibot.output('File: \'%s\'\nSection: %s\n' % (f, section))
        output_data = read_output_file(f)
        output_files.append(f)
        entries = re.findall('=== (.*?) ===', output_data)
        if not entries:
            continue
        if append_date_to_entries:
            dt = time.strftime('%d-%m-%Y %H:%M', time.localtime())
            output_data = re.sub("(?m)^(=== \[\[.*?\]\] ===\n)",
                                 r"\1{{botdate|%s}}\n" % dt, output_data)
        m = re.search('(?m)^==\s*%s\s*==' % section, wikitext)
        if m:
            m_end = re.search(separatorC, wikitext[m.end():])
            if m_end:
                wikitext = (wikitext[:m_end.start() + m.end()] +
                            output_data + wikitext[m_end.start() + m.end():])
            else:
                wikitext += '\n' + output_data
        else:
            wikitext += '\n' + output_data
        if final_summary:
            final_summary += ' '
        final_summary += u'%s: %s' % (summary, ', '.join(entries))

    if final_summary:
        pywikibot.output(final_summary + '\n')

        # If a checked page is in the 'Image' or 'Category' namespace, fix
        # the section title by prefixing ':' to avoid wiki markup side
        # effects (an embedded image or an unwanted category).
        wikitext = re.sub(u'(?i)=== \[\[%s:' % join_family_data('Image', 6),
                          ur'=== [[:\1:', wikitext)
        wikitext = re.sub(u'(?i)=== \[\[%s:' % join_family_data('Category', 14),
                          ur'=== [[:\1:', wikitext)

        # TODO:
        # List of frequently rejected addresses to improve the upload process.
        wikitext = re.sub(r'http://(.*?)((forumcommunity|forumfree)\.net)',
                          r'<blacklist>\1\2', wikitext)

        if len(final_summary) >= 200:
            final_summary = final_summary[:200]
            final_summary = final_summary[
                :final_summary.rindex("[") - 3] + "..."

        try:
            put(page, wikitext, comment=final_summary)
            for f in output_files:
                os.remove(f + '_pending')
                pywikibot.output("\'%s\' deleted." % f)
        except pywikibot.PageNotSaved:
            raise

        if append_date_to_entries:
            set_template(name='botdate')
        if '{{botbox' in wikitext:
            set_template(name='botbox')

    if send_stats:
        put_stats()
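One detail of run() worth isolating: the edit summary is capped at 200 characters, and the rindex("[") step backs the cut up so a half-open wikilink such as "[[Foo" is never left dangling at the end. A self-contained illustration (the sample entries are invented):

final_summary = u'copyright: ' + u', '.join(
    u'[[Entry %d]]' % i for i in range(40))

if len(final_summary) >= 200:
    final_summary = final_summary[:200]
    # back up past the last '[' and the ', ' before it, then mark the
    # truncation explicitly
    final_summary = final_summary[:final_summary.rindex("[") - 3] + "..."

print(final_summary)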
Example #3
            stop = len(data)

        exist = True
        if page_exist(title):
            # check {{botbox}}
            revid = re.search("{{(?:/box|botbox)\|.*?\|(.*?)\|",
                              data[head.end():stop])
            if revid:
                if not revid_exist(revid.group(1)):
                    exist = False
        else:
            exist = False

        if exist:
            # title is the bare page title here (the '=== [[' markup is
            # re-added below), so match it directly and prefix ':' to
            # avoid embedding the image or categorizing the report page
            ctitle = re.sub(u'(?i)^%s:' % join_family_data('Image', 6),
                            ur':\1:', title)
            ctitle = re.sub(u'(?i)^%s:' % join_family_data('Category', 14),
                            ur':\1:', ctitle)
            output += "=== [[" + ctitle + "]]" + data[head.end():stop]
        else:
            comment_entry.append("[[%s]]" % title)

        if add_separator:
            output += data[next_head.start():next_head.end()] + '\n'
            add_separator = False

    add_comment = u'%s: %s' % (pywikibot.translate(pywikibot.getSite(),
                                                   summary_msg),
                               ", ".join(comment_entry))
Example #4
            stop = next_head.start()
        else:
            stop = len(data)

        exist = True
        if page_exist(title):
            # check {{botbox}}
            revid = re.search("{{(?:/box|botbox)\|.*?\|(.*?)\|", data[head.end():stop])
            if revid:
                if not revid_exist(revid.group(1)):
                    exist = False
        else:
            exist = False

        if exist:
            # title is the bare page title here, so match it directly and prefix ':' to avoid embedding/categorizing
            ctitle = re.sub(u'(?i)^%s:' % join_family_data('Image', 6), ur':\1:', title)
            ctitle = re.sub(u'(?i)^%s:' % join_family_data('Category', 14), ur':\1:', ctitle)
            output += "=== [[" + ctitle + "]]" + data[head.end():stop]
        else:
            comment_entry.append("[[%s]]" % title)

        if add_separator:
            output += data[next_head.start():next_head.end()] + '\n'
            add_separator = False

    add_comment = u'%s: %s' % (wikipedia.translate(wikipedia.getSite(), summary_msg), ", ".join(comment_entry))

    # remove useless newlines
    output = re.sub("(?m)^\n", "", output)

    if comment_entry:
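The cleanup step above is easy to misread: with (?m), the pattern '^\n' matches every empty line, so the sub simply drops blank lines from the rebuilt report. A quick illustration:

import re

output = u"=== [[A]] ===\n\ntext\n\n\n=== [[B]] ===\ntext\n"
output = re.sub("(?m)^\n", "", output)
print(output)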
Example #5
        else:
            stop = len(data)

        exist = True
        if page_exist(title):
            # check {{botbox}}
            revid = re.search("{{(?:/box|botbox)\|.*?\|(.*?)\|",
                              data[head.end():stop])
            if revid:
                if not revid_exist(revid.group(1)):
                    exist = False
        else:
            exist = False

        if exist:
            # title is the bare page title here (the '=== [[' markup is
            # re-added below), so match it directly and prefix ':'
            ctitle = re.sub(u'(?i)^%s:' % join_family_data('Image', 6),
                            ur':\1:', title)
            ctitle = re.sub(u'(?i)^%s:' % join_family_data('Category', 14),
                            ur':\1:', ctitle)
            output += "=== [[" + ctitle + "]]" + data[head.end():stop]
        else:
            comment_entry.append("[[%s]]" % title)

        if add_separator:
            output += data[next_head.start():next_head.end()] + '\n'
            add_separator = False

    add_comment = u'%s: %s' % (pywikibot.translate(
        pywikibot.getSite(), summary_msg), ", ".join(comment_entry))
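For the {{botbox}} check, the regex captures the template's second positional parameter, which this report format uses for the revision id of the checked edit; an entry survives only while revid_exist() still finds that revision. A self-contained illustration (the sample data is invented):

import re

data = u"=== [[Example.jpg]] ===\n{{botbox|Example.jpg|12345|SomeUser|}}\n"

revid = re.search(r"{{(?:/box|botbox)\|.*?\|(.*?)\|", data)
if revid:
    print(revid.group(1))  # 12345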
Example #6
def run(send_stats=False):
    page = get_wiki_save_page()

    try:
        wikitext = page.get()
    except wikipedia.NoPage:
        wikipedia.output("%s not found." % page.aslink())
        wikitext = "[[%s:%s]]\n" % (
            wikipedia.getSite().namespace(14),
            wikipedia.translate(wikipedia.getSite(), reports_cat),
        )

    final_summary = u""
    output_files = list()

    for f, section, summary in output_files_gen():
        wikipedia.output("File: '%s'\nSection: %s\n" % (f, section))

        output_data = read_output_file(f)
        output_files.append(f)

        entries = re.findall("=== (.*?) ===", output_data)

        if not entries:
            continue

        if append_date_to_entries:
            dt = time.strftime("%d-%m-%Y %H:%M", time.localtime())
            output_data = re.sub("(?m)^(=== \[\[.*?\]\] ===\n)", r"\1{{botdate|%s}}\n" % dt, output_data)

        m = re.search("(?m)^==\s*%s\s*==" % section, wikitext)
        if m:
            m_end = re.search(separatorC, wikitext[m.end() :])
            if m_end:
                wikitext = wikitext[: m_end.start() + m.end()] + output_data + wikitext[m_end.start() + m.end() :]
            else:
                wikitext += "\n" + output_data
        else:
            wikitext += "\n" + output_data

        if final_summary:
            final_summary += " "
        final_summary += u"%s: %s" % (summary, ", ".join(entries))

    if final_summary:
        wikipedia.output(final_summary + "\n")

        # If a checked page is in the 'Image' or 'Category' namespace, fix
        # the section title by prefixing ':' to avoid wiki markup side
        # effects (an embedded image or an unwanted category).

        wikitext = re.sub(u"(?i)=== \[\[%s:" % join_family_data("Image", 6), ur"=== [[:\1:", wikitext)
        wikitext = re.sub(u"(?i)=== \[\[%s:" % join_family_data("Category", 14), ur"=== [[:\1:", wikitext)

        # TODO:
        # List of frequently rejected addresses to improve the upload process.

        wikitext = re.sub(r"http://(.*?)((forumcommunity|forumfree)\.net)", r"<blacklist>\1\2", wikitext)

        if len(final_summary) >= 200:
            final_summary = final_summary[:200]
            final_summary = final_summary[: final_summary.rindex("[") - 3] + "..."

        try:
            put(page, wikitext, comment=final_summary)
            for f in output_files:
                os.remove(f + "_pending")
                wikipedia.output("'%s' deleted." % f)
        except wikipedia.PageNotSaved:
            raise

        if append_date_to_entries:
            set_template(name="botdate")
        if "{{botbox" in wikitext:
            set_template(name="botbox")

    if send_stats:
        put_stats()
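The append_date_to_entries branch is shared by every variant of run() above; a minimal sketch of the stamping it performs, inserting a {{botdate}} line directly under each entry heading:

import re
import time

output_data = u"=== [[File one]] ===\ntext\n=== [[File two]] ===\ntext\n"
dt = time.strftime('%d-%m-%Y %H:%M', time.localtime())
# (?m)^ anchors on each heading line; \1 keeps the heading itself
output_data = re.sub(r"(?m)^(=== \[\[.*?\]\] ===\n)",
                     r"\1{{botdate|%s}}\n" % dt, output_data)
print(output_data)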