Code example #1 (score: 0)
File: new_version.py — project: Bilka2/Wiki-scripts
def main(forum_post_number,
         version,
         api_url='https://wiki.factorio.com/api.php',
         version_nav=True):
    """Announce a new experimental Factorio version on the wiki.

    Rewrites 'Main_Page/Latest_versions' so the experimental-version line
    links to the given forum release post, and (when version_nav is True)
    appends the version to 'Template:VersionNav'. Aborts with a message
    string if the version is already present on either page; otherwise
    returns the concatenated wiki API edit responses.
    """
    session = requests.Session()
    edit_token = get_edit_token(session, api_url)
    latest_version_page_name = 'Main_Page/Latest_versions'
    version_nav_page_name = 'Template:VersionNav'

    latest_version_page = get_page(session, api_url, latest_version_page_name)
    version_nav_page = (get_page(session, api_url, version_nav_page_name)
                        if version_nav else None)

    if version in latest_version_page:
        return f'Version {version} already found on "{latest_version_page_name}". Aborting.'
    if version_nav and version in version_nav_page:
        return f'Version {version} already found on "{version_nav_page_name}". Aborting.'

    if 'None' not in latest_version_page:
        # Normal case: replace the existing forum link and version number.
        new_latest_version_page = re.sub(
            r'({{Translation\|Latest experimental version}}: \[https:\/\/forums\.factorio\.com\/)\d+ \d\.\d+\.\d+',
            rf'\g<1>{forum_post_number} {version}', latest_version_page)
    else:
        # No experimental version listed yet: turn the 'None' placeholder
        # into a forum link.
        new_latest_version_page = re.sub(
            r'({{Translation\|Latest experimental version}}: ){{Translation\|None}}',
            rf'\g<1>[https://forums.factorio.com/{forum_post_number} {version}]',
            latest_version_page)

    if version_nav:
        # '0.17.5' -> 'Version history/0.17.0#0.17.5' (major series page
        # plus an anchor for the exact version).
        series_prefix = version[:version.rfind(".") + 1]
        new_version_nav_page = re.sub(
            r'(}}\n)(}}\n<noinclude>{{Documentation}}<\/noinclude>)',
            rf'\1* {{{{TransLink|Version history/{series_prefix}0#{version}|{version}}}}}\n\2',
            version_nav_page)

    latest_response = edit_page(session, api_url, edit_token,
                                latest_version_page_name,
                                new_latest_version_page, f'{version}')
    if version_nav:
        nav_response = edit_page(session, api_url, edit_token,
                                 version_nav_page_name, new_version_nav_page,
                                 f'{version}')

    result = latest_response.text
    if version_nav:
        result += '\n' + nav_response.text
    return result
Code example #2 (score: 0)
File: analytics.py — project: Bilka2/Wiki-scripts
def main():
    """Publish last week's page-view analytics as a wikitable.

    Reads the analytics CSV next to this script (module-level `file_name`),
    merges the '/' and 'Main_Page' rows, and writes the top pages to the
    wiki page given by the module-level `page`. Returns the edit response
    text.
    """
    session = requests.Session()
    edit_token = get_edit_token(session, api_url)

    csv_path = os.path.dirname(__file__) + '/' + file_name
    with open(csv_path, newline='') as csvfile:
        rows = list(csv.reader(csvfile))

    # csv config
    page_data_start = 7
    number_of_pages = 25
    total_views_row = page_data_start + number_of_pages
    wanted_number_of_pages = 21  # has to be < number_of_pages

    totals = rows[total_views_row]
    content = (f'Total number of views in the last week: {totals[1]}'
               f' ({totals[2]} unique)'
               '\n{|class=wikitable\n!#\n!Page\n!Number of views (unique)')

    # Row objects are shared with `rows`, so in-place edits stick.
    page_rows = rows[page_data_start:page_data_start + number_of_pages]

    # Add together the two main pages ('/' and 'Main_Page'): first find the
    # bare '/' row's counts, then fold them into the 'Main_Page' row.
    main_views = 0
    main_uniques = 0
    for row in page_rows:
        if row[0].replace('/', '', 1) == '':
            main_views = int(row[1].replace(',', ''))
            main_uniques = int(row[2].replace(',', ''))
    for row in page_rows:
        if row[0].replace('/', '', 1) == 'Main_Page':
            row[1] = str(int(row[1].replace(',', '')) + main_views)
            row[2] = str(int(row[2].replace(',', '')) + main_uniques)

    # Convert view counts to ints so the sort is numeric.
    for row in page_rows:
        row[1] = int(row[1].replace(',', ''))
    sorted_rows = sorted(page_rows, key=itemgetter(1),
                         reverse=True)[:wanted_number_of_pages]

    n = 1
    for row in sorted_rows:
        title = row[0].replace('/', '', 1)
        if title == '':
            # The bare '/' row was merged into 'Main_Page'; skip it.
            continue
        # One wikitable row per page.
        content += f'\n|-\n|{n}\n|[[{title}]]\n|{row[1]} ({row[2].replace(",", "")})'
        n += 1
    content += '\n|}'

    edit_response = edit_page(session, api_url, edit_token, page, content,
                              'Updated top pages')

    return edit_response.text
Code example #3 (score: 0)
    def update_infobox(self, file_name, klass):
        """Create or update 'Infobox:<name>' wiki pages from a JSON data dump.

        file_name: base name of the dump — reads
            data/<version>/wiki-<file_name>-<version>.json next to this file.
        klass: wrapper class instantiated as klass(name, data) per JSON entry;
            must expose .name and .get_all_properties().

        Pages listed in self.no_infobox are skipped; missing pages are created
        from a bare infobox skeleton. In testing mode the result is printed
        instead of saved.
        """
        with open(
                os.path.dirname(os.path.abspath(__file__)) +
                f'/data/{self.version}/wiki-{file_name}-{self.version}.json',
                'r') as f:
            file = json.load(f)

        session = requests.Session()
        edit_token = get_edit_token(session, self.api_url)

        for name, data in file.items():
            infobox_data = klass(name, data)
            # Checked after instantiation so that suffixes like '(research)'
            # are already appended to the name for techs and similar.
            if infobox_data.name in self.no_infobox:
                continue
            page_name = 'Infobox:' + infobox_data.name
            page = get_page_safe(session, self.api_url, page_name)
            if not page:
                print(
                    f'Page for Infobox:{infobox_data.name} does not exist. Creating....'
                )
                page = '{{Infobox\n}}<noinclude>[[Category:Infobox page]]</noinclude>'
            new_page = page
            summary = ''

            # Renamed from 'property' to avoid shadowing the builtin.
            for prop in infobox_data.get_all_properties():
                new_page, summary = self.update_property(
                    prop, new_page, summary)

            if page != new_page:
                if self.testing:
                    print(new_page + '      ' + summary)
                else:
                    print(
                        edit_page(session, self.api_url, edit_token, page_name,
                                  new_page, summary).text)
            else:
                print(f'{infobox_data.name} was not changed.')
Code example #4 (score: 0)
File: redirects.py — project: Bilka2/Wiki-scripts
def main():
    """Regenerate the 'Factorio:Redirects' listing page.

    Collects all redirects from the main and File (namespace 6) namespaces,
    sorts them, renders them into a wikitable, and saves the page. Returns
    the wiki API edit response text.
    """
    session = requests.Session()
    edit_token = get_edit_token(session, api_url)

    pages = get_allpages(session, api_url, apfilterredir='redirects')
    pages += get_allpages(session,
                          api_url,
                          apfilterredir='redirects',
                          apnamespace='6')

    Redirect.session = session
    redirects = sorted(Redirect(p['title']) for p in pages)

    table_rows = [f'\n|-\n|{i+1}{r}' for i, r in enumerate(redirects)]
    content = ('{|class=wikitable\n!#\n!Redirect\n!Links to this redirect'
               + ''.join(table_rows) + '\n|}')

    edit_response = edit_page(session, api_url, edit_token,
                              'Factorio:Redirects', content,
                              'Updated the list of redirects.')
    return edit_response.text
Code example #5 (score: 0)
def main(testing):
    """Rebuild the per-language 'Factorio:Wanted pages/<location>' listings.

    Fetches wanted pages from the wiki, annotates them via WantedPage class
    attributes (stubs / archived / disambiguations), groups them by
    location, and renders one wikitable per location. When `testing` is
    truthy the rendered wikitext is collected instead of being saved.
    Returns the list of outputs / edit response texts.
    """
    print('Getting wanted pages')

    session = requests.Session()
    edit_token = get_edit_token(session, api_url)

    wanted_pages = get_wantedpages(session, api_url, qpoffset='0')
    wanted_pages.extend(get_wantedpages(session, api_url, qpoffset='5000'))
    #TODO: Automatically check if this is needed:
    #wanted_pages.extend(get_wantedpages(session, api_url, qpoffset = '10000'))
    print('Converting wanted pages')

    def titles_in(category):
        # One-line helper: page titles of all members of a wiki category.
        return [p['title'] for p in get_categorymembers(session, api_url, category)]

    WantedPage.stubs = titles_in('Category:Stubs')
    WantedPage.archived = titles_in('Category:Archived')
    WantedPage.disambigs = titles_in('Category:Disambiguations')
    WantedPage.session = session

    wanted_pages = [WantedPage(p['title'], p['value']) for p in wanted_pages]
    print('Sorting wanted pages')
    wanted_pages.sort()

    print('Separating wanted pages by location')
    by_location = defaultdict(list)
    for page in wanted_pages:
        by_location[page.location].append(page)

    print('Generating output and editing pages')
    edit_responses = []
    for location, pages in by_location.items():
        rows = [f'\n|-\n|{i+1}{page}' for i, page in enumerate(pages)]

        # Language subpages get an extra column for the English page length.
        header = '\n{|class=wikitable\n!#\n!Page\n!Links to this page'
        if location in WantedPage.valid_lang_suffixes:
            header += '\n!Length of the corresponding English page in bytes'
        output = ('Number of wanted pages: ' + str(len(rows)) + header
                  + ''.join(rows) + '\n|}')

        if testing:
            edit_responses.append(output)
        else:
            edit_responses.append(
                edit_page(session, api_url, edit_token,
                          'Factorio:Wanted pages/' + location, output,
                          'Updated the list of wanted pages').text)

    return edit_responses
Code example #6 (score: 0)
File: 017_update.py — project: Bilka2/Wiki-scripts
def update():
    """One-shot wiki migration for the Factorio 0.17 release.

    Driven by local JSON data files: archives pages removed in 0.17, moves
    renamed pages (updating the infobox transclusions on them), uploads new
    icon files, creates new pages, regenerates the infoboxes, and updates
    the VersionNav template, the front-page version box, and the sitenotice.
    """
    api_url = 'https://wiki.factorio.com/api.php'
    session = requests.Session()
    edit_token = get_edit_token(session, api_url)
    data_dir = 'C:\\Users\\Win 10\\Documents\\Wiki-data\\'
    with open(data_dir + 'moves_and_more.json') as f:
        moves_and_more_data = json.load(f)

    # move that one really special page - need to update the infobox on the page too + archive the page
    for title in moves_and_more_data['special_move']:
        page = get_page(session, api_url, title)
        page = page.replace('{{:Infobox:Wood}}',
                            '{{:Infobox:Wood (archived)}}')
        if 'Infobox' not in title:
            page = '{{archive}}' + page

        print(
            edit_page(session, api_url, edit_token, title, page,
                      'Archived wood (removed in 0.17)').text)

        print(
            move_page(session, api_url, edit_token, title,
                      title.replace('Wood', 'Wood (archived)'),
                      'Archived wood (removed in 0.17)',
                      False).text)  # no redirect

    # archive pages + files = prepend edit {{archive}} onto them
    for title in moves_and_more_data['archive']:
        print(
            edit_page(session, api_url, edit_token, title, '{{archive}}',
                      'Archived page (removed in 0.17)',
                      True).text)  # prepend edit

    # move pages + files - leave redirects - also do infoboxes on the pages
    for move_data in moves_and_more_data['move']:
        if 'Infobox' not in move_data['from'] and 'File' not in move_data[
                'from']:
            page = get_page(session, api_url, move_data['from'])
            # Strip any '/lang' suffix so the infobox template name matches.
            # (Raw strings: the original '\/' and '\S' were invalid string
            # escapes that only worked by accident.)
            from_title_no_lang_suffix = re.search(r'([^/]+)(\/\S+)?',
                                                  move_data['from']).group(1)
            to_title_no_lang_suffix = re.search(r'([^/]+)(\/\S+)?',
                                                move_data['to']).group(1)
            page = page.replace(
                '{{:Infobox:' + from_title_no_lang_suffix + '}}',
                '{{:Infobox:' + to_title_no_lang_suffix + '}}')

            print(
                edit_page(session, api_url, edit_token, move_data['from'],
                          page, 'Renamed in 0.17').text)

        print(
            move_page(session, api_url, edit_token, move_data['from'],
                      move_data['to'], 'Renamed in 0.17').text)

    # upload files
    for filename in moves_and_more_data['upload']:
        # BUG FIX: the original opened the icons *directory* path and
        # ignored `filename`, and never closed the handle. Open the actual
        # icon file (assumes 'upload' entries are plain file names in the
        # icons folder — TODO confirm against the data file).
        with open(data_dir + 'icons\\' + filename, 'rb') as file:
            print(
                upload_file(session, api_url, edit_token, filename, file,
                            '{{Game image}}').text)

    # create pages
    with open(data_dir + 'new_pages.json') as f:
        create_page_data = json.load(f)

    for name, page in create_page_data.items():
        print(
            edit_page(session, api_url, edit_token, name, page,
                      'Added in 0.17').text)

    # infobox update
    InfoboxUpdate([
        InfoboxType.Entity, InfoboxType.Technology, InfoboxType.Item,
        InfoboxType.Recipe, InfoboxType.Prototype
    ], api_url, '0.17.0', False)

    # updating https://wiki.factorio.com/Template:VersionNav
    versionnav = get_page(session, api_url, 'Template:VersionNav')
    versionnav = versionnav.replace(
        '}}\n<noinclude>',
        '|group10 = {{Translation|0.17}}\n|list10 =\n* {{TransLink|Version history/0.17.0#0.17.0|0.17.0}}\n}}\n<noinclude>'
    )
    print(
        edit_page(session, api_url, edit_token, 'Template:VersionNav',
                  versionnav, '0.17').text)

    # updating https://wiki.factorio.com/Main_Page/Latest_versions
    latest_versions = get_page(session, api_url, 'Main_Page/Latest_versions')
    latest_versions = latest_versions.replace(
        '[[File:Space science pack.png|link=]]',
        '[[File:Automation science pack.png|link=]]')
    latest_versions = latest_versions.replace(
        '[[File:Speed module 3.png|link=]]', '[[File:Speed module.png|link=]]')
    latest_versions = latest_versions.replace(
        '{{Translation|The wiki is based on version}} [[Version history/0.16.0|0.16]]',
        '{{Translation|The wiki is based on version}} [[Version history/0.17.0|0.17]]'
    )
    print(
        edit_page(session, api_url, edit_token, 'Main_Page/Latest_versions',
                  latest_versions, 'Experimental 0.17').text)

    # sitenotice https://wiki.factorio.com/MediaWiki:Sitenotice
    sitenotice = "'''This wiki is about [[Tutorial:Cheat_sheet#0.17_change_overview|0.17]], the current [[Install_guide#Downloading_and_installing_experimental_versions|experimental version]] of ''Factorio''.'''\n\nInformation about 0.16, the current stable version of ''Factorio'', can be found on [https://stable.wiki.factorio.com/ stable.wiki.factorio.com]."
    print(
        edit_page(session, api_url, edit_token, 'MediaWiki:Sitenotice',
                  sitenotice, 'Experimental 0.17').text)
Code example #7 (score: 0)
File: new_fff.py — project: Bilka2/Wiki-scripts
def main(api_url='https://wiki.factorio.com/api.php'):
    """Add the newest Friday Facts blog post to the wiki's FFF news page.

    Reads the factorio.com RSS feed; if the latest entry is an FFF post,
    inserts it at the top of the page's 'Latest' section, demotes the
    oldest 'Latest' entry into the month-keyed 'Archive' section, and
    saves the page named by the module-level `page_name`.

    Returns the wiki API edit response text, or an abort message string.
    """
    feed = feedparser.parse('https://www.factorio.com/blog/rss')
    if not ('Friday Facts' in feed.entries[0].title):
        return 'Latest blog post is not FFF. Aborting.'
    # Post title with the 'Friday Facts #NNN - ' prefix stripped.
    title = feed.entries[0].title.replace(
        re.search('Friday Facts #\d+ - ', feed.entries[0].title).group(),
        '')  #without {Friday Facts #241 - } etc
    number = re.search('#\d+',
                       feed.entries[0].title).group().replace('#', '')  #string
    release_time = time.strftime("%b %d",
                                 feed.entries[0].updated_parsed)  #string
    news_line = '* {0} FFF #{1}: [https://www.factorio.com/blog/post/fff-{1} {2}]\n'.format(
        release_time, number, title)  #line that ends up on page

    session = requests.Session()
    edit_token = get_edit_token(session, api_url)

    content = get_page(session, api_url, page_name)

    if news_line in content:
        return 'FFF already found on page. Aborting.'

    # Split the page into level-2 sections; expects 'Latest' and 'Archive'.
    sections = turn_page_into_sections(content, 2)

    for section in sections:
        if section['title'] == 'Latest':
            latest_news = section['content']
        elif section['title'] == 'Archive':
            # The archive is further split into level-4 month sections.
            archive_sections = turn_page_into_sections(section['content'], 4)

    #add new fff
    # Insertion point: just before the first '* Mmm' bullet inside the
    # two-column div (-5 backs up over the matched '\n* Mm' bullet start).
    news_list_start = re.search(
        '<div style=\"column-count:2;-moz-column-count:2;-webkit-column-count:2\">\n\* \w\w\w',
        latest_news).end() - 5
    new_news = latest_news[:news_list_start] + news_line + latest_news[
        news_list_start:]

    #remove oldest fff
    # The oldest bullet sits directly before '</div><includeonly>'; the
    # re.sub strips that trailing marker, leaving just the bullet line.
    old_fff = re.sub(
        '\n</div><includeonly>', '',
        re.search('(\* [^\n]+?)\n</div><includeonly>', new_news).group())
    new_news = new_news.replace(old_fff + '\n', '')

    #add oldest fff to archive_section
    # The month abbreviation at the start of the bullet ('* Mmm ...')
    # decides which archive month section receives the demoted entry.
    old_month = month_abbr_to_month_name(
        re.search('\* \w\w\w', old_fff).group().replace('* ', ''))
    found_section = False
    for section in archive_sections:
        if section['title'] == old_month:
            found_section = True
            # Insert right after the '=\n\n' that ends the section heading.
            archive_list_start = re.search('=\n\n*', section['content']).end()
            section['content'] = section[
                'content'][:archive_list_start] + old_fff + '\n' + section[
                    'content'][archive_list_start:]

    if not found_section:
        # No section exists for that month yet: append a new '====' heading
        # (plus the entry) to the archive's leading section.
        archive_sections[0][
            'content'] += '\n==== ' + old_month + ' ====\n\n' + old_fff + '\n'

    #final content
    # Reassemble: archive months joined back together, then both top-level
    # sections concatenated in their original order.
    archive_joined = ''
    for section in archive_sections:
        archive_joined += section['content']

    for section in sections:
        if section['title'] == 'Latest':
            section['content'] = new_news
        elif section['title'] == 'Archive':
            section['content'] = archive_joined

    end_content = ''
    for section in sections:
        end_content += section['content']

    edit_response = edit_page(session, api_url, edit_token, page_name,
                              end_content, 'New FFF')

    return edit_response.text