Exemplo n.º 1
0
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Python 2 script: walks tr.wikipedia's Special:PendingChanges listing and,
# for each single-revision pending diff, queries the ORES "damaging" model
# for the pending revision's score, accumulating a wikitable report.
# NOTE(review): snippet is truncated in this view — the `reverted = \`
# continuation at the end has no right-hand side here.
from bs4 import BeautifulSoup
import requests
import mavri

# Log in via the external `mavri` helper; xx carries the session cookies.
xx = mavri.login('tr.wikipedia', 'Mavrikant')
# Wikitable header for the report being accumulated.
RAPOR = '{| class="wikitable sortable"\n|-\n! Sayfa !! Damaging !! Reverted'
trwiki = 'https://tr.wikipedia.org'
# Special:PendingChanges (percent-encoded Turkish title), 100 entries per page.
nextpage = '/w/index.php?title=%C3%96zel:BekleyenDe%C4%9Fi%C5%9Fiklikler&limit=100'
while nextpage != 'DONE':
    soup = BeautifulSoup(requests.get(trwiki + nextpage, cookies=xx.cookies).text, 'html.parser')
    try:
        # Follow the pagination link when present.
        nextpage = soup.findAll("a", {"class": "mw-nextlink"})[0].get('href')
    except:
        # No next link -> last page; sentinel ends the loop.
        nextpage = 'DONE'

    for line in soup.find("div", {"id": "mw-content-text"}).ul.find_all('li'):
        # Third <a> is the review ("incele") diff link, first <a> the page title.
        incele = line.find_all('a')[2].get('href')
        title = line.find_all('a')[0].get('title')
        incele_text = requests.get(trwiki + incele, cookies=xx.cookies).text

        # Only score single-revision diffs ('diff-multi' marks merged diffs).
        if incele_text.find('diff-multi') == -1:
            # Scrape a revision id from the hidden FlaggedRevs form input
            # (presumably the pending revision's oldid — confirm against the page).
            diff = incele_text.split('<input id="mw-fr-input-oldid" type="hidden" value="')[1].split('"')[0]
            print title
            # print diff
            # Probability (as a percentage) that the edit is damaging, per ORES.
            damaging = \
                requests.get('http://ores.wmflabs.org/scores/trwiki/damaging/' + str(diff)).json()[str(diff)][
                    'probability'][
                    'true'] * 100
            # Truncated here in this view: right-hand side of `reverted` missing.
            reverted = \
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Bot script: polls tr.wikipedia's revision-deletion requests page
# ('Vikipedi:Sürüm gizleme talepleri') section by section, extracting the
# timestamp and reporter arguments of {{User:Evrifaessa/SGT|...|...}}.
# NOTE(review): snippet is truncated in this view (ends inside the
# `if timestamp and informer:` branch); the request handling that follows
# is not visible here.

import re
import socket
import time
from datetime import datetime
import platform

import mavri

wiki = 'tr.wikipedia'
# NOTE(review): username appears redacted; real credential supplied elsewhere.
username = '******'
xx = mavri.login(wiki, username)
title = 'Vikipedi:Sürüm gizleme talepleri'
version = 'V3.0g'
# Suffix appended to every edit summary: bot, version, host OS, bug-report link.
summary_ek = " (" + username + ", " + version + " running on " + platform.system() + "), ([[Kullanıcı mesaj:Evrifaessa|hata bildir]])"
section = 1
ignore_list=[]
# Maps ASCII control codepoints 0-31 to None (presumably for str.translate —
# the use site is not visible in this view).
mpa = dict.fromkeys(range(32))

while 1:
        now = datetime.now()
        content = mavri.content_of_section(wiki, title, section, xx)

        if content != '':
            # First template argument: the request timestamp.
            # NOTE(review): patterns are not raw strings; the escapes happen to
            # pass through, but r'...' would be cleaner.
            timestamp = re.findall('\{\{\s*User:Evrifaessa\/SGT\s*\|\s*([^\|\}]*)\s*\|\s*[^\|\}]*\s*\}\}', content)
            # Second template argument: the reporting user.
            informer = re.findall('\{\{\s*User:Evrifaessa\/SGT\s*\|\s*[^\|\}]*\s*\|\s*([^\|\}]*)\s*\}\}', content)
            if timestamp and informer:
                timestamp = timestamp[0]
                informer = informer[0]
Exemplo n.º 3
0
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Bot script ("KET Bot"): polls tr.wikipedia's user-block requests page
# ('Vikipedi:Kullanıcı engelleme talepleri') section by section and extracts
# the target username from {{Vandal|...}} templates.
# NOTE(review): snippet is truncated in this view right after the
# re.findall call; the per-request handling is not visible here.

import re
import socket
import time
from datetime import datetime
import platform

import mavri

wiki = 'tr.wikipedia'
xx = mavri.login(wiki, 'KET Bot')
title = 'Vikipedi:Kullanıcı engelleme talepleri'
version = 'V3.0g'
# Edit-summary suffix: bot name, version, host OS and a bug-report link.
summary_ek = " (KET Bot, " + version + " running on " + platform.system(
) + "), ([[Kullanıcı mesaj:Evrifaessa|hata bildir]])"
section = 1
# Usernames this bot instance must never act on.
localignorelist = ["Dr. Coal"]
# Maps ASCII control codepoints 0-31 to None (presumably for str.translate —
# the use site is not visible in this view).
mpa = dict.fromkeys(range(32))

while 1:
    try:
        datetime_now = datetime.now()
        now = datetime.now()
        content = mavri.content_of_section(wiki, title, section, xx)

        if content != '':
            # Capture the username argument of a {{Vandal|...}} template
            # (first letter matched case-insensitively).
            vandal = re.findall('\{\{\s*[Vv]andal\s*\|\s*([^\}]*)\s*\}\}',
                                content)
Exemplo n.º 4
0
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Python 2 script: copies categories from English Wikipedia articles to the
# corresponding tr.wikipedia pages via their Wikidata sitelinks.

import re

import requests
from bs4 import BeautifulSoup

import mavri

wiki = 'tr.wikipedia'
wikiS = 'trwiki'
xx = mavri.login(wiki, 'Mavrikant Bot')
# Localized name of the Category namespace (ns 14) fetched from the site API.
catNS = requests.get(
        'https://' + wiki + '.org/w/api.php?format=json&utf8=&action=query&meta=siteinfo&siprop=namespaces').json()[
    'query']['namespaces']['14']['*']


def add_category(page):
    # Collect the trwiki equivalents of the page's enwiki categories.
    # NOTE(review): function is truncated in this view (ends at the debug
    # print); whatever applies cat_to_add to the page is not visible here.
    if mavri.content_of_page(wiki, page):
        # enwiki sitelink of the page's Wikidata item, if any.
        ENpage = mavri.wbgetlanglink(mavri.wikibase_item(wiki, page), 'enwiki')
        if ENpage:
            print ENpage
            ENcat = mavri.categories_on_enwiki(ENpage)
            print ENcat
            cat_to_add = []
            for cat in ENcat:
                # Map each enwiki category to its trwiki counterpart (if linked).
                ncat = mavri.wbgetlanglink(mavri.wikibase_item('en.wikipedia', 'Category:' + cat), wikiS)
                if ncat != '':
                    cat_to_add.insert(0, ncat)
            print cat_to_add
Exemplo n.º 5
0
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Python 2 script: walks tr.wikipedia's Special:PendingChanges listing to
# build a "Damaging / Goodfaith" wikitable report (variant of the first
# example, logged in as 'Evrifaessa').
# NOTE(review): snippet is truncated in this view right after the revision
# id is scraped; the scoring/reporting code is not visible here.
from bs4 import BeautifulSoup
import requests
import mavri

xx = mavri.login('tr.wikipedia', 'Evrifaessa')
# Wikitable header for the accumulated report.
RAPOR = '{| class="wikitable sortable"\n|-\n! Sayfa !! Damaging !! Goodfaith'
trwiki = 'https://tr.wikipedia.org'
# Special:PendingChanges, 50 entries per page.
nextpage = '/wiki/%C3%96zel:BekleyenDe%C4%9Fi%C5%9Fiklikler?limit=50'

counter = 0

while nextpage != 'DONE':
    soup = BeautifulSoup(
        requests.get(trwiki + nextpage, cookies=xx.cookies).text,
        'html.parser')
    try:
        # Follow pagination; absence of the link marks the last page.
        nextpage = soup.findAll("a", {"class": "mw-nextlink"})[0].get('href')
    except:
        nextpage = 'DONE'

    for line in soup.find("div", {"id": "mw-content-text"}).ul.find_all('li'):
        # Third <a> is the review ("incele") diff link, first <a> the page title.
        incele = line.find_all('a')[2].get('href')
        title = line.find_all('a')[0].get('title')
        incele_text = requests.get(trwiki + incele, cookies=xx.cookies).text

        # Only single-revision diffs ('diff-multi' marks merged diffs).
        if incele_text.find('diff-multi') == -1:
            # Revision id scraped from the hidden FlaggedRevs form input.
            diff = incele_text.split(
                '<input id="mw-fr-input-oldid" type="hidden" value="'
            )[1].split('"')[0]
Exemplo n.º 6
0
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Bot script ("KET Bot", V2): walks the sections of tr.wikipedia's user-block
# requests page. For each section containing a {{Vandal|username}} template it
# checks whether that user is already blocked and, if so, clears the section
# with a summary crediting the blocking admin and the block reason; sections
# without a Vandal template are cleared outright.

import mavri
import time
import re

wiki = 'tr.wikipedia'
xx = mavri.login(wiki, 'KET Bot')
title = 'Vikipedi:Kullanıcı engelleme talepleri'
version = 'V2'
# Suffix appended to every edit summary.
summary_ek = " (WMF-Labs, " + version + ")"
section = 1

# Extracts the username argument of a {{Vandal|...}} template (first letter
# matched case-insensitively). Compiled once, outside the polling loop.
VANDAL_RE = re.compile(r'\{\{\s*[Vv]andal\s*\|\s*([^\}]*)\s*\}\}')

# NOTE(review): when a section comes back empty, `section` is never advanced
# and this loop re-fetches the same section with no delay — confirm whether
# mavri.content_of_section blocks, or whether a sleep/reset was intended.
while 1:

    content = mavri.content_of_section(wiki, title, section, xx)

    if content != '':
        vandal = VANDAL_RE.findall(content)
        if vandal:
            vandal = vandal[0]
            blocked = mavri.blocked(wiki, vandal)
            # Parse the block-list API response once instead of three times.
            blocks = blocked.json()['query']['blocks']
            if blocks:
                by = blocks[0]['by']
                reason = blocks[0]['reason']
                summary = '[[Özel:Katkılar/' + vandal + '|' + vandal + ']] çıkartıldı. [[Kullanıcı:' + by + '|' + by + ']] - ' + reason + summary_ek
                mavri.section_clear(wiki, title, section, summary, xx)
        else:
            # print(...) parses under both Python 2 and Python 3 for a single
            # argument (the original Py2-only print statement did not).
            print(mavri.section_clear(wiki, title, section, '{{Vandal|XXXX}} içermeyen başlık kaldırıldı.' + summary_ek, xx).text)
        section += 1
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Bot script ("Evrifaessa Bot"): polls tr.wikipedia's user-block requests
# page section by section and extracts the target username from
# {{Vandal|...}} templates.
# NOTE(review): snippet is truncated in this view just after `if vandal:`;
# the request handling is not visible here.

import re
import socket
import time
from datetime import datetime
import platform

import mavri

wiki = 'tr.wikipedia'
xx = mavri.login(wiki, 'Evrifaessa Bot')
title = 'Vikipedi:Kullanıcı engelleme talepleri'
version = 'V3.0g'
# Edit-summary suffix: bot name, version, host OS and a bug-report link.
summary_ek = " (Evrifaessa Bot, " + version + " running on " + platform.system(
) + "), ([[Kullanıcı mesaj:Evrifaessa|hata bildir]])"
section = 1
# Maps ASCII control codepoints 0-31 to None (presumably for str.translate —
# the use site is not visible in this view).
mpa = dict.fromkeys(range(32))

while 1:
    try:
        datetime_now = datetime.now()
        now = datetime.now()
        content = mavri.content_of_section(wiki, title, section, xx)

        if content != '':
            # Capture the username argument of a {{Vandal|...}} template.
            vandal = re.findall('\{\{\s*[Vv]andal\s*\|\s*([^\}]*)\s*\}\}',
                                content)

            if vandal:
Exemplo n.º 8
0
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Python 2 script: removes the "uncategorized" maintenance templates
# ({{Kategorisiz}} / {{Uncategorized}}) from tr.wikipedia pages that do in
# fact contain a category link.

import random
import re

import requests

import mavri

wiki = 'tr.wikipedia'
wikiS = 'trwiki'
xx = mavri.login(wiki, 'Mavrikant Bot')
# Localized name of the Category namespace (ns 14) fetched from the site API.
catNS = requests.get(
    'https://' + wiki +
    '.org/w/api.php?format=json&utf8=&action=query&meta=siteinfo&siprop=namespaces'
).json()['query']['namespaces']['14']['*']


def add_category(page):
    # Strip uncategorized-maintenance templates when the page already carries
    # at least one local-namespace category link, then save the page.
    # NOTE(review): the snippet may be truncated here — any handling of pages
    # that truly lack categories is not visible in this view.
    print page
    content = mavri.content_of_page(wiki, page)
    if content:
        # Page already contains a [[<Category-NS>:...]] link.
        if re.findall(r'\[\[\s?' + catNS + '[^\]]*\]\]', content) != []:
            content = re.sub(r'\{\{\s?[Kk]ategorisiz[^\}]*\}\}\s?\n?', '',
                             content)
            content = re.sub(r'\{\{\s?[Uu]ncategorized[^\}]*\}\}\s?\n?', '',
                             content)
            return mavri.change_page(wiki, page, content,
                                     '- Kategorisiz Şablonu', xx)
Exemplo n.º 9
0
# -*- coding: utf-8 -*-
# !/usr/bin/python
# Script: for every tr.wikipedia page transcluding {{TFF futbolcu|<id>}},
# copy the numeric TFF player id into the page's Wikidata item as a P2448
# claim — but only when the item has no existing claims for that property.
from bs4 import BeautifulSoup
import requests
import mavri
import re

xx = mavri.login('www.wikidata', 'Mavrikant')

wiki = 'https://tr.wikipedia.org'
template = 'Şablon:TFF futbolcu'
# Captures the numeric id argument of the template.
value = r'\{\{TFF futbolcu\|(\d*)\}\}'
# Renamed from `property` so the builtin is not shadowed.
prop = 'P2448'

ticontinue = ''
while ticontinue != 'DONE':
    # List pages transcluding the template, 500 at a time, resuming from the
    # API's continuation token.
    allpages = requests.get(wiki + '/w/api.php?action=query&utf8&format=json&tiprop=title&titles=' + template + '&prop=transcludedin&tilimit=500&ticontinue=' + str(ticontinue))
    try:
        ticontinue = allpages.json()['continue']['ticontinue']
    except KeyError:
        # No continuation block in the response -> this is the last batch.
        ticontinue = 'DONE'

    # The 'pages' map has a single entry (the template page itself); take its
    # value. next(iter(...)) works on both Python 2 and 3, unlike the
    # original Py2-only .itervalues().next().
    pages = allpages.json()['query']['pages']
    for page in next(iter(pages.values()))['transcludedin']:

        title = page['title']
        content = mavri.content_of_page('tr.wikipedia', title)
        matches = re.findall(value, content)
        if not matches:
            # Transclusion does not match the expected form (e.g. no numeric
            # argument); the original crashed with IndexError here — skip.
            continue
        tff_id = matches[0]
        entity = mavri.wikibase_item('tr.wikipedia', title)

        # Empty claims object means the item has no P2448 statement yet.
        if mavri.wbgetclaims(entity, prop).text == '{"claims":{}}':
            mavri.wbcreateclaim(entity, prop, 'value', str(tff_id), xx)