Example #1
def matrix(wiki):
    params = {
        'action': 'sitematrix',
        'format': 'json',
    }
    req = wikitools.api.APIRequest(wiki, params)
    data = req.query()
    for val in data['sitematrix']:
        if val.isdigit():
            # Numeric keys are language groups; their wikis are listed under 'site'.
            for site in data['sitematrix'][val]['site']:
                yield wikitools.Wiki(site['url'])
        elif val == 'specials':
            for site in data['sitematrix'][val]:
                if 'private' not in site:  # Let someone else touch these...
                    yield wikitools.Wiki(site['url'])
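For context, the two branches above mirror the shape of the sitematrix response: numeric keys hold language groups whose wikis sit under a 'site' list, while 'specials' is a flat list of special wikis, with private ones carrying a 'private' flag. A trimmed sketch of that structure (field values below are illustrative, not taken from the examples):

sample_sitematrix = {
    'count': 900,
    '0': {
        'code': 'aa',
        'localname': 'Afar',
        'site': [
            {'url': 'https://aa.wikipedia.org', 'dbname': 'aawiki', 'code': 'wiki'},
        ],
    },
    'specials': [
        {'url': 'https://meta.wikimedia.org', 'dbname': 'metawiki', 'code': 'meta'},
        {'url': 'https://office.wikimedia.org', 'dbname': 'officewiki', 'code': 'office', 'private': ''},
    ],
}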
Example #2
def main():
    """Script that tests some API calls and workflows used by the UploadWizard.
        Example:
            $ python upload-wizard_tests.py --username some_username --password secret_password
    """

    # Global variables that are going to be used by the tests.
    global wiki
    global verbosity
    global generate_new_image

    # Parse command-line arguments
    parser = argparse.ArgumentParser(description="Upload Wizard API smoke tests.")
    parser.add_argument("--api_url", default="https://commons.wikimedia.org/w/api.php",
                        help="URL of wiki API, such as http://example.org/w/api.php")
    parser.add_argument("--username", help="Username for API calls. You can also set MEDIAWIKI_USER")
    parser.add_argument("--password",
                        help="Password for API calls. You can also set MEDIAWIKI_PASSWORD " +
                        "or MEDIAWIKI_PASSWORD_VARIABLE (points to env var with password value)")
    parser.add_argument("-v", "--verbose", type=int, default=0, help="Increase output verbosity")
    parser.add_argument("--gen_new_image", action="store_true", help="Create a new image with current timestamp")
    args = parser.parse_args()

    username = args.username or os.getenv("MEDIAWIKI_USER")
    password = args.password or os.getenv("MEDIAWIKI_PASSWORD") or os.getenv(os.getenv("MEDIAWIKI_PASSWORD_VARIABLE"))

    if username is None or password is None:
        sys.stderr.write(
            "error: username and password required. Pass these values with the corresponding flags or set " +
            "the env variables: MEDIAWIKI_USER and MEDIAWIKI_PASSWORD or " +
            "MEDIAWIKI_PASSWORD_VARIABLE (points to env var with password value)\n")
        sys.exit(1)

    # Create wikitools object
    wiki = wikitools.Wiki(args.api_url)
    generate_new_image = args.gen_new_image
    verbosity = args.verbose

    # Log in user
    wiki.login(username, password)

    if not wiki.isLoggedIn():
        sys.stderr.write("Wrong credentials, please try again.\n")
        sys.exit(1)

    # Switch to directory of script
    abspath = os.path.abspath(__file__)
    dname = os.path.dirname(abspath)
    os.chdir(dname)

    # Run tests
    suite = unittest.TestLoader().loadTestsFromTestCase(TestUploadWizardAPICalls)
    unittest.TextTestRunner(verbosity=verbosity).run(suite)

    # Log out user
    wiki.logout()
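The password fallback in main() relies on MEDIAWIKI_PASSWORD_VARIABLE naming another environment variable that holds the actual password. A minimal sketch of that same lookup chain, written defensively so an unset indirection variable does not blow up (resolve_password is a hypothetical helper, not part of the script above):

import os

def resolve_password(cli_password=None):
    # Order: explicit flag, then MEDIAWIKI_PASSWORD, then the variable
    # named by MEDIAWIKI_PASSWORD_VARIABLE (if that indirection is set).
    if cli_password:
        return cli_password
    direct = os.getenv("MEDIAWIKI_PASSWORD")
    if direct:
        return direct
    indirect_name = os.getenv("MEDIAWIKI_PASSWORD_VARIABLE")
    return os.getenv(indirect_name) if indirect_name else None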
Example #3
    def queryApi(self, apiurl, query):
        """
        This function queries the API at apiurl with the given parameters and
        returns the parsed JSON result.

        - apiurl (string): The URL to the API's base.
        - query (dict): A dictionary of API parameters.

        Returns: Dict with the API results.

        TODO: The API query should be reimplemented here so that we do not have
        the wikitools library requirement.
        """
        if not isinstance(query, dict):
            raise TypeError('Query parameter should be type dict'
                            ', got %s instead' % (type(query)))
        wiki = wikitools.Wiki(apiurl)
        api_request = wikitools.APIRequest(wiki, query)
        return api_request.query(querycontinue=False)
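A hedged usage sketch for queryApi: checker stands in for an instance of the enclosing class (whose name is not part of the excerpt), and the parameters form a standard MediaWiki siteinfo query:

# `checker` is a placeholder for an instance of the class that defines queryApi.
result = checker.queryApi(
    'https://en.wikipedia.org/w/api.php',
    {'action': 'query', 'meta': 'siteinfo', 'format': 'json'},
)
print(result['query']['general']['sitename'])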
Example #4
    def __init__(self,
                 title,
                 namespace=False,
                 type="page",
                 action=False):  #should be more agnostic about namespace param
        """
		Instantiate basic variables for the category you're interested in.
		"""
        self.cat_title = "Category:" + title
        self.supercat = "Category:IdeaLab/Ideas/Active"
        self.mem_type = type
        if action:
            self.action = action
# 		print self.type
        if namespace:
            self.mem_namespace = namespace
        else:
            self.mem_namespace = ""
        self.wiki = wikitools.Wiki(grantsbot_settings.apiurl)
        self.wiki.login(grantsbot_settings.username,
                        grantsbot_settings.password)
Example #5
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import MySQLdb
import urllib2
import wikitools
import settings
from BeautifulSoup import BeautifulStoneSoup as bss
from BeautifulSoup import BeautifulSoup as bs

wiki = wikitools.Wiki(settings.apiurl)
wiki.login(settings.username, settings.password)

conn = MySQLdb.connect(host='db67.pmtpa.wmnet',
                       db='jmorgan',
                       read_default_file='~/.my.cnf',
                       use_unicode=1,
                       charset="utf8")
cursor = conn.cursor()

##global variables and output templates
securl = u'http://en.wikipedia.org/w/api.php?action=parse&page=Wikipedia%3ATeahouse%2FHost+landing&prop=sections&format=xml'

#the page where inactive host profiles are displayed
host_breakroom = 'Teahouse/Host_breakroom'
Example #6
FROM wb_terms
WHERE term_entity_id=?
AND term_entity_type="property"
AND term_language="en"
AND term_type="label"
"""
base = 'Wikidata:Database reports/Property inheritance'

header = "A list of pages which have [[Property:{0}|{0}]], \
but not [[Property:{1}|{1}]]. Limited to the first 100 results. \
Data as of <onlyinclude>{2}</onlyinclude>.\n"

config = ConfigParser.ConfigParser()
config.read([os.path.expanduser('~/.dbreps.ini')])

wiki = wikitools.Wiki('http://www.wikidata.org/w/api.php')
wiki.login(config.get('dbreps', 'username'), config.get('dbreps', 'password'))


def get_label(db, pid):
    cursor = db.cursor()
    id = int(pid.replace('P', ''))
    cursor.execute(label_query, (id, ))
    answer = cursor.fetchone()[0]
    return answer


def replag(db):
    cursor = db.cursor()
    cursor.execute(
        'SELECT UNIX_TIMESTAMP() - UNIX_TIMESTAMP(rc_timestamp) FROM recentchanges ORDER BY rc_timestamp DESC LIMIT 1;'
Example #7
                          'rootpage') + 'Orphaned article deletion discussions'

report_template = u'''
Subpages of [[Wikipedia:Articles for deletion]] that have no incoming links; \
data as of <onlyinclude>%s</onlyinclude>.

{| class="wikitable sortable plainlinks" style="width:100%%; margin:auto;"
|- style="white-space:nowrap;"
! No.
! Page
|-
%s
|}
'''

wiki = wikitools.Wiki(config.get('dbreps', 'apiurl'))
wiki.login(config.get('dbreps', 'username'), config.get('dbreps', 'password'))

conn = MySQLdb.connect(host=config.get('dbreps', 'host'),
                       db=config.get('dbreps', 'dbname'),
                       read_default_file='~/.my.cnf')
cursor = conn.cursor()
cursor.execute('''
/* orphanedafds.py SLOW_OK */
SELECT
  page_title
FROM page
LEFT JOIN pagelinks
ON pl_title = page_title
AND pl_namespace = page_namespace
LEFT JOIN templatelinks
Example #8
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import MySQLdb
import wikitools
import grantsbot_settings
from datetime import datetime
import templates

wiki = wikitools.Wiki(grantsbot_settings.apiurl)
wiki.login(grantsbot_settings.username, grantsbot_settings.password)
conn = MySQLdb.connect(host=grantsbot_settings.host,
                       db=grantsbot_settings.dbname,
                       read_default_file=grantsbot_settings.defaultcnf,
                       use_unicode=1,
                       charset="utf8")
cursor = conn.cursor()

##GLOBAL VARIABLES##
curtime = str(datetime.utcnow())
page_namespace = "User_talk:"

# lists to track who needs a reminder
recipients = []
Example #9
wikidataCodes = set()

#Loop through nations
for rec in db:

    #-1 means that no entry existed the last time we checked
    #0 probably means that we haven't checked if there is an entry yet
    if (rec["WIKIDATA"] > 0):
        qid = Qid(rec["WIKIDATA"])
        wikidataCodes.add(qid.get())

print "Found %d wikidata codes" % len(wikidataCodes)

# Set up WikiData
print "Connecting to WikiData"
site = wikitools.Wiki("http://www.wikidata.org/w/api.php", settings.login.user,
                      settings.login.password)
site.login(settings.login.user, settings.login.password)
print "Logged in?",
print site.isLoggedIn()

#Store qid's for second round of label lookups
secondLookup = {}
for pkey, (pid, abbr) in settings.entityProperties.items():
    secondLookup[pkey] = set()

#Store internal image id's
imageIDs = {}
fileId = 0
#Store image names
imagesFound = {}
for pkey, (pid, abbr) in settings.imageProperties.items():
Example #10
    table_row = u"""
|-
| %s
| [https://gerrit.wikimedia.org/r/#/q/{{urlencode:owner:"%s" status:open}},n,z %s]
| [https://gerrit.wikimedia.org/r/#/q/{{urlencode:owner:"%s" project:^mediawiki/.+ status:open}},n,z %s]
| [https://gerrit.wikimedia.org/r/#/q/{{urlencode:owner:"%s" project:mediawiki/core status:open}},n,z %s]
| [https://gerrit.wikimedia.org/r/#/q/{{urlencode:owner:"%s" status:open label:Code-Review>=0}},n,z %s]
""".strip() % (row[0], row[0], row[1], row[0], row[2], row[0], row[3], row[0],
               row[4])
    output.append(table_row)
    open_total += int(row[1])
    open_mediawiki += int(row[2])
    open_core += int(row[3])
    open_unreviewed += int(row[4])

wiki = wikitools.Wiki(config.get('gerrit-reports', 'wiki_api_url'))
wiki.login(config.get('gerrit-reports', 'wiki_username'),
           config.get('gerrit-reports', 'wiki_password'))

report = wikitools.Page(wiki, report_title)
report_text = report_template % (
    config.get('gerrit-reports', 'wiki_header_template'), '\n'.join(output),
    open_total, open_mediawiki, open_core, open_unreviewed,
    config.get('gerrit-reports', 'wiki_footer_template'))
report_text = report_text.encode('utf-8')
report.edit(report_text,
            summary=config.get('gerrit-reports', 'wiki_edit_summary'),
            bot=1)

cursor.close()
conn.close()
Example #11
import BeautifulSoup as bs
from BeautifulSoup import BeautifulStoneSoup
import urllib2
import wikitools
import re
import MySQLdb
import hb_config

report_title = hb_config.rootpage + '/Questions-recent/%i'

report_template = '''%s

<!-- Fill in the "section" parameter with the question title from the Q&A page -->
{{Wikipedia:Teahouse/Questions-answer|section=%s}}
'''
wiki = wikitools.Wiki(hb_config.apiurl)
wiki.login(hb_config.username, hb_config.password)
conn = MySQLdb.connect(host=hb_config.host,
                       db=hb_config.dbname,
                       read_default_file=hb_config.defaultcnf,
                       use_unicode=1,
                       charset="utf8")
cursor = conn.cursor()

cursor.execute('''
select * from enwiki_p.revision
	where rev_page = 34745517
	and rev_comment like "%*/ new section" and rev_timestamp > DATE_FORMAT(DATE_SUB(NOW(),INTERVAL 2 DAY),'%Y%m%d%H%i%s') order by rand() limit 5;
	''')

page = 1

row = cursor.fetchone()
while 1:
	if not row:
Example #12
def main():
    meta = wikitools.Wiki('https://meta.wikimedia.org/w/api.php')
    for wiki in matrix(meta):
        do_wiki(wiki)
Example #13
! Lp.
! Wikiprojekt
! Edycje (z wyłączeniem stron dyskusji)
! Edycje (włącznie ze stronami dyskusji)
! Edycje (z wyłączeniem stron dyskusji, bez botów)
! Edycje (włącznie ze stronami dyskusji, bez botów)
|-
%s
|}

[[Kategoria:Wikiprojekty]]

[[en:Wikipedia:Database reports/WikiProjects by changes]]
'''

wiki = wikitools.Wiki('http://pl.wikipedia.org/w/api.php')
wiki.login(config.get('dbreps', 'username'), config.get('dbreps', 'password'))

conn = MySQLdb.connect(host='plwiki-p.rrdb.toolserver.org',
                       db='plwiki_p',
                       read_default_file='~/.my.cnf')
cursor = conn.cursor()
cursor.execute('''
/* pl_project_changes.py */
SELECT SUBSTRING_INDEX(page_title, '/', 1) AS project,
       SUM((
         SELECT COUNT(*)
         FROM revision
         WHERE page_id = rev_page
         AND page_namespace = 102
         AND DATEDIFF(NOW(), rev_timestamp) <= 365
Example #14
import datetime
from email.header import Header
import htmlentitydefs
import urllib
import re
import smtplib
import textwrap
import traceback

import BeautifulSoup
import wikitools

import config

# Establish a few wikis
metawiki_base = 'https://meta.wikimedia.org'
metawiki = wikitools.Wiki(metawiki_base + '/w/api.php')
metawiki.setMaxlag(-1)
enwiki_base = 'https://en.wikipedia.org'
enwiki = wikitools.Wiki(enwiki_base + '/w/api.php')
enwiki.setMaxlag(-1)
enwikt_base = 'https://en.wiktionary.org'
enwikt = wikitools.Wiki(enwikt_base + '/w/api.php')
enwikt.setMaxlag(-1)
enquote_base = 'https://en.wikiquote.org'
enquote = wikitools.Wiki(enquote_base + '/w/api.php')
enquote.setMaxlag(-1)

# Figure out the date
date = datetime.date.today()
year = date.year
day = date.day