def setup_connection(host, user=None, password=None):
    """Open an mwclient connection to *host*.

    When *user* is supplied the session is authenticated with *password*;
    otherwise an anonymous session is returned.
    """
    wiki = mwclient.Site(host, path='/')
    if user is None:
        return wiki
    wiki.login(user, password)
    return wiki
def __init__(self, url, user, pw, tempdir, prevent_save):
    """Store connection settings and open an unauthenticated mwclient session.

    Login is deferred; ``_loggedin`` tracks whether it has happened yet.
    """
    self._url = url
    self._user = user
    self._pw = pw
    self._tempdir = tempdir
    self._prevent_save = prevent_save
    self._loggedin = False
    self._site = mwclient.Site(url, path='/')
def __init__(self, user=WPPRY_USER, pw=WPPRY_PW, db=WPPRY_DB, host=WPPRY_HOST, port=WPPRY_PORT):
    """Wire up the local wpp database handle and an English-Wikipedia client."""
    self.mw = mwclient.Site("en.wikipedia.org")
    self.db = wpp_db2(user=user, pw=pw, db=db, host=host, port=port)
def test_force_http(self):
    """An explicit ('http', host) tuple must force plain-HTTP requests."""
    self.httpShouldReturn(self.metaResponseAsJson(), scheme='http')
    http_site = mwclient.Site(('http', 'test.wikipedia.org'))
    assert len(responses.calls) == 1
def login(self, host='en.wikipedia.org', path='/w/', username='******', password='******'):
    """Connect to *host* at *path*, authenticate, and announce success on stdout."""
    self.site = mwclient.Site(host, path)
    self.site.login(username, password)
    print(f'Logged in to {host}{path} as {username}...')
async def get_the_snpedia_text(snp_id):
    """Download the SNPedia wiki text for *snp_id* and save it to ./SNPedia/<snp_id>.mw.

    NOTE(review): despite being declared ``async``, this body performs blocking
    network and file I/O (mwclient is synchronous) -- confirm callers expect that.
    """
    site = mwclient.Site(('https', 'bots.snpedia.com'), path='/')
    txt = site.pages[snp_id].text()
    # Context manager guarantees the file is closed even if write() raises;
    # the original left the handle open on error.
    with open('./SNPedia/{}.mw'.format(snp_id), 'w') as wfile:
        wfile.write(txt)
def test_basic_request(self):
    """Constructing a bare Site should issue a siteinfo|userinfo meta query."""
    self.httpShouldReturn(self.metaResponseAsJson())
    wiki = mwclient.Site('test.wikipedia.org')
    first_url = responses.calls[0].request.url
    assert 'action=query' in first_url
    assert 'meta=siteinfo%7Cuserinfo' in first_url
def test_oauth(self):
    """Supplying the four OAuth tokens should install an OAuth1 auth handler."""
    self.httpShouldReturn(self.metaResponseAsJson())
    oauth_site = mwclient.Site(
        'test.wikipedia.org',
        consumer_token='a',
        consumer_secret='b',
        access_token='c',
        access_secret='d',
    )
    assert isinstance(oauth_site.connection.auth, OAuth1)
def __init__(self, host, username, password, **kwargs):
    """Connect to *host* (honouring an optional ``path=`` keyword) and log in."""
    site_kwargs = {'host': host}
    # Forward path only when explicitly given so Site's own default applies otherwise.
    if 'path' in kwargs:
        site_kwargs['path'] = kwargs['path']
    self.connection = mwclient.Site(**site_kwargs)
    self.connection.login(username, password)
def test_httpauth_defaults_to_basic_auth(self):
    """A (user, password) tuple for ``httpauth`` should become HTTP Basic auth."""
    self.httpShouldReturn(self.metaResponseAsJson())
    wiki = mwclient.Site('test.wikipedia.org', httpauth=('me', 'verysecret'))
    assert isinstance(wiki.connection.auth, requests.auth.HTTPBasicAuth)
def test_user_agent_is_sent(self):
    """A caller-supplied client user agent must appear in the request headers."""
    self.httpShouldReturn(self.metaResponseAsJson())
    wiki = mwclient.Site('test.wikipedia.org', clients_useragent='MyFabulousClient')
    sent_agent = responses.calls[0].request.headers['user-agent']
    assert 'MyFabulousClient' in sent_agent
def __init__(self, stations: Sequence[List[str]], provinces: Sequence):
    """Connect to Chinese Wikipedia and index the station rows for lookups."""
    self.site = mwclient.Site('zh.wikipedia.org')
    self.template = self.site.pages['T:Infobox China railway station']
    self.stations = stations
    self.provinces = provinces
    # Map each station's name (second column of its row) to the row index.
    self.names = {row[1]: i for i, row in enumerate(stations)}
def __init__(self, wiki):
    """Open a labsdb replica connection and a logged-in site for *wiki*."""
    self.db = MySQLdb.connect(
        host=wiki + 'wiki.labsdb',
        user=credentials['user'],
        passwd=credentials['pass'],
        db=wiki + 'wiki_p',
    )
    self.site = mwclient.Site(wiki + '.wikipedia.org')
    self.site.login(cttbot['user'], cttbot['pass'])
    self.rep = Reports(self.site, self.db, wiki)
def main():
    """Log in to Wikimedia Commons and upload the configured range of 53Fi files."""
    commons = mwclient.Site('commons.wikimedia.org')
    commons.login(username=bot_config.USER, password=bot_config.PASS)
    first, last = 442, 442
    for number in range(first, last + 1):
        upload(commons, f"53Fi{number}")
def test_custom_headers_are_sent(self):
    """Custom headers given at construction must be sent to the server."""
    self.httpShouldReturn(self.metaResponseAsJson())
    debug_value = 'host=mw1099.eqiad.wmnet; log'
    wiki = mwclient.Site('test.wikipedia.org',
                         custom_headers={'X-Wikimedia-Debug': debug_value})
    assert debug_value in responses.calls[0].request.headers['X-Wikimedia-Debug']
def maybe_edit_wiki(pageText):
    """Save *pageText* to the ArchiveTeam warrior page, but only if it changed.

    Credentials come from the ATWIKIBOT_* environment variables; login happens
    lazily, only when an edit is actually needed.
    """
    site = mwclient.Site('wiki.archiveteam.org', path='/')
    page = site.Pages['CurrentWarriorProject']
    if page.text() == pageText:
        return
    site.login(
        os.environ['ATWIKIBOT_USERNAME'],
        os.environ['ATWIKIBOT_PASSWORD'])
    page.save(pageText)
def get_sample():
    """Pick 100 random Rosetta Code task pages and record their titles in redis."""
    site = mw.Site('rosettacode.org', path='/mw/')
    tasks = list(site.Pages['Category:Programming Tasks'])
    for task in random.sample(tasks, 100):
        r.sadd('samples', task.page_title)
def __init__(self, user="******", pw="wppry", db="wppry", host="127.0.0.1", port=3306):
    """Open the wpp database plus an English-Wikipedia mwclient session."""
    self.mw = mwclient.Site("en.wikipedia.org")
    self.db = wpp_db(user=user, pw=pw, db=db, host=host, port=port)
def main(bot, trigger, options):
    """Sopel IRC command handler: set a user's status on their status wiki.

    Expects ``.mh wikicode status``. The caller's IRC cloak is matched
    against a CSV whitelist, the target wiki URL is looked up in a second
    CSV, then the bot logs in and delegates the edit to save_wrap().
    """
    cont = 0
    # Argument parsing: exactly-two words vs. multi-word status.
    if len(options) == 2:
        wiki = options[0]
        status = options[1]
        host = trigger.host
        host = host.split('/')
        cont = 1
    elif len(options) > 2:
        wiki = options[0]
        host = trigger.host
        host = host.split('/')
        # Join every remaining word into the status string.
        # NOTE(review): no separator is inserted between words -- confirm intended.
        x = 1
        status = ''
        while x < len(options):
            status = status + options[x]
            x = x + 1
        cont = 1
    else:
        bot.say(trigger.nick + ": Syntax: .mh wikicode status", trigger.sender)
        cont = 0
    if cont == 1:
        # Authorise the caller: their cloak prefix must appear in the CSV.
        # NOTE(review): cont is overwritten on every row, so only the LAST
        # matching/non-matching row decides -- confirm this is intended.
        cloakfile = open(
            '/data/project/zppixbot/.sopel/modules/config/cloaks.csv', 'r')
        for line in cloakfile:
            auth = line.split(',')
            if host[0] == auth[0]:
                user = host[1]
                sulgroup = auth[1]
                wiki = [wiki, sulgroup]
                request = [user, status]
                cont = 1
            else:
                bot.say(
                    trigger.nick +
                    ":This service is only avaiable to users with a Miraheze/Wikimedia Cloaks. " +
                    "See phabricator.wikimedia.org/T234716 for updates.",
                    trigger.sender)
                cont = 0
    if cont == 1:
        # Resolve the wiki code + SUL group to a concrete host name.
        wikiurl = 'example.org'
        file = open(
            '/data/project/zppixbot/.sopel/modules/config/statuswikis.csv', 'r')
        for line in file:
            data = line.split(',')
            if data[1] == wiki[0] and wiki[1] == data[2]:
                wikiurl = data[0]
        site = mwclient.Site(('https', wikiurl), '/w/')
        config = configparser.RawConfigParser()
        config.read('/data/project/zppixbot/.sopel/credentials.txt')
        try:
            site.login(config.get('zppixbot_status', 'username'),
                       config.get('zppixbot_status', 'password'))
        except errors.LoginError as e:
            print(e)
            raise ValueError("Login failed.")
        save_wrap(site, request, bot, trigger)
def mwclient_site(lang):
    """Return the mwclient Site for a Wikipedia language edition, memoised.

    Site objects are cached in MWCLIENT_SITES so each language edition is
    only connected to once.
    """
    try:
        return MWCLIENT_SITES[lang]
    except KeyError:
        MWCLIENT_SITES[lang] = mwclient.Site('{}.wikipedia.org'.format(lang))
        return MWCLIENT_SITES[lang]
def test_https_as_default(self):
    """With no scheme given, Site should talk HTTPS using a GET request."""
    self.httpShouldReturn(self.metaResponseAsJson(), scheme='https')
    wiki = mwclient.Site('test.wikipedia.org')
    assert len(responses.calls) == 1
    assert responses.calls[0].request.method == 'GET'
def __init__(self):
    """Connect to the LoL Gamepedia wiki and set the filtering constants."""
    self.site = mwclient.Site('lol.gamepedia.com', path='/')
    self.min_games = 20
    # Full league names as used on the wiki, mapped to their short codes.
    self.top_leagues = {
        'LoL European Championship': 'LEC',
        'League Championship Series': 'LCS',
        'LoL Champions Korea': 'LCK',
        'LoL Pro League': 'LPL',
    }
def __init__(self, device, buildid):
    """Look up firmware decryption keys for *device*/*buildid* on The iPhone Wiki.

    The key-page name is resolved first, then the keys themselves; both
    helpers are defined elsewhere on this class.
    """
    super().__init__()
    self.device = device
    self.buildid = buildid
    self.site = mwclient.Site('www.theiphonewiki.com')
    # Order matters: get_keys() presumably reads self.page_name -- TODO confirm.
    self.page_name = self.get_keypage()
    self.keys = self.get_keys()
def _site_for_host(cls, host, consumer_token, consumer_secret, access_token, access_secret):
    """Build an OAuth-authenticated Site for *host* with the Keystone user agent."""
    oauth = dict(
        consumer_token=consumer_token,
        consumer_secret=consumer_secret,
        access_token=access_token,
        access_secret=access_secret,
    )
    return mwclient.Site(host,
                         clients_useragent='Keystone',
                         force_login=True,
                         **oauth)
def __init__(self, league: Leagues, year: str, event: str):
    """Connect to the LoL Gamepedia wiki and pre-load results for one event."""
    self.site = mwclient.Site('lol.gamepedia.com', path='/')
    league_data = league[0]
    self.league = league_data['events'][year][event]
    self.odds_url = league_data['odds_url']
    # Fetch raw match results, then enrich them with per-result statistics.
    self.historical_results = self._get_historical_results()
    self.historical_results = self._expand_result_statistics()
def __init__(self, host, username, password):
    """Split *host* into (scheme, netloc), connect at path '/', and log in."""
    parts = urlparse(host)
    self.host = host
    self.username = username
    self.password = password
    self.site = mwclient.Site((parts.scheme, parts.netloc), path='/')
    self.site.login(self.username, self.password)
def test_http_as_default(self):
    """With no scheme given, this client should default to HTTP and POST."""
    self.httpShouldReturn(self.makeMetaResponse(), scheme='http')
    wiki = mwclient.Site('test.wikipedia.org')
    assert len(responses.calls) == 1
    assert responses.calls[0].request.method == 'POST'
def test_version(self):
    """The server-reported MediaWiki version should parse into an int tuple."""
    self.httpShouldReturn(self.makeMetaResponse(version='1.16'))
    wiki = mwclient.Site('test.wikipedia.org')
    assert wiki.initialized is True
    assert wiki.version == (1, 16)
def index():
    """Flask view: merge the images of a Commons category into one PDF.

    GET renders the form. POST downloads every page of the requested
    category, converts the .jpg/.tif files to a single PDF served from the
    tool's static directory, and -- when the visitor is logged in via
    OAuth -- uploads the result back to Commons.
    """
    username = flask.session.get('username', None)
    pdf_title = None
    commons_file = None
    if request.method == 'POST':
        # Authenticated visitors get an OAuth-signed site (needed for upload);
        # everyone else gets an anonymous connection.
        if username:
            site = mwclient.Site(
                'commons.wikimedia.org',
                consumer_token=app.config['CONSUMER_KEY'],
                consumer_secret=app.config['CONSUMER_SECRET'],
                access_token=flask.session['access_token']['key'],
                access_secret=flask.session['access_token']['secret'])
        else:
            site = mwclient.Site('commons.wikimedia.org')
        cat = mwclient.listing.Category(site, request.form['category'])
        # NOTE(review): os.chdir mutates process-global state -- racy if this
        # view handles concurrent requests.
        os.chdir(os.environ['HOME'] + '/category')
        if not os.path.isdir(cat.page_title):
            os.mkdir(cat.page_title)
        os.chdir(cat.page_title)
        # Download every file page in the category into the working directory.
        for page in cat:
            with open(page.page_title, 'wb') as f:
                page.download(f)
        # Only image formats img2pdf can consume are merged.
        pages_list = [page for page in os.listdir() if page[-4:] in ['.jpg', '.tif']]
        pages_list.sort()
        pdf_title = cat.page_title + '.pdf'
        static_path = os.environ['HOME'] + '/www/python/static/'
        with open(static_path + pdf_title, 'wb') as pdf_file:
            pdf_file.write(img2pdf.convert(pages_list))
        if username:
            with open(static_path + pdf_title, 'rb') as pdf_file:
                # Best-effort upload: failures are logged, not surfaced.
                try:
                    result = site.upload(
                        file=pdf_file,
                        filename=request.form.get('filename'),
                        description=request.form.get('description'),
                        comment=("PDF made from a [[:category:{}|category]]"
                                 " with [[:wikitech:Tool:Merge2pdf|"
                                 "merge2pdf]]").format(cat.page_title))
                    if result['result'] == 'Success':
                        commons_file = result['imageinfo']['descriptionurl']
                except Exception as e:
                    print(e)
    return flask.render_template('index.html', username=username,
                                 pdf=pdf_title, commons=commons_file)
def gimme_image(filename, compound_site, pxl, theimage):
    """Download *theimage* from *compound_site*, shrink it to roughly *pxl*
    total pixels, copy its EXIF metadata across, and return the new file path.

    Returns "ERROR" when the download cannot be opened as an image, and
    "PIXEL" when resizing would change the pixel count by less than 5%.
    (Python 2 code: print statements, cStringIO.)
    """
    site = mwclient.Site(compound_site)
    extension = os.path.splitext(theimage)[1]
    extension_caps = extension[1:].upper()
    # Pillow calls the JPG format "JPEG".
    if extension_caps == "JPG":
        extension_caps = "JPEG"
    image_1 = site.Images[theimage]
    image_2 = str(image_1.imageinfo['url'])
    response = requests.get(image_2)
    item10 = cStringIO.StringIO(response.content)
    # Keep an on-disk copy of the original bytes so EXIF can be copied later.
    temp_file = str(uuid.uuid4()) + extension
    f = open(temp_file, 'w')
    f.write(item10.getvalue())
    try:
        img = Image.open(item10)
    except (IOError):
        print "Unable to open image " + theimage + " (aborting)"
        results = "ERROR"
        return results
    # Target width giving ~pxl total pixels at the original aspect ratio.
    basewidth = int(math.sqrt((pxl * float(img.size[0])) / (img.size[1])))
    wpercent = (basewidth / float(img.size[0]))
    hsize = int((float(img.size[1]) * float(wpercent)))
    original_pixel = img.size[0] * img.size[1]
    modified_pixel = basewidth * hsize
    pct_chg = 100.0 * (original_pixel - modified_pixel) / float(original_pixel)
    # Only resize when it would meaningfully shrink the image.
    if pct_chg > 5:
        img = img.resize((int(basewidth), int(hsize)), Image.ANTIALIAS)
        img.save(filename + extension)
    else:
        print "Looks like we'd have a less than 5% change in pixel counts. Skipping."
        results = "PIXEL"
        return results
    print "Image saved to disk at " + filename + extension
    results = filename + extension
    # Best-effort EXIF copy; failure is non-fatal.
    try:
        metadata(source_path=temp_file, dest_path=results, image=img)
        print "Image EXIF data copied!"
    except (IOError, ValueError):
        print "EXIF copy failed. Oh well - no pain, no gain."
    # Remove the temporary original (and any sidecar files exiftool may leave).
    filelist = [f for f in os.listdir(".") if f.startswith(temp_file)]
    for fa in filelist:
        os.remove(fa)
    return results