def get_concept(concept):
    """Fetch wiki data for *concept* and return it serialized as JSON.

    The returned JSON object has keys: ``summary``, ``w_link`` (wiki page
    link), ``v_link`` (a hard-coded YouTube embed URL — placeholder,
    presumably), and ``subs`` (sub-concepts — TODO confirm shape).
    """
    w = Wiki()
    try:
        w_data = w.get_for_concept(concept)
    finally:
        # Always release the DB connection, even when the lookup raises.
        # (The original leaked the connection on exceptions.)
        w.conn.close()
    y_link = "https://www.youtube.com/embed/1wnE4vF9CQ4"
    ret = {"summary": w_data[0], "w_link": w_data[1], "v_link": y_link,
           "subs": w_data[2]}
    return json.dumps(ret)
def scrape_category(category, include_subcats):
    """Collect page titles for *category* and, per subcategory, their pages.

    Returns ``(pages, subcat_pages)`` where ``subcat_pages`` maps each
    subcategory name to its own list of page titles.
    """
    site = Wiki('http://wiki.eveuniversity.org')
    top_pages, subcats = site.pages_by_category(category, include_subcats)
    # One extra (non-recursive) query per subcategory.
    per_subcat = {name: site.pages_by_category(name, False)[0]
                  for name in subcats}
    return top_pages, per_subcat
async def on_message(message):
    """Discord handler for the ``!wiki <topic>`` command.

    Sends the wiki summary for the topic; when the summary is not already a
    "more at ..." stub, offers to send the full article link on a 'y' reply.
    """
    if message.author == client.user:
        return  # ignore the bot's own messages
    if message.content.startswith('!wiki'):
        content = message.content[5:]
        # Fix: use a raw string — '\s' is an invalid escape sequence in a
        # plain literal (SyntaxWarning on modern Python); bytes are identical.
        content = re.sub(r'\s', '_', content)
        w = Wiki(content)
        response = w.getSum()
        await message.channel.send(response)
        if 'more at' not in response:
            await message.channel.send('Wanna see more information?(y/n)')
            # NOTE(review): wait_for has no check= filter, so ANY next message
            # in any channel satisfies it — confirm this is intended.
            msg = await client.wait_for('message')
            if msg.content == 'y':
                await message.channel.send('See more at ' + w.getLink())
def main():
    """Screenshot the quiz, OCR it, then compare Wiki vs Google answer scores."""
    screen.shot()
    question_options = localocr.getText()
    question = question_options['question']
    options = question_options['options']

    # Score every option via both back-ends.
    wiki_search = Wiki(question, options)
    wiki_result = wiki_search.getResults()
    google_search = GoogleSearch()
    google_search_result = google_search.getResult(question, options)

    # Best option = highest score index for each source.
    wiki_index = wiki_result.index(max(wiki_result))
    google_index = google_search_result.index(max(google_search_result))

    print("WIKI FOUND: " + str(options[wiki_index])
          + "\t Frequency of Option: " + str(wiki_result[wiki_index]))
    print("GOOGLE FOUND: " + str(options[google_index])
          + "\t Number of Results: " + str(google_search_result[google_index]))
def wiki_info(self):
    """Return wiki abstract/image info for the product named in the request."""
    request_params = get_request_params()
    self.__check_prod_params(request_params)
    prod_name = request_params.get(ProdParam.name.value)
    info = Wiki.get_wiki_info_according2props(prod_name, "abstracts",
                                              "relatedImage")
    return {f"{wiki_info_key}": info}
def __init__(self):
    """Wire up the Discord client, backing services, and event callbacks."""
    self.discord = discord.Client()
    self.config = None
    self.check_config()
    # Backing services / data sources.
    self.forumdb = Forum(self)
    self.wikidb = Wiki(self)
    self.random = Random(self)
    self.dcmanager = DCManager(self)
    self.ircmanager = IRCManager(self)
    # Register Discord event handlers (discord.py keys them by __name__).
    for handler in (self.on_ready, self.on_message, self.on_member_join):
        self.discord.event(handler)
def ans(self, text: str) -> str:
    """Produce the bot's reply to *text* (a simple stateful Russian chat bot).

    State: ``self.last_msg == "вики"`` means the previous turn asked what to
    look up, so this turn's text is sent to the Wikipedia lookup.
    Recognized phrase lists come from ``self.hello`` / ``self.whatsup`` /
    ``self.wiki``.
    """
    text = text.lower()
    if self.last_msg == "вики":
        self.last_msg = ""
        return Wiki().get_wiki(text)
    # Membership tests replace the original equality loops (same semantics).
    if text in self.hello:
        self.last_msg = ""
        return "Привет, я БОТ_ИМЯ!\nЯ умею присылать статью из Википедии(напиши: вики или wiki)"
    if text in self.whatsup:
        self.last_msg = ""
        # Typo fixes in the reply: "всгда" -> "всегда", "рообот" -> "робот".
        return "У меня всегда все круто, я же робот\nА у тебя как дела?"
    if text in self.wiki:
        self.last_msg = "вики"
        return "Что ты хочешь узнать?"
    return "Я тебя не понял"
def get_product_info(self, ignore_wiki=False):
    """Assemble the full detail payload for the product in the current request.

    Enriches the locally-stored product detail with: ancient-text (gj)
    descriptions, wiki abstract/image (unless *ignore_wiki*), related poems,
    map location, and a year extracted from the ``temporal`` field.

    Returns the mutated ``detail_info`` dict.
    """
    request_params = get_request_params()
    self.__check_prod_params(request_params)
    # Renamed from `id` so the builtin id() is not shadowed.
    prod_id = request_params.get(ProdParam.id.value)
    name = request_params.get(ProdParam.name.value)
    detail_info = query_product_detail_from_local(prod_id)
    place_name_in_map = detail_info.get("mapPlace", "")

    gj_list = detail_info.get(f"{gj_list_key}")
    if gj_list:  # truthiness covers both None and empty; len() check was redundant
        detail_list = query_gj_detail_from_local(name, gj_list)
        wc_desc_in_gj = {
            detail["gjsource"]: detail["gjdesc"]
            for detail in detail_list if len(detail["gjdesc"]) != 0
        }
        # "a;b;" -> "《a》 《b》" for display.
        beautify_gj_list = "《" + gj_list.strip(";").replace(";", "》 《") + "》"
        detail_info[f"{gj_beautify_gj_list_key}"] = beautify_gj_list
    else:
        wc_desc_in_gj = {}
    detail_info[f"{gj_desc_key}"] = wc_desc_in_gj

    if not ignore_wiki:
        detail_info[f"{wiki_info_key}"] = Wiki.get_wiki_info_according2props(
            name, "abstracts", "relatedImage")

    poemHandler = PoemHandler()
    detail_info[f"{related_poems_key}"] = poemHandler.get_poem_info_from_key(name)
    detail_info[f"{map_location_key}"] = query_map_location_from_local(
        place_name_in_map)

    # Extract the year from e.g. "some period(1234)" — last "(...)" group.
    temporal = detail_info.get("temporal")
    detail_info[f"{wtime_key}"] = ""
    if temporal:
        year = temporal.split("(")
        if len(year) >= 2:
            detail_info[f"{wtime_key}"] = year[-1].rstrip(")")
    return detail_info
def __init__(self):
    """Initialize an empty work queue, a Wiki client, and the base directory."""
    self.items=[]  # queued work items — presumably petition paths; confirm against enqueue()
    self.wiki = Wiki()
    self.path = os.getcwd()  # remembered so later processing can chdir back
# NOTE(review): this chunk opens mid-function — the enclosing `def` header
# (the bot-list parser, judging by the body and the readBots() call below)
# is outside the visible source. Indentation reconstructed best-effort.
    # Take the first regex capture group and split it into lines.
    m = re.search(pattern, content)
    li = m.group(1).splitlines()
    ret = []
    for i in range(len(li)):
        if len(li[i]) > 0:
            # Drop a leading "*" bullet, then trim surrounding whitespace.
            if li[i][0] == "*":
                li[i] = li[i][1:]
            #remove space
            li[i] = li[i].strip()
            ret.append(li[i])
    return ret;

if __name__ == '__main__':
    masterBot = 'Utilisateur:Hermit Crab'
    wiki = Wiki('Utilisateur:Hermit Crab', 'poulpe')
    #Retrieve workers in main page
    bots = readBots(wiki.readPage(masterBot));
    pBots = [];
    for b in bots:
        name = b.strip()
        # Strip the first/last two characters — presumably "[[ ]]" wiki-link
        # brackets; confirm against readBots() output.
        name = name[2:len(name)-2]
        pBots.append(Bot(name,wiki.readPage(name)))
    exe = Executor(wiki,pBots)
    exe.run()
# -*- coding: utf-8 -*- from wiki import Wiki from bot import Bot if __name__ == '__main__': wiki = Wiki('Calamar', 'poulpe') content='===Biographie===\n' content+='Pour plus de simplicité utilisez [https://github.com/PierreQuinton/hermit_crab Hermit Crab] !\n' content+='===Références===\n' page='Bacasable' # write some content to page #wiki.writeToPage(content, page, summary='Calamarification') content='\n== Ngrams viewer ==\n' # append some extra content to the page #wiki.writeToPage(content, page, True, summary='Calamarification') # read the page, wow it's the same as wat we wrote ! #newContent=wiki.readPage(page) #print(newContent + '\n\n') #newContent=wiki.readSection('ReplaceBot', 'Replace Words', 2) #print(newContent) #print('\n\n') # search the titles print(wiki.find([page], [r"===.+===", r"==.+=="])) #print('\n\n') # revert the titles of order 2 and 3
def main():
    """CLI entry point: find incorrect ship pages on the wiki and emit a report.

    Wires up file logging, parses CLI args, resolves the formatter/outputter
    classes by name, loads the local ship database (offering to download it if
    missing), optionally logs into the wiki, and writes the report.
    """
    # Root logger at DEBUG, mirrored to log.txt next to this script.
    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    filelog = logging.FileHandler(path.join(path.dirname(__file__), 'log.txt'))
    filelog.setLevel(logging.DEBUG)
    filelog.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    log.addHandler(filelog)

    parser = ArgumentParser(description='Find incorrect ships on wiki', prog="wikiships")
    parser.add_argument('-F', '--file', action='store',
                        help='File to save output to, use "stdout" to print to screen')
    parser.add_argument('-f', '--format', action='store', default='text',
                        help='Format for the output')
    parser.add_argument('-o', '--output', action='store', default='stdout',
                        help='How to output text')
    # NOTE(review): help text says "Defaults to 30" but default=1 — confirm
    # which is intended.
    parser.add_argument('--pause', default=1, type=int,
                        help='Number of seconds to wait between requests to wiki. '
                             'Defaults to 30', action='store')
    parser.add_argument('-u', '--user', action='store', help='Username of the wiki user')
    parser.add_argument('-p', '--password', action='store', help='Password of the wiki user')
    args = parser.parse_args()
    # NOTE(review): `logger` is not the local `log` defined above — confirm a
    # module-level logger exists, otherwise this raises NameError.
    logger.debug('Args: %s', args)
    args.password  # NOTE(review): no-op statement; looks like leftover debugging

    # Resolve formatter/outputter classes by capitalized name.
    try:
        formatter = getattr(formatters, args.format.capitalize())()
    except AttributeError:
        parser.error('Invalid format please choose from {}'\
            .format(', '.join([i for i in formatters.available()])))
    try:
        outputter = getattr(outputters, args.output.capitalize())
    except AttributeError:
        parser.error('Invalid output {} please choose from {}'\
            .format(args.output, ', '.join([i for i in outputters.available()])))

    # Local ship DB, downloaded on demand.
    try:
        ships = get_ships()
    except sqlite3.Error:
        if not query_yes_no('No valid local database, '
                            'should it be downloaded (~100mb file)?'):
            parser.exit()
        get_database(REMOTE_DATABASE_LOC)
        print('Done!')
        ships = get_ships()

    wiki = Wiki('http://wiki.eveuniversity.org', args.pause)
    # NOTE(review): argparse always defines these attributes, so this
    # try/except AttributeError can never fire — confirm before simplifying.
    try:
        user = args.user
        password = args.password
    except AttributeError:
        pass
    else:
        if user and password:
            try:
                wiki.login(user, password)
            except RequestError as e:
                parser.error(e)

    try:
        outputter = outputter(args.file, formatter, wiki)
    except outputters.InvalidSetup as e:
        parser.error(e)
    pages, missing_pages = wiki.get_pages(ships.keys())
    try:
        outputter(formatter(pages, ships, missing_pages, args.file))
    except EnvironmentError as e:
        try:
            filename = e.filename
        except AttributeError:
            # NOTE(review): ArgumentParser has no .file attribute — this
            # probably should be args.file; confirm.
            filename = parser.file
        parser.error('Error accessing file {}: {}'.format(filename, e.strerror))
    except InvalidLocation as e:
        parser.error('Invalid location {}: {}'.format(args.file, e))
# NOTE(review): this chunk begins inside an except handler (the matching try
# is outside the visible source) and is truncated mid-branch at the end.
# Indentation reconstructed best-effort; runtime strings kept byte-identical.
except Exception as error:
    # Report the failed "qr" command back to the chat.
    message(
        f'ошибка!\nкоманда "qr" завершилась с ошибкой\n{error}'
    )
# Each command runs when sent from a private chat (event.from_user) or when
# the chat's settings entry enables it. peer_id - 2000000000 is — presumably —
# the VK group-chat id offset; confirm against the VK API usage elsewhere.
if (text and text.startswith("/github")
        and (event.from_user or str(peer_id - 2000000000) in settings
             and settings[str(peer_id - 2000000000)]["github"] == "True")):
    message(getGitHubAccInfo(text.split()[1]))
if (text and text.startswith("/wiki")
        and (event.from_user or str(peer_id - 2000000000) in settings
             and settings[str(peer_id - 2000000000)]["wiki"] == "True")):
    # Trailing digits select how many sentences/paragraphs to fetch — presumably.
    if text.split()[-1].isdigit():
        message(
            Wiki(text[5:].replace(text.split()[-1], ""),
                 int(text.split()[-1])))
    else:
        message(Wiki(text[5:]))
if (text and text.startswith("/курс")
        and (event.from_user or str(peer_id - 2000000000) in settings
             and settings[str(peer_id - 2000000000)]["rate"] == "True")):
    if len(text.split()) > 1 and text.split()[1] == "-евро":
        rate = exchangeRate("EUR").value
        message(f"Курс евро {rate} руб.")
    elif len(
            text.split()) > 1 and text.split()[1] == "-доллар":
        rate = exchangeRate("USD").value
        message(f"Курс доллара {rate} руб.")
    else:
        rateUSD = exchangeRate("USD").value
        # NOTE(review): chunk is truncated here — the rest of this branch is
        # outside the visible source.
def GET(self, name):
    """Serve the named wiki page as HTML with an Edit link appended."""
    page = unquote(name)
    rendered = Wiki.format(wiki.get_page(page),
                           lambda n: "/view/%s" % quote(n))
    edit_link = '<p><a href="/edit/%s">Edit</a></p>' % quote(page)
    return mnml.HttpResponse(layout(esc(page), rendered + edit_link))
import os
import re
import codecs
import requests
import concurrent.futures
import time
import utils.global_vars
from enchant.checker import SpellChecker
from enchant.tokenize import URLFilter, EmailFilter, WikiWordFilter, MentionFilter
from wiki import Wiki

# Prefix for issue links on the IBEX wiki.
IBEX_ISSUES = "IBEX/issues/"

# The wiki repositories to spell-check.
DEV_MANUAL = Wiki("ibex_developers_manual")
IBEX_MANUAL = Wiki("IBEX")
USER_MANUAL = Wiki("ibex_user_manual")
TEST_WIKI = Wiki("ibex_wiki_checker")
WIKI_INCLUDELIST = [USER_MANUAL, IBEX_MANUAL, DEV_MANUAL, TEST_WIKI]


def strip_between_tags(expression, text):
    """Remove text between paired occurrences of *expression* in *text*.

    Tag occurrences are expected in pairs; with an odd count the text is
    returned untouched (safer than guessing which tag is unmatched).
    """
    if text is None:
        return text
    matches = list(re.finditer(expression, text))
    if len(matches) == 0:
        new_text = text
    elif len(matches) % 2 != 0:
        # Unbalanced tags: warn and leave the text alone.
        print("Uneven number of {} detected. Doing nothing to be safe".format(
            expression))
        # NOTE(review): function is truncated here — the even-count branch and
        # the return statement are outside the visible source.
def __init__(self):
    """Initialize the queue, service clients, and the weekly scheduled job."""
    self.items = []  # queued work items — presumably petition paths; confirm against enqueue()
    self.wiki = Wiki()
    self.youtube = Youtube()
    self.path = os.getcwd()  # remembered so later processing can chdir back
    # Run self.job every Saturday (the `schedule` library's fluent API).
    self.s = schedule.every().saturday.do(self.job)
)
# NOTE(review): fragment begins mid-statement (the closing paren above belongs
# to a call outside the visible source) and is truncated at the end.
# Indentation reconstructed best-effort; runtime strings kept byte-identical.
# Same settings-gated dispatch pattern as the "/"-prefixed variant elsewhere
# in this file; peer_id - 2000000000 is — presumably — the VK chat-id offset.
if (command.lower().startswith("github")
        and (event.from_user or str(peer_id - 2000000000) in settings
             and settings[str(peer_id - 2000000000)]["github"] == "True")):
    message(getGitHubAccInfo(command.split()[1]))
if (command.lower().startswith("wiki")
        and (event.from_user or str(peer_id - 2000000000) in settings
             and settings[str(peer_id - 2000000000)]["wiki"] == "True")):
    # A trailing digit selects the amount of text to fetch — presumably.
    if command[-1].isdigit():
        message(
            Wiki(" ".join(command.split()[1:-1]), int(command[-1])))
    else:
        message(Wiki(" ".join(command.split()[1:])))
if (command.lower().startswith("курс")
        and (event.from_user or str(peer_id - 2000000000) in settings
             and settings[str(peer_id - 2000000000)]["rate"] == "True")):
    if len(command.split()) > 1 and command.split(
            )[1] == "-евро":
        rate = exchangeRate("EUR").value
        message(f"Курс евро {rate} руб.")
    elif len(command.split()) > 1 and command.split(
            )[1] == "-доллар":
        rate = exchangeRate("USD").value
        message(f"Курс доллара {rate} руб.")
def view(request, name):
    """Respond with the named wiki page rendered as HTML plus an Edit link."""
    page = unquote(name)
    body = Wiki.format(wiki.get_page(page), lambda n: "/view/%s" % quote(n))
    body += '<p><a href="/edit/%s">Edit</a></p>' % quote(page)
    return newf.Response(layout(esc(page), body))
class Scheduler:
    # Priority FIFO of "petition" file paths, processed through the Wiki
    # scraper. Python 2 code (print statements). Indentation reconstructed
    # best-effort from mangled source.

    def __init__(self):
        self.items=[]  # pending petition paths, priority-ordered by insertOrderListPetition
        self.wiki = Wiki()
        self.path = os.getcwd()  # remembered so process() can chdir back

    def splitPath(self,petition):
        # "/a/b/name_type.ext" -> ["name_type", "ext"]
        names = petition.split("/")
        names = names[len(names)-1].split(".")
        return names

    def enqueue(self, x):
        # The petition type is the last "_"-separated token of the file name;
        # its priority comes from the module-level `priority` map.
        print x
        name = self.splitPath(x)[0]
        splitPetition = name.split("_")
        typePetition = splitPetition[len(splitPetition)-1]
        self.insertOrderListPetition(priority[typePetition], x)

    def dequeue(self):
        # Pop the head; raise ValueError (not IndexError) when empty.
        try:
            return self.items.pop(0)
        except:
            raise ValueError("there are no petitions!")

    def isEmpty(self):
        return self.items == []

    def process(self):
        # Decompress the petition archive, run the wiki search over each
        # entry, re-compress the results next to the work dir, then clean up.
        petition = self.dequeue()
        names = self.splitPath(petition)
        list = decompress(names[0], petition)  # NOTE(review): shadows the builtin `list`
        listOut = []
        os.mkdir(list['dir']+"/result/")
        for l in list['listDir']:
            params = self.wiki.search(l, names[0], list['dir']+"/result/")
            for d in params:
                listOut.append(d)
        print listOut
        compress_file = names[0]+"_out.tar.gz"
        compress(listOut,list['dir']+"/result/", list['dir']+"/../"+compress_file)
        os.chdir(self.path)
        shutil.rmtree(list['dir'])

    def insertOrderListPetition(self, num, petition):
        # Insert `petition` after queued items of equal-or-higher priority.
        # NOTE(review): `for j, val in self.items` unpacks each item (a path
        # string) into two values — this raises ValueError whenever the queue
        # is non-empty; the shifting loop looks broken. Confirm intent (the
        # L24/L25 copy of this class carries a "NO FUNCIONA" note here).
        i = 0
        if(len(self.items)>0):
            name = self.splitPath(self.items[i])[0]
            splitPetition = name.split("_")
            typePetition = splitPetition[len(splitPetition)-1]
            numType = priority[typePetition]
            i+=1
            while i < len(self.items) and numType <= num:
                name = self.splitPath(self.items[i])[0]
                splitPetition = name.split("_")
                typePetition = splitPetition[len(splitPetition)-1]
                numType = priority[typePetition]
                i+=1
            lenA = len(self.items)
            for j, val in self.items:
                self.items.insert(lenA-j, self.items[lenA-j-1])
        self.items.insert(i, petition)
# NOTE(review): top-level script body; truncated at the end (mid-call).
# Indentation reconstructed best-effort.
args = load_args()
setup_logging(args.verbose)
logging.info("Creating projects.")
# Load the YAML configuration.
config_path = args.config
with open(config_path) as config_file:
    config = yaml.safe_load(config_file)
logging.info("Loaded config from '{}'".format(config_path))
# Goals come from a tab-separated file interpreted against config["goals"].
with open(args.goal_file[0], newline="") as file_:
    goals_reader = csv.reader(file_, delimiter="\t")
    goals, goal_fulfillments = read_goals(goals_reader, config["goals"])
# Default to the current year when --year is not given.
if args.year:
    year = args.year
else:
    year = datetime.date.today().year
project_columns = config["project_columns"]
wiki = Wiki(config["wiki"], project_columns, args.dry_run,
            args.overwrite_wiki, year)
phab = Phab(config["phab"], args.dry_run)
with open(args.project_file[0], newline="") as file_:
    projects_reader = csv.DictReader(file_, delimiter="\t")
    single_project_found = False
    for unsanitized_project_information in projects_reader:
        project_information = sanitize(unsanitized_project_information)
        if args.project:
            # --project may match either the Swedish or the English name.
            if args.project not in (
                    project_information[project_columns["swedish_name"]],
                    project_information[project_columns["english_name"]]):
                continue
            else:
                single_project_found = True
        wiki.single_project_info(
            # NOTE(review): chunk is truncated here — the call's arguments
            # are outside the visible source.
app.config['CONTENT_DIR'] = 'content' # app.config['DEBUG'] = options.debug # app.config['CONTENT_DIR'] = options.directory app.config['TITLE'] = 'wiki' # app.config['AUTHENTICATION_METHOD'] = options.authentication_method app.config['AUTHENTICATION_METHOD'] = 'cleartext' app.config['SEARCH_IGNORE_CASE'] = True try: app.config.from_pyfile( os.path.join(app.config.get('CONTENT_DIR'), 'config.py')) except IOError: print("Startup Failure: You need to place a " "config.py in your content directory.") wiki = Wiki(app.config.get('CONTENT_DIR')) users = UserManager(app.config.get('CONTENT_DIR')) users.add_user('admin', 'dev', authentication_method=app.config.get('AUTHENTICATION_METHOD')) loginmanager = LoginManager() loginmanager.init_app(app) loginmanager.login_view = 'user_login' """ Forms ~~~~~ """
def wiki_action():
    """Return the wiki info payload from a fresh Wiki client."""
    return Wiki().get_wiki_info()
class Scheduler:
    # Priority queue of "petition" files dispatched to wiki or youtube
    # scrapers, plus a weekly scrapy job. Python 2 code (print statements).
    # Indentation reconstructed best-effort from mangled source.

    def callThread(self, onExit, popenArgs):
        # Run popenArgs as a subprocess on a worker thread; invoke onExit
        # when the subprocess finishes.
        def runInThread(onExit, popenArgs):
            proc = subprocess.call(popenArgs)
            #proc.wait()
            onExit()
            return
        thread = threading.Thread(target=runInThread, args=(onExit, popenArgs))
        thread.start()
        return thread

    def job(self):
        # Weekly job: run the vademecum scrapy spider, then archive its
        # results and remove the working directory.
        print "executing job"
        def onExit():
            compress_file = "medicamentos_vademecum_out.tar.gz"
            listOut = os.listdir("./vademecum/resultsMedicamentos/")
            compress(listOut, "./vademecum/resultsMedicamentos/",
                     "../out/" + compress_file)
            shutil.rmtree("./vademecum/resultsMedicamentos/")
        self.callThread(onExit,
                        ["scrapy", "runspider", "vademecum/medicamentos.py"])

    def __init__(self):
        self.items = []  # pending petition paths, priority-ordered
        self.wiki = Wiki()
        self.youtube = Youtube()
        self.path = os.getcwd()  # remembered so process() can chdir back
        self.s = schedule.every().saturday.do(self.job)

    def splitPath(self, petition):
        # "/a/b/name_type.ext" -> ["name_type", "ext"]
        names = petition.split("/")
        names = names[len(names) - 1].split(".")
        return names

    def remove_files(self, petition):
        # The petition file lists output files to delete, one per line;
        # afterwards the petition file itself is removed.
        with open(petition) as f:
            elem = f.readlines()
            for e in elem:
                if os.path.isfile("../out/" + e.strip()):
                    os.remove("../out/" + e.strip())
        os.remove(petition)

    def enqueue(self, x):
        print schedule.jobs
        print "enqueue petition"
        #schedule.run_pending()
        name = self.splitPath(x)[0]
        # A petition named "remove" deletes files instead of queuing work.
        if (name == "remove"):
            print "remove"
            print x
            self.remove_files(x)
        else:
            splitPetition = name.split("_")
            typePetition = splitPetition[len(splitPetition) - 1]
            self.insertOrderListPetition(priority[typePetition], x)

    def dequeue(self):
        # Pop the head; raise ValueError (not IndexError) when empty.
        try:
            return self.items.pop(0)
        except:
            raise ValueError("there are no petitions!")

    def isEmpty(self):
        return self.items == []

    def process(self, p=None):
        # Decompress the petition, dispatch by type ("wiki"/"youtube"),
        # archive results, and clean up; retries itself recursively on errors.
        petition = self.dequeue() if p is None else p
        names = self.splitPath(petition)
        try:
            list = decompress(names[0], petition)  # NOTE(review): shadows builtin `list`
        except OSError as osE:
            print "error with a file"
            os.chdir(self.path)
            shutil.rmtree("../out/" + names[0])
            # NOTE(review): falls through after the recursive retry instead of
            # returning — `list` would be unbound below; confirm intent.
            self.process(petition)
        listOut = []
        os.mkdir(list['dir'] + "/result/")
        try:
            for l in list['listDir']:
                if names[0].split("_")[1] == "wiki":
                    params = self.wiki.search(l, names[0],
                                              list['dir'] + "/result/")
                    if params and len(params) > 0:
                        for d in params:
                            listOut.append(d)
                    compress_file = names[0] + "_out.tar.gz"
                    compress(listOut, list['dir'] + "/result/",
                             list['dir'] + "/../" + compress_file)
                elif names[0].split("_")[1] == "youtube":
                    self.youtube.search(l, names[0], list['dir'] + "/result/",
                                        names[0])
        except OSError as osE:
            print osE
            print "error with a file"
            os.chdir(self.path)
            shutil.rmtree(list['dir'])
        except Exception as eEx:
            print eEx
            os.chdir(self.path)
            shutil.rmtree(list['dir'])
            time.sleep(15)
            self.process(petition)
        except e:
            # NOTE(review): `except e:` means "catch an exception class named
            # e" — a NameError at runtime, and unreachable anyway after the
            # `except Exception` above; probably meant `except Exception as e`.
            print e
            print "cannot get resources, check internet connection!"
            os.chdir(self.path)
            shutil.rmtree(list['dir'])
            time.sleep(15)
            self.process(petition)
        print "remove"
        print petition
        print os.getcwd()
        os.remove(petition)
        os.chdir(self.path)

    def insertOrderListPetition(self, num, petition):
        # Insert `petition` after queued items of equal-or-higher priority.
        i = 0
        # NO FUNCIONA: ("doesn't work" — original author's note; the shifting
        # loop below unpacks path strings into (j, val) and would raise)
        if (len(self.items) > 0):
            name = self.splitPath(self.items[i])[0]
            splitPetition = name.split("_")
            typePetition = splitPetition[len(splitPetition) - 1]
            numType = priority[typePetition]
            i += 1
            while i < len(self.items) and numType <= num:
                name = self.splitPath(self.items[i])[0]
                splitPetition = name.split("_")
                typePetition = splitPetition[len(splitPetition) - 1]
                numType = priority[typePetition]
                i += 1
            lenA = len(self.items)
            for j, val in self.items:
                self.items.insert(lenA - j, self.items[lenA - j - 1])
        self.items.insert(i, petition)
def wiki_action():
    """Return the wiki info payload.

    Wiki is imported lazily inside the function — presumably to avoid an
    import cycle or to defer its cost; confirm before moving it to the top.
    """
    from wiki import Wiki
    client = Wiki()
    return client.get_wiki_info()
def view(request, name):
    """Render the "view" template for the named wiki page (no return value)."""
    page = unquote(name)
    html = Wiki.format(wiki.get_page(page), lambda n: "/view/%s" % quote(n))
    template("view", name=page, text=html)
def GET(self, name):
    """Return the rendered "view" template for the named wiki page."""
    page = unquote(name)
    html = Wiki.format(wiki.get_page(page), lambda n: "/view/%s" % quote(n))
    return template("view", name=page, text=html)