Code Example #1
File: search.py Project: mccary/minetestbot-modules
def google_ajax(query):
    """Search using AjaxSearch, and return its JSON."""
    # "web" is the project's HTTP helper module (urlencode/get/json).
    uri = 'http://ajax.googleapis.com/ajax/services/search/web'
    args = '?v=1.0&safe=off&q=' + web.urlencode(query)
    data, sc = web.get(uri + args)    # sc is the HTTP status code
    data = str(data, 'utf-8')         # web.get returns bytes; decode before parsing
    return web.json(data)
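google_ajax() hands back the parsed JSON unchanged. As a rough illustration, here is a minimal sketch of how a caller might read the estimated hit count out of it; the responseData/cursor/estimatedResultCount field names are an assumption based on the retired Google AJAX Search API and are not shown in the snippet itself:

def google_count(query):
    """Hypothetical helper: pull the estimated result count out of google_ajax()."""
    results = google_ajax(query)
    if not results.get('responseData'):        # assumed AJAX Search API layout
        return '0'
    cursor = results['responseData'].get('cursor', {})
    return cursor.get('estimatedResultCount', '0')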
Code Example #2
File: search.py Project: mccary/minetestbot-modules
def new_gc(query):
    """Scrape a Google results page and return the "N results" count string."""
    uri = 'https://www.google.com/search?hl=en&q='
    uri = uri + web.urlencode(query).replace('+', '%2B')
    if '"' in query:
        uri += '&tbs=li:1'    # quoted query: ask Google for verbatim results
    data, sc = web.get(uri)
    data = str(data, 'utf-8')
    if "did not match any documents" in data:
        return "0"
    # Return the first "N results" figure found in the page, if any.
    for result in re.compile(r'(?ims)([0-9,]+) results?').findall(data):
        return result
    return None
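new_gc() only returns the count string; it is not tied to a chat command here. A minimal sketch of how a phenny-style module might expose it, assuming the usual convention of registering commands through function attributes (the trigger name and messages are invented):

def gc(phenny, input):
    query = input.group(2)
    if not query:
        return phenny.reply("Nothing to search for.")
    result = new_gc(query)
    if result is None:
        return phenny.say(query + ": no result count found.")
    phenny.say(query + ": " + result)
gc.commands = ['gc']                 # hypothetical trigger: .gc <query>
gc.example = '.gc "fennec fox"'      # hypothetical usage example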
Code Example #3
def mod(phenny, input):
    # Look up the text after the command via the Minetest mod search API.
    uri = "http://krock-works.16mb.com/MTstuff/modSearchAPI.php?q="
    text, sc = web.get(uri + web.urlencode(input.group(2)))
    text = str(text, 'utf-8')
    data = web.json(text)
    answer = ""
    if "error" in data:
        # The API signals failures through an "error" field.
        answer = data["error"]
    else:
        answer = (data["title"] + " by " + data["author"] + " - " +
                  data["link"])

    phenny.reply(answer)
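Only the JSON keys consumed by mod() are visible above; the values below are invented purely to illustrate the two response shapes the function handles and the reply each one produces:

# Values are invented for illustration; only the keys appear in the snippet above.
sample_ok = {"title": "Some Mod", "author": "someone", "link": "https://example.org/some_mod"}
sample_err = {"error": "No results found."}
print(sample_ok["title"] + " by " + sample_ok["author"] + " - " + sample_ok["link"])
print(sample_err["error"])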
Code Example #4
def mod(phenny, input):
	# Same lookup as Example #3, pointed at the krock-works.uk.to host over HTTPS.
	uri = "https://krock-works.uk.to/minetest/modSearchAPI.php?q="
	text, sc = web.get(uri + web.urlencode(input.group(2)))
	text = str(text, 'utf-8')
	data = web.json(text)
	answer = ""
	if "error" in data:
		answer = data["error"]
	else:
		answer = (data["title"] + 
			" by " + data["author"] +
			" - " + data["link"])

	phenny.reply(answer)
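Both variants pass input.group(2) straight to web.urlencode(), so a bare command with no search term is not handled. A small sketch of the same kind of guard that Example #5 below uses, applied here (the reply wording is invented):

def mod_guarded(phenny, input):
    """Hypothetical wrapper: reject empty queries before calling mod() above."""
    if not input.group(2):
        return phenny.reply("Please give me a mod name to search for.")
    return mod(phenny, input)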
Code Example #5
def devwiki(phenny, input):
	term = input.group(2)
	if not term:
		return

	log.log("event", "%s queried Developer Wiki for '%s'" % (log.fmt_user(input), term), phenny)
	term = term.replace(" ", "_")
	term = web.urlencode(term)

	data, scode = web.get(wikiuri_g % term)
	if scode == 404:
		return phenny.say("No such page.")
	data = str(data, "utf-8")

	# Locate the start of the article body.
	m = re.search(r_content, data)
	if not m:
		return phenny.say("Sorry, did not find any text to display. Here's the link: %s" % (wikiuri_r % term,))
	data = data[m.span()[1]:]

	# Take the first paragraph that is not ruled out by the "nottext" patterns.
	mi = re.finditer(r_paragraph, data)
	text = ""
	for m in mi:
		abort = False
		for e in nottext:
			if re.search(e, m.group(1)):
				abort = True
				break
		if abort:
			continue
		text = m.group(1)
		break
	if not text:
		m = re.search(r_headline, data)
		if m:
			text = "<b>" + m.group(1) + "</b>"
		else:
			return phenny.say("Sorry, did not find any text to display. Here's the link: %s" % (wikiuri_r % term,))
	# Clean up the extracted HTML and cut the text off after the first sentence.
	for tf in transforms:
		text = re.sub(tf[0], tf[1], text)
	m = re.search(r_sentenceend, text)
	if m:
		text = text[:m.span()[1]-1]
	phenny.say('"%s" - %s' % (web.decode(text), wikiuri_r % term))
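devwiki() leans on several module-level names that the snippet does not show: wikiuri_g, wikiuri_r, r_content, r_paragraph, r_headline, r_sentenceend, nottext, transforms, and the project's log helper. A rough sketch of what such definitions could look like; every URL and pattern below is an assumption made for illustration, not taken from the project:

# Hypothetical module-level configuration assumed by devwiki() above.
wikiuri_g = 'https://dev.minetest.net/%s'        # page fetched with web.get
wikiuri_r = 'https://dev.minetest.net/%s'        # link echoed back to the user
r_content = r'<div id="mw-content-text">'        # start of the article body
r_paragraph = r'(?s)<p>(.*?)</p>'                # candidate paragraphs
r_headline = r'<h1[^>]*>(.*?)</h1>'              # fallback: the page headline
r_sentenceend = r'\.\s'                          # cut point after the first sentence
nottext = [r'^\s*$', r'<table', r'id="toc"']     # paragraph contents to skip
transforms = [(r'<[^>]+>', ''), (r'\s+', ' ')]   # strip tags, squeeze whitespace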
Code Example #6
File: scrape.py Project: gitzero/seo
def baidu_serp(keyword):
	"""Return the result URLs scraped from a Baidu results page (rn=100 requests 100 hits)."""
	serp = web.curl('http://www.baidu.com/s?wd=%s&rn=100' % (web.urlencode(keyword)))
	return re.findall('<h3 class="t".*?href="(.*?)"', serp)
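A trivial usage sketch; the keyword is arbitrary, and web.curl/web.urlencode here appear to come from the gitzero/seo project's own web helper rather than from the bot module used in the earlier examples:

# Hypothetical usage: print the organic result URLs for one keyword.
for url in baidu_serp('minetest'):
    print(url)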