def search_for(query):
    results = []
    s = alp.Settings()
    country = s.get("country", "US")
    # First pass: track results for the query.
    args = {
        'term': query,
        'country': country,
        'media': 'all',
        'entity': 'allTrack',
        'attribute': 'allTrackTerm',
        'limit': '10'
    }
    its_api = "http://itunes.apple.com/search"
    myScraper = alp.Request(its_api, args)
    theResult = myScraper.request.json()
    results.extend(theResult["results"])
    # Second pass: artist results for the same term.
    args = {
        'term': query,
        'country': country,
        'media': 'all',
        'entity': 'allArtist',
        'attribute': 'allArtistTerm',
        'limit': '10'
    }
    r = alp.Request(its_api, args)
    results.extend(r.request.json()["results"])
    return results
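# Usage sketch (not part of the original source): turning search_for() results
# into Alfred feedback items, using the same I()/alp.feedback() pattern the
# other scripts in this collection rely on. The trackName/artistName keys
# follow the iTunes Search API's result fields and are assumptions here.
def search_feedback_sketch(query):
    items = []
    for res in search_for(query):
        # Track results carry trackName; artist results only carry artistName.
        title = res.get("trackName", res.get("artistName", "Unknown"))
        items.append(I(title=title,
                       subtitle=res.get("artistName", ""),
                       valid=False))
    alp.feedback(items)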
def do_download(address, name):
    r = alp.Request(address)
    # Note: setting the encoding only affects .text; the raw bytes written
    # below via .content are unaffected.
    r.request.encoding = "utf-8"
    # `tmp` is a module-level scratch directory defined elsewhere in this script.
    f = tempfile.NamedTemporaryFile(suffix=".alfredworkflow", dir=tmp,
                                    delete=False)
    f.write(r.request.content)
    f.close()
    shutil.copy(
        f.name,
        os.path.join(os.path.expanduser("~/Downloads/"),
                     "{0}.alfredworkflow".format(name)))
def cacheIcon(url):
    iconRequest = alp.Request(url)
    covercache = alp.cache("covercache")
    if not os.path.exists(covercache):
        os.makedirs(covercache)
    (_, filename) = os.path.split(url)
    iconPath = os.path.join(covercache, filename)
    with open(iconPath, "wb") as f:
        f.write(iconRequest.request.content)
    return iconPath
def search_for(query):
    s = alp.Settings()
    country = s.get("country", "US")
    args = {
        'term': query,
        'country': country,
        'media': 'software',
        'limit': '10'
    }
    its_api = "http://itunes.apple.com/search"
    myScraper = alp.Request(its_api, args)
    theResult = myScraper.request.json()
    return theResult['results']
def artworkCache(url): cachePath = alp.cache("artcache") if not os.path.exists(cachePath): os.makedirs(cachePath) (_, filename) = os.path.split(url) artPath = os.path.join(cachePath, filename) if os.path.exists(artPath): return artPath else: artScraper = alp.Request(url) with open(artPath, "wb") as f: f.write(artScraper.request.content) f.close() return artPath
def artworkCache(url): cachePath = alp.cache("artcache") if not os.path.exists(cachePath): os.makedirs(cachePath) (urlFragment, filename) = os.path.split(url) (_, prefix) = os.path.split(urlFragment) prefix = os.path.basename(os.path.normpath(prefix)) filename = prefix + filename artPath = os.path.join(cachePath, filename) if os.path.exists(artPath): return artPath else: artScraper = alp.Request(url) with open(artPath, "wb") as f: f.write(artScraper.request.content) f.close() return artPath
def run(self):
    try:
        r = alp.Request(self.target_url, cache=False)
    except (IntegrityError, ConnectionError) as e:
        alp.log("Connection to {0} raised exception: {1}.".format(
            self.target_url, e))
        return
    else:
        if not self.forced:
            r.download()
        else:
            # A forced check drops the cached copy so the request refetches.
            r.clear_cache()
    try:
        j = r.request.json()
    except Exception as e:
        alp.log("{0} threw exception {1}.".format(self.target_url, e))
        return
    # Reuse the JSON parsed above rather than calling .json() a second time.
    self.result = dict(json=j, candidict=self.candidict, local=self.local)
def do_download(address, name):
    r = alp.Request(address, cache=False)
    r.download()
    r.request.encoding = "utf-8"
    # `tmp` and `stamp` are module-level values defined elsewhere in this script.
    f = tempfile.NamedTemporaryFile(suffix=".alfredworkflow", dir=tmp,
                                    delete=False)
    f.write(r.request.content)
    f.close()
    d_path = os.path.join(
        os.path.expanduser("~/Downloads"),
        "{0} - {1}.alfredworkflow".format(stamp, name))
    shutil.copy(f.name, d_path)
    # Drop any stale cache entry for this workflow before persisting.
    remme = None
    for wf in cache["cached_workflows"]:
        cached_name = wf.get("name", None)
        if cached_name == name:
            remme = wf
            break
    if remme:
        cache["cached_workflows"].remove(remme)
    alp.jsonDump(cache, "cache.json")
    return d_path
def getData(asin):
    requestList = [
        "Operation=ItemLookup",
        "ItemId=%s" % asin,
    ]
    requestDict = encodeRequestList(requestList, "Medium")
    itemRequest = alp.Request("http://webservices.amazon.com/onca/xml",
                              requestDict)
    soup = itemRequest.souper()
    try:
        imageURL = soup.find("smallimage").url.string
        imagePath = cacheIcon(imageURL)
    except Exception:
        imagePath = "icon.png"
    try:
        link = soup.find("detailpageurl").string
    except Exception:
        link = "http://www.amazon.com/dp/%s" % asin
    try:
        title = soup.find("title").string
    except Exception:
        title = "Title Missing"
    try:
        author = soup.find("author").string
    except Exception:
        author = "Author Missing"
    try:
        price = soup.find("listprice").formattedprice.string
    except Exception:
        price = "Price Missing"
    returnDict = {
        "uid": asin,
        "arg": link,
        "title": title,
        "subtitle": u"%s\u2014%s" % (author, price),
        "icon": imagePath
    }
    return returnDict
def fetch_stats(url, signature, force=False):
    payload = {
        "action": "stats",
        "filter": "last",
        "limit": 15,
        "format": "json",
        "signature": signature
    }
    r = alp.Request(url, payload=payload, post=True, cache_for=1800)
    # On a forced refresh, drop the cached copy before downloading.
    if force:
        r.clear_cache()
    r.download()
    try:
        j = r.request.json()
    except Exception as e:
        alp.log("Exception: {0}\nurl={1}".format(e, url))
        return I(title="Network Error", subtitle=str(e), valid=False)
    try:
        count = int(j["stats"]["total_links"])
    except Exception as e:
        alp.log("Exception: {0}\nurl={1}\njson={2}".format(e, url, j))
        return I(title="JSON Error", subtitle=str(e), valid=False)
    if not count > 0:
        alp.log("Error: No Links\nurl={0}\ncount={1}".format(url, count))
        return I(title="No Links!", subtitle="count={0}".format(count),
                 valid=False)
    # The API reports links as link_1 ... link_n; cap at the 15 we asked for.
    links = []
    count = 15 if count > 15 else count
    for i in range(1, count + 1):
        key = "link_%s" % i
        links.append(j["links"][key])
    return links
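# Usage sketch (not part of the original source): fetch_stats() returns either
# a single error Item or a list of link records from the remote stats API. The
# title/shorturl keys below are guesses about those records, not fields shown
# in this snippet.
def stats_feedback_sketch(url, signature):
    links = fetch_stats(url, signature)
    if not isinstance(links, list):
        # A lone Item signals an error state; pass it straight to Alfred.
        alp.feedback(links)
        return
    items = []
    for link in links:
        items.append(I(title=link.get("title", ""),
                       subtitle=link.get("shorturl", ""),
                       valid=False))
    alp.feedback(items)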
def doSearch():
    q = sys.argv[1:]
    q = ' '.join(q)
    requestList = [
        "Operation=ItemSearch",
        "SearchIndex=KindleStore",
        "Sort=relevancerank"
    ]
    # requestList.append(urllib.urlencode({"Keywords": q}))
    # kw = q.replace(" ", ",")
    # requestList.append("Keywords=%s" % kw)
    requestDict = encodeRequestList(requestList, "ItemIds", keywords=q)
    searchRequest = alp.Request("http://webservices.amazon.com/onca/xml",
                                requestDict)
    soup = searchRequest.souper()
    resultsFeedback = []
    if soup.find("error"):
        e = soup.error.message.string
        resultsFeedback.append(I(title="Bad Request", subtitle=e, valid=False))
    else:
        asins = soup.find_all("asin")
        for asin in asins:
            aResult = getData(asin.string)
            resultsFeedback.append(I(**aResult))
    if len(resultsFeedback) == 0:
        alp.feedback(
            I(title="No Results",
              subtitle="Your query returned 0 results.",
              valid=False))
    else:
        alp.feedback(resultsFeedback)
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import

import alp
import re

url = "http://wapp.baidu.com/f?kw=steam"
r = alp.Request(url, payload=None, post=False).souper()
result = [u'<?xml version="1.0"?>', u'<items>']
i = 0
for link in r.find_all("div", "i"):
    title = link.find("a").string
    # Strip the "12. " style numbering the mobile page prepends.
    title = re.sub(r"^\d*\. *", "", title)
    id = re.search(r"kz=(\d*)&", link.find("a")["href"]).group(1)
    aurl = u"http://tieba.baidu.com/p/" + id
    result.append(u'<item uid="baidusearch' + str(i) + u'" arg="' + aurl + u'">')
    result.append(u'<title>' + title + u'</title>')
    result.append(u'<subtitle>打开这帖</subtitle>')  # "Open this thread"
    result.append(u'<icon>icon.png</icon>')
    result.append(u'</item>')
    i += 1
result.append(u'</items>')
xml = ''.join(result)
print xml.encode("utf8")
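# Alternative sketch (not in the original script, and not invoked above): the
# same Baidu Tieba results emitted through the alp.feedback()/Item pattern the
# other scripts in this collection use, which spares us the hand-built XML.
# `I` is assumed to be the same Item alias those scripts import.
def feedback_sketch(soup):
    items = []
    for n, link in enumerate(soup.find_all("div", "i")):
        a = link.find("a")
        t = re.sub(r"^\d*\. *", "", a.string)
        kz = re.search(r"kz=(\d*)&", a["href"]).group(1)
        items.append(I(uid="baidusearch%d" % n,
                       arg=u"http://tieba.baidu.com/p/" + kz,
                       title=t,
                       subtitle=u"打开这帖",  # "Open this thread"
                       icon="icon.png",
                       valid=True))
    alp.feedback(items)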
def do_feedback():
    flowPath = os.path.split(alp.local())[0]
    # Scan sibling workflow directories for update.json manifests.
    candidates = []
    for dirpath, dirnames, filenames in os.walk(flowPath, topdown=False):
        for aFile in filenames:
            if aFile == "update.json":
                try:
                    fn = os.path.join(dirpath, "Info.plist")
                    if not os.path.exists(fn):
                        fn = os.path.join(dirpath, "info.plist")
                    with open(fn) as f:
                        plist = plistlib.readPlist(f)
                except IOError as e:
                    alp.log("Exception: Info.plist not found ({0}).".format(e))
                    continue
                else:
                    name = plist["name"]
                    local_description = plist["description"]
                    the_json = os.path.join(dirpath, aFile)
                    the_icon = os.path.join(dirpath, "icon.png")
                    if name != "Alleyoop":
                        candidates.append(
                            dict(name=name, json=the_json, icon=the_icon,
                                 path=dirpath, description=local_description))
                    else:
                        downloads_path = os.path.expanduser("~/Downloads/")
                        candidates.append(
                            dict(name=name, json=the_json, icon=the_icon,
                                 path=downloads_path,
                                 description=local_description))

    # Compare each local manifest against its remote counterpart.
    updatables = []
    all_configured = []
    for candidict in candidates:
        with open(candidict["json"]) as f:
            local = json.load(f, encoding="utf-8")
        try:
            remote_uri = local["remote_json"]
            local_version = float(local["version"])
            local_description = candidict["description"]
        except Exception as e:
            alp.log("{0} failed on key: {1}.".format(candidict["name"], e))
            continue
        r = alp.Request(remote_uri)
        try:
            remote = r.request.json()
        except Exception:
            remote = None
        if not remote:
            alp.log("{0} failed to load remote JSON.".format(
                candidict["name"]))
            continue
        try:
            version = float(remote["version"])
            download_uri = remote["download_url"]
            description = remote["description"]
        except Exception as e:
            alp.log("{0} failed with error: {1}".format(candidict["name"], e))
            continue
        if local_version < version:
            updatables.append(
                dict(name=candidict["name"], description=description,
                     icon=candidict["icon"], download=download_uri,
                     path=candidict["path"], version=version))
        all_configured.append(
            dict(name=candidict["name"], description=description,
                 icon=candidict["icon"], download=download_uri,
                 path=candidict["path"], version=version,
                 local_d=local_description))

    q = alp.args()
    items = []
    if not len(q):
        if not len(updatables):
            alp.feedback(
                I(title="No Updates Available",
                  subtitle="All your workflows are up-to-date.",
                  valid=False))
            return
        update_all = '"update-all"'
        for updict in updatables:
            update_all += " \"{0}>{1}>{2}\"".format(
                updict["name"], updict["path"], updict["download"])
        n = len(updatables)
        upd_sib = "s" if len(updatables) != 1 else ""
        items.append(
            I(title="Update All",
              subtitle="Download and install {0} update{1}".format(n, upd_sib),
              valid=True,
              arg=update_all))
        for updict in updatables:
            items.append(
                I(title=updict["name"],
                  subtitle=u"v{0}\u2014{1}".format(updict["version"],
                                                   updict["description"]),
                  icon=updict["icon"],
                  arg="\"update\" \"{0}>{1}>{2}\"".format(
                      updict["name"], updict["path"], updict["download"]),
                  valid=True))
    elif len(q) == 1 and q[0] == "all":
        for configured in all_configured:
            items.append(
                I(title=configured["name"],
                  subtitle=u"v{0}\u2014{1}".format(configured["version"],
                                                   configured["local_d"]),
                  icon=configured["icon"],
                  valid=False))
    else:
        if q[0] != "all":
            search = q[0]
            results = alp.fuzzy_search(
                search, updatables,
                key=lambda x: "{0} - {1}".format(x["name"], x["description"]))
            for result in results:
                items.append(
                    I(title=result["name"],
                      subtitle=u"v{0}\u2014{1}".format(result["version"],
                                                       result["description"]),
                      icon=result["icon"],
                      arg="\"update\" \"{0}>{1}>{2}\"".format(
                          result["name"], result["path"], result["download"]),
                      valid=True))
        else:
            search = q[1]
            results = alp.fuzzy_search(
                search, all_configured,
                key=lambda x: "{0} - {1}".format(x["name"], x["local_d"]))
            for result in results:
                items.append(
                    I(title=result["name"],
                      subtitle=u"v{0}\u2014{1}".format(result["version"],
                                                       result["local_d"]),
                      icon=result["icon"],
                      arg="{0} up-to-date.".format(result["name"]),
                      valid=False))
    alp.feedback(items)
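# Downstream sketch (assumption: the action script that consumes do_feedback's
# arg strings is not shown here). Each update arg encodes name>path>url, so a
# consumer can recover the pieces by splitting on ">":
def parse_update_arg(arg):
    # e.g. 'MyWorkflow>/path/to/workflow>http://example.com/wf.alfredworkflow'
    name, path, download = arg.split(">", 2)
    return dict(name=name, path=path, download=download)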