def do_feedback():
    """Scan sibling workflows for Alleyoop update manifests and check them.

    Uses a 24-hour JSON cache of discovered workflows; `|force|` as the first
    argument wipes the alp cache/storage directories and forces a re-scan.
    Spawns one OopThread per candidate to fetch its remote update JSON, then
    hands all threads to manage_threads() for collection.
    """
    q = alp.args()
    # Parent directory that contains all installed workflows.
    flowPath = os.path.split(alp.local())[0]
    cache = alp.jsonLoad("cache.json", default={})
    day_secs = 24 * 60 * 60
    force = (len(q) > 0 and q[0] == "|force|")
    t = time.time()
    if (force):
        # Nuke both alp directories so every downstream request is fresh.
        import shutil
        _c = alp.cache()
        _s = alp.storage()
        shutil.rmtree(_c)
        shutil.rmtree(_s)
    if (cache.get("cache_time", 0) + day_secs > t) and not force:
        # Cache is less than a day old: reuse the previous scan.
        candidates = cache.get("cached_workflows", [])
    else:
        candidates = []
        for dirpath, dirnames, filenames in os.walk(flowPath, topdown=False, followlinks=True):
            for aFile in filenames:
                if aFile == "update.json":
                    try:
                        # Case varies between workflows; try both spellings.
                        fn = os.path.join(dirpath, "Info.plist")
                        if not os.path.exists(fn):
                            fn = os.path.join(dirpath, "info.plist")
                        plist = alp.readPlist(fn)
                    except IOError as e:
                        alp.log("Exception: Info.plist not found ({0}).".format(e))
                        continue
                    else:
                        name = plist["name"]
                        local_description = plist["description"]
                        the_json = os.path.join(dirpath, aFile)
                        the_icon = os.path.join(dirpath, "icon.png")
                        if name != "Alleyoop":
                            candidates.append(dict(name=name, json=the_json, icon=the_icon,
                                                   path=dirpath, description=local_description))
                        else:
                            # Alleyoop updates itself via ~/Downloads instead of in place.
                            downloads_path = os.path.expanduser("~/Downloads/")
                            candidates.append(dict(name=name, json=the_json, icon=the_icon,
                                                   path=downloads_path, description=local_description))
        new_cache = dict(cache_time=t, cached_workflows=candidates)
        alp.jsonDump(new_cache, "cache.json")
    threads = []
    for candidict in candidates:
        try:
            with codecs.open(candidict["json"]) as f:
                local = json.load(f, encoding="utf-8")
        except Exception as e:
            # Workflow may have been uninstalled since the scan was cached.
            alp.log("{0} may no longer exist: {1}".format(candidict["name"], e))
            continue
        # One fetch thread per workflow; results collected by manage_threads.
        ot = OopThread(local['remote_json'], force, candidict, local)
        threads.append(ot)
        ot.start()
    manage_threads(threads)
def parse_pps():
    """Return a list of people scraped from the PPS lab members page.

    Each entry is a dict with 'url', 'name', 'fuzzy', 'icon' and, when the
    row has enough columns, an 'info' string with office and phone.
    """
    alp.log('parsing pps')
    icon = 'pps.png'
    people_list = []
    base = 'http://www.pps.univ-paris-diderot.fr'
    page = Request(base + '/membres')
    page.download()
    page = page.souper()
    # Skip the header row of the members table.
    trs = page.select('#contenu2 table')[0].find_all('tr')[1:]
    for tr in trs:
        link = tr.find('a')
        if not link:
            continue
        p = {}
        p['url'] = urljoin(base, link.get('href'))
        p['name'] = fmt_name(text(link))
        # NOTE(review): fuzzy string is built before 'icon'/'info' are set —
        # presumably mk_fuzzy only reads 'name'/'url'; confirm if changing order.
        p['fuzzy'] = mk_fuzzy(p)
        p['icon'] = icon
        tds = tr.find_all('td')
        if (len(tds) >= 4):
            # Phone numbers on the page are 4-digit extensions; prepend prefix.
            p['info'] = 'Office ' + text(tds[2]) \
                + ', phone: ' + fmt_phone('01 57 27 ' + text(tds[3]))
        people_list.append(p)
    alp.log('done pps')
    return people_list
def set_tags(tags, path):
    """Write (or clear) the OpenMeta/Mavericks tag xattrs on `path`.

    A non-empty `tags` list is serialized to a binary plist and written under
    every key in TAG_KEYS via setxattr; an empty list removes those xattrs.
    Returns 0 on success, a non-zero errno-style int on xattr failure, the
    string "No dataToSend." when plist serialization yields nothing, or None
    when serialization reports an error.
    """
    path = path.encode("utf-8")
    path = c_char_p(path)
    if len(tags):
        encoded_tags = NSMutableArray.arrayWithCapacity_(len(tags))
        for tag in tags:
            encoded_tags.addObject_(NSString.stringWithCString_encoding_(tag.encode("utf-8"), NSUTF8StringEncoding))
        (dataToSend, errorString) = NSPropertyListSerialization.dataWithPropertyList_format_options_error_(encoded_tags, kCFPropertyListBinaryFormat_v1_0, NSPropertyListImmutable, None)
        if errorString:
            alp.log("Error: {0}".format(errorString))
            return
        if dataToSend:
            # NOTE(review): `bytes` shadows the builtin of the same name.
            bytes = c_char_p(dataToSend.bytes().tobytes())
        else:
            return "No dataToSend."
        for key in TAG_KEYS:
            keyName = key.encode("utf-8")
            check = libc.setxattr(path, keyName, bytes, dataToSend.length(), 0, XATTR_NOFOLLOW)
    else:
        for key in TAG_KEYS:
            keyName = key.encode("utf-8")
            check = libc.removexattr(path, keyName, XATTR_NOFOLLOW)
    # NOTE(review): only the result of the LAST xattr call in the loop is
    # inspected; earlier failures are silently lost — confirm intended.
    if check != 0:
        return check
    return 0
def handle_error(title, subtitle, icon="icon-no.png", debug=""):
    """Render a single error item in Alfred, record it, and stop.

    The `debug` payload goes only to the log file, never to the UI.
    Exits with status 0 so Alfred still displays the feedback item.
    """
    error_item = alp.Item(title=title, subtitle=subtitle, icon=icon)
    alp.feedback(error_item)
    message = "Handled error: %s, %s\n%s" % (title, subtitle, debug)
    alp.log(message)
    sys.exit(0)
def handle_error(title, subtitle, icon="icon-no.png", debug=""):
    """Report an error through Alfred feedback and the log, then exit(0).

    Only `title`/`subtitle`/`icon` are user-visible; `debug` is appended to
    the log entry for later diagnosis.
    """
    alp.feedback(alp.Item(title=title, subtitle=subtitle, icon=icon))
    alp.log("Handled error: %s, %s\n%s" % (title, subtitle, debug))
    sys.exit(0)
def parse_others():
    """Load the manually-maintained people list from others.json.

    Guarantees every returned entry carries a precomputed 'fuzzy' search
    string, computing it on the fly for entries that lack one.
    """
    alp.log('parsing others')
    people = alp.jsonLoad(alp.local('others.json'), [])
    for person in people:
        if 'fuzzy' in person:
            continue
        person['fuzzy'] = mk_fuzzy(person)
    alp.log('done others')
    return people
def __initCache():
    """Populate the module-level `cached_series` list from the on-disk cache.

    Reads module globals `cache_file`, `CACHE_TIME` and `cached_series`.
    A cache file older than CACHE_TIME seconds is deleted; a fresh one has
    its JSON contents appended to `cached_series`. Best-effort by design:
    any failure is swallowed and the caller starts with an empty cache.
    """
    if cache_file:
        try:
            # getctime: creation/metadata-change time; used as the staleness clock.
            if time.time() - os.path.getctime(cache_file) > CACHE_TIME:
                os.remove(cache_file)
            else:
                with open(cache_file) as cache_fp:
                    cached_series.extend(json.load(cache_fp))
                alp.log('Cached series found')
                alp.log(cached_series)
        except Exception:
            # Deliberate: a corrupt or unreadable cache must not break startup.
            pass
def checkCache(delete=False):
    """Remove the requests cache sqlite file when stale or when forced.

    The cache lives at <alp.cache()>/<bundle>_requests_cache.sqlite and is
    considered stale after six hours (by mtime). Pass delete=True to remove
    it unconditionally. Failures are logged, never raised.
    """
    filepath = alp.cache()
    f = '%s_requests_cache.sqlite' % alp.bundle()
    fullpath = os.path.join(filepath, f)
    if os.path.exists(fullpath):
        stale = (datetime.now() -
                 datetime.fromtimestamp(os.path.getmtime(fullpath))) > timedelta(hours=6)
        if stale or delete:
            try:
                os.remove(fullpath)
                alp.log('Successfully removed requests cache')
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; os.remove can only raise OSError here.
            except OSError:
                alp.log('Problem: Could not remove requests cache')
    return
def main():
    """Persist the first workflow argument as `hubid` in config.json.

    Logs the incoming arguments and the round-tripped configuration for
    debugging; on failure logs a user-facing message plus the traceback
    and re-raises.
    """
    args = alp.args()
    alp.log(args)
    try:
        alp.jsonDump(dict(hubid=args[0]), alp.storage('config.json'))
        alp.log('Setting json')
        # Read the file back so the log shows what actually got written.
        alp.log(alp.jsonLoad(alp.storage('config.json')))
    except Exception as e:
        alp.log('Unable to save your configuration. Please try again.')
        alp.log(traceback.format_exc())
        raise e
    return
def stopTimer():
    """Stop the currently running Toggl time entry via the v8 API.

    Returns a user-facing status string, or None when the initial GET does
    not return HTTP 200 (preserving the original fall-through behavior).
    Reads the module-level token `tkn`.
    """
    if tkn is None:
        return 'Please set Token via \'tgl token <TOKEN>\''
    timer = requests.get('https://www.toggl.com/api/v8/time_entries',
                         auth=(tkn, 'api_token'))
    if timer.status_code != 200:
        return None  # original implicitly returned None on HTTP failure
    # Legacy requests API: `.json` is a property. Hoisted so the list is
    # built once instead of twice (was timer.json[len(timer.json)-1]).
    entries = timer.json
    current = entries[-1]  # most recent time entry
    alp.log(current)
    if 'stop' in current:
        return 'No currently running timer'
    current['stop'] = toString(datetime.utcnow())
    current['duration'] = computeDuration(current)
    # Result of the PUT was assigned to an unused `res`; the call's side
    # effect (closing the entry server-side) is all that matters.
    requests.put('https://www.toggl.com/api/v8/time_entries/%s' % current['id'],
                 auth=(tkn, 'api_token'),
                 data=json.dumps({'time_entry': current}))
    return "Stopped current timer %s" % current['description']
def run(self):
    """Thread body: fetch this workflow's remote update JSON.

    On success stores dict(json=<remote json>, candidict=..., local=...) on
    self.result; on any failure logs the problem and leaves self.result unset.
    """
    try:
        r = alp.Request(self.target_url, cache=False)
    except (IntegrityError, ConnectionError) as e:
        alp.log("Connection to {0} raised exception: {1}.".format(self.target_url, e))
        return
    else:
        if not self.forced:
            r.download()
        else:
            # Forced refresh: drop any cached copy before re-fetching.
            r.clear_cache()
        try:
            # Parse once purely as a validity check; result `j` is unused.
            j = r.request.json()
        except Exception as e:
            alp.log("{0} threw exception {1}.".format(self.target_url, e))
            return
        # NOTE(review): json() is parsed a second time here — harmless but
        # redundant with `j` above.
        self.result = dict(json=r.request.json(), candidict=self.candidict, local=self.local)
def list_instances(name):
    """Build Alfred items for EC2 instances whose Name tag matches `name`.

    Queries at least two characters long hit the EC2 API; each matching
    instance yields an item whose arg is a ready-to-run ssh command (direct
    for public instances, via the `vpc` jump host otherwise). Errors are
    logged and surfaced as a single "Problem Searching" item. The first
    item is logged; nothing is returned.
    """
    items = []
    try:
        if len(name) < 2:
            items.append(
                alp.Item(
                    title='Searching',
                    # Typo fix: "then" -> "than".
                    subtitle='Please type more than one character to start searching',
                    valid=False))
        else:
            ec2 = boto.connect_ec2()
            # get_all_instances returns reservations; instances live inside.
            for r in ec2.get_all_instances():
                groups = ';'.join([g.name or g.id for g in r.groups])
                for instance in r.instances:
                    instance_name = instance.tags.get(
                        'Name', instance.tags.get('name', ''))
                    if not name.lower() in instance_name.lower():
                        continue
                    if instance.public_dns_name:
                        arg = 'ssh -i ~/.ssh/%s.pem %s\n' % (
                            instance.key_name, instance.public_dns_name)
                    else:
                        # Private-only instance: hop through the vpc bastion.
                        arg = 'ssh vpc\nssh %s\n' % instance.private_ip_address
                    items.append(
                        alp.Item(title=instance_name,
                                 subtitle='[%s]: %s' % (instance.id, groups),
                                 valid=True,
                                 arg=arg))
            if len(items) == 0:
                items.append(
                    alp.Item(title='No Results Found',
                             subtitle='Please refine your search and try again'))
    # Was the deprecated Python-2-only `except Exception, e` form.
    except Exception as e:
        alp.log(str(e))
        items = [
            alp.Item(title='Problem Searching',
                     subtitle='%s' % str(e).replace("'", ''),
                     valid=False)
        ]
    alp.log(items[0].get())
def list_instances(name):
    """Returns a list of instances with a given name"""
    # Builds Alfred items for EC2 instances whose Name/name tag contains
    # `name` (case-insensitive); queries shorter than 2 chars short-circuit.
    items = []
    try:
        if len(name) <2:
            items.append(alp.Item(
                title='Searching',
                subtitle='Please type more then one character to start searching',
                valid=False
            ))
        else:
            ec2 = boto.connect_ec2()
            # Reservations wrap the actual instances.
            for r in ec2.get_all_instances():
                groups = ';'.join([g.name or g.id for g in r.groups])
                for instance in r.instances:
                    instance_name = instance.tags.get('Name', instance.tags.get('name', ''))
                    if not name.lower() in instance_name.lower():
                        continue
                    if instance.public_dns_name:
                        # Direct ssh with the instance's key pair.
                        arg = 'ssh -i ~/.ssh/%s.pem %s\n' % (instance.key_name, instance.public_dns_name)
                    else:
                        # Private instance: hop through the `vpc` bastion host.
                        arg = 'ssh vpc\nssh %s\n' % instance.private_ip_address
                    items.append(alp.Item(
                        title=instance_name,
                        subtitle='[%s]: %s' % (instance.id, groups),
                        valid=True,
                        arg=arg
                    ))
            if len(items) == 0:
                items.append(alp.Item(
                    title='No Results Found',
                    subtitle='Please refine your search and try again'
                ))
    # NOTE(review): Python-2-only `except X, e` syntax; also "then" should
    # read "than" in the subtitle above.
    except Exception, e:
        alp.log(str(e))
        items = [alp.Item(
            title='Problem Searching',
            subtitle='%s' % str(e).replace("'", ''),
            valid=False
        )]
    # Only logs the first item; feedback is presumably emitted by the caller.
    alp.log(items[0].get())
def run(self):
    """Worker-thread entry point: download self.target_url's update JSON.

    Success populates self.result with the parsed remote JSON plus the
    candidate/local context; every failure path logs and returns early,
    leaving self.result unset.
    """
    try:
        r = alp.Request(self.target_url, cache=False)
    except (IntegrityError, ConnectionError) as e:
        alp.log("Connection to {0} raised exception: {1}.".format(
            self.target_url, e))
        return
    else:
        if not self.forced:
            r.download()
        else:
            # Forced mode: discard any cached response first.
            r.clear_cache()
        try:
            # Parse to validate the payload; `j` itself is never read.
            j = r.request.json()
        except Exception as e:
            alp.log("{0} threw exception {1}.".format(self.target_url, e))
            return
        self.result = dict(json=r.request.json(),
                           candidict=self.candidict,
                           local=self.local)
def parse_gallium():
    """ Return a list of people from Gallium. Only a part of them
    are teaching at Paris Diderot. """
    alp.log('parsing gallium')
    base = 'http://gallium.inria.fr'
    page = Request(base + '/members.html')
    page.download()
    soup = page.souper()
    people = []
    for anchor in soup.select('#columnA_2columns a'):
        person = {
            'name': text(anchor),
            'url': urljoin(base, anchor.get('href')),
        }
        person['icon'] = 'inria.png'
        # Fuzzy string is derived from the fields set so far.
        person['fuzzy'] = mk_fuzzy(person)
        people.append(person)
    alp.log('done gallium')
    return people
def find_downloads():
    """Return (path, name, mod_time, color, tags) tuples for ~/Downloads.

    Skips .DS_Store/.localized and entries whose path no longer exists
    (broken symlinks are logged). Python 2 code: paths are handled as
    `unicode` throughout.
    """
    paths = []
    downloads = unicode(os.path.expanduser(u"~/Downloads"))
    for fn in os.listdir(downloads):
        if not fn == ".DS_Store" and not fn == ".localized":
            fpath = os.path.join(downloads, fn)
            if os.path.exists(fpath):
                paths.append(fpath)
            else:
                alp.log(u"Could not add '{0}'---is it a broken symlink?".format(fpath))
    returnables = []
    for path in paths:
        # NOTE(review): .decode("utf-8") on an already-unicode path implicitly
        # ascii-encodes first in Python 2 and can raise on non-ASCII names —
        # confirm listdir really returns bytes here.
        path = path.decode("utf-8")
        path = unicode(path)
        _, name = os.path.split(path)
        # getctime despite the `mod_time` name: metadata-change time on Unix.
        mod_time = os.path.getctime(path)
        color = get_finder_label(path)
        tags = get_tags(path)
        returnables.append((path, name, mod_time, color, tags))
    return returnables
def parse_gallium():
    """ Return a list of people from Gallium. Only a part of them
    are teaching at Paris Diderot. """
    # Each entry: dict with 'name', 'url', 'icon', 'fuzzy' (search string).
    alp.log('parsing gallium')
    icon = 'inria.png'
    people_list = []
    base = 'http://gallium.inria.fr'
    page = Request(base + '/members.html')
    page.download()
    page = page.souper()
    links = page.select('#columnA_2columns a')
    for link in links:
        p = {
            'name': text(link),
            'url': urljoin(base, link.get('href'))
        }
        p['icon'] = icon
        # Fuzzy string derived from the fields populated above.
        p['fuzzy'] = mk_fuzzy(p)
        people_list.append(p)
    alp.log('done gallium')
    return people_list
def stopTimer():
    """Stop the currently running Toggl time entry via the v8 API.

    Reads the module-level token `tkn`. Returns a user-facing status string;
    implicitly returns None when the GET does not return HTTP 200.
    """
    if tkn is None:
        return 'Please set Token via \'tgl token <TOKEN>\''
    else:
        timer = requests.get('https://www.toggl.com/api/v8/time_entries',
                             auth=(tkn, 'api_token'))
        if timer.status_code == 200:
            # Legacy requests API: `.json` is a property, not a method.
            # Last element = most recent time entry.
            current = timer.json[len(timer.json) - 1]
            alp.log(current)
            if 'stop' in current:
                # The newest entry already has an end time: nothing running.
                return 'No currently running timer'
            else:
                current['stop'] = toString(datetime.utcnow())
                current['duration'] = computeDuration(current)
                res = requests.put(
                    'https://www.toggl.com/api/v8/time_entries/%s' % current['id'],
                    auth=(tkn, 'api_token'),
                    data=json.dumps({'time_entry': current}))
                return "Stopped current timer %s" % current['description']
def fetch_stats(url, signature, force=False):
    """Fetch up to the 15 most recent links from a YOURLS stats endpoint.

    POSTs a signed 'stats' request (cached for 30 minutes unless `force`).
    Returns a list of link dicts on success, or a single feedback item `I`
    describing the failure (network error, bad JSON, or zero links).
    """
    payload = {
        "action": "stats",
        "filter": "last",
        "limit": 15,
        "format": "json",
        "signature": signature
    }
    r = alp.Request(url, payload=payload, post=True, cache_for=1800)
    if force:
        r.clear_cache()
        r.download()
    try:
        j = r.request.json()
    except Exception as e:
        alp.log("Exception: {0}\nurl={1}".format(e, url))
        return I(title="Network Error", subtitle=str(e), valid=False)
    try:
        count = int(j["stats"]["total_links"])
    except Exception as e:
        alp.log("Exception: {0}\nurl={1}\njson={2}".format(e, url, j))
        return I(title="JSON Error", subtitle=str(e), valid=False)
    if count <= 0:  # was `if not count > 0`
        alp.log("Error: No Links\nurl={0}\ncount={1}".format(url, count))
        return I(title="No Links!", subtitle="count={0}".format(count), valid=False)
    # The API labels entries link_1..link_N; never request more than 15.
    count = min(count, 15)
    return [j["links"]["link_%s" % i] for i in range(1, count + 1)]
def parse_liafa():
    """ Return a list of people from LIAFA. """
    # Scrapes the alphabetical member list, then each member's own page for
    # their name and (when present) home-page URL.
    alp.log('parsing liafa')
    icon = 'liafa.png'
    people_list = []
    base = 'http://www.liafa.univ-paris-diderot.fr/'
    tr_sel = 'blockquote > table tr.fondgristresc'  # td:first-child a'
    page = Request(urljoin(base, '/web9/membreliafa/listalpha_fr.php'))
    page.download()
    page = page.souper()
    for tr in page.select(tr_sel):
        links = tr.select('td a')
        if len(links) == 0:
            continue
        u = links[0].get('href')
        if u is None:  # was `u == None` (PEP 8 E711)
            continue
        p = {}
        tds = tr.select('td.texte')
        if len(tds) >= 2:
            p['info'] = 'Office ' + text(tds[1]) \
                + ', phone: ' + fmt_phone(text(tds[0]))
        # Renamed from `page`, which shadowed the listing page being iterated
        # (safe only because select() had already materialized the rows).
        member_page = Request(base + u)
        member_page.download()
        member_page = member_page.souper()
        pp = member_page.select('table.texte li a.bleu')
        if (pp):
            pp = pp[0]
            p['url'] = urljoin(base, pp.get('href'))
        p['name'] = fmt_name(text(member_page.select('blockquote h2')[0]))
        p['icon'] = icon
        p['fuzzy'] = mk_fuzzy(p)
        people_list.append(p)
    alp.log('done liafa')
    return people_list
def find_downloads():
    """Collect (path, name, mod_time, color, tags) for every ~/Downloads entry.

    Ignores .DS_Store/.localized; non-existent paths (broken symlinks) are
    logged and skipped. Python 2: works in `unicode` strings.
    """
    paths = []
    downloads = unicode(os.path.expanduser(u"~/Downloads"))
    for fn in os.listdir(downloads):
        if not fn == ".DS_Store" and not fn == ".localized":
            fpath = os.path.join(downloads, fn)
            if os.path.exists(fpath):
                paths.append(fpath)
            else:
                alp.log(
                    u"Could not add '{0}'---is it a broken symlink?".format(
                        fpath))
    returnables = []
    for path in paths:
        # NOTE(review): decoding a unicode string triggers an implicit ascii
        # encode in Python 2; may raise for non-ASCII filenames — verify.
        path = path.decode("utf-8")
        path = unicode(path)
        _, name = os.path.split(path)
        # getctime, despite the variable name, is ctime on Unix.
        mod_time = os.path.getctime(path)
        color = get_finder_label(path)
        tags = get_tags(path)
        returnables.append((path, name, mod_time, color, tags))
    return returnables
def set_tags(tags, path):
    """Write (or clear, when `tags` is empty) the tag xattrs on `path`.

    Tags are serialized to a binary plist and stored under every key in
    TAG_KEYS via libc.setxattr. Returns 0 on success, the failing xattr
    return code otherwise, "No dataToSend." if serialization produced no
    data, or None when serialization reported an error string.
    """
    path = path.encode("utf-8")
    path = c_char_p(path)
    if len(tags):
        encoded_tags = NSMutableArray.arrayWithCapacity_(len(tags))
        for tag in tags:
            encoded_tags.addObject_(
                NSString.stringWithCString_encoding_(tag.encode("utf-8"),
                                                     NSUTF8StringEncoding))
        (
            dataToSend, errorString
        ) = NSPropertyListSerialization.dataWithPropertyList_format_options_error_(
            encoded_tags, kCFPropertyListBinaryFormat_v1_0,
            NSPropertyListImmutable, None)
        if errorString:
            alp.log("Error: {0}".format(errorString))
            return
        if dataToSend:
            # NOTE(review): `bytes` shadows the builtin name.
            bytes = c_char_p(dataToSend.bytes().tobytes())
        else:
            return "No dataToSend."
        for key in TAG_KEYS:
            keyName = key.encode("utf-8")
            check = libc.setxattr(path, keyName, bytes, dataToSend.length(), 0,
                                  XATTR_NOFOLLOW)
    else:
        for key in TAG_KEYS:
            keyName = key.encode("utf-8")
            check = libc.removexattr(path, keyName, XATTR_NOFOLLOW)
    # NOTE(review): only the last loop iteration's result reaches this check;
    # earlier xattr failures go unreported — confirm intended.
    if check != 0:
        return check
    return 0
def fetch_stats(url, signature, force=False):
    """Fetch up to the 15 most recent links from a YOURLS stats endpoint.

    POSTs a signed 'stats' query (response cached 30 min unless `force`).
    Returns a list of link dicts, or a single feedback item `I` describing
    the failure (network, JSON, or empty result).
    """
    payload = {
        "action": "stats",
        "filter": "last",
        "limit": 15,
        "format": "json",
        "signature": signature
    }
    r = alp.Request(url, payload=payload, post=True, cache_for=1800)
    if force:
        r.clear_cache()
        r.download()
    try:
        j = r.request.json()
    except Exception as e:
        alp.log("Exception: {0}\nurl={1}".format(e, url))
        return I(title="Network Error", subtitle=str(e), valid=False)
    try:
        count = int(j["stats"]["total_links"])
    except Exception as e:
        alp.log("Exception: {0}\nurl={1}\njson={2}".format(e, url, j))
        return I(title="JSON Error", subtitle=str(e), valid=False)
    if not count > 0:
        alp.log("Error: No Links\nurl={0}\ncount={1}".format(url, count))
        return I(title="No Links!", subtitle="count={0}".format(count),
                 valid=False)
    links = []
    # API caps the keyed entries at link_1..link_15.
    count = 15 if count > 15 else count
    for i in range(1, count + 1):
        key = "link_%s" % i
        links.append(j["links"][key])
    return links
def do_feedback():
    """Synchronous Alleyoop update check: scan, compare versions, emit feedback.

    Walks sibling workflow folders for update.json manifests, fetches each
    workflow's remote JSON serially, and builds Alfred feedback: with no
    query, an "Update All" item plus one item per updatable workflow; with
    query "all", a read-only listing; otherwise a fuzzy search.
    """
    flowPath = os.path.split(alp.local())[0]
    candidates = []
    for dirpath, dirnames, filenames in os.walk(flowPath, topdown=False):
        for aFile in filenames:
            if aFile == "update.json":
                try:
                    # Case varies between workflows; try both spellings.
                    fn = os.path.join(dirpath, "Info.plist")
                    if not os.path.exists(fn):
                        fn = os.path.join(dirpath, "info.plist")
                    with open(fn) as f:
                        plist = plistlib.readPlist(f)
                except IOError as e:
                    alp.log("Exception: Info.plist not found ({0}).".format(e))
                    continue
                else:
                    name = plist["name"]
                    local_description = plist["description"]
                    the_json = os.path.join(dirpath, aFile)
                    the_icon = os.path.join(dirpath, "icon.png")
                    if name != "Alleyoop":
                        candidates.append(
                            dict(name=name, json=the_json, icon=the_icon,
                                 path=dirpath, description=local_description))
                    else:
                        # Alleyoop updates itself via ~/Downloads.
                        downloads_path = os.path.expanduser("~/Downloads/")
                        candidates.append(
                            dict(name=name, json=the_json, icon=the_icon,
                                 path=downloads_path, description=local_description))
    updatables = []
    all_configured = []
    for candidict in candidates:
        with open(candidict["json"]) as f:
            local = json.load(f, encoding="utf-8")
        try:
            remote_uri = local["remote_json"]
            local_version = float(local["version"])
            local_description = candidict["description"]
        except Exception as e:
            alp.log("{0} failed on key: {1}.".format(candidict["name"], e))
            continue
        r = alp.Request(remote_uri)
        # Legacy requests API: `.json` is a property here.
        remote = r.request.json
        if not remote:
            alp.log("{0} failed to load remote JSON.".format(
                candidict["name"]))
            continue
        # NOTE(review): should_add is redundant — the except branch below
        # `continue`s, so should_add is always True at the comparison.
        should_add = True
        try:
            version = float(remote["version"])
            download_uri = remote["download_url"]
            description = remote["description"]
        except Exception as e:
            alp.log("{0} failed with error: {1}".format(candidict["name"], e))
            should_add = False
            continue
        if should_add and local_version < version:
            updatables.append(
                dict(name=candidict["name"], description=description,
                     icon=candidict["icon"], download=download_uri,
                     path=candidict["path"], version=version))
        all_configured.append(
            dict(name=candidict["name"], description=description,
                 icon=candidict["icon"], download=download_uri,
                 path=candidict["path"], version=version,
                 local_d=local_description))
    q = alp.args()
    items = []
    if not len(q):
        # No query: offer bulk update plus one item per updatable workflow.
        if not len(updatables):
            alp.feedback(
                I(title="No Updates Available",
                  subtitle="All your workflows are up-to-date.",
                  valid=False))
            return
        # arg format consumed downstream: "update-all" "name>path>download" ...
        update_all = '"update-all"'
        for updict in updatables:
            update_all += " \"{0}>{1}>{2}\"".format(updict["name"],
                                                    updict["path"],
                                                    updict["download"])
        n = len(updatables)
        upd_sib = "s" if len(updatables) != 1 else ""
        items.append(
            I(title="Update All",
              subtitle="Download and install {0} update{1}".format(n, upd_sib),
              valid=True,
              arg=update_all))
        for updict in updatables:
            items.append(
                I(title=updict["name"],
                  subtitle=u"v{0}\u2014{1}".format(updict["version"],
                                                   updict["description"]),
                  icon=updict["icon"],
                  arg="\"update\" \"{0}>{1}>{2}\"".format(
                      updict["name"], updict["path"], updict["download"]),
                  valid=True))
    elif len(q) == 1 and q[0] == "all":
        # Read-only listing of every Alleyoop-compatible workflow.
        for configured in all_configured:
            items.append(
                I(title=configured["name"],
                  subtitle=u"v{0}\u2014{1}".format(configured["version"],
                                                   configured["local_d"]),
                  icon=configured["icon"],
                  valid=False))
    else:
        if q[0] != "all":
            # Fuzzy-search the updatable workflows.
            search = q[0]
            results = alp.fuzzy_search(
                search,
                updatables,
                key=lambda x: "{0} - {1}".format(x["name"], x["description"]))
            for result in results:
                items.append(
                    I(title=result["name"],
                      subtitle=u"v{0}\u2014{1}".format(result["version"],
                                                       result["description"]),
                      icon=result["icon"],
                      arg="\"update\" \"{0}>{1}>{2}\"".format(
                          result["name"], result["path"], result["download"]),
                      valid=True))
        else:
            # "all <term>": fuzzy-search the full configured list.
            search = q[1]
            results = alp.fuzzy_search(
                search,
                all_configured,
                key=lambda x: "{0} - {1}".format(x["name"], x["local_d"]))
            for result in results:
                items.append(
                    I(title=result["name"],
                      subtitle=u"v{0}\u2014{1}".format(result["version"],
                                                       result["local_d"]),
                      icon=result["icon"],
                      arg="{0} up-to-date.".format(result["name"]),
                      valid=False))
    alp.feedback(items)
def handle_error(title, subtitle, icon="icon-no.png", debug=""):
    """Show a single error item in Alfred, log it, and exit cleanly.

    `debug` is extra context written only to the log file. Exits with
    status 0 so Alfred still renders the feedback item.
    """
    i = alp.Item(title=title, subtitle=subtitle, icon=icon)
    alp.feedback(i)
    alp.log("Handled error: %s, %s\n%s" % (title, subtitle, debug))
    sys.exit(0)
def print_feedback(results):
    """Turn OopThread results into Alfred feedback items.

    `results` holds dicts of (json=remote manifest, candidict=scan entry,
    local=local update.json). Splits them into `updatables` (remote version
    newer) and `all_configured`, then renders items according to the query:
    none/"|force|" -> update list, "|all|" -> listing, else fuzzy search.
    """
    updatables = []
    all_configured = []
    for r in results:
        try:
            j = r['json']
            d = r['candidict']
            l = r['local']
        except Exception as e:
            # NOTE(review): if r['json'] fails on the FIRST iteration, `d` is
            # unbound here (NameError); there is also no `continue`, so the
            # code below may run with stale/unbound j/d/l — confirm intended.
            alp.log("{0} threw exception {1}".format(d['name'], e))
            alp.log(r)
        try:
            version = float(j['version'])
            download_uri = j['download_url']
            description = j['description']
        except Exception as e:
            alp.log("{0} failed with error: {1}".format(d['name'], e))
            continue
        try:
            lv = l['version']
        except Exception as e:
            alp.log("{0} failed with malformed json: {1}".format(d['name'], e))
            alp.log(d)
            continue
        if lv < version:
            updatables.append(dict(name=d['name'], description=description,
                                   icon=d['icon'], download=download_uri,
                                   path=d['path'], version=version))
        all_configured.append(dict(name=d['name'], description=description,
                                   icon=d['icon'], download=download_uri,
                                   path=d['path'], version=version,
                                   local_d=d['description']))
    items = []
    q = alp.args()
    if not len(q) or (len(q) == 1 and q[0] == "|force|"):
        if not len(updatables):
            alp.feedback(I(title="No Updates Available",
                           subtitle="All your workflows are up-to-date.",
                           valid=False))
            return
        # Downstream arg format: "update-all" "name>path>download" ...
        update_all = '"update-all"'
        for updict in updatables:
            update_all += " \"{0}>{1}>{2}\"".format(updict['name'],
                                                    updict['path'],
                                                    updict['download'])
        n = len(updatables)
        upd_sib = "s" if n != 1 else ""
        items.append(I(title="Update All",
                       subtitle="Download {0} update{s}".format(n, s=upd_sib),
                       valid=True,
                       arg=update_all))
        for up in updatables:
            items.append(I(title=up['name'],
                           subtitle=u"v{0}\u2014{1}".format(up['version'],
                                                            up['description']),
                           icon=up['icon'],
                           arg=u"\"update\" \"{0}>{1}>{2}\"".format(
                               up['name'], up['path'], up['download']),
                           valid=True))
    elif len(q) == 1 and q[0] == "|all|":
        if not len(all_configured):
            alp.feedback(I(title="No Compatible Workflows",
                           subtitle="No Alleyoop workflows detected",
                           valid=False))
            return
        for c in all_configured:
            items.append(I(title=c['name'],
                           subtitle=u"v{0}\u2014{1}".format(c['version'],
                                                            c['local_d']),
                           icon=c['icon'],
                           valid=False))
    else:
        if q[0] != "|all|" and q[0] != "|force|":
            if not len(results):
                alp.log("'oop' may be broken.")
                alp.log("len(all_configured)=0")
                alp.log("q='{0}'".format(q))
                alp.log("updatables='{0}'".format(updatables))
                alp.log("results='{0}'".format(results))
                alp.feedback(I(title="Error",
                               subtitle="No compatible workflows were found. See debug.log for info.",
                               valid=False))
                return
            search = q[0]
            # NOTE(review): updatable dicts carry no 'local_d' key (see the
            # appends above) — this key fn likely raises KeyError; the sibling
            # implementation uses x['description'] here.
            results = alp.fuzzy_search(search, updatables,
                                       key=lambda x: u"{0} - {1}".format(x['name'], x['local_d']))
            for r in results:
                # NOTE(review): arg format string is missing the closing \"
                # after {2} (compare the "|force|" branch above).
                items.append(I(title=r['name'],
                               subtitle=u"v{0}\u2014{1}".format(r['version'],
                                                                r['description']),
                               icon=r['icon'],
                               arg="\"update\" \"{0}>{1}>{2}".format(
                                   r['name'], r['path'], r['download']),
                               valid=True))
        else:
            if not len(all_configured):
                alp.log("'oop!' may be broken.")
                alp.log("len(all_configured)=0")
                alp.log("q='{0}'".format(q))
                alp.log("updatables='{0}'".format(updatables))
                alp.log("results='{0}'".format(results))
                alp.feedback(I(title="Error",
                               subtitle="No compatible workflows were found. See debug.log for info.",
                               valid=False))
                return
            search = q[1]
            results = alp.fuzzy_search(search, all_configured,
                                       key=lambda x: u"{0} - {1}".format(x['name'], x['local_d']))
            for r in results:
                items.append(I(title=r['name'],
                               subtitle=u"v{0}\u2014{1}".format(r['version'],
                                                                r['local_d']),
                               valid=False))
    if not len(items):
        items.append(I(title="No Results",
                       subtitle="Your query did not return any results.",
                       valid=False))
    alp.feedback(items)
def main():
    """Filter routes.json by the optional query and emit Alfred feedback."""
    routes = alp.jsonLoad(alp.local('routes.json'), [])
    alp_args = alp.args()
    if len(alp_args) > 0:
        # Case-insensitive substring match on title or description.
        search = alp.args()[0].lower()
        routes = filter(
            lambda route: search in route.get('title').lower() or search in
            route.get('description', '').lower(), routes)
    items = map(item, routes)
    return alp.feedback(items)


def item(route):
    """Convert one route dict into an actionable alp.Item (opens the URL)."""
    url = 'https://app.hubspot.com%s' % route.get('path')
    return alp.Item(title=route.get('title'),
                    subtitle=route.get('description'),
                    arg=url,
                    uid=route.get('path'),
                    valid=True)


if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        # Log the full traceback for the workflow debug log, then re-raise.
        alp.log(traceback.format_exc())
        raise e
def do_feedback():
    """Threaded Alleyoop update scan.

    Discovers sibling workflows carrying update.json (result cached 24h in
    cache.json; `|force|` wipes alp cache/storage and re-scans), then starts
    one OopThread per candidate to fetch its remote manifest and delegates
    collection to manage_threads().
    """
    q = alp.args()
    # Directory that contains all installed workflows.
    flowPath = os.path.split(alp.local())[0]
    cache = alp.jsonLoad("cache.json", default={})
    day_secs = 24 * 60 * 60
    force = (len(q) > 0 and q[0] == "|force|")
    t = time.time()
    if (force):
        # Forced refresh: remove both alp data directories entirely.
        import shutil
        _c = alp.cache()
        _s = alp.storage()
        shutil.rmtree(_c)
        shutil.rmtree(_s)
    if (cache.get("cache_time", 0) + day_secs > t) and not force:
        candidates = cache.get("cached_workflows", [])
    else:
        candidates = []
        for dirpath, dirnames, filenames in os.walk(flowPath,
                                                    topdown=False,
                                                    followlinks=True):
            for aFile in filenames:
                if aFile == "update.json":
                    try:
                        # Plist filename case differs across workflows.
                        fn = os.path.join(dirpath, "Info.plist")
                        if not os.path.exists(fn):
                            fn = os.path.join(dirpath, "info.plist")
                        plist = alp.readPlist(fn)
                    except IOError as e:
                        alp.log(
                            "Exception: Info.plist not found ({0}).".format(e))
                        continue
                    else:
                        name = plist["name"]
                        local_description = plist["description"]
                        the_json = os.path.join(dirpath, aFile)
                        the_icon = os.path.join(dirpath, "icon.png")
                        if name != "Alleyoop":
                            candidates.append(
                                dict(name=name, json=the_json, icon=the_icon,
                                     path=dirpath,
                                     description=local_description))
                        else:
                            # Alleyoop itself installs updates from ~/Downloads.
                            downloads_path = os.path.expanduser("~/Downloads/")
                            candidates.append(
                                dict(name=name, json=the_json, icon=the_icon,
                                     path=downloads_path,
                                     description=local_description))
        new_cache = dict(cache_time=t, cached_workflows=candidates)
        alp.jsonDump(new_cache, "cache.json")
    threads = []
    for candidict in candidates:
        try:
            with codecs.open(candidict["json"]) as f:
                local = json.load(f, encoding="utf-8")
        except Exception as e:
            # The workflow may have been removed since the cached scan.
            alp.log("{0} may no longer exist: {1}".format(
                candidict["name"], e))
            continue
        ot = OopThread(local['remote_json'], force, candidict, local)
        threads.append(ot)
        ot.start()
    manage_threads(threads)
def get_comics(query=None):
    """Drive the Previews World comic-browsing workflow.

    Three screens, selected by `query`:
      * a publisher name present in the pickled data -> list that publisher's
        books, with a "Go Back" item;
      * 'this'/'next' -> scrape the corresponding releases page, pickle the
        results, and list publishers;
      * anything else -> offer the this-week/next-week entry points.
    State is pickled under savedata/ so consecutive invocations can navigate.
    """
    comicData = 'savedata/comicData'
    pageData = 'savedata/pageData'
    try:
        # Open pickled file for reading
        entries = getData(comicData)
        # Check for second "screen"
        if query in entries.keys():
            # Get which page we came from
            page = getData(pageData)
            feedback = []
            item = alp.Item(title='Go Back',
                            subtitle='...',
                            valid=False,
                            icon=get_icon('BACK'),
                            autocomplete=page)
            feedback.append(item)
            books = entries[query]
            for book in books:
                url = book['link']
                subtitle = '%s - %s - %s - %s' % (book['price'], query,
                                                  book['id'],
                                                  time.strftime("%-m/%d/%Y", book['date']))
                natural = '%s on %s' % (book['title'],
                                        time.strftime("%-m/%d/%Y", book['date']))
                # Downstream action script receives a JSON command payload.
                argument = json_args({"command":"link", "url":url, 'natural':natural})
                item = alp.Item(title = book['title'],
                                subtitle = subtitle,
                                valid = True,
                                icon = get_icon(query),
                                arg = argument )
                feedback.append(item)
            return alp.feedback(feedback)
        if query == 'this' or query == 'next':
            t = 'inside loop %s' % query
            alp.log(t)
            # Check cache and delete if too old
            checkCache()
            # Set the correct url based on query
            if query == 'this':
                url = "http://www.previewsworld.com/Home/1/1/71/952"  # New Releases
            else:
                url = "http://www.previewsworld.com/Home/1/1/71/954"  # Upcoming Releases
            # Get the comics dictionary
            entries = getComics(url)
            # Save the entries for next script
            f = open(comicData, 'w')
            pickle.dump(entries, f)
            f.close()
            # Save so we can navigate between pages
            f = open(pageData,'w')
            pickle.dump(query,f)
            f.close()
            # Build the feedback Array
            feedback = []
            keys = entries.keys()
            # Build the Date item
            firstkey = keys[0]
            date = entries[firstkey][0]['date']
            title = 'Comics for %s' % time.strftime("%-m/%-d/%Y", date)
            if query == 'this':
                auto = 'next'
            else:
                auto = 'this'
            item = alp.Item(title = title,
                            valid = False,
                            icon = get_icon('CALENDAR') ,
                            autocomplete = auto)
            feedback.append(item)
            for key in entries.keys():
                # Create the subtitle
                sub = 'Show Comics for %s' % string.capwords(key)
                # Build the Alfred item
                item = alp.Item(title = string.capwords(key),
                                subtitle = sub,
                                uid = key,
                                valid = False,
                                autocomplete = string.capwords(key),
                                icon = get_icon(key) )
                # Put item in feedback array
                feedback.append(item)
        else:
            # Entry screen: choose this week's or next week's releases.
            # NOTE(review): the 'Next weeks' item autocompletes to 'this' —
            # presumably should be 'next'; confirm.
            feedback = []
            item = alp.Item(title='This weeks comics?',
                            valid=False,
                            autocomplete='this',
                            icon=get_icon('FORWARD'))
            feedback.append(item)
            item = alp.Item(title='Next weeks comics?',
                            valid=False,
                            autocomplete='this',
                            icon=get_icon('FORWARD'))
            feedback.append(item)
        return alp.feedback(feedback)
    except IOError:
        # First run (no savedata yet): seed both pickles with defaults.
        f = open(comicData, 'w')
        pickle.dump({'reload':'data'}, f)
        f.close()
        f = open(pageData, 'w')
        pickle.dump('this', f)
        f.close()
    except:
        # Catch-all: log each element of exc_info and show a generic error.
        for error in sys.exc_info():
            log = 'Unexpected Error: %s' % error
            alp.log(log)
        # Build the Alfred item
        item = alp.Item(title = 'Something went wrong...',
                        subtitle = 'Check the logs',
                        uid = 'Error',
                        valid = False,
                        autocomplete = None,
                        icon = None )
        feedback = [item]
        # Generate feedback and send to Alfred
        return alp.feedback(feedback)
def print_feedback(results):
    """Convert OopThread results into Alfred feedback items.

    Each result holds (json=remote manifest, candidict=scan entry,
    local=local update.json). Workflows whose remote version exceeds the
    local one go into `updatables`; everything parsable goes into
    `all_configured`. Rendering depends on alp.args(): empty or "|force|"
    -> update list; "|all|" -> read-only listing; otherwise fuzzy search.

    Fixes vs. the previous revision: the fuzzy-search key for `updatables`
    used x['local_d'], a key those dicts never contain (KeyError) — it now
    uses x['description']; and the per-result `arg` format string was
    missing its closing escaped quote after {2}.
    """
    updatables = []
    all_configured = []
    for r in results:
        try:
            j = r['json']
            d = r['candidict']
            l = r['local']
        except Exception as e:
            alp.log("{0} threw exception {1}".format(d['name'], e))
            alp.log(r)
        try:
            version = float(j['version'])
            download_uri = j['download_url']
            description = j['description']
        except Exception as e:
            alp.log("{0} failed with error: {1}".format(d['name'], e))
            continue
        try:
            lv = l['version']
        except Exception as e:
            alp.log("{0} failed with malformed json: {1}".format(d['name'], e))
            alp.log(d)
            continue
        if lv < version:
            updatables.append(
                dict(name=d['name'], description=description, icon=d['icon'],
                     download=download_uri, path=d['path'], version=version))
        all_configured.append(
            dict(name=d['name'], description=description, icon=d['icon'],
                 download=download_uri, path=d['path'], version=version,
                 local_d=d['description']))
    items = []
    q = alp.args()
    if not len(q) or (len(q) == 1 and q[0] == "|force|"):
        if not len(updatables):
            alp.feedback(
                I(title="No Updates Available",
                  subtitle="All your workflows are up-to-date.",
                  valid=False))
            return
        # Downstream arg format: "update-all" "name>path>download" ...
        update_all = '"update-all"'
        for updict in updatables:
            update_all += " \"{0}>{1}>{2}\"".format(updict['name'],
                                                    updict['path'],
                                                    updict['download'])
        n = len(updatables)
        upd_sib = "s" if n != 1 else ""
        items.append(
            I(title="Update All",
              subtitle="Download {0} update{s}".format(n, s=upd_sib),
              valid=True,
              arg=update_all))
        for up in updatables:
            items.append(
                I(title=up['name'],
                  subtitle=u"v{0}\u2014{1}".format(up['version'],
                                                   up['description']),
                  icon=up['icon'],
                  arg=u"\"update\" \"{0}>{1}>{2}\"".format(
                      up['name'], up['path'], up['download']),
                  valid=True))
    elif len(q) == 1 and q[0] == "|all|":
        if not len(all_configured):
            alp.feedback(
                I(title="No Compatible Workflows",
                  subtitle="No Alleyoop workflows detected",
                  valid=False))
            return
        for c in all_configured:
            items.append(
                I(title=c['name'],
                  subtitle=u"v{0}\u2014{1}".format(c['version'], c['local_d']),
                  icon=c['icon'],
                  valid=False))
    else:
        if q[0] != "|all|" and q[0] != "|force|":
            if not len(results):
                alp.log("'oop' may be broken.")
                alp.log("len(all_configured)=0")
                alp.log("q='{0}'".format(q))
                alp.log("updatables='{0}'".format(updatables))
                alp.log("results='{0}'".format(results))
                alp.feedback(
                    I(title="Error",
                      subtitle=
                      "No compatible workflows were found. See debug.log for info.",
                      valid=False))
                return
            search = q[0]
            results = alp.fuzzy_search(
                search,
                updatables,
                # Fixed: updatable dicts have 'description', not 'local_d'.
                key=lambda x: u"{0} - {1}".format(x['name'], x['description']))
            for r in results:
                items.append(
                    I(title=r['name'],
                      subtitle=u"v{0}\u2014{1}".format(r['version'],
                                                       r['description']),
                      icon=r['icon'],
                      # Fixed: closing \" after {2} was missing.
                      arg="\"update\" \"{0}>{1}>{2}\"".format(
                          r['name'], r['path'], r['download']),
                      valid=True))
        else:
            if not len(all_configured):
                alp.log("'oop!' may be broken.")
                alp.log("len(all_configured)=0")
                alp.log("q='{0}'".format(q))
                alp.log("updatables='{0}'".format(updatables))
                alp.log("results='{0}'".format(results))
                alp.feedback(
                    I(title="Error",
                      subtitle=
                      "No compatible workflows were found. See debug.log for info.",
                      valid=False))
                return
            search = q[1]
            results = alp.fuzzy_search(
                search,
                all_configured,
                key=lambda x: u"{0} - {1}".format(x['name'], x['local_d']))
            for r in results:
                items.append(
                    I(title=r['name'],
                      subtitle=u"v{0}\u2014{1}".format(r['version'],
                                                       r['local_d']),
                      valid=False))
    if not len(items):
        items.append(
            I(title="No Results",
              subtitle="Your query did not return any results.",
              valid=False))
    alp.feedback(items)
import alp try: path = alp.args()[0] if path == "auto": path = "" s = alp.Settings() s.set(**{"budget_path": path}) if path == "": print "YNAB budget path set to automatic" else: print "YNAB budget path set to %s" % path except Exception, e: alp.log("Oh no, an exception while saving configuration:", e)
def do_action():
    """Perform or undo a quickfile action taken from alp.args().

    Argument shapes:
      ["undo", action, arg]            -- revert a previous action
      [action, arg]                    -- perform an action
      [action, arg, "open"]            -- perform, then open the result
    Actions: "add"/"path" (remember a file/path), "touch" (create in the
    frontmost Finder folder), "at" (create at an explicit path).
    """
    args = alp.args()
    storedFiles = alp.jsonLoad("files.json", default=[])
    storedPaths = alp.jsonLoad("paths.json", default=[])
    if args[0] == "undo":
        action = args[1]
        arg = args[2]
        if action == "add":
            storedFiles.remove(arg)
            alp.jsonDump(storedFiles, "files.json")
            print("Forgot {0}".format(arg))
        elif action == "touch":
            # Ask Finder for its frontmost folder; output carries a trailing newline.
            the_dir = subprocess.check_output(["osascript", "getFinder.applescript"])
            # BUG FIX: was the_dir[:-1], which eats a real path character
            # whenever the output lacks the trailing newline.
            the_dir = the_dir.rstrip("\n")
            the_file = os.path.join(the_dir, arg)
            try:
                os.remove(the_file)
            except Exception as e:
                alp.log("Failed to delete: {0}".format(e))
                print("Failed to delete: {0}".format(e))
            else:
                # CONSISTENCY FIX: report success like the "at" undo branch does.
                print("Deleted {0}".format(arg))
        elif action == "path":
            storedPaths.remove(arg)
            alp.jsonDump(storedPaths, "paths.json")
            print("Forgot {0}".format(arg))
        elif action == "at":
            try:
                os.remove(arg)
            except Exception as e:
                alp.log("Failed to delete: {0}".format(e))
                print("Failed to delete: {0}".format(e))
            else:
                print("Deleted {0}".format(arg))
    else:
        action = args[0]
        arg = args[1]
        if action == "add":
            storedFiles.append(arg)
            alp.jsonDump(storedFiles, "files.json")
        elif action == "path":
            storedPaths.append(arg)
            alp.jsonDump(storedPaths, "paths.json")
        elif action == "touch":
            the_dir = subprocess.check_output(["osascript", "getFinder.applescript"])
            the_dir = the_dir.rstrip("\n")  # see BUG FIX note above
            target = os.path.join(the_dir, arg)
            subprocess.call(["touch", target])
            if len(args) == 3 and args[2] == "open":
                subprocess.call(["open", target])
        elif action == "at":
            subprocess.call(["touch", arg])
            if len(args) == 3 and args[2] == "open":
                subprocess.call(["open", arg])
        else:
            # NOTE(review): reached only for unknown actions; probably intended
            # to confirm touch/at creations -- preserved as-is, verify intent.
            print("Created {0}".format(arg))
def handle_error(title, subtitle, icon="icon-no.png", debug=""):
    """Surface an error to the user as a single Alfred item, log it, and exit.

    title/subtitle: shown in the Alfred feedback row.
    icon: feedback icon file (defaults to the error icon).
    debug: extra detail appended to the log entry only.
    """
    alp.feedback(alp.Item(title=title, subtitle=subtitle, icon=icon))
    log_entry = "Handled error: %s, %s\n%s" % (title, subtitle, debug)
    alp.log(log_entry)
    sys.exit(0)
# NOTE(review): stray fragment -- this `return` belongs to a function
# (presumably startTimer) whose definition begins before this chunk;
# indentation reconstructed from the flattened paste.
    return 'New Timer Started at %s' % datetime.utcnow().isoformat()


def executeFunction(args):
    # Dispatch the subcommand encoded in Alfred's argv: args[2] selects the
    # action; any unrecognized value is treated as a new timer's description.
    cmd = args[2]
    if cmd == 'start':
        return startTimer(' '.join(args[3:]))
    elif cmd == 'token':
        settings.set(token=args[3])
        return 'Token has been set to %s' % args[3]
    elif cmd == 'stop':
        return stopTimer()
    else:
        return startTimer(' '.join(args[2:]))


# Top-level script: route sys.argv[1] to the matching Alfred feedback item.
alp.log(sys.argv)
if sys.argv[1] not in ACTION_STRINGS:
    print alp.feedback(ACTIONS)
else:
    if sys.argv[1] == 'start':
        # valid only when a timer description was actually supplied
        item = alp.Item(title='Start Timer \'%s\'' % ' '.join(sys.argv[2:]),
                        subtitle='Start a new Toggl Timer',
                        valid=(len(sys.argv) > 2),
                        arg='start %s' % ' '.join(sys.argv[2:]))
        print alp.feedback([item])
    elif sys.argv[1] == 'stop':
        item = alp.Item(title='Stop Timer',
                        subtitle='Stop the current Toggl Timer',
                        valid=True,
                        autocomplete='stop',
                        arg='stop')
        print alp.feedback([item])
    elif sys.argv[1] == 'token':
        # valid requires exactly one token argument
        item = alp.Item(title='Set Token to \'%s\'' % ' '.join(sys.argv[2:]),
                        subtitle='Set your Toggl Token',
                        valid=(len(sys.argv) == 3),
                        arg='token %s' % ' '.join(sys.argv[2:]))
        print alp.feedback([item])
    elif sys.argv[1] == 'timers':
        print fetchTimers()
    elif sys.argv[1] == 'execute':
        # NOTE(review): chunk truncated here -- the 'execute' branch body
        # continues beyond this view.
import traceback

import alp


def main():
    """Save the hub id given as Alfred's first argument into config.json."""
    alp_args = alp.args()
    alp.log(alp_args)
    try:
        alp.jsonDump(dict(hubid=alp_args[0]), alp.storage('config.json'))
        alp.log('Setting json')
        # Read the file back so the log shows what was actually written.
        alp.log(alp.jsonLoad(alp.storage('config.json')))
    except Exception:
        alp.log('Unable to save your configuration. Please try again.')
        alp.log(traceback.format_exc())
        # IDIOM FIX: bare raise re-raises with the original traceback;
        # `raise e` rebinds/loses it.  (Dead trailing `return` removed.)
        raise


if __name__ == "__main__":
    try:
        main()
    except Exception:
        alp.log(traceback.format_exc())
        raise
def do_action():
    """Execute or undo a quickfile action described by alp.args()."""
    args = alp.args()
    files = alp.jsonLoad("files.json", default=[])
    paths = alp.jsonLoad("paths.json", default=[])

    def _finder_dir():
        # Frontmost Finder folder; drop the trailing newline character.
        out = subprocess.check_output(["osascript", "getFinder.applescript"])
        return out[:-1]

    def _try_remove(victim):
        # Delete a file, logging and reporting failure; True on success.
        try:
            os.remove(victim)
        except Exception as e:
            alp.log("Failed to delete: {0}".format(e))
            print("Failed to delete: {0}".format(e))
            return False
        return True

    if args[0] == "undo":
        action, arg = args[1], args[2]
        if action == "add":
            files.remove(arg)
            alp.jsonDump(files, "files.json")
            print("Forgot {0}".format(arg))
        elif action == "touch":
            _try_remove(os.path.join(_finder_dir(), arg))
        elif action == "path":
            paths.remove(arg)
            alp.jsonDump(paths, "paths.json")
            print("Forgot {0}".format(arg))
        elif action == "at":
            if _try_remove(arg):
                print("Deleted {0}".format(arg))
    else:
        action, arg = args[0], args[1]
        wants_open = len(args) == 3 and args[2] == "open"
        if action == "add":
            files.append(arg)
            alp.jsonDump(files, "files.json")
        elif action == "path":
            paths.append(arg)
            alp.jsonDump(paths, "paths.json")
        elif action == "touch":
            target = os.path.join(_finder_dir(), arg)
            subprocess.call(["touch", target])
            if wants_open:
                subprocess.call(["open", target])
        elif action == "at":
            subprocess.call(["touch", arg])
            if wants_open:
                subprocess.call(["open", arg])
        else:
            print("Created {0}".format(arg))
def executeFunction(args):
    # Dispatch the subcommand found in args[2]; any unrecognized value is
    # taken as the description for a new timer.
    cmd = args[2]
    if cmd == 'start':
        return startTimer(' '.join(args[3:]))
    elif cmd == 'token':
        settings.set(token=args[3])
        return 'Token has been set to %s' % args[3]
    elif cmd == 'stop':
        return stopTimer()
    else:
        return startTimer(' '.join(args[2:]))


# Top-level routing on sys.argv[1].
# NOTE(review): this chunk is truncated -- it ends right after building the
# 'stop' item, before that item is emitted.
alp.log(sys.argv)
if sys.argv[1] not in ACTION_STRINGS:
    print alp.feedback(ACTIONS)
else:
    if sys.argv[1] == 'start':
        # valid only when a timer description was supplied
        item = alp.Item(title='Start Timer \'%s\'' % ' '.join(sys.argv[2:]),
                        subtitle='Start a new Toggl Timer',
                        valid=(len(sys.argv) > 2),
                        arg='start %s' % ' '.join(sys.argv[2:]))
        print alp.feedback([item])
    elif sys.argv[1] == 'stop':
        item = alp.Item(title='Stop Timer',
                        subtitle='Stop the current Toggl Timer',
                        valid=True,
                        autocomplete='stop',
                        arg='stop')