def act(): args = alp.args() if args[0] == "configure": url = args[1] sig = args[2] s = alp.Settings() s_dict = {"url": url} s.set(**s_dict) kc = alp.Keychain("yourls_stats") if not kc.retrievePassword("signature"): kc.storePassword("signature", sig) else: kc.modifyPassword("signature", sig) info = alp.readPlist(alp.local("info.plist")) objs = info['objects'] kw = None for o in objs: if o['type'] == "alfred.workflow.input.scriptfilter": kw = o['config']['keyword'] break if kw: scpt = "tell application \"Alfred 2\" to search \"{0}\"".format(kw) call(["osascript", "-e", scpt]) print "Yourls Configured" elif args[0] == "copy": print args[1]
def save(tags): conn = sqlite3.connect(alp.local(join=DB)) c = conn.cursor() url = subprocess.check_output('pbpaste') # existed c.execute('SELECT * FROM urls WHERE url = ?', (url, )) row = c.fetchone() url_id = None if not row: # fetch favicon google_favicon_service = 'http://www.google.com/s2/favicons?%s' params = urllib.urlencode({'domain_url': url}) icon = "" try: icon = urllib.urlopen(google_favicon_service % params).read() except: pass c.execute('INSERT INTO urls VALUES (NULL, ?, ?)', (url, sqlite3.Binary(icon))) url_id = c.lastrowid else: url_id = row[0] for tag in tags: c.execute('INSERT INTO tags VALUES (NULL, ?, ?)', (url_id, tag)) conn.commit() conn.close() # for notification print url
def __init__(self):
    """Emit Alfred feedback: hub routes matching the query, or a prompt
    to set/reset the HubId when none is configured (or a numeric
    argument is given)."""
    routes = alp.jsonLoad(alp.local('routes.json'), [])
    try:
        config = alp.jsonLoad(alp.storage('config.json'))
    except Exception:
        config = {}
        # NOTE(review): the collapsed source makes it ambiguous whether
        # this default-config write belongs to the except branch or runs
        # unconditionally; placed here (first-run/error path only) --
        # confirm against the original layout.
        alp.jsonDump(config, alp.storage('config.json'))
    self.hubid = config.get('hubid')
    alp_args = alp.args()
    args_len = len(alp_args)
    if args_len > 0:
        # Allow resetting HubId.
        config_mode = alp_args[0].isdigit()
        if self.hubid is None or config_mode:
            hubid = alp_args[0]
            return alp.feedback(alp.Item(
                title='Press Ctrl + Enter to set your HubId to %s' % hubid,
                arg=hubid, uid=hubid))
        # Substring match against title and (optional) description.
        search = alp_args[0].lower()
        routes = filter(lambda route: search in route.get('title').lower()
                        or search in route.get('description', '').lower(),
                        routes)
    elif self.hubid is None:
        # No query and no HubId yet: show the configuration item.
        return alp.feedback([config_item()])
    items = map(self.build_item, routes)
    return alp.feedback(items)
def do_feedback():
    """Collect sibling workflows carrying an update.json manifest
    (cached for one day) and spawn a thread per workflow to check its
    remote version."""
    query = alp.args()
    # The parent of this workflow's folder is Alfred's workflows root.
    workflows_root = os.path.split(alp.local())[0]
    cache = alp.jsonLoad("cache.json", default={})
    one_day = 24 * 60 * 60
    # "|force|" as the first argument bypasses and clears the cache.
    force = len(query) > 0 and query[0] == "|force|"
    now = time.time()
    if force:
        import shutil
        shutil.rmtree(alp.cache())
        shutil.rmtree(alp.storage())
    if not force and cache.get("cache_time", 0) + one_day > now:
        # Cache still fresh -- reuse the previously scanned list.
        candidates = cache.get("cached_workflows", [])
    else:
        candidates = []
        walker = os.walk(workflows_root, topdown=False, followlinks=True)
        for dirpath, dirnames, filenames in walker:
            for filename in filenames:
                if filename != "update.json":
                    continue
                try:
                    # Plist capitalization varies between workflows.
                    plist_path = os.path.join(dirpath, "Info.plist")
                    if not os.path.exists(plist_path):
                        plist_path = os.path.join(dirpath, "info.plist")
                    plist = alp.readPlist(plist_path)
                except IOError as e:
                    alp.log("Exception: Info.plist not found ({0}).".format(e))
                    continue
                name = plist["name"]
                local_description = plist["description"]
                manifest = os.path.join(dirpath, filename)
                icon = os.path.join(dirpath, "icon.png")
                # Alleyoop cannot replace itself in place; its updates
                # are dropped into ~/Downloads instead.
                if name == "Alleyoop":
                    target_path = os.path.expanduser("~/Downloads/")
                else:
                    target_path = dirpath
                candidates.append(dict(name=name, json=manifest, icon=icon,
                                       path=target_path,
                                       description=local_description))
        alp.jsonDump(dict(cache_time=now, cached_workflows=candidates),
                     "cache.json")
    threads = []
    for candidict in candidates:
        try:
            with codecs.open(candidict["json"]) as f:
                local = json.load(f, encoding="utf-8")
        except Exception as e:
            alp.log("{0} may no longer exist: {1}".format(candidict["name"], e))
            continue
        checker = OopThread(local['remote_json'], force, candidict, local)
        threads.append(checker)
        checker.start()
    manage_threads(threads)
def main():
    """Alfred entry point: '>' prefixed queries list/search the built-in
    commands; anything else fuzzy-searches the people list."""
    cmds = {
        'init': 'initialize the DB',
        'readme': 'show the project\'s README'
    }
    argv = sys_argv
    argc = len(argv)
    items = []
    if argc > 1:
        # Alfred hands the whole query over as a single argument; split
        # it into words so argv looks like a normal command line.
        argv = argv[:1] + argv[1].split()
        argc = len(argv)
    # BUGFIX: guard argc again after the split -- a blank/whitespace-only
    # query splits to nothing, and the original unconditionally read
    # argv[1], raising IndexError.
    if argc > 1 and argv[1] == '>':
        if argc > 2:
            # Fuzzy-match the partial command name.
            for c in alp.fuzzy_search(argv[2], cmds.keys()):
                items.append(Item(title=cmd_title(c), autocomplete='> ' + c,
                                  subtitle=cmds[c], valid=True,
                                  arg=cmd_arg(c)))
        else:
            # Bare '>' lists every command.
            for c, st in cmds.iteritems():
                items.append(Item(title=cmd_title(c), autocomplete='> ' + c,
                                  subtitle=st, valid=True, arg=cmd_arg(c)))
        alp.feedback(items)
        return
    li = get_list()
    # Every person needs a 'fuzzy' key for the search below.
    for p in li:
        if 'fuzzy' not in p:
            p['fuzzy'] = p['name']
    # Transliterate the query to ASCII so it can match ASCII-folded names.
    u2ascii = lambda s: unidecode(s.decode('utf-8'))
    qry = ' '.join(map(u2ascii, argv[1:])).strip()
    ppl = alp.fuzzy_search(qry, li, lambda x: x['fuzzy'])
    for p in ppl:
        kw = {}
        kw['title'] = p['name']
        kw['autocomplete'] = p['name']
        kw['subtitle'] = p['info'] if 'info' in p else ''
        if 'url' in p:
            kw['valid'] = True
            kw['arg'] = url_arg(p['url'])
        else:
            # No URL: the item is informational only.
            kw['valid'] = False
        if 'icon' in p:
            kw['icon'] = alp.local('icons/' + p['icon'])
        items.append(Item(**kw))
    alp.feedback(items)
def init_db():
    """Create the `tags` and `urls` tables in the workflow's database if
    they do not already exist."""
    connection = sqlite3.connect(alp.local(join=DB))
    cursor = connection.cursor()
    # tags: many rows per URL, joined via url_id.
    cursor.execute('''CREATE TABLE IF NOT EXISTS tags (id INTEGER PRIMARY KEY, url_id NUMERIC, tag TEXT);''')
    # urls: one row per unique URL plus its favicon blob.
    cursor.execute('''CREATE TABLE IF NOT EXISTS urls (id INTEGER PRIMARY KEY, url TEXT UNIQUE, icon BLOB);''')
    connection.commit()
    connection.close()
def do_feedback():
    """Offer one Alfred item per Finder label color for the path given
    as the first workflow argument."""
    path = alp.args()[0]
    results = []
    for index, color in COLORS.items():
        icon_file = os.path.join(alp.local(), "icons", "{0}.png".format(color))
        # arg carries the target path and the numeric label index, both
        # quoted for the downstream shell/AppleScript step.
        entry = I(title=color,
                  subtitle=u"Set Finder label to {0}".format(color),
                  icon=icon_file,
                  valid=True,
                  arg=u"\"{0}\" \"{1}\"".format(path, index))
        results.append(entry)
    alp.feedback(results)
def main():
    """Filter routes.json by the user's query and emit Alfred feedback.

    With no query every route is listed; otherwise a route matches when
    the query is a case-insensitive substring of its title or
    description.
    """
    routes = alp.jsonLoad(alp.local('routes.json'), [])
    alp_args = alp.args()
    if len(alp_args) > 0:
        # Reuse the already-fetched args instead of calling alp.args()
        # a second time.
        search = alp_args[0].lower()
        # BUGFIX: default 'title' to '' like 'description' -- a route
        # missing its title previously crashed with AttributeError
        # (None.lower()).
        routes = filter(lambda route: search in route.get('title', '').lower()
                        or search in route.get('description', '').lower(),
                        routes)
    items = map(item, routes)
    return alp.feedback(items)
def parse_others():
    """ Return a list of manually-added people, each guaranteed to carry
    a 'fuzzy' search key """
    alp.log('parsing others')
    people = alp.jsonLoad(alp.local('others.json'), [])
    for person in people:
        # Only compute the fuzzy key when it is missing.
        if 'fuzzy' not in person:
            person['fuzzy'] = mk_fuzzy(person)
    alp.log('done others')
    return people
def main():
    """Emit Alfred feedback for the routes matching the user's query
    (all routes when there is no query)."""
    routes = alp.jsonLoad(alp.local('routes.json'), [])
    alp_args = alp.args()
    if len(alp_args) > 0:
        needle = alp.args()[0].lower()
        # Keep a route when the query appears in its title or
        # description (case-insensitive).
        matching = []
        for route in routes:
            in_title = needle in route.get('title').lower()
            in_description = needle in route.get('description', '').lower()
            if in_title or in_description:
                matching.append(route)
        routes = matching
    items = map(item, routes)
    return alp.feedback(items)
def search(tags):
    """Emit Alfred items for stored URLs whose tags match any of *tags*.

    Each tag is matched as a substring (SQL LIKE).  Favicons are
    materialized into an on-disk cache keyed by the blob's SHA-224 so
    Alfred can display them.
    """
    conn = sqlite3.connect(alp.local(join=DB))
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    rows = []
    for tag in tags:
        # DISTINCT because a URL can carry several matching tags.
        c.execute(''' SELECT DISTINCT urls.* FROM urls JOIN tags ON tags.url_id = urls.id WHERE tags.tag LIKE ? ''', ('%'+tag+'%', ))
        rows += c.fetchall()
    items = []
    for row in rows:
        icon = row['icon']
        sha224 = hashlib.sha224(icon).hexdigest()
        icon_path = alp.local(join=os.path.join('icon_cache', sha224))
        if not os.path.exists(icon_path):
            # BUGFIX: the favicon is binary PNG data -- write it in
            # binary mode ('wb'); text mode ('w') could mangle the bytes.
            with open(icon_path, 'wb') as f:
                f.write(icon)
        c.execute('SELECT * FROM tags WHERE url_id = ?', (row['id'],))
        url_tags = c.fetchall()
        item = alp.Item(
            title=row['url'],
            subtitle=" ".join(map(lambda tag: tag['tag'], url_tags)),
            valid=True,
            icon=icon_path,
            arg=row['url']
        )
        items.append(item)
    alp.feedback(items)
def do_feedback():
    """List every Finder label color as a selectable Alfred item for the
    path passed as the first argument."""
    target = alp.args()[0]
    icons_dir = os.path.join(alp.local(), "icons")
    # One item per color; arg packs the quoted path and label index for
    # the downstream action.
    feedback = [
        I(title=color,
          subtitle=u"Set Finder label to {0}".format(color),
          icon=os.path.join(icons_dir, "{0}.png".format(color)),
          valid=True,
          arg=u"\"{0}\" \"{1}\"".format(target, n))
        for n, color in COLORS.items()
    ]
    alp.feedback(feedback)
def _create_light_icon(self, lid, light_data):
    """Creates a 1x1 PNG icon of light's RGB color and saves it to the
    local dir.

    lid        -- light id, used as the icon filename.
    light_data -- Hue API light dict; reads state.xy and state.bri.
    """
    # Create a color converter & helper
    converter = rgb_cie.Converter()
    color_helper = rgb_cie.ColorHelper()
    # Convert the light's CIE xy chromaticity + brightness (0-255,
    # normalized here) into a hex RGB color.
    hex_color = converter.xyToHEX(
        light_data['state']['xy'][0],
        light_data['state']['xy'][1],
        float(light_data['state']['bri']) / 255
    )
    # BUGFIX: use a context manager so the handle is closed even if the
    # PNG writer raises; also write binary PNG data with the same 'wb'
    # mode as before.
    with open(alp.local('icons/%s.png' % lid), 'wb') as f:
        w = png.Writer(1, 1)
        w.write(f, [color_helper.hexToRGB(hex_color)])
def udpateDatabaseFromKippt():
    """ Update the SQlite database with all clips from Kippt.

    Replaces the entire Clips table with the current remote clip list.
    Does nothing when the clip fetch fails (returns None).
    """
    global settings
    # Check if database exists; if not, create it.
    if not os.path.exists(os.path.join(alp.local(), settings["sqliteDB"])):
        createDatabase()
    clips = readAllClips()
    if clips is None:  # `is None`, not `== None` (identity check)
        return
    conn = lite.connect(settings["sqliteDB"])
    # The connection context manager commits on success, rolls back on error.
    with conn:
        cur = conn.cursor()
        # Remove old clips
        cur.execute("DELETE FROM Clips")
        # BUGFIX: insert via parameterized executemany instead of
        # string-built SQL -- titles/notes containing a double quote
        # previously produced broken statements (and the interpolation
        # was SQL-injectable).  The subtitle column intentionally
        # duplicates the title, as before.
        cur.executemany(
            "INSERT INTO Clips VALUES(NULL, ?, ?, ?, ?, ?)",
            [(clip["title"], clip["title"], clip["url"],
              clip["app_url"], clip["notes"]) for clip in clips])
def do_feedback():
    """Scan sibling workflows for update.json manifests, compare each
    local version against its remote JSON, and emit Alfred feedback:
    no query -> available updates (+ "Update All"); query "all" -> every
    configured workflow; otherwise fuzzy-search one of those lists."""
    # Parent of this workflow's directory == Alfred's workflows root.
    flowPath = os.path.split(alp.local())[0]
    candidates = []
    for dirpath, dirnames, filenames in os.walk(flowPath, topdown=False):
        for aFile in filenames:
            if aFile == "update.json":
                try:
                    # Plist capitalization differs between workflows.
                    fn = os.path.join(dirpath, "Info.plist")
                    if not os.path.exists(fn):
                        fn = os.path.join(dirpath, "info.plist")
                    with open(fn) as f:
                        plist = plistlib.readPlist(f)
                except IOError as e:
                    alp.log("Exception: Info.plist not found ({0}).".format(e))
                    continue
                else:
                    name = plist["name"]
                    local_description = plist["description"]
                    the_json = os.path.join(dirpath, aFile)
                    the_icon = os.path.join(dirpath, "icon.png")
                    if name != "Alleyoop":
                        candidates.append(
                            dict(name=name, json=the_json, icon=the_icon,
                                 path=dirpath,
                                 description=local_description))
                    else:
                        # Alleyoop cannot replace itself in place; its
                        # update goes to ~/Downloads instead.
                        downloads_path = os.path.expanduser("~/Downloads/")
                        candidates.append(
                            dict(name=name, json=the_json, icon=the_icon,
                                 path=downloads_path,
                                 description=local_description))
    updatables = []
    all_configured = []
    for candidict in candidates:
        with open(candidict["json"]) as f:
            local = json.load(f, encoding="utf-8")
        try:
            remote_uri = local["remote_json"]
            local_version = float(local["version"])
            local_description = candidict["description"]
        except Exception as e:
            alp.log("{0} failed on key: {1}.".format(candidict["name"], e))
            continue
        r = alp.Request(remote_uri)
        remote = r.request.json
        if not remote:
            alp.log("{0} failed to load remote JSON.".format(
                candidict["name"]))
            continue
        should_add = True
        try:
            version = float(remote["version"])
            download_uri = remote["download_url"]
            description = remote["description"]
        except Exception as e:
            alp.log("{0} failed with error: {1}".format(candidict["name"], e))
            should_add = False
            continue
        if should_add and local_version < version:
            updatables.append(
                dict(name=candidict["name"], description=description,
                     icon=candidict["icon"], download=download_uri,
                     path=candidict["path"], version=version))
        # NOTE(review): all_configured is appended for every
        # successfully-parsed workflow (not only updatables), matching
        # its use in the "all" listing below -- confirm against the
        # original formatting, which is ambiguous here.
        all_configured.append(
            dict(name=candidict["name"], description=description,
                 icon=candidict["icon"], download=download_uri,
                 path=candidict["path"], version=version,
                 local_d=local_description))
    q = alp.args()
    items = []
    if not len(q):
        if not len(updatables):
            alp.feedback(
                I(title="No Updates Available",
                  subtitle="All your workflows are up-to-date.",
                  valid=False))
            return
        # Build the single argument string for "update everything".
        update_all = '"update-all"'
        for updict in updatables:
            update_all += " \"{0}>{1}>{2}\"".format(updict["name"],
                                                    updict["path"],
                                                    updict["download"])
        n = len(updatables)
        # Pluralize "update" correctly.
        upd_sib = "s" if len(updatables) != 1 else ""
        items.append(
            I(title="Update All",
              subtitle="Download and install {0} update{1}".format(n, upd_sib),
              valid=True, arg=update_all))
        for updict in updatables:
            items.append(
                I(title=updict["name"],
                  subtitle=u"v{0}\u2014{1}".format(updict["version"],
                                                   updict["description"]),
                  icon=updict["icon"],
                  arg="\"update\" \"{0}>{1}>{2}\"".format(
                      updict["name"], updict["path"], updict["download"]),
                  valid=True))
    elif len(q) == 1 and q[0] == "all":
        # Plain listing of everything configured; items not actionable.
        for configured in all_configured:
            items.append(
                I(title=configured["name"],
                  subtitle=u"v{0}\u2014{1}".format(configured["version"],
                                                   configured["local_d"]),
                  icon=configured["icon"],
                  valid=False))
    else:
        if q[0] != "all":
            # Fuzzy-search only the updatable workflows.
            search = q[0]
            results = alp.fuzzy_search(
                search, updatables,
                key=lambda x: "{0} - {1}".format(x["name"], x["description"]))
            for result in results:
                items.append(
                    I(title=result["name"],
                      subtitle=u"v{0}\u2014{1}".format(result["version"],
                                                       result["description"]),
                      icon=result["icon"],
                      arg="\"update\" \"{0}>{1}>{2}\"".format(
                          result["name"], result["path"],
                          result["download"]),
                      valid=True))
        else:
            # "all <query>": fuzzy-search every configured workflow.
            search = q[1]
            results = alp.fuzzy_search(
                search, all_configured,
                key=lambda x: "{0} - {1}".format(x["name"], x["local_d"]))
            for result in results:
                items.append(
                    I(title=result["name"],
                      subtitle=u"v{0}\u2014{1}".format(result["version"],
                                                       result["local_d"]),
                      icon=result["icon"],
                      arg="{0} up-to-date.".format(result["name"]),
                      valid=False))
    alp.feedback(items)
def do_feedback():
    """Gather workflows carrying an update.json manifest (cached for one
    day) and spawn an OopThread per workflow to check its remote
    version; manage_threads collects the results."""
    q = alp.args()
    # Parent of this workflow's directory == Alfred's workflows root.
    flowPath = os.path.split(alp.local())[0]
    cache = alp.jsonLoad("cache.json", default={})
    day_secs = 24 * 60 * 60
    # "|force|" as the first argument bypasses and clears the cache.
    force = (len(q) > 0 and q[0] == "|force|")
    t = time.time()
    if (force):
        import shutil
        _c = alp.cache()
        _s = alp.storage()
        shutil.rmtree(_c)
        shutil.rmtree(_s)
    if (cache.get("cache_time", 0) + day_secs > t) and not force:
        # Cache still fresh: reuse the previously scanned list.
        candidates = cache.get("cached_workflows", [])
    else:
        candidates = []
        for dirpath, dirnames, filenames in os.walk(flowPath, topdown=False,
                                                    followlinks=True):
            for aFile in filenames:
                if aFile == "update.json":
                    try:
                        # Plist capitalization differs between workflows.
                        fn = os.path.join(dirpath, "Info.plist")
                        if not os.path.exists(fn):
                            fn = os.path.join(dirpath, "info.plist")
                        plist = alp.readPlist(fn)
                    except IOError as e:
                        alp.log(
                            "Exception: Info.plist not found ({0}).".format(e))
                        continue
                    else:
                        name = plist["name"]
                        local_description = plist["description"]
                        the_json = os.path.join(dirpath, aFile)
                        the_icon = os.path.join(dirpath, "icon.png")
                        if name != "Alleyoop":
                            candidates.append(
                                dict(name=name, json=the_json,
                                     icon=the_icon, path=dirpath,
                                     description=local_description))
                        else:
                            # Alleyoop replaces itself via ~/Downloads.
                            downloads_path = os.path.expanduser("~/Downloads/")
                            candidates.append(
                                dict(name=name, json=the_json,
                                     icon=the_icon, path=downloads_path,
                                     description=local_description))
        # NOTE(review): cache write placed on the rebuild path only --
        # the collapsed source is ambiguous here; confirm it is not
        # meant to run on every invocation.
        new_cache = dict(cache_time=t, cached_workflows=candidates)
        alp.jsonDump(new_cache, "cache.json")
    threads = []
    for candidict in candidates:
        try:
            with codecs.open(candidict["json"]) as f:
                local = json.load(f, encoding="utf-8")
        except Exception as e:
            alp.log("{0} may no longer exist: {1}".format(
                candidict["name"], e))
            continue
        # One background thread per workflow's remote check.
        ot = OopThread(local['remote_json'], force, candidict, local)
        threads.append(ot)
        ot.start()
    manage_threads(threads)
from kippt.kippt import Kippt import alp import os import sqlite3 as lite import utils ## # Constants ## SETTINGS_FILE = "settings.plist" SETTINGS_PATH = os.path.join(alp.local(), SETTINGS_FILE) DEFAULT_SETTINGS = dict(username="", sqliteDB="kippt.sqlite", lastUpdate="", updateClips="false", firstRun="true", credentialsChanged="false", passwordSet="false") # Global settings object settings = None def createSettingsFile(): """ Creates the default settings file, if it doesn't exists. """ # Check if the settings file exists try: with open(SETTINGS_PATH):
def do_feedback():
    """Alfred feedback for Things: a query starting with '+' offers an
    add-task item; otherwise list (or fuzzy-filter) areas, projects and
    tasks from the Things dump."""
    q = alp.args()
    # A '+' within the first two characters means "add a task".
    if len(q) and "+" in q[0][0:2]:
        to_add = q[0].split("+", 1)[1]
        # Escape embedded quotes for the AppleScript argument string.
        if "\"" in to_add:
            to_add = to_add.replace("\"", "\\\"")
        alp.feedback(
            I(title="Add Task", subtitle=to_add,
              arg=u"parse \"{0}\"".format(to_add), valid=True))
        return
    things_data = do_things_dump()
    # Fallback icons per item kind.
    icons = {"list": os.path.join(alp.local(), "images", "area.png"), \
             "project": os.path.join(alp.local(), "images", "project.png"), \
             "task": os.path.join(alp.local(), "images", "check.png")}
    items = []
    if not len(q) or q[0] == "":
        # No query: list everything.
        for area in things_data["list"]:
            # Built-in lists may ship a matching bundled icon (named by
            # the ASCII-stripped list name).
            de_area = area.encode("ascii", "ignore")
            icon = os.path.join(alp.local(), "images",
                                "{0}.png".format(de_area))
            if not os.path.exists(icon):
                icon = icons["list"]
            if "\"" in area:
                clean = area.replace("\"", "\\\"")
            else:
                clean = area
            items.append(
                I(title=area, arg=u"show list \"{0}\"".format(clean),
                  icon=icon, uid=area, valid=True))
        for project in things_data["project"]:
            if "\"" in project:
                clean = project.replace("\"", "\\\"")
            else:
                clean = project
            items.append(
                I(title=project, arg=u"show project \"{0}\"".format(clean),
                  icon=icons["project"], uid=project, valid=True))
        for task in things_data["task"]:
            if task:
                if "\"" in task:
                    clean = task.replace("\"", "\\\"")
                else:
                    clean = task
                # First ASCII word of the task keeps the uid short.
                split_task = task.encode("ascii", "ignore").split(" ")[0]
                items.append(
                    I(title=task,
                      arg=u"show \"to do\" \"{0}\"".format(clean),
                      icon=icons["task"],
                      uid=u"task-{0}".format(split_task), valid=True))
    else:
        # NOTE: q is rebound from the argument list to the query string.
        q = q[0]
        items.append(
            I(title="Add Task", subtitle=q,
              arg=u"parse \"{0}\"".format(q), valid=True))
        for k, v in things_data.iteritems():
            things = alp.fuzzy_search(q, v)
            for thing in things:
                uid = k if k != "task" else u"task-{0}".format(
                    thing.split(" ")[0])
                # Built-in list names get their bundled icon.
                icon = icons[k] if not thing in ["Inbox", "Logbook", "Next", \
                    "Projects", "Scheduled", "Someday", "Today"] \
                    else os.path.join(alp.local(), "images",
                                      "{0}.png".format(thing))
                arg = k if k != "task" else "to do"
                if "\"" in thing:
                    clean_thing = thing.replace("\"", "\\\"")
                else:
                    clean_thing = thing
                items.append(
                    I(title=thing,
                      arg=u"show \"{0}\" \"{1}\"".format(arg, clean_thing),
                      icon=icon, uid=uid, valid=True))
    alp.feedback(items)
from kippt.kippt import Kippt import alp import os import sqlite3 as lite import utils ## # Constants ## SETTINGS_FILE = "settings.plist" SETTINGS_PATH = os.path.join(alp.local(), SETTINGS_FILE) DEFAULT_SETTINGS = dict( username="", sqliteDB="kippt.sqlite", lastUpdate="", updateClips="false", firstRun="true", credentialsChanged="false", passwordSet="false" ) # Global settings object settings = None def createSettingsFile(): """ Creates the default settings file, if it doesn't exists. """ # Check if the settings file exists
def do_feedback():
    """Things workflow feedback: '+text' proposes adding a task; an
    empty query lists all areas/projects/tasks; any other query
    fuzzy-filters each category."""
    q = alp.args()
    # '+' within the first two characters triggers the add-task path.
    if len(q) and "+" in q[0][0:2]:
        to_add = q[0].split("+", 1)[1]
        # Double quotes must be escaped for the AppleScript argument.
        if "\"" in to_add:
            to_add = to_add.replace("\"", "\\\"")
        alp.feedback(I(title="Add Task", subtitle=to_add,
                       arg=u"parse \"{0}\"".format(to_add), valid=True))
        return
    things_data = do_things_dump()
    # Default icon per category.
    icons = {"list": os.path.join(alp.local(), "images", "area.png"), \
             "project": os.path.join(alp.local(), "images", "project.png"), \
             "task": os.path.join(alp.local(), "images", "check.png")}
    items = []
    if not len(q) or q[0] == "":
        for area in things_data["list"]:
            # Bundled icons are named after the ASCII-stripped list name.
            de_area = area.encode("ascii", "ignore")
            icon = os.path.join(alp.local(), "images",
                                "{0}.png".format(de_area))
            if not os.path.exists(icon):
                icon = icons["list"]
            if "\"" in area:
                clean = area.replace("\"", "\\\"")
            else:
                clean = area
            items.append(I(title=area,
                           arg=u"show list \"{0}\"".format(clean),
                           icon=icon, uid=area, valid=True))
        for project in things_data["project"]:
            if "\"" in project:
                clean = project.replace("\"", "\\\"")
            else:
                clean = project
            items.append(I(title=project,
                           arg=u"show project \"{0}\"".format(clean),
                           icon=icons["project"], uid=project, valid=True))
        for task in things_data["task"]:
            if task:
                if "\"" in task:
                    clean = task.replace("\"", "\\\"")
                else:
                    clean = task
                # uid derives from the task's first ASCII word.
                split_task = task.encode("ascii", "ignore").split(" ")[0]
                items.append(I(title=task,
                               arg=u"show \"to do\" \"{0}\"".format(clean),
                               icon=icons["task"],
                               uid=u"task-{0}".format(split_task),
                               valid=True))
    else:
        # q becomes the plain query string from here on.
        q = q[0]
        items.append(I(title="Add Task", subtitle=q,
                       arg=u"parse \"{0}\"".format(q), valid=True))
        for k, v in things_data.iteritems():
            things = alp.fuzzy_search(q, v)
            for thing in things:
                uid = k if k != "task" else u"task-{0}".format(
                    thing.split(" ")[0])
                # Built-in lists use their bundled icon; everything else
                # falls back to the category icon.
                icon = icons[k] if not thing in ["Inbox", "Logbook", "Next", \
                    "Projects", "Scheduled", "Someday", "Today"] \
                    else os.path.join(alp.local(), "images",
                                      "{0}.png".format(thing))
                arg = k if k != "task" else "to do"
                if "\"" in thing:
                    clean_thing = thing.replace("\"", "\\\"")
                else:
                    clean_thing = thing
                items.append(I(title=thing,
                               arg=u"show \"{0}\" \"{1}\"".format(
                                   arg, clean_thing),
                               icon=icon, uid=uid, valid=True))
    alp.feedback(items)