Example #1
def delete(arg):
    import json, os, sys, urllib, urllib2, urlparse
    import alfred, main

    config = main.config_data()
    try:
        user = config['pinboard_username']
        token = config['pinboard_token']
    except KeyError:
        print "Setup not complete\npbauth username:token"
        sys.exit(0)

    deleted_url = main.deleted_url_data()
    history = main.launch_history()
    try:
        url = 'https://api.pinboard.in/v1/posts/delete?format=json&auth_token=%s:%s&url=%s' % (user, token, urllib.quote(arg))
        data = urllib2.urlopen(url).read()
        ret = json.loads(data)
        if ret['result_code'] == 'done':
            print "%s deleted" % urlparse.urlparse(arg)[1]
            # remember the deletion so cached results can skip this URL
            deleted_url.append(arg)
            with open(os.path.join(alfred.work(False), 'deleted-url.json'), 'w+') as f:
                json.dump(deleted_url, f)
            # drop the URL from launch history as well
            if arg in history:
                del history[arg]
                with open(os.path.join(alfred.work(False), 'launch-history.json'), 'w+') as f:
                    json.dump(history, f)
        else:
            print ret['result_code']
    except Exception:
        print "Error"
Example #3
def reload():
    # module-level imports assumed: re, os, json, time, urllib, subprocess,
    # feedparser, alfred, main
    imageregex = re.compile(r"img.*src=\"(.*?)\"")
    config = main.config_data()

    d = feedparser.parse('http://macnews.tistory.com/rss')
    items = []
    for e in d.entries:
        try:
            imageurl = imageregex.search(e.description)
            if imageurl:
                url = imageurl.group(1)
                filepath = os.path.join(alfred.work(True), os.path.split(url)[1])
                if not os.path.exists(filepath):
                    urllib.urlretrieve(url, filepath)
                    # scale the cached thumbnail to 72x72 with sips
                    cmd = "sips -z 72 72 '%s'" % filepath
                    subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
                imageurl = filepath
            else:
                imageurl = u"icon.png"
        except Exception:
            imageurl = u"icon.png"
        items.append({'title': e.title, 'published': e.published, 'link': e.link, 'image': imageurl})

    with open(os.path.join(alfred.work(True), 'rss-cache.json'), 'w+') as myFile:
        myFile.write(json.dumps(items))

    config['last_updated'] = int(time.time())
    with open(os.path.join(alfred.work(False), 'config.json'), 'w+') as myFile:
        myFile.write(json.dumps(config))

    print "Reloading BackToTheMac completed..."
Example #4
def makeItem(itemData, itemIdx, itemPos):
    mo = roItem.search(itemData, itemPos)
    if mo is None or mo.lastindex is None:
        return (None, None)

    url = urllib.quote(mo.group(2), ":/&?=")  # .replace(" ", "%20")
    name = parser.unescape(mo.group(3))
    imageUrl = mo.group(4)
    price = parser.unescape(mo.group(5))
    itemPos = mo.end()

    if price == "":
        title = name
    else:
        title = "%s (%s)" % (name, price)
    # subTitle = price
    subTitle = 'View "%s" on Steam' % name

    # append a timestamp so Alfred does not treat repeated results as the same uid
    _uid = str(itemIdx + 1) + "." + str(int(time.time() * 100.0))

    filepath = ""
    if imageUrl and useIcon:  # cache image
        idx = imageUrl.find("=")
        if idx == -1:
            imageFileName = roImageName.sub(replImageName, imageUrl)
        else:
            imageFileName = imageUrl[idx + 1:] + ".jpg"

        filepath = os.path.join(alfred.work(True), imageFileName)
        if not os.path.exists(filepath):
            urllib.urlretrieve(imageUrl, filepath)

    item = alfred.Item(title=title, subtitle=subTitle, attributes={'uid': alfred.uid(_uid), 'arg': url}, icon=filepath)
    return (item, itemPos)
Example #5
def prioritize(id, com):
    # read todo.txt
    path = alfred.work(False) + '/todo.txt'
    with open(path) as f:
        tasks = f.readlines()

    pos = int(id) - 1
    task = tasks[pos]

    if com == 'prioritize':
        # bump the priority one level toward (A); unprioritized tasks start at (A)
        if task[0] == '(' and task[2] == ')':
            if task[1] != 'A':
                tasks[pos] = '(' + chr(ord(task[1]) - 1) + task[2:]
        else:
            tasks[pos] = '(A) ' + task
    else:
        # deprioritize: move one level toward (C); a (C) task loses its priority
        if task[0] == '(' and task[2] == ')':
            if task[1] == 'C':
                tasks[pos] = task[4:]
            else:
                tasks[pos] = '(' + chr(ord(task[1]) + 1) + task[2:]

    with open(path, 'w') as f:
        f.writelines(tasks)
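A quick self-contained check of the cycling logic above; _bump is a hypothetical extraction of the branch logic, not part of the workflow:

def _bump(task, com):
    # prioritize moves C -> B -> A; deprioritize moves A -> B -> C and
    # finally strips the "(C) " prefix entirely
    if com == 'prioritize':
        if task[0] == '(' and task[2] == ')':
            return task if task[1] == 'A' else '(' + chr(ord(task[1]) - 1) + task[2:]
        return '(A) ' + task
    if task[0] == '(' and task[2] == ')':
        return task[4:] if task[1] == 'C' else '(' + chr(ord(task[1]) + 1) + task[2:]
    return task

assert _bump('(B) pay rent', 'prioritize') == '(A) pay rent'
assert _bump('buy milk', 'prioritize') == '(A) buy milk'
assert _bump('(C) water plants', 'deprioritize') == 'water plants'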
Example #6
def config_data():
    try:
        config = json.loads(
            open(os.path.join(alfred.work(False), 'config.json')).read())
    except (IOError, ValueError):
        config = {}
    return config
Example #7
def history_data():
    try:
        return json.loads(
            open(os.path.join(alfred.work(False),
                              'search-history.json')).read())
    except IOError:
        return []
Example #8
def launch_history():
    try:
        return json.loads(
            open(os.path.join(alfred.work(False),
                              'launch-history.json')).read())
    except IOError:
        return {}
Example #9
def update_history(category, q, nums):
    if q == "" or nums == 0: return
    if category != "all" and category != 'tags': return

    # each record: [keyword, query, result_count, timestamp, flag]; the final
    # flag appears to protect an entry from being pruned
    history = history_data()
    now = int(time.time())
    found = None

    # iterate over a copy, since entries are removed from history in the loop
    for h in list(history):
        sh = h[1].replace(' ', '')
        sq = q.replace(' ', '')
        if (sh in sq or sq in sh) and now - h[3] <= UPDATE_BOOKMARK_THRESHOLD:
            if not h[4]: history.remove(h)
        elif sh == sq:
            found = h
        elif now - h[3] > DELETE_OLDBOOKMARK_THRESHOLD:
            if not h[4]: history.remove(h)

    if found:
        found[2:4] = (nums, now)
    else:
        if category == "all":
            history.append(["pba", q, nums, now, False])
        elif category == "tags":
            history.append(["pbtag", q, nums, now, False])

    with open(os.path.join(alfred.work(False), 'search-history.json'),
              'w+') as myFile:
        myFile.write(json.dumps(history))
Example #11
def delete_history(arg):
    import json, os
    import alfred, main

    history = main.launch_history()
    if arg in history:
        del history[arg]
        with open(os.path.join(alfred.work(False), 'launch-history.json'), 'w+') as f:
            json.dump(history, f)
Example #13
    def __init__(self):
        """ Setup """

        # read the script filter's placeholder title from the workflow's
        # bundle info, and derive the config file path
        self.placeholder = ''
        for x in alfred.preferences['objects']:
            if x['type'] == 'alfred.workflow.input.scriptfilter':
                self.placeholder = x['config']['title']
        self.config_path = os.path.join(alfred.work(False), config_filename)
Example #15
def clean_index(v='2'):
    # refresh the local index: download to a temp file first, then swap it
    # into place atomically
    pkl = os.path.join(alfred.work(volatile=False), 'index%s.pkl' % v)
    tmp = pkl + '.tmp'
    if fetch(tmp):
        os.rename(tmp, pkl)
        return True
    else:
        return False
Example #17
def pbauthpocket(q):
    ret = pocket.getRequestCode()

    config = config_data()
    config['pocket_request_code'] = ret['code']

    with open(os.path.join(alfred.work(False), 'config.json'), 'w+') as myFile:
        myFile.write(json.dumps(config))

    result = [alfred.Item(title='Login!',
                          subtitle='Login with Pocket.com (you will be taken to pocket.com)',
                          attributes={'arg': ret['code'], 'uid': alfred.uid(0)},
                          icon="icon.png")]
    alfred.write(alfred.xml(result))
Example #18
def update_history(arg):
    import json, os, time
    import alfred, main
    # history maps url -> [launch_count, last_launched_timestamp]
    history = main.launch_history()
    now = int(time.time())
    if arg in history:
        history[arg][0] += 1
        history[arg][1] = now
    else:
        history[arg] = [1, now]
    with open(os.path.join(alfred.work(False), 'launch-history.json'), 'w+') as f:
        json.dump(history, f)
Example #19
def cache_favicon(image_data, uid, last_updated):
    cache_dir = os.path.join(alfred.work(True), FAVICONS_CACHE)
    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)
    icon_file = os.path.join(cache_dir, str(uid))
    if not os.path.isfile(icon_file) or last_updated > os.path.getmtime(icon_file):
        with open(icon_file, 'wb') as f:  # binary mode for image data
            f.write(image_data)
        # store last_updated as the file's mtime so the freshness check
        # above keeps working across runs
        os.utime(icon_file, (time.time(), last_updated))

    return (icon_file, {'type': 'png'})
Example #23
def fetch_sections(whatis, max_age=604800):
    cache = path.join(alfred.work(volatile=True), u'sections.1.json')
    if path.isfile(cache) and (time() - path.getmtime(cache) < max_age):
        return set(json.load(open(cache, 'r')))
    sections = set([])
    pattern = re.compile(r'\(([^()]+)\)$')
    for page in whatis.iterkeys():
        sre = pattern.search(page)
        if sre:
            sections.add(sre.group(1))
    json.dump(list(sections), open(cache, 'w'))
    return sections
Example #24
def copy_db(name, profile):
    cache = os.path.join(alfred.work(True), name)
    if os.path.isfile(cache) and time.time() - os.path.getmtime(cache) < CACHE_EXPIRY:
        return cache

    db_file = os.path.join(os.path.expanduser(profile), name)
    try:
        shutil.copy(db_file, cache)
    except (IOError, OSError):
        raise IOError(u'Unable to copy Google Chrome history database from {}'.format(db_file))

    return cache
Example #26
def copy_db(name, profile):
    cache = os.path.join(alfred.work(True), name)
    if os.path.isfile(cache) and time.time() - os.path.getmtime(cache) < CACHE_EXPIRY:
        return cache

    db_file = os.path.join(os.path.expanduser(profile), name)
    try:
        shutil.copy(db_file, cache)
    except (IOError, OSError):
        raise IOError(u'Unable to copy Safari history database from {}'.format(db_file))

    return cache
Example #28
def results(arg):
    uid = 0
    # read todo.txt
    path = alfred.work(False) + '/todo.txt'
    with open(path) as f:
        tasks = f.read().splitlines()

    for task in tasks:
        uid += 1
        # filter with keyword
        if arg in task:
            yield alfred.Item({'uid': alfred.uid(uid), 'arg': str(uid)}, task, u'Enter to Prioritize this task! Press ⌥ to Deprioritize it', 'EBD226C2-1E22-4F65-BD43-556E6EF3C463.png')
Example #29
def star(arg):
    import json, os, urlparse
    import alfred, main
    starred_url = main.starred_url_data()

    if arg in starred_url:
        starred_url.remove(arg)
        print "unmark %s" % urlparse.urljoin(arg, '/')
    else:
        starred_url.append(arg)
        print "mark %s" % urlparse.urljoin(arg, '/')

    with open(os.path.join(alfred.work(False), 'starred-url.json'), 'w+') as f:
        json.dump(starred_url, f)
Example #30
def fetch_whatis(max_age=604800):
    cache = path.join(alfred.work(volatile=True), u'whatis.1.json')
    if path.isfile(cache) and (time() - path.getmtime(cache) < max_age):
        return json.load(open(cache, 'r'))
    raw_pages = subprocess.check_output(['/usr/bin/man', '-k', '-Pcat', '.'])
    # each output line looks like "page(1), alias(1) - description";
    # split it into ([pages], description) pairs
    pagelist = map(lambda x: map(lambda y: y.strip(), x.split(' - ', 1)),
                   clean_ascii(raw_pages).splitlines())
    whatis = {}
    for (pages, description) in pagelist:
        for page in pages.split(', '):
            whatis[page] = description
    json.dump(whatis, open(cache, 'w'))
    return whatis
Example #33
def fetch_known_hosts(_path, alias='~/.ssh/known_hosts'):
    master = path.expanduser(_path)
    if not path.isfile(master):
        return
    cache = path.join(alfred.work(volatile=True), 'known_hosts.1.json')
    if path.isfile(cache) and path.getmtime(cache) > path.getmtime(master):
        return (json.load(open(cache, 'r')), alias)
    results = set()
    try:
        with open(master, 'r') as known_hosts:
            for line in known_hosts:
                results.update(line.split()[0].split(','))
    except IOError:
        pass
    json.dump(list(results), open(cache, 'w'))
    return (results, alias)
Example #35
def fetch_ssh_keys(_path, alias='~/.ssh/'):
    master = path.expanduser(_path)
    if not path.isdir(master):
        return
    cache = path.join(alfred.work(volatile=True), 'ssh_keys.1.json')
    if path.isfile(cache) and path.getmtime(cache) > path.getmtime(master):
        return (json.load(open(cache, 'r')), alias)
    results = set()
    # every "<name>.pub" under the directory counts as a key
    for subdir, dirs, files in walk(master):
        for filename in files:
            if filename.endswith(".pub"):
                results.add(filename[:-4])
    json.dump(list(results), open(cache, 'w'))
    return (results, alias)
Example #36
def pbauth(q):
    try:
        (user, token) = q.split(':')
    except ValueError:
        print 'Invalid Token'
        sys.exit(0)

    config = config_data()

    config['pinboard_username'] = user
    config['pinboard_token'] = token

    with open(os.path.join(alfred.work(False), 'config.json'), 'w+') as myFile:
        myFile.write(json.dumps(config))

    print "Authentication Token Saved"
Example #38
def fetch_poster(poster_uri):
    poster_name = u'_%s.%s' % (
        u'_'.join(poster_uri.split('/')[4:6]),
        poster_uri.split('.')[-1]
    )
    cache = path.join(alfred.work(volatile=True), poster_name)
    if path.isfile(cache):
        return cache
    try:
        r = requests.get(poster_uri, timeout=POSTER_TIMEOUT)
    except requests.exceptions.Timeout:
        return 'icon.png'
    if r.status_code != 200 or not r.headers['Content-Type'].startswith('image/'):
        return 'icon.png'
    with open(cache, 'wb') as cache_file:
        cache_file.write(r.content)
    return cache
Example #41
def fetch_ssh_config(_path, alias='~/.ssh/ssh_config'):
    master = path.expanduser(_path)
    if not path.isfile(master):
        return
    cache = path.join(alfred.work(volatile=True), 'ssh_config.1.json')
    if path.isfile(cache) and path.getmtime(cache) > path.getmtime(master):
        return (json.load(open(cache, 'r')), alias)
    results = set()
    try:
        with open(path.expanduser(_path), 'r') as ssh_config:
            results.update(x for line in ssh_config if line.startswith('Host ')
                           for x in line.split()[1:]
                           if not ('*' in x or '?' in x or '!' in x))
    except IOError:
        pass
    json.dump(list(results), open(cache, 'w'))
    return (results, alias)
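fetch_known_hosts, fetch_ssh_keys, and fetch_ssh_config above (and fetch_hosts below) all repeat one caching recipe: rebuild the JSON cache only when the source file is newer than it. A condensed sketch of that shared pattern; cached and fetch_fresh are hypothetical names, not functions from these workflows:

import json
from os import path

def cached(master, cache, fetch_fresh):
    # reuse the cache while it is newer than the source file
    if path.isfile(cache) and path.getmtime(cache) > path.getmtime(master):
        return json.load(open(cache, 'r'))
    results = fetch_fresh(master)  # parse the source into an iterable
    json.dump(list(results), open(cache, 'w'))
    return results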
Example #42
def fetch_hosts(_path, alias='/etc/hosts'):
    master = path.expanduser(_path)
    if not path.isfile(master):
        return
    cache = path.join(alfred.work(volatile=True), 'hosts.1.json')
    if path.isfile(cache) and path.getmtime(cache) > path.getmtime(master):
        return (json.load(open(cache, 'r')), alias)
    results = set()
    try:
        with open(master, 'r') as etc_hosts:
            for line in (x for x in etc_hosts if not x.startswith('#')):
                results.update(line.split()[1:])
        results.discard('broadcasthost')
    except IOError:
        pass
    json.dump(list(results), open(cache, 'w'))
    return (results, alias)
Example #44
def fetch_bonjour(_service, alias='Bonjour', timeout=0.1):
    cache = path.join(alfred.work(volatile=True), 'bonjour.1.json')
    if path.isfile(cache) and (time() - path.getmtime(cache) < 60):
        return (json.load(open(cache, 'r')), alias)
    results = set()
    try:
        from pybonjour import DNSServiceBrowse, DNSServiceProcessResult
        from select import select
        bj_callback = lambda s, f, i, e, n, t, d: results.add('%s.%s' % (n.lower(), d[:-1]))
        bj_browser = DNSServiceBrowse(regtype=_service, callBack=bj_callback)
        select([bj_browser], [], [], timeout)
        DNSServiceProcessResult(bj_browser)
        bj_browser.close()
    except ImportError:
        pass
    json.dump(list(results), open(cache, 'w'))
    return (results, alias)
Example #45
def fetch_ssh_config(_path, alias='~/.ssh/ssh_config'):
    master = path.expanduser(_path)
    if path.isfile(master):
        cache = path.join(alfred.work(volatile=True), 'ssh_config.1.json')
        if path.isfile(cache) and path.getmtime(cache) > path.getmtime(master):
            return (json.load(open(cache, 'r')), alias)
        else:
            results = set([])
            try:
                with open(path.expanduser(_path), 'r') as ssh_config:
                    for line in (x for x in ssh_config if x.startswith('Host ')):
                        results.update((x for x in line.split()[1:] if not ('*' in x or '?' in x or '!' in x)))
            except IOError:
                pass
            json.dump(list(results), open(cache, 'w'))
            return (results, alias)
    else:
        return ([], alias)
Example #47
    def getAuthToken(self):
        config = main.config_data()
        try:
            code = config['pocket_request_code']
        except KeyError:
            logger.info("getAuthToken invalid code")
            sys.exit(0)

        logger.info("request code is " + code)
        req_data = json.dumps({
            "consumer_key": pocket.CONSUMER_KEY, "code": code
        })
        logger.info("Trying to get auth token")
        try:
            resp_data = pocket.makeRequest(req_data, pocket.POCKET_API_URL + 'authorize/')
            logger.info('Token received! :' + resp_data["access_token"])
            config['pocket_access_code'] = resp_data["access_token"]
            with open(os.path.join(alfred.work(False), 'config.json'), 'w+') as myFile:
                myFile.write(json.dumps(config))
            logger.info("Logged in as " + resp_data["username"])
        except Exception:
            logger.error("Could not login - something went wrong")
Example #48
def rss_data():
    try:
        return json.loads(open(os.path.join(alfred.work(True), 'rss-cache.json')).read())
    except (IOError, ValueError):
        return []
Example #49
# -*- coding: utf-8 -*-
import glob
import os
import re
import sqlite3
import time

import alfred

_MAX_RESULTS = 20
_CACHE_EXPIRY = 24 * 60 * 60 # in seconds
_CACHE = alfred.work(True)

def combine(operator, iterable):
    return u'(%s)' % (u' %s ' % operator).join(iterable)

def icon(db, faviconid):
    if not faviconid:
        return
    data = db.execute(u'select data from moz_favicons where id=%d' % faviconid).fetchone()
    if not data:
        return
    icon = os.path.join(_CACHE, 'icon-%d.png' % faviconid)
    if (not os.path.exists(icon)) or ((time.time() - os.path.getmtime(icon)) > _CACHE_EXPIRY):
        open(icon, 'wb').write(data[0])
    return icon

def places(profile):
    profile = (d for d in glob.glob(os.path.expanduser(profile)) if os.path.isdir(d)).next()
    return os.path.join(profile, 'places.sqlite')
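A hypothetical use of the module above; the profile glob is an assumption about a default Firefox install, not taken from the source:

db = sqlite3.connect(places(u'~/Library/Application Support/Firefox/Profiles/*.default'))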
Example #50
        alfred.Item(
            title=u"Not Found",
            subtitle=u"Try another search term",
            attributes={
                'uid': alfred.uid(0),
                'arg': u"macappstore://ax.search.itunes.apple.com/WebObjects/MZSearch.woa/wa/search?q=%s" % searchTerm
            },
            icon=u"icon.png"))

for (idx, e) in enumerate(itertools.islice(resultData, MAX_RESULT)):
    if ALBUM_ICON:
        # cache the 60x60 artwork in the workflow's volatile directory
        imageurl = e['artworkUrl60']
        filepath = os.path.join(alfred.work(True), str(e['trackId']) + ".png")
        if not os.path.exists(filepath):
            urllib.urlretrieve(e['artworkUrl60'], filepath)
        imageurl = filepath
    else:
        imageurl = u"icon.png"

    try:
        averageUserRating = e['averageUserRating']
    except KeyError:
        averageUserRating = u"no data"

    subtitle = "%s, Price: %s, Rating: %s" % (
        e['artistName'], e['formattedPrice'], averageUserRating)
    results.append(
        alfred.Item(title=e['trackName'],
Example #51
def logfile():
    return os.path.join(alfred.work(False), 'monkeypatch_log.txt')
Example #52
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

imageregex = re.compile(r"img.*src=\"(.*?)\"")
MAX_RESULTS = 9

results = []

d = feedparser.parse('http://macnews.tistory.com/rss')
for (idx, e) in enumerate(itertools.islice(d.entries, MAX_RESULTS)):
    try:
        imageurl = imageregex.search(e.description)
        if imageurl:
            url = imageurl.group(1)
            filepath = os.path.join(alfred.work(True), os.path.split(url)[1])
            if not os.path.exists(filepath):
                urllib.urlretrieve(url, filepath)
                cmd = "sips -z 72 72 '%s'" % filepath
                subprocess.check_output(cmd,
                                        stderr=subprocess.STDOUT,
                                        shell=True)
            imageurl = filepath
        else:
            imageurl = u"icon.png"
    except:
        imageurl = u"icon.png"

    results.append(
        alfred.Item(title=e.title,
                    subtitle=e.published,
Example #53
# -*- coding: utf-8 -*-
import glob
import os
import re
import sqlite3
import time

import alfred

_MAX_RESULTS = 20
_CACHE_EXPIRY = 24 * 60 * 60  # in seconds
_CACHE = alfred.work(True)


def combine(operator, iterable):
    return u'(%s)' % (u' %s ' % operator).join(iterable)


def icon(favicons_db, url_hash):
    if not url_hash:
        return

    result = favicons_db.execute(u"""\
select moz_icons.id, moz_icons.data from moz_icons
inner join moz_icons_to_pages on moz_icons.id = moz_icons_to_pages.icon_id
inner join moz_pages_w_icons on moz_icons_to_pages.page_id = moz_pages_w_icons.id
where moz_pages_w_icons.page_url_hash = '%s'
order by moz_icons.id asc limit 1""" % url_hash).fetchone()
    if not result:
        return
    (id, data) = result
Example #54
def pref_path():
    return os.path.join(alfred.work(False), "config.yaml")
Example #55
def get_index(v='2'):
    # local storage
    pkl = os.path.join(alfred.work(volatile=False), 'index%s.pkl' % v)
    # fetch if non existing locally
    fetch(pkl)
    return cPickle.load(open(pkl))
Example #56
def default_todo_path():
    return os.path.join(alfred.work(False), "todo.yaml")
Example #57
def starred_url_data():
    try:
        return json.loads(
            open(os.path.join(alfred.work(False), 'starred-url.json')).read())
    except IOError:
        return []
Example #59
data = urllib.urlopen(url, params).read()
resultData = json.loads(data)['results']

results = []

results.append(alfred.Item(title=u"Search iTunes for songs matching \"%s\"" % "".join(searchTerm),
                           subtitle=u"Search iTunes",
                           attributes={'uid': alfred.uid(0),
                                       'arg': u"itms://itunes.apple.com/WebObjects/MZStore.woa/wa/search?term=%s" % searchTerm[0]},
                           icon=u"icon.png"))

for (idx,e) in enumerate(itertools.islice(resultData, MAX_RESULT)):
    if ALBUM_ICON:
        imageurl = e['artworkUrl60']
        filepath = os.path.join(alfred.work(True), str(e['trackId'])+".png")
        if not os.path.exists(filepath):
            urllib.urlretrieve(e['artworkUrl60'], filepath)
        imageurl = filepath
    else:
        imageurl = u"icon.png"

    try:
        formatKind = e['kind'].title().replace('-', ' ')
    except KeyError:
        formatKind = u"no kind"

    try:
        trackPrice = e['trackPrice']
        if trackPrice < 0:
            trackPrice = u'Album Only'