def run(self, action, env=False):
    """Execute *action* against this Vagrant machine.

    Interactive actions (``rdp``/``ssh``) are run directly via
    ``run_vagrant``; everything else is delegated to ``execute.py``
    as a background job keyed on the Vagrantfile path.

    :param action: action name/object accepted by ``Action()``
    :param env: if truthy, ``--name`` is omitted from the command —
        presumably "act on the whole environment, not this machine";
        NOTE(review): confirm intended semantics.
    :raises Exception: if the action is not valid for this machine.
    """
    action = Action(action)
    # Re-validate the action against the machine's current action list.
    # NOTE(review): list-returning filter() implies Python 2; under
    # Python 3 a filter object is always truthy and this guard would
    # never fire.
    if not filter(lambda x: x.name == action.name, self.actions):
        raise Exception('Action {0} not found. Instance changed state?'
                        .format(action))
    # One background task name per Vagrantfile path.
    task_name = 'exec{0}'.format(hash(self.vagrantfile_path))
    if action.name in ('rdp', 'ssh'):
        # Interactive sessions run in the foreground through vagrant.
        run_vagrant('{action} {machine_id}'
                    .format(action=action.name, machine_id=self.key))
        return
    cur_dir = os.path.dirname(os.path.realpath(__file__))
    exec_path = os.path.join(cur_dir, 'execute.py')
    cmd = ['python', exec_path, '--action', action.name]
    if action.flags:
        cmd += ['-f'] + action.flags
    if not env:
        cmd += ['--name', self.name]
    logger.debug('Running in background: %s', cmd)
    # Background job runs with HOME plus the PATH stored in the
    # workflow settings (Alfred strips the user's shell environment).
    new_env = os.environ.copy()
    new_env['HOME'] = os.path.expanduser('~')
    new_env['PATH'] = Workflow().settings['PATH']['VAR']
    run_in_background(task_name, cmd, env=new_env,
                      cwd=self.vagrantfile_path)
def main(wf):
    """Script Filter entry point: dispatch on the query's prefix.

    Recognised prefixes: ``showmanga:`` (list chapters),
    ``downloadmanga:`` (background mass download), ``dmanga:``
    (invoked by the background job itself); anything else is a
    free-text search.
    """
    # Argument parsing configuration
    parser = argparse.ArgumentParser()
    parser.add_argument('--setlang', dest='lang', nargs='?', default=0)
    parser.add_argument('query', nargs='?', default=None)
    args = parser.parse_args(wf.args)

    if args.lang == '1':
        global lang
        lang = 1  # Set global lang

    # BUG FIX: `query` defaults to None, and `'x:' in None` raises
    # TypeError. Normalise to '' so an empty invocation falls through
    # to the plain-search branch instead of crashing.
    query = args.query or ''

    # Call different functions filtering by argument
    if 'showmanga:' in query:
        # Search and show list of results
        mid = query.split('showmanga:')[1]  # Parse the unique manga id
        search_by_mid(wf, mid)
    elif 'downloadmanga:' in query:
        # Perform a massive download of selected manga
        mid = query.split('downloadmanga:')[1]
        # TODO check if there is at least 1 element
        notify.notify("Background download started!",
                      "Multithreading download started. You will be notified when completed.",
                      sound='Submarine')
        wf.add_item("Download started",
                    "At the end you will be notified in the notification center.",
                    valid=False,
                    arg="",
                    autocomplete="")  # Autocomplete will restore the clear command
        # Run massive download in background (~ subprocess.call)
        run_in_background('download',
                          ['/usr/bin/python',
                           wf.workflowfile('mangaedenAPI.py'),
                           'dmanga:' + mid])
        wf.send_feedback()
    elif 'dmanga:' in query:
        # TODO better error handling while background download is running
        mid = query.split('dmanga:')[1]
        download_manga(mid)
    else:
        # Search on mangaeden json list
        search_by_query(wf, query.lower())
def main(wf):
    """Run workflow Script Filter.

    Args:
        wf (workflow.Workflow): Current Workflow object.
    """
    global ureg
    # Build the pint unit registry from the bundled definitions.
    ureg = UnitRegistry(wf.decode(DEFAULT_UNIT_DEFINITIONS))
    ureg.default_format = 'P'
    wf.magic_arguments['appkey'] = open_currency_instructions
    if not len(wf.args):
        return
    query = wf.args[0]  # .lower()
    log.debug('query : %s', query)
    handle_update(wf)
    # Create data files if necessary
    bootstrap(wf)
    # Add workflow and user units to unit registry
    register_units()
    # Notify of available update
    if wf.update_available:
        wf.add_item('A newer version is available',
                    'Action this item to download & install the new version',
                    autocomplete='workflow:update',
                    icon=ICON_UPDATE)
    # Load cached data
    exchange_rates = wf.cached_data(CURRENCY_CACHE_NAME, max_age=0)
    if exchange_rates:
        # Add exchange rates to conversion database
        register_exchange_rates(exchange_rates)
    if not wf.cached_data_fresh(CURRENCY_CACHE_NAME, CURRENCY_CACHE_AGE):
        # Update currency rates
        cmd = ['/usr/bin/python', wf.workflowfile('currency.py')]
        run_in_background('update', cmd)
        wf.rerun = 0.5
    if is_running('update'):
        # Poll again while the background update runs.
        wf.rerun = 0.5
        if exchange_rates is None:  # No data cached yet
            wf.add_item(u'Fetching exchange rates…',
                        'Currency conversions will be momentarily possible',
                        icon=ICON_INFO)
        else:
            wf.add_item(u'Updating exchange rates…',
                        icon=ICON_INFO)
    return convert(query)
def generate_all_icons():
    """Callback for magic argument"""
    # Refuse to start a second generation job.
    if background.is_running('icongen'):
        return 'Generation already in progress.'
    cmd = ['/usr/bin/python', wf.workflowfile('icons.py')]
    background.run_in_background('icongen', cmd)
    return 'Starting icon generation. This may take up to 15 minutes.'
def test_run_in_background(self):
    """Run in background"""
    job = ['sleep', '1']
    run_in_background('test', job)
    sleep(0.5)
    # Job should be alive and tracked via a pidfile.
    self.assertTrue(is_running('test'))
    self.assertTrue(os.path.exists(self._pidfile('test')))
    # Starting the same-named job again is refused (returns None).
    self.assertEqual(run_in_background('test', job), None)
    sleep(0.6)
    # By now the 1s sleep has finished and the pidfile is gone.
    self.assertFalse(is_running('test'))
    self.assertFalse(os.path.exists(self._pidfile('test')))
def test_run_in_background(self):
    """Run in background"""
    job = ['sleep', '1']
    # First launch succeeds and leaves a pidfile behind.
    assert run_in_background('test', job) == 0
    assert is_running('test')
    assert os.path.exists(_pidfile('test'))
    # A duplicate launch is refused while the job is alive.
    assert run_in_background('test', job) is None
    sleep(1.1)  # wait for job to finish
    # Job finished: no longer running, pidfile removed.
    assert not is_running('test')
    assert not os.path.exists(_pidfile('test'))
def update(self, force=False):
    """Load contacts from cache and update cached data if old."""
    cached = wf.cached_data('contacts', max_age=0)
    self.contacts = {} if cached is None else cached

    # Kick off a background refresh when stale or explicitly forced.
    stale = not wf.cached_data_fresh('contacts', MAX_CACHE_AGE)
    if stale or force:
        log.debug('Updating contacts cache ...')
        run_in_background('update-contacts',
                          ['/usr/bin/python',
                           wf.workflowfile('update_contacts.py')])
def background_sync():
    """Start a background sync job (no-op if one is already running)."""
    from workflow.background import run_in_background

    # Only runs if another sync is not already in progress
    run_in_background('sync', [
        '/usr/bin/env',
        'python',
        workflow().workflowfile('alfred-wunderlist-workflow.py'),
        'pref sync background',
        '--commit',
    ])
def backgroundSync():
    """Launch the ':pref sync' command as a background job."""
    from workflow.background import run_in_background
    from wunderlist.util import workflow

    script = workflow().workflowfile('alfred-wunderlist-workflow.py')
    # Only runs if another sync is not already in progress
    run_in_background('sync',
                      ['/usr/bin/env', 'python', script,
                       ':pref sync', '--commit'])
def update_data(update_method):
    """Update project data from 10.000ft"""
    log.debug('Starting update')
    cmd = ['/usr/bin/python', wf.workflowfile('update.py')]
    # A forced refresh is signalled to the updater script.
    if update_method == 'force':
        cmd += ['--update', 'force']
    # Update projects data
    log.debug('Run update command : {}'.format(cmd))
    run_in_background('update', cmd)
    return 0
def do_import_search(wf, url):
    """Parse URL for OpenSearch config.

    Polls a session-scoped cache entry written by the background
    'import' job; while it is empty, shows an animated progress item
    and reruns the Script Filter until the job finishes.
    """
    ctx = Context(wf)
    # ICON_IMPORT = ctx.icon('import')
    ICONS_PROGRESS = [
        ctx.icon('progress-1'),
        ctx.icon('progress-2'),
        ctx.icon('progress-3'),
        ctx.icon('progress-4'),
    ]
    # Result of the background fetch, if it has completed.
    data = wf.cached_data('import', None, max_age=0, session=True)
    if data:
        error = data['error']
        search = data['search']
        # Clear cache data
        wf.cache_data('import', None, session=True)
        wf.cache_data('import-status', None, session=True)
        if error:
            wf.add_item(error, icon=ICON_ERROR)
            wf.send_feedback()
            return
        it = wf.add_item(u'Add "{}"'.format(search['name']),
                         u'↩ to add search',
                         valid=True,
                         icon=search['icon'])
        # Export every search attribute as an Alfred variable.
        for k, v in search.items():
            it.setvar(k, v)
    else:
        # No result yet: cycle the progress icon and poll via rerun.
        progress = int(os.getenv('progress') or '0')
        i = progress % len(ICONS_PROGRESS)
        picon = ICONS_PROGRESS[i]
        log.debug('progress=%d, i=%d, picon=%s', progress, i, picon)
        wf.setvar('progress', progress + 1)
        if not is_running('import'):
            run_in_background('import', ['./searchio', 'fetch', url])
        status = wf.cached_data('import-status', None, max_age=0,
                                session=True)
        title = status or u'Fetching OpenSearch Configuration …'
        wf.rerun = 0.2
        wf.add_item(title, u'Results will be shown momentarily',
                    icon=picon)
    wf.send_feedback()
def main(wf):
    """Script Filter: list keywords scanned from installed workflows."""
    args = Args(wf.args)
    actions = wf.cached_data('actions', None, max_age=0)
    if wf.update_available:
        # Add a notification to top of Script Filter results
        wf.add_item(u'New version available',
                    u'Action this item to install the update',
                    autocomplete='workflow:update',
                    icon=ICON_INFO)
    # Rescan installed workflows in the background when stale.
    if not wf.cached_data_fresh('actions', max_age=CACHE_MAX_AGE):
        cmd = ['/usr/bin/python', wf.workflowfile('alfredhelp.py'),
               '--scan']
        run_in_background(u'scan', cmd)
    if is_running(u'scan'):
        wf.add_item(
            title=u'Scanning alfred workflows...',
            valid=False,
            icon=ICON_INFO
        )
    if args.show_keywords and actions:
        if args.query:
            actions = wf.filter(args.query, actions, key=search_key)
        for action in actions:
            argument = action.keyword
            # Some keywords expect a trailing space before the query.
            if action.add_space:
                argument += u' '
            wf.add_item(
                title=u'{keyword} - {title}'.format(keyword=action.keyword,
                                                    title=action.title),
                subtitle=action.subtitle,
                icon=action.icon,
                arg=argument,
                valid=True
            )
    elif args.scan:
        # --scan mode: rebuild the actions cache synchronously.
        def get_posts():
            return scan(path.join(wf.alfred_env['preferences'],
                                  'workflows'))
        wf.cached_data('actions', get_posts, max_age=CACHE_MAX_AGE)
        # NOTE(review): this second scan() looks redundant with the
        # cached_data() refresh above — confirm it is intentional.
        scan(path.join(wf.alfred_env['preferences'], 'workflows'))
    wf.send_feedback()
    return 0
def main(wf):
    """Dispatch --browse / --iterm requests as background jobs."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--browse', dest='browse', nargs='?',
                        default=None)
    parser.add_argument('--iterm', dest='iterm',
                        nargs=argparse.REMAINDER, default=None)
    args = parser.parse_args(wf.args)

    if args.browse:
        # Open the URL/path with the system default handler.
        run_in_background('browse', ['open', args.browse])
        return 0

    if args.iterm:
        # Hand the remaining args to the iTerm AppleScript helper.
        run_in_background('iterm',
                          ['osascript', 'open_iterm.scpt',
                           ' '.join(args.iterm)])
        return 0
def main(workflow):
    """Script Filter: search Hipchat users by name/mention/email."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--set-token", dest="api_token", nargs="?",
                        default=None)
    parser.add_argument("query", nargs="?", default=None)
    arguments = parser.parse_args(workflow.args)
    # --set-token mode: store the token in the keychain and exit.
    if arguments.api_token:
        workflow.save_password("hipchat_api_token", arguments.api_token)
        return 0
    try:
        api_token = workflow.get_password("hipchat_api_token")
    except PasswordNotFound:
        workflow.add_item(
            "No API key set.",
            "Please use hcsettoken to set your Hipchat API token.",
            valid=False,
            icon=ICON_WARNING
        )
        workflow.send_feedback()
        return 0
    users = workflow.cached_data("users", None, max_age=0)
    # Refresh the user list in the background when older than 60s.
    if not workflow.cached_data_fresh("users", max_age=60):  # 60s
        cmd = ["/usr/bin/python", workflow.workflowfile("update.py")]
        run_in_background("update", cmd)
    if is_running("update"):
        logger.debug("updating users")
    if arguments.query and users:
        users = workflow.filter(arguments.query, users,
                                key=search_terms_for_user, min_score=20)
    if not users:
        workflow.add_item("Whoops, no users found", icon=ICON_WARNING)
        workflow.send_feedback()
        return 0
    for user in users:
        # Presence info may be absent; treat as unknown status.
        status_icon = get_status_icon(user["presence"]["show"]
                                      if user["presence"] else None)
        workflow.add_item(
            user["name"],
            user["email"],
            arg=actionTemplate.format(user["mention_name"]),
            valid=True,
            icon=status_icon
        )
    workflow.send_feedback()
    logger.debug("returned {} results".format(len(users)))
def test_kill(self):
    """Kill"""
    # Nothing to kill yet.
    assert kill('test') is False
    job = ['sleep', '1']
    assert run_in_background('test', job) == 0
    assert is_running('test')
    # Killing a live job succeeds ...
    assert kill('test') is True
    sleep(0.3)  # give process time to exit
    # ... and the job is gone afterwards.
    assert not is_running('test')
def search_vault_for_query(self, query):
    """Search the LastPass vault for an optional passed query."""
    results = self.wf.cached_data("vault_items", None, max_age=0)

    # Start the update script if the cache is too old or missing.
    age = int(self.wf.settings["general"]["cache_bust"])
    if not self.wf.cached_data_fresh("vault_items", age):
        run_in_background("update",
                          ["/usr/bin/python",
                           self.wf.workflowfile("update.py")])

    # Filter the cached items when a query was supplied.
    if query and results:
        results = self.wf.filter(query, results,
                                 self.search_item_fields,
                                 match_on=MATCH_ALL ^ MATCH_ALLCHARS)

    self.log.debug("Search results: {}".format(results))
    return results
def run(self, wf):
    """Run workflow.

    Dispatches on docopt flags: help file, folder configuration,
    search within one folder, or search across all Smart Folders.

    :param wf: active :class:`workflow.Workflow` instance
    """
    self.wf = wf
    wf.args  # check for magic args
    self.keyword = self.wf.settings.get('keyword', DEFAULT_KEYWORD)
    args = docopt(__doc__)
    log.debug(u'args : %r', args)
    # Open Help file
    if args.get('--helpfile'):
        return self.do_open_help_file()
    # Perform search
    self.query = wf.decode(args.get('<query>') or '')
    # List Smart Folders with custom keywords
    if args.get('--config'):
        return self.do_configure_folders()
    # Was a configured folder passed?
    folder = wf.decode(args.get('--folder') or '')
    # Get list of Smart Folders. Update in background if necessary.
    self.folders = self.wf.cached_data('folders', max_age=0)
    if self.folders is None:
        self.folders = []
    # Update folder list if it's old
    if not self.wf.cached_data_fresh('folders', CACHE_AGE_FOLDERS):
        log.debug('updating list of Smart Folders in background...')
        run_in_background('folders',
                          ['/usr/bin/python',
                           self.wf.workflowfile('cache.py')])
    if is_running('folders'):
        # Poll again shortly while the cache refreshes.
        self.wf.rerun = 0.5
    # Has a specific folder been specified?
    if folder:
        return self.do_search_in_folder(folder)
    return self.do_search_folders()
def main(wf):
    """Persist a single workflow setting passed on the command line."""
    parser = argparse.ArgumentParser()
    # String-valued settings.
    for name in ('baseurl', 'delimiter', 'directory', 'username',
                 'password'):
        parser.add_argument('--' + name, dest=name, nargs='?',
                            default=None)
    # Boolean actions.
    for name in ('avatars', 'edit'):
        parser.add_argument('--' + name, dest=name, action='store_true',
                            default=None)
    args = parser.parse_args(wf.args)

    if args.baseurl:
        wf.settings['baseurl'] = args.baseurl.rstrip('/')
        return 0
    if args.delimiter:
        wf.settings['delimiter'] = args.delimiter
        return 0
    if args.directory:
        wf.settings['directory'] = args.directory.rstrip('/')
        return 0
    if args.username:
        wf.settings['username'] = args.username
        return 0
    if args.password:
        # Passwords go to the keychain, not the settings file.
        wf.save_password('stash_password', args.password)
        return 0
    if args.avatars:
        run_in_background('update-avatars',
                          ['/usr/bin/python',
                           wf.workflowfile('update_avatars.py')])
        return 0
    if args.edit:
        # Open the settings file in the default editor.
        run_in_background('edit', ['open', wf.settings_path])
        return 0
def update(self, force=False):
    """Load apps from cache, update if required"""
    apps = self.wf.cached_data('all_apps', max_age=0)
    default = self.wf.cached_data('system_default_app', max_age=0)
    self.all_email_apps = [] if apps is None else apps
    self.system_default_app = {} if default is None else default

    # Refresh when forced, or when either cache has expired.
    stale = (force or
             not self.wf.cached_data_fresh('all_apps',
                                           MAX_APP_CACHE_AGE) or
             not self.wf.cached_data_fresh('system_default_app',
                                           MAX_APP_CACHE_AGE))
    if stale:
        log.debug('Updating application caches ...')
        run_in_background('update-apps',
                          ['/usr/bin/python',
                           self.wf.workflowfile('update_apps.py')])
def main(wf):
    """Script Filter driver for the emoji download progress UI.

    The 'count' variable is exported via ``setvar`` on every run; its
    absence in the environment marks the first invocation, which kicks
    off the background downloader.
    """
    # Check if first time.
    # BUG FIX: was a bare `except:` which swallowed every exception
    # (including SystemExit/KeyboardInterrupt); narrowed to the two
    # failure modes of this lookup/conversion.
    try:
        count = int(os.environ['count'])
        first_time = False
    except (KeyError, ValueError):
        count = 0
        first_time = True

    if first_time:
        # Seed progress state and start the background downloader.
        wf.rerun = 0.5
        wf.store_data('download_percent', 0)
        wf.store_data('phase', 'downloading')
        wf.store_data('emoji_count', 0)
        wf.add_item('Starting background process')
        run_in_background('bg',
                          ['/usr/bin/python',
                           wf.workflowfile('src/bg_downloader.py')])
    else:
        build_wf_entry(wf)

    wf.setvar('count', count)
    wf.send_feedback()
def _update(self, force=False):
    """Update cached data"""
    log.debug('Updating workflow lists...')
    cmd = ['/usr/bin/python',
           self.wf.workflowfile('update_workflows.py')]
    if force:
        cmd.append('--force-update')
    log.debug('update command : {}'.format(cmd))
    # run_in_background returns a non-zero code on failure.
    retcode = run_in_background('update', cmd)
    if retcode:
        log.debug('Update failed with code {}'.format(retcode))
        print('Update failed')
        return 1
    print('Updating workflow list…'.encode('utf-8'))
    return 0
def main(wf):
    """Script Filter: list Home Assistant device_tracker entities."""
    ####################################################################
    # Get init data
    ####################################################################
    parser = argparse.ArgumentParser()
    parser.add_argument('query', nargs='?', default=None)
    args = parser.parse_args(wf.args)
    password = util.getPassword(wf)
    url = util.getURL(wf)

    ####################################################################
    # Fetch all data in background if the query is empty
    ####################################################################
    if args.query is None:
        if not is_running('update'):
            cmd = ['/usr/bin/python', wf.workflowfile('update_data.py')]
            run_in_background('update', cmd)

    data = util.getData(wf, 'device_tracker')

    def search_key_for_post(post):
        """Generate a string search key for a post"""
        item = data[post]
        elements = []
        elements.append(item['name'])  # title of post
        elements.append(item['friendly_name'])
        elements.append(item['entity_id'])
        elements.append(item['state'])
        elements.append(getDisplayValue(item['state']))
        return u' '.join(elements)

    def wrapper():
        return data

    posts = wf.cached_data('allDevices', wrapper, max_age=1)

    # If script was passed a query, use it to filter posts
    if args.query and data:
        posts = wf.filter(args.query, data, key=search_key_for_post,
                          min_score=20)

    if not posts:
        # we have no data to show, so show a warning and stop
        wf.add_item('No posts found', icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Loop through the returned posts and add an item for each to
    # the list of results for Alfred
    for post in posts:
        item = data[post]
        ICON = getIcon(item['state'])
        subtitle = ''
        # BUG FIX: was `item['longitude'] is not ''` — identity
        # comparison against a string literal; compare by value.
        if item['longitude'] != '':
            subtitle = ('longitude: ' + item['longitude'] +
                        ', latitude:' + item['latitude'])
        wf.add_item(title=(item['friendly_name'] + ' : ' +
                           getDisplayValue(item['state'])),
                    subtitle=subtitle,
                    valid=False,
                    arg=item['entity_id'],
                    icon=ICON)

    # Send the results to Alfred as XML
    wf.send_feedback()
    return 0
def prefetch(wf, uid, path):
    """Warm the Dropbox metadata cache for *path* in a background job."""
    # Job name is unique per (uid, path) so concurrent prefetches of
    # different paths can coexist.
    job_name = 'dropbox_prefetch_%s' % get_hash(uid, path)
    run_in_background(job_name,
                      ['/usr/bin/python',
                       wf.workflowfile('dropbox_prefetch.py'),
                       uid, path])
def main(wf):
    """Script Filter: search the user's Steam library."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--installed", action="store_true")
    parser.add_argument("-a", "--all", action="store_true")
    parser.add_argument("-d", "--dlc", action="store_true")
    parser.add_argument("-u", "--user", action="store", type=str,
                        default=None)
    # From a terminal the args arrive pre-split; from Alfred they come
    # as one string and need shell-style splitting.
    if sys.stdin.isatty():
        (options, query) = parser.parse_known_args(wf.args)
    else:
        (options, query) = parser.parse_known_args(
            shlex.split(wf.args[0]))
    if options.user:
        wf.settings['steam_user'] = options.user
    if not wf.settings.get('steam_user', None):
        wf.add_item('No steam username set',
                    'Use -u [username] to set your username!',
                    valid=False, icon=ICON_WARNING)
        wf.send_feedback()
        return 0
    # Re-quote the leftover args into a single query string.
    query = " ".join(map(lambda x: pipes.quote(x), query))
    # Refresh the games cache in the background every 12 hours.
    if not wf.cached_data_fresh('games_%s'
                                % wf.settings.get('steam_user'),
                                12 * 60 * 60):
        run_in_background('update',
                          ['/usr/bin/python',
                           wf.workflowfile('background.py')])
    if is_running('update'):
        wf.add_item('Updating Steam games...', icon=ICON_INFO)
    games = wf.cached_data('games_%s' % wf.settings.get('steam_user'),
                           None, max_age=0)
    if games:
        if query:
            games = wf.filter(query, games, key=lambda x: x['name'])
        # Hide DLC and non-Mac titles unless explicitly requested.
        if not options.dlc:
            games = filter(lambda x: 'dlc' not in x or not x['dlc'],
                           games)
        if not options.all:
            games = filter(lambda x: 'mac' not in x or x['mac'], games)
        if options.installed:
            # This will check the primary steam install location
            # Need to figure out how to parse the libraryfolders.vdf filter to get
            # additional locations. Appear to be '\t"\d+"\t+"[PATH TO FOLDER]"' lines...
            games = filter(
                lambda x: exists(expanduser(
                    "%s/appmanifest_%s.acf"
                    % (DEFAULT_STEAM_LIBRARY, x['id']))),
                games)
        for game in games:
            icon = wf.cachefile("icon_%s.png" % game['id'])
            wf.add_item(game['name'], uid=game['id'], valid=True,
                        arg=game['id'],
                        icon=icon if exists(icon) else None)
        if not games:
            wf.add_item('No %smatches%s'
                        % ('installed ' if options.installed else '',
                           ' for %s' % query if query else ''),
                        'Try searching with --all for all games or --dlc to see DLC',
                        icon=ICON_INFO)
    else:
        if not is_running('update'):
            wf.add_item('Unable to retrieve your games from Steam',
                        'Is your custom URL really "%s"?'
                        % wf.settings.get('steam_user'))
    wf.send_feedback()
    return 0
def do_search_in_folder(self, folder):
    """List/search contents of a specific Smart Folder.

    Sends results to Alfred.

    :param folder: name or path of Smart Folder
    :type folder: ``unicode``
    """
    log.info(u'searching folder "%s" for "%s" ...', folder, self.query)
    files = []
    folder_path = None
    # Accept either the folder's path or its display name.
    for name, path in self.folders:
        if path == folder:
            folder_path = path
            break
        elif name == folder:
            folder_path = path
            break
    else:
        return self._terminate_with_error(
            u"Unknown folder '{}'".format(folder),
            'Check your configuration with `smartfolders`')
    # Get contents of folder; update if necessary
    key = cache_key(folder_path)
    files = self.wf.cached_data(key, max_age=0)
    if files is None:
        files = []
    if not self.wf.cached_data_fresh(key, CACHE_AGE_CONTENTS):
        run_in_background(key,
                          ['/usr/bin/python',
                           self.wf.workflowfile('cache.py'),
                           '--folder', folder_path])
    if is_running(key):
        # Poll again while folder contents are being cached.
        self.wf.rerun = 0.5
    if self.query:
        files = self.wf.filter(self.query, files,
                               key=os.path.basename,
                               min_score=10)
    if not files:
        if not self.query:
            self._add_message('Empty Smart Folder', icon=ICON_WARNING)
        else:
            self._add_message('No matching results',
                              'Try a different query',
                              icon=ICON_WARNING)
    else:
        for i, path in enumerate(files):
            title = os.path.basename(path)
            subtitle = path.replace(os.getenv('HOME'), '~')
            self.wf.add_item(title,
                             subtitle,
                             uid=path,
                             arg=path,
                             valid=True,
                             icon=path,
                             icontype='fileicon',
                             type='file')
            # Cap the number of results returned to Alfred.
            if (i + 1) == MAX_RESULTS:
                break
    self.wf.send_feedback()
def save_to_history_list(query):
    """Persist *query* so a background job can add it to the history."""
    # Hand the query to the background script via a temp JSON file.
    store(TEMP_JSON, {'query': query})
    run_in_background('save_history',
                      ['/usr/bin/python',
                       wf.workflowfile('savehistory.py')])
def main(wf):
    """Script Filter: dispatch Aria2 commands and list downloads.

    Parses ``wf.args`` as ``[command|status] [param...]`` where
    commands are either task actions (operate on downloads) or
    settings (configure/control Aria2 itself).
    """
    # Restart the notifier daemon on the workflow's first run.
    if wf.first_run:
        kill_notifier()
    statuses = ['all', 'active', 'pending', 'paused', 'waiting', 'done',
                'error', 'removed', 'stopped']
    actions = ['reveal', 'rm', 'url', 'pause', 'resume']
    settings = ['rpc', 'secret', 'limit', 'limitup', 'limitnum',
                'clear', 'add', 'quit', 'stat', 'help', 'pauseall',
                'resumeall']
    commands = actions + settings
    # Defaults when no recognised command/status is given.
    command = 'reveal'
    status = 'all'
    param = ''
    if len(wf.args) == 1:
        if wf.args[0] in commands:
            command = wf.args[0]
        elif wf.args[0] in statuses:
            status = wf.args[0]
        else:
            param = wf.args[0:]
    elif len(wf.args) > 1:
        if wf.args[0] in settings:
            command = wf.args[0]
            param = wf.args[1]  # settings take one param only
        elif wf.args[0] in actions:
            command = wf.args[0]
            param = wf.args[1:]  # actions can take multiple param to filter the result
        elif wf.args[0] in statuses:
            status = wf.args[0]
            param = wf.args[1:]  # statuses can take multiple param to filter the result
        else:
            param = wf.args[0:]
    if command not in settings:
        # Task actions narrow the status they operate on.
        if command == 'pause':
            status = 'active'
        elif command == 'resume':
            status = 'incomplete'
        if get_rpc():
            get_tasks(command, status, param)
    else:
        if command == 'rpc':
            wf.add_item('Set Aria2\'s RPC Path',
                        'Set the path to ' + param,
                        arg=u'--rpc-setting ' + param, valid=True)
        elif command == 'secret':
            wf.add_item('Set Aria2\'s RPC Secret',
                        'Set the secret to ' + param,
                        arg=u'--secret-setting ' + param, valid=True)
        elif command == 'add':
            wf.add_item('Add new download: ' + param,
                        arg='--add ' + param, valid=True)
        elif command == 'clear':
            wf.add_item('Clear all stopped download?',
                        arg='--clear', valid=True)
        elif command == 'pauseall':
            wf.add_item('Pause all active downloads?',
                        arg='--pauseall', valid=True)
        elif command == 'resumeall':
            wf.add_item('Resume all paused downloads?',
                        arg='--resumeall', valid=True)
        elif command == 'help':
            wf.add_item('Need some help?', arg='--help', valid=True)
        elif command == 'quit':
            wf.add_item('Quit Aria2?', arg='--quit', valid=True)
        elif command == 'limit':
            limit_speed('download', param)
        elif command == 'limitup':
            limit_speed('upload', param)
        elif command == 'limitnum':
            limit_num(param)
        elif command == 'stat':
            get_stats()
    if wf.update_available:
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update')
    wf.send_feedback()
    # Keep the notifier daemon alive for download notifications.
    if not is_running('notifier'):
        cmd = ['/usr/bin/python', wf.workflowfile('notifier.py')]
        run_in_background('notifier', cmd)
def update():
    """Refresh cached data by launching update.py in the background."""
    run_in_background('update',
                      ['/usr/bin/python', wf.workflowfile('update.py')])
def main(wf):
    """Script Filter: upload the clipboard file/image, show progress.

    A small state machine driven by ``stored_data`` across Alfred
    reruns: start upload -> poll progress -> show resulting URLs.
    """
    log = wf.logger
    uploader_backend = os.getenv(u'UPLOADER_BACKEND', 'aws').lower()
    log.debug('main start, using backend %s' % uploader_backend)
    is_uploading = False
    # State persisted across reruns.
    bg_name = wf.stored_data('bg_name')
    upload_started = wf.stored_data('upload_started')
    log.debug('upload_started: %s' % upload_started)
    log.debug('bg_name: %s' % bg_name)
    if bg_name:
        is_uploading = is_running(bg_name)
    # imgur uploads are synchronous, so always re-read the clipboard.
    should_copy_file = all((
        not is_uploading,
        not upload_started,
    )) or uploader_backend == 'imgur'
    log.debug('should_copy_file: %s' % should_copy_file)
    file_path = None
    if should_copy_file:
        # Ask AppleScript for the path of the file on the clipboard.
        proc = subprocess.Popen(
            ['osascript', 'clipboard.scpt'],
            stdout=subprocess.PIPE,
        )
        proc.wait()
        file_path = proc.stdout.readline().strip()
        if not file_path:
            wf.add_item(title=u'Unable to upload',
                        subtitle='copy a file or image to clipboard',
                        valid=False, icon=ICON_ERROR)
            wf.send_feedback()
            return 0
    url = error = None
    if uploader_backend == 'imgur':
        url, error = imgur_uploader(wf, file_path)
    elif not is_uploading and not upload_started:
        # No upload in flight: start one.
        log.debug('uploader is not running')
        wf.store_data('uploaded_bytes', 0)
        # Job name derived from the file path.
        bg_name = hashlib.md5(file_path).hexdigest()
        log.debug(file_path)
        if not file_path:
            wf.add_item(title=u'Unable to upload',
                        subtitle='copy a file or image to clipboard',
                        valid=False, icon=ICON_ERROR)
            wf.store_data('upload_started', False)
        else:
            wf.rerun = 0.5
            wf.store_data('bg_name', bg_name)
            wf.store_data('upload_started', True)
            run_in_background(bg_name, [
                '/usr/bin/python',
                wf.workflowfile('aws_uploader.py'),
                file_path
            ])
            wf.add_item("Upload file", "Uploading in progress ...")
    else:
        bg_name = wf.stored_data('bg_name')
        if is_running(bg_name):
            # Still uploading: show percentage and poll again.
            wf.rerun = 0.5
            uploaded_bytes = int(wf.stored_data('uploaded_bytes') or 0)
            total_bytes = int(wf.stored_data('total_bytes'))
            percentage = 100.0 * uploaded_bytes / total_bytes
            wf.add_item("Upload file",
                        "Uploading in progress, %2.1f%% done."
                        % (percentage))
        else:
            """Last case"""
            # Upload finished: collect and clear the stored result.
            wf.store_data('upload_started', False)
            url = wf.stored_data('upload_url')
            wf.store_data('upload_url', None)
            error = wf.stored_data('upload_error')
            wf.store_data('upload_error', None)
    if error and not is_uploading:
        wf.add_item(title=u'Unable to upload', subtitle=error,
                    valid=False, icon=ICON_ERROR)
    if url and not is_uploading:
        # Offer the URL in several copy formats.
        wf.add_item(title=u'Copy url', subtitle=url, arg=url,
                    valid=True, icon=ICON_CLIPBOARD, quicklookurl=url)
        md_image_url = u'![]({})'.format(url)
        wf.add_item(title=u'Copy url as markdown image',
                    subtitle=md_image_url, arg=md_image_url,
                    icon=ICON_CLIPBOARD, valid=True)
        rst_image_url = u'.. image:: {}'.format(url)
        wf.add_item(title=u'Copy url as rst image',
                    subtitle=rst_image_url, icon=ICON_CLIPBOARD,
                    arg=rst_image_url, valid=True)
        md_url = u'[Link Text]({})'.format(url)
        wf.add_item(title=u'Copy url as markdown link',
                    subtitle=md_url, arg=md_url, icon=ICON_CLIPBOARD,
                    valid=True)
        rst_url = u'`Link Text <{}>`_'.format(url)
        wf.add_item(title=u'Copy url as rst link', subtitle=rst_url,
                    icon=ICON_CLIPBOARD, arg=rst_url, valid=True)
    wf.send_feedback()
def main(wf):
    """Script Filter: list Home Assistant light entities."""
    ####################################################################
    # Get init data
    ####################################################################
    parser = argparse.ArgumentParser()
    parser.add_argument('query', nargs='?', default=None)
    args = parser.parse_args(wf.args)
    password = util.getPassword(wf)
    url = util.getURL(wf)

    ####################################################################
    # Fetch all data in background if the query is empty
    ####################################################################
    if args.query is None:
        if not is_running('update'):
            cmd = ['/usr/bin/python', wf.workflowfile('update_data.py')]
            run_in_background('update', cmd)

    data = util.getData(wf, 'light')

    def search_key_for_post(post):
        """Generate a string search key for a post"""
        item = data[post]
        elements = []
        elements.append(item['name'])  # title of post
        elements.append(item['friendly_name'])
        elements.append(item['entity_id'])
        return u' '.join(elements)

    def wrapper():
        return data

    posts = wf.cached_data('allLights', wrapper, max_age=1)

    # If script was passed a query, use it to filter posts
    if args.query and data:
        posts = wf.filter(args.query, data, key=search_key_for_post,
                          min_score=20)

    if not posts:
        # we have no data to show, so show a warning and stop
        wf.add_item('No posts found', icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Loop through the returned posts and add an item for each to
    # the list of results for Alfred
    for post in posts:
        item = data[post]
        # BUG FIX: ICON was previously unassigned when the entity's
        # state is 'unavailable', raising NameError below; default to
        # None (Alfred then uses the workflow icon).
        ICON = None
        subtitle = ''
        if item['state'] != 'unavailable':
            if item['state'] == 'on':
                ICON = icon.getIcon('light-on', 'w')
                subtitle = '<Enter> to turn OFF light'
            else:
                ICON = icon.getIcon('light-off', 'b')
                subtitle = '<Enter> to turn ON light'
        wf.add_item(title=item['friendly_name'],
                    subtitle=subtitle,
                    valid=True,
                    arg=item['entity_id'],
                    icon=ICON)

    # Send the results to Alfred as XML
    wf.send_feedback()
    return 0
def main(wf):
    """Run workflow.

    Dispatches on docopt flags: open-URL actions, background tasks
    (--update / --search), then the interactive Script Filter.
    """
    from docopt import docopt
    args = docopt(__doc__, wf.args)
    log.debug('args : %r', args)
    # Run Script actions
    # ------------------------------------------------------------------
    if args.get('--post'):
        open_url(os.getenv('post_url'))
        return
    if args.get('--comments'):
        open_url(os.getenv('comments_url'))
        return
    if args.get('--subreddit'):
        remember_subreddit()
        open_url(os.getenv('subreddit_url'))
        return
    ####################################################################
    # Background tasks
    ####################################################################
    # Update cached list of top subreddits
    if args.get('--update'):
        log.info('updating list of top subreddits ...')
        update_top_subreddits()
        log.info('updated list of top subreddits.')
        return
    # Search using API and cache results
    if args.get('--search'):
        name = wf.decode(args.get('--search'))
        key = '--search-{}'.format(cache_key(name))
        log.info('searching API for %r ...', name)
        subreddits = search_subreddits(name)
        wf.cache_data(key, subreddits)
        log.info('API returned %d subreddits for %r',
                 len(subreddits), name)
        # Tidy up cache in a background task to keep things snappy
        clear_cache()
        return
    # Update cached list of top subreddits
    if not is_running('top') and \
            not wf.cached_data_fresh('__top', TOP_CACHE_MAX_AGE):
        run_in_background('top',
                          ['/usr/bin/python', 'reddit.py', '--update'])
    ####################################################################
    # Script Filter
    ####################################################################
    # Workflow updates
    # ------------------------------------------------------------------
    if wf.update_available:
        wf.add_item('A newer version is available',
                    '↩ to install update',
                    autocomplete='workflow:update',
                    icon=ICON_UPDATE)
    # Show popular subreddits
    # ------------------------------------------------------------------
    query = args.get('<query>')
    log.debug('query=%r', query)
    if query == '':
        return show_top()
    # Show subreddit or posts
    # ------------------------------------------------------------------
    name, slash, query = parse_query(query)
    if not name:
        wf.add_item('Invalid query', 'Try a different query',
                    icon=ICON_WARNING)
        wf.send_feedback()
        return 0
    # Search for matching subreddit
    # ------------------------------------------------------------------
    if not slash:
        return show_search(name)
    # Browse/search within subreddit
    # ------------------------------------------------------------------
    return show_posts(name, query)
def show_search(name):
    """List subreddits matching `name`.

    Combines the user's history, the cached "top" list and any cached
    API search results, de-duplicated with history taking precedence.
    """
    top = wf.cached_data('__top', max_age=0) or []
    history = wf.cached_data('__history', max_age=0) or []
    key = '--search-{}'.format(cache_key(name))
    # Load cached results for name or start search in background
    cached = wf.cached_data(key, None, SEARCH_CACHE_MAX_AGE) or []
    if not cached and not is_running('search'):
        run_in_background(
            'search',
            ['/usr/bin/python', 'reddit.py', '--search',
             name.encode('utf-8')])
        wf.rerun = 0.3
    log.debug('loaded subreddits: %d history, %d top, %d cached',
              len(history), len(top), len(cached))
    if is_running('search'):
        # Keep polling while the API search runs.
        wf.rerun = 0.3
    # History first, then top + cached, de-duplicated by name.
    subreddits = history
    other = top + cached
    seen = {sr['name'] for sr in history}
    for sr in other:
        if sr['name'] in seen:
            continue
        subreddits.append(sr)
        seen.add(sr['name'])
    # Filter results because Reddit's search is super-crappy
    subreddits = wf.filter(name, subreddits,
                           key=lambda sr: sr['name'],
                           min_score=30)
    if not subreddits:
        if is_running('search'):
            wf.add_item('Loading from API …', 'Hang in there')
        else:
            wf.add_item('No matching subreddits found',
                        'Try a different query',
                        icon=ICON_WARNING)
        wf.send_feedback()
        return
    # Cache all subreddits in case we need to "remember" one
    results = {sr['name']: sr for sr in subreddits}
    wf.cache_data('--last', results, session=True)
    # List all matching subreddits
    for sr in subreddits:
        log.debug(repr(sr))
        url = sr['url']
        it = wf.add_item(sr['name'],
                         sr['title'],
                         autocomplete='{}/'.format(sr['name']),
                         arg=url,
                         uid=sr['name'],
                         quicklookurl=url,
                         valid=True,
                         icon=ICON_REDDIT)
        # Export subreddit to ENV in case we want to save it
        it.setvar('subreddit_name', sr['name'])
        it.setvar('subreddit_title', sr['title'])
        it.setvar('subreddit_type', sr['type'])
        it.setvar('subreddit_url', url)
        it.setvar('argv', '-s')
    wf.send_feedback()
    return
def main(wf):
    """Script Filter entry point: list/filter GitLab projects.

    Also handles the one-shot `--setkey` / `--seturl` configuration
    actions invoked from separate Alfred "Run Script" actions.
    """
    # build argument parser to parse script args and collect their
    # values
    parser = argparse.ArgumentParser()
    # add an optional (nargs='?') --setkey argument and save its
    # value to 'apikey' (dest). This will be called from a separate "Run Script"
    # action with the API key
    parser.add_argument('--setkey', dest='apikey', nargs='?', default=None)
    parser.add_argument('--seturl', dest='apiurl', nargs='?', default=None)
    parser.add_argument('query', nargs='?', default=None)
    # parse the script's arguments
    args = parser.parse_args(wf.args)

    ####################################################################
    # Save the provided API key or URL
    ####################################################################
    # decide what to do based on arguments
    if args.apikey:
        # Script was passed an API key; store it in the macOS Keychain.
        log.info("Setting API Key")
        wf.save_password('gitlab_api_key', args.apikey)
        return 0  # 0 means script exited cleanly

    if args.apiurl:
        # Persist the API URL in the workflow's settings.json.
        log.info("Setting API URL to {url}".format(url=args.apiurl))
        wf.settings['api_url'] = args.apiurl
        return 0

    ####################################################################
    # Check that we have an API key saved
    ####################################################################
    try:
        wf.get_password('gitlab_api_key')
    except PasswordNotFound:
        # API key has not yet been set
        wf.add_item('No API key set.',
                    'Please use glsetkey to set your GitLab API key.',
                    valid=False,
                    icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    ####################################################################
    # View/filter GitLab Projects
    ####################################################################
    query = args.query
    # Cached project list; max_age=0 returns it regardless of age.
    projects = wf.cached_data('projects', None, max_age=0)

    if wf.update_available:
        # Add a notification to top of Script Filter results
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update',
                    icon=ICON_INFO)

    # Notify the user if the cache is being updated
    if is_running('update') and not projects:
        # Re-run the Script Filter until the background update finishes.
        wf.rerun = 0.5
        wf.add_item('Updating project list via GitLab...',
                    subtitle=u'This can take some time if you have a large number of projects.',
                    valid=False,
                    icon=ICON_INFO)

    # Start update script if cached data is too old (or doesn't exist)
    if not wf.cached_data_fresh('projects', max_age=3600) and \
            not is_running('update'):
        cmd = [sys.executable, wf.workflowfile('update.py')]
        run_in_background('update', cmd)
        wf.rerun = 0.5

    # If script was passed a query, use it to filter projects
    if query and projects:
        projects = wf.filter(query, projects, key=search_for_project,
                             min_score=20)

    if not projects:  # we have no data to show, so show a warning and stop
        wf.add_item('No projects found', icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Loop through the returned posts and add an item for each to
    # the list of results for Alfred
    for project in projects:
        wf.add_item(title=project['name_with_namespace'],
                    subtitle=project['path_with_namespace'],
                    arg=project['web_url'],
                    valid=True,
                    icon=None,
                    uid=project['id'])

    # Send the results to Alfred as XML
    wf.send_feedback()
def main(wf):
    """Script Filter entry point: list Home Assistant automations.

    With no query, kicks off a background data refresh; with a query,
    filters the cached automations. Actioning an item passes the
    automation's ``entity_id`` downstream to be triggered.
    """
    ####################################################################
    # Get init data
    ####################################################################
    parser = argparse.ArgumentParser()
    parser.add_argument('query', nargs='?', default=None)
    args = parser.parse_args(wf.args)

    # NOTE(review): return values unused, but the calls may have side
    # effects (keychain/settings access) — kept deliberately.
    password = util.getPassword(wf)
    url = util.getURL(wf)

    ####################################################################
    # Fetch all data in background if the query is empty
    ####################################################################
    if args.query is None:  # PEP 8: compare to None with `is`
        if not is_running('update'):
            cmd = ['/usr/bin/python', wf.workflowfile('update_data.py')]
            run_in_background('update', cmd)

    data = util.getData(wf, 'automation')

    def search_key_for_post(post):
        """Generate a string search key for a post."""
        item = data[post]
        # name (title of post), friendly name and entity id all count
        # towards the fuzzy match.
        return u' '.join(
            [item['name'], item['friendly_name'], item['entity_id']])

    # max_age=1 effectively refreshes the cache every run while still
    # going through the caching layer.
    posts = wf.cached_data('allAutomations', lambda: data, max_age=1)

    # If script was passed a query, use it to filter posts
    if args.query and data:
        posts = wf.filter(args.query, data, key=search_key_for_post,
                          min_score=20)

    if not posts:
        # we have no data to show, so show a warning and stop
        wf.add_item('No posts found', icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    if wf.update_available:
        # Add a notification to top of Script Filter results
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update',
                    icon=ICON_INFO)

    # Loop through the returned posts and add an item for each to
    # the list of results for Alfred
    for post in posts:
        sys.stderr.write("post : " + str(post) + '\n')
        item = data[post]
        wf.add_item(title=item['friendly_name'],
                    subtitle='<Enter> to trigger automation',
                    valid=True,
                    arg=item['entity_id'],
                    icon=icon.getIcon('mdi:home', 'w'))

    # Send the results to Alfred as XML
    wf.send_feedback()
    return 0
def do_search_in_folder(self, folder):
    """List/search contents of a specific Smart Folder.

    Sends results to Alfred.

    :param folder: name or path of Smart Folder
    :type folder: ``unicode``
    """
    log.info(u'searching folder "%s" for "%s" ...', folder, self.query)

    # Resolve `folder` against the configured Smart Folders; a match on
    # either the folder's path or its display name wins.
    folder_path = None
    for fname, fpath in self.folders:
        if folder in (fpath, fname):
            folder_path = fpath
            break
    else:
        return self._terminate_with_error(
            u"Unknown folder '{}'".format(folder),
            'Check your configuration with `smartfolders`')

    # Pull cached contents (any age); refresh in the background when stale.
    key = cache_key(folder_path)
    files = self.wf.cached_data(key, max_age=0) or []
    if not self.wf.cached_data_fresh(key, CACHE_AGE_CONTENTS):
        run_in_background(key, [
            '/usr/bin/python',
            self.wf.workflowfile('cache.py'),
            '--folder', folder_path,
        ])
    if is_running(key):
        # Poll until the background cache job completes.
        self.wf.rerun = 0.5

    if self.query:
        files = self.wf.filter(self.query, files,
                               key=os.path.basename,
                               min_score=10)

    if not files:
        if self.query:
            self._add_message('No matching results',
                              'Try a different query',
                              icon=ICON_WARNING)
        else:
            self._add_message('Empty Smart Folder', icon=ICON_WARNING)
    else:
        # Cap the number of rows sent to Alfred.
        for fpath in files[:MAX_RESULTS]:
            self.wf.add_item(os.path.basename(fpath),
                             fpath.replace(os.getenv('HOME'), '~'),
                             uid=fpath,
                             arg=fpath,
                             valid=True,
                             icon=fpath,
                             icontype='fileicon',
                             type='file')

    self.wf.send_feedback()
def main(wf):
    """Script Filter entry point for the UniFi controller workflow.

    Lists configuration commands, keeps the client/device cache fresh
    via a background job, and — given a query — lists matching clients,
    devices, RADIUS users and firewall rules together with the commands
    applicable to each.
    """
    # build argument parser to parse script args and collect their
    # values
    parser = argparse.ArgumentParser()
    # add an optional query and save it to 'query'
    parser.add_argument('query', nargs='?', default=None)
    # parse the script's arguments
    args = parser.parse_args(wf.args)

    log.debug("args are " + str(args))

    # update query post extraction
    query = args.query if args.query else ''
    words = query.split(' ') if query else []

    # list of commands, keyed by item type
    client_commands = {
        'reconnect': {'command': 'reconnect'},
        'block': {'command': 'block'},
        'unblock': {'command': 'unblock'},
    }
    device_commands = {
        'upgrade': {'command': 'upgrade'},
        'reboot': {'command': 'reboot'},
        'clients': {'command': 'clients', 'arguments': ['dummy']},
    }
    radius_commands = {
        'delete': {'command': 'delete'},
    }
    fwrule_commands = {
        'enable': {'command': 'enable'},
        'disable': {'command': 'disable'},
    }
    # extra parameters some commands accept
    command_params = {'clients': {'dummy': 'dummy'}}

    config_commands = {
        'update': {
            'title': 'Update clients and devices',
            'subtitle': 'Update the clients and devices from the controller',
            'autocomplete': 'update',
            'args': ' --update',
            'icon': ICON_SYNC,
            'valid': True
        },
        'unifios': {
            'title': 'Set controller to UniFi OS',
            'subtitle': 'Set the controller type to UniFiOS not the regular UniFi controller',
            'autocomplete': 'unifios',
            'args': ' --unifios',
            'icon': ICON_WEB,
            'valid': True
        },
        'upwd': {
            'title': 'Set username and password',
            'subtitle': 'Set controller username and password',
            'autocomplete': 'upwd',
            'args': ' --username ' + (words[1] if len(words) > 1 else '') +
                    ' --password ' + (words[2] if len(words) > 2 else ''),
            'icon': ICON_WEB,
            'valid': len(words) > 2
        },
        'site': {
            'title': 'Set site',
            'subtitle': 'Set site for controller commands',
            'autocomplete': 'site',
            'args': ' --site ' + (words[1] if len(words) > 1 else ''),
            'icon': ICON_WEB,
            'valid': len(words) > 1
        },
        'freq': {
            'title': 'Set device and client update frequency',
            'subtitle': 'Every (x) seconds, the clients and stats will be updated',
            'autocomplete': 'freq',
            'args': ' --freq ' + (words[1] if len(words) > 1 else ''),
            'icon': ICON_WEB,
            'valid': len(words) > 1
        },
        'ip': {
            'title': 'Set controller IP',
            'subtitle': 'Set IP for controller commands',
            'autocomplete': 'ip',
            'args': ' --ip ' + (words[1] if len(words) > 1 else ''),
            'icon': ICON_WEB,
            'valid': len(words) > 1
        },
        'sort': {
            'title': 'Set sort order for clients',
            'subtitle': 'Set sort order for client commands',
            'autocomplete': 'sort',
            'args': ' --sort ' + (words[1] if len(words) > 1 else ''),
            'icon': ICON_WEB,
            'valid': len(words) > 1
        },
        'reinit': {
            'title': 'Reinitialize the workflow',
            'subtitle': 'CAUTION: this deletes all devices, clients and credentials...',
            'autocomplete': 'reinit',
            'args': ' --reinit',
            'icon': ICON_BURN,
            'valid': True
        },
        'workflow:update': {
            'title': 'Update the workflow',
            'subtitle': 'Updates workflow to latest github version',
            'autocomplete': 'workflow:update',
            'args': '',
            'icon': ICON_SYNC,
            'valid': True
        }
    }

    # add config commands to filter
    add_config_commands(wf, query, config_commands)
    if add_prereq(wf, args):
        wf.send_feedback()
        return 0

    # Cache refresh interval; defaults to a day.
    freq = int(
        wf.settings['unifi_freq']) if 'unifi_freq' in wf.settings else 86400

    # Is cache over `freq` seconds old or non-existent?
    if not wf.cached_data_fresh('device', freq):
        run_in_background(
            'update',
            ['/usr/bin/python', wf.workflowfile('command.py'), '--update'])

    if is_running('update'):
        # Tell Alfred to run the script again every 0.5 seconds
        # until the `update` job is complete (and Alfred is
        # showing results based on the newly-retrieved data)
        wf.rerun = 0.5
        # Add a notification if the script is running
        wf.add_item('Updating clients and devices...', icon=ICON_INFO)
    # If script was passed a query, use it to filter posts
    elif query:
        # retrieve cached clients and devices
        clients = wf.cached_data('client', max_age=0)
        devices = wf.cached_data('device', max_age=0)
        radius = wf.cached_data('radius', max_age=0)
        fwrules = wf.cached_data('fwrule', max_age=0)
        device_map = get_device_map(devices)

        items = [{
            'list': clients,
            'commands': client_commands,
            'id': 'mac',
            'filter': search_key_for_client
        }, {
            'list': devices,
            'commands': device_commands,
            'id': 'mac',
            'filter': search_key_for_device
        }, {
            'list': radius,
            'commands': radius_commands,
            'id': '_id',
            'filter': search_key_for_radius
        }, {
            'list': [] if not fwrules else fwrules,
            'commands': fwrule_commands,
            'id': '_id',
            'filter': search_key_for_fwrule
        }]

        for item in items:
            parts = extract_commands(args, item['list'], item['filter'])
            query = parts['query']
            item_list = get_filtered_items(query, item['list'],
                                           item['filter'])

            # since this is now sure to be a client/device query, fix
            # args if there is a client/device command in there
            command = parts['command'] if 'command' in parts else ''
            params = parts['params'] if 'params' in parts else []

            if item_list:
                if 1 == len(item_list) and (not command or
                                            command not in item['commands']):
                    # Single client only, no command or not complete
                    # command yet so populate with all the commands
                    single = item_list[0]
                    name = single['_display_name']
                    cmd_list = list(
                        filter(lambda x: x.startswith(command),
                               item['commands'].keys()))
                    cmd_list.sort()
                    log.debug('parts.' + single['_type'] + '_command is ' +
                              command)
                    for command in cmd_list:
                        # Don't offer 'upgrade' for devices that aren't
                        # upgradable.
                        if 'upgrade' == command and 'upgradable' in single \
                                and not single['upgradable']:
                            continue
                        wf.add_item(
                            title=name,
                            subtitle=command.capitalize() + ' ' + name,
                            arg=' --' + item['id'] + ' "' +
                            single[item['id']] + '" --command-type ' +
                            single['_type'] + ' --command ' + command +
                            ' --command-params ' + (' '.join(params)),
                            autocomplete=name + ' ' + command,
                            valid=bool(
                                'arguments' not in item['commands'][command]
                                or params),
                            icon=single['_icon'])
                elif 1 == len(item_list) and (command and
                                              command in item['commands'] and
                                              command in command_params):
                    single = item_list[0]
                    if 'clients' == command:
                        # show all the details of clients
                        item_list.extend(
                            sorted(get_device_clients(wf, single),
                                   key=lambda x: x['_display_name']))
                    else:
                        # single client and has command already -
                        # populate with params?
                        name = single['_display_name']
                        param_list = command_params[command][
                            'values'] if 'values' in command_params[
                                command] else []
                        param_start = params[0] if params else ''
                        param_list = list(
                            filter(lambda x: x.startswith(param_start),
                                   param_list))
                        param_list.sort()
                        check_regex = False
                        # BUG FIX: the original read
                        # `command_params[params]['regex']` (a *list* used as
                        # a dict key -> TypeError) and
                        # `parts.client_params[0]` (attribute access on a
                        # dict). Use the current command's regex and the
                        # typed parameter instead, guarding against an
                        # empty `params`.
                        if not param_list and params and \
                                'regex' in command_params[command]:
                            param_list.append(params[0].lower())
                            check_regex = True
                        for param in param_list:
                            wf.add_item(
                                title=name,
                                subtitle='Turn ' + name + ' ' + command +
                                ' ' + param,
                                arg=' --' + item['id'] + ' "' +
                                single[item['id']] + '" --command-type ' +
                                single['_type'] + ' --command ' + command +
                                ' --command-params ' + param,
                                autocomplete=name + ' ' + command,
                                valid=bool(not check_regex or re.match(
                                    command_params[command]['regex'],
                                    param)),
                                icon=single['_icon'])

                # Loop through the returned clients and add an item for
                # each to the list of results for Alfred
                for single in item_list:
                    name = single['_display_name']
                    item_type = single['_type']
                    wf.add_item(
                        title=name,
                        subtitle=get_item_subtitle(single, item_type,
                                                   device_map),
                        arg=' --' + item['id'] + ' "' + single[item['id']] +
                        '" --command-type ' + item_type + ' --command ' +
                        command + ' --command-params ' + (' '.join(params)),
                        autocomplete=name,
                        valid=False,
                        icon=single['_icon'])

    # Send the results to Alfred as XML
    wf.send_feedback()
    return 0
def refresh_list(wf):
    """Kick off a background refresh of the Pocket list.

    Does nothing if a refresh job is already running.
    """
    if is_running("pocket_refresh"):
        return
    refresh_cmd = ["/usr/bin/python", wf.workflowfile("pocket_refresh.py")]
    run_in_background("pocket_refresh", refresh_cmd)
def main(wf):
    """Script Filter entry point for the homebrew-cask workflow.

    Checks that cask is installed and configured, then routes the query
    to the matching cask sub-command (install/search/home/uninstall/
    list/config) or lists the available actions.
    """
    if wf.update_available:
        wf.add_item('An update is available!',
                    autocomplete='workflow:update',
                    valid=False,
                    icon=helpers.get_icon(wf, 'cloud-download'))

    # Thin closures so cached_data can call the checks without args.
    def _cask_installed():
        return cask_installed(wf)

    def _cask_configured():
        return cask_configured(wf)

    if not wf.cached_data('cask_installed', _cask_installed, max_age=0):
        wf.add_item('Cask does not seem to be installed!',
                    'Hit enter to see what you need to do...',
                    arg='open http://caskroom.io/ && exit',
                    valid=True,
                    icon='cask.png')
        wf.add_item('I trust this workflow',
                    'Hit enter to run `brew install caskroom/cask/brew-cask`'
                    ' to install cask...',
                    arg='brew install caskroom/cask/brew-cask',
                    valid=True,
                    icon='cask.png')
        # delete cached file
        wf.cache_data('cask_installed', None)
    elif not wf.cached_data('cask_configured', _cask_configured, max_age=0):
        wf.add_item('Cask does not seem to be properly configured!',
                    'Hit enter to see what you need to do...',
                    arg=OPEN_HELP,
                    valid=True,
                    icon='cask.png')
        # BUG FIX: ACTIONS entries are dicts (indexed with ['name'] on
        # the very next line); `a.name` raised AttributeError.
        config = next(a for a in cask_actions.ACTIONS
                      if a['name'] == 'config')
        wf.add_item(config['name'], config['description'],
                    uid=config['name'],
                    autocomplete=config['autocomplete'],
                    arg=config['arg'],
                    valid=config['valid'],
                    icon=helpers.get_icon(wf, 'chevron-right'))
        query = wf.args[0] if len(wf.args) else None
        if query and query.startswith('config'):
            edit_settings(wf)
        # delete cached file
        wf.cache_data('cask_configured', None)
    else:
        # extract query
        query = wf.args[0] if len(wf.args) else None

        if query and query.startswith('install'):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Install cask',
                            arg='brew cask install %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and any(query.startswith(x) for x in ['search', 'home']):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Open homepage',
                            arg='brew cask home %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('uninstall'):
            for formula in filter_installed_casks(wf, query):
                # installed-cask rows carry extra text; the cask name is
                # the first token
                name = formula.split(' ')[0]
                wf.add_item(formula, 'Uninstall cask',
                            arg='brew cask uninstall %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('list'):
            for formula in filter_installed_casks(wf, query):
                wf.add_item(formula, 'Open homepage',
                            arg='brew cask home %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('config'):
            edit_settings(wf)
            wf.add_item('`settings.json` has been opened.',
                        autocomplete='',
                        icon=helpers.get_icon(wf, 'info'))
        else:
            actions = cask_actions.ACTIONS
            # filter actions by query
            if query:
                actions = wf.filter(query, actions,
                                    key=helpers.search_key_for_action,
                                    match_on=MATCH_SUBSTRING)
            if len(actions) > 0:
                for action in actions:
                    wf.add_item(action['name'], action['description'],
                                uid=action['name'],
                                autocomplete=action['autocomplete'],
                                arg=action['arg'],
                                valid=action['valid'],
                                icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item('No action found for "%s"' % query,
                            autocomplete='',
                            icon=helpers.get_icon(wf, 'info'))

        if len(wf._items) == 0:
            # Nothing matched at all: offer to retry with the query
            # minus its sub-command prefix.
            query_name = query[query.find(' ') + 1:]
            wf.add_item('No formula found for "%s"' % query_name,
                        autocomplete='%s ' % query[:query.find(' ')],
                        icon=helpers.get_icon(wf, 'info'))

    wf.send_feedback()

    # refresh cache
    cmd = ['/usr/bin/python', wf.workflowfile('cask_refresh.py')]
    run_in_background('cask_refresh', cmd)
def main(wf):
    """Script Filter entry point: search FilmAffinity for films.

    Shows suggestion results for the query, lazily fetching per-film
    details and thumbnails via background jobs, plus a fallback
    "Search" row that opens the advanced-search page.
    """
    if DISPLAY_DETAILS or DISPLAY_THUMBNAILS:
        # pyquery is only needed by the detail/thumbnail code paths.
        from pyquery import PyQuery as pq

    args = wf.args
    searchString = ' '.join(args)

    if (len(args) > 0):
        # Suggestion results are cached per query string.
        res = wf.cached_data(searchString, max_age=0)
        if res is None:
            res = get_filmaffinity_suggestions(searchString).json()
            wf.cache_data(searchString, res)

        for result in res['results']:
            if not is_result_type_movie(result):
                continue
            # defaults
            filepath = os.path.join('.', ICON_DEFAULT)
            valid = True
            details = ""
            # quick access
            film_id_str = str(result['id'])

            if DISPLAY_DETAILS:
                details = wf.cached_data(film_id_str, max_age=0)
                if details is None:
                    # Fetch details in the background and poll via rerun.
                    run_in_background(
                        'update_details_' + film_id_str,
                        [
                            '/usr/bin/python',
                            wf.workflowfile('update_details.py'),
                            film_id_str
                        ]
                    )
                    details = "Loading details... "
                    wf.rerun = REFRESH_RATE

            if DISPLAY_THUMBNAILS:
                # NOTE(review): `filepath` may stay None until the
                # thumbnail job populates the cache; add_item accepts
                # that.
                filepath = cache.get(film_id_str)
                if filepath is None:
                    run_in_background(
                        'update_thumbnail_' + film_id_str,
                        ['/usr/bin/python',
                         wf.workflowfile('update_thumbnails.py'),
                         json.dumps(result)]
                    )
                    details += "Loading thumbnail... "
                    wf.rerun = REFRESH_RATE

            wf.add_item(
                title=result['value'].encode('ascii', 'replace'),
                subtitle=details,
                arg=get_url_for_film_id(result['id']),
                valid=valid,
                icon=filepath
            )

    # Default option to search if no result found
    wf.add_item(
        title="Search",
        subtitle="Search filmaffinity for " + " ".join(args),
        arg="https://www.filmaffinity.com/es/advsearch2.php?q=" +
            urllib.quote(searchString),
        valid=True,
        icon=ICON_DEFAULT
    )

    # ---
    # Send output to Alfred. You can only call this once.
    # Well, you *can* call it multiple times, but subsequent calls
    # are ignored (otherwise the JSON sent to Alfred would be invalid).
    # ----
    wf.send_feedback()
def main(wf):
    """Script Filter entry point for the homebrew workflow.

    Routes the query to the matching brew sub-command
    (install/search/uninstall/list/pin/unpin/cat/outdated/info/commands)
    or lists the available actions, then refreshes the cache in the
    background.
    """
    if wf.update_available:
        wf.add_item('An update is available!',
                    autocomplete='workflow:update',
                    valid=False,
                    icon=helpers.get_icon(wf, 'cloud-download'))

    if not helpers.brew_installed():
        helpers.brew_installation_instructions(wf)
    else:
        # extract query
        query = wf.args[0] if len(wf.args) else None

        if (not query and
                len(wf.cached_data('brew_outdated_formulae',
                                   get_outdated_formulae,
                                   max_age=3600)) > 0):
            wf.add_item('Some of your formulae are outdated!',
                        autocomplete='outdated ',
                        valid=False,
                        icon=helpers.get_icon(wf, 'cloud-download'))

        if query and query.startswith('install'):
            for formula in filter_all_formulae(wf, query):
                wf.add_item(formula, 'Install formula.',
                            arg='brew install %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('search'):
            for formula in filter_all_formulae(wf, query):
                wf.add_item(formula, 'Open formula on GitHub.',
                            arg='brew info --github %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('uninstall'):
            for formula in filter_installed_formulae(wf, query):
                # installed-formula rows carry extra text; the name is
                # the first token
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Uninstall formula.',
                            arg='brew uninstall %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('list'):
            for formula in filter_installed_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Open formula on GitHub.',
                            arg='brew info --github %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('pin'):
            for formula in filter_installed_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Pin formula.',
                            arg='brew pin %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
            # delete cached file
            wf.cache_data('brew_pinned_formulae', None)
        elif query and query.startswith('unpin'):
            for formula in filter_pinned_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Unpin formula.',
                            arg='brew unpin %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
            # delete cached file
            wf.cache_data('brew_pinned_formulae', None)
        elif query and query.startswith('cat'):
            for formula in filter_all_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Display the source to this formula.',
                            arg='brew cat %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('outdated'):
            for formula in filter_outdated_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Upgrade formula.',
                            arg='brew upgrade %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('info'):
            wf.add_item(get_info(),
                        autocomplete='',
                        icon=helpers.get_icon(wf, 'info'))
        elif query and query.startswith('commands'):
            for command in get_commands(wf, query):
                wf.add_item(command, 'Run this command.',
                            arg='brew %s' % command,
                            valid=True,
                            icon=helpers.get_icon(wf, 'chevron-right'))
        else:
            # BUG FIX: copy the module-level list before appending.
            # The original appended 'Unpin' directly to
            # brew_actions.ACTIONS, accumulating duplicate entries on
            # every call within the same process.
            actions = list(brew_actions.ACTIONS)
            if len(wf.cached_data('brew_pinned_formulae',
                                  get_pinned_formulae,
                                  max_age=3600)) > 0:
                actions.append({
                    'name': 'Unpin',
                    'description': 'Unpin formula.',
                    'autocomplete': 'unpin ',
                    'arg': '',
                    'valid': False,
                })
            # filter actions by query
            if query:
                actions = wf.filter(query, actions,
                                    key=helpers.search_key_for_action,
                                    match_on=MATCH_SUBSTRING)
            if len(actions) > 0:
                for action in actions:
                    wf.add_item(action['name'], action['description'],
                                uid=action['name'],
                                autocomplete=action['autocomplete'],
                                arg=action['arg'],
                                valid=action['valid'],
                                icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item('No action found for "%s"' % query,
                            autocomplete='',
                            icon=helpers.get_icon(wf, 'info'))

        if len(wf._items) == 0:
            # Nothing matched at all: offer to retry with the query
            # minus its sub-command prefix.
            query_name = query[query.find(' ') + 1:]
            wf.add_item('No formula found for "%s"' % query_name,
                        autocomplete='%s ' % query[:query.find(' ')],
                        icon=helpers.get_icon(wf, 'info'))

    wf.send_feedback()

    # refresh cache
    cmd = ['/usr/bin/python', wf.workflowfile('brew_refresh.py')]
    run_in_background('brew_refresh', cmd)
def main(wf):
    """Run workflow script."""
    # Parse input wf.args
    args = docopt.docopt(__doc__, version=wf.version)
    raw = args.get('<query>') or b''
    query = wf.decode(raw).strip()
    types = args.get('--type')
    log.debug('args=%r', args)

    # -----------------------------------------------------------------
    # Update cached DFX data
    if args.get('--update'):
        return do_update()

    # -----------------------------------------------------------------
    # Script Filter

    # Load cached entries first and start update if they've
    # expired (or don't exist)
    entries = wf.cached_data(DFX_CACHE_KEY, max_age=0)
    stale = not entries or not wf.cached_data_fresh(DFX_CACHE_KEY,
                                                    MAX_CACHE_AGE)
    if stale and not is_running('update'):
        run_in_background(
            'update',
            ['/usr/bin/python', wf.workflowfile('dfx.py'), '--update'])

    # Tell Alfred to re-run the Script Filter if cache is being updated
    if is_running('update'):
        wf.rerun = 1

    # No data in cache yet. Show warning and exit.
    if entries is None:
        wf.add_item('Waiting for Default Folder X data…',
                    'Please try again in a second or two',
                    icon=ICON_WARNING)
        wf.send_feedback()
        return

    # Filter entries by requested type(s)
    if types != ['all']:
        log.debug('Filtering for types : %r', types)
        entries = [entry for entry in entries if entry.type in types]

    # Remove duplicates and non-existent files
    entries = [entry for entry in set(entries)
               if os.path.exists(entry.path)]

    # Filter data against query if there is one
    if query:
        total = len(entries)
        entries = wf.filter(query, entries, lambda e: e.name, min_score=30)
        log.info('%d/%d entries match `%s`', len(entries), total, query)

    # Prepare Alfred results
    if not entries:
        wf.add_item('Nothing found',
                    'Try a different query?',
                    icon=ICON_WARNING)

    show_all = types == ['all']
    for entry in entries:
        title = prefix_name(entry) if show_all else entry.name
        wf.add_item(title,
                    entry.pretty_path,
                    arg=entry.path,
                    uid=entry.path,
                    copytext=entry.path,
                    largetext=entry.path,
                    type='file',
                    valid=True,
                    icon=entry.path,
                    icontype='fileicon')

    wf.send_feedback()
    return 0
def main(wf):
    """Script Filter entry point for the homebrew-cask workflow.

    Checks that cask is installed and configured, then routes the query
    to the matching cask sub-command (install/search/home/uninstall/
    list/outdated/config) or lists the available actions, then
    refreshes the cache in the background.
    """
    if wf.update_available:
        wf.add_item('An update is available!',
                    autocomplete='workflow:update',
                    valid=False,
                    icon=helpers.get_icon(wf, 'cloud-download'))

    # Thin closures so cached_data can call the checks without args.
    def _cask_installed():
        return cask_installed(wf)

    def _cask_configured():
        return cask_configured(wf)

    if not wf.cached_data('cask_installed', _cask_installed, max_age=0):
        wf.add_item('Cask does not seem to be installed!',
                    'Hit enter to see what you need to do...',
                    arg='open http://caskroom.io/ && exit',
                    valid=True,
                    icon='cask.png')
        wf.add_item('I trust this workflow',
                    'Hit enter to run `brew tap caskroom/cask` to get cask...',
                    arg='brew tap caskroom/cask',
                    valid=True,
                    icon='cask.png')
        # delete cached file
        wf.cache_data('cask_installed', None)
    elif not wf.cached_data('cask_configured', _cask_configured, max_age=0):
        wf.add_item('Cask does not seem to be properly configured!',
                    'Hit enter to see what you need to do...',
                    arg=OPEN_HELP,
                    valid=True,
                    icon='cask.png')
        # BUG FIX: ACTIONS entries are dicts (indexed with ['name'] on
        # the very next line); `a.name` raised AttributeError.
        config = next(a for a in cask_actions.ACTIONS
                      if a['name'] == 'config')
        wf.add_item(config['name'], config['description'],
                    uid=config['name'],
                    autocomplete=config['autocomplete'],
                    arg=config['arg'],
                    valid=config['valid'],
                    icon=helpers.get_icon(wf, 'chevron-right'))
        query = wf.args[0] if len(wf.args) else None
        if query and query.startswith('config'):
            edit_settings(wf)
        # delete cached file
        wf.cache_data('cask_configured', None)
    else:
        # extract query
        query = wf.args[0] if len(wf.args) else None

        if (not query and
                len(wf.cached_data('cask_outdated_casks',
                                   get_outdated_casks,
                                   max_age=3600)) > 0):
            wf.add_item('Some of your casks are outdated!',
                        autocomplete='outdated ',
                        valid=False,
                        icon=helpers.get_icon(wf, 'cloud-download'))

        if query and query.startswith('install'):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Install cask',
                            arg='brew cask install %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and any(query.startswith(x) for x in ['search', 'home']):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Open homepage',
                            arg='brew cask home %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('uninstall'):
            for formula in filter_installed_casks(wf, query):
                # installed-cask rows carry extra text; the cask name is
                # the first token
                name = formula.split(' ')[0]
                wf.add_item(formula, 'Uninstall cask',
                            arg='brew cask uninstall %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('list'):
            for formula in filter_installed_casks(wf, query):
                wf.add_item(formula, 'Open homepage',
                            arg='brew cask home %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('outdated'):
            for formula in filter_outdated_casks(wf, query):
                name = formula.split(' ')[0]
                wf.add_item(formula, 'Upgrade cask',
                            arg='brew cask upgrade %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('config'):
            edit_settings(wf)
            wf.add_item('`settings.json` has been opened.',
                        autocomplete='',
                        icon=helpers.get_icon(wf, 'info'))
        else:
            actions = cask_actions.ACTIONS
            # filter actions by query
            if query:
                actions = wf.filter(query, actions,
                                    key=helpers.search_key_for_action,
                                    match_on=MATCH_SUBSTRING)
            if len(actions) > 0:
                for action in actions:
                    wf.add_item(action['name'], action['description'],
                                uid=action['name'],
                                autocomplete=action['autocomplete'],
                                arg=action['arg'],
                                valid=action['valid'],
                                icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item('No action found for "%s"' % query,
                            autocomplete='',
                            icon=helpers.get_icon(wf, 'info'))

        if len(wf._items) == 0:
            # Nothing matched at all: offer to retry with the query
            # minus its sub-command prefix.
            query_name = query[query.find(' ') + 1:]
            wf.add_item('No formula found for "%s"' % query_name,
                        autocomplete='%s ' % query[:query.find(' ')],
                        icon=helpers.get_icon(wf, 'info'))

    wf.send_feedback()

    # refresh cache
    cmd = ['/usr/bin/python', wf.workflowfile('cask_refresh.py')]
    run_in_background('cask_refresh', cmd)
# NOTE(review): module-level tail of a brew Script Filter; `ACTIONS`,
# `WF`, `query` and `get_icon` are defined earlier in the file (outside
# this view).
if len(ACTIONS) > 0:
    # List every (possibly pre-filtered) action as an Alfred row.
    for action in ACTIONS:
        WF.add_item(
            action['name'],
            action['description'],
            uid=action['name'],
            autocomplete=action['autocomplete'],
            arg=action['arg'],
            valid=action['valid'],
            icon=get_icon("chevron-right")
        )
else:
    WF.add_item(
        "No action found for '%s'" % query,
        autocomplete="",
        icon=get_icon("info")
    )
# Fallback when nothing at all was added: suggest retrying with the
# query minus its sub-command prefix.
if len(WF._items) == 0:
    WF.add_item(
        "No formula found for '%s'" % query[query.find(" ") + 1:],
        autocomplete="%s " % query[:query.find(" ")],
        icon=get_icon("info")
    )
WF.send_feedback()
# refresh cache
cmd = ['/usr/bin/python', WF.workflowfile('brew_refresh.py')]
run_in_background('brew_refresh', cmd)
def main(wf):
    """Script Filter entry point for the GitHub gists workflow.

    The first word of the query (`term`) selects a mode — command menu,
    tag/language browsing, starred/forked/public/private listing, or
    full-text search; the remainder (`search`) further filters results.
    """
    arg = wf.args[0].split(" ")
    term = arg[0].strip()
    search = ' '.join(arg[1:])
    show_results = True

    # A GitHub token is required for everything else.
    try:
        get_github_token(wf)
    except workflow.PasswordNotFound:
        wf.add_item("Set a GitHub token with 'gg_set <token>'",
                    icon="icons/error.png")
        wf.send_feedback()
        sys.exit()

    # Get last update date/time.
    last_update = wf.stored_data('last_update')
    if last_update:
        diff = (datetime.now() - last_update).seconds
        # Update daily.
        if diff > 60*60*24:
            if not is_running(u"update_gists"):
                run_in_background('update_gists',
                                  ['/usr/bin/python',
                                   wf.workflowfile('update_gists.py')])
                wf.add_item('Gist update triggered',
                            icon="icons/download.png")

    initial_load()

    # Stored gist data and pre-computed counters.
    gists = wf.stored_data('gists')
    n_starred = wf.stored_data('n_starred')
    n_forked = wf.stored_data('n_forked')
    n_public = wf.stored_data('n_public')
    n_private = wf.stored_data('n_private')
    lang = ""  # multi-file gist language filtering
    tag_set = wf.stored_data('tag_counts')
    lang_set = wf.stored_data('language_counts')
    results = []

    if term == "":
        show_results = False
        # List workflow commands.
        wf.add_item(u"Create gist from clipboard", valid=True,
                    arg='__new_gist', icon="icons/gist.png")
        wf.add_item(u"Starred (%s)" % n_starred, autocomplete=u"\u2605 ",
                    icon="icons/star.png")
        wf.add_item(u"Forked (%s)" % n_forked, autocomplete=u"\u2442 ",
                    icon="icons/forked.png")
        # Forked not presently supported.
        wf.add_item(u"Tags", autocomplete="#", icon="icons/tag.png")
        wf.add_item(u"Language", autocomplete="$",
                    icon="icons/language.png")
        wf.add_item(u"Private (%s)" % n_private, autocomplete="Private ",
                    icon="icons/private.png")
        wf.add_item(u"Public (%s)" % n_public, autocomplete="Public ",
                    icon="icons/public.png")
        if last_update is None:
            last_update_msg = "Never"
        else:
            last_update_msg = last_update.strftime("%Y-%m-%d %H:%M")
        wf.add_item(u"Update (last update: {})".format(last_update_msg),
                    autocomplete="Update", icon="icons/download.png")
    elif term == "Update":
        # Explicit manual refresh.
        if not is_running(u"update_gists"):
            run_in_background('update_gists',
                              ['/usr/bin/python',
                               wf.workflowfile('update_gists.py')])
        wf.add_item('Updating gists...',
                    subtitle=u"Update runs in the backround.",
                    valid=True, icon="icons/download.png")
        wf.send_feedback()
        sys.exit()
    elif term.startswith("#") and term.replace("#", "") not in tag_set \
            and len(search) == 0:
        # Tag browsing: list tags whose name starts with the fragment.
        show_results = False
        for tag, count in tag_set.items():
            if tag.lower().startswith(term.lower().replace("#", "")):
                results.append(0)  # Prevent no results found from being shown.
                wf.add_item("{tag} ({count})".format(**locals()),
                            autocomplete="#" + tag + " ",
                            icon="icons/tag.png")
    elif term.replace("#", "") in tag_set:
        # List gists with given tag.
        tag = term.split(" ")[0].replace("#", "")
        results = [x for x in gists if tag in x["tags"]]
    elif term.startswith("$") and term.replace("$", "") not in lang_set \
            and len(search) == 0:
        # Language browsing: list languages matching the fragment.
        show_results = False
        for lang, count in lang_set.items():
            if lang.lower().startswith(term.lower().replace("$", "")):
                results.append(0)  # Prevent no results found from being shown.
                wf.add_item("{lang} ({count})".format(**locals()),
                            autocomplete="${} ".format(lang),
                            icon="icons/{}.png".format(lang.lower()))
    elif term.replace("$", "") in lang_set:
        # List gists with given language.
        language = term.split(" ")[0].replace("$", "").lower()
        results = [x for x in gists if language == x["language"].lower()]
    elif term == "Public":
        # List public gists.
        results = [x for x in gists if x["public"]]
    elif term == "Private":
        # List private gists.
        results = [x for x in gists if not x["public"]]
    elif term == u"\u2442" or term == "Forked":
        # List forked gists.
        results = [x for x in gists if x["forked"]]
    elif term == u"\u2605" or term == "Starred":
        # List starred gists.
        results = [x for x in gists if x["starred"]]
    else:
        # Perform full-text search in gists.
        search = term + " " + search
        results = gists

    if show_results:
        if search != "":
            # Match against description, tags and filenames.
            results = wf.filter(search, results,
                                lambda x: x["description"] +
                                ' '.join(x["tags"]) +
                                ' '.join(x["files"].keys()))
        for gist in results:
            # Python 2: dict.items() is a list, [0] is the first file.
            filename, f = gist['files'].items()[0]
            filename, extension = os.path.splitext(filename)
            extension = extension.strip(".")
            if lang == "" or f["language"] == lang:
                if len(gist['content']) > 10000:
                    # Too large to inline — mark invalid and show an
                    # error icon instead of the language icon.
                    gist['content'] = u"Gist is too long to copy; use ⌘ key to open URL"
                    wf.add_item(gist['description'],
                                filename + " | " +
                                gist["content"].replace("\n", "")[0:100],
                                arg=gist["html_url"] + "@@@gist@@@" +
                                gist["content"],
                                copytext=gist["url"],
                                valid=False,
                                # NOTE(review): .format() on a literal
                                # with no placeholders is a no-op here.
                                icon="icons/error.png".format(
                                    extension.lower()))
                else:
                    wf.add_item(gist['description'],
                                filename + " | " +
                                gist["content"].replace("\n", "")[0:100],
                                arg=gist["html_url"] + "@@@gist@@@" +
                                gist["content"],
                                copytext=gist["url"],
                                valid=True,
                                icon="icons/{}.png".format(
                                    extension.lower()))

    if len(results) == 0 and term != "":
        wf.add_item("No results found", valid=True,
                    icon="icons/error.png")

    wf.send_feedback()
def main(wf):
    """Run workflow Script Filter.

    Converts the user's query (e.g. ``2.5cm in``) between units, keeping
    currency exchange rates fresh via a background job.

    Args:
        wf (workflow.Workflow): Current Workflow object.

    Returns:
        int: Exit status.

    """
    # No query -> nothing to do; non-zero status signals bad invocation
    if not len(wf.args):
        return 1

    query = wf.args[0]  # .lower()
    log.debug('query : %s', query)

    # Add workflow and user units to unit registry
    register_units()

    # Notify of available update
    if wf.update_available:
        wf.add_item('A newer version is available',
                    'Action this item to download & install the new version',
                    autocomplete='workflow:update',
                    icon=ICON_UPDATE)

    # Load cached data (max_age=0 returns whatever is cached, however old)
    exchange_rates = wf.cached_data(CURRENCY_CACHE_NAME, max_age=0)

    if exchange_rates:  # Add exchange rates to conversion database
        register_exchange_rates(exchange_rates)

    if not wf.cached_data_fresh(CURRENCY_CACHE_NAME, CURRENCY_CACHE_AGE):
        # Update currency rates in a background process so the Script
        # Filter stays responsive
        cmd = ['/usr/bin/python', wf.workflowfile('currency.py')]
        run_in_background('update', cmd)

    if is_running('update'):
        if exchange_rates is None:  # No data cached yet
            wf.add_item('Fetching exchange rates…',
                        'Currency conversions will be momentarily possible',
                        icon=ICON_INFO)
        else:
            wf.add_item('Updating exchange rates…',
                        icon=ICON_INFO)

    error = None
    conversion = None

    try:
        conversion = convert(query, decimal_places=wf.settings.get(
            'decimal_places', 2))
    except UndefinedUnitError as err:
        log.critical('Unknown unit : %s', err.unit_names)
        error = 'Unknown unit : {0}'.format(err.unit_names)
    except DimensionalityError as err:
        log.critical('Invalid conversion : %s', err)
        error = "Can't convert from {0} {1} to {2} {3}".format(
            err.units1, err.dim1, err.units2, err.dim2)
    except ValueError as err:
        log.critical('Invalid query : %s', err)
        error = err.message  # NOTE: `.message` is Python 2 only
    except Exception as err:
        # Catch-all so Alfred shows an error row instead of a traceback
        log.exception('%s : %s', err.__class__, err)
        error = err.message

    # `convert` returning a falsy value with no exception means the input
    # didn't parse as a conversion at all
    if not error and not conversion:
        error = 'Conversion input not understood'

    if error:  # Show error
        wf.add_item(error,
                    'For example: 2.5cm in | 178lb kg | 200m/s mph',
                    valid=False, icon=ICON_WARNING)
    else:  # Show result
        wf.add_item(conversion,
                    valid=True,
                    arg=conversion,
                    copytext=conversion,
                    largetext=conversion,
                    icon='icon.png')

    wf.send_feedback()
    log.debug('finished')
    return 0
def refresh_list():  # pragma: no cover
    """Kick off a background refresh of the Pocket list.

    Does nothing if a refresh job is already in flight.
    """
    if is_running('pocket_refresh'):
        return
    run_in_background(
        'pocket_refresh',
        ['/usr/bin/python', WF.workflowfile('pocket_refresh.py')])
def main(wf):
    """Script Filter: show today's (or an offset day's/week's) calendar events.

    Merges events from an Exchange server and/or Google Calendar depending
    on the user's settings, serving cached data while background jobs
    refresh it.

    Args:
        wf (workflow.Workflow): Current Workflow object.
    """
    from query_exchange import query_exchange_server
    from query_google import query_google_calendar
    import pytz
    from pytz import timezone
    from datetime import timedelta, datetime
    from settings import get_value_from_settings_with_default_boolean, get_value_from_settings_with_default_int
    import time

    # Check to see if updates are available
    if wf.update_available:
        wf.add_item('A newer version is available',
                    '↩ to install update',
                    icon='update-available.png',
                    arg='update',
                    valid=True)

    # Parse and log the query variable
    query = None
    if len(wf.args):
        query = wf.args[0]
    log.debug('query : {!r}'.format(query))

    # Get date offset (first argument is a day offset; 10 is a sentinel
    # meaning "whole current week" — see below)
    args = wf.args
    date_offset = 0
    if len(args) > 0:
        date_offset = int(args[0])

    #Start calculating elapsed time - displayed in results page at end
    action_start_time = time.time()

    # Find out cache time
    cache_time = get_value_from_settings_with_default_int(
        wf, 'cache_time', 9000)

    # Window of the target day, hard-coded to US/Eastern
    # NOTE(review): timezone appears fixed rather than user-configurable — confirm
    morning = timezone("US/Eastern").localize(
        datetime.today().replace(hour=0, minute=0, second=1) +
        timedelta(days=date_offset))
    night = timezone("US/Eastern").localize(
        datetime.today().replace(hour=23, minute=59, second=59) +
        timedelta(days=date_offset))

    def get_week(today):
        # Return (start, end) datetimes spanning Monday..Sunday of `today`'s week
        day_of_week = today.weekday()
        to_beginning_of_week = timedelta(days=day_of_week)
        beginning_day = today - to_beginning_of_week
        to_end_of_week = timedelta(days=6 - day_of_week)
        end_day = today + to_end_of_week
        beginning_of_week = timezone("US/Eastern").localize(
            beginning_day.replace(hour=0, minute=0, second=1))
        end_of_week = timezone("US/Eastern").localize(
            end_day.replace(hour=23, minute=59, second=59))
        return (beginning_of_week, end_of_week)

    # Offset 10 is the "show whole week" mode
    if date_offset == 10:
        morning, night = get_week(datetime.today())

    # Outlook needs a different time format than google it would appear
    start_outlook = morning.astimezone(pytz.utc)
    end_outlook = night.astimezone(pytz.utc)
    start_google = morning.astimezone(pytz.utc).isoformat()
    stop_google = night.astimezone(pytz.utc).isoformat()

    log.info("%s\t\t\t%s", start_google, stop_google)

    def google_wrapper():
        """A wrapper around doing a google query so this can be used with a cache function"""
        return query_google_calendar(wf, start_google, stop_google,
                                     date_offset)

    def exchange_wrapper():
        """Wrapper around outlook query so can be used with caching"""
        return query_exchange_server(wf, start_outlook, end_outlook,
                                     date_offset)

    # Format date text for displays
    date_text = night.strftime("%A %B %d, %Y")
    date_text_numeric = night.strftime("%m/%d/%y")
    if date_offset == 10:
        date_text = morning.strftime("%A") + " – " + night.strftime("%A")
        date_text_numeric = morning.strftime(
            "%m/%d/%y") + " – " + night.strftime("%m/%d/%y")

    # Build Cache Keys (one cache entry per source per offset)
    exchange_cache_key = get_cache_key('exchange', date_offset)
    google_cache_key = get_cache_key('google', date_offset)
    log.debug("-- FG: CacheKey (Google) " + google_cache_key)
    log.debug("-- FG: CacheKey (Exchange) " + exchange_cache_key)

    # Check which calendars to use from settings
    use_exchange = get_value_from_settings_with_default_boolean(
        wf, 'use_exchange', False)
    use_google = get_value_from_settings_with_default_boolean(
        wf, 'use_google', False)
    if not use_google and not use_exchange:
        wf.add_item('Calendars are disabled',
                    'use the tc command to setup a calendar',
                    icon=ICON_INFO,
                    arg="tc")
        wf.send_feedback()
        return

    log.debug("Max Age: %i Cache Age Google: %i Exchange: %i", cache_time,
              wf.cached_data_age(google_cache_key),
              wf.cached_data_age(exchange_cache_key))

    # Check cache status
    google_fresh = wf.cached_data_fresh(google_cache_key, max_age=cache_time)
    exchange_fresh = wf.cached_data_fresh(exchange_cache_key,
                                          max_age=cache_time)

    # Determine whether cache data is being shown or "live" data
    showing_cached_data = True
    if use_google:
        showing_cached_data &= google_fresh
    if use_exchange:
        showing_cached_data &= exchange_fresh

    event_count = 0
    error_state = False

    log.debug('--FG: Use Exchange:' + str(use_exchange))
    log.debug('--FG: Exchange Fresh:' + str(exchange_fresh))

    if use_exchange:
        # If the cache is fresh we need to do a bg refresh - because who knows what has happened
        # If the cache is stale then directly query the exchange server
        if exchange_fresh:
            log.debug('--FG: Loading Exchange events from Cache')
            #Extract the cached events
            exchange_events = wf.cached_data(exchange_cache_key, max_age=0)
            log.debug(str(exchange_events))
            # Run update in the background
            if not is_running('update_exchange'):
                cmd = [
                    '/usr/bin/python',
                    wf.workflowfile('query_exchange.py'),
                    start_outlook.strftime("%Y-%m-%d-%H:%M:%S"),
                    end_outlook.strftime("%Y-%m-%d-%H:%M:%S"),
                    str(date_offset)
                ]
                log.debug('--FG: Launching background exchange update')
                # Fire off in the background the script to update things! :)
                run_in_background('update_exchange', cmd)
            else:
                log.debug('--FG: Background exchange update already running')
        else:
            log.debug('--FG: Directly querying Exchange')
            # Directly query the exchange server
            exchange_events = wf.cached_data(exchange_cache_key,
                                             exchange_wrapper,
                                             max_age=cache_time)
            if exchange_events is None:
                log.debug('--FG: Exchange Events returned NONE!!!')
                error_state = True
                wf.add_item('Unable to connect to exchange server',
                            'Check your connectivity or NTLM auth settings',
                            icon='img/disclaimer.png')
                exchange_events = []
            else:
                event_count += len(exchange_events)
    else:
        exchange_events = []

    if use_google:
        # check for any enabled calendars
        no_google_calendars = True
        for key in wf.settings:
            if 'calendar' in key:
                no_google_calendars = False
        if no_google_calendars:
            wf.add_item('Not showing any Google Calendars',
                        'use the tcgc command to select calendars')
        # If the cache is "fresh" we need to do a bg refresh - because we are loading from the cache
        # If the cache isnt fresh - the server will be queried directly anyways
        if google_fresh:
            # Extract cached events
            google_events = wf.cached_data(google_cache_key, max_age=0)
            # Run update in background
            if not is_running('update_google'):
                cmd = [
                    '/usr/bin/python',
                    wf.workflowfile('query_google.py'), start_google,
                    stop_google,
                    str(date_offset)
                ]
                # Fire off in the background the script to update things! :)
                run_in_background('update_google', cmd)
        else:
            # Directly run event update - ignore background stuff
            google_events = wf.cached_data(google_cache_key, google_wrapper,
                                           max_age=cache_time)
            if google_events is None:
                error_state = True
                # Probe google.com to distinguish auth errors from network errors
                import httplib
                conn = httplib.HTTPConnection("www.google.com")
                try:
                    conn.request("HEAD", "/")
                    wf.add_item(
                        'Unable to connect to Google',
                        'Authorization or Connection error - use tc to reauthorize',
                        icon='img/disclaimer.png')
                except Exception as ex:
                    wf.logger.info("Unable to connect to google")
                    template = "An exception of type {0} occured. Arguments:\n{1!r}"
                    message = template.format(type(ex).__name__, ex.args)
                    wf.logger.info(message)
                    import traceback
                    wf.logger.info(traceback.format_exc())
                    wf.add_item('Unable to connect to Google',
                                'Check your internet connection or proxy settings',
                                icon='img/disclaimer.png')
                google_events = []
            else:
                for e in google_events:
                    wf.logger.debug(' '.join([
                        '**FG --- Google:',
                        str(e.get(u'start').get(u'dateTime', 'All Day')),
                        e.get('summary', 'NoTitle')
                    ]))
                event_count += len(google_events)
    else:
        google_events = []

    # Build Header
    icon_file = 'img/date_span.png'
    if use_exchange and use_google:
        icon_file = 'img/iconBoth.png'
    elif use_exchange:
        icon_file = 'img/iconOutlook.png'
    elif use_google:
        icon_file = 'img/iconGoogle.png'

    # Fire off some log data
    log.info("Event Count Google: " + str(len(google_events)))
    log.info("Event Count Exchange: " + str(len(exchange_events)))
    log.info("Event Count Total: " + str(event_count))

    # NOTE(review): event_count is only incremented on direct (non-cached)
    # queries above, so "Calendar is empty" may show for cached data — confirm
    if event_count == 0:
        if error_state is False:
            wf.add_item('Calendar is empty', date_text, icon=icon_file)
        wf.send_feedback()
        return

    first_menu_entry = wf.add_item(date_text, date_text_numeric,
                                   icon=icon_file)

    # Process events
    EventProcessor(wf).process_events(exchange_events, google_events)

    # Update elapsed time counter
    action_elapsed_time = time.time() - action_start_time

    if showing_cached_data:
        first_menu_entry.subtitle += " - Cached Data"
    #else:
    #first_menu_entry.subtitle += " query time: " + "{:.1f}".format(
    #    action_elapsed_time) + " seconds"

    wf.send_feedback()
def main(wf):
    """Script Filter: search the local GitLab project index.

    Builds the FTS index in the background on first run, then runs a
    full-text MATCH query against it and emits one Alfred item per project.

    Args:
        wf (workflow.Workflow): Current Workflow object.
    """
    query = wf.args[0]

    if wf.update_available:
        # Add a notification to top of Script Filter results
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update',
                    icon=ICON_INFO)

    index_exists = True

    # Create index if it doesn't exist
    if not os.path.exists(INDEX_DB):
        index_exists = False
        run_in_background('indexer', ['/usr/bin/python', 'update_index.py'])

    # Can't search without an index. Inform user and exit
    if not index_exists:
        wf.add_item('Creating search index…', 'Please wait a moment',
                    icon=ICON_INFO)
        wf.send_feedback()
        return

    # Inform user of update in case they're looking for something
    # recently added (and it isn't there)
    if is_running('indexer'):
        wf.add_item('Updating search index…',
                    'Fresher results will be available shortly',
                    icon=ICON_INFO)

    # Search!
    start = time()
    db = sqlite3.connect(INDEX_DB)

    # Set ranking function with weightings for each column.
    # `make_rank_function` must be called with a tuple/list of the same
    # length as the number of columns "selected" from the database.
    # In this case, `url` is set to 0 because we don't want to search on
    # that column
    # id, name, name_with_namespace, web_url, ssh_url_to_repo, http_url_to_repo
    db.create_function('rank', 1, make_rank_func((0, 1.0, 0.9, 0, 0, 0)))
    cursor = db.cursor()
    try:
        # BUGFIX: bind the user's query as a parameter instead of
        # interpolating it into the SQL text. The original
        # `MATCH "%s*" % query` broke on queries containing quotes and
        # allowed SQL injection; `?` binding fixes both.
        sql = """
            SELECT id, name, name_with_namespace, web_url, ssh_url_to_repo, http_url_to_repo
            FROM
            (
                SELECT rank(matchinfo(gitlab)) AS r, id, name, name_with_namespace, web_url, ssh_url_to_repo, http_url_to_repo
                FROM gitlab
                WHERE gitlab MATCH ?
            )
            ORDER BY r DESC
            LIMIT 100
            """
        logger.info(sql)
        # Append '*' for prefix matching, as the original query did
        cursor.execute(sql, (query + '*',))
        results = cursor.fetchall()
    except sqlite3.OperationalError as err:
        # If the query is invalid, show an appropriate warning and exit
        if b'malformed MATCH' in err.message:
            wf.add_item('Invalid query', icon=ICON_WARNING)
            wf.send_feedback()
            return
        # Otherwise raise error for Workflow to catch and log
        else:
            raise err

    if not results:
        wf.add_item('No matches', 'Try a different query', icon=ICON_WARNING)

    logger.info('{} results for `{}` in {:0.3f} seconds'.format(
        len(results), query, time() - start))

    # Output results to Alfred
    for (_, name, name_with_namespace, web_url, ssh_url_to_repo,
         http_url_to_repo) in results:
        it = wf.add_item(
            title=name_with_namespace,
            subtitle=web_url,
            arg=name,
            valid=True,
            icon=ICON_WEB,
        )
        # Expose the URLs as variables so downstream actions can use them
        it.setvar("web_url", web_url)
        it.setvar("ssh_url_to_repo", ssh_url_to_repo)
        it.setvar("http_url_to_repo", http_url_to_repo)

        mod = it.add_modifier(key="shift", subtitle="Show git clone options",
                              valid=True)
        mod.setvar("cmd", "CMD_CLONE")

        mod = it.add_modifier(
            key="fn",
            subtitle="Show some related links",
            arg=name,
            valid=True,
        )
        mod.setvar("cmd", "CMD_RELATED_LINKS")
        mod.setvar("project_url", web_url)

    wf.send_feedback()
def main(wf):
    """Script Filter: list Sublime Text projects matching the user's query.

    Also dispatches auxiliary actions (config/help/update/edit) when the
    ``--action`` flag is supplied.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--action', dest='action',
                        help='Perform action')
    parser.add_argument('query', help='What to filter projects by',
                        nargs='?', default=None)
    args = parser.parse_args(wf.args)
    query = args.query

    # Auxiliary actions short-circuit the normal search flow
    if args.action:
        dispatch = {
            'config': do_config,
            'help': do_help,
            'update': do_update,
            'edit': do_edit_config,
        }
        handler = dispatch.get(args.action)
        if handler is None:
            raise ValueError('Unknown action : {}'.format(args.action))
        return handler(wf)

    # Write default settings on first run
    if not os.path.exists(wf.settings_path):
        for key, value in DEFAULT_SETTINGS.items():
            wf.settings[key] = value

    # Show if update is available
    if wf.update_available:
        wf.add_item('Newer version available',
                    'Action this item to install the update.',
                    autocomplete='workflow:update',
                    icon=ICON_SYNC)

    # Cached project list (may be stale; refreshed below if so)
    projects = wf.cached_data('projects', None, max_age=0)

    # Kick off a background refresh when the cache is out-of-date
    if not wf.cached_data_fresh('projects', max_age=MDFIND_INTERVAL):
        run_in_background('update',
                          ['/usr/bin/python', wf.workflowfile('update_cache.py')])

    # Tell the user to wait if there's nothing to show yet
    if is_running('update') and not projects:
        wf.add_item('Generating list of Sublime Text projects',
                    valid=False, icon=ICON_INFO)

    if query and projects:
        matches = wf.filter(query, projects,
                            key=lambda p: os.path.basename(p),
                            include_score=DEBUG,
                            min_score=20)
        if DEBUG:
            # Log scores and matching rules, then strip them back off
            for path, score, rule in matches:
                log.debug('{:0.2f} [{}] {}'.format(score, rule, path))
            matches = [t[0] for t in matches]
        projects = matches

    if not projects:
        wf.add_item('No matches found', 'Try a different query',
                    icon=ICON_WARNING)
    else:
        home = os.getenv('HOME')
        for path in projects:
            title = os.path.basename(path).replace('.sublime-project', '')
            wf.add_item(title,
                        path.replace(home, '~'),
                        arg=path,
                        uid=path,
                        valid=True,
                        icon='document_icon.png')

    wf.send_feedback()
def main(wf):
    """Run workflow Script Filter.

    Args:
        wf (workflow.Workflow): Current Workflow object.

    Returns:
        int: Exit status.
    """
    if not wf.args:
        return 1

    query = wf.args[0]  # .lower()
    log.debug('query : %s', query)

    # Make workflow- and user-defined units known to the converter
    register_units()

    # Update notice goes first so it tops the result list
    if wf.update_available:
        wf.add_item('A newer version is available',
                    'Action this item to download & install the new version',
                    autocomplete='workflow:update',
                    icon=ICON_UPDATE)

    # Whatever rates are cached, however old
    rates = wf.cached_data(CURRENCY_CACHE_NAME, max_age=0)
    if rates:
        register_exchange_rates(rates)

    # Refresh stale rates in a background job
    if not wf.cached_data_fresh(CURRENCY_CACHE_NAME, CURRENCY_CACHE_AGE):
        run_in_background(
            'update',
            ['/usr/bin/python', wf.workflowfile('currency.py')])

    if is_running('update'):
        if rates is None:
            # Nothing cached at all yet
            wf.add_item('Fetching exchange rates…',
                        'Currency conversions will be momentarily possible',
                        icon=ICON_INFO)
        else:
            wf.add_item('Updating exchange rates…', icon=ICON_INFO)

    err_msg = None
    result = None

    try:
        result = convert(query,
                         decimal_places=wf.settings.get('decimal_places', 2))
    except UndefinedUnitError as err:
        log.critical('Unknown unit : %s', err.unit_names)
        err_msg = 'Unknown unit : {0}'.format(err.unit_names)
    except DimensionalityError as err:
        log.critical('Invalid conversion : %s', err)
        err_msg = "Can't convert from {0} {1} to {2} {3}".format(
            err.units1, err.dim1, err.units2, err.dim2)
    except ValueError as err:
        log.critical('Invalid query : %s', err)
        err_msg = err.message
    except Exception as err:
        # Last-resort handler: show the message instead of a traceback
        log.exception('%s : %s', err.__class__, err)
        err_msg = err.message

    # No exception but no result either -> input didn't parse
    if not err_msg and not result:
        err_msg = 'Conversion input not understood'

    if err_msg:
        wf.add_item(err_msg,
                    'For example: 2.5cm in | 178lb kg | 200m/s mph',
                    valid=False,
                    icon=ICON_WARNING)
    else:
        wf.add_item(result,
                    valid=True,
                    arg=result,
                    copytext=result,
                    largetext=result,
                    icon='icon.png')

    wf.send_feedback()
    log.debug('finished')
    return 0
# NOTE(review): this appears to be the tail of a Homebrew Script Filter —
# confirm the enclosing scope and indentation against the full file.
# Narrow the action list to those matching the query.
ACTIONS = WF.filter(query, ACTIONS, key=search_key_for_action,
                    match_on=MATCH_SUBSTRING)
if len(ACTIONS) > 0:
    for action in ACTIONS:
        WF.add_item(action['name'],
                    action['description'],
                    uid=action['name'],
                    autocomplete=action['autocomplete'],
                    arg=action['arg'],
                    valid=action['valid'],
                    icon=get_icon("chevron-right"))
else:
    WF.add_item("No action found for '%s'" % query,
                autocomplete="",
                icon=get_icon("info"))
# Nothing matched at all: fall back to a "no formula" message built from
# the text after the first space in the query.
if len(WF._items) == 0:
    WF.add_item("No formula found for '%s'" % query[query.find(" ") + 1:],
                autocomplete="%s " % query[:query.find(" ")],
                icon=get_icon("info"))
WF.send_feedback()
# refresh cache
cmd = ['/usr/bin/python', WF.workflowfile('brew_refresh.py')]
run_in_background('brew_refresh', cmd)
def list_devices(args):
    """Script Filter: list adb devices and WiFi connect/disconnect actions.

    Args:
        args (list): Script Filter arguments; args[0] (if present) is the
            user's query, used both to filter devices and to trigger the
            connect/disconnect/restart sub-commands.
    """
    arg = args[0] if args else ''
    # Ask adb for attached devices; sed drops the header line
    devices = run_script(adb_path + " devices -l | sed -n '1!p' | tr -s ' '")
    devices = devices.rstrip().split('\n')
    items, wifiDevices = get_device_items(arg, devices)

    # Persist WiFi-connected devices so they can be offered again later
    if wifiDevices:
        run_in_background("update_wifi_history", [
            '/usr/bin/python',
            wf.workflowfile('scripts/update_wifi_history.py'), 'add',
            pickle.dumps(wifiDevices)
        ])
        log.error("Save history wifi devices : count : {0}".format(
            len(wifiDevices)))

    for item in items:
        name = item.get('serial')
        log.debug(arg + " " + name)
        if arg == '' or arg.lower() in name.lower():
            it = wf.add_item(title=item.title,
                             uid=item.title,
                             autocomplete=('', item.autocomplete)[item.valid],
                             valid=item.valid,
                             arg=item.arg,
                             subtitle=item.subtitle)
            it.setvar('status', item.get("status"))
            it.setvar('full_info', item.subtitle)
            if item.valid:
                it.setvar('device_api', item.get('device_api'))
                it.setvar("serial", name)
                it.setvar('name', item.get('name'))
                # Devices not already connected over :5555 get their WLAN IP
                # looked up for the cmd-modifier display
                if item.subtitle and not re.match(regexIp + ":5555", name):
                    cmd_ip = adb_path + ' -s ' + name + " shell ip -f inet addr show wlan0 | grep inet | tr -s ' ' | awk '{print $2}'"
                    ip = run_script(cmd_ip)
                    if '/' in ip and re.match(regexIp, ip.split('/')[0]):
                        it.setvar("ip", ip.strip('\n'))
                        it.add_modifier("cmd", subtitle=ip)
                if item.get("build_number"):
                    it.add_modifier("alt", subtitle=item.get("build_number"))
                # last func
                # Emulators get a stable hash-based history tag instead of
                # their transient serial
                if name.startswith("emulator-"):
                    name = hashlib.md5(item.subtitle).hexdigest()
                it.setvar("his_tag", name)
                lastFuncs = wf.cached_data('last_func:' + name, max_age=0)
                if lastFuncs and len(lastFuncs) > 0:
                    log.debug(lastFuncs)
                    last_func = lastFuncs[len(lastFuncs) - 1]
                    mod = it.add_modifier(
                        "ctrl",
                        subtitle="run last command {}".format(last_func))
                    mod.setvar("last_func", last_func)
                mod = it.add_modifier("fn",
                                      subtitle="show command history",
                                      arg="cmd_history")
                mod.setvar("function", "cmd_history")

    # CONNECT
    if arg and ("connect ".startswith(arg.lower())
                or re.match(regexConnect, arg)):
        # Local IP + netmask of the active en* interface
        localIpWithMask = run_script(
            'ifconfig | grep -A 1 "en" | grep broadcast | cut -d " " -f 2,4 | tr "\\n" " "'
        )
        localIp = localIpWithMask.split(" ")[0]
        # Hex netmask: each 'f' nibble contributes 4 prefix bits
        rawMask = localIpWithMask.split(" ")[1].count("f") * 4
        targetIp = arg[8:]  # text after "connect "
        if localIp:
            history = wf.stored_data("wifi_history")
            counter = 0
            valid = True if re.match(
                "^" + regexIp + "(:|:5|:55|:555|:5555)?$", targetIp) else False
            if valid:
                subtitle = "adb connect " + targetIp if targetIp else ''
                it = wf.add_item(title="Connect over WiFi",
                                 valid=valid,
                                 arg="adb_connect",
                                 subtitle=subtitle)
                m = it.add_modifier('cmd',
                                    subtitle="Remove all connection histories",
                                    arg='adb_connect_remove')
                m.setvar('extra', "all")
                it.setvar("ip", targetIp.strip('\n'))
            if history:
                historyWifiDevices = pickle.loads(history)
                currentDevices = []
                for item in items:
                    currentDevices.append(item.title.strip())
                for historyWifiDevice in historyWifiDevices:
                    # Only offer history entries that are not already connected
                    if not historyWifiDevice.title in currentDevices:
                        deviceIp = historyWifiDevice.title.split(":")[0]
                        # Only offer devices on the same subnet as this machine
                        same_network = False
                        if hasattr(historyWifiDevice,
                                   'mask') and historyWifiDevice.mask:
                            same_network = ipaddress.ip_network(
                                u'%s/%d' % (localIp, rawMask),
                                False) == ipaddress.ip_network(
                                    u'%s/%s' %
                                    (deviceIp, historyWifiDevice.mask), False)
                        else:
                            same_network = ipaddress.ip_network(
                                u'%s/%d' % (localIp, rawMask),
                                False) == ipaddress.ip_network(
                                    u'%s/%d' % (deviceIp, rawMask), False)
                        if not same_network:
                            continue
                        if arg and historyWifiDevice.title.find(
                                targetIp) == -1:
                            continue
                        log.debug("history item title " +
                                  historyWifiDevice.title)
                        title = "Connect over WiFi"
                        if historyWifiDevice.subtitle:
                            title = "Connect " + historyWifiDevice.subtitle.split(
                                '- ', 1)[1].split(', ',
                                                  1)[0] + " over WiFi"
                        it = wf.add_item(
                            title=title,
                            valid=True,
                            arg="adb_connect",
                            autocomplete="connect " + historyWifiDevice.title,
                            subtitle=historyWifiDevice.title,
                            uid=(historyWifiDevice.title, "")[valid])
                        it.setvar("ip", historyWifiDevice.title)
                        it.add_modifier(
                            'cmd',
                            'Remove connection history with {0}'.format(
                                historyWifiDevice.title),
                            arg='adb_connect_remove')
                        it.add_modifier('alt', historyWifiDevice.subtitle)
                        counter += 1
            # No complete IP typed and no history matched: show a
            # placeholder "Connect over WiFi" row
            if not valid and counter == 0:
                if (not targetIp or re.match(regexIpInput, targetIp)):
                    subtitle = "adb connect " + targetIp if targetIp else ''
                    if not targetIp:
                        it = wf.add_item(title="Connect over WiFi",
                                         valid=False,
                                         arg="adb_connect",
                                         autocomplete="connect ",
                                         subtitle=subtitle)
                    else:
                        it = wf.add_item(title="Connect over WiFi",
                                         valid=False,
                                         arg="adb_connect",
                                         subtitle=subtitle)

    # DISCONNECT
    if wifiDevices:
        log.debug(wifiDevices[0].title)
    if arg and ("disconnect ".startswith(arg.lower())
                or re.match("^disconnect .*", arg)):
        targetIp = arg[11:]  # text after "disconnect "
        if wifiDevices:
            for wifiDevice in wifiDevices:
                it = wf.add_item(title="Disconnect from WiFi",
                                 uid=wifiDevice.title,
                                 valid=True,
                                 arg="adb_disconnect",
                                 autocomplete="disconnect ",
                                 subtitle=wifiDevice.title)
                ip = wifiDevice.title
                if "[OFFLINE]" in ip:
                    ip = ip.split(" ")[0]
                it.setvar("ip", ip)
        elif targetIp:
            it = wf.add_item(title="Disconnect from WiFi",
                             uid="adb_disconnect",
                             valid=True,
                             arg="adb_disconnect",
                             autocomplete="disconnect ",
                             subtitle="adb disconnect " + targetIp)
            it.setvar("ip", targetIp)

    # Offer "Restart adb" when asked for, or when no devices were found
    # and the query isn't a connect/disconnect command
    if arg and ("restart".startswith(arg.lower())
                or "kill-server".startswith(arg.lower())
                or "start-server".startswith(arg.lower())) or (
                    len(items) == 0 and
                    (len(arg) == 0 or
                     (not arg.lower().startswith("connect")
                      and not arg.lower().startswith("disconnect")))):
        wf.add_item(title="Restart adb",
                    valid=True,
                    arg="restart_adb",
                    uid="restart_adb")
def refresh(wf):
    """Start the Hacker News refresh job in the background.

    No-op when a refresh is already in progress.
    """
    if is_running('hackernews_refresh'):
        return
    run_in_background(
        'hackernews_refresh',
        ['/usr/bin/python', wf.workflowfile('hackernews_refresh.py')])
def main(wf):
    """Script Filter: look up a word in the RAE dictionary.

    While the query has no trailing space, suggestions are listed; a
    trailing space marks the word as selected and its full definitions
    are shown (details are fetched by a background job and cached).

    Args:
        wf (workflow.Workflow): Current Workflow object.
    """
    if DISPLAY_DETAILS:
        from pyquery import PyQuery as pq

    input_word = ' '.join(wf.args)
    if not is_valid_args(input_word):
        wf.add_item(
            title="Invalid arguments.",
            subtitle=
            "Type just one word. Insert space or select one to see definitions.",
            valid=False,
        )
        wf.send_feedback()
        return

    #word is selected when space is at end
    if re.search(r"\s$", input_word) is not None and EXTENDED_DEFINITION:
        selectedWord = input_word.strip()
        searchString = None
    else:
        selectedWord = None
        searchString = input_word.strip()

    if searchString is not None:
        # Search and show the list of suggestions
        res = wf.cached_data(searchString, max_age=0)
        if res is None:
            res = get_rae_suggestions(searchString).json()
            wf.cache_data(searchString, res)
        for word in res:
            # defaults
            details_preview_str = ""
            details_full_str = ""
            # BUGFIX: `details` must be bound even when DISPLAY_DETAILS is
            # off; the original raised NameError in the
            # `can_automcomplete` expression below in that case.
            details = None
            if DISPLAY_DETAILS:
                details = wf.cached_data('details_' + word, max_age=0)
                if details is None:
                    # Fetch details in the background and re-run the filter
                    run_in_background('update_details_' + word, [
                        '/usr/bin/python',
                        wf.workflowfile('update_details.py'), word
                    ])
                    details_preview_str = "Loading details... "
                    wf.rerun = REFRESH_RATE
                else:
                    details_preview_str = get_details_preview(details)
                    details_full_str = get_details_full(details)
            can_automcomplete = details is not None and not is_details_empty(
                details)
            wf.add_item(icon=None,
                        title=word,
                        quicklookurl=get_url_for_word(word)
                        if can_automcomplete else None,
                        autocomplete=word + " " if can_automcomplete else None,
                        subtitle=details_preview_str,
                        copytext=details_full_str,
                        largetext=details_full_str)
    elif selectedWord is not None and EXTENDED_DEFINITION:
        # Show full definitions for the selected word
        details = wf.cached_data("details_" + selectedWord, max_age=0)
        if details is None:
            # BUGFIX: the background updater must receive the selected
            # word; the original passed `word`, which is undefined in this
            # branch (NameError).
            run_in_background('update_details_' + selectedWord, [
                '/usr/bin/python',
                wf.workflowfile('update_details.py'), selectedWord
            ])
            details = []
            wf.rerun = REFRESH_RATE
        for detail in details:
            for item in get_menaing_strings_from_details(detail):
                wf.add_item(
                    icon=None,
                    title=item,
                )

    # Default option to search if no result found
    wf.add_item(
        icon="icon-search.png",
        title="Search on web",
        subtitle="Open search RAE for " + input_word,
        arg=get_url_for_word(searchString or selectedWord),
        valid=True,
    )

    # ---
    # Send output to Alfred. You can only call this once.
    # Well, you *can* call it multiple times, but subsequent calls
    # are ignored (otherwise the JSON sent to Alfred would be invalid).
    # ----
    wf.send_feedback()
def refresh_list(cls):
    """Spawn subprocess to populate response from Google Drive"""
    if is_running('drive_refresh'):
        return
    run_in_background(
        'drive_refresh',
        ['/usr/bin/python', wf.workflowfile('drive_refresh.py')])
def main(wf):
    """Run the workflow.

    Script Filter entry point for the git-repos workflow: either performs
    an alternate action (open repo in app, edit settings, force update) or
    searches the cached repo list and sends matches to Alfred.

    Args:
        wf (workflow.Workflow): Current Workflow object.

    Returns:
        int: Exit status.
    """
    from docopt import docopt

    # Handle arguments
    # ------------------------------------------------------------------
    args = docopt(__doc__, wf.args)
    log.debug('args: {}'.format(args))
    query = args.get('<query>')
    path = args.get('<path>')
    appnum = args.get('<appnum>')
    if appnum:
        appnum = int(appnum)

    # apps[1..6]: user-configured applications; lists are copied so the
    # settings object isn't mutated later
    apps = {}
    for i in range(1, 7):
        app = wf.settings.get('app_{}'.format(i))
        if isinstance(app, list):
            app = app[:]
        apps[i] = app
    if not apps.get(1):
        # Things will break if this isn't set
        apps[1] = 'Finder'

    # Alternate actions
    # ------------------------------------------------------------------
    if appnum and path:
        # Open `path` with the app(s) configured under slot `appnum`
        app = apps.get(appnum)
        if app is None:
            print('App {} not set. Use `reposettings`'.format(appnum))
            return 0
        else:
            if not isinstance(app, list):
                app = [app]
            for a in app:
                if a in BROWSERS:
                    # Browsers get the repo's remote URL, not the local path
                    url = subprocess.check_output(
                        ['git', 'config', 'remote.origin.url'], cwd=path)
                    # Strip embedded credentials from HTTPS remotes
                    url = re.sub(r'https://.+@', 'https://', url).strip()
                    url = convertSSHUrlIntoHttpsIfNeeded(url)
                    subprocess.call(['open', '-a', a, url])
                else:
                    subprocess.call(['open', '-a', a, path])
            return 0
    elif args.get('--edit'):
        subprocess.call(['open', wf.settings_path])
        return 0
    elif args.get('--update'):
        run_in_background('update', ['/usr/bin/python', 'update.py'])
        return 0

    # Notify user if update is available
    # ------------------------------------------------------------------
    if wf.update_available:
        v = wf.cached_data('__workflow_update_status', max_age=0)['version']
        log.info('Newer version ({}) is available'.format(v))
        wf.add_item('Version {} is available'.format(v),
                    'Use `workflow:update` to install',
                    icon=ICON_UPDATE)

    # Try to search git repos
    # ------------------------------------------------------------------
    search_dirs = wf.settings.get('search_dirs', [])

    # Can't do anything with no directories to search
    if not search_dirs or wf.settings == DEFAULT_SETTINGS:
        wf.add_item("You haven't configured any directories to search",
                    'Use `reposettings` to edit your configuration',
                    icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Load data, update if necessary
    if not wf.cached_data_fresh('repos', max_age=UPDATE_INTERVAL):
        run_in_background('update', ['/usr/bin/python', 'update.py'])

    repos = wf.cached_data('repos', max_age=0)

    # Show appropriate warning/info message if there are no repos to
    # show/search
    # ------------------------------------------------------------------
    if not repos:
        if is_running('update'):
            wf.add_item('Initialising database of repos…',
                        'Should be done in a few seconds',
                        icon=ICON_INFO)
        else:
            wf.add_item('No known git repos',
                        'Check your settings with `reposettings`',
                        icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Check if cached data is old version (a list of plain path strings
    # rather than (name, path) tuples)
    # ------------------------------------------------------------------
    if isinstance(repos[0], basestring):
        run_in_background('update', ['/usr/bin/python', 'update.py'])
        wf.add_item('Updating format of repos database…',
                    'Should be done in a few seconds',
                    icon=ICON_INFO)
        wf.send_feedback()
        return 0

    # Perform search and send results to Alfred
    # ------------------------------------------------------------------
    # Set modifier subtitles (slots 2..6 map to cmd/alt/ctrl/shift/fn)
    modifier_subtitles = {}
    i = 2
    for mod in ('cmd', 'alt', 'ctrl', 'shift', 'fn'):
        if not apps.get(i):
            modifier_subtitles[mod] = (
                'App {} not set. Use `reposettings` to set it.'.format(i))
        else:
            modifier_subtitles[mod] = 'Open in {}'.format(
                join_english(apps[i]))
        i += 1

    # Total number of repos
    repo_count = len(repos)

    if query:
        repos = wf.filter(query, repos, lambda t: t[0], min_score=30)

    log.debug('{}/{} repos matching `{}`'.format(len(repos), repo_count,
                                                 query))

    if not repos:
        wf.add_item('No matching repos found', icon=ICON_WARNING)

    for name, path in repos:
        log.debug('`{}` @ `{}`'.format(name, path))
        subtitle = (path.replace(os.environ['HOME'], '~') +
                    ' // Open in {}'.format(join_english(apps[1])))
        wf.add_item(name,
                    subtitle,
                    modifier_subtitles=modifier_subtitles,
                    arg=path,
                    uid=path,
                    valid=True,
                    type='file',
                    icon='icon.png')

    wf.send_feedback()
    return 0
def main(wf):
    """Alfred script filter entry point for the Homebrew (formulae) commands.

    Routes the user's query (``wf.args[0]``) to the matching sub-command,
    adds one Alfred result item per matching formula/action, sends the
    feedback XML, and finally kicks off a background cache refresh.

    :param wf: ``workflow.Workflow`` instance supplied by the launcher.
    """
    if wf.update_available:
        wf.add_item('An update is available!',
                    autocomplete='workflow:update',
                    valid=False,
                    icon=helpers.get_icon(wf, 'cloud-download'))

    # Nag about outdated formulae; the list is cached for an hour.
    if len(wf.cached_data('brew_outdated_formulae',
                          get_outdated_formulae,
                          max_age=3600)) > 0:
        wf.add_item('Some of your formulae are outdated!',
                    autocomplete='outdated ',
                    valid=False,
                    icon=helpers.get_icon(wf, 'cloud-download'))

    if not brew_installed():
        wf.add_item('Brew does not seem to be installed!',
                    'Hit enter to see what you need to do...',
                    arg='open http://brew.sh/#install && exit',
                    valid=True)
        wf.add_item('I trust this workflow',
                    'Hit enter to install brew...',
                    arg='ruby -e "$(curl -fsSL %s)"' % BREW_INSTALL_URL,
                    valid=True)
    else:
        # extract query
        query = wf.args[0] if len(wf.args) else None

        if query and query.startswith('install'):
            for formula in filter_all_formulae(wf, query):
                wf.add_item(formula, 'Install formula.',
                            arg='brew install %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('search'):
            for formula in filter_all_formulae(wf, query):
                wf.add_item(formula, 'Open formula on GitHub.',
                            arg=get_open_link_command(formula),
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('uninstall'):
            for formula in filter_installed_formulae(wf, query):
                # installed-formulae entries may carry a version suffix;
                # the first whitespace-separated token is the name.
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Uninstall formula.',
                            arg='brew uninstall %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('list'):
            for formula in filter_installed_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Open formula on GitHub.',
                            arg=get_open_link_command(name),
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('pin'):
            for formula in filter_installed_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Pin formula.',
                            arg='brew pin %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
            # delete cached file so the next run reflects pin changes
            wf.cache_data('brew_pinned_formulae', None)
        elif query and query.startswith('unpin'):
            for formula in filter_pinned_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Unpin formula.',
                            arg='brew unpin %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
            # delete cached file so the next run reflects unpin changes
            wf.cache_data('brew_pinned_formulae', None)
        elif query and query.startswith('cat'):
            for formula in filter_all_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Display the source to this formula.',
                            arg='brew cat %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('outdated'):
            for formula in filter_outdated_formulae(wf, query):
                name = formula.rsplit()[0]
                # FIX: arg was 'upgrade %s', missing the 'brew' prefix
                # every other branch emits a full shell command.
                wf.add_item(formula, 'Update formula.',
                            arg='brew upgrade %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('info'):
            wf.add_item(get_info(),
                        autocomplete='',
                        icon=helpers.get_icon(wf, 'info'))
        elif query and query.startswith('commands'):
            for command in get_commands(wf, query):
                wf.add_item(command, 'Run this command.',
                            arg='brew %s' % command,
                            valid=True,
                            icon=helpers.get_icon(wf, 'chevron-right'))
        else:
            # FIX: copy the list — appending to brew_actions.ACTIONS
            # itself mutated the shared module-level list, adding a
            # duplicate 'Unpin' entry on every invocation.
            actions = list(brew_actions.ACTIONS)
            if len(wf.cached_data('brew_pinned_formulae',
                                  get_pinned_formulae,
                                  max_age=3600)) > 0:
                actions.append({
                    'name': 'Unpin',
                    'description': 'Unpin formula.',
                    'autocomplete': 'unpin ',
                    'arg': '',
                    'valid': False,
                })
            # filter actions by query
            if query:
                actions = wf.filter(query, actions,
                                    key=helpers.search_key_for_action,
                                    match_on=MATCH_SUBSTRING)

            if len(actions) > 0:
                for action in actions:
                    wf.add_item(action['name'], action['description'],
                                uid=action['name'],
                                autocomplete=action['autocomplete'],
                                arg=action['arg'],
                                valid=action['valid'],
                                icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item('No action found for "%s"' % query,
                            autocomplete='',
                            icon=helpers.get_icon(wf, 'info'))

        if len(wf._items) == 0:
            # Nothing matched at all: offer a "not found" item.
            # FIX: partition() handles a space-less query; the previous
            # query.find(' ') returned -1 and silently chopped the last
            # character off the autocomplete string.
            command, sep, query_name = query.partition(' ')
            wf.add_item('No formula found for "%s"' % (query_name or query),
                        autocomplete='%s ' % command,
                        icon=helpers.get_icon(wf, 'info'))

    wf.send_feedback()

    # refresh cache in the background so the next query is up to date
    cmd = ['/usr/bin/python', wf.workflowfile('brew_refresh.py')]
    run_in_background('brew_refresh', cmd)
def main(wf):
    """Alfred script filter entry point for the Homebrew Cask commands.

    Routes the user's query (``wf.args[0]``) to the matching cask
    sub-command, adds the Alfred result items, sends the feedback XML,
    and finally kicks off a background cache refresh.

    :param wf: ``workflow.Workflow`` instance supplied by the launcher.
    """
    if wf.update_available:
        wf.add_item('An update is available!',
                    autocomplete='workflow:update',
                    valid=False,
                    icon=helpers.get_icon(wf, 'cloud-download'))

    if not helpers.brew_installed():
        helpers.brew_installation_instructions(wf)
    else:
        # extract query
        query = wf.args[0] if len(wf.args) else None

        # Only nag about outdated casks on the top-level (empty) query;
        # the outdated list is cached for an hour.
        if (not query and len(
                wf.cached_data('cask_outdated_casks',
                               get_outdated_casks,
                               max_age=3600)) > 0):
            wf.add_item('Some of your casks are outdated!',
                        autocomplete='outdated ',
                        valid=False,
                        icon=helpers.get_icon(wf, 'cloud-download'))

        if query and query.startswith('install'):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Install cask',
                            arg='brew cask install %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and any(query.startswith(x) for x in ['search', 'home']):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Open homepage',
                            arg='brew cask home %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('uninstall'):
            for formula in filter_installed_casks(wf, query):
                # installed-cask entries may carry extra text after the
                # name; the first space-separated token is the cask name.
                name = formula.split(' ')[0]
                wf.add_item(formula, 'Uninstall cask',
                            arg='brew cask uninstall %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('list'):
            for formula in filter_installed_casks(wf, query):
                wf.add_item(formula, 'Open homepage',
                            arg='brew cask home %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('outdated'):
            for formula in filter_outdated_casks(wf, query):
                name = formula.split(' ')[0]
                wf.add_item(formula, 'Upgrade cask',
                            arg='brew cask upgrade %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('config'):
            edit_settings(wf)
            wf.add_item('`settings.json` has been opened.',
                        autocomplete='',
                        icon=helpers.get_icon(wf, 'info'))
        else:
            actions = cask_actions.ACTIONS
            # filter actions by query
            if query:
                actions = wf.filter(query, actions,
                                    key=helpers.search_key_for_action,
                                    match_on=MATCH_SUBSTRING)

            if len(actions) > 0:
                for action in actions:
                    wf.add_item(action['name'], action['description'],
                                uid=action['name'],
                                autocomplete=action['autocomplete'],
                                arg=action['arg'],
                                valid=action['valid'],
                                icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item('No action found for "%s"' % query,
                            autocomplete='',
                            icon=helpers.get_icon(wf, 'info'))

        if len(wf._items) == 0:
            # Nothing matched at all: offer a "not found" item.
            # FIX: partition() handles a space-less query; the previous
            # query.find(' ') returned -1 and silently chopped the last
            # character off the autocomplete string.
            command, sep, query_name = query.partition(' ')
            wf.add_item('No formula found for "%s"' % (query_name or query),
                        autocomplete='%s ' % command,
                        icon=helpers.get_icon(wf, 'info'))

    wf.send_feedback()

    # refresh cache in the background so the next query is up to date
    cmd = ['/usr/bin/python', wf.workflowfile('cask_refresh.py')]
    run_in_background('cask_refresh', cmd)