    def test_kill(self):
        """Kill"""
        assert kill('test') is False
        cmd = ['sleep', '1']
        assert run_in_background('test', cmd) == 0
        assert is_running('test')
        assert kill('test') is True
        sleep(0.3)  # give process time to exit
        assert not is_running('test')
    def test_workflow_update_methods(self):
        """Workflow update methods"""

        def fake(wf):
            return

        Workflow().reset()
        # Initialise with outdated version
        wf = Workflow(update_settings={
            'github_slug': 'deanishe/alfred-workflow-dummy',
            'version': 'v2.0',
            'frequency': 1,
        })

        wf.run(fake)

        # Check won't have completed yet
        self.assertFalse(wf.update_available)

        # wait for background update check
        self.assertTrue(is_running('__workflow_update_check'))
        while is_running('__workflow_update_check'):
            time.sleep(0.05)
        time.sleep(1)

        # There *is* a newer version in the repo
        self.assertTrue(wf.update_available)

        # Mock out subprocess and check the correct command is run
        c = WorkflowMock()
        with c:
            self.assertTrue(wf.start_update())
        # wf.logger.debug('start_update : {}'.format(c.cmd))
        self.assertEqual(c.cmd[0], '/usr/bin/python')
        self.assertEqual(c.cmd[2], '__workflow_update_install')

        # Grab the updated release data, then reset the cache
        update_info = wf.cached_data('__workflow_update_status')

        wf.reset()

        # Initialise with latest available release
        wf = Workflow(update_settings={
            'github_slug': 'deanishe/alfred-workflow-dummy',
            'version': update_info['version'],
        })

        wf.run(fake)

        # Wait for background update check
        self.assertTrue(is_running('__workflow_update_check'))
        while is_running('__workflow_update_check'):
            time.sleep(0.05)

        # Remote version is same as the one we passed to Workflow
        self.assertFalse(wf.update_available)
        self.assertFalse(wf.start_update())
    def test_run_in_background(self):
        """Run in background"""
        cmd = ['sleep', '1']
        run_in_background('test', cmd)
        sleep(0.5)
        self.assertTrue(is_running('test'))
        self.assertTrue(os.path.exists(self._pidfile('test')))
        self.assertEqual(run_in_background('test', cmd), None)
        sleep(0.6)
        self.assertFalse(is_running('test'))
        self.assertFalse(os.path.exists(self._pidfile('test')))
    def test_run_in_background(self):
        """Run in background"""
        cmd = ['sleep', '1']
        assert run_in_background('test', cmd) == 0
        assert is_running('test')
        assert os.path.exists(_pidfile('test'))
        # Already running
        assert run_in_background('test', cmd) is None
        sleep(1.1)  # wait for job to finish
        assert not is_running('test')
        assert not os.path.exists(_pidfile('test'))
    def test_existing_process(self):
        """Existing process"""
        _write_pidfile('test', os.getpid())
        try:
            assert is_running('test')
            assert os.path.exists(_pidfile('test'))
        finally:
            _delete_pidfile('test')
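
The tests above exercise workflow.background directly; the examples that follow all use the same small API from Script Filter scripts. A minimal sketch of that recurring pattern (the 'update' job name, the update_data.py script and the 600-second cache age are illustrative placeholders, and wf.rerun assumes the Alfred 3+ Workflow3 class):

from workflow import Workflow3, ICON_INFO
from workflow.background import run_in_background, is_running


def main(wf):
    # Load whatever the background job cached last time (may be None)
    data = wf.cached_data('data', None, max_age=0)

    # Start the updater unless the cache is fresh or a job is already running
    if not wf.cached_data_fresh('data', max_age=600) and not is_running('update'):
        cmd = ['/usr/bin/python', wf.workflowfile('update_data.py')]
        run_in_background('update', cmd)

    if is_running('update'):
        # Ask Alfred to call this Script Filter again while the job runs
        wf.rerun = 0.5
        wf.add_item(u'Refreshing data…', icon=ICON_INFO)

    for value in data or []:
        wf.add_item(value, valid=True, arg=value)

    wf.send_feedback()


if __name__ == '__main__':
    wf = Workflow3()
    wf.run(main)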
Example #6
def main(wf):
    """Run workflow Script Filter.

    Args:
        wf (workflow.Workflow): Current Workflow object.

    """
    global ureg
    ureg = UnitRegistry(wf.decode(DEFAULT_UNIT_DEFINITIONS))
    ureg.default_format = 'P'

    wf.magic_arguments['appkey'] = open_currency_instructions

    if not len(wf.args):
        return

    query = wf.args[0]  # .lower()
    log.debug('query : %s', query)

    handle_update(wf)
    # Create data files if necessary
    bootstrap(wf)

    # Add workflow and user units to unit registry
    register_units()

    # Notify of available update
    if wf.update_available:
        wf.add_item('A newer version is available',
                    'Action this item to download & install the new version',
                    autocomplete='workflow:update',
                    icon=ICON_UPDATE)

    # Load cached data
    exchange_rates = wf.cached_data(CURRENCY_CACHE_NAME, max_age=0)

    if exchange_rates:  # Add exchange rates to conversion database
        register_exchange_rates(exchange_rates)

    if not wf.cached_data_fresh(CURRENCY_CACHE_NAME, CURRENCY_CACHE_AGE):
        # Update currency rates
        cmd = ['/usr/bin/python', wf.workflowfile('currency.py')]
        run_in_background('update', cmd)
        wf.rerun = 0.5

    if is_running('update'):
        wf.rerun = 0.5
        if exchange_rates is None:  # No data cached yet
            wf.add_item(u'Fetching exchange rates…',
                        'Currency conversions will be momentarily possible',
                        icon=ICON_INFO)
        else:
            wf.add_item(u'Updating exchange rates…',
                        icon=ICON_INFO)

    return convert(query)
Example #7
def generate_all_icons():
    """Callback for magic argument"""
    if background.is_running('icongen'):
        return 'Generation already in progress.'

    background.run_in_background(
        'icongen',
        ['/usr/bin/python', wf.workflowfile('icons.py')]
    )
    return 'Starting icon generation. This may take up to 15 minutes.'
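
The docstring above marks this as a magic-argument callback; wiring it up would look roughly like the registration shown in Example #6 (the 'icongen' key name here is an illustrative choice):

wf.magic_arguments['icongen'] = generate_all_icons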
Example #8
    def run(self, wf):
        from docopt import docopt
        self.wf = wf

        args = docopt(__usage__, argv=self.wf.args)

        self.workflows = self.wf.cached_data('workflows', None,
                                             max_age=0)

        if self.workflows:
            log.debug('%d workflows in cache', len(self.workflows))
        else:
            log.debug('0 workflows in cache')

        # Start update scripts if cached data is too old
        if not self.wf.cached_data_fresh('workflows',
                                         max_age=CACHE_MAXAGE):
            self._update()

        # Notify user if cache is being updated
        if is_running('update'):
            self.wf.add_item('Updating from Packal…',
                             'Please try again in a second or two',
                             valid=False, icon=ICON_INFO)

        if not self.workflows:
            self.wf.send_feedback()
            return 0

        self.workflows.sort(key=itemgetter('updated'), reverse=True)

        log.debug('%d workflows found in cache', len(self.workflows))

        self.query = args.get('<query>')
        self.author = args.get('<author>')
        self.bundleid = args.get('<bundleid>')

        for key in ('tags', 'categories', 'versions', 'authors'):
            if args.get(key):
                return self._two_stage_filter(key)

        if args.get('author-workflows'):
            return self.do_author_workflows()
        elif args.get('workflows'):
            return self._filter_workflows(self.workflows, self.query)
        elif args.get('update'):
            return self.do_update()
        elif args.get('open'):
            return self.do_open()
        elif args.get('status'):
            return self.do_status()
        elif args.get('ignore-author'):
            return self.do_ignore_author()
        else:
            raise ValueError('No action specified')
Example #9
def do_import_search(wf, url):
    """Parse URL for OpenSearch config."""
    ctx = Context(wf)
    # ICON_IMPORT = ctx.icon('import')
    ICONS_PROGRESS = [
        ctx.icon('progress-1'),
        ctx.icon('progress-2'),
        ctx.icon('progress-3'),
        ctx.icon('progress-4'),
    ]

    data = wf.cached_data('import', None, max_age=0, session=True)
    if data:
        error = data['error']
        search = data['search']
        # Clear cache data
        wf.cache_data('import', None, session=True)
        wf.cache_data('import-status', None, session=True)

        if error:
            wf.add_item(error, icon=ICON_ERROR)
            wf.send_feedback()
            return

        it = wf.add_item(u'Add "{}"'.format(search['name']),
                         u'↩ to add search',
                         valid=True,
                         icon=search['icon'])

        for k, v in search.items():
            it.setvar(k, v)

    else:
        progress = int(os.getenv('progress') or '0')
        i = progress % len(ICONS_PROGRESS)
        picon = ICONS_PROGRESS[i]
        log.debug('progress=%d, i=%d, picon=%s', progress, i, picon)
        wf.setvar('progress', progress + 1)
        if not is_running('import'):
            run_in_background('import', ['./searchio', 'fetch', url])

        status = wf.cached_data('import-status', None, max_age=0, session=True)
        title = status or u'Fetching OpenSearch Configuration …'

        wf.rerun = 0.2
        wf.add_item(title,
                    u'Results will be shown momentarily',
                    icon=picon)

    wf.send_feedback()
Example #10
def main(wf):
    args = Args(wf.args)

    actions = wf.cached_data('actions', None, max_age=0)

    if wf.update_available:
        # Add a notification to top of Script Filter results
        wf.add_item(u'New version available',
                    u'Action this item to install the update',
                    autocomplete='workflow:update',
                    icon=ICON_INFO)

    if not wf.cached_data_fresh('actions', max_age=CACHE_MAX_AGE):
        cmd = ['/usr/bin/python', wf.workflowfile('alfredhelp.py'), '--scan']
        run_in_background(u'scan', cmd)

    if is_running(u'scan'):
        wf.add_item(
            title=u'Scanning Alfred workflows...',
            valid=False,
            icon=ICON_INFO
        )

    if args.show_keywords and actions:
        if args.query:
            actions = wf.filter(args.query, actions, key=search_key)

        for action in actions:
            argument = action.keyword
            if action.add_space:
                argument += u' '
            wf.add_item(
                title=u'{keyword} - {title}'.format(keyword=action.keyword, title=action.title),
                subtitle=action.subtitle,
                icon=action.icon,
                arg=argument,
                valid=True
            )

    elif args.scan:
        def get_posts():
            return scan(path.join(wf.alfred_env['preferences'], 'workflows'))

        wf.cached_data('actions', get_posts, max_age=CACHE_MAX_AGE)
        scan(path.join(wf.alfred_env['preferences'], 'workflows'))

    wf.send_feedback()
    return 0
Example #11
def main(workflow):
    parser = argparse.ArgumentParser()
    parser.add_argument("--set-token", dest="api_token", nargs="?", default=None)
    parser.add_argument("query", nargs="?", default=None)
    arguments = parser.parse_args(workflow.args)

    if arguments.api_token:
        workflow.save_password("hipchat_api_token", arguments.api_token)
        return 0

    try:
        api_token = workflow.get_password("hipchat_api_token")
    except PasswordNotFound:
        workflow.add_item(
            "No API key set.", "Please use hcsettoken to set your Hipchat API token.", valid=False, icon=ICON_WARNING
        )
        workflow.send_feedback()

        return 0

    users = workflow.cached_data("users", None, max_age=0)

    if not workflow.cached_data_fresh("users", max_age=60):  # 60s
        cmd = ["/usr/bin/python", workflow.workflowfile("update.py")]
        run_in_background("update", cmd)

    if is_running("update"):
        logger.debug("updating users")

    if arguments.query and users:
        users = workflow.filter(arguments.query, users, key=search_terms_for_user, min_score=20)

    if not users:
        workflow.add_item("Whoops, no users found", icon=ICON_WARNING)
        workflow.send_feedback()

        return 0

    for user in users:
        status_icon = get_status_icon(user["presence"]["show"] if user["presence"] else None)
        workflow.add_item(
            user["name"], user["email"], arg=actionTemplate.format(user["mention_name"]), valid=True, icon=status_icon
        )

    workflow.send_feedback()
    logger.debug("returned {} results".format(len(users)))
Example #12
def output_query_vault_results(ap):
    """
    A simple helper function to manage outputting LastPass vault items to an
    Alfred Script Filter. Uses an ArgParser instance to figure out which
    command and argument to use.
    """
    # Notify the user if the cache is being updated:
    if is_running('update'):
        log.debug('Currently running update; notifying user...')
        wf.add_item(
            'Getting new data from LastPass.',
            'This should only take a few moments, so hang tight.',
            valid=False,
            icon='icons/loading.png',
            uid='1'
        )

    results = util.search_vault_for_query(ap.arg)
    if results:
        for result in results:
            wf.add_item(
                result['hostname'],
                'TAB to explore; ' +
                'ENTER to copy password; ' +
                '\u2318-Click to copy username; ' +
                'Shift-Click to open URL',
                modifier_subtitles={
                    'cmd': '\u2318-Click to copy username.',
                    'shift': 'Shift-Click to open the URL.'
                },
                valid=True,
                arg='{} {}***{}'.format(ap.command,
                                        result['hostname'],
                                        result['url']),
                autocomplete='view-details {}'.format(result['hostname']),
            )
    else:
        wf.add_item(
            'No items matching "{}".'.format(ap.arg),
            'View the `lpvs` debug log for more information.',
            valid=False,
            icon='icons/warning.png'
        )

    wf.send_feedback()
    return
    def run(self, wf):
        """Run workflow."""
        self.wf = wf
        wf.args  # check for magic args
        self.keyword = self.wf.settings.get('keyword', DEFAULT_KEYWORD)
        args = docopt(__doc__)
        log.debug(u'args : %r', args)

        # Open Help file
        if args.get('--helpfile'):
            return self.do_open_help_file()

        # Perform search
        self.query = wf.decode(args.get('<query>') or '')

        # List Smart Folders with custom keywords
        if args.get('--config'):
            return self.do_configure_folders()

        # Was a configured folder passed?
        folder = wf.decode(args.get('--folder') or '')

        # Get list of Smart Folders. Update in background if necessary.
        self.folders = self.wf.cached_data('folders', max_age=0)
        if self.folders is None:
            self.folders = []

        # Update folder list if it's old
        if not self.wf.cached_data_fresh('folders', CACHE_AGE_FOLDERS):
            log.debug('updating list of Smart Folders in background...')
            run_in_background('folders',
                              ['/usr/bin/python',
                               self.wf.workflowfile('cache.py')])

        if is_running('folders'):
            self.wf.rerun = 0.5

        # Has a specific folder been specified?
        if folder:
            return self.do_search_in_folder(folder)

        return self.do_search_folders()
def build_wf_entry(wf):

    if is_running('bg'):
        # Update status
        phase = wf.stored_data('phase')
        log.info('PHASE: %s', phase)
        if phase != 'done':
            wf.rerun = 0.5
        if phase == 'downloading':
            pct = None
            while pct is None:
                try:
                    pct = wf.stored_data('download_percent')
                except Exception:
                    pass

            progress = wf.stored_data('download_progress')
            file = wf.stored_data('download_file')

            # wf.rerun = 0.5

            title = "Downloading {} [{}]".format(file, progress)
            subtitle = string_from_percent(pct) + " " + str(pct) + "%"
            wf.add_item(title, subtitle=subtitle)

        if phase == 'processing':

            try:
                emoji_count = wf.stored_data('emoji_count')
                subtitle = "Parsed {} emoji".format(emoji_count)
            except Exception:
                subtitle = "Parsed ... emoji"

            title = 'Parsing Emoji'
            wf.add_item(title, subtitle=subtitle)

    else:
        # Background job finished
        wf.add_item("Complete", subtitle='Emoji searching is now ready to use',
                    icon="images/Checkmark.png")
Example #15
def main(wf):
    """Run the workflow."""
    from docopt import docopt

    # Handle arguments
    # ------------------------------------------------------------------
    args = docopt(__doc__, wf.args)

    log.debug('args: {}'.format(args))

    query = args.get('<query>')
    path = args.get('<path>')
    appnum = args.get('<appnum>')
    if appnum:
        appnum = int(appnum)

    apps = {}
    for i in range(1, 7):
        app = wf.settings.get('app_{}'.format(i))
        if isinstance(app, list):
            app = app[:]
        apps[i] = app

    if not apps.get(1):  # Things will break if this isn't set
        apps[1] = 'Finder'

    # Alternate actions
    # ------------------------------------------------------------------
    if appnum and path:
        app = apps.get(appnum)
        if app is None:
            print('App {} not set. Use `reposettings`'.format(appnum))
            return 0
        else:
            if not isinstance(app, list):
                app = [app]
            for a in app:
                if a in BROWSERS:
                    url = subprocess.check_output(
                        ['git', 'config', 'remote.origin.url'],
                        cwd=path
                    )
                    url = re.sub(r'https://.+@', 'https://', url).strip()
                    url = convertSSHUrlIntoHttpsIfNeeded(url)

                    subprocess.call(['open', '-a', a, url])

                else:
                    subprocess.call(['open', '-a', a, path])
            return 0

    elif args.get('--edit'):
        subprocess.call(['open', wf.settings_path])
        return 0

    elif args.get('--update'):
        run_in_background('update', ['/usr/bin/python', 'update.py'])
        return 0

    # Notify user if update is available
    # ------------------------------------------------------------------
    if wf.update_available:
        v = wf.cached_data('__workflow_update_status', max_age=0)['version']
        log.info('Newer version ({}) is available'.format(v))
        wf.add_item('Version {} is available'.format(v),
                    'Use `workflow:update` to install',
                    icon=ICON_UPDATE)

    # Try to search git repos
    # ------------------------------------------------------------------
    search_dirs = wf.settings.get('search_dirs', [])

    # Can't do anything with no directories to search
    if not search_dirs or wf.settings == DEFAULT_SETTINGS:
        wf.add_item("You haven't configured any directories to search",
                    'Use `reposettings` to edit your configuration',
                    icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Load data, update if necessary
    if not wf.cached_data_fresh('repos', max_age=UPDATE_INTERVAL):
        run_in_background('update', ['/usr/bin/python', 'update.py'])

    repos = wf.cached_data('repos', max_age=0)

    # Show appropriate warning/info message if there are no repos to
    # show/search
    # ------------------------------------------------------------------
    if not repos:
        if is_running('update'):
            wf.add_item('Initialising database of repos…',
                        'Should be done in a few seconds',
                        icon=ICON_INFO)
        else:
            wf.add_item('No known git repos',
                        'Check your settings with `reposettings`',
                        icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Check if cached data is old version
    # ------------------------------------------------------------------
    if isinstance(repos[0], basestring):
        run_in_background('update', ['/usr/bin/python', 'update.py'])
        wf.add_item('Updating format of repos database…',
                    'Should be done in a few seconds',
                    icon=ICON_INFO)
        wf.send_feedback()
        return 0

    # Perform search and send results to Alfred
    # ------------------------------------------------------------------

    # Set modifier subtitles
    modifier_subtitles = {}
    i = 2
    for mod in ('cmd', 'alt', 'ctrl', 'shift', 'fn'):
        if not apps.get(i):
            modifier_subtitles[mod] = (
                'App {} not set. Use `reposettings` to set it.'.format(i))
        else:
            modifier_subtitles[mod] = 'Open in {}'.format(join_english(apps[i]))
        i += 1

    # Total number of repos
    repo_count = len(repos)

    if query:
        repos = wf.filter(query, repos,
                          lambda t: t[0],
                          min_score=30)
        log.debug('{}/{} repos matching `{}`'.format(len(repos),
                                                     repo_count,
                                                     query))

    if not repos:
        wf.add_item('No matching repos found', icon=ICON_WARNING)

    for name, path in repos:
        log.debug('`{}` @ `{}`'.format(name, path))
        subtitle = (path.replace(os.environ['HOME'], '~') +
                    '  //  Open in {}'.format(join_english(apps[1])))
        wf.add_item(name,
                    subtitle,
                    modifier_subtitles=modifier_subtitles,
                    arg=path,
                    uid=path,
                    valid=True,
                    type='file',
                    icon='icon.png')

    wf.send_feedback()
    return 0
Example #16
def main(wf):
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--installed", action="store_true")
    parser.add_argument("-a", "--all", action="store_true")
    parser.add_argument("-d", "--dlc", action="store_true")
    parser.add_argument("-u", "--user", action="store", type=str, default=None)
    if sys.stdin.isatty():
        (options, query) = parser.parse_known_args(wf.args)
    else:
        (options, query) = parser.parse_known_args(shlex.split(wf.args[0]))

    if options.user:
        wf.settings['steam_user'] = options.user

    if not wf.settings.get('steam_user', None):
        wf.add_item('No steam username set',
                    'Use -u [username] to set your username!',
                    valid=False,
                    icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    query = " ".join(map(lambda x: pipes.quote(x), query))

    if not wf.cached_data_fresh('games_%s' % wf.settings.get('steam_user'), 12 * 60 * 60):
        run_in_background('update', ['/usr/bin/python', wf.workflowfile('background.py')])

    if is_running('update'):
        wf.add_item('Updating Steam games...', icon=ICON_INFO)

    games = wf.cached_data('games_%s' % wf.settings.get('steam_user'), None, max_age=0)

    if games:
        if query:
            games = wf.filter(query, games, key=lambda x: x['name'])

        if not options.dlc:
            games = filter(lambda x: 'dlc' not in x or not x['dlc'], games)

        if not options.all:
            games = filter(lambda x: 'mac' not in x or x['mac'], games)

        if options.installed:
            # This will check the primary steam install location
            # Need to figure out how to parse the libraryfolders.vdf filter to get
            # additional locations.  Appear to be '\t"\d+"\t+"[PATH TO FOLDER]"' lines...
            games = filter(lambda x: exists(expanduser("%s/appmanifest_%s.acf" % (DEFAULT_STEAM_LIBRARY, x['id']))),
                           games)

        for game in games:
            icon = wf.cachefile("icon_%s.png" % game['id'])
            wf.add_item(game['name'],
                        uid=game['id'],
                        valid=True,
                        arg=game['id'],
                        icon=icon if exists(icon) else None)

        if not games:
            wf.add_item('No %smatches%s' % ('installed ' if options.installed else '',
                                            ' for %s' % query if query else ''),
                        'Try searching with --all for all games or --dlc to see DLC',
                        icon=ICON_INFO)
    else:
        if not is_running('update'):
            wf.add_item('Unable to retrieve your games from Steam',
                        'Is your custom URL really "%s"?' % wf.settings.get('steam_user'))
    wf.send_feedback()
    return 0
Example #17
def refresh_list():  # pragma: no cover
    if not is_running('pocket_refresh'):
        cmd = ['/usr/bin/python', WF.workflowfile('pocket_refresh.py')]
        run_in_background('pocket_refresh', cmd)
def main(wf):

    ####################################################################
    # Get init data
    ####################################################################
    parser = argparse.ArgumentParser()
    parser.add_argument('query', nargs='?', default=None)
    args = parser.parse_args(wf.args)

    password = util.getPassword(wf)
    url = util.getURL(wf)

    ####################################################################
    # Fetch all data in background if the query is empty
    ####################################################################
    if args.query is None:
        if not is_running('update'):
            cmd = ['/usr/bin/python', wf.workflowfile('update_data.py')]
            run_in_background('update', cmd)

    data = util.getData(wf, 'light')

    def search_key_for_post(post):
        """Generate a string search key for a post"""
        item = data[post]

        elements = []
        elements.append(item['name'])  # title of post
        elements.append(item['friendly_name'])
        elements.append(item['entity_id'])

        return u' '.join(elements)

    def wrapper():
        return data

    posts = wf.cached_data('allLights', wrapper, max_age=1)

    # If script was passed a query, use it to filter posts
    if args.query and data:
        posts = wf.filter(args.query,
                          data,
                          key=search_key_for_post,
                          min_score=20)

    if not posts:  # we have no data to show, so show a warning and stop
        wf.add_item('No posts found', icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    if wf.update_available:
        # Add a notification to top of Script Filter results
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update',
                    icon=ICON_INFO)

    # Loop through the returned posts and add an item for each to
    # the list of results for Alfred
    #for post in posts:

    for post in posts:
        #sys.stderr.write("post : " + str(post) + '\n')
        item = data[post]
        subtitle = ''

        if item['state'] != 'unavailable':

            if item['state'] == 'on':
                ICON = icon.getIcon('light-on', 'w')
                subtitle = '<Enter> to turn OFF light'
            else:
                ICON = icon.getIcon('light-off', 'b')
                subtitle = '<Enter> to turn ON light'

            wf.add_item(
                title=item['friendly_name'],
                subtitle=subtitle,
                valid=True,
                arg=item['entity_id'],
                #arg='https://browall.duckdns.org:8123/api/services/automation/trigger?api_password=DrumNBass1111',
                icon=ICON)

    # Send the results to Alfred as XML
    wf.send_feedback()
    return 0
Example #19
def filter(args):
    wf = workflow()
    prefs = Preferences.current_prefs()
    command = args[1] if len(args) > 1 else None
    duration_info = _duration_info(prefs.upcoming_duration)

    if command == 'duration':
        selected_duration = prefs.upcoming_duration

        # Apply selected duration option
        if len(args) > 2:
            try:
                selected_duration = int(args[2])
            except ValueError:
                pass

        duration_info = _duration_info(selected_duration)

        if 'custom' in duration_info:
            wf.add_item(duration_info['label'], duration_info['subtitle'], arg='-upcoming duration %d' % (duration_info['days']), valid=True, icon=icons.RADIO_SELECTED if duration_info['days'] == selected_duration else icons.RADIO)

        for duration_info in _durations:
            wf.add_item(duration_info['label'], duration_info['subtitle'], arg='-upcoming duration %d' % (duration_info['days']), valid=True, icon=icons.RADIO_SELECTED if duration_info['days'] == selected_duration else icons.RADIO)

        wf.add_item('Back', autocomplete='-upcoming ', icon=icons.BACK)

        return

    # Force a sync if not done recently or join if already running
    if not prefs.last_sync or \
       datetime.now() - prefs.last_sync > timedelta(seconds=30) or \
       is_running('sync'):
        sync()

    wf.add_item(duration_info['label'], subtitle='Change the duration for upcoming tasks', autocomplete='-upcoming duration ', icon=icons.UPCOMING)

    conditions = True

    # Build task title query based on the args
    for arg in args[1:]:
        if len(arg) > 1:
            conditions = conditions & (Task.title.contains(arg) | List.title.contains(arg))

    if conditions is None:
        conditions = True

    tasks = Task.select().join(List).where(
        Task.completed_at.is_null() &
        (Task.due_date < date.today() + timedelta(days=duration_info['days'] + 1)) &
        (Task.due_date > date.today() + timedelta(days=1)) &
        Task.list.is_null(False) &
        conditions
    )\
        .join(Reminder, JOIN.LEFT_OUTER, on=Reminder.task == Task.id)\
        .order_by(Task.due_date.asc(), Reminder.date.asc(), Task.order.asc())

    try:
        for t in tasks:
            wf.add_item(u'%s – %s' % (t.list_title, t.title), t.subtitle(), autocomplete='-task %s ' % t.id, icon=icons.TASK_COMPLETED if t.completed else icons.TASK)
    except OperationalError:
        background_sync()

    wf.add_item('Main menu', autocomplete='', icon=icons.BACK)

    # Make sure tasks stay up-to-date
    background_sync_if_necessary(seconds=2)
def sync(background=False):
    from wunderlist.models import base, root, list, task, user, hashtag, reminder
    from peewee import OperationalError

    # If a sync is already running, wait for it to finish. Otherwise, store
    # the current pid in alfred-workflow's pid cache file
    if not background:
        if is_running('sync'):
            wait_count = 0
            while is_running('sync'):
                time.sleep(.25)
                wait_count += 1

                if wait_count == 2:
                    notify('Please wait...', 'The workflow is making sure your tasks are up-to-date')

            return False

        pidfile = workflow().cachefile('sync.pid')

        with open(pidfile, 'wb') as file_obj:
            file_obj.write('{0}'.format(os.getpid()))

    Preferences.current_prefs().last_sync = datetime.now()

    base.BaseModel._meta.database.create_tables([
        root.Root,
        list.List,
        task.Task,
        user.User,
        hashtag.Hashtag,
        reminder.Reminder
    ], safe=True)

    # Perform a query that requires the latest schema; if it fails due to a
    # mismatched schema, delete the old database and re-sync
    try:
        task.Task.select().where(task.Task.recurrence_count > 0).count()
        hashtag.Hashtag.select().where(hashtag.Hashtag.tag == '').count()
    except OperationalError:
        base.BaseModel._meta.database.close()
        workflow().clear_data(lambda f: 'wunderlist.db' in f)

        # Make sure that this sync does not try to wait until its own process
        # finishes
        sync(background=True)
        return

    first_sync = False

    try:
        root.Root.get()
    except root.Root.DoesNotExist:
        first_sync = True

    root.Root.sync(background=background)

    if background:
        if first_sync:
            notify('Initial sync has completed', 'All of your tasks are now available for browsing')

        # If executed manually, this will pass on to the post notification action
        print 'Sync completed successfully'

    return True
    def do_search_in_folder(self, folder):
        """List/search contents of a specific Smart Folder.

        Sends results to Alfred.

        :param folder: name or path of Smart Folder
        :type folder: ``unicode``

        """
        log.info(u'searching folder "%s" for "%s" ...', folder, self.query)
        files = []
        folder_path = None
        for name, path in self.folders:
            if path == folder:
                folder_path = path
                break
            elif name == folder:
                folder_path = path
                break

        else:
            return self._terminate_with_error(
                u"Unknown folder '{}'".format(folder),
                'Check your configuration with `smartfolders`')

        # Get contents of folder; update if necessary
        key = cache_key(folder_path)
        files = self.wf.cached_data(key, max_age=0)
        if files is None:
            files = []

        if not self.wf.cached_data_fresh(key, CACHE_AGE_CONTENTS):
            run_in_background(key,
                              ['/usr/bin/python',
                               self.wf.workflowfile('cache.py'),
                               '--folder', folder_path])
        if is_running(key):
            self.wf.rerun = 0.5

        if self.query:
            files = self.wf.filter(self.query, files,
                                   key=os.path.basename,
                                   min_score=10)

        if not files:
            if not self.query:
                self._add_message('Empty Smart Folder', icon=ICON_WARNING)
            else:
                self._add_message('No matching results',
                                  'Try a different query',
                                  icon=ICON_WARNING)
        else:
            for i, path in enumerate(files):
                title = os.path.basename(path)
                subtitle = path.replace(os.getenv('HOME'), '~')
                self.wf.add_item(title, subtitle,
                                 uid=path,
                                 arg=path,
                                 valid=True,
                                 icon=path,
                                 icontype='fileicon',
                                 type='file')

                if (i + 1) == MAX_RESULTS:
                    break

        self.wf.send_feedback()
Example #22
def main(wf):
    # build argument parser to parse script args and collect their
    # values
    parser = argparse.ArgumentParser()
    # add an optional (nargs='?') --setkey argument and save its
    # value to 'apikey' (dest). This will be called from a separate "Run Script"
    # action with the API key
    parser.add_argument('--setkey', dest='apikey', nargs='?', default=None)
    parser.add_argument('--seturl', dest='apiurl', nargs='?', default=None)
    parser.add_argument('query', nargs='?', default=None)
    # parse the script's arguments
    args = parser.parse_args(wf.args)

    ####################################################################
    # Save the provided API key or URL
    ####################################################################

    # decide what to do based on arguments
    if args.apikey:  # Script was passed an API key
        log.info("Setting API Key")
        wf.save_password('gitlab_api_key', args.apikey)
        return 0  # 0 means script exited cleanly

    if args.apiurl:
        log.info("Setting API URL to {url}".format(url=args.apiurl))
        wf.settings['api_url'] = args.apiurl
        return 0

    ####################################################################
    # Check that we have an API key saved
    ####################################################################

    try:
        wf.get_password('gitlab_api_key')
    except PasswordNotFound:  # API key has not yet been set
        wf.add_item('No API key set.',
                    'Please use glsetkey to set your GitLab API key.',
                    valid=False,
                    icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    ####################################################################
    # View/filter GitLab Projects
    ####################################################################

    query = args.query

    projects = wf.cached_data('projects', None, max_age=0)

    if wf.update_available:
        # Add a notification to top of Script Filter results
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update',
                    icon=ICON_INFO)

    # Notify the user if the cache is being updated
    if is_running('update') and not projects:
        wf.rerun = 0.5
        wf.add_item(
            'Updating project list via GitLab...',
            subtitle=u'This can take some time if you '
                     u'have a large number of projects.',
            valid=False,
            icon=ICON_INFO)

    # Start update script if cached data is too old (or doesn't exist)
    if not wf.cached_data_fresh('projects',
                                max_age=3600) and not is_running('update'):
        cmd = ['/usr/bin/python', wf.workflowfile('update.py')]
        run_in_background('update', cmd)
        wf.rerun = 0.5

    # If script was passed a query, use it to filter projects
    if query and projects:
        projects = wf.filter(query,
                             projects,
                             key=search_for_project,
                             min_score=20)

    if not projects:  # we have no data to show, so show a warning and stop
        wf.add_item('No projects found', icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Loop through the returned projects and add an item for each to
    # the list of results for Alfred
    for project in projects:
        wf.add_item(title=project['name_with_namespace'],
                    subtitle=project['path_with_namespace'],
                    arg=project['web_url'],
                    valid=True,
                    icon=None)

    # Send the results to Alfred as XML
    wf.send_feedback()
Example #23
def main(wf):
    """Run workflow."""
    from docopt import docopt
    args = docopt(__doc__, wf.args)

    log.debug('args : %r', args)

    # Run Script actions
    # ------------------------------------------------------------------

    if args.get('--post'):
        open_url(os.getenv('post_url'))
        return

    if args.get('--comments'):
        open_url(os.getenv('comments_url'))
        return

    if args.get('--subreddit'):
        remember_subreddit()
        open_url(os.getenv('subreddit_url'))
        return

    if args.get('--submit'):
        open_url(os.getenv('subreddit_url') + 'submit')
        return

    ####################################################################
    # Background tasks
    ####################################################################

    # Update cached list of top subreddits
    if args.get('--update'):
        log.info('updating list of top subreddits ...')
        update_top_subreddits()
        log.info('updated list of top subreddits.')
        return

    # Search using API and cache results
    if args.get('--search'):
        name = wf.decode(args.get('--search'))
        nsfw = 'nsfw-' if NSFW else ''
        key = '--search-{}{}'.format(nsfw, cache_key(name))
        log.info('searching API for %r ...', name)
        subreddits = search_subreddits(name)
        wf.cache_data(key, subreddits)
        log.info('API returned %d subreddit(s) for %r', len(subreddits), name)
        # Tidy up cache in a background task to keep things snappy
        clear_cache()
        return

    # Update cached list of top subreddits
    if not is_running('top') and \
            not wf.cached_data_fresh('__top', TOP_CACHE_MAX_AGE):
        run_in_background('top', ['/usr/bin/python', 'reddit.py', '--update'])

    ####################################################################
    # Script Filter
    ####################################################################

    # Workflow updates
    # ------------------------------------------------------------------
    if wf.update_available:
        wf.add_item('A newer version is available',
                    '↩ to install update',
                    autocomplete='workflow:update',
                    icon=ICON_UPDATE)

    # Show popular subreddits
    # ------------------------------------------------------------------
    query = args.get('<query>')
    log.debug('query=%r', query)

    if query == '':
        return show_top()

    # Show subreddit or posts
    # ------------------------------------------------------------------

    name, slash, query = parse_query(query)
    if not name:
        wf.add_item('Invalid query',
                    'Try a different query',
                    icon=ICON_WARNING)
        wf.send_feedback()
        return 0

    # Search for matching subreddit
    # ------------------------------------------------------------------
    if not slash:
        return show_search(name)

    # Browse/search within subreddit
    # ------------------------------------------------------------------
    return show_posts(name, query)
Example #24
def main(wf):
    """Run workflow Script Filter.

    Args:
        wf (workflow.Workflow): Current Workflow object.

    Returns:
        int: Exit status.
    """
    if not len(wf.args):
        return 1
    query = wf.args[0]  # .lower()
    log.debug('query : %s', query)

    # Add workflow and user units to unit registry
    register_units()

    # Notify of available update
    if wf.update_available:
        wf.add_item('A newer version is available',
                    'Action this item to download & install the new version',
                    autocomplete='workflow:update',
                    icon=ICON_UPDATE)

    # Load cached data
    exchange_rates = wf.cached_data(CURRENCY_CACHE_NAME, max_age=0)

    if exchange_rates:  # Add exchange rates to conversion database
        register_exchange_rates(exchange_rates)

    if not wf.cached_data_fresh(CURRENCY_CACHE_NAME, CURRENCY_CACHE_AGE):
        # Update currency rates
        cmd = ['/usr/bin/python', wf.workflowfile('currency.py')]
        run_in_background('update', cmd)

    if is_running('update'):
        if exchange_rates is None:  # No data cached yet
            wf.add_item('Fetching exchange rates…',
                        'Currency conversions will be momentarily possible',
                        icon=ICON_INFO)
        else:
            wf.add_item('Updating exchange rates…',
                        icon=ICON_INFO)

    error = None
    conversion = None

    try:
        conversion = convert(query,
                             decimal_places=wf.settings.get('decimal_places',
                                                            2))
    except UndefinedUnitError as err:
        log.critical('Unknown unit : %s', err.unit_names)
        error = 'Unknown unit : {0}'.format(err.unit_names)

    except DimensionalityError as err:
        log.critical('Invalid conversion : %s', err)
        error = "Can't convert from {0} {1} to {2} {3}".format(
            err.units1, err.dim1, err.units2, err.dim2)

    except ValueError as err:
        log.critical('Invalid query : %s', err)
        error = err.message

    except Exception as err:
        log.exception('%s : %s', err.__class__, err)
        error = err.message

    if not error and not conversion:
        error = 'Conversion input not understood'

    if error:  # Show error
        wf.add_item(error,
                    'For example: 2.5cm in  |  178lb kg  |  200m/s mph',
                    valid=False, icon=ICON_WARNING)
    else:  # Show result
        wf.add_item(conversion,
                    valid=True,
                    arg=conversion,
                    copytext=conversion,
                    largetext=conversion,
                    icon='icon.png')

    wf.send_feedback()
    log.debug('finished')
    return 0
    def test_no_pidfile(self):
        """No PID file for non-existent job"""
        assert not is_running('boomstick')
Example #26
    def test_non_existent_process(self):
        """Non-existent process"""
        _write_pidfile('test', 9999999)
        assert not is_running('test')
        assert not os.path.exists(_pidfile('test'))
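
The two tests above rely on is_running() treating a PID file whose process has already exited as stale and deleting it. A rough sketch of that stale-check technique (illustrative helper names, not the library's actual _pidfile/is_running source):

import os


def job_pidfile(name, cachedir='/tmp'):
    # Illustrative only: the real module builds this path from the
    # workflow's cache directory.
    return os.path.join(cachedir, '{0}.pid'.format(name))


def job_is_running(name):
    """Return True if the stored PID belongs to a live process."""
    pidfile = job_pidfile(name)
    if not os.path.exists(pidfile):
        return False
    with open(pidfile, 'rb') as fp:
        pid = int(fp.read())
    try:
        os.kill(pid, 0)   # signal 0 checks existence without killing
    except OSError:       # no such process; treat the PID file as stale
        os.unlink(pidfile)
        return False
    return True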
Example #27
def main(wf):


    from query_exchange   import query_exchange_server
    from query_google    import query_google_calendar

    import pytz
    from pytz import timezone
    from datetime import timedelta, datetime
    from settings import get_value_from_settings_with_default_boolean, get_value_from_settings_with_default_int
    import time

    # Check to see if updates are available
    if wf.update_available:

        wf.add_item('A newer version is available',
                '↩ to install update',
                icon='update-available.png', arg='update', valid=True)

    # Parse and log the query variable
    query = None
    if len(wf.args):
        query = wf.args[0]
    log.debug('query : {!r}'.format(query))

    # Get date offset
    args = wf.args
    date_offset = 0
    if len(args) > 0:
        date_offset = int(args[0])


    #Start calculating elapsed time - displayed in results page at end
    action_start_time = time.time()

    # Find out cache time
    cache_time = get_value_from_settings_with_default_int(wf, 'cache_time', 9000)

    morning = timezone("US/Eastern").localize(datetime.today().replace(hour=0, minute=0, second=1) + timedelta(days=date_offset))
    night = timezone("US/Eastern").localize(datetime.today().replace(hour=23, minute=59, second=59) + timedelta(days=date_offset))

    # Outlook needs a different time format than google it would appear
    start_outlook = morning.astimezone(pytz.utc)
    end_outlook   = night.astimezone(pytz.utc)
    start_google  = morning.astimezone(pytz.utc).isoformat()
    stop_google   = night.astimezone(pytz.utc).isoformat()

    log.info("%s\t\t\t%s",start_google, stop_google)

    def google_wrapper():
        """A wrapper around doing a google query so this can be used with a cache function"""
        return query_google_calendar(wf, start_google, stop_google, date_offset)

    def exchange_wrapper():
        """Wrapper around outlook query so can be used with caching"""
        return query_exchange_server(wf,start_outlook, end_outlook, date_offset)



    # Format date text for displays
    date_text = night.strftime("%A %B %d, %Y")
    date_text_numeric = night.strftime("%m/%d/%y")

    # Build Cache Keys
    exchange_cache_key = get_cache_key('exchange', date_offset)
    google_cache_key  = get_cache_key('google', date_offset)
    log.debug("-- FG: CacheKey (Google)   " + google_cache_key)
    log.debug("-- FG: CacheKey (Exchange) " + exchange_cache_key)


    # Check which calendars to use from settings
    use_exchange = get_value_from_settings_with_default_boolean(wf, 'use_exchange', False)
    use_google   = get_value_from_settings_with_default_boolean(wf, 'use_google', False)

    if not use_google and not use_exchange:
        wf.add_item('Calendars are disabled','use the tc command to setup a calendar', icon=ICON_INFO, arg="tc")
        wf.send_feedback()
        return

    log.debug("Max Age: %i  Cache Age Google:  %i   Exchange: %i",
              cache_time,
              wf.cached_data_age(google_cache_key),
              wf.cached_data_age(exchange_cache_key)
              )
    # Check cache status
    google_fresh   = wf.cached_data_fresh(google_cache_key, max_age=cache_time)
    exchange_fresh = wf.cached_data_fresh(exchange_cache_key, max_age=cache_time)



    # Determine whether cache data is being shown or "live" data
    showing_cached_data = True

    if use_google:
        showing_cached_data &= google_fresh

    if use_exchange:
        showing_cached_data &= exchange_fresh


    event_count = 0
    error_state = False


    log.debug('--FG:   Use Exchange:' + str(use_exchange))
    log.debug('--FG: Exchange Fresh:' + str(exchange_fresh))
    if use_exchange:

        # If the cache is fresh we need to do a bg refresh - because who knows what has happened
        # If the cache is stale then directly query the exchange server

        if exchange_fresh:
            log.debug('--FG: Loading Exchange events from Cache')

            #Extract the cached events
            exchange_events = wf.cached_data(exchange_cache_key, max_age=0)

            log.debug(str(exchange_events))

            # Run update in the background
            if not is_running('update_exchange'):
                cmd = ['/usr/bin/python',
                       wf.workflowfile('query_exchange.py'),
                       start_outlook.strftime("%Y-%m-%d-%H:%M:%S"),
                       end_outlook.strftime("%Y-%m-%d-%H:%M:%S"),
                       str(date_offset)]

                log.debug('--FG: Launching background exchange update')
                # Fire off in the background the script to update things! :)
                run_in_background('update_exchange', cmd)
            else:
                log.debug('--FG: Background exchange update already running')

        else:
            log.debug('--FG: Directly querying Exchange')

            # Directly query the exchange server
            exchange_events = wf.cached_data(exchange_cache_key, exchange_wrapper, max_age=cache_time)

        if exchange_events is None:
            log.debug('--FG: Exchange Events returned NONE!!!')
            error_state = True

            wf.add_item('Unable to connect to exchange server', 'Check your connectivity or NTLM auth settings', icon='img/disclaimer.png')
            exchange_events = []
        else:
            event_count += len(exchange_events)

    else:
        exchange_events = []

    if use_google:
        # check for any enabled calendars
        no_google_calendars = True
        for key in wf.settings:
            if 'calendar' in key:
                no_google_calendars = False

        if no_google_calendars:
            wf.add_item('Not showing any Google Calendars', 'use the tcgc command to select calendars')

        # If the cache is "fresh" we need to do a bg refresh - because we are loading from the cache
        # If the cache isn't fresh - the server will be queried directly anyway
        if google_fresh:

            # Extract cached events
            google_events = wf.cached_data(google_cache_key, max_age=0)

            # Run update in background
            if not is_running('update_google'):
                cmd = ['/usr/bin/python',
                       wf.workflowfile('query_google.py'),
                       start_google,
                       stop_google,
                       str(date_offset)]

                # Fire off in the background the script to update things! :)
                run_in_background('update_google', cmd)
        else:

            # Directly run event update - ignore background stuff
            google_events = wf.cached_data(google_cache_key, google_wrapper, max_age=cache_time)

        if google_events is None:

            error_state = True

            import httplib
            conn = httplib.HTTPConnection("www.google.com")
            try:
                conn.request("HEAD", "/")
                wf.add_item('Unable to connect to Google', 'Authorization or Connection error - use tc to reauthorize',
                            icon='img/disclaimer.png')
            except Exception as ex:
                wf.logger.info("Unable to connect to google")
                template = "An exception of type {0} occured. Arguments:\n{1!r}"
                message = template.format(type(ex).__name__, ex.args)
                wf.logger.info(message)
                import traceback
                wf.logger.info(traceback.format_exc())
                wf.add_item('Unable to connect to Google', 'Check your internet connection or proxy settings',
                            icon='img/disclaimer.png')

            google_events = []
        else:

            for e in google_events:
                wf.logger.debug(' '.join(['**FG --- Google:', str(e.get(u'start').get(u'dateTime', 'All Day')),
                                          e.get('summary', 'NoTitle')]))

            event_count += len(google_events)
    else:
        google_events = []

    # Build Header
    icon_file = 'img/date_span.png'

    if use_exchange and use_google:
        icon_file = 'img/iconBoth.png'
    elif use_exchange:
        icon_file = 'img/iconOutlook.png'
    elif use_google:
        icon_file = 'img/iconGoogle.png'

    # Fire off some log data
    log.info("Event Count   Google: " + str(len(google_events)))
    log.info("Event Count Exchange: " + str(len(exchange_events)))
    log.info("Event Count    Total: " + str(event_count))


    if event_count == 0:
        if error_state is False:
            wf.add_item('Calendar is empty', date_text, icon=icon_file)
        wf.send_feedback()
        return

    first_menu_entry = wf.add_item(date_text, date_text_numeric, icon=icon_file)

    # Process events
    EventProcessor(wf).process_events(exchange_events, google_events)


    # Update elapsed time counter
    action_elapsed_time = time.time() - action_start_time


    if showing_cached_data:
        first_menu_entry.subtitle += " - Cached Data"
    else:
        first_menu_entry.subtitle += " query time: " + "{:.1f}".format(
            action_elapsed_time) + " seconds"

    wf.send_feedback()
def refresh(wf):
    if not is_running('hackernews_refresh'):
        cmd = ['/usr/bin/python', wf.workflowfile('hackernews_refresh.py')]
        run_in_background('hackernews_refresh', cmd)
Example #29
def main(wf):
    # Workflow requires a query
    query = wf.args[0]

    index_exists = True

    # Create index if it doesn't exist
    if not os.path.exists(INDEX_DB):
        index_exists = False
        run_in_background('indexer', ['/usr/bin/python', 'index.py'])

    # Can't search without an index. Inform user and exit
    if not index_exists:
        wf.add_item('Creating search index…', 'Please wait a moment',
                    icon=ICON_INFO)
        wf.send_feedback()
        return

    # Inform user of update in case they're looking for something
    # recently added (and it isn't there)
    if is_running('indexer'):
        wf.add_item('Updating search index…',
                    'Fresher results will be available shortly',
                    icon=ICON_INFO)

    # Search!
    start = time()
    db = sqlite3.connect(INDEX_DB)
    # Set ranking function with weightings for each column.
    # `make_rank_function` must be called with a tuple/list of the same
    # length as the number of columns "selected" from the database.
    # In this case, `url` is set to 0 because we don't want to search on
    # that column
    db.create_function('rank', 1, make_rank_func((1.0, 1.0, 0)))
    cursor = db.cursor()
    try:
        cursor.execute("""SELECT author, title, url FROM
                            (SELECT rank(matchinfo(books))
                             AS r, author, title, url
                             FROM books WHERE books MATCH ?)
                          ORDER BY r DESC LIMIT 100""", (query,))
        results = cursor.fetchall()
    except sqlite3.OperationalError as err:
        # If the query is invalid, show an appropriate warning and exit
        if b'malformed MATCH' in err.message:
            wf.add_item('Invalid query', icon=ICON_WARNING)
            wf.send_feedback()
            return
        # Otherwise raise error for Workflow to catch and log
        else:
            raise err

    if not results:
        wf.add_item('No matches', 'Try a different query', icon=ICON_WARNING)

    log.info('{} results for `{}` in {:0.3f} seconds'.format(
             len(results), query, time() - start))

    # Output results to Alfred
    for (author, title, url) in results:
        wf.add_item(title, author, valid=True, arg=url, icon='icon.png')

    wf.send_feedback()
Example #31
0
def show_search(name, nsfw=NSFW):
    """List subreddits matching `name`."""
    nsfw = 'nsfw-' if nsfw else ''
    top = wf.cached_data('__top', max_age=0) or []
    history = wf.cached_data('__history', max_age=0) or []
    key = '--search-{}{}'.format(nsfw, cache_key(name))

    # Load cached results for name or start search in background
    cached = wf.cached_data(key, None, SEARCH_CACHE_MAX_AGE) or []
    if not cached and not is_running('search'):
        run_in_background(
            'search',
            ['/usr/bin/python', 'reddit.py', '--search',
             name.encode('utf-8')])
        wf.rerun = 0.3

    log.debug('loaded subreddits: %d history, %d top, %d cached', len(history),
              len(top), len(cached))

    if is_running('search'):
        wf.rerun = 0.3

    subreddits = history
    other = top + cached
    seen = {sr['name'] for sr in history}
    for sr in other:
        if sr['name'] in seen:
            continue
        subreddits.append(sr)
        seen.add(sr['name'])

    # Filter results because Reddit's search is super-crappy
    subreddits = wf.filter(name,
                           subreddits,
                           key=lambda sr: sr['name'],
                           min_score=30)

    if not subreddits:
        if is_running('search'):
            wf.add_item('Loading from API …', 'Hang in there')
        else:
            wf.add_item('No matching subreddits found',
                        'Try a different query',
                        icon=ICON_WARNING)
        wf.send_feedback()
        return

    # Cache all subreddits in case we need to "remember" one
    results = {sr['name']: sr for sr in subreddits}
    wf.cache_data('--last', results, session=True)

    # List all matching subreddits

    for sr in subreddits:

        log.debug(repr(sr))

        url = sr['url']
        it = wf.add_item(sr['name'],
                         sr['title'],
                         autocomplete='{}/'.format(sr['name']),
                         arg=url,
                         uid=sr['name'],
                         quicklookurl=url,
                         valid=True,
                         icon=ICON_REDDIT)

        # Export subreddit to ENV in case we want to save it
        it.setvar('subreddit_name', sr['name'])
        it.setvar('subreddit_title', sr['title'])
        it.setvar('subreddit_type', sr['type'])
        it.setvar('subreddit_url', url)
        it.setvar('argv', '-s')
        it.add_modifier('alt',
                        'Make post in "r/{}" in browser'.format(sr['name']),
                        valid=True).setvar('argv', '-b')

    wf.send_feedback()
    return
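
`cache_key()` is referenced above but not shown. A minimal sketch, assuming its only job is to turn an arbitrary query into a filesystem-safe cache name (the hashing approach is an assumption, not the original implementation):

import hashlib


def cache_key(name):
    """Return a filesystem-safe cache key for `name` (illustrative)."""
    # Normalise case and whitespace so equivalent queries share a cache
    # file, then hash to avoid characters that are unsafe in filenames.
    normalised = name.strip().lower().encode('utf-8')
    return hashlib.md5(normalised).hexdigest()
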
    def refresh_list(cls):
        """Spawn subprocess to populate response from Google Drive"""

        if not is_running('drive_refresh'):
            cmd = ['/usr/bin/python', wf.workflowfile('drive_refresh.py')]
            run_in_background('drive_refresh', cmd)
 def test_non_existent_process(self):
     """Non-existent process"""
     _write_pidfile('test', 9999999)
     assert not is_running('test')
     assert not os.path.exists(_pidfile('test'))
Example #34
0
def main(wf):
    args = wf.args[0]
    # Do stuff here ...
    # Add an item to Alfred feedback
    libpath = wf.stored_data('library_location')

    if not libpath:
        wf.add_item('Set Quiver Library with qset', icon=ICON_INFO)
        wf.send_feedback()
        return
    db.connect()

    if not os.path.exists("quiver.db"):
        wf.add_item('Constructing database...', icon=ICON_INFO)
        call(['/usr/bin/python',
              wf.workflowfile('quiver_to_db.py')])
        wf.send_feedback()

    # Add a notification if the script is running
    if is_running('update'):
        wf.add_item('Constructing database...', icon=ICON_INFO)

    icon_set = {"Inbox": "icons/inbox.png",
                "Recents": "icons/recent.png",
                "Trash": "icons/trash.png"}

    tagset = list(Tags.select(Tags.tag, fn.COUNT(Tags.id).alias("count"))
                        .group_by(Tags.tag)
                        .distinct()
                        .dicts()
                        .execute())
    notebooks = list(Note.select(Note.notebook, fn.COUNT(Note.id).alias("count"))
                        .group_by(Note.notebook)
                        .distinct()
                        .dicts()
                        .execute())
    notebooks_list = [x["notebook"] for x in notebooks]
    # Remove the special notebooks individually so a missing "Inbox"
    # doesn't stop "Trash" from being removed as well
    for special in ("Inbox", "Trash"):
        if special in notebooks_list:
            notebooks_list.remove(special)
    notebooks_list = ["Inbox", "Recents", "Trash"] + notebooks_list
    taglist = [x["tag"] for x in tagset]
    ttaglist = ["#" + x for x in taglist]
    targ = "#" + args
    # Searching by Tag
    if args.startswith(u"#"):
        if args in ttaglist:
            notes = Note.select(Tags, Note).filter(args.strip("#") == Tags.tag).join(Tags).distinct().execute()
            display_notes(notes)
        else:
            tag_filter = wf.filter(args, tagset, tag_filter_key)
            for tag in tag_filter:
                tag_name = tag_filter_key(tag)
                wf.add_item(tag_name, str(tag["count"]) + " item(s)",
                            autocomplete=tag_name, icon="icons/tag.png")

    # Searching by Notebook
    elif args in notebooks_list:
        if args == "Recents":
            # Show Recents
            display_notes(Note.select().order_by(-Note.last_modified).distinct().limit(10).execute())
        else:
            display_notes(Note.select().filter(Note.notebook == args).execute())
    else:
        notebooks_q = {x["notebook"]: x for x in notebooks}
        if len(args) > 0:
            notebooks_list = wf.filter(args, notebooks_list)
        for n in notebooks_list:
            if n in icon_set:
                icon = icon_set[n]
            else:
                icon = "icons/notebook.png"
            if n == "Recents":
                wf.add_item("Recents", autocomplete="Recents", icon=icon)
            else:
                # Default to `notebook = n` with `count = 0` to prevent a
                # KeyError on an empty Inbox / Trash
                notebook = notebooks_q.get(n, {"notebook": n, "count": 0})
                wf.add_item(notebook["notebook"],
                            str(notebook["count"]) + " item(s)",
                            autocomplete=n, icon=icon)

        if len(args) > 0:
            # Perform Search!
            results = NoteIndex.search_bm25(
                args,
                weights={'title': 2.0, 'content': 1.0},
                with_score=True,
                score_alias='search_score').order_by(NoteIndex.rank())
            if len(results) == 0:
                wf.add_item("No Results", icon=ICON_ERROR)
            else:
                for result in results:
                    r = Note.get(uuid=result.uuid)
                    wf.add_item(r.title,
                                str(result.search_score) + "-" + unicode(result.content),
                                arg=r.uuid, valid=True, icon="icons/note.png")


    wf.send_feedback()

    # Regenerate database if it is old.
    if not wf.cached_data_fresh('update_db', 3600):
        log.debug("REWRITING DB!!!")
        run_in_background('update',
                          ['/usr/bin/python',
                           wf.workflowfile('quiver_to_db.py')])
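
The peewee models (`Note`, `Tags`, `NoteIndex`) used above are defined elsewhere in the workflow, presumably alongside `quiver_to_db.py`. A hypothetical sketch, inferred only from the fields the queries touch, might look like the following; the real schema and the peewee version in use may differ.

# Hypothetical schema inferred from the queries above; illustrative only.
from peewee import CharField, DateTimeField, ForeignKeyField, Model, TextField
from playhouse.sqlite_ext import FTSModel, SearchField, SqliteExtDatabase

db = SqliteExtDatabase('quiver.db')


class Note(Model):
    uuid = CharField(unique=True)
    title = TextField()
    content = TextField()
    notebook = CharField()
    last_modified = DateTimeField()

    class Meta:
        database = db


class Tags(Model):
    tag = CharField()
    note = ForeignKeyField(Note, backref='tags')

    class Meta:
        database = db


class NoteIndex(FTSModel):
    # Full-text index over notes, queried via search_bm25() above
    uuid = SearchField()
    title = SearchField()
    content = SearchField()

    class Meta:
        database = db
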
Example #35
0
def main(wf):
    if wf.first_run:
        kill_notifier()

    statuses = [
        'all', 'active', 'pending', 'paused', 'waiting', 'done', 'error',
        'removed', 'stopped'
    ]
    actions = ['reveal', 'rm', 'url', 'pause', 'resume']
    settings = [
        'rpc', 'secret', 'limit', 'limitup', 'limitnum', 'clear', 'add',
        'quit', 'stat', 'help', 'pauseall', 'resumeall'
    ]
    commands = actions + settings

    command = 'reveal'
    status = 'all'
    param = ''

    if len(wf.args) == 1:
        if wf.args[0] in commands:
            command = wf.args[0]
        elif wf.args[0] in statuses:
            status = wf.args[0]
        else:
            param = wf.args[0:]
    elif len(wf.args) > 1:
        if wf.args[0] in settings:
            command = wf.args[0]
            param = wf.args[1]  # settings take one param only
        elif wf.args[0] in actions:
            command = wf.args[0]
            # actions can take multiple params to filter the result
            param = wf.args[1:]
        elif wf.args[0] in statuses:
            status = wf.args[0]
            # statuses can take multiple params to filter the result
            param = wf.args[1:]
        else:
            param = wf.args[0:]

    if command not in settings:
        if command == 'pause':
            status = 'active'
        elif command == 'resume':
            status = 'incomplete'
        if get_rpc():
            get_tasks(command, status, param)
    else:
        if command == 'rpc':
            wf.add_item('Set Aria2\'s RPC Path',
                        'Set the path to ' + param,
                        arg=u'--rpc-setting ' + param,
                        valid=True)
        elif command == 'secret':
            wf.add_item('Set Aria2\'s RPC Secret',
                        'Set the secret to ' + param,
                        arg=u'--secret-setting ' + param,
                        valid=True)
        elif command == 'add':
            wf.add_item('Add new download: ' + param,
                        arg='--add ' + param,
                        valid=True)
        elif command == 'clear':
            wf.add_item('Clear all stopped download?',
                        arg='--clear',
                        valid=True)
        elif command == 'pauseall':
            wf.add_item('Pause all active downloads?',
                        arg='--pauseall',
                        valid=True)
        elif command == 'resumeall':
            wf.add_item('Resume all paused downloads?',
                        arg='--resumeall',
                        valid=True)
        elif command == 'help':
            wf.add_item('Need some help?', arg='--help', valid=True)
        elif command == 'quit':
            wf.add_item('Quit Aria2?', arg='--quit', valid=True)
        elif command == 'limit':
            limit_speed('download', param)
        elif command == 'limitup':
            limit_speed('upload', param)
        elif command == 'limitnum':
            limit_num(param)
        elif command == 'stat':
            get_stats()

    if wf.update_available:
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update')

    wf.send_feedback()

    if not is_running('notifier'):
        cmd = ['/usr/bin/python', wf.workflowfile('notifier.py')]
        run_in_background('notifier', cmd)
Example #36
0
def background_refresh():
    if not is_running('gmail_refresh'):
        cmd = ['/usr/bin/python', WF.workflowfile('gmail_refresh.py')]
        run_in_background('gmail_refresh', cmd)
Example #37
0
def main(wf):
    if wf.first_run:
        kill_notifier()

    statuses = ['all', 'active', 'pending', 'paused', 'waiting',
            'done', 'error', 'removed', 'stopped']
    actions = ['reveal', 'rm', 'url', 'pause', 'resume']
    settings = ['rpc', 'secret', 'limit', 'limitup', 'limitnum', 'clear', 'add', 'quit', 
            'stat', 'help', 'pauseall', 'resumeall']
    commands = actions + settings

    command = 'reveal'
    status = 'all'
    param = ''

    if len(wf.args) == 1:
        if wf.args[0] in commands:
            command = wf.args[0]
        elif wf.args[0] in statuses:
            status = wf.args[0]
        else:
            param = wf.args[0:]
    elif len(wf.args) > 1:
        if wf.args[0] in settings:
            command = wf.args[0]
            param = wf.args[1]      # settings take one param only
        elif wf.args[0] in actions:
            command = wf.args[0]
            param = wf.args[1:]     # actions can take multiple params to filter the result
        elif wf.args[0] in statuses:
            status = wf.args[0]
            param = wf.args[1:]     # statuses can take multiple params to filter the result
        else:
            param = wf.args[0:]

    if command not in settings:
        if command == 'pause':
            status = 'active'
        elif command == 'resume':
            status = 'incomplete'
        if get_rpc():
            get_tasks(command, status, param)
    else:
        if command == 'rpc':
            wf.add_item('Set Aria2\'s RPC Path', 'Set the path to ' + param,
                arg=u'--rpc-setting ' + param, valid=True)
        elif command == 'secret':
            wf.add_item('Set Aria2\'s RPC Secret', 'Set the secret to ' + param,
                arg=u'--secret-setting ' + param, valid=True)
        elif command == 'add':
            wf.add_item('Add new download: ' + param, arg='--add ' + param, valid=True)
        elif command == 'clear':
            wf.add_item('Clear all stopped download?', arg='--clear', valid=True)
        elif command == 'pauseall':
            wf.add_item('Pause all active downloads?', arg='--pauseall', valid=True)
        elif command == 'resumeall':
            wf.add_item('Resume all paused downloads?', arg='--resumeall', valid=True)
        elif command == 'help':
            wf.add_item('Need some help?', arg='--help', valid=True)
        elif command == 'quit':
            wf.add_item('Quit Aria2?', arg='--quit', valid=True)
        elif command == 'limit':
            limit_speed('download', param)
        elif command == 'limitup':
            limit_speed('upload', param)
        elif command == 'limitnum':
            limit_num(param)
        elif command == 'stat':
            get_stats()

    if wf.update_available:
        wf.add_item('New version available',
                    'Action this item to install the update',
                    autocomplete='workflow:update')
    
    wf.send_feedback()

    if not is_running('notifier'):
        cmd = ['/usr/bin/python', wf.workflowfile('notifier.py')]
        run_in_background('notifier', cmd)
Example #38
0
def filter(args):
    wf = workflow()
    prefs = Preferences.current_prefs()
    command = args[1] if len(args) > 1 else None
    duration_info = _duration_info(prefs.upcoming_duration)

    if command == 'duration':
        selected_duration = prefs.upcoming_duration

        # Apply selected duration option
        if len(args) > 2:
            try:
                selected_duration = int(args[2])
            except ValueError:
                pass

        duration_info = _duration_info(selected_duration)

        if 'custom' in duration_info:
            wf.add_item(duration_info['label'],
                        duration_info['subtitle'],
                        arg='-upcoming duration %d' % (duration_info['days']),
                        valid=True,
                        icon=icons.RADIO_SELECTED if duration_info['days']
                        == selected_duration else icons.RADIO)

        for duration_info in _durations:
            wf.add_item(duration_info['label'],
                        duration_info['subtitle'],
                        arg='-upcoming duration %d' % (duration_info['days']),
                        valid=True,
                        icon=icons.RADIO_SELECTED if duration_info['days']
                        == selected_duration else icons.RADIO)

        wf.add_item('Back', autocomplete='-upcoming ', icon=icons.BACK)

        return

    # Force a sync if not done recently or join if already running
    if not prefs.last_sync or \
       datetime.utcnow() - prefs.last_sync > timedelta(seconds=30) or \
       is_running('sync'):
        sync()

    wf.add_item(duration_info['label'],
                subtitle='Change the duration for upcoming tasks',
                autocomplete='-upcoming duration ',
                icon=icons.UPCOMING)

    conditions = True

    # Build task title query based on the args
    for arg in args[1:]:
        if len(arg) > 1:
            conditions = conditions & (Task.title.contains(arg)
                                       | TaskFolder.title.contains(arg))

    if conditions is None:
        conditions = True

    tasks = Task.select().join(TaskFolder).where(
        (Task.status != 'completed') &
        (Task.dueDateTime < datetime.now() + timedelta(days=duration_info['days'] + 1)) &
        (Task.dueDateTime > datetime.now() + timedelta(days=1)) &
        Task.list_id.is_null(False) &
        conditions
    )\
        .order_by(Task.dueDateTime.asc(), Task.reminderDateTime.asc(), Task.lastModifiedDateTime.asc())

    try:
        for t in tasks:
            wf.add_item(u'%s – %s' % (t.list_title, t.title),
                        t.subtitle(),
                        autocomplete='-task %s ' % t.id,
                        icon=icons.TASK_COMPLETED
                        if t.status == 'completed' else icons.TASK)
    except OperationalError:
        background_sync()

    wf.add_item('Main menu', autocomplete='', icon=icons.BACK)

    # Make sure tasks stay up-to-date
    background_sync_if_necessary(seconds=2)
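
`_durations` and `_duration_info()` are referenced above but defined elsewhere in the workflow. A hypothetical sketch, inferred only from the keys the code reads (`label`, `subtitle`, `days` and an optional `custom` marker); the actual labels and day counts are assumptions:

# Illustrative duration options; the real values live elsewhere in the workflow.
_durations = [
    {'label': 'Next 3 days', 'subtitle': 'Show tasks due in the next 3 days', 'days': 3},
    {'label': 'Next 7 days', 'subtitle': 'Show tasks due in the next 7 days', 'days': 7},
    {'label': 'Next 14 days', 'subtitle': 'Show tasks due in the next 14 days', 'days': 14},
]


def _duration_info(days):
    """Return the option dict for `days`, flagging unknown values as custom."""
    for info in _durations:
        if info['days'] == days:
            return info
    return {
        'label': '%d days' % days,
        'subtitle': 'Show tasks due in the next %d days' % days,
        'days': days,
        'custom': True,
    }
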
Example #39
0
def refresh_list(wf):
    if not is_running("pocket_refresh"):
        cmd = ["/usr/bin/python", wf.workflowfile("pocket_refresh.py")]
        run_in_background("pocket_refresh", cmd)
Example #40
0
def main(wf):
    log.info('Started main')
    ####################################################################
    # Check for Update
    ####################################################################

    # Update available?
    # log.debug(wf.cached_data(__workflow_update_status))
    if wf.update_available:
        wf.add_item('A newer version is available',
                    'Press ENTER to install update',
                    autocomplete='workflow:update',
                    icon='update_available.png')

    ####################################################################
    # Get and Parse arguments
    ####################################################################

    # Build argument parser to parse script args and collect their values
    parser = argparse.ArgumentParser()

    # Keyword actions:
    # Save the API key
    parser.add_argument('--setkey', dest='apikey', nargs='?', default=None)
    # Save the tag for this user
    parser.add_argument('--setuser', dest='user', nargs='?', default=None)
    # Update data
    parser.add_argument('--update',
                        dest='update_method',
                        nargs='?',
                        default='normal')
    # Show only projects for a specific tag
    parser.add_argument('--user', dest='user_tag', nargs='?', default=None)

    # Show the list of options for the selected project
    parser.add_argument('--options',
                        dest='project_id',
                        nargs='?',
                        default=None)

    # Submenu options, project_id is stored in args.project_id
    parser.add_argument('--archive_project',
                        dest='project_id',
                        nargs='?',
                        default=None)
    parser.add_argument('--delete_project',
                        dest='project_id',
                        nargs='?',
                        default=None)

    # Add an optional query and save it to 'query'
    parser.add_argument('query', nargs='?', default=None)

    # Parse the script's arguments
    args = parser.parse_args(wf.args)

    ####################################################################
    # Run argument-specific actions
    ####################################################################

    # Save the API key
    if args.apikey:  # Script was passed an API key
        # Save the provided API key
        wf.save_password('10k_api_key', args.apikey)

        # Notify the user
        notify_title = 'Saved API key'
        notify_text = 'Your 10.000ft API key was saved'

        return notify(notify_title, notify_text)

    # Save the tag for this user
    if args.user:  # Script was passed a username
        # save the user
        wf.settings['user'] = args.user.lower()
        log.debug('WF settings: ' + str(wf.settings))

        # Notify the user
        notify_title = 'Saved User-tag-name'
        notify_text = 'Your 10.000ft User-tag-name was saved'

        return notify(notify_title, notify_text)

    # Update data
    if wf.args[0] == '--update':
        # Update data from 10.000ft
        update_method = args.update_method
        update_data(update_method)

        # Notify the user
        notify_title = 'Update running'
        notify_text = 'Data will be fetched from 10.000ft.'
        return notify(notify_title, notify_text)

    # Update project: Archive
    if wf.args[0] == '--archive_project':
        # Archive project if --archive_project
        update_project(args.project_id, 'archive_project')
        return 0

    # Update project: Delete
    if wf.args[0] == '--delete_project':
        # Delete project if --delete_project
        update_project(args.project_id, 'delete_project')
        return 0

    ####################################################################
    # Get data and filter 10.000ft projects
    ####################################################################

    # Is the API key stored in the Keychain?
    try:
        wf.get_password('10k_api_key')
    except PasswordNotFound:  # API key has not yet been set
        wf.add_item('No API key set.',
                    'Please use .10ksetkey to set your 10.000ft API key.',
                    valid=False,
                    icon='icons/warning.png')
        wf.send_feedback()
        return 0

    # Get query from Alfred
    query = args.query

    # Get posts from cache. Set `data_func` to None, as we don't want to
    # update the cache in this script and `max_age` to 0 because we want
    # the cached data regardless of age
    projects = wf.cached_data('projects', None, max_age=0)
    clients = wf.cached_data('clients', None, max_age=0)

    # Start update script if cached data is too old (or doesn't exist)
    if not wf.cached_data_fresh('projects', max_age=600):
        update_data('refresh')

    if not wf.cached_data_fresh('clients', max_age=600):
        update_data('refresh')

    # Notify the user if the cache is being updated
    if is_running('update'):
        wf.add_item('Fetching data from 10.000ft...',
                    valid=False,
                    icon='icons/fetching_data.png')

    # If script was passed a query, use it to filter projects
    if query and projects:
        projects = wf.filter(query,
                             projects,
                             key=search_key_for_project,
                             min_score=20)

    # If we have no data to show, show a warning and stop
    if not projects:
        wf.add_item('No projects found', icon='icons/warning.png')
        wf.send_feedback()
        return 0

    ####################################################################
    # Show submenu options for project
    ####################################################################

    # If argument --options is passed on, show the options for manipulating a
    # project.
    if wf.args[0] == '--options':

        # Get current project data
        log.info('Started building options menu')
        project = get_project_data(args.project_id)

        # Build report URLs
        report_time = build_report_url(25, project)
        report_fees = build_report_url(27, project)

        # Add options for projects
        wf.add_item(title='View project',
                    arg='https://app.10000ft.com/viewproject?id=' +
                    str(project['id']),
                    valid=True,
                    icon='icons/project_view.png')
        wf.add_item(title='Edit project',
                    arg='https://app.10000ft.com/editproject?id=' +
                    str(project['id']),
                    valid=True,
                    icon='icons/project_edit.png')
        wf.add_item(title='Budget report time for project',
                    arg=report_time,
                    valid=True,
                    icon='icons/project_budget_report_time.png')
        wf.add_item(title='Budget report fees for project',
                    arg=report_fees,
                    valid=True,
                    icon='icons/project_budget_report_fees.png')
        wf.add_item(title='Archive project',
                    arg='10000ft.py --archive_project ' + str(project['id']),
                    valid=True,
                    icon='icons/project_archive.png')
        wf.add_item(title='Delete project',
                    arg='10000ft.py --delete_project ' + str(project['id']),
                    valid=True,
                    icon='icons/project_delete.png')
        # Send the results to Alfred as XML
        wf.send_feedback()

    ####################################################################
    # Show List of projects
    ####################################################################

    else:
        # Loop through the returned projects and add an item for each to the
        # list of results for Alfred
        for project in projects:
            # Extract tags from data and put them in a list
            taglist = build_taglist(project['tags']['data'])

            if wf.args[0] == '--user':
                # Only show projects of current user if the argument --user is
                # passed on
                if 'user' in wf.settings:
                    # Get the user tag from wf.settings
                    user_tag = wf.settings['user']
                    # Check if the current user_tag is in the list of tags for
                    # this project.
                    if user_tag in taglist:
                        # Add the project to the list as an item
                        add_project(project, taglist)
                else:
                    # Show an error if the 'user' key is not in wf.settings
                    wf.add_item('No User-tag-name saved.',
                                ('Please use .10ksetuser to set '
                                 'your 10.000ft User-tag-name.'),
                                valid=False,
                                icon='icons/warning.png')
                    wf.send_feedback()
                    return 0
            else:
                # In all other situations, just show the list of all the
                # projects
                add_project(project, taglist)
        # Send the results to Alfred as XML
        wf.send_feedback()
        return 0
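
`update_data()` is called several times above but not shown in this excerpt. Judging by the `is_running('update')` check, it launches a background refresh job; a minimal sketch under that assumption follows (the script name and flag are hypothetical, and a module-level `wf` is assumed, as in the other helpers in this listing).

def update_data(update_method):
    """Start a background refresh of the 10.000ft caches (sketch only).

    Only the 'update' job name is implied by the is_running('update')
    check in main(); the script name and flag below are assumptions.
    """
    if not is_running('update'):
        cmd = ['/usr/bin/python', wf.workflowfile('update_data.py'),
               '--method', update_method]
        run_in_background('update', cmd)
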