def test_workflow_update_methods(self):
        """End-to-end exercise of the auto-update machinery.

        Starts with an outdated version so a newer release is found, checks
        that start_update() spawns the background installer with the right
        command, then re-initialises at the latest release version and
        verifies that no update is offered.
        """

        def fake(wf):
            # No-op main function for wf.run()
            return

        Workflow().reset()
        # Initialise with outdated version
        wf = Workflow(update_settings={
            'github_slug': 'deanishe/alfred-workflow-dummy',
            'version': 'v2.0',
            'frequency': 1,
        })

        wf.run(fake)

        # Check won't have completed yet
        self.assertFalse(wf.update_available)

        # wait for background update check
        self.assertTrue(is_running('__workflow_update_check'))
        while is_running('__workflow_update_check'):
            time.sleep(0.05)
        time.sleep(1)

        # There *is* a newer version in the repo
        self.assertTrue(wf.update_available)

        # Mock out subprocess and check the correct command is run
        c = WorkflowMock()
        with c:
            self.assertTrue(wf.start_update())
        # wf.logger.debug('start_update : {}'.format(c.cmd))
        # assertEqual: assertEquals is a deprecated alias (removed in 3.12)
        self.assertEqual(c.cmd[0], '/usr/bin/python')
        self.assertEqual(c.cmd[2], '__workflow_update_install')

        # Grab the updated release data, then reset the cache
        update_info = wf.cached_data('__workflow_update_status')

        wf.reset()

        # Initialise with latest available release
        wf = Workflow(update_settings={
            'github_slug': 'deanishe/alfred-workflow-dummy',
            'version': update_info['version'],
        })

        wf.run(fake)

        # Wait for background update check
        self.assertTrue(is_running('__workflow_update_check'))
        while is_running('__workflow_update_check'):
            time.sleep(0.05)

        # Remote version is same as the one we passed to Workflow
        self.assertFalse(wf.update_available)
        self.assertFalse(wf.start_update())
Exemplo n.º 2
0
def main():
    """This method is installed as a console script entry point by setuptools

    It uses the command line arguments specified by opts() to generate a
    Workflow object and adds to it several WorkflowStages.

    If the needed command line arguments are not passed, the user is asked to
    enter them.

    The generated Workflow object is then executed with run() 
    """

    args = opts().parse_args()

    if args.help == 'all':
        opts().print_help()
        return
    elif args.help == 'stages':
        print WorkflowStage.longhelp()
        return

    logging.basicConfig(
        level=getattr(logging, args.logging.upper()),
        format='%(levelname)s: %(asctime)s in %(name)s - %(message)s',
        datefmt='%m/%d/%Y %I:%M:%S %p'
    )

    w = Workflow()

    if not args.stages:
        print 'Stages not given with --stages argument'
        print WorkflowStage.shorthelp()
        stages = raw_input(
            'Enter space separated stage specifiers (e.g. "1 2 3"): ').split()
    else:
        stages = args.stages

    classmap = {cls.spec: cls for cls in all_subclasses(WorkflowStage)}

    for stage_spec in stages:
        try:
            w.append(classmap[stage_spec](args))
        except KeyError as e:
            logging.error(
                'No valid stage specifier {0} - use "--help stages" to see '
                'stage specifiers for this software'.format(stage_spec))
            raise

    w.run()
Exemplo n.º 3
0
    def test_run_single_sensor_usecase(self):
        """Simulated single-sensor run: all 337 jobs must be processed."""
        usecase, mmdtype = 'mms3', 'mmd3'
        workflow = Workflow(usecase, Period('1991-01-01', '1992-01-01'))
        sensor_periods = [
            ('avhrr.n10', (1986, 11, 17), (1991, 9, 16)),
            ('avhrr.n11', (1988, 11, 8), (1994, 12, 31)),
            ('avhrr.n12', (1991, 9, 16), (1998, 12, 14)),
        ]
        for sensor, begin, end in sensor_periods:
            workflow.add_primary_sensor(sensor, begin, end)
        workflow.run(mmdtype, log_dir='.', with_history=True, simulation=True)

        with open('mms3.status', 'r') as status_file:
            self.assertEqual('337 created, 0 running, 0 backlog, 337 processed, 0 failed\n', status_file.readline())
        with open('mms3.report', 'r') as report_file:
            self.assertEqual(337, len(report_file.readlines()))

        # Clean up the artefacts written by the simulated run.
        os.remove('mms3.status')
        os.remove('mms3.report')
class YahooTransitAlfredWorkflow(object):
    """Alfred workflow that queries Yahoo! Transit for train routes.

    Expects a single Alfred argument of the form '<origin> <destination>';
    emits one Alfred result item per route found on the results page.
    """

    YAHOO_TRANSIT_SEARCH_URL = 'http://transit.yahoo.co.jp/search/result?flatlon=&from=%s&tlatlon=&to=%s'
    ICON_URL = 'icon.png'

    def __init__(self):
        self.wf = Workflow()
        self.log = self.wf.logger

    def run(self):
        """Run main() under Workflow's error handling; exit with its code."""
        sys.exit(self.wf.run(self.main))

    def main(self, wf):
        """Alfred entry point: parse the query and emit route items."""
        self.log.debug('start')
        args = wf.args

        queries = args[0].split()

        if len(queries) > 1:
            self.src, self.dst = queries[0:2]
            # The search URL expects URL-encoded UTF-8 station names.
            self.src = urllib.quote(self.src.encode('utf-8'))
            self.dst = urllib.quote(self.dst.encode('utf-8'))

            informations = self._fetch_transit_informations()
            for info in informations:
                wf.add_item(info.title, info.description, arg=info.url, valid=True, icon=self.ICON_URL)
        else:
            # Not enough arguments yet: show a usage hint instead.
            wf.add_item('transit <origin> <destination>')

        wf.send_feedback()

    def _get_url(self):
        """Return the search-results URL for the current src/dst pair."""
        return self.YAHOO_TRANSIT_SEARCH_URL % (self.src, self.dst)

    def _fetch_transit_informations(self):
        """Fetch the results page and parse every route node on it."""
        url = self._get_url()
        response = web.get(url)
        self.redirect_url = response.url
        soup = BeautifulSoup(response.content)
        # Route blocks are elements whose id starts with 'route'.
        routes = soup.select('[id^=route]')
        return [self._parse_information_from_node(route) for route in routes]

    def _parse_information_from_node(self, node):
        """Build a TransitInformation from one route's DOM node."""
        # Renamed from `id` so the builtin is no longer shadowed.
        route_id = node['id']
        base_url = self._get_url()
        url = '%s#%s' % (base_url, route_id)

        summary = node.select('.routeSummary')[0]
        time = summary.select('li.time span')[0].getText()
        duration = summary.select('li.time')[0].getText()
        # The time element's text is embedded in the duration text; strip it.
        duration = duration.replace(time, '')
        transfer = summary.select('li.transfer')[0].getText()
        fare = summary.select('li.fare')[0].getText()
        humanized_fare = fare.replace('[priic]', '')

        info = TransitInformation(time, duration, transfer, humanized_fare, url)
        return info
Exemplo n.º 5
0
    def execute(self):
        """Configure the workflow from stored settings and run self.main.

        Side effects: binds the module-level LOG to this workflow's logger,
        and loads the 'minus'/'plus' settings, falling back to the
        instance defaults. Exits the process with wf.run()'s return code.
        """
        global LOG
        wf = Workflow()
        self.wf = wf
        LOG = wf.logger

        # get_default presumably returns settings[key] if present, else the
        # supplied default -- TODO confirm against its definition.
        self.minus = get_default(wf.settings, 'minus', self.minus_default)
        self.plus = get_default(wf.settings, 'plus', self.plus_default)

        sys.exit(wf.run(self.main))
Exemplo n.º 6
0
def run_workflow(a, b, c, d, e, f, g, h, i, j):
    """Assemble and run one wind-farm cost Workflow from model-index choices.

    Each argument is an index selecting a model from the module-level model
    lists (wake, windrose, turbulence, cable, depth, thrust, merging, power,
    farm-support cost -- see the Workflow(...) call for which index feeds
    which list). Results are appended to output.dat.
    """
    workflow1 = Workflow(weibullmodels[i], windrosemodels[b], turbmodels[c], None, depthmodels[h],
                         farm_support_cost_models[j], None, oandm_given_costs, cablemodels[d], infield_efficiency,
                         thrust_coefficient, thrustmodels[f], wakemodels[a], mergingmodels[e], power,
                         powermodels[g], aep_average, other_costs, total_costs, LPC)
    nbins = 25  # Number of wind speeds bins for the discretisation of the Weibull distribution.
    real_angle = 30.0  # Angle [degrees] per wind sector in measured windrose.
    artif_angle = 1.0  # Desired angle [degrees] resolution for wake analysis.
    workflow1.windrose.nbins = nbins
    workflow1.windrose.artificial_angle = artif_angle
    workflow1.windrose.real_angle = real_angle
    workflow1.print_output = True
    workflow1.run("layout.dat")
    print workflow1.runtime, "seconds runtime"
    # Reset shared model state so a subsequent call starts clean.
    power2.reset()
    thrust_coefficient2.reset()

    # buffering=1 (line-buffered) so each result line survives a crash
    # part-way through a parameter sweep.
    with open("output.dat", "a", 1) as output2:
        output2.write("{}\t{}\t{}\n".format(workflow1.aep, workflow1.finance, workflow1.runtime))
Exemplo n.º 7
0
    def test_run_dual_sensor_usecase_without_arc(self):
        """Simulated dual-sensor run (ARC skipped): all 203 jobs must be processed."""
        usecase, mmdtype = 'mms1', 'mmd1'
        workflow = Workflow(usecase, Period('1991-01-01', '1992-01-01'))
        sensor_periods = [
            ('avhrr.n10', (1986, 11, 17), (1991, 9, 16)),
            ('avhrr.n11', (1988, 11, 8), (1994, 12, 31)),
            ('avhrr.n12', (1991, 9, 16), (1998, 12, 14)),
        ]
        # Same sensors serve as both primary and secondary.
        for sensor, begin, end in sensor_periods:
            workflow.add_primary_sensor(sensor, begin, end)
        for sensor, begin, end in sensor_periods:
            workflow.add_secondary_sensor(sensor, begin, end)
        workflow.run(mmdtype, log_dir='.', simulation=True, without_arc=True)

        with open('mms1.status', 'r') as status_file:
            self.assertEqual('203 created, 0 running, 0 backlog, 203 processed, 0 failed\n', status_file.readline())
        with open('mms1.report', 'r') as report_file:
            self.assertEqual(203, len(report_file.readlines()))

        # Clean up the artefacts written by the simulated run.
        os.remove('mms1.status')
        os.remove('mms1.report')
Exemplo n.º 8
0
class ListHandler(object):
    """
    The new way of fetching and displaying lists. Converting over to this.

    Subclasses provide fetch() (load the raw items) and add_item() (emit
    one item to Alfred); this base class supplies caching, query
    filtering and feedback.
    """

    def __init__(self, query='', cache_timeout=60 * 10):
        self.workflow = Workflow()
        self.query = query
        self.cache_timeout = cache_timeout

    @property
    def cache_key(self):
        """Cache entries are keyed by the concrete subclass name."""
        return self.__class__.__name__

    def run(self):
        """Execute under Workflow error handling, send feedback, then exit."""
        exit_code = self.workflow.run(self._run)
        self.workflow.send_feedback()
        sys.exit(exit_code)

    def fetch(self):
        """Load the raw item list; must be implemented by subclasses."""
        raise NotImplementedError

    def _run(self, workflow):
        """Inner body for Workflow.run: cached fetch, filter, emit."""
        items = workflow.cached_data(
            self.cache_key, self.fetch, self.cache_timeout)

        if self.query:
            items = self.filtered_items(items, self.query)

        for entry in items:
            self.add_item(entry)

    def add_item(self, item):
        """Emit a single item to Alfred; must be implemented by subclasses."""
        raise NotImplementedError

    def filtered_items(self, items, query):
        """Fuzzy-filter *items* against *query* using their string form."""
        return self.workflow.filter(query, items, key=lambda x: str(x))
Exemplo n.º 9
0
            tm = utcnow.astimezone(tz).strftime('%H:%M')
            times.append((tm, tz.zone.replace('_', ' ')))

        times.sort()

        # Display times in Alfred
        for tm, name in times:
            self.wf.add_item('{} {}'.format(tm, name))

        self.wf.send_feedback()


# This is a standard Python idiom for running a .py file when it's
# called directly. It's good practice to do this, as it means the file
# can be imported by other files without it executing.
if __name__ == '__main__':
    # Create an instance of our helper
    wf = Workflow()
    # Set the global `log` instance
    log = wf.logger
    # Set the global `decode` function
    decode = wf.decode
    # Create our workflow object (Demo is defined earlier in this file)
    demo = Demo()
    # Run it. If the inner function (demo.run) returns a non-zero
    # (`None` counts as zero) value, Alfred will recognise the workflow
    # as having failed. `wf.run` will catch any exceptions raised by
    # `demo.run`, display them in Alfred and return 1 to `sys.exit`,
    # indicating that the workflow failed.
    sys.exit(wf.run(demo.run))
Exemplo n.º 10
0
            output.append(VMDetails(
                m.name, m.id, get_vm_state(m.state), '/Applications/VirtualBox.app'))

    return output


def get_vm_state(state_const):
    """Map a MachineState constant value back to its state name, or None."""
    matches = [name for name, value
               in constants.all_values('MachineState').iteritems()
               if value == state_const]
    return matches[0] if matches else None


def get_vm_screenshot():
    """Capture a screenshot of the currently-selected VM; return the PNG path.

    Returns '' when no VM is selected. The image is written into the
    workflow cache directory, named by a hash of the VM name.
    """
    global currentVM
    if currentVM is None:
        return ''
    icon = '%s/%s.png' % (wf.cachedir, hash(currentVM.name))
    # NOTE(review): the VM name is interpolated into a shell command via
    # os.popen -- a name containing a double quote would break (or inject
    # into) the command line. Consider subprocess with an argument list.
    if 'Parallels' in currentVM.type:
        os.popen("""prlctl capture "%s" --file "%s" """ %
                 (currentVM.name, icon))
    else:
        os.popen("""VBoxManage controlvm "%s" screenshotpng "%s" """ %
                 (currentVM.name, icon))

    return icon

if __name__ == '__main__':
    # Run the `complete` entry point (defined above) under Workflow's
    # error handling; exit with its return code.
    wf = Workflow()
    sys.exit(wf.run(complete))
Exemplo n.º 11
0
from workflow import Workflow

usecase = 'mms2'
mmdtype = 'mmd2'

w = Workflow(usecase)

# (sensor, start, stop) primary-sensor coverage; atsr.1 is split into
# several distinct intervals.
_SENSOR_PERIODS = [
    ('atsr.1', '1991-08-01', '1996-09-01'),
    ('atsr.1', '1996-10-01', '1996-11-01'),
    ('atsr.1', '1996-12-30', '1997-02-01'),
    ('atsr.1', '1997-03-01', '1997-04-01'),
    ('atsr.1', '1997-05-01', '1997-06-01'),
    ('atsr.1', '1997-07-01', '1997-09-01'),
    ('atsr.1', '1997-10-01', '1997-11-01'),
    ('atsr.1', '1997-12-01', '1997-12-18'),
    ('atsr.2', '1995-06-01', '1996-01-01'),
    ('atsr.2', '1996-07-01', '2003-06-23'),
    ('atsr.3', '2002-05-20', '2012-04-09'),
]
for sensor, start, stop in _SENSOR_PERIODS:
    w.add_primary_sensor(sensor, start, stop)

w.set_samples_per_month(300000)

w.run(mmdtype, hosts=[('localhost', 48)])
Exemplo n.º 12
0
                code=request_token
            )
            WF.save_password(
                'pocket_access_token',
                user_credentials['access_token']
            )
            # We don't need the cache anymore. Clear it for security reasons
            WF.clear_cache()
        except RateLimitException:
            WF.logger.error('RateLimitException')


def refresh_list():  # pragma: no cover
    """Kick off pocket_refresh.py in the background unless already running."""
    if is_running('pocket_refresh'):
        return
    cmd = ['/usr/bin/python', WF.workflowfile('pocket_refresh.py')]
    run_in_background('pocket_refresh', cmd)


def get_icon(name):
    """Return the icon path for *name*, preferring the dark variant on dark themes."""
    if is_dark():
        name = '%s-dark' % name
    return 'icons/%s.png' % name


def is_dark():
    """True when the Alfred theme background's perceived luminance is below 0.5."""
    channels = WF.alfred_env['theme_background'][5:-6].split(',')
    rgb = [int(value) for value in channels]
    luminance = (0.299 * rgb[0] + 0.587 * rgb[1] + 0.114 * rgb[2]) / 255
    return luminance < 0.5


if __name__ == '__main__':
    # WF is a module-level Workflow instance created elsewhere in this file.
    WF.run(main)  # pragma: no cover
Exemplo n.º 13
0
from workflow import Workflow

# Job definition for the 'mms14' use case: atsr.3 (primary) matched
# against amsre (secondary).
usecase = 'mms14'
mmdtype = 'mmd14'

w = Workflow(usecase)
# the following lines define the full MMD tb 2016-08-01
#w.add_primary_sensor('atsr.3', '2002-05-20', '2012-04-09')
#w.add_primary_sensor('amsre', '2002-06-01', '2011-10-05')

# the following lines define the test MMD tb 2016-08-01
w.add_primary_sensor('atsr.3', '2008-05-01', '2008-06-01')
w.add_secondary_sensor('amsre', '2008-05-01', '2008-06-01')

w.set_samples_per_month(100000)
w.run(mmdtype, hosts=[('localhost', 8)])
Exemplo n.º 14
0
from workflow import Workflow

# Job definition for the 'mms3' use case: ATSR primary sensors only.
usecase = 'mms3'
mmdtype = 'mmd3'

w = Workflow(usecase)
# atsr.1 coverage is split into several distinct intervals.
w.add_primary_sensor('atsr.1', '1991-08-01', '1996-09-01')
w.add_primary_sensor('atsr.1', '1996-10-01', '1996-11-01')
w.add_primary_sensor('atsr.1', '1996-12-30', '1997-02-01')
w.add_primary_sensor('atsr.1', '1997-03-01', '1997-04-01')
w.add_primary_sensor('atsr.1', '1997-05-01', '1997-06-01')
w.add_primary_sensor('atsr.1', '1997-07-01', '1997-09-01')
w.add_primary_sensor('atsr.1', '1997-10-01', '1997-11-01')
w.add_primary_sensor('atsr.1', '1997-12-01', '1997-12-18')
w.add_primary_sensor('atsr.2', '1995-06-01', '1996-01-01')
w.add_primary_sensor('atsr.2', '1996-07-01', '2003-06-23')
w.add_primary_sensor('atsr.3', '2002-05-20', '2012-04-09')
w.set_samples_per_month(0)

w.run(mmdtype, hosts=[('localhost', 24)], calls=[('sampling-start', 1)], with_history=True)
Exemplo n.º 15
0
from workflow import Period
from workflow import Workflow

# Job definition for the 'mms7' use case: avhrr_f.m02 (primary) vs
# amsr2 (secondary); the ARC step is skipped (without_arc=True).
usecase = 'mms7'
mmdtype = 'mmd7'

w = Workflow(usecase, Period('2012-07-02', '2015-01-01'))
w.add_primary_sensor('avhrr_f.m02', '2007-03-01', '2014-04-01')
w.add_secondary_sensor('amsr2', '2012-07-02', '2015-04-01')
w.set_samples_per_month(5000000)

w.run(mmdtype, hosts=[('localhost', 12)], without_arc=True)
Exemplo n.º 16
0
        :rtype: ``list``
        """
        scriptdirs = [
            os.path.join(APP_SCRIPT_DIRECTORY, app_name),
            os.path.join(APP_SUPPORT_DIRECTORY, app_name, 'Scripts')
        ]

        scripts = []
        for scriptdir in scriptdirs:
            # scriptdir = os.path.join(APP_SCRIPT_DIRECTORY, app_name)
            if not os.path.isdir(scriptdir):
                self.wf.logger.debug(
                    'App script directory does not exists : {!r}'.format(
                        scriptdir))
                continue
            for root, dirnames, filenames in os.walk(scriptdir):
                for filename in filenames:
                    ext = os.path.splitext(filename)[1]
                    if ext.lower() not in SCRIPT_EXTENSIONS:
                        continue
                    scripts.append(os.path.join(root, filename))
        self.wf.logger.debug('{} scripts found for app {!r}'.format(
            len(scripts), app_name))
        return scripts


if __name__ == '__main__':
    # Hand control to ScriptRunner (defined above) under Workflow's
    # error handling.
    wf = Workflow()
    app = ScriptRunner()
    wf.run(app.run)
Exemplo n.º 17
0
# NOTE(review): `usecase` and `mmdtype` are defined earlier in this file,
# outside this excerpt.
w = Workflow(usecase)
w.add_primary_sensor('atsr.1', '1991-08-01', '1996-09-01')
w.add_primary_sensor('atsr.1', '1996-10-01', '1996-11-01')
w.add_primary_sensor('atsr.1', '1997-01-01', '1997-02-01')
w.add_primary_sensor('atsr.1', '1997-03-01', '1997-04-01')
w.add_primary_sensor('atsr.1', '1997-05-01', '1997-06-01')
w.add_primary_sensor('atsr.1', '1997-07-01', '1997-08-01')
w.add_primary_sensor('atsr.1', '1997-10-01', '1997-11-01')
w.add_primary_sensor('atsr.1', '1997-12-01', '1997-12-18')
w.add_primary_sensor('atsr.2', '1995-06-01', '1996-01-01')
w.add_primary_sensor('atsr.2', '1996-07-01', '2003-06-23')
w.add_primary_sensor('atsr.3', '2002-05-20', '2012-04-09')
# AVHRR secondaries; avhrr.n10 is currently disabled.
#w.add_secondary_sensor('avhrr.n10', '1986-11-17', '1991-09-17')
w.add_secondary_sensor('avhrr.n11', '1988-11-08', '1994-09-14')
w.add_secondary_sensor('avhrr.n12', '1991-09-16', '1998-12-15')
w.add_secondary_sensor('avhrr.n14', '1995-01-01', '2000-01-01')
w.add_secondary_sensor('avhrr.n15', '1998-10-26', '2000-08-01')
w.add_secondary_sensor('avhrr.n15', '2000-09-01', '2003-04-09')
w.add_secondary_sensor('avhrr.n15', '2003-12-21', '2010-08-01')
w.add_secondary_sensor('avhrr.n16', '2001-01-01', '2003-04-09')
w.add_secondary_sensor('avhrr.n16', '2003-12-21', '2011-01-01')
w.add_secondary_sensor('avhrr.n17', '2002-07-10', '2003-04-09')
w.add_secondary_sensor('avhrr.n17', '2003-12-21', '2010-10-01')
w.add_secondary_sensor('avhrr.n18', '2005-06-05', '2014-01-01')
w.add_secondary_sensor('avhrr.n19', '2009-02-06', '2014-01-01')
w.add_secondary_sensor('avhrr.m02', '2006-11-21', '2014-01-01')
w.set_samples_per_month(40000000)

w.run(mmdtype, hosts=[('localhost', 48)], selected_only=True)
Exemplo n.º 18
0

def main(wf):
    """List the supported date formats in Alfred, marking the active one.

    The currently-selected format gets a ' *' suffix and is made
    non-actionable, so the user can only pick a *different* format.
    """
    update_settings(wf)

    active_format = wf.settings['date-format']

    for format_key in sorted(DATE_MAPPINGS):
        marker = ' *' if format_key == active_format else ''
        # Re-selecting the active format would be a no-op, so that row is
        # displayed but not valid.
        wf.add_item(title=format_key + marker,
                    subtitle=DATE_MAPPINGS[format_key]['name'],
                    valid=(format_key != active_format),
                    arg=format_key)

    wf.send_feedback()


if __name__ == '__main__':
    # Seed the workflow with defaults so 'date-format' always exists.
    workFlow = Workflow(default_settings=DEFAULT_WORKFLOW_SETTINGS)
    sys.exit(workFlow.run(main))
Exemplo n.º 19
0
                arg=data['title'],
                valid=True)
    wf.send_feedback()


def remove(wf):
    """Delete the stored item whose link matches the second CLI argument."""
    target_link = wf.args[1]
    remaining = [entry for entry in wf.stored_data('pycrastinator')
                 if entry['link'] != target_link]
    wf.store_data('pycrastinator', remaining)


if __name__ == '__main__':
    wf = Workflow()
    wf.data_serializer = 'json'
    command = wf.args[0]

    # Ensure the data store exists before any handler touches it.
    if not wf.stored_data('pycrastinator'):
        wf.store_data('pycrastinator', [])

    # Dispatch on the first CLI argument; unknown commands fall back to main.
    handlers = {
        'add': add,
        'confirm': confirm,
        'remove': remove,
    }
    sys.exit(wf.run(handlers.get(command, main)))
Exemplo n.º 20
0
def set_clipboard(url):
    """Copy *url* to the macOS clipboard via osascript; return a status message.

    NOTE(review): url is interpolated straight into an AppleScript string --
    a double quote in it would break (or inject into) the command.
    """
    clipboard = os.popen(""" osascript -e 'set the clipboard to "%s"' """ %
                         url).readline()
    return 'Link copied to clipboard'


def archive_item(item_id):
    """Archive a Pocket item; return a human-readable status message."""
    access_token = wf.get_password('pocket_access_token')
    pocket_instance = Pocket(config.CONSUMER_KEY, access_token)
    try:
        pocket_instance.archive(item_id, wait=False)
    except Exception:
        # Best-effort: any failure is reported as a connection problem.
        return 'Connection error'
    return 'Link archived'


def delete_item(item_id):
    """Delete a Pocket item; return a human-readable status message."""
    access_token = wf.get_password('pocket_access_token')
    pocket_instance = Pocket(config.CONSUMER_KEY, access_token)
    try:
        pocket_instance.delete(item_id, wait=False)
    except Exception:
        # Best-effort: any failure is reported as a connection problem.
        return 'Connection error'
    return 'Link deleted'


if __name__ == '__main__':
    wf = Workflow()
    # `execute` is defined elsewhere in this module.
    sys.exit(wf.run(execute))
Exemplo n.º 21
0
    def _split_query(self, query):
        """Split *query* on DELIMITER into its two parts.

        Returns (None, query) when there is no delimiter. A query that
        *ends* with the delimiter signals the user deleted the trailing
        space, which is treated as "go back" and raised as GoBack.

        NOTE(review): rstrip(DELIMITER) strips a *set* of characters, not
        the suffix -- fine while DELIMITER is a single character; verify.
        """
        if not query or DELIMITER not in query:
            return None, query
        elif query.endswith(DELIMITER):  # trailing space deleted
            raise GoBack(query.rstrip(DELIMITER).strip())
        return [s.strip() for s in query.split(DELIMITER)]

    def _update(self, force=False):
        """Update cached data in a background process.

        Launches update_workflows.py via run_in_background; with
        force=True a full refresh is requested and a progress message is
        printed for Alfred.

        :returns: 0 on successful launch, 1 if the launch failed
        """
        log.debug('Updating workflow lists...')
        args = ['/usr/bin/python',
                self.wf.workflowfile('update_workflows.py')]
        if force:
            args.append('--force-update')
        log.debug('update command : %r', args)
        retcode = run_in_background('update', args)
        if retcode:
            log.debug('Update failed with code %r', retcode)
            print('Update failed')
            return 1
        if force:
            print('Updating workflow list…'.encode('utf-8'))
        return 0


if __name__ == '__main__':
    wf = Workflow()
    log = wf.logger  # module-level logger used by the methods above
    pk = PackalWorkflow()
    wf.run(pk.run)
Exemplo n.º 22
0
# -*- coding: utf-8 -*-
# !/usr/bin/env python3

import yaml
import argparse
from workflow import Workflow


def __arg_parse():
    """Parse the command line; -c/--config (required) names a YAML file."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', help=' config file', required=True)
    return parser.parse_args()


if __name__ == '__main__':
    args = __arg_parse()
    print(args)
    # safe_load: yaml.load without an explicit Loader is deprecated and can
    # construct arbitrary Python objects from untrusted input. (Assumes the
    # config file uses only standard YAML tags -- confirm if custom tags
    # are ever needed.)
    # `with` ensures the config file is closed (the original leaked it).
    with open(args.config, encoding='utf-8') as config_file:
        configs = yaml.safe_load(config_file)
    workflow = Workflow(configs)
    workflow.run(1)
Exemplo n.º 23
0
# NOTE(review): `w`, `usecase` and `mmdtype` are defined earlier in this
# file, outside this excerpt. All primary-sensor lines below are disabled;
# only the five avhrr secondaries and the final two statements are active.
#w.add_primary_sensor('avhrr.n12', '1991-09-16', '1998-12-15')
#w.add_primary_sensor('avhrr.n14', '1995-01-01', '2000-01-01')
#w.add_primary_sensor('avhrr.n15', '1998-10-26', '2003-04-09')
#w.add_primary_sensor('avhrr.n15', '2003-12-21', '2011-01-01')
#w.add_primary_sensor('avhrr.n16', '2001-01-01', '2003-04-09')
#w.add_primary_sensor('avhrr.n16', '2003-12-21', '2011-01-01')
#w.add_primary_sensor('avhrr.n17', '2002-07-10', '2003-04-09')
#w.add_primary_sensor('avhrr.n17', '2003-12-21', '2011-01-01')
#w.add_primary_sensor('avhrr.n18', '2005-06-05', '2014-01-01')
#w.add_primary_sensor('avhrr.n19', '2009-02-06', '2014-01-01')
#w.add_primary_sensor('avhrr.m02', '2006-11-21', '2014-01-01')
w.add_secondary_sensor('avhrr.n07', '1981-09-01', '1985-02-01')
w.add_secondary_sensor('avhrr.n08', '1983-05-04', '1985-10-03')
w.add_secondary_sensor('avhrr.n09', '1985-02-25', '1988-11-07')
w.add_secondary_sensor('avhrr.n10', '1986-11-17', '1991-09-17')
w.add_secondary_sensor('avhrr.n11', '1988-11-08', '1994-09-14')
#w.add_secondary_sensor('avhrr.n12', '1991-09-16', '1998-12-15')
#w.add_secondary_sensor('avhrr.n14', '1995-01-01', '2000-01-01')
#w.add_secondary_sensor('avhrr.n15', '1998-10-26', '2003-04-09')
#w.add_secondary_sensor('avhrr.n15', '2003-12-21', '2011-01-01')
#w.add_secondary_sensor('avhrr.n16', '2001-01-01', '2003-04-09')
#w.add_secondary_sensor('avhrr.n16', '2003-12-21', '2011-01-01')
#w.add_secondary_sensor('avhrr.n17', '2002-07-10', '2003-04-09')
#w.add_secondary_sensor('avhrr.n17', '2003-12-21', '2010-10-01')
#w.add_secondary_sensor('avhrr.n18', '2005-06-05', '2014-01-01')
#w.add_secondary_sensor('avhrr.n19', '2009-02-06', '2014-01-01')
#w.add_secondary_sensor('avhrr.m02', '2006-11-21', '2014-01-01')
w.set_samples_per_month(40000000)

w.run(mmdtype, hosts=[('localhost', 48)], selected_only=True)
Exemplo n.º 24
0
# NOTE(review): `w`, `usecase` and `mmdtype` are defined earlier in this
# file, outside this excerpt. Every sensor line below is disabled; only the
# final set_samples_per_month(0) and w.run(...) statements execute.
# w.add_primary_sensor('avhrr.n10', '1986-11-17', '1991-09-17')
# w.add_primary_sensor('avhrr.n11', '1988-11-08', '1995-01-01')
# w.add_primary_sensor('avhrr.n12', '1991-09-16', '1998-12-15')
# w.add_primary_sensor('avhrr.n14', '1995-01-01', '2002-10-08')
# w.add_primary_sensor('avhrr.n15', '1998-10-26', '2013-01-01')
# w.add_primary_sensor('avhrr.n16', '2001-01-01', '2013-01-01')
# w.add_primary_sensor('avhrr.n17', '2002-06-25', '2013-01-01')
# w.add_primary_sensor('avhrr.n18', '2005-05-20', '2015-12-01')
# w.add_primary_sensor('avhrr.n19', '2009-02-07', '2015-12-01')
# w.add_primary_sensor('avhrr.m02', '2006-10-30', '2015-12-01')

# this one does the after-failure-processing 2016-03-09
#w.add_primary_sensor('avhrr.n14', '1995-01-01', '2002-10-08')
#w.add_primary_sensor('avhrr.n15', '2006-12-01', '2013-01-01')
#w.add_primary_sensor('avhrr.n16', '2006-01-01', '2013-01-01')
#w.add_primary_sensor('avhrr.n17', '2005-12-01', '2013-01-01')
#w.add_primary_sensor('avhrr.n18', '2005-11-01', '2015-12-01')
#w.add_primary_sensor('avhrr.n19', '2009-02-07', '2015-12-01')
#w.add_primary_sensor('avhrr.m02', '2006-10-30', '2015-12-01')

# this one does the after-failure-processing 2016-04-15
#w.add_primary_sensor('avhrr.n15', '2010-04-01', '2013-01-01')
#w.add_primary_sensor('avhrr.n16', '2010-11-01', '2013-01-01')
#w.add_primary_sensor('avhrr.n17', '2010-10-01', '2013-01-01')
#w.add_primary_sensor('avhrr.n18', '2011-01-01', '2015-12-01')
#w.add_primary_sensor('avhrr.n19', '2011-01-01', '2015-12-01')
#w.add_primary_sensor('avhrr.m02', '2011-01-01', '2015-12-01')
w.set_samples_per_month(0)

w.run(mmdtype, hosts=[('localhost', 8)], calls=[('sampling-start.sh', 1), ('coincidence-start.sh', 1)], with_history=True)
Exemplo n.º 25
0
        if self.arg == 'True':
            self.zotquery.update_clone()
            self.zotquery.update_json()
            return 0
        update, spot = self.zotquery.is_fresh()
        if update:
            if spot == 'Clone':
                self.zotquery.update_clone()
            elif spot == 'JSON':
                self.zotquery.update_json()
        return 0


#-----------------------------------------------------------------------------
# Main Script
#-----------------------------------------------------------------------------

def main(wf):
    """Accept Alfred's args and pipe to workflow class"""
    cli_args = docopt(config.__usage__,
                      argv=wf.args,
                      version=config.__version__)
    log.info(wf.args)
    # Run the parsed arguments through the workflow dispatcher; anything it
    # returns is printed for Alfred to display.
    result = ZotWorkflow(wf).run(cli_args)
    if result:
        print(result)

if __name__ == '__main__':
    # WF is a module-level Workflow instance created elsewhere in this file.
    sys.exit(WF.run(main))
Exemplo n.º 26
0
# -*-coding:utf-8 -*-
# Created date: 2021-05-02
# @Author  : Shuang Song
# @Contact   : [email protected]
# GitHub   : https://github.com/SongshGeo
# Research Gate: https://www.researchgate.net/profile/Song_Shuang9

import sys
from datetime import date
from workflow import Workflow

reload(sys)
sys.setdefaultencoding("utf-8")


def main(wf):
    """Create a vocabulary-note markdown file at the path in Alfred's query.

    The file is seeded with a creation-date header and empty sections for
    the Chinese translation and variants.
    """
    # Get query from Alfred: the target markdown file path
    query = wf.args[0]
    d = date.today().strftime("%Y-%m-%d")
    text = """创建日期: %s\n中译:\n变体:""" % d

    # `with` closes the file on exit; the original's explicit close() and
    # trailing `pass` were redundant and have been dropped.
    with open(query, 'w') as markdown_file:
        markdown_file.write(text)


if __name__ == u"__main__":
    workflow = Workflow()
    sys.exit(workflow.run(main))
Exemplo n.º 27
0
from workflow import Period
from workflow import Workflow

# Job definition for the 'mms6b' use case: amsr2 as sole primary sensor;
# the ARC step is skipped (without_arc=True).
usecase = 'mms6b'
mmdtype = 'mmd6'

w = Workflow(usecase, Period('2012-07-02', '2015-01-01'))
w.add_primary_sensor('amsr2', '2012-07-02', '2015-04-01')
w.set_samples_per_month(3000000)

w.run(mmdtype, hosts=[('localhost', 24)],
      calls=[('sampling-start.sh', 1), ('coincidence-start.sh', 2), ('sub-start.sh', 2), ('mmd-start.sh', 2)],
      with_history=True, without_arc=True)
Exemplo n.º 28
0
    if request_token:
        try:
            user_credentials = Pocket.get_credentials(
                consumer_key=config.CONSUMER_KEY, code=request_token)
            WF.save_password('pocket_access_token',
                             user_credentials['access_token'])
            # We don't need the cache anymore. Clear it for security reasons
            WF.clear_cache()
        except RateLimitException:
            WF.logger.error('RateLimitException')


def refresh_list():  # pragma: no cover
    """Spawn pocket_refresh.py in the background unless one is already running."""
    if not is_running('pocket_refresh'):
        cmd = ['/usr/bin/python', WF.workflowfile('pocket_refresh.py')]
        run_in_background('pocket_refresh', cmd)


def get_icon(name):
    """Return the icon path for *name*, using the '-dark' variant on dark themes."""
    name = '%s-dark' % name if is_dark() else name
    return 'icons/%s.png' % name


def is_dark():
    """True when the theme background's perceived luminance is below 0.5.

    The slice [5:-6] is assumed to isolate the 'r,g,b' portion of an
    'rgba(...)'-style theme_background string -- TODO confirm format.
    """
    rgb = [int(x) for x in WF.alfred_env['theme_background'][5:-6].split(',')]
    return (0.299 * rgb[0] + 0.587 * rgb[1] + 0.114 * rgb[2]) / 255 < 0.5


if __name__ == '__main__':
    # WF is a module-level Workflow instance created elsewhere in this file.
    WF.run(main)  # pragma: no cover
Exemplo n.º 29
0
                for item in res['web']:
                    wf.add_item(
                        title    = ', '.join(item['value']),
                        subtitle = item['key'],
                        copytext = ', '.join(item['value']),
                        arg      = ', '.join(item['value']),
                        valid    = True,
                    )
        else:
            wf.add_item(
                title    = errorInfo[str(error_code)],
                subtitle = wf.args[0],
            )

    except urllib2.URLError:
        wf.add_item(
            title    = u'网络异常',
            subtitle = u'请检查网络设置',
        )
    except:
        wf.add_item(
            title    = u'未知错误',
            subtitle = u'未知错误',
        )
    finally:
        wf.send_feedback()

if __name__ == '__main__':
    # `translate` is defined above, outside this excerpt.
    wf = Workflow()
    sys.exit(wf.run(translate))
Exemplo n.º 30
0
    hipchat_search = wflw.cached_data('alfred-hipchat',
                                      wrapper,
                                      max_age=wflw.settings['cache_max_age'])

    if query:
        hipchat_search = wflw.filter(query,
                                     hipchat_search,
                                     key=search_hipchat_names,
                                     min_score=20)

    if hipchat_search:
        for item in hipchat_search:
            wflw.add_item(
                title=item['name'],
                subtitle=item['description'],
                arg=json.dumps(item),
                valid=True
                )
        wflw.send_feedback()


if __name__ == u"__main__":
    WF = Workflow()
    if 'api_url' not in WF.settings:
        WF.settings['api_url'] = "https://api.hipchat.com"
    if 'timeout' not in WF.settings:
        WF.settings['timeout'] = 5
    if 'cache_max_age' not in WF.settings:
        WF.settings['cache_max_age'] = 180
    sys.exit(WF.run(main))
Exemplo n.º 31
0
    def _parse_args(self):
        """Parse command-line arguments with argparse"""
        parser = ArgumentParser()
        parser.add_argument(
            'action',
            choices=('search',
                     'config',
                     'edit_client_rules',
                     'setclient',
                     'toggle',
                     'compose',
                     'reload',
                     'update',
                     'help'))
        parser.add_argument('query', nargs='?', default='')
        return parser.parse_args(self.wf.args)


if __name__ == '__main__':

    wf = Workflow(
        update_settings=UPDATE_SETTINGS,
        default_settings=DEFAULT_SETTINGS,
        # libraries=[os.path.join(os.path.dirname(__file__), 'libs')],
    )
    # wf.magic_prefix = 'wf:'
    log = wf.logger  # module-level logger used by MailToApp
    app = MailToApp()
    sys.exit(wf.run(app.run))
Exemplo n.º 32
0
def get_all_files(root_dir):
    """Recursively collect every file path under *root_dir* into ALL_FILES."""
    for entry in os.listdir(root_dir):
        full_path = os.path.join(root_dir, entry)
        if os.path.isdir(full_path):
            get_all_files(full_path)
            continue
        ALL_FILES.append(full_path)


def upload_file(wf):
    """Upload every file under PATH to Qiniu and report counts in Alfred."""
    q = Auth(access_key, secret_key)

    get_all_files(PATH)
    uploaded = 0
    for local_path in ALL_FILES:
        # Object key is the path relative to the upload root.
        key = local_path[len(PATH):]
        token = q.upload_token(bucket_name, key, 3600)
        put_file(token, key, local_path)
        uploaded += 1

    wf.add_item('删除 %s 个旧文件' % DELETE_COUNT)
    wf.add_item('成功上传 %s 个文件' % uploaded)
    wf.send_feedback()


if __name__ == '__main__':
    # Purge the remote bucket first, then re-upload everything under PATH.
    clear_qiniu()
    wf = Workflow()

    sys.exit(wf.run(upload_file))
Exemplo n.º 33
0
from workflow import Workflow

usecase = 'mms12'
mmdtype = 'mmd12'

w = Workflow(usecase)
w.add_primary_sensor('avhrr_f.m01', '2012-12-13', '2016-01-04')
w.add_primary_sensor('avhrr_f.m02', '2007-03-01', '2016-01-04')
w.set_samples_per_month(0)

w.run(mmdtype, hosts=[('localhost', 8)], calls=[('sampling-start.sh', 1), ('coincidence-start.sh', 1)], with_history=True)
Exemplo n.º 34
0
        action, vault_id = wf.args
    else:
        vault_id = None

    if vault_id == "lpass-cli_login":
        subprocess.check_output('/usr/bin/osascript TerminalLoginLaunch', shell=True)
        sys.exit(1)

    processed_info = dict()
    lp_show = '/usr/local/bin/lpass show {}'.format(vault_id)
    raw_info = subprocess.check_output(lp_show, shell=True)
    for row in raw_info.split('\n'):
        try:
            if "Username:"******"Username"] = row.split("Username:"******"Password" in row:
                processed_info["Password"] = row.split("Password:"******"__main__":
    wf = Workflow()
    sys.exit(wf.run(parse_arguments))
Exemplo n.º 35
0
    # Delete argument cache file
    os.unlink(argcache)

    pidfile = _pid_file(name)

    # Fork to background
    _background()

    # Write PID to file
    with open(pidfile, 'wb') as file:
        file.write('{}'.format(os.getpid()))

    # Run the command
    try:
        log.debug('Task `{}` running'.format(name))
        log.debug('cmd : {!r}'.format(args))

        retcode = subprocess.call(args, **kwargs)

        if retcode:
            log.error('Command failed with [{}] : {!r}'.format(retcode, args))

    finally:
        if os.path.exists(pidfile):
            os.unlink(pidfile)
        log.debug('Task `{}` finished'.format(name))


if __name__ == '__main__':  # pragma: no cover
    # `wf` and `main` are presumably bound earlier in this module — not
    # visible in this excerpt.
    wf.run(main)
Exemplo n.º 36
0
#!/usr/bin/python
# encoding: utf-8
#
# Copyright (c) 2016 Dean Jackson <*****@*****.**>
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2016-07-9
#
"""
"""

from __future__ import print_function, unicode_literals, absolute_import

import sys

from workflow import Workflow

log = None


def main(wf):
    """Do nothing."""
    # Only records the data directory location for debugging.
    log.debug('datadir=%r', wf.datadir)


if __name__ == '__main__':
    wf = Workflow()
    log = wf.logger  # bind the module-level logger before main() runs
    sys.exit(wf.run(main))
Exemplo n.º 37
0
        raise KeyError('Bundle ID unknown : {}'.format(bid))

    def _split_query(self, query):
        """Split *query* on DELIMITER.

        Returns ``(None, query)`` when there is no delimiter, otherwise
        a list of the stripped parts. Raises GoBack when the query ends
        with the delimiter (i.e. the trailing space was deleted).
        """
        # Idiomatic membership test (was `not DELIMITER in query`).
        if not query or DELIMITER not in query:
            return None, query
        if query.endswith(DELIMITER):  # trailing space deleted
            raise GoBack(query.rstrip(DELIMITER).strip())
        return [s.strip() for s in query.split(DELIMITER)]

    def _update(self, force=False):
        """Kick off a background refresh of the cached workflow lists."""
        log.debug('Updating workflow lists...')
        cmd = ['/usr/bin/python',
               self.wf.workflowfile('update_workflows.py')]
        if force:
            cmd.append('--force-update')
        log.debug('update command : {}'.format(cmd))
        retcode = run_in_background('update', cmd)
        if retcode:
            # Non-zero exit from the background task: tell the user.
            log.debug('Update failed with code {}'.format(retcode))
            print('Update failed')
            return 1
        print('Updating workflow list…'.encode('utf-8'))
        return 0

if __name__ == '__main__':
    wf = Workflow()
    log = wf.logger  # module-level logger used inside the class methods
    pk = PackalWorkflow()
    wf.run(pk.run)
Exemplo n.º 38
0
    return result


def main(wf):
    """Fetch and cache the user's GitLab projects for later filtering."""
    try:
        # Get API key from Keychain
        api_key = wf.get_password('gitlab_api_key')
        api_url = wf.settings.get('api_url',
                                  'https://gitlab.com/api/v4/projects')

        # Retrieve projects from cache if available and no more than 3600
        # seconds old (see max_age below)
        def wrapper():
            # Deferred fetch: only invoked by cached_data on a cache miss.
            return get_projects(api_key, api_url)

        projects = wf.cached_data('projects', wrapper, max_age=3600)

        # Record our progress in the log file
        log.debug('{} gitlab projects cached'.format(len(projects)))

    except PasswordNotFound:  # API key has not yet been set
        # Nothing we can do about this, so just log it
        wf.logger.error('No API key saved')


if __name__ == u"__main__":
    wf = Workflow()
    log = wf.logger  # module-level logger used inside main()
    wf.run(main)
Exemplo n.º 39
0
        for item in vpnList:
            subtitle = '(' + item['status'] + ')' + item['type']
            actionArg = item['id'] + ',' + item['type']
            if item['status'] == VPN_STATUS_CONNECTED:
                actionArg = 'off,' + actionArg
            else:
                actionArg = 'on,' + actionArg
            icon = ICON_ON if item['status'] == VPN_STATUS_CONNECTED else ICON_OFF
            workflow.add_item(title=item['name'], subtitle=subtitle, icon=icon,
                arg=actionArg, valid=True)

        workflow.send_feedback()
        return 0
    else:
        # Do action to switch vpn.
        vpnControl, vpnId, vpnType = args.query.split(',')
        if vpnControl == 'on':
            connect(workflow, vpnId, vpnType)
        elif vpnControl == 'onsequence':
            vpnIds = vpnId.split(':')
            vpnTypes = vpnType.split(':')
            connectSequence(workflow, vpnIds, vpnTypes)
        else:
            disconnect(vpnId)
        return 0


if __name__=="__main__":
    # `workflow` is read as a module global by the VPN helper functions.
    workflow = Workflow()
    sys.exit(workflow.run(main))
Exemplo n.º 40
0
# -*- coding: utf-8 -*-
# !/usr/bin/env python3

import yaml
import argparse
from workflow import Workflow


def __arg_parse():
    """Parse command-line options; ``-c``/``--config`` is mandatory."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-c', '--config',
                            help=' config file',
                            required=True)
    return arg_parser.parse_args()


if __name__ == '__main__':
    cli_args = __arg_parse()
    # Close the config file deterministically (it was previously leaked).
    with open(cli_args.config, encoding='utf-8') as config_file:
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary Python objects; prefer yaml.safe_load if the config
        # can come from an untrusted source.
        configs = yaml.load(config_file)
    workflow = Workflow(configs)
    workflow.run(0)
Exemplo n.º 41
0
from workflow import Workflow

# Use-case / MMD type identifiers this script drives.
usecase = "mms14"
mmdtype = "mmd14"

w = Workflow(usecase)
# the following lines define the full MMD tb 2016-08-01
# w.add_primary_sensor('atsr.3', '2002-05-20', '2012-04-09')
# w.add_primary_sensor('amsre', '2002-06-01', '2011-10-05')

# the following lines define the test MMD tb 2016-08-01
w.add_primary_sensor("atsr.3", "2008-05-01", "2008-06-01")
w.add_secondary_sensor("amsre", "2008-05-01", "2008-06-01")

w.set_samples_per_month(100000)
# Execute locally with 8 parallel slots.
w.run(mmdtype, hosts=[("localhost", 8)])
Exemplo n.º 42
0
 def execute(self):
     """Create the Workflow, attach it and its logger to self, run main()."""
     wf = Workflow()
     self.wf = wf
     self.log = wf.logger
     sys.exit(wf.run(self.main))
Exemplo n.º 43
0
 def execute(self):
     """Create the Workflow, publish its logger as module-global LOG, run main()."""
     global LOG
     wf = Workflow()
     LOG = wf.logger
     sys.exit(wf.run(self.main))
Exemplo n.º 44
0
        phonetic_type) else ''

    username = sys.argv[sys.argv.index('-username') +
                        1] if '-username' in sys.argv else None
    password = sys.argv[sys.argv.index('-password') +
                        1] if '-password' in sys.argv else None
    filepath = sys.argv[sys.argv.index('-filepath') +
                        1] if '-filepath' in sys.argv else os.path.join(
                            os.environ['HOME'],
                            'Documents/Alfred-youdao-wordbook.xml')
    textpath = sys.argv[sys.argv.index('-textpath') +
                        1] if '-textpath' in sys.argv else os.path.join(
                            os.environ['HOME'], 'Documents/youdao-wordbook.md')

    m2 = hashlib.md5()
    m2.update(password)
    password_md5 = m2.hexdigest()

    item = {
        "word": params[0],
        "trans": params[1],
        "phonetic": phonetic,
        "tags": "Alfred",
        "progress": "-1",
    }

    saver = SaveWord(username, password_md5, filepath, textpath, item)
    wf = Workflow()

    sys.exit(wf.run(saver.save))
def delete_item(item_id):
    """Delete bookmark *item_id* via the API; return a status message."""
    client = get_client(wf)
    try:
        client.delete_bookmark(item_id)
    except ServerNotFoundError:
        return 'Connection error'
    return 'Link deleted'


def authorize(username, password):
    """Exchange *username*/*password* for OAuth tokens via xauth and store
    them in the Keychain.

    Returns a human-readable status message for display in Alfred.
    """
    import config
    try:
        user_credentials = xauth(
            config.CONSUMER_KEY, config.CONSUMER_SECRET, username, password)
        if len(user_credentials) == 2:
            wf.save_password(
                'readability_oauth_token', user_credentials[0])
            wf.save_password(
                'readability_oauth_token_secret', user_credentials[1])
            return 'Workflow authorized.'
    except Exception:
        # Was a bare ``except:``; narrowing to Exception avoids swallowing
        # SystemExit/KeyboardInterrupt. Any failure means auth didn't finish.
        pass

    return 'Authorization failed.'


if __name__ == '__main__':
    wf = Workflow()
    # `execute` is presumably defined elsewhere in this module (not visible here).
    sys.exit(wf.run(execute))
Exemplo n.º 46
0
        for name in names:
            wf.add_item("Post %s" % name, "via %s" % device,
                        arg=name,
                        icon=ICON_FILE_NAME,
                        valid=True)
    wf.send_feedback()

def main_post(wf):
    """Send the IR signal named by the second CLI argument."""
    with open(IRKIT_CONFIG_JSON_PATH, 'r') as config_fp:
        config = json.loads(config_fp.read())
        client = Client(config)
        signal_name = wf.args[1]
        client.post_signal(signal_name)
        wf.send_feedback()

def config_not_found(wf):
    """Show an error row telling the user the config file is missing."""
    message = u'%s is not found' % CONFIG_FILE_NAME
    wf.add_item(message, icon=ICON_ERROR)
    wf.send_feedback()

if __name__ == '__main__':
    # Read the workflow version from the bundled ``version`` file, closing
    # the handle instead of leaking it as the original did.
    version_path = os.path.join(os.path.dirname(__file__), 'version')
    with open(version_path) as version_file:
        version = version_file.read()
    wf = Workflow(update_settings={
        'github_slug': 'giginet/alfred-irkit-workflow',
        'version': version,
    })
    args = wf.args
    if not os.path.exists(IRKIT_CONFIG_JSON_PATH):
        sys.exit(wf.run(config_not_found))
    if len(args) >= 2 and args[0] == '--post':
        sys.exit(wf.run(main_post))
    sys.exit(wf.run(main_search))
Exemplo n.º 47
0
def add_action(wf, unit_from, unit_to, rate):
    """Store a conversion rate (and its inverse) and echo both to stdout.

    Bad input (non-numeric or zero rate) is silently ignored.
    """
    try:
        rate = float(rate)
        unit_from = unit_from.upper()
        unit_to = unit_to.upper()

        # stored_data returns None when the table has never been saved.
        table = wf.stored_data(TABLENAME) or {}
        table[(unit_from, unit_to)] = rate
        table[(unit_to, unit_from)] = 1 / rate
        wf.store_data(TABLENAME, table)

        print('1%s = %s%s' % (unit_from, number_to_string(rate), unit_to), file=sys.stdout)
        print('1%s = %s%s' % (unit_to, number_to_string(1/rate), unit_from), file=sys.stdout)
        sys.stdout.flush()
    except (ValueError, ZeroDivisionError):
        # ValueError: rate isn't a float (original behavior).
        # ZeroDivisionError: rate == 0 previously crashed on the inverse.
        pass


def main(wf):
    """Dispatch the Alfred query; ``add,FROM,TO,RATE`` stores a new rate."""
    query = wf.args[0]

    if 'add' in query:
        _, unit_from, unit_to, rate = query.split(',')
        add_action(wf, unit_from, unit_to, rate)


if __name__ == '__main__':
    # Run the workflow and propagate its exit status to the shell.
    wf = Workflow()
    sys.exit(wf.run(main))
Exemplo n.º 48
0
    parts = urlparse.urlsplit(clipboard)
    if not parts.scheme or not parts.netloc:
        return None
    return {'url': clipboard, 'title': None}


def add_link(item, tags):
    """Add *item* to Pocket; return the API result dict, or None on failure."""
    if not item:
        return None
    try:
        return POCKET.add(url=item['url'],
                          title=item['title'],
                          tags=','.join(tags))[0]
    except InvalidQueryException:
        return None


def add_and_archive_link(link, tags):
    """Add *link* to Pocket and immediately archive it; return success flag."""
    result = add_link(link, tags)
    ok = (result and 'status' in result and 'item' in result
          and 'item_id' in result['item'])
    if not ok:
        WF.logger.debug(result)
        return False

    POCKET.archive(result['item']['item_id'], wait=False)
    return True


if __name__ == '__main__':
    # WF is the module-level Workflow instance (created earlier, not shown).
    WF.run(main)
Exemplo n.º 49
0
            if len(actions) > 0:
                for action in actions:
                    wf.add_item(action['name'], action['description'],
                                uid=action['name'],
                                autocomplete=action['autocomplete'],
                                arg=action['arg'],
                                valid=action['valid'],
                                icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item('No action found for "%s"' % query,
                            autocomplete='',
                            icon=helpers.get_icon(wf, 'info'))

        if len(wf._items) == 0:
            query_name = query[query.find(' ') + 1:]
            wf.add_item('No formula found for "%s"' % query_name,
                        autocomplete='%s ' % query[:query.find(' ')],
                        icon=helpers.get_icon(wf, 'info'))

    wf.send_feedback()

    # refresh cache
    cmd = ['/usr/bin/python', wf.workflowfile('brew_refresh.py')]
    run_in_background('brew_refresh', cmd)


if __name__ == '__main__':
    # Enable self-updating from the GitHub repo named by GITHUB_SLUG.
    wf = Workflow(update_settings={'github_slug': GITHUB_SLUG})
    sys.exit(wf.run(main))
Exemplo n.º 50
0
def main(wf):
    """List the items served by the local HTTP API as Alfred results."""
    url = 'http://127.0.0.1:50761/api/list'
    r = web.get(url)

    # throw an error if request failed
    # Workflow will catch this and show it to the user
    r.raise_for_status()

    # Parse the JSON response and extract the items under 'data'
    result = r.json()
    items = result['data']

    # Loop through the returned items and add a result row for each to
    # the list of results for Alfred
    for item in items:
        on = item.get('on', False)
        wf.add_item(title=item['title'],
                    subtitle=get_subtitle(item),
                    arg=item['id'],
                    valid=True,
                    icon='on.png' if on else 'off.png')

    # Send the results to Alfred as XML
    wf.send_feedback()


if __name__ == '__main__':
    # Run main() under the Workflow wrapper and exit with its status.
    my_wf = Workflow()
    sys.exit(my_wf.run(main))