def test_install_update(httpserver, info):
    """Update is installed."""
    # Start from a clean slate: no cached update status.
    wf = Workflow()
    wf.reset()
    assert wf.cached_data('__workflow_update_status') is None

    with fakeresponse(httpserver, DATA_JSON, HTTP_HEADERS_JSON):
        # Already on the latest release: nothing to install.
        assert update.install_update(TEST_REPO_SLUG, RELEASE_LATEST) is False
        # An older release must see an available update.
        assert update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT) is True

        # The new workflow file should be downloaded and opened.
        mock = WorkflowMock()
        with mock:
            assert update.install_update(TEST_REPO_SLUG,
                                         RELEASE_CURRENT) is True
        assert mock.cmd[0] == 'open'
        assert mock.cmd[1].endswith('.alfredworkflow')
        # Once installed, no further update is reported as available.
        assert wf.cached_data('__workflow_update_status')['available'] is False
def workflow():
    """Return the shared :class:`Workflow` singleton, creating it lazily.

    The instance is configured to self-update from GitHub. Pre-release
    updates are only downloaded when the installed version is itself a
    pre-release (i.e. the version string contains a ``-``).

    FIX: ``_update_settings`` was declared ``global`` but never read or
    assigned in this function, so the stale name was removed.
    """
    global _workflow
    if _workflow is None:
        version = '__VERSION__'  # replaced with the real version at build time
        _workflow = Workflow(
            capture_args=False,
            update_settings={
                'github_slug': 'idpaterson/alfred-wunderlist-workflow',
                'version': version,
                # Check for updates daily
                # TODO: check less frequently as the workflow becomes more
                # stable
                'frequency': 1,
                # Always download pre-release updates if a prerelease is
                # currently installed
                'prerelease': '-' in version
            }
        )
        # Avoid default logger output configuration
        _workflow.logger = logging.getLogger('workflow')
    return _workflow
class TestDownload(unittest.TestCase):
    """Integration test for the itebooks download command."""

    def setUp(self):
        self.wf = Workflow()
        self.wf.reset()
        download.log = self.wf.logger

    def tearDown(self):
        self.wf.reset()

    def test_download(self):
        """Downloading an ebook places the PDF in the default folder."""
        link = 'http://filepi.com/i/RSpHA1T'
        book_id = '1529159300'
        title = 'Expert Oracle and Java Security'
        argv = 'program --download-from-itebooks {} {} {}'.format(
            link, book_id, title).split()
        with patch.object(sys, 'argv', argv):
            exit_code = download.main(self.wf)

        folder = os.path.expanduser(itebooks.default_download_folder)
        target = os.path.join(folder, 'Expert Oracle and Java Security.pdf')
        self.assertEqual(exit_code, 0)
        self.assertTrue(os.path.exists(target))

        # Clean up the downloaded file; ignore it if it never appeared.
        try:
            os.remove(target)
        except OSError:
            pass
def test_no_auto_update(self): """Update: no update check""" # Make sure there's no cached update data wf = Workflow() wf.reset() self.assertTrue(self.wf.cached_data('__workflow_update_status') is None) wf = Workflow() c = WorkflowMock(['script', 'workflow:noautoupdate']) with c: wf.args self.assertFalse(wf.settings.get('__workflow_autoupdate')) self.assertTrue(self.wf.cached_data('__workflow_update_status') is None) c = WorkflowMock() with c: wf = Workflow(update_settings={'github_slug': TEST_REPO_SLUG, 'version': RELEASE_CURRENT}) self.assertTrue(self.wf.cached_data('__workflow_update_status') is None)
def test_check_update(httpserver, infopl, alfred4):
    """Check update"""
    key = '__workflow_latest_version'
    # (response data, Alfred version, allow pre-releases, expected result)
    cases = [
        (RELEASES_JSON, None, False, True),
        (RELEASES_JSON, '3', False, True),
        (RELEASES_4PLUS_JSON, None, False, True),
        (RELEASES_4PLUS_JSON, '3', False, False),
        (RELEASES_4PLUS_JSON, '3', True, False),
    ]
    for data, alfred, pre, wanted in cases:
        wf = Workflow()
        wf.reset()
        with fakeresponse(httpserver, data, HTTP_HEADERS_JSON):
            got = update.check_update(TEST_REPO, RELEASE_CURRENT, pre, alfred)
            assert got == wanted, "unexpected update status"

            status = wf.cached_data(key)
            assert status is not None
            assert status['available'] == wanted
            assert wf.update_available == wanted

            # Other fields may be unset when no update is available, so
            # only re-query when one was reported.
            if wanted:
                got = update.check_update(TEST_REPO, status['version'],
                                          pre, alfred)
                assert got is False
def slack_list(keys):
    """Collect channels, users and groups for each Slack API key.

    Returns a list of dicts with name/team/team_id/id/type/api_key.
    On the first failing key an error item is emitted and collection stops.
    """
    wf = Workflow()
    results = []
    for key in keys:
        api_key = str(key)
        auth = web.get('https://slack.com/api/auth.test?token=' +
                       api_key + '&pretty=1').json()
        if auth['ok'] is False:
            wf.add_item(title='Authentication failed. Check your API key',
                        valid=False)
            wf.send_feedback()
            break
        else:
            channel_data = web.get('https://slack.com/api/channels.list?token=' +
                                   api_key + '&exclude_archived=1&pretty=1').json()
            user_data = web.get('https://slack.com/api/users.list?token=' +
                                api_key + '&pretty=1').json()
            group_data = web.get('https://slack.com/api/groups.list?token=' +
                                 api_key + '&pretty=1').json()

            def entry(name, obj_id, kind):
                # Common fields shared by every search entry for this key.
                return {'name': name, 'team': auth['team'],
                        'team_id': auth['team_id'], 'id': obj_id,
                        'type': kind, 'api_key': api_key}

            for channel in channel_data['channels']:
                results.append(entry(channel['name'], channel['id'], 'channel'))
            for member in user_data['members']:
                # Index each user under both the handle and the real name.
                results.append(entry(member['name'], member['id'], 'user'))
                results.append(entry(member['profile']['real_name'],
                                     member['id'], 'user'))
            for group in group_data['groups']:
                if 'name' in group:
                    results.append(entry(group['name'], group['id'], 'group'))
    return results
def test_install_update(self):
    """Update: installs update"""
    # Make sure there's no cached update data
    wf = Workflow()
    wf.reset()
    # Verify that there's no update available
    self.assertIsNone(wf.cached_data('__workflow_update_status'))
    # Already on the latest release: nothing to install
    self.assertFalse(update.install_update(TEST_REPO_SLUG, RELEASE_LATEST))
    # Get new update data
    self.assertTrue(update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT))
    # Verify new workflow is downloaded and installed
    c = WorkflowMock()
    with c:
        self.assertTrue(update.install_update(TEST_REPO_SLUG,
                                              RELEASE_CURRENT))
    # FIX: assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(c.cmd[0], 'open')
    self.assertTrue(c.cmd[1].endswith('.alfredworkflow'))
    self.assertFalse(wf.cached_data(
        '__workflow_update_status')['available'])
def get_path(_type):
    """Read paths.json from non-volatile storage.

    Returns the path stored under key *_type* as unicode.
    Raises KeyError if *_type* is not present in the file.
    """
    wf = Workflow()
    # FIX: the explicit f.close() inside the `with` block was redundant —
    # the context manager already closes the file on exit.
    with open(wf.datafile("paths.json"), "r") as f:
        _paths = json.load(f)
    return to_unicode(_paths[_type])
def test_openhelp_no_url(infopl):
    """Magic: no help URL"""
    with WorkflowMock(['script', 'workflow:help']) as mock:
        wf = Workflow()
        wf.args  # processing args triggers the magic handler
        # Without a help_url configured, nothing should be opened.
        assert not mock.cmd
    wf.reset()
def test_openlog(infopl):
    """Magic: open logfile"""
    with WorkflowMock(['script', 'workflow:openlog']) as mock:
        wf = Workflow()
        wf.args  # processing args triggers the magic handler
        expected = ['open', wf.logfile]
        assert mock.cmd == expected
    wf.reset()
def test_open_term(infopl):
    """Magic: open Terminal"""
    with WorkflowMock(['script', 'workflow:openterm']) as mock:
        wf = Workflow()
        wf.args  # processing args triggers the magic handler
        expected = ['open', '-a', 'Terminal', wf.workflowdir]
        assert mock.cmd == expected
    wf.reset()
def test_workflowdir(infopl):
    """Magic: open workflowdir"""
    with WorkflowMock(['script', 'workflow:openworkflow']) as mock:
        wf = Workflow()
        wf.args  # processing args triggers the magic handler
        expected = ['open', wf.workflowdir]
        assert mock.cmd == expected
    wf.reset()
def test_get_year_day_of_year(self):
    """Dates render as 'YYYY-DDD' with a zero-padded day of year."""
    w = Workflow('test', 7)
    cases = [
        (datetime.date(1989, 12, 9), '1989-343'),
        (datetime.date(1989, 2, 7), '1989-038'),
    ]
    for date, expected in cases:
        self.assertEqual(expected, w._get_year_day_of_year(date))
def test_get_next_period_overlap_year(self):
    """A period straddling New Year is cut at 31 December."""
    w = Workflow('test', 7)
    start = datetime.date(2001, 12, 26)
    period = w._get_next_period(start)
    self.assertEqual(Period((2001, 12, 27), (2001, 12, 31)), period)
    # The following period resumes on 1 January of the next year.
    period = w._get_next_period(period.get_end_date())
    self.assertEqual(Period((2002, 1, 1), (2002, 1, 7)), period)
def test_get_next_period_cut_at_month_end(self):
    """A period crossing a month boundary is cut at the month's end."""
    w = Workflow('test', 10)
    start = datetime.date(2001, 9, 22)
    period = w._get_next_period(start)
    self.assertEqual(Period((2001, 9, 23), (2001, 9, 30)), period)
    # The following period starts on the first of the next month.
    period = w._get_next_period(period.get_end_date())
    self.assertEqual(Period((2001, 10, 1), (2001, 10, 10)), period)
def test_nmax(self): print 'test_nmax' result = False wf = Workflow() wf.add_workflow(self.tutorial_dir_1, None) print get_nmax(wf, [], [], datetime(2000,1,1)) result = True self.assertTrue(result)
def test_openhelp(infopl):
    """Magic: open help URL"""
    url = 'http://www.deanishe.net/alfred-workflow/'
    with WorkflowMock(['script', 'workflow:help']) as mock:
        wf = Workflow(help_url=url)
        wf.args  # processing args triggers the magic handler
        expected = ['open', url]
        assert mock.cmd == expected
    wf.reset()
def test_list_magic(infopl):
    """Magic: list magic"""
    # TODO: Verify output somehow
    with WorkflowMock(['script', 'workflow:magic']) as mock:
        wf = Workflow()
        wf.args  # processing args triggers the magic handler
        # Listing magic arguments only prints; no command is run.
        assert not mock.cmd
    wf.reset()
def get_formats():
    """Return combination of saved custom formats and defaults for locale.

    On first use, the locale defaults are persisted to the workflow
    settings under 'date_formats'.
    """
    wf = Workflow()
    # FIX: use the idiomatic `not in` membership test (was `not 'x' in ...`).
    if 'date_formats' not in wf.settings:
        wf.settings['date_formats'] = get_default_formats()
    return wf.settings.get('date_formats')
def execute(self):
    """Create the Workflow, load settings and run the main entry point.

    Side effects: rebinds the module-level LOG to the workflow logger,
    stores the Workflow on self.wf, and exits the process with the
    return code of wf.run().
    """
    global LOG
    wf = Workflow()
    self.wf = wf
    LOG = wf.logger
    # Fall back to the class defaults when the settings have no entry.
    self.minus = get_default(wf.settings, 'minus', self.minus_default)
    self.plus = get_default(wf.settings, 'plus', self.plus_default)
    sys.exit(wf.run(self.main))
def test_workflow(self): print 'test_workflow' result = False w = Workflow() w.add_workflow(self.tutorial_dir_1, None) for j in w.jobs: print j.id print [j.rank for j in w.ranked_jobs]
def test_get_inp_preconditions_for_one_sensor(self):
    """Ingestion preconditions cover the sensor's full data period."""
    w = Workflow('test', 8)
    w.add_primary_sensor('avhrr.n10', (1986, 11, 17), (1991, 9, 16))
    preconditions = w._add_inp_preconditions(list())
    self.assertEqual(232, len(preconditions))
    # Spot-check the first, a middle and the last entry.
    self.assertEqual('ingest-avhrr.n10-1986-321-1986-328', preconditions[0])
    self.assertEqual('ingest-avhrr.n10-1989-048-1989-055', preconditions[108])
    self.assertEqual('ingest-avhrr.n10-1991-252-1991-259', preconditions[231])
def test_version_magic(infopl2):
    """Magic: version magic (Alfred 2)"""
    vstr = '1.9.7'
    # Version from version file
    with env(alfred_workflow_version=None):
        # Versioned
        with WorkflowMock(['script', 'workflow:version']) as c:
            with VersionFile(vstr):
                wf = Workflow()
                # Process magic arguments
                wf.args
                # Version magic only prints; no command may be run.
                assert not c.cmd
            wf.reset()
        # Unversioned
        with WorkflowMock(['script', 'workflow:version']) as c:
            wf = Workflow()
            # Process magic arguments
            wf.args
            assert not c.cmd
        wf.reset()
    # Version from environment variable
    with env(alfred_workflow_version=vstr):
        with WorkflowMock(['script', 'workflow:version']) as c:
            wf = Workflow()
            # Process magic arguments
            wf.args
            assert not c.cmd
        wf.reset()
def test_version_magic(infopl): """Magic: version magic""" # TODO: Verify output somehow vstr = '1.9.7' # Version from file(s) with env(alfred_workflow_version=None): # Version file with WorkflowMock(['script', 'workflow:version']) as c: with VersionFile(vstr): wf = Workflow() # Process magic arguments wf.args assert not c.cmd wf.reset() # info.plist with WorkflowMock(['script', 'workflow:version']) as c: wf = Workflow() # Process magic arguments wf.args assert not c.cmd wf.reset() # Environment variable with env(alfred_workflow_version=vstr): with WorkflowMock(['script', 'workflow:version']) as c: wf = Workflow() # Process magic arguments wf.args assert not c.cmd wf.reset()
def test_ingest_avhrr(self):
    """Run AVHRR-N12 ingestion and verify status and report files."""
    w = Workflow('test', 11, 'config/dir')
    w.add_primary_sensor('avhrr-n12', '1995-06-01', '1996-06-05')
    w.run_ingestion(list([('localhost', 5)]), True, self.logdir)

    expected = '37 created, 0 running, 0 backlog, 37 processed, 0 failed\n'
    with open('test.status', 'r') as status:
        self.assertEqual(expected, status.readline())
    with open('test.report', 'r') as report:
        self.assertEqual(37, len(report.readlines()))
def test_ingest_avhrr_n07(self):
    """Run AVHRR-N07 ingestion and verify status and report files."""
    w = Workflow('test', 7,'/group_workspaces/cems2/fiduceo/Software/mms/config')
    w.add_primary_sensor('avhrr-n07', '1981-09-01', '1985-01-30', 'v01.2')
    w.run_ingestion(list([('localhost', 24)]), True, self.logdir)

    expected = '203 created, 0 running, 0 backlog, 203 processed, 0 failed\n'
    with open('test.status', 'r') as status:
        self.assertEqual(expected, status.readline())
    with open('test.report', 'r') as report:
        self.assertEqual(203, len(report.readlines()))
def test_get_inp_preconditions_for_two_sensors(self):
    """Preconditions for two primary sensors are merged into one list."""
    w = Workflow('test', 14)
    w.add_primary_sensor('avhrr.n11', (1988, 12, 18), (1990, 8, 15))
    w.add_primary_sensor('avhrr.n12', (1985, 10, 16), (1988, 6, 23))
    preconditions = w._add_inp_preconditions(list())
    self.assertEqual(153, len(preconditions))
    # Spot-check the first and two interior entries.
    self.assertEqual('ingest-avhrr.n12-1985-289-1985-302', preconditions[0])
    self.assertEqual('ingest-avhrr.n12-1986-210-1986-212', preconditions[27])
    self.assertEqual('ingest-avhrr.n12-1987-166-1987-179', preconditions[58])
def slack_keys():
    """Return the list of Slack API keys stored in the keychain.

    Keys are stored as one comma-separated keychain entry named
    'slack_api_key'. When no entry exists, an error item is emitted as
    Alfred feedback and 0 is returned (original error-path behavior).
    """
    wf = Workflow()
    try:
        stored = wf.get_password("slack_api_key")
    except PasswordNotFound:
        # BUG FIX: the original created `wf_password` but called
        # `wf.add_item(...)` on an undefined name `wf`, raising a
        # NameError in the error path. A single `wf` instance is now
        # used throughout.
        wf.add_item(title="No API key set. Please run slt", valid=False)
        wf.send_feedback()
        return 0
    return stored.split(",")
def test_get_data_period(self):
    """The data period is the overlap common to all sensors."""
    w = Workflow('test')
    sensors = [
        ('avhrr.n10', (1986, 11, 17), (1991, 9, 16)),
        ('avhrr.n11', (1988, 11, 8), (1994, 12, 31)),
        ('avhrr.n12', (1991, 9, 16), (1998, 12, 14)),
    ]
    # Register the same sensors as primaries first, then as secondaries,
    # matching the original registration order.
    for name, start, end in sensors:
        w.add_primary_sensor(name, start, end)
    for name, start, end in sensors:
        w.add_secondary_sensor(name, start, end)
    period = w._get_data_period()
    self.assertEqual(datetime.date(1988, 11, 8), period.get_start_date())
    self.assertEqual(datetime.date(1994, 12, 31), period.get_end_date())
def test_delete_cache(info2):
    """Magic: delete cache"""
    with WorkflowMock(['script', 'workflow:delcache']):
        wf = Workflow()
        testpath = wf.cachefile('file.test')
        # Plant a file in the cache directory ...
        with open(testpath, 'wb') as fp:
            fp.write('test!')
        assert os.path.exists(testpath)
        # ... and let the magic argument wipe it.
        wf.args
        assert not os.path.exists(testpath)
# -*- coding: utf-8 -*- from __future__ import unicode_literals import sys from workflow import Workflow def main(wf): """ .. note:: 注意, 所有的第三方库的导入都要放在 main 函数内, 因为直到创建 Workflow 实例时, lib 目录才会被添加到系统路径中去. 在这之前所有的第三方库都无法被找到. """ from afwf_fts_anything.handlers import main wf = main(wf) wf.send_feedback() if __name__ == "__main__": wf = Workflow(libraries=[ "lib", ]) logger = wf.logger sys.exit(wf.run(main))
def decode(base64_text):
    """Base64-decode *base64_text* and emit it as a single Alfred item."""
    wf = Workflow()
    decoded = base64.decodestring(base64_text)
    wf.add_item_arg(title=decoded, subtitle=base64_text, arg=decoded)
    wf.print_xml()
'poweroff', 'pause', 'reset', 'suspend', 'savestate', 'acpipowerbutton', 'acpisleepbutton', 'screenshotpng' ]: if command in ['poweroff', 'reset', 'suspend']: wf.clear_cache() os.popen('VBoxManage controlvm %s' % query) elif len(query.split()) > 1: # Parallels command = query.split()[0] if command in ['stop', 'reset', 'suspend']: wf.clear_cache() os.popen('prlctl %s' % query) if command in ['start', 'resume']: os.popen( """ osascript -e 'activate application "Parallels Desktop"' """ ) return # wait and then quit Parallels if possible time.sleep(2) if len(os.popen('prlctl list --no-header').readlines()) == 0: os.popen( """ osascript -e 'tell application "Parallels Desktop" to quit' """ ) if __name__ == '__main__': wf = Workflow() sys.exit(wf.run(execute))
if exists menu item user_connection of menu 1 then click menu item user_connection of menu 1 display notification "Connecting AirPods..." with title "Toggle AirPods" else if exists menu item user_disconnection of menu 1 then click menu item user_disconnection of menu 1 display notification "Disconnecting AirPods..." with title "Toggle AirPods" else click btMenu -- Close main BT drop down if Connect wasn't present return "Airpods were not found, try manually pairing them again!" end if end tell end tell end tell end tell """ workflow = Workflow() def main(wf): if len(sys.argv) < 2: print "No templates selected, nothing built." return with open('connection.scpt', 'w+') as f: device_name = " ".join(sys.argv[1:]) connection_script = connect_script_template.replace( 'your_device_name', device_name) f.write(connection_script) print "Successfully setup!(device name is {device_name})".format(
def create_plot_window(selected_value):
    # NOTE(review): this function references `self.root` and
    # `self.param_dict` but takes no `self` parameter — it can only work
    # if `self` is in an enclosing scope or the function is attached to a
    # class elsewhere; confirm against the call site. `selected_value`
    # is currently unused.
    plot_window = Workflow(self.root, self.param_dict)
    plot_window.plot()
from workflow import Workflow

# Ingest AVHRR NOAA-8 L1b data (version v01.2) for 1983-05-04 .. 1985-10-03,
# running 24 parallel tasks on localhost. The second argument (7) is
# presumably the processing-chunk length in days — confirm against the
# Workflow constructor.
w = Workflow('ingest_avhrr_n08', 7, '/group_workspaces/cems2/fiduceo/Software/mms/config')
w.add_primary_sensor('avhrr-n08', '1983-05-04', '1985-10-03', 'v01.2')
w.run_ingestion(hosts=[('localhost', 24)])
# -*- coding: utf-8 -*- import sys from workflow import Workflow, web def main(wf): from bs4 import BeautifulSoup as Soup if len(wf.args): query = wf.args[0] else: query = None r = web.get(query) soup = Soup(r.text) donwload_links = soup.find_all("div", "bo_v_file") magnet = donwload_links[-1] href = magnet.find('a')['href'] simple_href = href.split('&')[0] print str(simple_href) if __name__ == u"__main__": wf = Workflow(libraries=['./lib']) sys.exit(wf.run(main))
log.debug("Connecting to Tesla") tesla.connect(password) log.debug("Requesting access token") access_token = tesla.get_access_token() log.debug("storing access token in keyring") keyring.set_password(tesla.account_key, username, access_token) log.debug("storing username in workflow data directory") wf.store_data('username', username) def main(wf): # type: (Workflow) -> int log.debug("Set Credentials Script Called! args=%s" % wf.args) try: store_credentials() notify("Tesla Credentials", "Tesla credentials have been set successfully") except Exception as e: log.error(e) return 0 if __name__ == u"__main__": wf = Workflow(libraries=['./lib']) wf.set_last_version(version.version) log = wf.logger sys.exit(wf.run(main))
from workflow import Workflow

# Ingest atsr-e2 sensor data (version v3) for 1995-06-01 .. 2008-01-31,
# running 24 parallel tasks on localhost.
w = Workflow('ingest_atsr_e2', 7, '/group_workspaces/cems2/fiduceo/Software/mms/config')
w.add_primary_sensor('atsr-e2', '1995-06-01', '2008-01-31', 'v3')
w.run_ingestion(hosts=[('localhost', 24)])
from period import Period
from workflow import Workflow

# Post-process xbt-sst / AMSR-E matchup data for 2002-06-01 .. 2011-10-07
# using the usecase-06 post-processing configuration, with 24 parallel
# tasks on localhost.
period = Period('2002-06-01', '2011-10-07')
w = Workflow('post_process_sst_xbt', 7,
             '/group_workspaces/cems2/esacci_sst/mms_new/config', period)
w.set_input_dir(
    '/group_workspaces/cems2/esacci_sst/mms_new/mmd/mmd06c/xbt-sst_amsre-aq')
w.set_usecase_config('usecase-06-pp.xml')
w.run_post_processing(hosts=[('localhost', 24)])
from workflow import Workflow

# Match up amsub-n16 (primary) with ssmt2-f15 (secondary) over
# 2000-09-21 .. 2008-05-28 using the usecase-22 configuration,
# running 96 parallel tasks on localhost.
w = Workflow('usecase22_amsub_n16_ssmt2_f15', 7,
             '/group_workspaces/cems2/fiduceo/Software/mms/config')
w.add_primary_sensor('amsub-n16', '2000-09-21', '2008-05-28', 'v1.0')
w.add_secondary_sensor('ssmt2-f15', '2000-09-21', '2008-05-28', 'v01')
w.set_usecase_config('usecase-22.xml')
w.run_matchup(hosts=[('localhost', 96)])
result = get_project_page(api_key, url, nextpage, result) return result def main(wf): try: # Get API key from Keychain api_key = wf.get_password('gitlab_api_key') api_url = wf.settings.get('api_url', 'https://gitlab.com/api/v4/projects') # Retrieve projects from cache if available and no more than 600 # seconds old def wrapper(): return get_projects(api_key, api_url) projects = wf.cached_data('projects', wrapper, max_age=3600) # Record our progress in the log file log.debug('{} gitlab projects cached'.format(len(projects))) except PasswordNotFound: # API key has not yet been set # Nothing we can do about this, so just log it wf.logger.error('No API key saved') if __name__ == u"__main__": wf = Workflow() log = wf.logger wf.run(main)
stdin=PIPE, stdout=PIPE, stderr=STDOUT) output = p.communicate( input=u"""\n""".join(remarks).encode('utf-8'))[0] wf.logger.debug('remarks search result: ' + output) if len(output): results = output.split('\n')[:-1] for i in range(len(results)): idx = int(results[i].split(':')[0]) - 1 wf.add_item(remarks[idx], subtitle=cmds[idx], arg=cmds[idx], valid=True, icon="icon.png") wf.send_feedback() if not len(output): wf.add_item(u'打开配置文件', subtitle=u'配置文件默认保存在安装目录下', arg='open', valid=True, icon='icon.png') wf.send_feedback() return 0 if __name__ == '__main__': wf = Workflow(update_settings={'github_slug': GITHUB_SLUG}) sys.exit(wf.run(main))
def get_auth_url():
    """Start the Dropbox OAuth2 (no-redirect) flow and return the
    authorization URL the user must visit."""
    flow = client.DropboxOAuth2FlowNoRedirect(
        config.APP_KEY, config.APP_SECRET)
    return flow.start()


def get_title(account):
    """Return a display string: '<name> (<pct>% of <quota> used)'.

    *account* is a Dropbox account dict with 'display_name' and a
    'quota_info' sub-dict holding 'normal', 'shared' and 'quota' bytes.
    """
    normal_use = account['quota_info']['normal']
    shared_use = account['quota_info']['shared']
    total_quota = account['quota_info']['quota']
    total_used = round(100.0 * (normal_use + shared_use) / total_quota, 2)
    return '%s (%s%% of %s used)' % (
        account['display_name'], total_used,
        sizeof(account['quota_info']['quota']))


def sizeof(num):
    """Format *num* bytes as a human-readable size string.

    BUG FIX: the original fell off the end of the loop (returning None)
    for values of 1024 TB or more; such values are now reported in PB.
    """
    for unit in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f %s" % (num, unit)
        num /= 1024.0
    return "%3.1f %s" % (num, 'PB')


if __name__ == '__main__':
    wf = Workflow(
        update_settings={'github_slug': 'fniephaus/alfred-dropbox'},
        help_url='https://github.com/fniephaus/alfred-dropbox/issues'
    )
    log = wf.logger
    sys.exit(wf.run(main))
import httplib2 from time import sleep, strftime from email.utils import parsedate from HTMLParser import HTMLParser from googleapiclient.errors import HttpError from googleapiclient.discovery import build from googleapiclient.http import BatchHttpRequest from oauth2client.client import flow_from_clientsecrets, OAuth2Credentials from oauth2client.tools import run_flow from workflow import Workflow, PasswordNotFound import config WF = Workflow() HP = HTMLParser() EMAIL_LIST = dict((x, []) for x in config.SYSTEM_LABELS.keys()) FIELDS = ('messages/id,messages/threadId,messages/labelIds,' + 'messages/snippet,messages/payload/headers') class PseudoStorage(): def put(self, value): pass def list_threads(label, request_id, response, exception): if exception is None: thread = { 'Date': None,
raise KeyError('Bundle ID unknown : {}'.format(bid)) def _split_query(self, query): if not query or not DELIMITER in query: return None, query elif query.endswith(DELIMITER): # trailing space deleted raise GoBack(query.rstrip(DELIMITER).strip()) return [s.strip() for s in query.split(DELIMITER)] def _update(self, force=False): """Update cached data""" log.debug('Updating workflow lists...') args = ['/usr/bin/python', self.wf.workflowfile('update_workflows.py')] if force: args.append('--force-update') log.debug('update command : {}'.format(args)) retcode = run_in_background('update', args) if retcode: log.debug('Update failed with code {}'.format(retcode)) print('Update failed') return 1 print('Updating workflow list…'.encode('utf-8')) return 0 if __name__ == '__main__': wf = Workflow() log = wf.logger pk = PackalWorkflow() wf.run(pk.run)
books, key=lambda book: book.title + book.author, match_on=MATCH_ALL ^ MATCH_ALLCHARS, min_score=30 ) books.sort(key=lambda book: book.last_accessed, reverse=True) for b in books: wf.add_item(type='file', title=b.title, valid=True, subtitle=b.author if b.path is not None else 'Please download file in books app first' ' to open in Alfred Books', arg=b.path, icon=b.path, icontype='fileicon', quicklookurl=b.path, largetext=b.title + ', by ' + b.author + '\nIs new: ' + b.is_new + '\nGenre: ' + b.genre + '\nCompleted: ' + b.read_pct + '\nDescription:\n' + b.book_desc) wf.send_feedback() if __name__ == "__main__": wf = Workflow(help_url='https://github.com/codycodes/alfred-books/issues', update_settings={'github_slug': 'codycodes/alfred-books'}) log = wf.logger sys.exit(wf.run(main))
# day = d['daily_forecast'][n] # 把API获取的天气、温度、风力等信息拼接成 alfred条目的标题、副标题 title = '{symbol}@{exchange} {name}'.format(symbol=r['symbol'], name=r['name'], exchange=r['exchange']) # 代码 名称 涨跌幅 最新价 昨收 今开 最高 最低 买入 卖出 动态结算 昨日结算 买量 卖量 成交量 持仓量 subtitle = '涨跌幅{changepercent}% 最新价{trade} 昨收{preclose} ' \ '今开{open} 最高{high} 最低{low} 买入{ask} 卖出{bid} 动态结算{settlement} 昨日结算 {presettlement}' \ '买量{askvol1} 卖量{bidvol1} 成交量{volume} 持仓量{position}'.format( changepercent=r['changepercent'], trade=r['trade'], preclose=r['preclose'], open=r['open'], high=r['high'], low=r['low'], ask=r['ask'], bid=r['bid'], settlement=r['settlement'], presettlement=r['presettlement'], askvol1=r['askvol1'], bidvol1=r['bidvol1'], volume=r['volume'], position=r['position'] ) # print(title) # print(subtitle) # 向alfred添加条目,传标题、副标题、图片路径(图片直接用的和风天气提供的天气图,每个图片的命名对应天气状态码) # wf.add_item(title=title, subtitle=subtitle,icon='images/{code}.png'.format(code=day['cond']['code_d'])) wf.add_item(title=title, subtitle=subtitle) wf.send_feedback() if __name__ == '__main__': wf = Workflow() sys.exit(wf.run(main))
'set_url': interface.set_jenkins_url, 'failing': interface.get_failed_jobs, 'building': interface.get_building_jobs, 'all': interface.get_all_jobs } try: jobs = options[command](query) if not query: wf.add_item("Open Jenkins", arg=interface.get_jenkins_url(), valid=True) for job in jobs: wf.add_item(job.name, subtitle=job.description, arg=job.url, valid=True, icon=job.image) except NoJobsFound: wf.logger.debug("Could not find any jobs for instance: %s", wf.settings['jenkins_url']) wf.add_item("Error: No jobs found") wf.send_feedback() if __name__ == '__main__': # pragma: no cover print Workflow().run(main)
# Copyright © 2014 [email protected] # # MIT Licence. See http://opensource.org/licenses/MIT # # Created on 2014-08-03 # """Simple tests for Duden parser.""" from __future__ import print_function, unicode_literals # from pprint import pprint from workflow import Workflow import duden wf = Workflow() duden.log = wf.logger terms = [ 'Untergang', 'Lageru', 'Eröffnung', 'skandieren', 'Pumps', 'in puncto', ] for t in terms: results = duden.lookup(t) print('{} results for `{}`'.format(len(results), t))
from workflow import Workflow

# Match up avhrr-n18 (primary) with avhrr-n15 (secondary), both v01.2,
# over 2005-05-20 .. 2010-12-31 using the usecase-02 configuration,
# running 24 parallel tasks on localhost.
w = Workflow('usecase02_avhrr_n18_n15', 7,
             '/group_workspaces/cems2/fiduceo/Software/mms/config')
w.add_primary_sensor('avhrr-n18', '2005-05-20', '2010-12-31', 'v01.2')
w.add_secondary_sensor('avhrr-n15', '2005-05-20', '2010-12-31', 'v01.2')
w.set_usecase_config('usecase-02.xml')
w.run_matchup(hosts=[('localhost', 24)])
return 100 * ratio def search(text, limit=10): return list( sorted( emojis, key=lambda item: match(item[0], text), reverse=True, ))[:limit] def main(workflow): text = workflow.args[0].lower() results = search(text, limit=20) for name, code in results: workflow.add_item(title=u'{} {}'.format(code, name), arg=code, copytext=code, icon='icons/none.png', valid=True) if __name__ == u"__main__": wf = Workflow() wf.run(main) wf.send_feedback() sys.exit()
from workflow import Workflow

# Match up avhrr-n17 (primary) with iasi-ma (secondary) over
# 2007-05-29 .. 2010-12-31, running 72 parallel tasks on localhost.
# NOTE(review): the workflow name says 'usecase02' but the config file
# is 'usecase-03.xml' — confirm which one is intended.
w = Workflow('usecase02_avhrr_n17_iasi_ma', 7,
             '/group_workspaces/cems2/fiduceo/Software/mms/config')
w.add_primary_sensor('avhrr-n17', '2007-05-29', '2010-12-31', 'v01.2')
w.add_secondary_sensor('iasi-ma', '2007-05-29', '2010-12-31', 'latest')
w.set_usecase_config('usecase-03.xml')
w.run_matchup(hosts=[('localhost', 72)])
phonetic = extra_args.get(phonetic_type) if extra_args.get( phonetic_type) else '' username = sys.argv[sys.argv.index('-username') + 1] if '-username' in sys.argv else None password = sys.argv[sys.argv.index('-password') + 1] if '-password' in sys.argv else None filepath = sys.argv[sys.argv.index('-filepath') + 1] if '-filepath' in sys.argv else os.path.join( os.environ['HOME'], 'Documents/Alfred-youdao-wordbook.xml') textpath = sys.argv[sys.argv.index('-textpath') + 1] if '-textpath' in sys.argv else os.path.join( os.environ['HOME'], 'Documents/youdao-wordbook.md') m2 = hashlib.md5() m2.update(password) password_md5 = m2.hexdigest() item = { "word": params[0], "trans": params[1], "phonetic": phonetic, "tags": "Alfred", "progress": "-1", } saver = SaveWord(username, password_md5, filepath, textpath, item) wf = Workflow() sys.exit(wf.run(saver.save))
def encode(text):
    """Base64-encode *text* (newlines stripped) and emit it as a single
    Alfred result item."""
    wf = Workflow()
    encoded = base64.encodestring(text).replace('\n', '')
    wf.add_item_arg(title=encoded, subtitle=text, arg=encoded)
    wf.print_xml()
from workflow import Workflow
from workflow import web

if __name__ == '__main__':
    wf = Workflow()
    # Refresh only when the first page of cached stories is older than
    # 60 seconds.
    if not wf.cached_data_fresh('hackernews_top_10', 60):
        items = web.get(
            'https://hacker-news.firebaseio.com/v0/topstories.json').json()
        top_stories = []
        i = 1
        # Fetch the top 50 stories and cache them in pages of ten under
        # the keys 'hackernews_top_10' .. 'hackernews_top_50'.
        for item_id in items[:50]:
            item = web.get(
                'https://hacker-news.firebaseio.com/v0/item/%s.json' % item_id).json()
            top_stories.append((item_id, item))
            if i % 10 == 0:
                wf.cache_data('hackernews_top_%s0' % (i / 10), top_stories)
                top_stories = []
            i += 1
# This file is for displaying the user's # currently selected time format from __future__ import unicode_literals, print_function import sys from versioning import update_settings from workflow import Workflow from date_format_mappings import DEFAULT_WORKFLOW_SETTINGS def main(wf): update_settings(wf) print("Date format is {format}".format(format=wf.settings['date-format'])) if __name__ == '__main__': workflow = Workflow(default_settings=DEFAULT_WORKFLOW_SETTINGS) sys.exit(workflow.run(main))
with open(path, 'wb') as f: if total_length is None: # no content length header f.write(r.content) else: dl = 0 total_length = int(total_length) for chunk in r.iter_content(chunk_size=1024, decode_unicode=True): dl += len(chunk) percent = float(dl / total_length) if chunk: f.write(chunk) print 'Downloaded {0:.2f}%'.format(percent) f.flush() return path def main(wf): url = 'http://gen.lib.rus.ec/get?md5=A4DC00DEA85E05FE862A4F02357A843D' file_name = '/Users/smargheim/Downloads/knowledge_and_demonstration_aristotles_posterior_analytics.pdf' download_file(url, file_name) #cd_bar = ProgressBar(title="TEST", text='Downloading PDF...') #cd_bar.update(float(percent)) #cd_bar.finish() if __name__ == '__main__': WF = Workflow() sys.exit(WF.run(main))
from workflow import Workflow

# Match up avhrr-n09 (primary) with avhrr-n08 (secondary), both v01.2,
# over 1985-02-27 .. 1985-10-03 using the usecase-02 configuration,
# running 24 parallel tasks on localhost.
w = Workflow('usecase02_avhrr_n09_n08', 7,
             '/group_workspaces/cems2/fiduceo/Software/mms/config')
w.add_primary_sensor('avhrr-n09', '1985-02-27', '1985-10-03', 'v01.2')
w.add_secondary_sensor('avhrr-n08', '1985-02-27', '1985-10-03', 'v01.2')
w.set_usecase_config('usecase-02.xml')
w.run_matchup(hosts=[('localhost', 24)])
parser = Parser(config.getPath()) options = Options(parser, workflow) tokens = query.strip().split(" ", 1) tokens = [i.strip() for i in tokens if i != ""] if len(tokens) < 2: sheetName = "" if len(tokens) == 0 else tokens[0] if sheetName == "--search": Options.hint("Globally searching for ...?", "In global mode", workflow) handler = options.showAvailable if sheetName not in parser.availableSheets( ) else options.list handler(sheetName) else: sheetName = tokens[0] searchTerm = tokens[1] if sheetName == "--search": options.search(None, searchTerm) else: options.search(sheetName, searchTerm) workflow.send_feedback() return None if __name__ == "__main__": workflow = Workflow() exit(workflow.run(main))