def get_path(_type):
    """Read paths.json file from non-volatile storage.

    Looks up *_type* in the ``paths.json`` file stored in the workflow's
    data directory and returns the associated path as unicode.

    :param _type: key into the stored paths mapping
        (e.g. ``"database_path"``)
    :returns: the stored path, decoded to unicode
    :raises KeyError: if *_type* is not present in ``paths.json``
    """
    wf = Workflow()
    # The context manager closes the file; the explicit ``f.close()``
    # the original carried inside the ``with`` block was redundant.
    with open(wf.datafile("paths.json"), "r") as f:
        _paths = json.load(f)
    return to_unicode(_paths[_type])
def get_path(_type):
    """Read paths.json file from non-volatile storage.

    Looks up *_type* in the ``paths.json`` file stored in the workflow's
    data directory and returns the associated path as unicode.

    :param _type: key into the stored paths mapping
        (e.g. ``'database_path'``)
    :returns: the stored path, decoded to unicode
    :raises KeyError: if *_type* is not present in ``paths.json``
    """
    wf = Workflow()
    # The context manager closes the file; the explicit ``f.close()``
    # the original carried inside the ``with`` block was redundant.
    with open(wf.datafile("paths.json"), 'r') as f:
        _paths = json.load(f)
    return to_unicode(_paths[_type])
def check_cache():
    """Does the cache need to be updated?

    Compares file modification times (``os.stat(...)[8]`` is ``st_mtime``
    in the stat tuple) across the three stages of the data pipeline:
    the live Zotero database, its cloned ``.sqlite`` copy, and the JSON
    cache built from the clone.

    :returns: ``[update, spot]`` where *update* is a bool and *spot* is
        ``"Clone"``, ``"JSON"``, or ``None`` naming the stale stage.
    """
    wf = Workflow()
    update = False
    spot = None
    ### Step One: Check if cloned .sqlite database is up-to-date with Zotero database
    # Index 8 of the stat tuple is the last-modification timestamp.
    zotero_mod = os.stat(get_path("database_path"))[8]
    clone_mod = os.stat(wf.datafile("zotquery.sqlite"))[8]
    if zotero_mod > clone_mod:
        update = True
        spot = "Clone"
    # Step Two: Check if JSON cache is up-to-date with the cloned database
    cache_mod = os.stat(wf.datafile("zotero_db.json"))[8]
    # A 10-second grace window: the JSON cache is rebuilt right after the
    # clone, so small mtime gaps are expected and ignored.
    if (cache_mod - clone_mod) > 10:
        update = True
        spot = "JSON"
    return [update, spot]
def check_cache():
    """Does the cache need to be updated?

    Returns ``[update, spot]``: a bool plus the name of the stale stage
    (``"Clone"``, ``u"JSON"``) or ``None``.
    """
    wf = Workflow()
    update = False
    spot = None
    # Index 8 of the stat tuple is st_mtime; gather all three timestamps
    # up front (the stat calls are independent of one another).
    zotero_mod = os.stat(get_path('database_path'))[8]
    clone_mod = os.stat(wf.datafile('zotquery.sqlite'))[8]
    cache_mod = os.stat(wf.datafile('zotero_db.json'))[8]
    # Stage one: is the cloned .sqlite database behind the live Zotero db?
    if zotero_mod > clone_mod:
        update, spot = True, "Clone"
    # Stage two: has the JSON cache drifted more than 10s past the clone?
    if (cache_mod - clone_mod) > 10:
        update, spot = True, u"JSON"
    return [update, spot]
def test_delete_data(info2):
    """Magic: delete data"""
    with WorkflowMock(['script', 'workflow:deldata']):
        wf = Workflow()
        target = wf.datafile('file.test')
        with open(target, 'wb') as handle:
            handle.write('test!')
        assert os.path.exists(target)
        # Reading ``args`` dispatches the magic argument, which should
        # wipe the data directory.
        wf.args
        assert not os.path.exists(target)
def test_reset(info2):
    """Magic: reset"""
    with WorkflowMock(['script', 'workflow:reset']):
        wf = Workflow()
        wf.settings['key'] = 'value'
        datatest = wf.datafile('data.test')
        cachetest = wf.cachefile('cache.test')
        settings_path = wf.datafile('settings.json')
        everything = (datatest, cachetest, settings_path)
        # Settings file is created by the assignment above; the other
        # two fixtures are written by hand.
        for path in (datatest, cachetest):
            with open(path, 'wb') as fh:
                fh.write('test!')
        for path in everything:
            assert os.path.exists(path)
        # Reading ``args`` dispatches the magic argument (reset).
        wf.args
        for path in everything:
            assert not os.path.exists(path)
def test_reset(infopl):
    """Magic: reset"""
    with WorkflowMock(["script", "workflow:reset"]):
        wf = Workflow()
        wf.settings["key"] = "value"
        datatest = wf.datafile("data.test")
        cachetest = wf.cachefile("cache.test")
        settings_path = wf.datafile("settings.json")
        everything = (datatest, cachetest, settings_path)
        # Settings file is created by the assignment above; the other
        # two fixtures are written by hand.
        for path in (datatest, cachetest):
            with open(path, "w") as fh:
                fh.write("test!")
        for path in everything:
            assert os.path.exists(path)
        # Reading ``args`` dispatches the magic argument (reset).
        wf.args
        for path in everything:
            assert not os.path.exists(path)
    wf.reset()
def test_delete_data(infopl):
    """Magic: delete data"""
    with WorkflowMock(["script", "workflow:deldata"]):
        wf = Workflow()
        target = wf.datafile("file.test")
        with open(target, "w") as handle:
            handle.write("test!")
        assert os.path.exists(target)
        # Reading ``args`` dispatches the magic argument, which should
        # wipe the data directory.
        wf.args
        assert not os.path.exists(target)
    wf.reset()
# Python 2 only: re-expose the deleted setdefaultencoding() and force
# UTF-8 so byte/unicode mixing below does not raise UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding('utf-8')
wf = Workflow()
''' 全局变量 '''
# ^ "Global variables": request headers and the on-disk cookie cache.
dt_headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36',
    'Accept-Encoding': 'gzip, deflate, br',
    'Referer': 'https://duotai.love/login'
}
# Cookie cache lives in the workflow's persistent data directory.
dt_file_name = wf.datafile('cookie.txt')
''' 转换容量单位,size 为B 基本单位 '''
# ^ "Convert a capacity to human units; size is in bytes (B)."
def convert_size(size):
    flag = False
    if (size == 0):
        return '0B'
    # Handle negative sizes: remember the sign, work on the absolute value.
    if (size < 0):
        size = size * -1
        flag = True
    size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
    # Pick the largest unit whose power of 1024 fits into *size*.
    i = int(math.floor(math.log(size, 1024)))
# encoding: utf-8 import glob import hashlib import os import sys from workflow import Workflow, ICON_SYNC, ICON_WARNING, web workflow = Workflow() repo_dir = workflow.datafile("gitignore") def main(wf): if len(sys.argv) < 2: print "No templates were selected, so nothing was built." return if not os.path.isdir(repo_dir): print "Please run gitignore-update first to download the templates." templates = sys.argv[1:] tmp_file_name = hashlib.md5(" ".join(templates)).hexdigest() tmp_file_path = "/tmp/" + tmp_file_name if os.path.isfile(tmp_file_path): os.system("open %s" % tmp_file_path) return formatted_templates = set()
# encoding: utf-8 import os import re import sys from sh import git, pwd, sh from workflow import Workflow, ICON_SYNC, web workflow = Workflow() repo_dir = workflow.datafile("gitignore") def main(wf): """ Run script. This script checks whether or not the gitignore repository has already been cloned, and either clones it or pulls the latest changes from the remote repository. In both cases, the list of templates is stored in the persistent storage provided by the Workflow's data API. """ if not os.path.isdir(repo_dir): clone_repo() else: pull_repo() update_templates() print "Templates have been successfully updated."