def main():
    """Repair share URLs for CSDN folders and files stored in the db.

    Lists folders and files under "CSDN" via lzy, derives each record id
    from the entry name (minus its 4-char suffix), fetches a fresh share
    URL, and writes it back with db.download_set_share_url, printing
    per-item progress.
    """
    db.init()
    # original duplicated this loop verbatim for folders and files;
    # only the is_file flag differed
    _fix_share_urls(lzy.list_files("CSDN", False), is_file=False)
    _fix_share_urls(lzy.list_files("CSDN", True), is_file=True)


def _fix_share_urls(entries, is_file):
    """Refresh the share URL of every entry and report success/failure."""
    total = len(entries)
    for idx, entry in enumerate(entries, 1):
        name = entry.name
        _id = name[:-4]  # record id = entry name minus 4-char extension — TODO confirm suffix length
        url = lzy.get_share_url(entry.id, is_file)
        ok = db.download_set_share_url(_id, url)
        status = "修复成功" if ok else "修复失败"
        print(f"{status}[{idx}/{total}]:{name} => {url}")
def handle(self, *args, **options):
    """Queue every job that has a scheduled exec_time for delayed execution.

    The (job_id, exec_time) pairs are materialized before the db session is
    closed so no ORM objects are touched afterwards.
    """
    db.init()
    scheduled = db.session.query(Job).filter(Job.exec_time != None).all()
    pending = [(job.job_id, job.exec_time) for job in scheduled]
    db.close()
    for jid, when in pending:
        delayed_exec(when, add_task, (jid,))
def migrate(env, init=False):
    """Wipe the emails table, its id sequence and the sync checkpoint; optionally re-init the db."""
    from core import db

    log.info('Migrate for %s', env.db_name)

    # drop email storage and the incremental-sync marker, then commit
    env.sql('DROP TABLE IF EXISTS emails')
    env.sql('DROP SEQUENCE IF EXISTS seq_emails_id')
    env.storage.rm('last_sync')
    env.db.commit()

    if init:
        db.init(env)
        env.username = env.username  # reset db connection
def get_full(argv):
    """Build and return the full management CLI parser.

    Extends the base parser from get_base() with every admin sub-command;
    each .exe(...) registers the callback invoked when that sub-command is
    selected. Env/db/sync/parse/... come from elsewhere in this file.
    """
    from core import Env, db
    env = Env()
    parser, cmd = get_base(argv)
    # mail synchronisation for one user
    cmd('sync')\
        .arg('-t', '--target', default='fast', choices=sync.choices)\
        .arg('-l', '--only', nargs='*', help='sync only these labels')\
        .arg('-d', '--disabled', action='store_true')\
        .arg('-u', '--username')\
        .exe(lambda a: (
            sync(Env(a.username), a.target, a.disabled, only=a.only))
        )
    # re-parse stored raw messages in limit/offset batches
    cmd('parse')\
        .arg('-u', '--username')\
        .arg('-l', '--limit', type=int, default=1000)\
        .arg('-o', '--offset', type=int, default=0)\
        .arg('-w', '--where')\
        .exe(lambda a: parse(Env(a.username), a.limit, a.offset, a.where))
    # recompute thread ids
    cmd('thrids')\
        .arg('-u', '--username')\
        .arg('-c', '--clear', action='store_true')\
        .exe(lambda a: thrids(Env(a.username), a.clear))
    cmd('db-init')\
        .arg('username')\
        .arg('-r', '--reset', action='store_true')\
        .arg('-p', '--password')\
        .exe(lambda a: db.init(Env(a.username), a.password, a.reset))
    cmd('migrate')\
        .arg('-i', '--init', action='store_true')\
        .exe(lambda a: migrate(env, a.init))
    cmd('shell').exe(lambda a: shell(env))
    cmd('run').exe(lambda a: run(env))
    # add_help=False: remaining argv is forwarded verbatim to the target
    cmd('web', add_help=False).exe(lambda a: grun('web', ' '.join(a)))
    cmd('async', add_help=False).exe(lambda a: grun('async', ' '.join(a)))
    cmd('test', add_help=False).exe(lambda a: (
        sh('py.test --ignore=node_modules --confcutdir=tests %s' % ' '.join(a))
    ))
    cmd('npm', help='update nodejs packages')\
        .exe(lambda a: npm())
    cmd('static', help='generate front')\
        .arg('-f', '--force', action='store_true')\
        .arg('-c', '--clean', action='store_true')\
        .exe(lambda a: front(env, a.force, a.clean))
    # rebuild static assets, then HUP the supervised processes
    cmd('touch').exe(lambda a: sh(
        './manage.py static &&'
        'supervisorctl pid async web nginx | xargs kill -s HUP'
    ))
    return parser
def migrate(env, init=False, clean=False):
    """Run the schema migration, optionally wiping emails and/or re-initializing the db first."""
    from core import db

    log.info('Migrate for %s', env.db_name)

    def wipe_emails():
        # drop email storage, its id sequence, and the sync checkpoint
        env.sql('DROP TABLE IF EXISTS emails')
        env.sql('DROP SEQUENCE IF EXISTS seq_emails_id')
        env.storage.rm('last_sync')
        env.db.commit()

    if clean:
        wipe_emails()
    if clean or init:
        db.init(env)
        env.username = env.username  # reset db connection

    # the migration itself: remove the obsolete delid column
    env.sql('ALTER TABLE emails DROP COLUMN delid')
    env.db.commit()
def handle(self, *args, **options):
    """Open the db, run any pending task, and always close the connection.

    Fix: the original called db.close() unconditionally after run_any(),
    so an exception in run_any() leaked the connection; try/finally
    guarantees cleanup while letting the exception propagate.
    """
    db.init()
    try:
        tasks.run_any()
    finally:
        db.close()
from core import db
from core import path


class MyJSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime values as Mongo-style ISODate strings."""

    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.strftime('ISODate(%Y-%m-%dT%H:%M:%S.%fZ)')
        return json.JSONEncoder.default(self, obj)


def json_dumps(dic):
    """Dump *dic* as pretty-printed JSON without ASCII-escaping (keeps CJK readable)."""
    return json.dumps(dic, ensure_ascii=False, indent=4, cls=MyJSONEncoder)


db.init()
# strip Mongo's _id (not JSON-serializable) from every download record
_dic = []
for d in db.download.find({}):
    d.pop('_id')
    _dic.append(d)
print(f'共 {len(_dic)} 条数据')
_str = json_dumps(_dic)
_path = path.frozen_path('kits/backup.db')
print('开始备份...')
# fix: use a context manager (original leaked the handle on write errors)
# and reuse the already-computed _path instead of resolving it twice
with open(_path, mode='w', encoding='utf8') as f:
    f.write(_str)
print(f'备份完成:{_path}')
def get_full(argv):
    """Build and return the full management CLI parser.

    Extends the base parser from get_base() with every admin sub-command;
    each .exe(...) registers the callback invoked when that sub-command is
    selected. Env/db/sync/parse/... come from elsewhere in this file.
    """
    from core import Env, db
    env = Env()
    parser, cmd = get_base(argv)
    # mail synchronisation for one user
    cmd('sync')\
        .arg('-t', '--target', default='fast', choices=sync.choices)\
        .arg('-l', '--only', nargs='*', help='sync only these labels')\
        .arg('-d', '--disabled', action='store_true')\
        .arg('-u', '--username')\
        .exe(lambda a: (
            sync(Env(a.username), a.target, a.disabled, only=a.only))
        )
    # re-parse stored raw messages in limit/offset batches
    cmd('parse')\
        .arg('-u', '--username')\
        .arg('-l', '--limit', type=int, default=1000)\
        .arg('-o', '--offset', type=int, default=0)\
        .arg('-w', '--where')\
        .exe(lambda a: parse(Env(a.username), a.limit, a.offset, a.where))
    # recompute thread ids
    cmd('thrids')\
        .arg('-u', '--username')\
        .arg('-c', '--clear', action='store_true')\
        .exe(lambda a: thrids(Env(a.username), a.clear))
    cmd('db-init')\
        .arg('username')\
        .arg('-r', '--reset', action='store_true')\
        .arg('-p', '--password')\
        .exe(lambda a: db.init(Env(a.username), a.password, a.reset))
    # per-user migration with optional clean wipe
    cmd('migrate')\
        .arg('-u', '--username')\
        .arg('-i', '--init', action='store_true')\
        .arg('-c', '--clean', action='store_true')\
        .exe(lambda a: migrate(Env(a.username), a.init, a.clean))
    cmd('shell').exe(lambda a: shell(env))
    cmd('run').exe(lambda a: run(env))
    # add_help=False: remaining argv is forwarded verbatim to the target
    cmd('web', add_help=False).exe(lambda a: grun('web', ' '.join(a)))
    cmd('async', add_help=False).exe(lambda a: grun('async', ' '.join(a)))
    cmd('test', add_help=False).exe(lambda a: (
        sh('py.test --ignore=node_modules --confcutdir=tests %s' % ' '.join(a))
    ))
    cmd('npm', help='update nodejs packages')\
        .exe(lambda a: npm())
    cmd('static', help='generate front')\
        .arg('-f', '--force', action='store_true')\
        .arg('-c', '--clean', action='store_true')\
        .exe(lambda a: front(env, a.force, a.clean))
    # rebuild static assets, then HUP the supervised processes
    cmd('touch').exe(lambda a: sh(
        './manage.py static &&'
        'supervisorctl pid async web nginx | xargs kill -s HUP'
    ))
    return parser
def process_request(self, request):
    """Open a db connection for dynamic requests; static asset requests are left untouched."""
    if not is_static(request):
        db.init()
def main():
    """Initialize the database, then serve the bot until interrupted.

    A Ctrl-C (KeyboardInterrupt) is treated as a normal shutdown and
    suppressed rather than shown as a traceback.
    """
    try:
        db.init()
        bot.run(host=config.host, port=config.port)
    except KeyboardInterrupt:
        pass  # clean shutdown requested by the operator