def do_crawl(parser, args):
    setup_config(args)
    if args.all:
        crawl_all()
    elif args.sites:
        for site in args.sites:
            crawl(_get_site_by_abbr(site))
    else:
        parser.print_help()
def do_cron(parser, args):
    setup_config(args)
    lockfile = '/tmp/ybk.cron.lock'
    path = pathlib.Path(lockfile)

    class doing(object):
        """Hold the lock file while a task runs; log and swallow any
        exception so the remaining tasks still get a chance to run."""

        def __enter__(self):
            path.touch()

        def __exit__(self, exc_type, exc_value, exc_tb):
            if exc_value:
                crawl_log.exception('error while running cron task')
            path.unlink()
            return True

    if not path.exists():
        with doing():
            crawl_all()
        now = datetime.utcnow() + timedelta(hours=8)  # Beijing time (UTC+8)
        with doing():
            if 9 <= now.hour <= 20:
                realtime_all()
        with doing():
            if now.hour == 6 and now.minute < 5:
                history_all()
        with doing():
            if 9 <= now.hour <= 20:
                # generate historical profit records for every user
                ProfitLog.ensure_all_profits()
            # refresh the status of all trading accounts
            if now.hour == 22 and 30 <= now.minute <= 35:
                trade_account_all()
    else:
        crawl_log.info('another cron run is already in progress, exiting')
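# Note (assumption, not stated in the source): the minute windows checked
# above (``now.minute < 5`` and ``30 <= now.minute <= 35``) suggest this
# command is meant to be invoked roughly every five minutes, e.g. via a
# crontab entry such as the sketch below. The ``ybk cron`` invocation name
# is hypothetical; substitute the console script this package actually
# installs.
#
#     */5 * * * * ybk cron >> /var/log/ybk.cron.log 2>&1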