def load_db():
    '''Handle upload of a user tag database file and queue follow-up download jobs.'''
    msg = ''
    errmsg = ''
    if request.POST.submit:
        upload = request.files.get('dbfile')
        if upload:
            logger.debug(upload.filename)
            name = get_data_path('uploaded.db')
            upload.save(name, overwrite=True)
            logger.debug(f'uploaded file saved to {name}')
            try:
                tag_file_added, missed_fanhaos = load_tags_db()
            except DBError:
                # "Database file error, please check that the file was uploaded correctly"
                errmsg = '数据库文件错误, 请检查文件是否正确上传'
            else:
                urls = [
                    bus_spider.get_url_by_fanhao(fanhao)
                    for fanhao in missed_fanhaos
                ]
                add_download_job(urls)
                # "Uploaded {n} user tag records, {m} fanhao;
                #  note: additional data must be downloaded before modeling can start, please wait a while"
                msg = f'上传 {tag_file_added} 条用户打标数据, {len(missed_fanhaos)} 个番号, '
                msg += ' 注意: 需要下载其他数据才能开始建模, 请等候一定时间'
        else:
            # "Please upload a database file"
            errmsg = '请上传数据库文件'
    return template('load_db', path=request.path, msg=msg, errmsg=errmsg)
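# A minimal wiring sketch, not taken from the original project: the request.files /
# template calls above suggest this handler runs under Bottle, so it would be registered
# roughly as below. The '/load_db' URL and the explicit route() call are illustrative
# assumptions only.
#
#     from bottle import route
#     route('/load_db', method=['GET', 'POST'])(load_db)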
def get_data_by_name(name, target_value):
    '''Read the fanhao list stored under `name` and label every extracted face with target_value.'''
    file = '{}/{}.txt'.format(get_data_path(MODEL_PATH), name)
    # use a distinct name for the handle so the path variable is not shadowed
    with open(file, 'r') as f:
        fanhao_list = f.read()
    data, image = get_faces(fanhao_list)
    target = [target_value] * len(data)
    return data, image, target
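# A minimal usage sketch, not part of the original module: it assumes get_data_by_name
# is called once per label file to assemble a labeled face dataset. The 'like'/'dislike'
# file names and the 1/0 target values are illustrative assumptions.
def build_training_set():
    pos_data, _, pos_target = get_data_by_name('like', 1)
    neg_data, _, neg_target = get_data_by_name('dislike', 0)
    return pos_data + neg_data, pos_target + neg_target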
def test_config_defaults():
    config_path = util.get_data_path(util.CONFIG_FILE)
    conf = configparser.ConfigParser()
    defaults = {
        'options': {
            'proxy': 'http://localhost:7890'
        },
        'download': {
            'count': 100,
            'interval': 3600
        }
    }
    conf.read_dict(defaults)
    conf.read(config_path)
    for section in conf:
        print(f'[{section}]')
        for key, value in conf[section].items():
            print(f'{key} = {value}')
        print('')
    print(conf.get('download', 'count'))
    print(conf.get('download', 'interval'))
    print(conf.get('options', 'proxy'))
def test_file_path():
    file = 'bus.db'
    path = util.get_data_path(file)
    print(path)
'''
persist data to db
'''
from datetime import date
import datetime
import operator
from functools import reduce
import json
from peewee import *
from enum import IntEnum
from busface.util import logger, get_data_path, format_datetime, get_now_time, get_full_url

DB_FILE = 'bus.db'
db = SqliteDatabase(get_data_path(DB_FILE), pragmas={'journal_mode': 'wal'})


class BaseModel(Model):
    class Meta:
        database = db
        legacy_table_names = False


class ExistError(Exception):
    pass


class DBError(Exception):
    pass


class Item(BaseModel):