def _default_new_user_build_user_payload(self, account: Accounts, params):
    """Provision a brand-new user: create the account-ext row and a default
    (pinned) pan account, then issue the signed account token.

    :param account: freshly created Accounts row
    :param params: dict carrying 'access_token', 'refresh_token', 'expires_at'
                   and an optional 'accext' sub-dict for the account-ext row
    :return: (token string, payload dict used to build it)
    """
    payload = AuthDao.auth_user(account.id)
    payload['id'] = obfuscate_id(account.id)
    payload['_id'] = account.id
    payload['login_updated_at'] = account.login_updated_at

    access_token = params.get('access_token')
    refresh_token = params.get('refresh_token')
    expires_at = params.get('expires_at')

    ext_ctx = params.get('accext', {})
    ext_ctx['account_id'] = account.id
    log.info("will new account ext:{}".format(ext_ctx))
    acc_ext: AccountExt = DataDao.new_accounts_ext(**ext_ctx)
    log.info("new account ext ok acc_ext id:{}".format(acc_ext.id))

    # pin=1: this first pan account becomes the user's default one
    pan_acc_id = DataDao.new_pan_account(
        account.id, account.name,
        PAN_SERVICE['client_id'], PAN_SERVICE['client_secret'],
        access_token, refresh_token, expires_at,
        get_now_datetime(), pin=1, bd_uid=acc_ext.user_id)
    payload['_p'] = obfuscate_id(pan_acc_id)

    tk = make_account_token(payload)
    return tk, payload
def query_assets_by_ref_id_for_tree(cls, ref_id, pin=None, offset=0, limit=500):
    """Build jstree node dicts for the assets owned by *ref_id*.

    :param ref_id: owner reference id
    :param pin: optional pin filter forwarded to the query
    :param offset: paging offset
    :param limit: paging limit
    :return: list of jstree node dicts
    """
    assets_list = cls.query_assets_by_ref_id(ref_id, pin, offset, limit)
    params = []
    for assets in assets_list:
        data_id = decrypt_id(assets.pro_no)
        has_children = False
        # Bug fix: icon_val was only assigned inside the branches, so a file
        # whose type could not be guessed left it unbound (NameError on the
        # first iteration) or carrying the previous item's icon. Default it.
        icon_val = "jstree-file"
        if assets.isdir == 1:
            icon_val = "folder"
            has_children = True
        else:
            f_type = guess_file_type(assets.desc)
            if f_type:
                icon_val = "jstree-file file-%s" % f_type
        item = {
            "id": obfuscate_id(data_id),
            "text": assets.desc,
            "data": {
                "path": "#",
                "server_ctime": 0,
                "isdir": assets.isdir,
                "tag": "asset",
                "_id": obfuscate_id(assets.id)
            },
            "children": has_children,
            "icon": icon_val
        }
        params.append(item)
    return params
def fetch_root_item_by_user(self, user_id):
    """Return jstree root nodes for a user's pan accounts.

    For the master account, every pan account is considered; any pan account
    that has no root item yet gets one created on the fly.
    """
    def _node(root, label):
        # Shape one jstree folder node from a root item row.
        fuzzy = obfuscate_id(root.id)
        return {
            "id": fuzzy,
            "text": label,
            "data": {
                "path": root.path,
                "_id": fuzzy,
                "server_ctime": root.server_ctime,
                "isdir": 1,
                "source": 'local',
                "fn": "",
                "alias": "",
                "pos": 0
            },
            "children": True,
            "icon": "folder"
        }

    pan_acc_map = {}
    if MASTER_ACCOUNT_ID == user_id:
        for pa in DataDao.pan_account_list(user_id, 100):
            pan_acc_map[pa.id] = pa

    params = []
    for item in DataDao.get_root_item_by_user_id(user_id):
        pan = item.pan
        if pan.id in pan_acc_map:
            pan_acc_map.pop(pan.id)
        params.append(_node(item, pan.name))

    # Pan accounts left in the map have no root item yet: create one each.
    for pan_id in pan_acc_map:
        _pan = pan_acc_map[pan_id]
        _, root_item = DataDao.new_root_item(user_id, pan_id)
        params.append(_node(root_item, _pan.name))
    return params
def build_user_payload(self, account: Accounts) -> (str, dict):
    """Build the auth payload for an existing account and sign it into a token.

    Picks the pinned (default) pan account, if any, as '_p' in the payload.

    :param account: the account to authenticate
    :return: (token string, payload dict)
    """
    auth_user_dict = AuthDao.auth_user(account.id)
    fuzzy_id = obfuscate_id(account.id)
    auth_user_dict['id'] = fuzzy_id
    pan_acc_cache = self.checkout_pan_accounts(account.id)
    for pan_account_id in pan_acc_cache:
        pan_acc = pan_acc_cache[pan_account_id]
        if pan_acc.pin == 1:
            auth_user_dict['_p'] = obfuscate_id(pan_account_id)
            break
    log.info("auth_user_dict:{}".format(auth_user_dict))
    tk = make_account_token(auth_user_dict)
    # Bug fix: was log.info('make_account_token:'.format(tk)) — str.format on
    # a literal without a placeholder silently discarded the token.
    log.info('make_account_token:{}'.format(tk))
    return tk, auth_user_dict
def save(self, force_insert=False, force_update=False, using=None):
    """Persist the row, then backfill ``obfuscated_user_id`` once the PK exists.

    On create, the primary key is only assigned during the first save(), so
    the obfuscated id must be computed afterwards and written with a second
    save().
    """
    super(UserMeta, self).save(force_insert=force_insert,
                               force_update=force_update,
                               using=using)
    if self.obfuscated_user_id is None:
        self.obfuscated_user_id = utils.obfuscate_id(self.id)
        super(UserMeta, self).save(using=using)
def new_order(cls, user_ref_id, total):
    """Insert and return a new Order with a microsecond-time-based order no."""
    with db:
        ord_no = obfuscate_id(int(time.time() * 1000 * 1000))
        order = Order(ord_no=ord_no, ref_id=user_ref_id, total=total)
        order.save(force_insert=True)
        return order
def account_list(cls, pin=None, _type=None, offset=0, page_size=30):
    """Page through accounts, optionally filtered by pin and/or auth type.

    Each returned dict carries its 'auth' row (joined or fetched in bulk),
    org/role extensions, and a 'fuzzy_id' backfill when missing; the raw
    'id' key is stripped before returning.

    :return: (list of account dicts, True if the page is full)
    """
    if pin and _type:
        query = Accounts.select(Accounts, AuthUser).join(
            AuthUser, on=(Accounts.id == AuthUser.acc_id),
            attr='auth').where(Accounts.pin == pin, AuthUser.type == _type)
    elif _type:
        query = Accounts.select(Accounts, AuthUser).join(
            AuthUser, on=(Accounts.id == AuthUser.acc_id),
            attr='auth').where(AuthUser.type == _type)
    elif pin:
        query = Accounts.select().where(Accounts.pin == pin)
    else:
        query = Accounts.select(Accounts)
    # common ordering/paging for every branch
    query = query.order_by(Accounts.created_at.desc()).offset(offset).limit(page_size)

    rs = []
    acc_ids = []
    count = 0
    for acc in query:
        acc_dict = Accounts.to_dict(acc)
        if hasattr(acc, 'auth') and acc.auth:
            acc_dict['auth'] = AuthUser.to_dict(acc.auth, BASE_FIELDS)
        else:
            # auth row not joined; fetch in bulk below
            acc_ids.append(acc.id)
        if not acc.fuzzy_id:
            acc_dict['fuzzy_id'] = obfuscate_id(acc.id)
        rs.append(acc_dict)
        count += 1

    au_dict = {}
    au_list = AuthUser.select().where(AuthUser.acc_id.in_(acc_ids))
    if au_list:
        for au in au_list:
            au_dict[au.acc_id] = AuthUser.to_dict(au)

    for acc_dict in rs:
        if acc_dict['id'] in au_dict:
            acc_dict['auth'] = au_dict[acc_dict['id']]
        uoe_list = UserOrgExtend.select().where(
            UserOrgExtend.acc_id == acc_dict['id'])
        if uoe_list:
            acc_dict['extorgs'] = [uoe.org_id for uoe in uoe_list]
        ure_list = UserRoleExtend.select().where(
            UserRoleExtend.acc_id == acc_dict['id'])
        if ure_list:
            acc_dict['extroles'] = [ure.role_id for ure in ure_list]
        acc_dict.pop('id')
    return rs, count == page_size
def default_guest_account(cls):
    """Fetch the built-in "guest" account, backfilling fuzzy_id if absent."""
    guest: Accounts = Accounts.select().where(
        Accounts.name == "guest").first()
    if guest and not guest.fuzzy_id:
        guest.fuzzy_id = obfuscate_id(guest.id)
        with db:
            Accounts.update(fuzzy_id=guest.fuzzy_id).where(
                Accounts.id == guest.id).execute()
    return guest
def new_product(cls, data_id, man_user_ref_id, params):
    """Insert and return a Product row for *data_id* owned by *man_user_ref_id*.

    ``params`` supplies 'isdir', 'name', 'fs_id', 'size' and an optional
    'price' (defaults to 0); the product number is the obfuscated data id.
    """
    with db:
        product = Product(
            pro_no=obfuscate_id(data_id),
            isdir=params['isdir'],
            name=params['name'],
            fs_id=params['fs_id'],
            ref_id=man_user_ref_id,
            data_id=data_id,
            price=params.get('price', 0),
            size=params['size'],
            pin=0,
        )
        product.save(force_insert=True)
        return product
def query_client_root_list(self, pan_id):
    """Return the jstree root node (at most one) for a pan account's top dir."""
    nodes = []
    top: ClientDataItem = ClientDataDao.get_top_dir_item_by_pan_id(pan_id)
    if top:
        fuzzy = obfuscate_id(top.id)
        label = "%s(%s)" % (PAN_ROOT_DIR['alias'], top.source_fs_id)
        nodes.append({
            "id": fuzzy,
            "text": label,
            "data": {
                "source": "self",
                "_id": fuzzy,
                "tag": "self",
                "path": "#"
            },
            "children": True,
            "icon": "folder"
        })
    return nodes
def query_client_sub_list(self, parent_id, ref_id):
    """List the children of a client directory as jstree nodes."""
    nodes = []
    cdi_list = ClientDataDao.query_client_item_list_by_parent(
        parent_id, ref_id)
    if cdi_list:
        for cdi in cdi_list:
            fuzzy = obfuscate_id(int(cdi.id))
            # prefer the alias name for display when present
            display = cdi.aliasname if cdi.aliasname else cdi.filename
            is_dir = cdi.isdir == 1
            icon_val = "file"
            if not is_dir:
                f_type = guess_file_type(display)
                if f_type:
                    icon_val = "file file-%s" % f_type
            nodes.append({
                "id": fuzzy,
                "text": display,
                "data": {
                    "source": "self",
                    "_id": fuzzy,
                    "isdir": cdi.isdir,
                    "tag": "free",
                    "path": cdi.path,
                    "category": cdi.category,
                    "fs_id": cdi.fs_id,
                    "format_size": scale_size(cdi.size)
                },
                "children": is_dir,
                "icon": icon_val
            })
    return nodes
def new_order_assets(cls, acc_auth: AuthUser, pro: Product):
    """Create an order plus order item for *pro* and grant the resulting asset.

    Runs in a single transaction; returns the newly created Assets row.
    """
    with db:
        ord_no = obfuscate_id(int(time.time() * 1000 * 1000))
        order = Order(ord_no=ord_no, ref_id=acc_auth.ref_id, total=pro.price)
        order.save(force_insert=True)

        order_item = OrderItem(ord_id=order.id, pro_no=pro.pro_no,
                               price=pro.price)
        order_item.save(force_insert=True)

        asset = Assets(ord_no=order.ord_no,
                       pro_no=order_item.pro_no,
                       fs_id=pro.fs_id,
                       isdir=pro.isdir,
                       ref_id=order.ref_id,
                       desc=pro.name,
                       format_size=scale_size(pro.size),
                       price=order_item.price)
        asset.save(force_insert=True)
        return asset
def query_root_list(self, account_id=None):
    """jstree roots: a user's root files when *account_id* is given,
    otherwise the free/public root files. Only 'local' items are shown."""
    if account_id:
        root_item_list = DataDao.query_root_files_by_user_id(account_id)
    else:
        root_item_list = DataDao.query_free_root_files()
    nodes = []
    for item in root_item_list:
        if "local" != item.source:
            continue
        fuzzy = obfuscate_id(int(item.fs_id))
        nodes.append({
            "id": fuzzy,
            "text": item.desc,
            "data": {
                "source": item.source,
                "_id": fuzzy,
                "tag": "free",
                "path": "#"
            },
            "children": True,
            "icon": "folder"
        })
    return nodes
def product_by_pro_no(cls, data_id) -> Product:
    """Look up a Product by the obfuscated form of *data_id* (its pro_no)."""
    return Product.select().where(
        Product.pro_no == obfuscate_id(data_id)).first()
def to_do(key, rs_key):
    """Async worker: ensure *item_id* exists in the user's own pan.

    NOTE(review): this is a closure fragment — item_id, user_ref_id, ctx,
    default_pan_id, user_id, key_prefix, self and open_service come from an
    enclosing scope not visible here (it mirrors the nested to_do inside
    check_copy_file). The key/rs_key parameters are unused in the body.
    """
    _result = {'state': 0}
    data_item: DataItem = DataDao.get_data_item_by_id(item_id)
    _client_data_item: ClientDataItem = ClientDataDao.get_data_item_by_source_fs_id(
        data_item.fs_id, user_ref_id)
    if _client_data_item:
        if _client_data_item.pin == 0:
            pan_acc: PanAccounts = auth_service.get_pan_account(
                default_pan_id, user_id)
            pan_acc = auth_service.check_pan_token_validation(pan_acc)
            # File already copied out, waiting to be taken in; the directory
            # structure already exists. Report progress pos=2.
            async_service.update_state(key_prefix, user_id, {
                "state": 0,
                "pos": 2
            })
            old_name = _client_data_item.filename
            ftype = guess_file_type(old_name)
            if not ftype:
                # fall back to the raw extension after the last dot
                idx = old_name.rfind('.')
                if idx > 0:
                    ftype = old_name[idx + 1:]
            # canonical name: "<source_fs_id>.<ext>" under the pan root dir
            new_name = "%s.%s" % (_client_data_item.source_fs_id, ftype)
            parent_dir = "/%s" % PAN_ROOT_DIR['name']
            new_path = "%s/%s" % (parent_dir, new_name)
            _jsonrs = restapi.file_rename(pan_acc.access_token,
                                          _client_data_item.path, new_name)
            # jsonrs = restapi.file_search(pan_acc.access_token, key=fs_id, parent_dir=parent_dir)
            # print("search new file jsonrs:", jsonrs)
            if "errno" in _jsonrs and _jsonrs["errno"] == 0:
                # rename succeeded: persist the new path/name and mark pinned
                _client_data_item.path = new_path
                _client_data_item.filename = new_name
                _client_data_item.pin = 1
                ClientDataDao.update_client_item(
                    _client_data_item.id, {
                        "path": new_path,
                        "filename": new_name,
                        "pin": 1
                    })
                _result['state'] = 0
                _result['item'] = ctx.__build_client_item_simple_dict(
                    _client_data_item)
                _result['item']['id'] = obfuscate_id(
                    _client_data_item.id)
                _result['pos'] = 4
            else:
                _result['state'] = -4
                _result["err"] = LOGIC_ERR_TXT['rename_fail']
        else:
            # already pinned: nothing to do, return the existing item
            _result['state'] = 0
            _result['item'] = ctx.__build_client_item_simple_dict(
                _client_data_item)
            _result['item']['id'] = obfuscate_id(_client_data_item.id)
            _result['pos'] = 4
    else:
        # No local copy yet: resolve a share log and copy into the user's pan.
        _rs, share_log = open_service.build_shared_log(data_item)
        if not share_log:
            if 'state' in _rs:
                _result['state'] = _rs['state']
            if 'err' in _rs:
                _result['state'] = -9
                _result['err'] = _rs['err']
        else:
            # copy
            if share_log.is_black == 1:
                # blacklisted share: propagate its error
                _result['state'] = -9
                _result['err'] = share_log.err
            else:
                _st, _client_data_item = self.copy_to_my_pan(
                    user_id, user_ref_id, share_log, default_pan_id)
                _result['state'] = _st
                if _st < 0:
                    if _st == -3:
                        _result['err'] = LOGIC_ERR_TXT['mk_top_fail']
                    elif _st == -4:
                        _result['err'] = LOGIC_ERR_TXT['rename_fail']
                if _client_data_item:
                    _result[
                        'item'] = ctx.__build_client_item_simple_dict(
                            _client_data_item)
                    _result['item']['id'] = obfuscate_id(
                        _client_data_item.id)
                    _result['pos'] = 4
    return _result
def check_copy_file(self, user_id, user_ref_id, default_pan_id, item_id, pids, tag):
    """Check whether *item_id* is available in the user's pan; if not, kick
    off an async job that copies/renames it into place.

    :param user_id: account id
    :param user_ref_id: owner reference id used by client-data lookups
    :param default_pan_id: preferred pan account id (resolved if falsy)
    :param item_id: data item id to check/copy
    :param pids: parent ids forwarded to the authorization check
    :param tag: "self" means the item already belongs to the user's space
    :return: dict with 'state' (0 ok, negative on error) and optional
             'item'/'pos'/'err' keys
    """
    ctx = self
    key_prefix = "client:ready:"
    if not default_pan_id:
        # fall back to the user's default pan account
        pan_acc = auth_service.default_pan_account(user_id)
        if pan_acc:
            default_pan_id = pan_acc.id
        if not default_pan_id:
            return {"state": -2, "err": LOGIC_ERR_TXT['need_pann_acc']}

    def final_do():
        # no cleanup required after the async job
        pass

    def to_do(key, rs_key):
        # Async worker: ensure the item exists (pinned) in the user's pan,
        # copying it from a share log when missing. key/rs_key are unused.
        _result = {'state': 0}
        data_item: DataItem = DataDao.get_data_item_by_id(item_id)
        _client_data_item: ClientDataItem = ClientDataDao.get_data_item_by_source_fs_id(
            data_item.fs_id, user_ref_id)
        if _client_data_item:
            if _client_data_item.pin == 0:
                pan_acc: PanAccounts = auth_service.get_pan_account(
                    default_pan_id, user_id)
                pan_acc = auth_service.check_pan_token_validation(pan_acc)
                # File already copied out, waiting to be taken in; directory
                # structure already exists. Report progress pos=2.
                async_service.update_state(key_prefix, user_id, {
                    "state": 0,
                    "pos": 2
                })
                old_name = _client_data_item.filename
                ftype = guess_file_type(old_name)
                if not ftype:
                    # fall back to the raw extension after the last dot
                    idx = old_name.rfind('.')
                    if idx > 0:
                        ftype = old_name[idx + 1:]
                # canonical name: "<source_fs_id>.<ext>" under the pan root
                new_name = "%s.%s" % (_client_data_item.source_fs_id, ftype)
                parent_dir = "/%s" % PAN_ROOT_DIR['name']
                new_path = "%s/%s" % (parent_dir, new_name)
                _jsonrs = restapi.file_rename(pan_acc.access_token,
                                              _client_data_item.path, new_name)
                # jsonrs = restapi.file_search(pan_acc.access_token, key=fs_id, parent_dir=parent_dir)
                # print("search new file jsonrs:", jsonrs)
                if "errno" in _jsonrs and _jsonrs["errno"] == 0:
                    # rename succeeded: persist new path/name and mark pinned
                    _client_data_item.path = new_path
                    _client_data_item.filename = new_name
                    _client_data_item.pin = 1
                    ClientDataDao.update_client_item(
                        _client_data_item.id, {
                            "path": new_path,
                            "filename": new_name,
                            "pin": 1
                        })
                    _result['state'] = 0
                    _result['item'] = ctx.__build_client_item_simple_dict(
                        _client_data_item)
                    _result['item']['id'] = obfuscate_id(
                        _client_data_item.id)
                    _result['pos'] = 4
                else:
                    _result['state'] = -4
                    _result["err"] = LOGIC_ERR_TXT['rename_fail']
            else:
                # already pinned: return the existing item unchanged
                _result['state'] = 0
                _result['item'] = ctx.__build_client_item_simple_dict(
                    _client_data_item)
                _result['item']['id'] = obfuscate_id(_client_data_item.id)
                _result['pos'] = 4
        else:
            # No local copy yet: resolve a share log and copy into the pan.
            _rs, share_log = open_service.build_shared_log(data_item)
            if not share_log:
                if 'state' in _rs:
                    _result['state'] = _rs['state']
                if 'err' in _rs:
                    _result['state'] = -9
                    _result['err'] = _rs['err']
            else:
                # copy
                if share_log.is_black == 1:
                    # blacklisted share: propagate its error
                    _result['state'] = -9
                    _result['err'] = share_log.err
                else:
                    _st, _client_data_item = self.copy_to_my_pan(
                        user_id, user_ref_id, share_log, default_pan_id)
                    _result['state'] = _st
                    if _st < 0:
                        if _st == -3:
                            _result['err'] = LOGIC_ERR_TXT['mk_top_fail']
                        elif _st == -4:
                            _result['err'] = LOGIC_ERR_TXT['rename_fail']
                    if _client_data_item:
                        _result[
                            'item'] = ctx.__build_client_item_simple_dict(
                                _client_data_item)
                        _result['item']['id'] = obfuscate_id(
                            _client_data_item.id)
                        _result['pos'] = 4
        return _result

    st = self.check_file_authorized(user_ref_id, item_id, pids, tag)
    result = {'state': 0}
    if st == 0:
        if "self" == tag:
            # item belongs to the user's own space: return it directly
            client_data_item: ClientDataItem = ClientDataDao.get_data_item_by_id(
                item_id, user_ref_id)
            if client_data_item:
                result['state'] = 0
                result['item'] = ctx.__build_client_item_simple_dict(
                    client_data_item)
                result['item']['id'] = obfuscate_id(client_data_item.id)
                result['pos'] = 4
                return result
            else:
                # file not found
                return {"state": -5, "err": LOGIC_ERR_TXT['not_exists']}
        else:
            # schedule the async copy job; progress starts at pos=0
            async_service.init_state(key_prefix, user_id, {
                "state": 0,
                "pos": 0
            })
            async_rs = async_service.async_checkout_client_item(
                key_prefix, user_id, to_do, final_do)
            if async_rs['state'] == 'block':
                # another job for this user is already running
                result['state'] = -11
                result['err'] = LOGIC_ERR_TXT['sys_lvl_down']
    else:
        # authorization failed: map the status code to an error message
        err_msg = LOGIC_ERR_TXT['unknown']
        if -10 == st:
            err_msg = LOGIC_ERR_TXT['ill_data']
        elif -2 == st:
            err_msg = LOGIC_ERR_TXT['need_access']
        result = {"state": st, "err": err_msg}
    return result
def query_file_list(self, parent_item_id):
    """Build decorated jstree nodes for the ES-indexed children of
    *parent_item_id* (local index), annotating free/product items with
    colored labels and prices.

    :param parent_item_id: parent item id used as the ES 'parent' filter
    :return: list of jstree node dicts
    """
    # item_list = CommunityDao.query_data_item_by_parent(parent_item_id, True, pan_id, limit=1000)
    params = []
    sp: SearchParams = SearchParams.build_params(0, 1000)
    # sp.add_must(is_match=False, field="path", value=parent_path)
    sp.add_must(is_match=False, field="parent", value=parent_item_id)
    # sp.add_must(is_match=False, field="isdir", value=0)
    # if pan_id and pan_id > 0:
    #     sp.add_must(is_match=False, field="sourceid", value=pan_id)
    es_body = build_query_item_es_body(sp)
    print("local es_body:", es_body)
    es_result = es_dao_local().es_search_exec(es_body)
    hits_rs = es_result["hits"]
    total = hits_rs["total"]
    print("local files es total:", total)
    for _s in hits_rs["hits"]:
        icon_val = "jstree-file"
        ori_fn_name = _s["_source"]["filename"]
        ori_aliasname = ''
        if "aliasname" in _s["_source"] and _s["_source"]["aliasname"]:
            ori_aliasname = _s["_source"]["aliasname"]
        aliasname = ori_aliasname
        fn_name = ori_fn_name
        txt = fn_name
        if aliasname:
            # Merge the alias with the original extension when the alias has
            # none, and display as "[original]alias".
            fn_name, extname = split_filename(fn_name)
            alias_fn, alias_extname = split_filename(aliasname)
            if not alias_extname:
                alias_extname = extname
            aliasname = "{}{}".format(
                alias_fn,
                "." + alias_extname if alias_extname.strip() else "")
            txt = "[{}]{}".format(fn_name, aliasname)
        is_dir = _s["_source"]["isdir"] == 1
        t_tag = ES_TAG_MAP['FREE']
        is_free = False
        tags = _s["_source"]["tags"]
        if not tags:
            tags = []
        isp = False
        has_children = False
        a_attr = {}
        # pinned plain files are highlighted red; products/free items green
        if not is_dir and _s["_source"]["pin"] == 1:
            a_attr = {'style': 'color:red'}
        if PRODUCT_TAG in tags:
            if not a_attr:
                a_attr = {'style': 'color:green'}
            isp = True
        if t_tag in tags:
            is_free = True
        if is_dir:
            # icon_val = "jstree-folder"
            icon_val = "folder"
            has_children = True
        else:
            f_type = guess_file_type(txt)
            if f_type:
                icon_val = "jstree-file file-%s" % f_type
        node_text = txt
        format_size = scale_size(_s["_source"]["size"])
        price = self.parse_price(format_size, 2)
        if format_size:
            node_text = "{}({})".format(node_text, format_size)
        if is_free:
            node_text = "[{}]{}".format(t_tag, node_text)
            # NOTE(review): nesting reconstructed — assumed the green style
            # applies only to free items, mirroring the PRODUCT_TAG branch.
            if not a_attr:
                a_attr = {'style': 'color:green'}
        if isp:
            node_text = "[{}]{}".format(PRODUCT_TAG, node_text)
        fs_id = _s["_source"]["fs_id"]
        item_id = _s["_source"]["id"]
        item_fuzzy_id = obfuscate_id(item_id)
        node_param = {
            "id": item_fuzzy_id,
            "text": node_text,
            "data": {
                "path": _s["_source"]["path"],
                "server_ctime": _s["_source"].get("server_ctime", 0),
                "isdir": _s["_source"]["isdir"],
                "source": _s["_source"]["source"],
                "fs_id": fs_id,
                "pin": _s["_source"]["pin"],
                "_id": item_fuzzy_id,
                "isp": isp,
                "tags": tags,
                "sourceid": _s["_source"]["sourceid"],
                "p_id": _s["_source"]["id"],
                "price": price,
                "fn": ori_fn_name,
                "alias": ori_aliasname
            },
            "children": has_children,
            "icon": icon_val
        }
        if a_attr:
            node_param['a_attr'] = a_attr
        params.append(node_param)
    return params
def query_file(self, item_id):
    """Return a single data item dict, refreshing its download link (dlink)
    from the pan REST API when missing or older than DLINK_TIMEOUT hours.

    Image items (category 3) also get their thumbnail refreshed.

    :param item_id: DataItem primary key
    :return: {"item": {...}} dict (size converted to KB, ids obfuscated)
    """
    data_item: DataItem = DataDao.get_data_item_by_id(item_id)
    need_sync = False
    logger.info("query_file dlink:{}".format(data_item.dlink))
    if not data_item.dlink_updated_at or not data_item.dlink:
        need_sync = True
    elif data_item.dlink_updated_at:
        # dlink expires after DLINK_TIMEOUT hours
        dt = arrow.get(
            data_item.dlink_updated_at).replace(tzinfo=self.default_tz)
        if dt.shift(hours=+DLINK_TIMEOUT) < arrow.now():
            need_sync = True
    account_id = data_item.account_id
    acc: Accounts = DataDao.account_by_id(account_id)
    flv_json = None
    need_thumbs = False
    # data_item_ext = None
    # if data_item.category == 1 and is_video_media(data_item.filename):
    #     data_item_ext = DataDao.get_data_item_ext_by_id(data_item.id)
    if is_image_media(data_item.filename) and data_item.category == 3:
        need_thumbs = True
    if need_sync:
        pan_acc: PanAccounts = self.get_pan_account(
            data_item.panacc, data_item.account_id)
        # sync_list = restapi.sync_file(self.pan_acc.access_token, [int(data_item.fs_id)])
        sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
            pan_acc.access_token, int(data_item.fs_id), need_thumbs)
        if sync_dlink:
            data_item.dlink = "{}&access_token={}".format(
                sync_dlink, pan_acc.access_token)
            data_item.dlink_updated_at = get_now_datetime()
            data_item_params = {
                "dlink": data_item.dlink,
                "dlink_updated_at": data_item.dlink_updated_at
            }
            if need_thumbs:
                # pick the largest available thumbnail, falling back to icon
                if "url3" in thumbs:
                    data_item_params["thumb"] = thumbs["url3"]
                    data_item.thumb = data_item_params["thumb"]
                elif "url2" in thumbs:
                    data_item_params["thumb"] = thumbs["url2"]
                    data_item.thumb = data_item_params["thumb"]
                elif "url1" in thumbs:
                    data_item_params["thumb"] = thumbs["url1"]
                    data_item.thumb = data_item_params["thumb"]
                elif "icon" in thumbs:
                    data_item_params["thumb"] = thumbs["icon"]
                    data_item.thumb = data_item_params["thumb"]
            DataDao.update_data_item(data_item.id, data_item_params)
        # not authorized
        # if data_item.category == 1 and is_video_media(data_item.filename):
        #     flv_json = restapi.get_media_flv_info(pan_acc.access_token, data_item.path)
        #     if flv_json and "mlink" in flv_json:
        #         flv_params = {"fs_id": data_item.fs_id, "mlink": flv_json["mlink"],
        #                       "start_at_time": flv_json["mlink_start_at"]}
        #         if data_item_ext:
        #             data_item_ext.mlink = flv_params["mlink"]
        #             data_item_ext.start_at_time = flv_params["start_at_time"]
        #             DataDao.update_data_item_ext(data_item.id, flv_params)
        #         else:
        #             data_item_ext = DataDao.new_data_item_ext(data_item.id, flv_params)
    used_pan_acc_id = data_item.panacc
    if data_item:
        # expose size in KB; strip raw id/parent in favor of obfuscated id
        data_item.size = int(data_item.size / 1024)
        f_type = guess_file_type(data_item.filename)
        params = {"item": DataItem.to_dict(data_item, ['id', 'parent'])}
        params["item"]["id"] = obfuscate_id(data_item.id)
        params["item"]["type"] = f_type
        params["item"]["media_type"] = self.check_data_item_media_type(
            data_item.category, data_item.filename)
        params["item"]["dlink_tokens"] = [used_pan_acc_id]
        # if data_item.category == 1 and is_video_media(data_item.filename) and data_item_ext:
        #     params["item"]["mlink"] = data_item_ext.mlink
        #     params["item"]["start_at_time"] = data_item_ext.start_at_time
        return params
def query_file_list(self, parent_item_id):
    """Build jstree nodes for a directory: sub-directories come from the DB,
    files come from the local ES index filtered by parent.

    :param parent_item_id: parent item id (DB parent and ES 'parent' field)
    :return: list of jstree node dicts (directories first, then files)
    """
    item_list = DataDao.query_data_item_by_parent(parent_item_id, True,
                                                  limit=1000)
    params = []
    for item in item_list:
        # prefer the alias name for display when present
        txt = item.aliasname if item.aliasname else item.filename
        item_fuzzy_id = obfuscate_id(item.id)
        format_size = scale_size(item.size)
        params.append({
            "id": item_fuzzy_id,
            "text": txt,
            "data": {
                "path": item.path,
                "_id": item_fuzzy_id,
                "format_size": format_size,
                "fs_id": item.fs_id,
                "category": item.category,
                "source": "local",
                "isdir": item.isdir
            },
            "children": True,
            "icon": "folder"
        })
    sp: SearchParams = SearchParams.build_params(0, 1000)
    sp.add_must(is_match=False, field="parent", value=parent_item_id)
    sp.add_must(is_match=False, field="isdir", value=0)
    es_body = build_query_item_es_body(sp)
    es_result = es_dao_local().es_search_exec(es_body)
    hits_rs = es_result["hits"]
    total = hits_rs["total"]
    logger.info("files es total:{}".format(total))
    for _s in hits_rs["hits"]:
        icon_val = "file"
        fn_name = _s["_source"]["filename"]
        category = _s["_source"]["category"]
        format_size = scale_size(_s["_source"]["size"])
        media_type = self.check_data_item_media_type(category, fn_name)
        txt = fn_name
        aliasname = None
        if "aliasname" in _s["_source"] and _s["_source"]["aliasname"]:
            aliasname = _s["_source"]["aliasname"]
        if aliasname:
            # Merge the alias with the original extension when the alias has
            # none, and display as "[original]alias".
            fn_name, extname = split_filename(fn_name)
            alias_fn, alias_extname = split_filename(aliasname)
            if not alias_extname:
                alias_extname = extname
            aliasname = "{}{}".format(
                alias_fn,
                "." + alias_extname if alias_extname.strip() else "")
            txt = "[{}]{}".format(fn_name, aliasname)
        f_type = guess_file_type(txt)
        if f_type:
            icon_val = "file file-%s" % f_type
        item_fuzzy_id = obfuscate_id(_s["_source"]["id"])
        params.append({
            # Fix: reuse the computed item_fuzzy_id instead of calling
            # obfuscate_id(_s["_source"]["id"]) a second time for "id".
            "id": item_fuzzy_id,
            "text": txt,
            "data": {
                "path": _s["_source"]["path"],
                "isdir": _s["_source"]["isdir"],
                "source": "local",
                "media_type": media_type,
                "format_size": format_size,
                "category": category,
                "fs_id": _s["_source"]["fs_id"],
                "_id": item_fuzzy_id
            },
            "children": False,
            "icon": icon_val
        })
    return params
def update_product(cls, data_id, params):
    """Update the Product identified by *data_id*, keeping only whitelisted
    model fields from *params*."""
    allowed = Product.field_names()
    _params = {k: v for k, v in params.items() if k in allowed}
    with db:
        Product.update(**_params).where(
            Product.pro_no == obfuscate_id(data_id)).execute()
def login_check_user(self, acc: Accounts, need_update_login_time=True, source="BD"):
    """Validate a logged-in account: flag pan accounts whose access token
    must be renewed and refresh the login token when it has expired.

    :param acc: the account row (returns None when falsy)
    :param need_update_login_time: allow re-issuing the login token when the
        login is older than LOGIN_TOKEN_TIMEOUT
    :param source: auth source tag forwarded to __patch_acc_ext
    :return: dict with token/renewal info, or None when *acc* is falsy
    """
    need_renew_pan_acc = []
    if acc:
        pan_acc_list = DataDao.pan_account_list(acc.id)
        # pan_acc: PanAccounts = DataDao.pan_account_list(acc.id)
        need_renew_access_token = False
        l = len(pan_acc_list)
        for pan_acc in pan_acc_list:
            if pan_acc.client_id != self.client_id or pan_acc.client_secret != self.client_secret:
                # credentials changed: full re-auth needed (refresh=False)
                need_renew_access_token = True
                need_renew_pan_acc.append({
                    "id": pan_acc.id,
                    "name": pan_acc.name,
                    "use_cnt": pan_acc.use_count,
                    "refresh": False,
                    'auth': self.pan_auth
                })
            elif pan_acc.access_token and pan_acc.token_updated_at:
                # token exists: renew only when older than the timeout
                tud = arrow.get(pan_acc.token_updated_at).replace(
                    tzinfo=self.default_tz)
                if (arrow.now(self.default_tz) -
                        tud).total_seconds() > PAN_ACCESS_TOKEN_TIMEOUT:
                    need_renew_access_token = True
                    need_renew_pan_acc.append({
                        "id": pan_acc.id,
                        "name": pan_acc.name,
                        "use_cnt": pan_acc.use_count,
                        "refresh": True,
                        'auth': self.pan_auth
                    })
            else:
                # no token at all: must refresh
                need_renew_access_token = True
                need_renew_pan_acc.append({
                    "id": pan_acc.id,
                    "name": pan_acc.name,
                    "use_cnt": pan_acc.use_count,
                    "refresh": True,
                    'auth': self.pan_auth
                })
        if l == 0:
            need_renew_access_token = True
        # if pan_acc and pan_acc['access_token'] and pan_acc['token_updated_at']:
        #     tud = arrow.get(pan_acc['token_updated_at']).replace(tzinfo=self.default_tz)
        #     if (arrow.now(self.default_tz) - tud).total_seconds() < PAN_ACCESS_TOKEN_TIMEOUT:
        #         need_renew_access_token = False
        lud = arrow.get(
            acc.login_updated_at).replace(tzinfo=self.default_tz)
        diff = arrow.now(self.default_tz) - lud
        params = {}
        if (need_update_login_time and diff.total_seconds() >
                LOGIN_TOKEN_TIMEOUT) or not acc.login_token:
            # login stale (or never issued): build a fresh token
            if not acc.fuzzy_id:
                acc.fuzzy_id = obfuscate_id(acc.id)
                params["fuzzy_id"] = acc.fuzzy_id
            # login_token = make_token(acc.fuzzy_id)
            login_token, _ = auth_service.build_user_payload(acc)
            acc.login_token = login_token
            params["login_token"] = login_token
            lud = params["login_updated_at"] = get_now_datetime()
            DataDao.update_account_by_pk(acc.id, params=params)
        else:
            tk = acc.login_token
            if tk:
                # double-check the embedded timestamp inside the token itself
                user_payload = get_payload_from_token(tk)
                if user_payload:
                    tm = user_payload['tm']
                    ctm = get_now_ts()
                    if ctm - tm > LOGIN_TOKEN_TIMEOUT:
                        # login_token = make_token(acc.fuzzy_id)
                        login_token, _ = auth_service.build_user_payload(
                            acc)
                        acc.login_token = login_token
                        params["login_token"] = login_token
                        lud = params[
                            "login_updated_at"] = get_now_datetime()
                        DataDao.update_account_by_pk(acc.id, params=params)
        log.debug("login_token:{}".format(acc.login_token))
        result = {"need_renew_access_token": need_renew_access_token}
        if need_renew_access_token:
            result['auth'] = self.pan_auth
        result['token'] = acc.login_token
        result['login_at'] = int(arrow.get(lud).timestamp * 1000)
        # print('login_at:', result['login_at'])
        result['pan_acc_list'] = need_renew_pan_acc
        self.__patch_acc_ext(acc, result, source)
        # account_ext = DataDao.account_ext_by_acc_id(acc.id)
        # result['username'] = account_ext.username
        # result['portrait'] = account_ext.portrait
        result['id'] = acc.fuzzy_id
        return result
    return None