def update_dir_size(self, data_item: DataItem, recursive=False, reset_sub_dir=True):
    if not data_item or data_item.filename == TOP_DIR_FILE_NAME \
            or data_item.isdir == 0:
        return
    data_item_dict = DataItem.to_dict(data_item)
    rs = self.update_dir_size_by_dict(data_item_dict, recursive, reset_sub_dir)
    data_item.size = data_item_dict['size']
    data_item.sized = data_item_dict['sized']
    return rs
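
# --- Usage sketch (illustrative, not part of this module) --------------------
# A minimal example of the calling pattern, assuming `mpan_service` and
# `DataDao` are the module-level objects used elsewhere in this file; the
# id 55 is a placeholder. It mirrors the sync_db_file_list_to_es loop below:
# the recomputed size/sized values are written back onto the passed instance.
def _example_update_dir_size():
    dir_item = DataDao.get_data_item_by_id(55)
    changed = mpan_service.update_dir_size(dir_item, recursive=False,
                                           reset_sub_dir=False)
    if changed:
        # dir_item already carries the fresh size; push it to Elasticsearch
        DataDao.sync_data_item_to_es(dir_item)
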
def save_data_item(cls, is_dir, params):
    data_item = DataItem(category=params['category'], isdir=is_dir,
                         filename=params['filename'], fs_id=params['fs_id'],
                         path=params['path'], size=params['size'],
                         md5_val=params.get('md5_val', ''),
                         account_id=params.get('account_id'),
                         parent=params.get('parent', 0),
                         panacc=params.get('panacc', 0))
    with db:
        data_item.save(force_insert=True)
    cls.sync_data_item_to_es(data_item)
def new_root_item(cls, user_id, pan_id):
    data_item = DataItem(category=6, isdir=1, filename=TOP_DIR_FILE_NAME,
                         fs_id='0', path='/', size=0, md5_val='',
                         account_id=user_id, parent=0, panacc=pan_id)
    with db:
        data_item.save(force_insert=True)
    return data_item.id, data_item
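
# --- Usage sketch (illustrative) ----------------------------------------------
# How a per-pan-account root row is typically bootstrapped, mirroring the
# /syncallnodes handler further down. Consuming get_root_item_by_pan_id via
# .first() is an assumption about the caller, not code from this module.
def _example_bootstrap_root(user_id, pan_id):
    root_item = DataDao.get_root_item_by_pan_id(pan_id).first()
    if root_item:
        return root_item.id, root_item
    # new_root_item returns (primary key, DataItem), as defined above
    return DataDao.new_root_item(user_id, pan_id)
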
def new_local_visible_by_parent(cls, parent_id, show):
    offset = 0
    size = 500
    cdi: DataItem = None
    ln = size
    dog = 1000000  # watchdog: hard cap on loop iterations
    while ln == size and dog > 0:
        dog = dog - 1
        ms = DataItem.select().where(
            DataItem.parent == parent_id,
            DataItem.isdir == 0).offset(offset).limit(size)
        ln = len(ms)
        item_list = []
        for cdi in ms:
            rs = LocalVisible.select().where(
                LocalVisible.id == cdi.id).exists()
            if not rs:
                if show == 1:
                    item_list.append(cdi)
            else:
                if show == 0:
                    item_list.append(cdi)
        if item_list:
            cls.del_save_local_list(item_list, show)
        # print("dog:", dog, ",offset:", offset)
        offset = offset + size
def update_data_item(cls, pk_id, params):
    # keep only keys that are real DataItem columns
    _params = {p: params[p] for p in params if p in DataItem.field_names()}
    with db:
        DataItem.update(**_params).where(DataItem.id == pk_id).execute()
    es_up_params = es_dao_local().filter_update_params(_params)
    if es_up_params:
        logger.info(
            "will update es item es_up_params:{}".format(es_up_params))
        es_dao_local().update_fields(pk_id, **es_up_params)
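
# --- Usage sketch (illustrative) ----------------------------------------------
# update_data_item silently drops keys that are not DataItem columns, so a
# caller may pass a loosely shaped dict; 'junk' below is a made-up key and
# never reaches the UPDATE or the ES sync.
def _example_partial_update(pk_id):
    DataDao.update_data_item(pk_id, {'size': 1024, 'sized': 1, 'junk': 'x'})
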
def get_root_item_by_user_id(cls, user_id):
    mode_select = DataItem.select(DataItem, PanAccounts).join(
        PanAccounts, on=(DataItem.panacc == PanAccounts.id),
        attr="pan").where(DataItem.filename == TOP_DIR_FILE_NAME,
                          PanAccounts.user_id == user_id)
    return mode_select
def query_data_item_by_parent(cls, parent_id, is_dir=True, offset=0, limit=100):
    return DataItem.select().where(
        DataItem.isdir == (1 if is_dir else 0),
        DataItem.parent == parent_id).limit(limit).offset(offset)
def sum_size_dir(cls, parent_id):
    model_rs: ModelSelect = DataItem.select(
        fn.SUM(DataItem.size).alias('total')).where(
            DataItem.parent == parent_id)
    if model_rs:
        v = model_rs.dicts()[0].get('total')
        if v:
            return v
    return 0
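
# --- Standalone illustration (not part of the DAO) -----------------------------
# Why sum_size_dir guards its result: SQL SUM() over zero rows yields NULL,
# which peewee surfaces as None. This sketch runs against an in-memory SQLite
# database with a stand-in model, not the real DataItem.
from peewee import SqliteDatabase, Model, IntegerField

_demo_db = SqliteDatabase(':memory:')

class _DemoItem(Model):
    parent = IntegerField()
    size = IntegerField()

    class Meta:
        database = _demo_db

def _demo_sum_none_guard():
    _demo_db.connect()
    _demo_db.create_tables([_DemoItem])
    total = _DemoItem.select(
        fn.SUM(_DemoItem.size).alias('total')).where(
            _DemoItem.parent == 1).dicts()[0].get('total')
    assert total is None  # no rows -> NULL -> None, hence the `if v` fallback to 0
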
def sync_db_file_list_to_es():
    is_dir = True
    size = 100
    offset = 0
    count = 0

    def sync_page(page_offset):
        # process one page of directory rows; returns how many rows it held
        sql = "select * from dataitem where isdir=%s limit %s,%s" % (
            1 if is_dir else 0, page_offset, size)
        data_item_list = DataItem.raw(sql)
        if not data_item_list:
            return 0
        for data_item in data_item_list:
            if data_item.sized == 0:
                mpan_service.update_dir_size(data_item, recursive=False,
                                             reset_sub_dir=False)
            DataDao.sync_data_item_to_es(data_item)
        return len(data_item_list)

    get_size = sync_page(offset)
    count = count + get_size
    dog = 100000  # watchdog: hard cap on pages
    while get_size == size and dog > 0:
        offset = offset + size
        get_size = sync_page(offset)
        count = count + get_size
        time.sleep(0.3)
        dog = dog - 1
    print("sync_dir_file_list did count:", count)
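
# --- Hedged alternative (illustrative) ------------------------------------------
# peewee's Model.raw() also accepts bound parameters, which avoids building the
# SQL with %-interpolation as above. The placeholder style follows the driver
# (%s shown here, as used by MySQL-style drivers).
def _example_raw_page(offset, size):
    return DataItem.raw(
        "select * from dataitem where isdir=%s limit %s,%s", 1, offset, size)
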
def recursive_check_dir_size(dir_list: list, pos, rs: dict):
    if pos >= len(dir_list):
        return rs
    p_dir_dict = dir_list[pos]
    p_dir_id = p_dir_dict['id']
    sub_dir: DataItem = DataDao.find_need_update_size_dir(
        parent_id=p_dir_dict['id'])
    if sub_dir:
        recursive_check_dir_size([DataItem.to_dict(sub_dir)], 0, rs)
        recursive_check_dir_size(dir_list, pos, rs)
    else:
        ori_size = p_dir_dict['size']
        s: int = DataDao.sum_size_dir(parent_id=p_dir_id)
        if not p_dir_dict['sized'] or s != ori_size:
            rs['change'] = True
            DataDao.update_data_item(p_dir_id, {'size': s, 'sized': 1})
            p_dir_dict['size'] = s
            p_dir_dict['sized'] = 1
            print("changed:", True)
            print('dir id:', p_dir_dict['id'], ',size:', s,
                  ',ori_size:', ori_size)
        recursive_check_dir_size(dir_list, pos + 1, rs)
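
# --- Toy walk-through (illustrative, no database) --------------------------------
# The traversal above descends while any child directory still has sized == 0,
# so the deepest unsized directory is summed first and totals bubble upward.
# This dict-based mirror of that order is a sketch, not the real algorithm:
# tree maps name -> {'size': int, 'sized': 0/1, 'children': [names]}.
def _toy_check(tree, name, rs):
    node = tree[name]
    unsized = [c for c in node['children'] if tree[c]['sized'] == 0]
    if unsized:
        _toy_check(tree, unsized[0], rs)  # settle an unsized child first
        _toy_check(tree, name, rs)        # then retry this directory
    else:
        s = sum(tree[c]['size'] for c in node['children'])
        if not node['sized'] or s != node['size']:
            rs['change'] = True
            node['size'], node['sized'] = s, 1

# e.g. tree = {'a': {'size': 0, 'sized': 0, 'children': ['b']},
#              'b': {'size': 7, 'sized': 1, 'children': []}}
# _toy_check(tree, 'a', {'change': False}) leaves tree['a']['size'] == 7
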
def del_data_item_by_id(cls, pk_id):
    with db:
        DataItem.delete().where(DataItem.id == pk_id).execute()
def del_data_item_by_parent_synced(cls, parent, synced, is_dir=True):
    with db:
        DataItem.delete().where(
            DataItem.parent == parent,
            DataItem.synced == synced,
            DataItem.isdir == (1 if is_dir else 0)).execute()
def del_data_item_by_parent_pin(cls, parent, pin, is_dir=True):
    with db:
        DataItem.delete().where(
            DataItem.parent == parent,
            DataItem.pin == pin,
            DataItem.isdir == (1 if is_dir else 0)).execute()
def update_data_item_by_parent_id(cls, parent_id, params):
    _params = {p: params[p] for p in params if p in DataItem.field_names()}
    with db:
        DataItem.update(**_params).where(
            DataItem.parent == parent_id).execute()
def get(self):
    path = self.request.path
    if path.endswith("/init"):
        pan_acc_list = CommunityDao.pan_account_list(self.request.user_id)
        for pan in pan_acc_list:
            if compare_dt_by_now(pan['expires_at']) <= 0:
                pan['expired'] = 1
            else:
                pan['expired'] = 0
        params = {'items': pan_acc_list}
        self.render('panmanage.html', **params)
    elif path.endswith("/ftree"):
        pan_id = self.get_argument("panid", "0")
        params = {'pan_id': pan_id}
        self.render('mantree.html', **params)
    elif path.endswith("/fload"):
        source = self.get_argument("source", "")
        node_id = self.get_argument("id")
        parent_path = self.get_argument("path")
        params = []
        if not source or "local" == source:
            if not '#' == node_id:
                node_id_val = decrypt_id(node_id)
                parent_id = int(node_id_val)
                params = mpan_service.query_file_list(parent_id)
            else:
                params = mpan_service.fetch_root_item_by_user(self.user_id)
        if not source or "shared" == source:
            shared_params = []
            if not '#' == node_id:
                parent_id = int(self.get_argument("fs_id", "0"))
                if parent_id == 0:
                    shared_params = mpan_service.query_share_list(None)
                else:
                    shared_params = mpan_service.query_share_list(parent_id)
            else:
                node_param = {
                    "id": "s_0",
                    "text": "外部分享(shared)",  # "external shares"
                    "data": {
                        "path": "/",
                        "fs_id": 0,
                        "server_ctime": 0,
                        "isdir": 1,
                        "source": "shared",
                        "_id": 0,
                        "pin": 0
                    },
                    "children": True,
                    "icon": "folder"
                }
                shared_params = [node_param]
            if shared_params:
                if not params:
                    params = shared_params
                else:
                    params = params + shared_params
        self.to_write_json(params)
    elif path.endswith("/show"):
        source = self.get_argument("source", "")
        parent = self.get_argument("parent", "")
        node_id = self.get_argument("id")
        logger.info("source:{},parent:{},node_id:{}".format(
            source, parent, node_id))
        if "local" == source:
            if parent:
                mpan_service.update_local_sub_dir(parent, {'pin': 1})
            else:
                mpan_service.update_local_item(node_id, {'pin': 1})
        else:
            if parent:
                mpan_service.update_shared_sub_dir(parent, {'pin': 1})
            else:
                mpan_service.update_shared_item(node_id, {'pin': 1})
        self.to_write_json({})
    elif path.endswith("/hide"):
        source = self.get_argument("source", "")
        parent = self.get_argument("parent", "")
        node_fuzzy_id = self.get_argument("id")
        node_id = decrypt_id(node_fuzzy_id)
        logger.info("hide source:{},parent:{},node_id:{}".format(
            source, parent, node_id))
        if "local" == source:
            if parent:
                mpan_service.update_local_sub_dir(parent, {'pin': 0})
            else:
                mpan_service.update_local_item(node_id, {'pin': 0})
        else:
            if parent:
                mpan_service.update_shared_sub_dir(parent, {'pin': 0})
            else:
                mpan_service.update_shared_item(node_id, {'pin': 0})
        self.to_write_json({})
    elif path.endswith("/clear"):
        item_fuzzy_id = self.get_argument("id", None)
        item_id = int(decrypt_id(item_fuzzy_id))
        source = self.get_argument("source", "")
        rs = sync_pan_service.clear(item_id, source)
        self.to_write_json(rs)
    elif path.endswith("/clearbyid"):
        item_id = int(self.get_argument("id", "0"))
        source = self.get_argument("source", "")
        logger.info("clearbyid item_id:{}, source:{}".format(
            item_id, source))
        rs = sync_pan_service.clear(item_id, source)
        logger.info("clearbyid rs:{}".format(rs))
        self.to_write_json(rs)
    elif path.endswith("/rename"):
        item_fuzzy_id = self.get_argument("itemid", None)
        item_id = int(decrypt_id(item_fuzzy_id))
        old_name = self.get_argument("old_name", "")
        alias_name = self.get_argument("alias_name", "")
        source = self.get_argument("source", "")
        result = sync_pan_service.rename(item_id, old_name, alias_name,
                                         source)
        self.to_write_json(result)
    elif path.endswith("/free"):
        item_fuzzy_id = self.get_argument("itemid", None)
        item_id = int(decrypt_id(item_fuzzy_id))
        source = self.get_argument("source", "")
        desc = self.get_argument("desc", "")
        tags = self.get_argument("tags", "")
        if tags:
            tags = tags.split(',')
        else:
            tags = []
        if "local" != source:
            self.to_write_json({"state": -1})
        else:
            rs = mpan_service.free(self.user_id, item_id, desc, tags)
            self.to_write_json(rs)
    elif path.endswith("/unfree"):
        item_fuzzy_id = self.get_argument("itemid", None)
        item_id = int(decrypt_id(item_fuzzy_id))
        fs_id = self.get_argument("fs_id", "")
        source = self.get_argument("source", "")
        tags = self.get_argument("tags", "")
        if tags:
            tags = tags.split(',')
        else:
            tags = []
        if "local" != source:
            self.to_write_json({"state": -1})
        else:
            rs = mpan_service.unfree(self.user_id, item_id, fs_id, tags)
            self.to_write_json(rs)
    elif path.endswith("/fparts"):
        item_list = CommunityDao.query_share_logs_by_hours(-24, 0, 100)
        params = {"list": item_list}
        self.render('fparts.html', **params)
    elif path.endswith("/clearshare"):
        share_item_id = int(self.get_argument("id", "0"))
        sync_pan_service.clear_share_log(share_item_id)
        self.to_write_json({'state': 0})
    elif path.endswith("/pan_acc_list"):
        need_renew_pan_acc = pan_service.all_pan_acc_list_by_user(
            self.user_id)
        result = {"result": "ok", "pan_acc_list": need_renew_pan_acc}
        self.to_write_json(result)
    elif path.endswith("/batchupdate"):
        pan_id = self.get_argument("panid", "0")
        pan_acc = pan_service.get_pan_account(pan_id, self.user_id)
        # editable columns only; avoid shadowing peewee's `fn` in the loop var
        columns = [
            f for f in DataItem.field_names()
            if f not in ["id", "created_at", "updated_at", "pan_acc",
                         "account_id"]
        ]
        params = {
            "pan_id": int(pan_id),
            "name": pan_acc.name,
            "columns": columns
        }
        self.render('batchupdate.html', **params)
    elif path.endswith("/batchupdatedo"):
        pan_id = self.get_argument("panid", "0")
        cname = self.get_argument("cname")
        datas = self.get_argument("datas", "")
        lines = datas.split("\n")
        kv = {}
        cnt = 0
        for line in lines:
            vals = line.split("\t")
            if len(vals) == 2:
                cnt = cnt + 1
                kv[vals[0]] = vals[1]
                DataDao.update_data_item(int(vals[0]),
                                         {cname: vals[1].strip()})
        rs = {
            "state": 0,
            "cnt": cnt,
            "lines_cnt": len(lines),
            "cname": cname
        }
        self.to_write_json(rs)
    else:
        self.to_write_json({})
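
# --- Payload sketch (illustrative) -------------------------------------------
# The /batchupdatedo handler expects `datas` as newline-separated rows of
# "<data item id><TAB><new value>", applied to the one column named by
# `cname`. The ids and values below are placeholders.
def _example_batchupdate_payload():
    datas = "1001\tnew-name.mp4\n1002\tother-name.mkv"
    # with cname="filename", the handler runs, per row:
    #   DataDao.update_data_item(1001, {"filename": "new-name.mp4"})
    #   DataDao.update_data_item(1002, {"filename": "other-name.mkv"})
    return {"panid": "3", "cname": "filename", "datas": datas}
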
def get_data_item_by_fs_id(cls, fs_id):
    return DataItem.select().where(DataItem.fs_id == fs_id).first()
def get_data_item_by_id(cls, pk_id):
    return DataItem.select().where(DataItem.id == pk_id).first()
def check_data_item_exists_by_parent(cls, item_id, parent_id):
    return DataItem.select().where(DataItem.id == item_id,
                                   DataItem.parent == parent_id).exists()
def get_root_item_by_pan_id(cls, pan_id):
    return DataItem.select().where(DataItem.filename == TOP_DIR_FILE_NAME,
                                   DataItem.panacc == pan_id)
def query_data_item_by_parent_all(cls, parent_id, offset=0, limit=100):
    return DataItem.select().where(
        DataItem.parent == parent_id).limit(limit).offset(offset)
def query_leaf_data_item(cls, is_dir=True, offset=0, limit=100):
    return DataItem.select().where(
        DataItem.isdir == (1 if is_dir else 0),
        DataItem.pin == 0).limit(limit).offset(offset)
def update_dir_size_by_dict(self, data_item: dict, recursive=False, reset_sub_dir=True):
    if data_item['filename'] == TOP_DIR_FILE_NAME or data_item['isdir'] == 0:
        return

    def recover_sized_zero(parent_id):
        DataDao.update_data_item_by_parent_id(parent_id, {'sized': 0})
        if recursive:
            size = 100
            offset = 0
            rs_len = 100
            while rs_len == size:
                sub_dir_list = DataDao.query_data_item_by_parent(
                    parent_id, offset=offset, limit=size)
                rs_len = len(sub_dir_list)
                for s_dir in sub_dir_list:
                    recover_sized_zero(s_dir.id)
                offset = offset + size  # advance to the next page

    def recursive_check_dir_size(dir_list: list, pos, rs: dict):
        if pos >= len(dir_list):
            return rs
        p_dir_dict = dir_list[pos]
        p_dir_id = p_dir_dict['id']
        sub_dir: DataItem = DataDao.find_need_update_size_dir(
            parent_id=p_dir_dict['id'])
        if sub_dir:
            recursive_check_dir_size([DataItem.to_dict(sub_dir)], 0, rs)
            recursive_check_dir_size(dir_list, pos, rs)
        else:
            ori_size = p_dir_dict['size']
            s: int = DataDao.sum_size_dir(parent_id=p_dir_id)
            if not p_dir_dict['sized'] or s != ori_size:
                rs['change'] = True
                DataDao.update_data_item(p_dir_id, {'size': s, 'sized': 1})
                p_dir_dict['size'] = s
                p_dir_dict['sized'] = 1
                print("changed:", True)
                print('dir id:', p_dir_dict['id'], ',size:', s,
                      ',ori_size:', ori_size)
            recursive_check_dir_size(dir_list, pos + 1, rs)

    if reset_sub_dir:
        recover_sized_zero(data_item['id'])
    _rs = {'change': False}
    recursive_check_dir_size([data_item], 0, _rs)
    print("_rs['change']:", _rs['change'], data_item['parent'])
    if _rs['change']:
        # propagate the new totals up the ancestor chain until the root or
        # until a parent's stored size already matches
        _data_item: dict = data_item
        while _data_item['parent']:
            p_data_item: DataItem = DataDao.get_data_item_by_id(
                _data_item['parent'])
            if p_data_item and p_data_item.filename != TOP_DIR_FILE_NAME:
                _ori_size = p_data_item.size
                _s: int = DataDao.sum_size_dir(parent_id=p_data_item.id)
                print('update parent dir id:', p_data_item.id, ',size:', _s,
                      ',ori_size:', _ori_size)
                if _s != _ori_size:
                    DataDao.update_data_item(p_data_item.id,
                                             {'size': _s, 'sized': 1})
                else:
                    break
                _data_item = DataItem.to_dict(p_data_item)
            else:
                break
    return _rs['change']
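
# --- Usage sketch (illustrative) -------------------------------------------------
# Forcing a full recount of one subtree: reset_sub_dir=True re-flags children
# with sized=0 (recursive=True pushes the reset down every level) before the
# walk above re-sums them. Assumes `mpan_service` owns this method, matching
# the update_dir_size wrapper at the top of this file; dir_id is a placeholder.
def _example_force_recount(dir_id):
    item = DataDao.get_data_item_by_id(dir_id)
    return mpan_service.update_dir_size_by_dict(
        DataItem.to_dict(item), recursive=True, reset_sub_dir=True)
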
def find_need_update_size_dir(cls, parent_id) -> DataItem:
    return DataItem.select().where(DataItem.parent == parent_id,
                                   DataItem.isdir == 1,
                                   DataItem.sized == 0).first()
def get(self):
    path = self.request.path
    if path.endswith("/list"):
        parent = self.get_argument("parent", default='55')
        item_list = DataDao.query_data_item_by_parent(int(parent), True)
        params = {"list": item_list}
        self.render('list.html', **params)
    elif path.endswith("/fload"):
        source = self.get_argument("source", "")
        node_id = self.get_argument("id")
        logger.info("fload node_id:{},source:{}".format(node_id, source))
        params = []
        if not '#' == node_id:
            if "assets" == source:
                if 'assets_0' == node_id:
                    params = ProductDao.query_assets_by_ref_id_for_tree(
                        self.ref_id)
            elif "free" == source:
                if 'free_0' == node_id:
                    params = pan_service.query_root_list()
            elif "self" == source:
                if 'self_0' == node_id:
                    if not self.default_pan_id:
                        pan_acc = auth_service.default_pan_account(
                            self.user_id)
                        self.default_pan_id = pan_acc.id
                    if self.default_pan_id:
                        params = pan_service.query_client_root_list(
                            self.default_pan_id)
                else:
                    node_id_val = decrypt_id(node_id)
                    parent_id = int(node_id_val)
                    params = pan_service.query_client_sub_list(
                        parent_id, self.ref_id)
            elif "empty" == source:
                pass
            else:
                node_id_val = decrypt_id(node_id)
                parent_id = int(node_id_val)
                params = pan_service.query_file_list(parent_id)
        else:
            params.append({
                "id": "free_0",
                "text": PAN_TREE_TXT['free_root'],
                "data": {"source": "free"},
                "children": True,
                "icon": "folder"
            })
            params.append({
                "id": "assets_0",
                "text": PAN_TREE_TXT['buy_root'],
                "data": {"source": "assets"},
                "children": True,
                "icon": "folder"
            })
            params.append({
                "id": "self_0",
                "text": PAN_TREE_TXT['self_root'],
                "data": {"source": "self"},
                "children": True,
                "icon": "folder"
            })
            params.append({
                "id": "empty_0",
                "text": PAN_TREE_TXT['empty_root'],
                "data": {"source": "empty"},
                "children": False,
                "icon": "file"
            })
        self.to_write_json(params)
    elif path.endswith("/search"):
        params = {}
        self.render('search.html', **params)
    elif path.endswith("/load"):
        kw = self.get_body_argument("kw")
        source = self.get_body_argument("source")
        kw = kw.replace(' ', '%')
        page = self.get_body_argument("page")
        size = 100
        offset = int(page) * size
        sp: SearchParams = SearchParams.build_params(offset, size)
        sp.add_must(value=kw)
        es_dao_fun = es_dao_local
        if source:
            sp.add_must(field='source', value=source)
            es_dao_fun = es_dao_share
        es_body = build_query_item_es_body(sp)
        es_result = es_dao_fun().es_search_exec(es_body)
        hits_rs = es_result["hits"]
        total = hits_rs["total"]
        datas = [_s["_source"] for _s in hits_rs["hits"]]
        has_next = offset + size < total
        rs = {"data": datas, "has_next": has_next}
        self.to_write_json(rs)
    elif path.endswith("/finfo"):
        item_fuzzy_id = self.get_argument("id")
        item_id = int(decrypt_id(item_fuzzy_id))
        params = pan_service.query_file(item_id)
        self.to_write_json(params)
    elif path.endswith("/readydownload"):
        fs_id = self.get_argument("fs_id")
        logger.info("readydownload fs_id:{}".format(fs_id))
        params, share_log, data_item = pan_service.share_folder(fs_id)
        min_size = 6000
        if data_item.size > min_size:
            sub_params = pan_service.sub_account_transfer(share_log)
            result = {"subs": sub_params}
        else:
            result = {"master": params}
        self.to_write_json(result)
    elif path.endswith("/check_transfer"):
        transfer_log_id = self.get_argument("id")
        rs = {}
        if transfer_log_id:
            t = pan_service.recheck_transfer_d_link(int(transfer_log_id))
            if t:
                rs = t
        self.to_write_json(rs)
    elif path.endswith("/check_shared_log"):
        shared_log_id = self.get_argument("id")
        rs = {}
        if shared_log_id:
            t = pan_service.recheck_shared_d_link(int(shared_log_id))
            if t:
                rs = t
        self.to_write_json(rs)
    elif path.endswith("/sync_used"):
        pan_account_ids_str = self.get_argument("ids")
        used_str = self.get_argument("useds")
        if pan_account_ids_str and used_str:
            _ids = pan_account_ids_str.split(",")
            useds = used_str.split(",")
            params = []
            ul = len(useds)
            for i in range(len(_ids)):
                _id = _ids[i]
                if i < ul:
                    used = useds[i]
                    params.append({'id': int(_id), 'used': int(used)})
            if params:
                DataDao.update_pan_account_used(params)
        self.to_write_json({})
    elif path.endswith("/dlink"):
        item_id = self.get_argument("id")
        params = pan_service.query_file(item_id)
        self.render('dlink.html', **params)
    elif path.endswith("/manage"):
        pan_id = self.get_argument("panid", "0")
        params = {'pan_id': pan_id}
        self.render('ftree.html', **params)
    elif path.endswith("/helptokens"):
        res = pan_service.pan_accounts_dict()
        self.to_write_json(res)
    elif path.endswith("/syncallnodes"):
        item_fuzzy_id = self.get_argument("id", None)
        item_id = int(decrypt_id(item_fuzzy_id))
        pan_id = self.get_argument('panid', "0")
        logger.info("syncallnodes pan_id:{}".format(pan_id))
        pan_id = int(pan_id)
        recursion = self.get_argument("recursion")
        if recursion == "1":
            recursion = True
        else:
            recursion = False
        if not item_id:
            if pan_id:
                root_item: DataItem = sync_pan_service.fetch_root_item(
                    pan_id)
                logger.info('root_item:{}'.format(
                    DataItem.to_dict(root_item)))
                if root_item:
                    item_id = root_item.id
                else:
                    item_id = sync_pan_service.new_root_item(
                        self.request.user_id, pan_id)
            else:
                item_id = 55
        item_id = int(item_id)
        rs = sync_pan_service.sync_from_root(item_id, recursion, pan_id,
                                             self.request.user_id)
        self.to_write_json(rs)
    elif path.endswith("/synccommunity"):
        bd = self.request.body
        data_obj = json.loads(bd)
        logger.info('/synccommunity payload user_id:{}'.format(
            self.request.user_id))
        open_service.sync_community_item_to_es(self.request.user_id,
                                               data_obj)
        self.to_write_json({'state': 0})
    elif path.endswith("/syncstate"):
        self.release_db = False
        pan_id = self.get_argument('panid', "0")
        dir_item_id = sync_pan_service.check_sync_state(
            pan_id, self.request.user_id)
        if dir_item_id:
            self.to_write_json({'state': 1, 'item': dir_item_id})
        else:
            self.to_write_json({'state': 0})
def query_file(self, item_id):
    data_item: DataItem = DataDao.get_data_item_by_id(item_id)
    need_sync = False
    logger.info("query_file dlink:{}".format(data_item.dlink))
    if not data_item.dlink_updated_at or not data_item.dlink:
        need_sync = True
    elif data_item.dlink_updated_at:
        dt = arrow.get(
            data_item.dlink_updated_at).replace(tzinfo=self.default_tz)
        if dt.shift(hours=+DLINK_TIMEOUT) < arrow.now():
            need_sync = True
    account_id = data_item.account_id
    acc: Accounts = DataDao.account_by_id(account_id)
    flv_json = None
    need_thumbs = False
    if is_image_media(data_item.filename) and data_item.category == 3:
        need_thumbs = True
    if need_sync:
        pan_acc: PanAccounts = self.get_pan_account(
            data_item.panacc, data_item.account_id)
        sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
            pan_acc.access_token, int(data_item.fs_id), need_thumbs)
        if sync_dlink:
            data_item.dlink = "{}&access_token={}".format(
                sync_dlink, pan_acc.access_token)
            data_item.dlink_updated_at = get_now_datetime()
            data_item_params = {
                "dlink": data_item.dlink,
                "dlink_updated_at": data_item.dlink_updated_at
            }
            if need_thumbs:
                # prefer the largest thumbnail available
                for key in ("url3", "url2", "url1", "icon"):
                    if key in thumbs:
                        data_item_params["thumb"] = thumbs[key]
                        data_item.thumb = data_item_params["thumb"]
                        break
            DataDao.update_data_item(data_item.id, data_item_params)
    used_pan_acc_id = data_item.panacc
    if data_item:
        data_item.size = int(data_item.size / 1024)  # bytes -> KB for display
        f_type = guess_file_type(data_item.filename)
        params = {"item": DataItem.to_dict(data_item, ['id', 'parent'])}
        params["item"]["id"] = obfuscate_id(data_item.id)
        params["item"]["type"] = f_type
        params["item"]["media_type"] = self.check_data_item_media_type(
            data_item.category, data_item.filename)
        params["item"]["dlink_tokens"] = [used_pan_acc_id]
        return params
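
# --- Standalone sketch (illustrative) ----------------------------------------------
# The dlink staleness test from query_file, isolated. The 8-hour timeout and
# the timezone are placeholder assumptions standing in for DLINK_TIMEOUT and
# self.default_tz.
import arrow

def _is_dlink_stale(dlink_updated_at, timeout_hours=8, tz='Asia/Shanghai'):
    if not dlink_updated_at:
        return True
    dt = arrow.get(dlink_updated_at).replace(tzinfo=tz)
    return dt.shift(hours=+timeout_hours) < arrow.now()
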