Example #1
 def fetch_tmp_manual_file(self, key, _pan_acc: PanAccounts,
                           share_log: ShareLogs, parent_dir):
      # search under TMP_MANUAL_PATH (passed in as parent_dir)
     jsonrs = restapi.file_search(_pan_acc.access_token,
                                  key=key,
                                  parent_dir=parent_dir)
     result = []
     for finfo in jsonrs:
         if "fs_id" in finfo:
             md5_val = finfo["md5"]
             if md5_val == share_log.md5_val:
                 dlink = ""
                 # sync_list = restapi.sync_file(_pan_acc.access_token, [int(finfo["fs_id"])])
                 sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
                     _pan_acc.access_token, int(finfo["fs_id"]))
                 expired_at = None
                 if sync_dlink:
                     dlink = "%s&access_token=%s" % (sync_dlink,
                                                     _pan_acc.access_token)
                     expired_at = arrow.now(self.default_tz).shift(
                         hours=+DLINK_TIMEOUT).datetime
                 transfer_log_id, transfer_log = DataDao.new_transfer_log(
                     share_log.id, str(finfo["fs_id"]), finfo["path"],
                     finfo["server_filename"], finfo["size"],
                     finfo["category"], finfo["md5"], dlink,
                     _pan_acc.user_id, expired_at, _pan_acc.id)
                 if transfer_log_id:
                     result.append(TransferLogs.to_dict(transfer_log))
                     DataDao.update_pan_account_by_pk(
                         _pan_acc.id, {'use_count': _pan_acc.use_count + 1})
                     continue
     return result
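Note: each transfer log above is stamped with a download link that embeds the access token, plus an expiry DLINK_TIMEOUT hours out. A minimal sketch of just that stamping step, with assumed stand-ins for the project's DLINK_TIMEOUT constant and default timezone:

import arrow

DLINK_TIMEOUT = 8             # hours; assumed value, the real constant comes from config
DEFAULT_TZ = "Asia/Shanghai"  # assumed timezone

def stamp_dlink(sync_dlink, access_token):
    # append the token so the link is directly fetchable, and record
    # when it should be treated as stale
    dlink = "%s&access_token=%s" % (sync_dlink, access_token)
    expired_at = arrow.now(DEFAULT_TZ).shift(hours=+DLINK_TIMEOUT).datetime
    return dlink, expired_at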
Example #2
 def check_file_by_key_search(self, key, _md5_val, share_log_id,
                              pan_acc: PanAccounts):
     jsonrs = restapi.file_search(pan_acc.access_token,
                                  key=key,
                                  parent_dir=self.TMP_PATH)
     for finfo in jsonrs:
         if "fs_id" in finfo:
             md5_val = finfo["md5"]
             if md5_val == _md5_val:
                 dlink = ""
                 sync_list = restapi.sync_file(pan_acc.access_token,
                                               [int(finfo["fs_id"])])
                 expired_at = None
                 for sync_item in sync_list:
                     if finfo["fs_id"] == sync_item['fs_id']:
                         dlink = sync_item['dlink']
                         dlink = "%s&access_token=%s" % (
                             dlink, pan_acc.access_token)
                         expired_at = arrow.now(self.default_tz).shift(
                             hours=+DLINK_TIMEOUT).datetime
                         break
                 transfer_log_id, transfer_log = DataDao.new_transfer_log(
                     share_log_id, str(finfo["fs_id"]), finfo["path"],
                     finfo["server_filename"], finfo["size"],
                     finfo["category"], finfo["md5"], dlink,
                     pan_acc.user_id, expired_at, pan_acc.id)
                 if transfer_log_id:
                     # if transfer_log.dlink:
                     #     transfer_log.dlink = restapi.query_file_head(transfer_log.dlink)
                     # result.append(TransferLogs.to_dict(transfer_log))
                     DataDao.update_pan_account_by_pk(
                         pan_acc.id, {'use_count': pan_acc.use_count + 1})
                     return transfer_log
     return None
Example #3
    def recheck_transfer_d_link(self, transfer_log_id):
        # print("recheck_transfer_d_link transfer_log_id:", transfer_log_id)
        tl: TransferLogs = DataDao.query_transfer_logs_by_pk_id(
            transfer_log_id)

        if tl:
            # print("expired_at:", arrow.get(tl.expired_at).replace(tzinfo=self.default_tz), ",now:", arrow.now())
            if arrow.get(tl.expired_at).replace(
                    tzinfo=self.default_tz) < arrow.now():
                __pan_acc = self.get_pan_account(tl.pan_account_id,
                                                 tl.account_id)
                sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
                    __pan_acc.access_token, int(tl.fs_id))
                # print("sync_dlink:", sync_dlink)
                if sync_dlink:
                    # tl.dlink = sync_dlink
                    tl.dlink = "%s&access_token=%s" % (sync_dlink,
                                                       __pan_acc.access_token)
                    tl.expired_at = arrow.now(
                        self.default_tz).shift(hours=+DLINK_TIMEOUT).datetime
                    # print("new expired_at:", tl.expired_at)
                    DataDao.update_transfer_log_by_pk(
                        tl.id, {
                            "dlink": tl.dlink,
                            "expired_at": tl.expired_at
                        })
                    return TransferLogs.to_dict(tl)
            else:
                return TransferLogs.to_dict(tl)
        else:
            logger.info("[transfer_log:%s] not exists!" % transfer_log_id)
        return None
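The staleness test above re-tags the stored naive datetime with the service timezone before comparing it to now. The same check in isolation (tz is an assumed zone name):

import arrow

def is_stale(expired_at, tz="Asia/Shanghai"):
    # stored datetimes are naive, so attach the service tz before comparing
    return arrow.get(expired_at).replace(tzinfo=tz) < arrow.now()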
Example #4
 def cb(pan_accounts):
     now = arrow.now(self.default_tz)
     pan: PanAccounts = None
     need_sleep = False
     for pan in pan_accounts:
         if pan.refresh_token:
             if need_sleep:
                 time.sleep(3)
             jsonrs = restapi.refresh_token(pan.refresh_token, True)
             access_token = jsonrs["access_token"]
             refresh_token = jsonrs["refresh_token"]
             expires_in = jsonrs["expires_in"] - 20 * 60  # seconds
             expires_at = now.shift(seconds=+expires_in).datetime
             now_tm = get_now_datetime()
             DataDao.update_pan_account_by_pk(
                 pan.id, {
                     "access_token": access_token,
                     "refresh_token": refresh_token,
                     "expires_at": expires_at,
                     "token_updated_at": now_tm
                 })
             pan.access_token = access_token
             pan.refresh_token = refresh_token
             pan.expires_at = expires_at
             pan.token_updated_at = now_tm
             try:
                 log.info("sync pan user[{},{}] info to db!".format(
                     access_token, pan.user_id))
                 self.sync_pan_user_info(access_token, pan.user_id)
             except Exception as e:
                 log.error("sync_pan_user_info err:", exc_info=True)
             need_sleep = True
     return pan
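Note the 20-minute margin subtracted from expires_in: the token is recorded as expiring early, so the next pass renews it before the provider actually rejects it. The margin arithmetic in isolation:

import arrow

def token_expiry(expires_in_seconds, margin_seconds=20 * 60):
    # treat the token as expired `margin_seconds` before the provider would
    return arrow.now().shift(seconds=+(expires_in_seconds - margin_seconds)).datetime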
Example #5
    def _default_new_user_build_user_payload(self, account: Accounts, params):
        auth_user_dict = AuthDao.auth_user(account.id)
        fuzzy_id = obfuscate_id(account.id)
        auth_user_dict['id'] = fuzzy_id
        auth_user_dict['_id'] = account.id
        auth_user_dict['login_updated_at'] = account.login_updated_at

        access_token = params.get('access_token')
        refresh_token = params.get('refresh_token')
        expires_at = params.get('expires_at')

        account_ext_ctx = params.get('accext', {})

        client_id = PAN_SERVICE['client_id']
        client_secret = PAN_SERVICE['client_secret']
        account_ext_ctx['account_id'] = account.id
        log.info("will new account ext:{}".format(account_ext_ctx))

        acc_ext: AccountExt = DataDao.new_accounts_ext(**account_ext_ctx)
        log.info("new account ext ok acc_ext id:{}".format(acc_ext.id))
        pan_acc_id = DataDao.new_pan_account(account.id,
                                             account.name,
                                             client_id,
                                             client_secret,
                                             access_token,
                                             refresh_token,
                                             expires_at,
                                             get_now_datetime(),
                                             pin=1,
                                             bd_uid=acc_ext.user_id)
        auth_user_dict['_p'] = obfuscate_id(pan_acc_id)
        # print("auth_user_dict:", auth_user_dict)
        tk = make_account_token(auth_user_dict)
        # print('make_account_token:', tk)
        return tk, auth_user_dict
Example #6
 def save_user(self, mobile_no, passwd):
     acc: Accounts = DataDao.account_by_name(mobile_no)
     if acc:
         return None, None, "exist"
     else:
         user_token, user_ext_dict = DataDao.new_account(
             mobile_no, passwd, get_now_datetime(),
             lambda account: auth_service.build_user_payload(account))
         fuzzy_id = user_ext_dict['id']
         return user_token, fuzzy_id, None
Example #7
 def sync_pan_user_info(self, access_token, account_id):
     jsonrs = restapi.sync_user_info(access_token, True)
     if jsonrs:
         userid = jsonrs.get('userid', '')
         username = jsonrs.get('username', '')
         realname = jsonrs.get('realname', '')
         portrait = jsonrs.get('portrait', '')
         userdetail = jsonrs.get('userdetail', '')
         birthday = jsonrs.get('birthday', '')
         marriage = jsonrs.get('marriage', '')
         sex = jsonrs.get('sex', '')
         blood = jsonrs.get('blood', '')
         figure = jsonrs.get('figure', '')
         constellation = jsonrs.get('constellation', '')
         education = jsonrs.get('education', '')
         trade = jsonrs.get('trade', '')
         job = jsonrs.get('job', '')
         is_realname = jsonrs.get('is_realname', '')
         if not DataDao.check_account_ext_exist(userid):
             return DataDao.new_accounts_ext(userid,
                                             username,
                                             realname,
                                             portrait,
                                             userdetail,
                                             birthday,
                                             marriage,
                                             sex,
                                             blood,
                                             figure,
                                             constellation,
                                             education,
                                             trade,
                                             job,
                                             is_realname,
                                             account_id=account_id)
         else:
             DataDao.update_account_ext_by_user_id(
                 userid,
                 dict(username=username,
                      realname=realname,
                      portrait=portrait,
                      userdetail=userdetail,
                      birthday=birthday,
                      marriage=marriage,
                      sex=sex,
                      blood=blood,
                      figure=figure,
                      constellation=constellation,
                      education=education,
                      trade=trade,
                      job=job,
                      is_realname=is_realname,
                      account_id=account_id))
     return None
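The long run of jsonrs.get(...) calls pulls a fixed set of optional profile keys with '' as the default; the same fan-out condenses to one comprehension (field list copied from the code above):

PROFILE_FIELDS = ["userid", "username", "realname", "portrait", "userdetail",
                  "birthday", "marriage", "sex", "blood", "figure",
                  "constellation", "education", "trade", "job", "is_realname"]

def extract_profile(jsonrs):
    # missing keys default to '' exactly as in the original
    return {f: jsonrs.get(f, '') for f in PROFILE_FIELDS}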
Example #8
 def recover_sized_zero(parent_id):
     DataDao.update_data_item_by_parent_id(parent_id, {'sized': 0})
     if recursive:
         size = 100
         offset = 0
          rs_len = size  # sentinel so the loop runs at least once
          while rs_len == size:
              sub_dir_list = DataDao.query_data_item_by_parent(
                  parent_id, offset=offset, limit=size)
              rs_len = len(sub_dir_list)
              offset += size  # advance the page; without this the loop re-reads the first page forever
              for s_dir in sub_dir_list:
                  recover_sized_zero(s_dir.id)
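With the offset fix, this is the usual fetch-until-short-page loop that several later examples repeat. A generic, self-contained version, where fetch_page stands in for the DataDao pagination query:

def iter_pages(fetch_page, size=100):
    offset = 0
    while True:
        rows = fetch_page(offset=offset, limit=size)
        yield from rows
        if len(rows) < size:  # a short page means nothing is left
            break
        offset += size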
Example #9
 def fetch_root_item_by_user(self, user_id):
      pan_acc_map = {}
      if MASTER_ACCOUNT_ID == user_id:
          _pan_acc_list = DataDao.pan_account_list(user_id, 100)
          pa: PanAccounts = None
          for pa in _pan_acc_list:
              pan_acc_map[pa.id] = pa
      root_item_ms = DataDao.get_root_item_by_user_id(user_id)

      def tree_node(item, pan_name):
          # node payload shared by both loops below
          return {
              "id": obfuscate_id(item.id),
              "text": pan_name,
              "data": {
                  "path": item.path,
                  "_id": obfuscate_id(item.id),
                  "server_ctime": item.server_ctime,
                  "isdir": 1,
                  "source": 'local',
                  "fn": "",
                  "alias": "",
                  "pos": 0
              },
              "children": True,
              "icon": "folder"
          }

      params = []
      for item in root_item_ms:
          pan = item.pan
          if pan.id in pan_acc_map:
              pan_acc_map.pop(pan.id)
          params.append(tree_node(item, pan.name))
      # pan accounts that still have no root item get one created on the fly
      for pan_id, _pan in pan_acc_map.items():
          _, root_item = DataDao.new_root_item(user_id, pan_id)
          params.append(tree_node(root_item, _pan.name))
      return params
Example #10
 def __clear_data_items(self, parent_id, synced=-1, is_dir=True):
     will_del_data_items = DataDao.query_data_item_by_parent_synced(parent_id, synced=synced, is_dir=is_dir, limit=500)
     di: DataItem = None
     doc_ids = []
     for di in will_del_data_items:
         if di.isdir == 1:
             DataDao.update_data_item_by_parent_id(di.id, {"synced": -1})
             self.__clear_data_items(di.id, -1, True)
             self.__clear_data_items(di.id, -1, False)
         doc_ids.append(di.id)
     if doc_ids:
         log.info("delete file by parent_id:{}".format(parent_id))
         self.es_dao_item.bulk_delete(doc_ids)
         DataDao.del_data_item_by_parent_synced(parent_id, synced, is_dir)
Example #11
    def __clear_data_item(self, item_id):
        # out_pan_acc: PanAccounts = DataDao.pan_account_by_id(pan_id)
        _es_dao_item = self.es_dao_item

        def deep_clear(di, pan_acc):
            if di:
                if di.isdir == 1:
                    # walk the subtree page by page and clear each child
                    size = 50
                    l = size
                    while l == size:
                        sub_items = DataDao.query_data_item_by_parent_all(di.id, limit=size)
                        l = 0
                        if sub_items:
                            l = len(sub_items)
                            for sub_item in sub_items:
                                deep_clear(sub_item, pan_acc)
                        time.sleep(0.2)
                else:
                    fs_id = di.fs_id
                    share_log: ShareLogs = DataDao.query_shared_log_by_fs_id(fs_id)
                    if share_log:
                        self.clear_share_log(share_log.id)
                    transfer_logs = DataDao.query_transfer_logs_by_fs_id(fs_id)
                    if transfer_logs:
                        for tl in transfer_logs:
                            CommunityDao.del_transfer_log_by_id(tl.id)
                log.info("deep clear delete data item:{}, filename:{}".format(di.id, di.filename))
                _es_dao_item.delete(di.id)
                DataDao.del_data_item_by_id(di.id)

        root_di: DataItem = DataDao.get_data_item_by_id(item_id)
        if root_di:
            pan_acc: PanAccounts = auth_service.get_pan_account(root_di.panacc, root_di.account_id)
            deep_clear(root_di, pan_acc)
            if root_di.parent:
                p_data_item = DataDao.get_data_item_by_fs_id(root_di.parent)
                if p_data_item:
                    mpan_service.update_dir_size(p_data_item)
            jsonrs = restapi.del_file(pan_acc.access_token, root_di.path)
            if "errno" in jsonrs and jsonrs["errno"]:
                errmsg = jsonrs.get("errmsg", "")
                if not errmsg:
                    errmsg = "clear failed!"
                return {"state": -1, "errmsg": errmsg}
            else:
                return {"state": 0}
        else:
            log.info("deep clear , can not find root item:{}".format(item_id))
            return {"state": -1, "errmsg": "[{}] not exists.".format(item_id)}
Example #12
        def req_file_list(data_item: DataItem, pan_acc: PanAccounts):
            if not data_item:
                return
            from_dir = data_item.path
            parent_id = data_item.id
            log.info("sync file:{}, filename:{}".format(data_item.id, data_item.filename))
            if data_item.isdir == 1:
                json_data_list = restapi.file_list(pan_acc.access_token, from_dir)
                if json_data_list is not None:
                    log.info("update synced is -1, parent_id:{}".format(parent_id))
                    DataDao.update_data_item_by_parent_id(parent_id, {"synced": -1})
                else:
                    log.warning("json_data_list is None!")
                if json_data_list:
                    for fi in json_data_list:
                        item_map = dict(category=fi['category'],
                                        isdir=fi['isdir'],
                                        filename=fi['server_filename'],
                                        server_ctime=fi['server_ctime'],
                                        fs_id=fi['fs_id'],
                                        path=fi['path'],
                                        size=fi['size'],
                                        md5_val=fi.get('md5', ''),
                                        account_id=pan_acc.user_id,
                                        panacc=pan_acc.id,
                                        parent=parent_id,
                                        synced=0,
                                        pin=0
                                        )
                        di: DataItem = DataDao.get_data_item_by_fs_id(item_map['fs_id'])

                        if di:
                            item_map.pop('pin')
                            DataDao.update_data_item(di.id, item_map)
                            # re-fetch under a fresh name; re-using `data_item` here would
                            # clobber the directory being synced and break the final
                            # "synced": 1 update at the bottom of this function
                            refreshed_item: DataItem = DataDao.get_data_item_by_id(di.id)
                            # print("will update data item:", item_map)
                        else:
                            DataDao.save_data_item(fi['isdir'], item_map)
                            # print("will save data item:", item_map)
                        time.sleep(0.1)
                else:
                    log.info("have not any sub files!")
                self.__clear_data_items(parent_id, -1, True)
                self.__clear_data_items(parent_id, -1, False)
            DataDao.update_data_item(data_item.id, {"synced": 1})
Example #13
 def free(self, user_id, item_id, desc, es_tags):
     result = {'state': 0}
     f_tag = ES_TAG_MAP['FREE']
     data_item: DataItem = DataDao.get_data_item_by_id(item_id)
     item_id_str = str(item_id)
     urc: UserRootCfg = ManDao.check_root_cfg_fetch(fs_id=item_id_str)
     if urc:
         if urc.pin != 0:
             ManDao.update_root_cfg_by_id(urc.id, {'pin': 0})
             # tag free
     else:
         urc_id = ManDao.new_root_cfg(item_id_str, data_item.filename,
                                      user_id, data_item.panacc, desc)
         if not urc_id:
             result['state'] = -3
             result['errmsg'] = "新建免费资源根目录失败!"
     if es_tags:
         es_tags.append(f_tag)
     else:
         es_tags = [f_tag]
     if result['state'] == 0:
         _params = {'tags': es_tags}
         es_up_params = es_dao_local().filter_update_params(_params)
         if es_up_params:
             isok = es_dao_local().update_fields(item_id, **es_up_params)
             if not isok:
                 result['state'] = -2
                 result['errmsg'] = "索引更新失败!"
     return result
Example #14
 def user_list(self, kw, size, offset):
     items = DataDao.query_user_list_by_keyword(kw,
                                                offset=offset,
                                                limit=size)
     item_json_list = []
     # print("items:", items)
     for item in items:
         item_json_list.append(Accounts.to_dict(item, BASE_FIELDS + ['id']))
     return item_json_list
Example #15
 def get_pan_account(self, pan_account_id, account_id) -> PanAccounts:
     # if get_now_ts() - self.PAN_ACC_CACHE_LAST_TIME > PAN_ACC_CACHE_TIMEOUT:
     #     pan_acc_list = DataDao.pan_account_list(account_id)
     #     self.PAN_ACC_CACHE = {pan_acc_dict.id: pan_acc_dict for pan_acc_dict in pan_acc_list}
     #     self.PAN_ACC_CACHE_LAST_TIME = get_now_ts()
     pan_acc_cache = self.checkout_pan_accounts(account_id)
     if pan_account_id in pan_acc_cache:
         return pan_acc_cache[pan_account_id]
     return DataDao.pan_account_by_id(pan_account_id)
Example #16
 def clear_share_log(self, share_log_id):
     transfer_logs = CommunityDao.query_transfer_logs_by_share_log_id(share_log_id)
     pan_map_cache = {}
     if transfer_logs:
         tl: TransferLogs = None
         for tl in transfer_logs:
             pan_id = tl.pan_account_id
             if pan_id not in pan_map_cache:
                 pan_acc: PanAccounts = DataDao.pan_account_by_id(pan_id)
                 pan_map_cache[pan_id] = pan_acc
             pan_acc = pan_map_cache[pan_id]
             restapi.del_file(pan_acc.access_token, tl.path)
             di: DataItem = DataDao.query_data_item_by_fs_id(tl.fs_id)
             if di:
                 self.es_dao_item.delete(di.id)
                 DataDao.del_data_item_by_id(di.id)
             CommunityDao.del_transfer_log_by_id(tl.id)
     CommunityDao.del_share_log_by_id(share_log_id)
Example #17
 def rename(self, item_id, old_name, alias_name, source):
     result = {'state': 0}
     if "local" == source:
         data_item: DataItem = DataDao.get_data_item_by_id(item_id)
         # print("rename data_item:", DataItem.to_dict(data_item))
         # print("old_name:", old_name)
         # print("alias_name:", alias_name)
         if data_item.filename != old_name or data_item.aliasname != alias_name:
             params = {"filename": old_name, "aliasname": alias_name}
              fs_id = int(data_item.fs_id)  # important: the netdisk service types fs_id as Int; locally it is stored as varchar so ids of any type fit
             pan_acc: PanAccounts = auth_service.get_pan_account(data_item.panacc, data_item.account_id)
             file_info_json = restapi.sync_file(pan_acc.access_token, [fs_id], False)
             if file_info_json:
                 find_item = False
                 for sync_item in file_info_json:
                     if sync_item['fs_id'] == fs_id:
                         find_item = True
                         origin_filename = sync_item['filename']
                         if origin_filename != old_name:
                             origin_path = sync_item['path']
                             _jsonrs = restapi.file_rename(pan_acc.access_token, origin_path, old_name)
                             err_no = _jsonrs.get("errno", None)
                             if 'info' in _jsonrs and not err_no:
                                 info_list = _jsonrs['info']
                                 # print("rename info_list:", info_list)
                                 if origin_path != data_item.path:
                                     params["path"] = origin_path
                                 DataDao.update_data_item(data_item.id, params)
                             else:
                                 err_msg = _jsonrs.get("errmsg", "")
                                 result['state'] = -1
                                 result['err'] = err_msg
                                 return result
                         else:
                             DataDao.update_data_item(data_item.id, params)
                 if not find_item:
                     result['state'] = -2
                     result['errmsg'] = "not find file in NetDisk,[{}]".format(data_item.filename)
                     return result
             else:
                 result['state'] = -2
                 result['err'] = "not find file in NetDisk,[{}]".format(data_item.filename)
                 return result
         return result
Example #18
def sync_db_file_list_to_es():
    size = 100
    offset = 0
    count = 0
    get_size = size  # sentinel so the loop runs at least once; a short page ends it
    dog = 100000     # watchdog: hard upper bound on iterations
    while get_size == size and dog > 0:
        sql = "select * from dataitem where isdir=%s limit %s,%s" % (1, offset,
                                                                     size)
        print("sql:", sql)
        data_item_list = DataItem.raw(sql)
        get_size = len(data_item_list) if data_item_list else 0
        count = count + get_size
        for data_item in data_item_list or []:
            if data_item.sized == 0:
                mpan_service.update_dir_size(data_item,
                                             recursive=False,
                                             reset_sub_dir=False)
            DataDao.sync_data_item_to_es(data_item)
        offset = offset + size
        time.sleep(0.3)
        print("sync_dir_file_list did count:", count)
        dog = dog - 1
Example #19
 def all_pan_acc_list_by_user(self, user_id):
     # acc: Accounts = DataDao.account_by_id(user_id)
     pan_acc_list = DataDao.pan_account_list(user_id)
     need_renew_pan_acc = []
     for pan_acc in pan_acc_list:
         need_renew_pan_acc.append({
             "id": pan_acc.id,
             "name": pan_acc.name,
             "use_cnt": pan_acc.use_count,
             "refresh": True,
             'auth': self.pan_auth
         })
     return need_renew_pan_acc
Example #20
 def check_mid_parent(parent_item_id):
      pos = len(p_fuzzy_id_list) - 1
      check_pass = True
      while pos > 0:
          pos = pos - 1
          _item_id = decrypt_id(p_fuzzy_id_list[pos])
          if DataDao.check_data_item_exists_by_parent(
                  _item_id, parent_item_id):
              parent_item_id = _item_id
          else:
              check_pass = False
              break
      return check_pass, parent_item_id
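check_mid_parent walks the decrypted id chain hop by hop, demanding that each step really is a parent/child edge before trusting the path. The same idea over a plain id list, with exists standing in for DataDao.check_data_item_exists_by_parent:

def check_chain(ids, exists):
    # ids are ordered child-to-parent; every adjacent pair must be an edge
    for child, parent in zip(ids, ids[1:]):
        if not exists(child, parent):
            return False
    return True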
Example #21
 def load_pan_acc_list_by_user(self, user_id):
      pan_acc_list = DataDao.pan_account_list(user_id)
      need_renew_pan_acc = []

      def renew_entry(pan_acc, refresh):
          return {
              "id": pan_acc.id,
              "name": pan_acc.name,
              "use_cnt": pan_acc.use_count,
              "refresh": refresh,
              'auth': self.pan_auth
          }

      for pan_acc in pan_acc_list:
          if pan_acc.client_id != self.client_id or pan_acc.client_secret != self.client_secret:
              # registered under different client credentials: needs a full re-auth
              need_renew_pan_acc.append(renew_entry(pan_acc, False))
          elif pan_acc.access_token and pan_acc.token_updated_at:
              tud = arrow.get(
                  pan_acc.token_updated_at).replace(tzinfo=self.default_tz)
              if (arrow.now(self.default_tz) -
                      tud).total_seconds() > PAN_ACCESS_TOKEN_TIMEOUT:
                  # access token older than the timeout: refresh it
                  need_renew_pan_acc.append(renew_entry(pan_acc, True))
          else:
              # no usable token at all: refresh
              need_renew_pan_acc.append(renew_entry(pan_acc, True))
      return need_renew_pan_acc
Example #22
 def recursive_check_dir_size(dir_list: list, pos, rs: dict):
     if pos >= len(dir_list):
         return rs
     p_dir_dict = dir_list[pos]
     p_dir_id = p_dir_dict['id']
     sub_dir: DataItem = DataDao.find_need_update_size_dir(
         parent_id=p_dir_dict['id'])
     if sub_dir:
         recursive_check_dir_size([DataItem.to_dict(sub_dir)], 0, rs)
         recursive_check_dir_size(dir_list, pos, rs)
     else:
         ori_size = p_dir_dict['size']
         s: int = DataDao.sum_size_dir(parent_id=p_dir_id)
         if not p_dir_dict['sized'] or s != ori_size:
             rs['change'] = True
             DataDao.update_data_item(p_dir_id, {'size': s, 'sized': 1})
             p_dir_dict['size'] = s
             p_dir_dict['sized'] = 1
             print("changed:", True)
         print('dir id:', p_dir_dict['id'], ',size:', s, ',ori_size:',
               ori_size)
         recursive_check_dir_size(dir_list, pos + 1, rs)
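The recursion above enforces a single invariant: a directory's size is summed only after every child directory has been sized. A toy, dict-based model of that bottom-up order, with no DAO involved:

def size_tree(node):
    # node = {'size': int, 'children': [...]}; children first, then the parent
    total = node.get('size', 0)
    for child in node.get('children', []):
        total += size_tree(child)
    node['size'] = total
    node['sized'] = 1
    return total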
Example #23
 def query_root_list(self, account_id=None):
     if account_id:
         root_item_list = DataDao.query_root_files_by_user_id(account_id)
     else:
         root_item_list = DataDao.query_free_root_files()
     params = []
     for item in root_item_list:
         item_id = item.fs_id
         if "local" == item.source:
             fuzzy_id = obfuscate_id(int(item_id))
             # print("query_root_list item_id:", item_id, ",fuzzy_id:", fuzzy_id)
             params.append({
                 "id": fuzzy_id,
                 "text": item.desc,
                 "data": {
                     "source": item.source,
                     "_id": fuzzy_id,
                     "tag": "free",
                     "path": "#"
                 },
                 "children": True,
                 "icon": "folder"
             })
     return params
Example #24
 def __run():
     dir_data_item_id = item_id
     key = "sync:pan:dir:%s_%s" % (user_id, pan_id)
     not_exists = cache_service.put(key, get_now_ts())
     if not_exists:
         rs_key = "synced:pan:dir:%s_%s" % (user_id, pan_id)
         cache_service.rm(rs_key)
         root_data_item: DataItem = DataDao.get_data_item_by_id(dir_data_item_id)
         self.sync_dir_file_list(root_data_item, recursion)
         self.__thread = None
         cache_service.rm(key)
         cache_service.put(rs_key, root_data_item.id)
         mpan_service.update_dir_size(root_data_item)
         try_release_conn()
Example #25
 def sync_dir_file_list(self, data_item, recursion=False):
     # parent_id = 0
     # get_size = 0
     # count = 0
     is_dir = True
     size = 500
     offset = 0
     self.get_file_list_by_list([data_item])
     if data_item.isdir and recursion:
         data_item_list = DataDao.query_data_item_by_parent_pin(parent_id=data_item.id, pin=0, is_dir=is_dir,
                                                                offset=offset, limit=size)
         if data_item_list and recursion:
             # get_size = len(data_item_list)
             # count = count + get_size
             self.get_file_list_by_list(data_item_list)
             for di in data_item_list:
                 self.sync_dir_file_list(di, recursion)
Example #26
        def to_do(key, rs_key):
            _result = {'state': 0}
            data_item: DataItem = DataDao.get_data_item_by_id(item_id)
            _rs, share_log = open_service.build_shared_log(data_item)
            if not share_log:
                if 'state' in _rs:
                    _result['state'] = _rs['state']
                if 'err' in _rs:
                    _result['state'] = -9
                    _result['err'] = _rs['err']
            else:
                if share_log.is_black == 1:
                    _result['state'] = -9
                    _result['err'] = share_log.err
                else:
                    _result['item'] = {
                        "link": share_log.link,
                        "pass": share_log.password
                    }
                    _result['pos'] = 1
            # copy

            return _result
Example #27
    def recheck_shared_d_link(self, shared_log_id):
        share_log: ShareLogs = DataDao.query_shared_log_by_pk_id(shared_log_id)
        if share_log:
            data_item: DataItem = DataDao.get_data_item_by_fs_id(
                share_log.fs_id)
            need_sync = False
            # print("query_file dlink:", data_item.dlink)
            if not data_item.dlink_updated_at or not data_item.dlink:
                need_sync = True
            elif data_item.dlink_updated_at:
                dt = arrow.get(
                    data_item.dlink_updated_at).replace(tzinfo=self.default_tz)
                if dt.shift(hours=+DLINK_TIMEOUT) < arrow.now():
                    need_sync = True

            if need_sync:
                pan_acc: PanAccounts = self.get_pan_account(
                    data_item.panacc, data_item.account_id)
                # sync_list = restapi.sync_file(self.pan_acc.access_token, [int(data_item.fs_id)])
                sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
                    pan_acc.access_token, int(data_item.fs_id))
                if sync_dlink:
                    data_item.dlink = sync_dlink
                    data_item.dlink_updated_at = get_now_datetime()
                    DataDao.update_data_item(
                        data_item.id, {
                            "dlink": data_item.dlink,
                            "dlink_updated_at": data_item.dlink_updated_at
                        })

            share_log.dlink = data_item.dlink
            DataDao.update_share_log_by_pk(share_log.id,
                                           {'dlink': data_item.dlink})
            return ShareLogs.to_dict(share_log)
        return None
Example #28
    def get(self):
        path = self.request.path
        # print(path)
        if path.endswith("/list"):
            parent = self.get_argument("parent", default='55')
            item_list = DataDao.query_data_item_by_parent(int(parent), True)
            params = {"list": item_list}
            # print("params:", params)
            # for item in item_list:
            #     print(item.filename)
            self.render('list.html', **params)
        elif path.endswith("/fload"):
            source = self.get_argument("source", "")
            node_id = self.get_argument("id")
            # parent_path = self.get_argument("path")
            # if not parent_path.endswith("/"):
            #     parent_path = "%s/" % parent_path
            logger.info("fload node_id:{},source:{}".format(node_id, source))
            # parent_id = 55
            params = []
            if node_id != '#':
                # if "shared" == source:
                #     params = pan_service.query_shared_file_list(parent_id, self.request.user_id)
                if "assets" == source:
                    if 'assets_0' == node_id:
                        params = ProductDao.query_assets_by_ref_id_for_tree(
                            self.ref_id)
                elif "free" == source:
                    if 'free_0' == node_id:
                        params = pan_service.query_root_list()
                elif "self" == source:
                    if 'self_0' == node_id:
                        if not self.default_pan_id:
                            pan_acc = auth_service.default_pan_account(
                                self.user_id)
                            self.default_pan_id = pan_acc.id
                        if self.default_pan_id:
                            params = pan_service.query_client_root_list(
                                self.default_pan_id)
                    else:
                        node_id_val = decrypt_id(node_id)
                        parent_id = int(node_id_val)
                        params = pan_service.query_client_sub_list(
                            parent_id, self.ref_id)
                elif "empty" == source:
                    pass
                else:
                    node_id_val = decrypt_id(node_id)
                    parent_id = int(node_id_val)
                    params = pan_service.query_file_list(parent_id)
            else:
                # params = pan_service.query_root_list(self.request.user_id)
                for nid, txt, src, has_children, icon in (
                        ("free_0", PAN_TREE_TXT['free_root'], "free", True, "folder"),
                        ("assets_0", PAN_TREE_TXT['buy_root'], "assets", True, "folder"),
                        ("self_0", PAN_TREE_TXT['self_root'], "self", True, "folder"),
                        ("empty_0", PAN_TREE_TXT['empty_root'], "empty", False, "file")):
                    params.append({
                        "id": nid,
                        "text": txt,
                        "data": {
                            "source": src
                        },
                        "children": has_children,
                        "icon": icon
                    })

            self.to_write_json(params)
        elif path.endswith("/search"):
            params = {}
            self.render('search.html', **params)
        elif path.endswith("/load"):
            kw = self.get_body_argument("kw")
            source = self.get_body_argument("source")
            print("kw:", kw)
            print("source:", source)
            kw = kw.replace(' ', '%')
            page = self.get_body_argument("page")
            size = 100
            offset = int(page) * size
            sp: SearchParams = SearchParams.build_params(offset, size)
            sp.add_must(value=kw)
            es_dao_fun = es_dao_local
            if source:
                sp.add_must(field='source', value=source)
                es_dao_fun = es_dao_share
                # es_dao_fun = es_dao_dir
            es_body = build_query_item_es_body(sp)
            print("es_body:", json.dumps(es_body))
            es_result = es_dao_fun().es_search_exec(es_body)
            hits_rs = es_result["hits"]
            total = hits_rs["total"]
            datas = [_s["_source"] for _s in hits_rs["hits"]]

            # print("es_result:", es_result)
            # item_list = DataDao.query_file_list_by_keyword(kw, offset=offset, limit=size)
            # objs = [object_to_dict(o, FIELDS) for o in item_list]
            # has_next = len(objs) == size
            has_next = offset + size < total
            rs = {"data": datas, "has_next": has_next}
            # print("rs:", rs)
            self.to_write_json(rs)
        elif path.endswith("/finfo"):
            # item_id = self.get_argument("id")
            item_fuzzy_id = self.get_argument("id")
            item_id = int(decrypt_id(item_fuzzy_id))
            params = pan_service.query_file(item_id)
            self.to_write_json(params)
        elif path.endswith("/readydownload"):
            fs_id = self.get_argument("fs_id")
            print("readydownload fs_id:", fs_id)
            params, share_log, data_item = pan_service.share_folder(fs_id)
            # sub_params = []
            min_size = 6000
            # min_size = 60
            if data_item.size > min_size:
                sub_params = pan_service.sub_account_transfer(share_log)
                result = {"subs": sub_params}
            else:
                result = {"master": params}
            # result = {"master": params, "subs": sub_params}
            self.to_write_json(result)
        elif path.endswith("/check_transfer"):
            transfer_log_id = self.get_argument("id")
            rs = {}
            print("transfer_log_id:", transfer_log_id)
            if transfer_log_id:
                t = pan_service.recheck_transfer_d_link(int(transfer_log_id))
                if t:
                    rs = t
            self.to_write_json(rs)
        elif path.endswith("/check_shared_log"):
            shared_log_id = self.get_argument("id")
            rs = {}
            print("shared_log_id:", shared_log_id)
            if shared_log_id:
                t = pan_service.recheck_shared_d_link(int(shared_log_id))
                if t:
                    rs = t
            self.to_write_json(rs)
        elif path.endswith("/sync_used"):
            pan_account_ids_str = self.get_argument("ids")
            used_str = self.get_argument("useds")
            if pan_account_ids_str and used_str:
                _ids = pan_account_ids_str.split(",")
                useds = used_str.split(",")
                params = []
                ul = len(useds)
                for i in range(len(_ids)):
                    _id = _ids[i]
                    if i < ul:
                        used = useds[i]
                        params.append({'id': int(_id), 'used': int(used)})

                if params:
                    DataDao.update_pan_account_used(params)

            self.to_write_json({})
        elif path.endswith("/dlink"):
            item_id = self.get_argument("id")
            params = pan_service.query_file(item_id)
            self.render('dlink.html', **params)
        elif path.endswith("/manage"):
            pan_id = self.get_argument("panid", "0")
            params = {'pan_id': pan_id}
            self.render('ftree.html', **params)
        elif path.endswith("/helptokens"):
            res = pan_service.pan_accounts_dict()
            self.to_write_json(res)
        elif path.endswith("/syncallnodes"):
            item_fuzzy_id = self.get_argument("id", None)
            item_id = int(decrypt_id(item_fuzzy_id))
            pan_id = self.get_argument('panid', "0")
            logger.info("syncallnodes pan_id:{}".format(pan_id))
            pan_id = int(pan_id)
            recursion = self.get_argument("recursion")
            if recursion == "1":
                recursion = True
            else:
                recursion = False
            if not item_id:
                if pan_id:
                    root_item: DataItem = sync_pan_service.fetch_root_item(
                        pan_id)
                    logger.info('root_item:{}'.format(
                        DataItem.to_dict(root_item)))
                    if root_item:
                        item_id = root_item.id
                    else:
                        item_id = sync_pan_service.new_root_item(
                            self.request.user_id, pan_id)
                else:
                    item_id = 55
            item_id = int(item_id)
            rs = sync_pan_service.sync_from_root(item_id, recursion, pan_id,
                                                 self.request.user_id)
            self.to_write_json(rs)
        elif path.endswith("/synccommunity"):
            # print("in...:")
            bd = self.request.body
            data_obj = json.loads(bd)
            print('/synccommunity payload:', self.request.user_id)
            open_service.sync_community_item_to_es(self.request.user_id,
                                                   data_obj)
            self.to_write_json({'state': 0})
        elif path.endswith("/syncstate"):
            self.release_db = False
            pan_id = self.get_argument('panid', "0")
            dir_item_id = sync_pan_service.check_sync_state(
                pan_id, self.request.user_id)
            if dir_item_id:
                self.to_write_json({'state': 1, 'item': dir_item_id})
            else:
                self.to_write_json({'state': 0})
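In the /load branch above, paging is plain offset arithmetic: offset = page * size, and another page exists exactly when matches remain beyond the current window. The check in isolation:

def page_window(page, size=100):
    return page * size, size  # (offset, size) for this page

def has_next_page(offset, size, total):
    return offset + size < total  # more hits exist past this window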
Example #29
 def new_root_item(self, user_id, pan_id):
     root_item_id, root_item = DataDao.new_root_item(user_id, pan_id)
     return root_item_id