Ejemplo n.º 1
0
 def update_dir_size(self,
                     data_item: DataItem,
                     recursive=False,
                     reset_sub_dir=True):
     """Refresh the cached size of a directory DataItem.

     Delegates the actual work to update_dir_size_by_dict() on the dict
     form of the item, then copies the refreshed 'size'/'sized' values
     back onto the DataItem instance.
     """
     # Skip missing items, the virtual top directory, and plain files.
     skip = (not data_item
             or data_item.filename == TOP_DIR_FILE_NAME
             or data_item.isdir == 0)
     if skip:
         return
     item_dict = DataItem.to_dict(data_item)
     result = self.update_dir_size_by_dict(item_dict, recursive,
                                           reset_sub_dir)
     data_item.size = item_dict['size']
     data_item.sized = item_dict['sized']
     return result
Ejemplo n.º 2
0
 def recursive_check_dir_size(dir_list: list, pos, rs: dict):
     """Walk dir_list from index *pos*, refreshing each directory's size.

     Depth-first: while some child directory still needs its size
     refreshed, descend into it before summing the current directory.
     Mutates *rs*, setting rs['change'] = True whenever a stored size
     was updated.
     """
     if pos >= len(dir_list):
         return rs
     current = dir_list[pos]
     current_id = current['id']
     pending: DataItem = DataDao.find_need_update_size_dir(
         parent_id=current['id'])
     if pending:
         # Settle the child subtree first, then retry this directory.
         recursive_check_dir_size([DataItem.to_dict(pending)], 0, rs)
         recursive_check_dir_size(dir_list, pos, rs)
         return
     previous_size = current['size']
     total: int = DataDao.sum_size_dir(parent_id=current_id)
     if not current['sized'] or total != previous_size:
         rs['change'] = True
         DataDao.update_data_item(current_id, {'size': total, 'sized': 1})
         current['size'] = total
         current['sized'] = 1
         print("changed:", True)
     print('dir id:', current['id'], ',size:', total, ',ori_size:',
           previous_size)
     recursive_check_dir_size(dir_list, pos + 1, rs)
Ejemplo n.º 3
0
    def get(self):
        """Dispatch GET requests by URL path suffix.

        Endpoints handled here: /list (HTML directory listing), /fload
        (lazy tree-node children), /search and /load (Elasticsearch
        keyword search), /finfo, /readydownload, /check_transfer,
        /check_shared_log, /sync_used, /dlink, /manage, /helptokens,
        /syncallnodes, /synccommunity and /syncstate. JSON endpoints
        reply through self.to_write_json; HTML ones through self.render.
        """
        path = self.request.path
        # print(path)
        if path.endswith("/list"):
            # NOTE(review): 55 looks like a well-known root item id (it is
            # also the fallback in /syncallnodes) -- confirm.
            parent = self.get_argument("parent", default='55')
            item_list = DataDao.query_data_item_by_parent(int(parent), True)
            params = {"list": item_list}
            # print("params:", params)
            # for item in item_list:
            #     print(item.filename)
            self.render('list.html', **params)
        elif path.endswith("/fload"):
            # Lazy-load children for a tree widget. node_id '#' is the
            # virtual-root request and is answered with four fixed nodes.
            source = self.get_argument("source", "")
            node_id = self.get_argument("id")
            # parent_path = self.get_argument("path")
            # if not parent_path.endswith("/"):
            #     parent_path = "%s/" % parent_path
            logger.info("fload node_id:{},source:{}".format(node_id, source))
            # parent_id = 55
            params = []
            if not '#' == node_id:
                # if "shared" == source:
                #     params = pan_service.query_shared_file_list(parent_id, self.request.user_id)
                if "assets" == source:
                    if 'assets_0' == node_id:
                        params = ProductDao.query_assets_by_ref_id_for_tree(
                            self.ref_id)
                elif "free" == source:
                    if 'free_0' == node_id:
                        params = pan_service.query_root_list()
                elif "self" == source:
                    if 'self_0' == node_id:
                        # Lazily resolve and cache the user's default pan
                        # account id before listing its root.
                        if not self.default_pan_id:
                            pan_acc = auth_service.default_pan_account(
                                self.user_id)
                            self.default_pan_id = pan_acc.id
                        if self.default_pan_id:
                            params = pan_service.query_client_root_list(
                                self.default_pan_id)
                    else:
                        # Non-root node ids arrive as encrypted numeric ids.
                        node_id_val = decrypt_id(node_id)
                        parent_id = int(node_id_val)
                        params = pan_service.query_client_sub_list(
                            parent_id, self.ref_id)
                elif "empty" == source:
                    pass
                else:
                    node_id_val = decrypt_id(node_id)
                    parent_id = int(node_id_val)
                    params = pan_service.query_file_list(parent_id)
            else:
                # params = pan_service.query_root_list(self.request.user_id)
                # Virtual-root request: return the four top-level nodes.
                params.append({
                    "id": "free_0",
                    "text": PAN_TREE_TXT['free_root'],
                    "data": {
                        "source": "free"
                    },
                    "children": True,
                    "icon": "folder"
                })
                params.append({
                    "id": "assets_0",
                    "text": PAN_TREE_TXT['buy_root'],
                    "data": {
                        "source": "assets"
                    },
                    "children": True,
                    "icon": "folder"
                })
                params.append({
                    "id": "self_0",
                    "text": PAN_TREE_TXT['self_root'],
                    "data": {
                        "source": "self"
                    },
                    "children": True,
                    "icon": "folder"
                })
                params.append({
                    "id": "empty_0",
                    "text": PAN_TREE_TXT['empty_root'],
                    "data": {
                        "source": "empty"
                    },
                    "children": False,
                    "icon": "file"
                })

            self.to_write_json(params)
        elif path.endswith("/search"):
            params = {}
            self.render('search.html', **params)
        elif path.endswith("/load"):
            # Paged keyword search against Elasticsearch (100 hits/page).
            kw = self.get_body_argument("kw")
            source = self.get_body_argument("source")
            print("kw:", kw)
            print("source:", source)
            # Spaces become '%' so multi-word queries match like wildcards.
            kw = kw.replace(' ', '%')
            page = self.get_body_argument("page")
            size = 100
            offset = int(page) * size
            sp: SearchParams = SearchParams.build_params(offset, size)
            sp.add_must(value=kw)
            # A non-empty source switches to the "share" index and filters
            # on the source field.
            es_dao_fun = es_dao_local
            if source:
                sp.add_must(field='source', value=source)
                es_dao_fun = es_dao_share
                # es_dao_fun = es_dao_dir
            es_body = build_query_item_es_body(sp)
            print("es_body:", json.dumps(es_body))
            es_result = es_dao_fun().es_search_exec(es_body)
            hits_rs = es_result["hits"]
            total = hits_rs["total"]
            datas = [_s["_source"] for _s in hits_rs["hits"]]

            # print("es_result:", es_result)
            # item_list = DataDao.query_file_list_by_keyword(kw, offset=offset, limit=size)
            # objs = [object_to_dict(o, FIELDS) for o in item_list]
            # has_next = len(objs) == size
            has_next = offset + size < total
            rs = {"data": datas, "has_next": has_next}
            # print("rs:", rs)
            self.to_write_json(rs)
        elif path.endswith("/finfo"):
            # item_id = self.get_argument("id")
            item_fuzzy_id = self.get_argument("id")
            item_id = int(decrypt_id(item_fuzzy_id))
            params = pan_service.query_file(item_id)
            self.to_write_json(params)
        elif path.endswith("/readydownload"):
            # Share the folder, then decide between a single master link
            # and per-sub-account transfers based on item size.
            fs_id = self.get_argument("fs_id")
            print("readydownload fs_id:", fs_id)
            params, share_log, data_item = pan_service.share_folder(fs_id)
            # sub_params = []
            # NOTE(review): threshold unit is unclear from here (KB?) --
            # confirm against pan_service.share_folder.
            min_size = 6000
            # min_size = 60
            if data_item.size > min_size:
                sub_params = pan_service.sub_account_transfer(share_log)
                result = {"subs": sub_params}
            else:
                result = {"master": params}
            # result = {"master": params, "subs": sub_params}
            self.to_write_json(result)
        elif path.endswith("/check_transfer"):
            # Re-check the download link of a previous transfer; empty
            # JSON object when nothing (new) is available.
            transfer_log_id = self.get_argument("id")
            rs = {}
            print("transfer_log_id:", transfer_log_id)
            if transfer_log_id:
                t = pan_service.recheck_transfer_d_link(int(transfer_log_id))
                if t:
                    rs = t
            self.to_write_json(rs)
        elif path.endswith("/check_shared_log"):
            shared_log_id = self.get_argument("id")
            rs = {}
            print("shared_log_id:", shared_log_id)
            if shared_log_id:
                t = pan_service.recheck_shared_d_link(int(shared_log_id))
                if t:
                    rs = t
            self.to_write_json(rs)
        elif path.endswith("/sync_used"):
            # Bulk-update quota usage: "ids" and "useds" are parallel
            # comma-separated lists; extra ids without a used value are
            # silently skipped.
            pan_account_ids_str = self.get_argument("ids")
            used_str = self.get_argument("useds")
            if pan_account_ids_str and used_str:
                _ids = pan_account_ids_str.split(",")
                useds = used_str.split(",")
                params = []
                ul = len(useds)
                for i in range(len(_ids)):
                    _id = _ids[i]
                    if i < ul:
                        used = useds[i]
                        params.append({'id': int(_id), 'used': int(used)})

                if params:
                    DataDao.update_pan_account_used(params)

            self.to_write_json({})
        elif path.endswith("/dlink"):
            # NOTE(review): unlike /finfo, the id here is NOT decrypted
            # before the query_file call -- confirm whether intentional.
            item_id = self.get_argument("id")
            params = pan_service.query_file(item_id)
            self.render('dlink.html', **params)
        elif path.endswith("/manage"):
            pan_id = self.get_argument("panid", "0")
            params = {'pan_id': pan_id}
            self.render('ftree.html', **params)
        elif path.endswith("/helptokens"):
            res = pan_service.pan_accounts_dict()
            self.to_write_json(res)
        elif path.endswith("/syncallnodes"):
            # Sync the node tree starting at the given item; when no item
            # id is supplied, resolve (or create) the root item for the
            # pan account, falling back to the well-known id 55.
            item_fuzzy_id = self.get_argument("id", None)
            item_id = int(decrypt_id(item_fuzzy_id))
            pan_id = self.get_argument('panid', "0")
            logger.info("syncallnodes pan_id:{}".format(pan_id))
            pan_id = int(pan_id)
            recursion = self.get_argument("recursion")
            if recursion == "1":
                recursion = True
            else:
                recursion = False
            if not item_id:
                if pan_id:
                    root_item: DataItem = sync_pan_service.fetch_root_item(
                        pan_id)
                    logger.info('root_item:{}'.format(
                        DataItem.to_dict(root_item)))
                    if root_item:
                        item_id = root_item.id
                    else:
                        item_id = sync_pan_service.new_root_item(
                            self.request.user_id, pan_id)
                else:
                    item_id = 55
            item_id = int(item_id)
            rs = sync_pan_service.sync_from_root(item_id, recursion, pan_id,
                                                 self.request.user_id)
            self.to_write_json(rs)
        elif path.endswith("/synccommunity"):
            # print("in...:")
            # Push a JSON request body into the community ES index.
            bd = self.request.body
            data_obj = json.loads(bd)
            print('/synccommunity payload:', self.request.user_id)
            open_service.sync_community_item_to_es(self.request.user_id,
                                                   data_obj)
            self.to_write_json({'state': 0})
            pass
        elif path.endswith("/syncstate"):
            # release_db=False: keep the DB connection open for this
            # request -- presumably consumed by the base handler; confirm.
            self.release_db = False
            pan_id = self.get_argument('panid', "0")
            dir_item_id = sync_pan_service.check_sync_state(
                pan_id, self.request.user_id)
            if dir_item_id:
                self.to_write_json({'state': 1, 'item': dir_item_id})
            else:
                self.to_write_json({'state': 0})
Ejemplo n.º 4
0
    def update_dir_size_by_dict(self,
                                data_item: dict,
                                recursive=False,
                                reset_sub_dir=True):
        """Recompute and persist the size of the directory described by
        *data_item*, then propagate any change up through its ancestors.

        :param data_item: dict form of a DataItem; reads at least 'id',
            'filename', 'isdir', 'size', 'sized' and 'parent'. Mutated in
            place with the refreshed 'size'/'sized' values.
        :param recursive: when resetting, clear the 'sized' flag on the
            whole subtree rather than only direct children.
        :param reset_sub_dir: clear 'sized' flags first to force re-summing.
        :return: True when any stored size changed, False otherwise, or
            None for the top directory / non-directories.
        """
        if data_item['filename'] == TOP_DIR_FILE_NAME or data_item[
                'isdir'] == 0:
            return

        def recover_sized_zero(parent_id):
            # Clear 'sized' on direct children; with recursive=True walk
            # the whole subtree, paging 100 rows at a time.
            DataDao.update_data_item_by_parent_id(parent_id, {'sized': 0})
            if recursive:
                size = 100
                offset = 0
                rs_len = 100
                while rs_len == size:
                    sub_dir_list = DataDao.query_data_item_by_parent(
                        parent_id, offset=offset, limit=size)
                    rs_len = len(sub_dir_list)
                    for s_dir in sub_dir_list:
                        recover_sized_zero(s_dir.id)

        def recursive_check_dir_size(dir_list: list, pos, rs: dict):
            # Depth-first: settle any child directory that still needs
            # sizing before summing the current one. Sets rs['change']
            # when a stored size is updated.
            if pos >= len(dir_list):
                return rs
            p_dir_dict = dir_list[pos]
            p_dir_id = p_dir_dict['id']
            sub_dir: DataItem = DataDao.find_need_update_size_dir(
                parent_id=p_dir_dict['id'])
            if sub_dir:
                recursive_check_dir_size([DataItem.to_dict(sub_dir)], 0, rs)
                recursive_check_dir_size(dir_list, pos, rs)
            else:
                ori_size = p_dir_dict['size']
                s: int = DataDao.sum_size_dir(parent_id=p_dir_id)
                if not p_dir_dict['sized'] or s != ori_size:
                    rs['change'] = True
                    DataDao.update_data_item(p_dir_id, {'size': s, 'sized': 1})
                    p_dir_dict['size'] = s
                    p_dir_dict['sized'] = 1
                    print("changed:", True)
                print('dir id:', p_dir_dict['id'], ',size:', s, ',ori_size:',
                      ori_size)
                recursive_check_dir_size(dir_list, pos + 1, rs)

        if reset_sub_dir:
            recover_sized_zero(data_item['id'])
        _rs = {'change': False}
        recursive_check_dir_size([data_item], 0, _rs)
        print("_rs['change']:", _rs['change'], data_item['parent'])
        if _rs['change']:
            # Bubble the change upward: re-sum each ancestor until one is
            # already consistent (break) or the top directory is reached.
            _data_item: dict = data_item  # fix: was assigned None, then rebound
            while _data_item['parent']:
                p_data_item: DataItem = DataDao.get_data_item_by_id(
                    _data_item['parent'])
                if p_data_item and p_data_item.filename != TOP_DIR_FILE_NAME:
                    _ori_size = p_data_item.size
                    _s: int = DataDao.sum_size_dir(parent_id=p_data_item.id)
                    # fix: message typo 'upate' -> 'update'
                    print('update parent dir id:', p_data_item.id, ',size:',
                          _s, ',ori_size:', _ori_size)
                    if _s != _ori_size:
                        DataDao.update_data_item(p_data_item.id, {
                            'size': _s,
                            'sized': 1
                        })
                    else:
                        break
                    _data_item = DataItem.to_dict(p_data_item)
                else:
                    break
        return _rs['change']
Ejemplo n.º 5
0
    def query_file(self, item_id):
        """Return render-ready metadata for a single file item.

        Refreshes the cached download link (and, for images, the thumbnail)
        from the pan REST API when the stored dlink is missing or older
        than DLINK_TIMEOUT hours.

        :param item_id: primary key of the DataItem.
        :return: dict with an 'item' entry: obfuscated id, guessed file
            type, media type, size converted to KB, and dlink_tokens
            holding the pan account id the link belongs to.
        """
        data_item: DataItem = DataDao.get_data_item_by_id(item_id)
        need_sync = False
        logger.info("query_file dlink:{}".format(data_item.dlink))
        if not data_item.dlink_updated_at or not data_item.dlink:
            need_sync = True
        elif data_item.dlink_updated_at:
            # Stored links expire: refresh once older than DLINK_TIMEOUT
            # hours (timestamp interpreted in self.default_tz).
            dt = arrow.get(
                data_item.dlink_updated_at).replace(tzinfo=self.default_tz)
            if dt.shift(hours=+DLINK_TIMEOUT) < arrow.now():
                need_sync = True
        account_id = data_item.account_id
        # NOTE(review): acc is unused here; kept for the DB lookup only --
        # confirm whether it can be dropped.
        acc: Accounts = DataDao.account_by_id(account_id)
        need_thumbs = False
        if is_image_media(data_item.filename) and data_item.category == 3:
            need_thumbs = True
        if need_sync:
            pan_acc: PanAccounts = self.get_pan_account(
                data_item.panacc, data_item.account_id)
            sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
                pan_acc.access_token, int(data_item.fs_id), need_thumbs)
            if sync_dlink:
                data_item.dlink = "{}&access_token={}".format(
                    sync_dlink, pan_acc.access_token)
                data_item.dlink_updated_at = get_now_datetime()
                data_item_params = {
                    "dlink": data_item.dlink,
                    "dlink_updated_at": data_item.dlink_updated_at
                }
                if need_thumbs:
                    # Pick the largest available thumbnail, falling back to
                    # the icon (replaces a repetitive if/elif chain).
                    for key in ("url3", "url2", "url1", "icon"):
                        if key in thumbs:
                            data_item_params["thumb"] = thumbs[key]
                            data_item.thumb = thumbs[key]
                            break
                DataDao.update_data_item(data_item.id, data_item_params)

        used_pan_acc_id = data_item.panacc
        # Report size in KB. (The former `if data_item:` guard was dead
        # code: data_item is dereferenced unconditionally above.)
        data_item.size = int(data_item.size / 1024)

        f_type = guess_file_type(data_item.filename)
        params = {"item": DataItem.to_dict(data_item, ['id', 'parent'])}
        params["item"]["id"] = obfuscate_id(data_item.id)
        params["item"]["type"] = f_type
        params["item"]["media_type"] = self.check_data_item_media_type(
            data_item.category, data_item.filename)
        params["item"]["dlink_tokens"] = [used_pan_acc_id]
        return params