コード例 #1
0
ファイル: pcs.py プロジェクト: yusiwen/bcloud
 def parse_share_page(content):
     '''Parse the HTML of a share page and return its file list.

     content - HTML text of the share page.

     Returns the decoded file list (result of json.loads), or None when
     the embedded JSON cannot be located or decoded.
     '''
     tree = html.fromstring(content)
     script_sel = CSS('script')
     scripts = script_sel(tree)
     for script in scripts:
         if (script.text
                 and (script.text.find('viewsingle_param') > -1
                      or script.text.find('mpan.viewlist_param') > -1)):
             break
     else:
         # Fix: the original message interpolated an undefined name `url`,
         # which raised NameError on this failure path instead of logging.
         logger.warn('pcs.parse_share_page: failed to get filelist')
         return None
     start_marker = 'viewsingle_param.list=JSON.parse('
     start = script.text.find(start_marker)
     end = script.text.find(');mpan.viewsingle_param.username')
     if start == -1 or end == -1:
         start_marker = 'listArr:JSON.parse('
         start = script.text.find(start_marker)
         end = script.text.find('),rootPath:')
         if start == -1 or end == -1:
             return None
     # len(start_marker) replaces the former magic offsets 33 and 19.
     json_str = script.text[start + len(start_marker):end]
     try:
         # The JSON is double-encoded in the page, hence two loads calls.
         return json.loads(json.loads(json_str))
     except ValueError:
         logger.warn(traceback.format_exc())
         return None
コード例 #2
0
ファイル: decoder.py プロジェクト: 1060460048/bcloud
def decode_flashget(link):
    '''Decode a flashget:// link into a plain URL.

    The payload is base64 text between the 11-byte "flashget://" prefix
    and a 7-byte trailing suffix; the decoded text is wrapped in the
    10-byte markers "[FLASHGET]" on both sides.
    '''
    # Fix: base64.decodestring() was removed in Python 3.9;
    # decodebytes() is the supported equivalent.
    payload = link[11:len(link) - 7].encode()
    try:
        l = base64.decodebytes(payload).decode()
    except ValueError:
        logger.warn(traceback.format_exc())
        # Some links carry GBK-encoded text instead of UTF-8.
        l = base64.decodebytes(payload).decode('gbk')
    return l[10:len(l) - 10]
コード例 #3
0
ファイル: pcs.py プロジェクト: alex8224/bcloud
 def parse_share_page(content):
     '''Parse the HTML of a share page and return its file list.

     content - HTML text of the share page.

     Returns the decoded file list (result of json.loads), or None when
     the embedded JSON cannot be located or decoded.
     '''
     tree = html.fromstring(content)
     script_sel = CSS('script')
     scripts = script_sel(tree)
     for script in scripts:
         if (script.text and (script.text.find('viewsingle_param') > -1 or
             script.text.find('mpan.viewlist_param') > -1)):
             break
     else:
         # Fix: the original message interpolated an undefined name `url`,
         # which raised NameError on this failure path instead of logging.
         logger.warn('pcs.parse_share_page: failed to get filelist')
         return None
     start_marker = 'viewsingle_param.list=JSON.parse('
     start = script.text.find(start_marker)
     end = script.text.find(');mpan.viewsingle_param.username')
     if start == -1 or end == -1:
         start_marker = 'listArr:JSON.parse('
         start = script.text.find(start_marker)
         end = script.text.find('),rootPath:')
         if start == -1 or end == -1:
             return None
     # len(start_marker) replaces the former magic offsets 33 and 19.
     json_str = script.text[start + len(start_marker):end]
     try:
         # The JSON is double-encoded in the page, hence two loads calls.
         return json.loads(json.loads(json_str))
     except ValueError:
         logger.warn(traceback.format_exc())
         return None
コード例 #4
0
 def parse_share_page(content):
     '''Parse the HTML of a share page and return its file list.

     content - HTML text of the share page.

     Returns the decoded "file_list" JSON value, or None when it cannot
     be located or decoded.
     '''
     tree = html.fromstring(content)
     script_sel = CSS('script')
     scripts = script_sel(tree)
     for script in scripts:
         if script.text and (script.text.find('yunData.setData') > -1
                             or script.text.find('window.yunData') > -1):
             break
     else:
         # Fix: the original message interpolated an undefined name `url`,
         # which raised NameError on this failure path instead of logging.
         logger.warn('pcs.parse_share_page: failed to get filelist')
         return None
     type1 = ',"third":0,"bdstoken":'
     type2 = ',"uk":'
     marker = '"file_list":'
     start = script.text.find(marker)
     end = script.text.find(type1)
     if start == -1:
         return None
     if end == -1:
         end = script.text.find(type2)
         if end == -1:
             return None
     # len(marker) replaces the former magic offset 12.
     json_str = script.text[start + len(marker):end]
     try:
         return json.loads(json_str)
     except ValueError:
         logger.warn(traceback.format_exc())
         return None
コード例 #5
0
def decode_flashget(link):
    '''Decode a flashget:// link into a plain URL.

    The payload is base64 text between the 11-byte "flashget://" prefix
    and a 7-byte trailing suffix; the decoded text is wrapped in the
    10-byte markers "[FLASHGET]" on both sides.
    '''
    # Fix: base64.decodestring() was removed in Python 3.9;
    # decodebytes() is the supported equivalent.
    payload = link[11:len(link) - 7].encode()
    try:
        l = base64.decodebytes(payload).decode()
    except ValueError:
        logger.warn(traceback.format_exc())
        # Some links carry GBK-encoded text instead of UTF-8.
        l = base64.decodebytes(payload).decode('gbk')
    return l[10:len(l) - 10]
コード例 #6
0
 def dump_image(url, filepath):
     '''Fetch url and write the response body to filepath.

     Returns True on success, False when the request yields no data.
     '''
     req = net.urlopen(url)
     if req and req.data:
         with open(filepath, 'wb') as fh:
             fh.write(req.data)
         return True
     logger.warn('update_share_image:, failed to request %s' % url)
     return False
コード例 #7
0
ファイル: gutil.py プロジェクト: hou-dao/bcloud
 def dump_image(url, filepath):
     '''Fetch url and write the response body to filepath.

     Returns True on success, False when the request yields no data.
     '''
     req = net.urlopen(url)
     if req and req.data:
         with open(filepath, 'wb') as fh:
             fh.write(req.data)
         return True
     logger.warn('update_liststore_image(), failed to request %s' % url)
     return False
コード例 #8
0
ファイル: gutil.py プロジェクト: zhangxueyangjuxie/bcloud
def update_share_image(liststore, tree_iters, col, large_col, pcs_files,
                       dir_name, icon_size, large_icon_size):
    '''Download file thumbnails and show them in the liststore.

    Two image columns are updated at once, with different scaled sizes.
    pcs_files - contains the required fields for each file.
    dir_name  - cache directory; downloaded images are saved there.

    NOTE(review): `large_icon_size` is accepted but never used -- the
    full-size pixbuf is stored unscaled in `large_col`; confirm whether
    it should be scaled to `large_icon_size`.
    '''

    def update_image(filepath, tree_iter):
        # Runs on the GTK main loop via GLib.idle_add below.
        try:
            tree_path = liststore.get_path(tree_iter)
            if tree_path is None:
                return
            pix = GdkPixbuf.Pixbuf.new_from_file(filepath)
            width = pix.get_width()
            height = pix.get_height()
            # Scale to icon_size wide, preserving the aspect ratio.
            small_pix = pix.scale_simple(icon_size,
                                         height * icon_size // width,
                                         GdkPixbuf.InterpType.NEAREST)
            liststore[tree_path][col] = small_pix
            liststore[tree_path][large_col] = pix
        except GLib.GError:
            logger.error(traceback.format_exc())

    def dump_image(url, filepath):
        # Download url to filepath; True on success.
        req = net.urlopen(url)
        if not req or not req.data:
            logger.warn('update_share_image:, failed to request %s' % url)
            return False
        with open(filepath, 'wb') as fh:
            fh.write(req.data)
        return True

    for tree_iter, pcs_file in zip(tree_iters, pcs_files):
        # Prefer url2, then url1, then url3 as the thumbnail source.
        if 'thumbs' not in pcs_file:
            continue
        elif 'url2' in pcs_file['thumbs']:
            key = 'url2'
        elif 'url1' in pcs_file['thumbs']:
            key = 'url1'
        elif 'url3' in pcs_file['thumbs']:
            key = 'url3'
        else:
            continue
        fs_id = pcs_file['fs_id']
        url = pcs_file['thumbs'][key]
        filepath = os.path.join(dir_name, 'share-{0}.jpg'.format(fs_id))
        if os.path.exists(filepath) and os.path.getsize(filepath):
            # Cached copy exists; just display it.
            GLib.idle_add(update_image, filepath, tree_iter)
        elif not url or len(url) < 10:
            logger.warn('update_share_image: failed to get url %s' % url)
        else:
            status = dump_image(url, filepath)
            if status:
                GLib.idle_add(update_image, filepath, tree_iter)
コード例 #9
0
def decode_thunder(link):
    '''Decode a thunder:// link into a plain URL.

    The payload after the 10-byte "thunder://" prefix is base64 text
    decoding to "AA<url>ZZ"; the two-byte markers are stripped.
    '''
    # AAhttp://127.0.0.1 -- known placeholder payload; reject outright.
    if link.startswith('QUFodHRwOi8vMTI3LjAuMC4'):
        return ''
    # Fix: base64.decodestring() was removed in Python 3.9;
    # decodebytes() is the supported equivalent.
    payload = link[10:].encode()
    try:
        l = base64.decodebytes(payload).decode('gbk')
    except ValueError:
        logger.warn(traceback.format_exc())
        l = base64.decodebytes(payload).decode()
    return l[2:-2]
コード例 #10
0
ファイル: decoder.py プロジェクト: 1060460048/bcloud
def decode_thunder(link):
    '''Decode a thunder:// link into a plain URL.

    The payload after the 10-byte "thunder://" prefix is base64 text
    decoding to "AA<url>ZZ"; the two-byte markers are stripped.
    '''
    # AAhttp://127.0.0.1 -- known placeholder payload; reject outright.
    if link.startswith('QUFodHRwOi8vMTI3LjAuMC4'):
        return ''
    # Fix: base64.decodestring() was removed in Python 3.9;
    # decodebytes() is the supported equivalent.
    payload = link[10:].encode()
    try:
        l = base64.decodebytes(payload).decode('gbk')
    except ValueError:
        logger.warn(traceback.format_exc())
        l = base64.decodebytes(payload).decode()
    return l[2:-2]
コード例 #11
0
ファイル: gutil.py プロジェクト: CzBiX/bcloud
def update_share_image(liststore, tree_iters, col, large_col, pcs_files,
                       dir_name, icon_size, large_icon_size):
    '''Download file thumbnails and show them in the liststore.

    Two image columns are updated at once, with different scaled sizes.
    pcs_files - contains the required fields for each file.
    dir_name  - cache directory; downloaded images are saved there.

    NOTE(review): `large_icon_size` is accepted but never used -- the
    full-size pixbuf is stored unscaled in `large_col`; confirm whether
    it should be scaled to `large_icon_size`.
    '''
    def update_image(filepath, tree_iter):
        # Runs on the GTK main loop via GLib.idle_add below.
        try:
            tree_path = liststore.get_path(tree_iter)
            if tree_path is None:
                return
            pix = GdkPixbuf.Pixbuf.new_from_file(filepath)
            width = pix.get_width()
            height = pix.get_height()
            # Scale to icon_size wide, preserving the aspect ratio.
            small_pix = pix.scale_simple(icon_size,
                                         height * icon_size // width,
                                         GdkPixbuf.InterpType.NEAREST)
            liststore[tree_path][col] = small_pix
            liststore[tree_path][large_col] = pix
        except GLib.GError:
            logger.error(traceback.format_exc())

    def dump_image(url, filepath):
        # Download url to filepath; True on success.
        req = net.urlopen(url)
        if not req or not req.data:
            logger.warn('update_share_image:, failed to request %s' % url)
            return False
        with open(filepath, 'wb') as fh:
            fh.write(req.data)
        return True

    for tree_iter, pcs_file in zip(tree_iters, pcs_files):
        # Prefer url2, then url1, then url3 as the thumbnail source.
        if 'thumbs' not in pcs_file:
            continue
        elif 'url2' in pcs_file['thumbs']:
            key = 'url2'
        elif 'url1' in pcs_file['thumbs']:
            key = 'url1'
        elif 'url3' in pcs_file['thumbs']:
            key = 'url3'
        else:
            continue
        fs_id = pcs_file['fs_id']
        url = pcs_file['thumbs'][key]
        filepath = os.path.join(dir_name, 'share-{0}.jpg'.format(fs_id))
        if os.path.exists(filepath) and os.path.getsize(filepath):
            # Cached copy exists; just display it.
            GLib.idle_add(update_image, filepath, tree_iter)
        elif not url or len(url) < 10:
            logger.warn('update_share_image: failed to get url %s' % url)
        else:
            status = dump_image(url, filepath)
            if status:
                GLib.idle_add(update_image, filepath, tree_iter)
コード例 #12
0
ファイル: SharePage.py プロジェクト: readmagic/bcloud
        def on_load_url(filelist, error=None):
            # Callback for the async share-url loader: fills the
            # liststore with the fetched file list.
            self.url_entry.props.secondary_icon_name = REFRESH_ICON
            # A newer request superseded this one; drop the stale result.
            if timestamp != self.url_entry.timestamp:
                logger.debug("SharePage.load_url, dirname not match, ignored")
                return
            if error or not filelist:
                self.app.toast(_("Failed to get files, please reload this page"))
                logger.warn("SharePage.load_url: %s, %s, %s" % (self.curr_url, filelist, error))
                self.has_next = False
                return
            state = self.select_all_button.get_active()
            tree_iters = []

            # Insert ".."; clicking it returns to the parent directory.
            if filelist and self.dirname and self.dirname != "/":
                parent_dirname = os.path.dirname(self.dirname)
                pixbuf, type_ = self.app.mime.get(parent_dirname, True, icon_size=ICON_SIZE)
                large_pixbuf, type_ = self.app.mime.get(parent_dirname, True, icon_size=LARGE_ICON_SIZE)
                self.liststore.append([state, pixbuf, large_pixbuf, "..", parent_dirname, True, 0, "0", 0, ""])

            for file_ in filelist:
                isdir = file_["isdir"] == "1"
                pixbuf, type_ = self.app.mime.get(file_["path"], isdir, icon_size=ICON_SIZE)
                large_pixbuf, type_ = self.app.mime.get(file_["path"], isdir, icon_size=LARGE_ICON_SIZE)
                size = int(file_.get("size", 0))
                human_size = util.get_human_size(size)[0]
                mtime = int(file_.get("server_mtime", 0))
                human_mtime = time.ctime(mtime)
                tree_iter = self.liststore.append(
                    [
                        state,
                        pixbuf,
                        large_pixbuf,
                        file_["server_filename"],
                        file_["path"],
                        isdir,
                        size,
                        human_size,
                        mtime,
                        human_mtime,
                    ]
                )
                tree_iters.append(tree_iter)
            # Fetch thumbnails in the background and patch them in later.
            cache_path = Config.get_cache_path(self.app.profile["username"])
            gutil.async_call(
                gutil.update_share_image,
                self.liststore,
                tree_iters,
                ICON_COL,
                LARGE_ICON_COL,
                filelist,
                cache_path,
                ICON_SIZE,
                LARGE_ICON_SIZE,
            )
コード例 #13
0
def update_liststore_image(liststore,
                           tree_iters,
                           col,
                           pcs_files,
                           dir_name,
                           icon_size=96):
    '''Download file thumbnails and show them in the liststore.

    pcs_files - contains the required fields for each file.
    dir_name  - cache directory; downloaded images are saved there.
    icon_size - target size of the thumbnail, 96px by default.
    '''
    def update_image(filepath, tree_iter):
        # Runs on the GTK main loop via GLib.idle_add below.
        try:
            pix = GdkPixbuf.Pixbuf.new_from_file_at_size(
                filepath, icon_size, icon_size)
            tree_path = liststore.get_path(tree_iter)
            if tree_path is None:
                return
            liststore[tree_path][col] = pix
        except GLib.GError:
            logger.error(traceback.format_exc())

    def dump_image(url, filepath):
        # Download url to filepath; True on success.
        req = net.urlopen(url)
        if not req or not req.data:
            logger.warn('update_liststore_image(), failed to request %s' % url)
            return False
        with open(filepath, 'wb') as fh:
            fh.write(req.data)
        return True

    for tree_iter, pcs_file in zip(tree_iters, pcs_files):
        # Prefer url1, then url2, then url3 as the thumbnail source.
        if 'thumbs' not in pcs_file:
            continue
        if 'url1' in pcs_file['thumbs']:
            key = 'url1'
        elif 'url2' in pcs_file['thumbs']:
            key = 'url2'
        elif 'url3' in pcs_file['thumbs']:
            key = 'url3'
        else:
            continue
        fs_id = pcs_file['fs_id']
        url = pcs_file['thumbs'][key]
        filepath = os.path.join(dir_name, '{0}.jpg'.format(fs_id))
        if os.path.exists(filepath) and os.path.getsize(filepath):
            # Cached copy exists; just display it.
            GLib.idle_add(update_image, filepath, tree_iter)
        elif not url or len(url) < 10:
            logger.warn('update_liststore_image(), failed to get url')
        else:
            status = dump_image(url, filepath)
            if status:
                GLib.idle_add(update_image, filepath, tree_iter)
コード例 #14
0
def get_user_uk(cookie, tokens):
    '''Fetch the user's uk (numeric user id) from the main page.

    Returns the uk as a string, or None on failure.
    '''
    url = 'http://yun.baidu.com'
    req = net.urlopen(url, headers={'Cookie': cookie.header_output()})
    if req:
        content = req.data.decode()
        # Fix: raw string -- '\d' in a normal literal is an invalid
        # escape sequence (SyntaxWarning on modern Python).
        match = re.findall(r'"uk":(\d+),"task_key"', content)
        if len(match) == 1:
            return match[0]
        else:
            logger.warn('pcs.get_user_uk(), failed to parse uk, %s' % url)
    return None
コード例 #15
0
ファイル: pcs.py プロジェクト: hou-dao/bcloud
def get_user_uk(cookie, tokens):
    '''Fetch the user's uk (numeric user id) from the main page.

    Returns the uk as a string, or None on failure.
    '''
    url = 'http://yun.baidu.com'
    req = net.urlopen(url, headers={'Cookie': cookie.header_output()})
    if req:
        content = req.data.decode()
        # Fix: raw string -- '\?' and '\d' in a normal literal are
        # invalid escape sequences (SyntaxWarning on modern Python).
        match = re.findall(r'/share/home\?uk=(\d+)" target=', content)
        if len(match) == 1:
            return match[0]
        else:
            logger.warn('get_user_uk(), failed to parse uk, %s' % url)
    return None
コード例 #16
0
ファイル: pcs.py プロジェクト: shanlihou/bcloud-modify
def get_user_uk(cookie, tokens):
    '''Fetch the user's uk (numeric user id) from the main page.

    Returns the uk as a string, or None on failure.
    '''
    # Fix: removed a leftover debug print() that also displaced the
    # docstring from its required first-statement position.
    url = 'http://yun.baidu.com'
    req = net.urlopen(url, headers={'Cookie': cookie.header_output()})
    if req:
        content = req.data.decode()
        # Fix: raw string -- '\?' and '\d' in a normal literal are
        # invalid escape sequences (SyntaxWarning on modern Python).
        match = re.findall(r'/share/home\?uk=(\d+)" target=', content)
        if len(match) == 1:
            return match[0]
        else:
            logger.warn('pcs.get_user_uk(), failed to parse uk, %s' % url)
    return None
コード例 #17
0
ファイル: gutil.py プロジェクト: hou-dao/bcloud
def update_liststore_image(liststore, tree_iters, col, pcs_files, dir_name,
                           icon_size=96):
    '''Download file thumbnails and show them in the liststore.

    pcs_files - contains the required fields for each file.
    dir_name  - cache directory; downloaded images are saved there.
    icon_size - target size of the thumbnail, 96px by default.
    '''
    def update_image(filepath, tree_iter):
        # Runs on the GTK main loop via GLib.idle_add below.
        try:
            pix = GdkPixbuf.Pixbuf.new_from_file_at_size(filepath, icon_size,
                                                         icon_size)
            tree_path = liststore.get_path(tree_iter)
            if tree_path is None:
                return
            liststore[tree_path][col] = pix
        except GLib.GError:
            logger.error(traceback.format_exc())

    def dump_image(url, filepath):
        # Download url to filepath; True on success.
        req = net.urlopen(url)
        if not req or not req.data:
            logger.warn('update_liststore_image(), failed to request %s' % url)
            return False
        with open(filepath, 'wb') as fh:
            fh.write(req.data)
        return True

    for tree_iter, pcs_file in zip(tree_iters, pcs_files):
        # Prefer url1, then url2, then url3 as the thumbnail source.
        if 'thumbs' not in pcs_file:
            continue
        if 'url1' in pcs_file['thumbs']:
            key = 'url1'
        elif 'url2' in pcs_file['thumbs']:
            key = 'url2'
        elif 'url3' in pcs_file['thumbs']:
            key = 'url3'
        else:
            continue
        fs_id = pcs_file['fs_id']
        url = pcs_file['thumbs'][key]
        filepath = os.path.join(dir_name, '{0}.jpg'.format(fs_id))
        if os.path.exists(filepath) and os.path.getsize(filepath):
            # Cached copy exists; just display it.
            GLib.idle_add(update_image, filepath, tree_iter)
        elif not url or len(url) < 10:
            logger.warn('update_liststore_image(), failed to get url')
        else:
            status = dump_image(url, filepath)
            if status:
                GLib.idle_add(update_image, filepath, tree_iter)
コード例 #18
0
ファイル: decoder.py プロジェクト: 1060460048/bcloud
def decode(link):
    '''Dispatch link to the decoder registered for its protocol prefix.'''
    if not isinstance(link, str) or len(link) < 10:
        logger.error('unknown link: %s' % link)
        return ''
    link_prefix = link[:7].lower()
    if link_prefix not in _router:
        logger.warn('unknown protocol: %s' % link)
        return ''
    try:
        return _router[link_prefix](link)
    except ValueError:
        logger.error(traceback.format_exc())
        return ''
コード例 #19
0
def decode(link):
    '''Decode a download link by dispatching on its protocol prefix.'''
    if not isinstance(link, str) or len(link) < 10:
        logger.error('unknown link: %s' % link)
        return ''
    prefix = link[:7].lower()
    if prefix not in _router:
        logger.warn('unknown protocol: %s' % link)
        return ''
    try:
        return _router[prefix](link)
    except ValueError:
        logger.error(traceback.format_exc())
        return ''
コード例 #20
0
ファイル: gutil.py プロジェクト: ltsmat/bcloud
def update_avatar(cookie, dir_name):
    '''Fetch the user's avatar image and cache it on disk.

    Returns the path of the cached avatar file, or None on failure.
    '''
    filepath = os.path.join(dir_name, 'avatar.jpg')
    # Reuse a cached avatar that is still fresh enough.
    if (os.path.exists(filepath) and
            time.time() - os.stat(filepath).st_mtime <= AVATAR_UPDATE_INTERVAL):
        return filepath
    img_url = pcs.get_avatar(cookie)
    if not img_url:
        return None
    req = net.urlopen(img_url)
    if not req or not req.data:
        # Fix: the original logged the undefined name `url`, raising
        # NameError on this failure path; log img_url instead.
        logger.warn('gutil.update_avatar(), failed to request %s' % img_url)
        return None
    with open(filepath, 'wb') as fh:
        fh.write(req.data)
    return filepath
コード例 #21
0
ファイル: pcs.py プロジェクト: rover12421/bcloud
    def parse_share_page(content):
        '''Extract the yunData file list embedded in a share page.

        content - HTML text of the share page.

        Returns the 'file_list' value from the window.yunData JSON
        object, or None when it cannot be found or decoded.
        '''
        # Fix: removed 26 lines of commented-out dead code (the old
        # lxml-based parser superseded by this regex approach).
        # Fix: raw string -- '\.', '\s' and '\{' in a normal literal are
        # invalid escape sequences (SyntaxWarning on modern Python).
        json_reg = re.compile(r'window\.yunData\s*=\s*\{(.+)\};')
        json_match = json_reg.search(content)

        if json_match:
            try:
                # Re-wrap the captured body in braces to form valid JSON.
                json_str = "".join([
                    "{",
                    json_match.group(1),
                    "}"
                ])
                return json.loads(json_str)['file_list']
            except ValueError:
                logger.warn(traceback.format_exc())

        return None
コード例 #22
0
ファイル: gutil.py プロジェクト: kktt007/bcloud
    def dump_image(url, filepath):
        '''Download url to filepath; return True on success.'''
        try:
            req = request.urlopen(url, timeout=net.TIMEOUT)
            data = req.read()
        except OSError:
            # While the thumbnail is being generated the request 404s.
            return False
        except Exception:
            # Fix: a bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; catch Exception instead.
            logger.error(traceback.format_exc())
            return False

        if not data:
            logger.warn('update_share_image(): failed to request %s' % url)
            return False
        elif 'Content-Type' in req.headers:
            # Verify the payload is actually an image.
            ctype = req.getheader('Content-Type')
            if not ctype.startswith("image"):
                return False
        with open(filepath, 'wb') as fh:
            fh.write(data)
        return True
コード例 #23
0
ファイル: gutil.py プロジェクト: CzBiX/bcloud
def update_avatar(cookie, tokens, dir_name):
    '''Fetch the user's avatar image and cache it on disk.

    Returns (uk, username, image path), or None on failure.
    '''
    uk = pcs.get_user_uk(cookie, tokens)
    if not uk:
        return None
    user_info = pcs.get_user_info(tokens, uk)
    if not user_info:
        return None
    img_path = os.path.join(dir_name, 'avatar.jpg')
    # Reuse a cached avatar that is still fresh enough.
    if (os.path.exists(img_path) and
            time.time() - os.stat(img_path).st_mtime <= AVATAR_UPDATE_INTERVAL):
        return (uk, user_info['uname'], img_path)
    img_url = user_info['avatar_url']
    if not img_url:
        return None
    req = net.urlopen(img_url)
    if not req or not req.data:
        # Fix: the original logged the undefined name `url`, raising
        # NameError on this failure path; log img_url instead.
        logger.warn('gutil.update_avatar(), failed to request %s' % img_url)
        return None
    with open(img_path, 'wb') as fh:
        fh.write(req.data)
    return (uk, user_info['uname'], img_path)
コード例 #24
0
def update_avatar(cookie, tokens, dir_name):
    '''Fetch the user's avatar image and cache it on disk.

    Returns (uk, username, image path), or None on failure.
    '''
    uk = pcs.get_user_uk(cookie, tokens)
    if not uk:
        return None
    user_info = pcs.get_user_info(tokens, uk)
    if not user_info:
        return None
    img_path = os.path.join(dir_name, 'avatar.jpg')
    # Reuse a cached avatar that is still fresh enough.
    if (os.path.exists(img_path) and time.time() - os.stat(img_path).st_mtime
            <= AVATAR_UPDATE_INTERVAL):
        return (uk, user_info['uname'], img_path)
    img_url = user_info['avatar_url']
    if not img_url:
        return None
    req = net.urlopen(img_url)
    if not req or not req.data:
        # Fix: the original logged the undefined name `url`, raising
        # NameError on this failure path; log img_url instead.
        logger.warn('gutil.update_avatar(), failed to request %s' % img_url)
        return None
    with open(img_path, 'wb') as fh:
        fh.write(req.data)
    return (uk, user_info['uname'], img_path)
コード例 #25
0
ファイル: gutil.py プロジェクト: hou-dao/bcloud
import dbus
from gi.repository import GdkPixbuf
from gi.repository import Gio
from gi.repository import Gtk
from gi.repository import GLib

from bcloud import Config
from bcloud.log import logger
from bcloud import net
from bcloud import util
try:
    import keyring
    # keyring_available gates all credential-store access elsewhere.
    keyring_available = True
except (ImportError, ValueError):
    # keyring may fail to import, or raise ValueError while initialising
    # its backend; fall back to running without a credential store.
    # NOTE(review): `traceback` must already be imported in this module
    # for this handler to work -- confirm against the full file.
    logger.warn(traceback.format_exc())
    keyring_available = False

DEFAULT_PROFILE = {
    'window-size': (960, 680),
    'use-status-icon': True,
    'use-dark-theme': False, # 默认禁用深色主题
    'use-notify': True,
    'first-run': True,
    'save-dir': Config.HOME_DIR,
    'use-streaming': True,  # 使用流媒体方式播放视频
    'username': '',
    'password': '',
    'remember-password': False,
    'auto-signin': False,
    'upload-hidden-files': True,  # 同时上传隐藏文件.
コード例 #26
0
def decode_qqdl(link):
    '''Decode a qqdl:// link into a plain URL.

    The text after the 7-byte "qqdl://" prefix is base64.
    '''
    # Fix: base64.decodestring() was removed in Python 3.9;
    # decodebytes() is the supported equivalent.
    payload = link[7:].encode()
    try:
        return base64.decodebytes(payload).decode()
    except ValueError:
        logger.warn(traceback.format_exc())
        # Some links carry GBK-encoded text instead of UTF-8.
        return base64.decodebytes(payload).decode('gbk')
コード例 #27
0
ファイル: gutil.py プロジェクト: kktt007/bcloud
def update_share_image(liststore, tree_iters, col, large_col, pcs_files,
                       dir_name, icon_size, large_icon_size):
    '''Download file thumbnails and show them in the liststore.

    Two image columns are updated at once, with different scaled sizes.
    pcs_files - contains the required fields for each file.
    dir_name  - cache directory; downloaded images are saved there.

    NOTE(review): `large_icon_size` is accepted but never used -- the
    full-size pixbuf goes into `large_col` unscaled; confirm intent.
    '''
    def update_image(filepath, tree_iter):
        # Runs on the GTK main loop via GLib.idle_add below.
        try:
            tree_path = liststore.get_path(tree_iter)
            if tree_path is None:
                return
            pix = GdkPixbuf.Pixbuf.new_from_file(filepath)
            width = pix.get_width()
            height = pix.get_height()
            # Scale to icon_size wide, preserving the aspect ratio.
            small_pix = pix.scale_simple(icon_size,
                                         height * icon_size // width,
                                         GdkPixbuf.InterpType.NEAREST)
            liststore[tree_path][col] = small_pix
            liststore[tree_path][large_col] = pix
        except GLib.GError:
            logger.error(traceback.format_exc())

    def dump_image(url, filepath):
        # Download url to filepath; True on success.
        try:
            req = request.urlopen(url, timeout=net.TIMEOUT)
            data = req.read()
        except OSError:
            # While the thumbnail is being generated the request 404s.
            return False
        except:
            # NOTE(review): bare except also catches SystemExit and
            # KeyboardInterrupt -- consider `except Exception`.
            logger.error(traceback.format_exc())
            return False

        if not data:
            logger.warn('update_share_image(): failed to request %s' % url)
            return False
        elif 'Content-Type' in req.headers:
            # Verify the payload is actually an image.
            ctype = req.getheader('Content-Type')
            if not ctype.startswith("image"):
                return False
        with open(filepath, 'wb') as fh:
            fh.write(data)
        return True

    for tree_iter, pcs_file in zip(tree_iters, pcs_files):
        fs_id = pcs_file['fs_id']
        filepath = os.path.join(dir_name, 'share-{0}.jpg'.format(fs_id))

        if os.path.exists(filepath) and os.path.getsize(filepath):
            if time.time() - os.stat(filepath).st_mtime < 30 * 24 * 60 * 60:
                # Only use a cached copy that is less than a month old.
                GLib.idle_add(update_image, filepath, tree_iter)
                continue
        if 'thumbs' in pcs_file and pcs_file['thumbs']:
            # Try the candidate thumbnail urls in order until one works.
            for key in ('url1', 'url2', 'url3'):
                if key not in pcs_file['thumbs']:
                    continue
                url = pcs_file['thumbs'][key]
                if not url or len(url) < 10:
                    logger.warn('update_share_image(): failed to get url')
                else:
                    status = dump_image(url, filepath)
                    if status:
                        GLib.idle_add(update_image, filepath, tree_iter)
                        break
コード例 #28
0
ファイル: Downloader.py プロジェクト: StevenLOL/bcloud
    def download(self):
        '''Download the file described by self.row.

        Splits the file into byte ranges, downloads them with
        DownloadBatch worker threads, keeps per-thread progress in a
        JSON status file so an interrupted download can resume, and
        emits signals to report progress and completion.
        '''
        row = self.row
        if not os.path.exists(row[SAVEDIR_COL]):
            os.makedirs(row[SAVEDIR_COL], exist_ok=True)
        filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
                row[SAVEDIR_COL], row[SAVENAME_COL])

        if os.path.exists(filepath):
            if self.download_mode == DownloadMode.IGNORE:
                self.emit('downloaded', row[FSID_COL])
                logger.debug('File exists, ignored!')
                return
            elif self.download_mode == DownloadMode.NEWCOPY:
                name, ext = os.path.splitext(filepath)
                filepath = '{0}_{1}{2}'.format(name, util.curr_time(), ext)

        url = pcs.get_download_link(self.cookie, self.tokens, row[PATH_COL])
        if not url:
            row[STATE_COL] = State.ERROR
            self.emit('network-error', row[FSID_COL])
            logger.warn('Failed to get url to download')
            return

        if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
            # Resume: reuse the saved per-thread status.
            with open(conf_filepath) as conf_fh:
                status = json.load(conf_fh)
            threads = len(status)
            file_exists = True
            fh = open(tmp_filepath, 'rb+')
            fh.seek(0)
        else:
            req = net.urlopen_simple(url)
            if not req:
                logger.warn('Failed to get url to download')
                self.emit('network-error', row[FSID_COL])
                return
            content_length = req.getheader('Content-Length')
            # Fixed: baiduPCS using non iso-8859-1 codec in http headers
            if not content_length:
                # Fix: raw string for the regex (invalid escape
                # sequences in a normal literal).
                match = re.search(r'\sContent-Length:\s*(\d+)',
                                  str(req.headers))
                if not match:
                    logger.warn('Failed to get url to download')
                    self.emit('network-error', row[FSID_COL])
                    return
                content_length = match.group(1)
            size = int(content_length)
            if size <= SMALL_FILE_SIZE:
                threads = 1
            else:
                threads = self.default_threads
            average_size, pad_size = divmod(size, threads)
            file_exists = False
            status = []
            fh = open(tmp_filepath, 'wb')
            try:
                fh.truncate(size)
            except (OSError, IOError):
                # Fix: the original called `truncate.format_exc()`, a
                # NameError; traceback.format_exc() was intended.
                logger.error(traceback.format_exc())
                # Fix: close the handle before bailing out (was leaked).
                fh.close()
                self.emit('disk-error', row[FSID_COL], tmp_filepath)
                return

        # task list
        tasks = []
        # message queue
        queue = Queue()
        # threads lock
        lock = threading.RLock()
        for id_ in range(threads):
            if file_exists:
                start_size, end_size, received = status[id_]
                if start_size + received >= end_size:
                    # part of file has been downloaded
                    continue
                start_size += received
            else:
                start_size = id_ * average_size
                end_size = start_size + average_size - 1
                if id_ == threads - 1:
                    # Last range absorbs the division remainder.
                    end_size = end_size + pad_size + 1
                status.append([start_size, end_size, 0])
            task = DownloadBatch(id_, queue, url, lock, start_size, end_size,
                                 fh, self.timeout)
            tasks.append(task)

        for task in tasks:
            task.start()

        try:
            conf_count = 0
            done = 0
            self.emit('started', row[FSID_COL])
            while row[STATE_COL] == State.DOWNLOADING:
                id_, received = queue.get()
                # FINISHED
                if received == BATCH_FINISISHED:
                    done += 1
                    if done == len(tasks):
                        row[STATE_COL] = State.FINISHED
                        break
                    else:
                        continue
                # error occurs
                elif received == BATCH_ERROR:
                    row[STATE_COL] = State.ERROR
                    break
                status[id_][2] += received
                conf_count += 1
                # flush data and status to disk
                if conf_count > THRESHOLD_TO_FLUSH:
                    with lock:
                        if not fh.closed:
                            fh.flush()
                    # Fix: this handle was named `fh`, shadowing the data
                    # file handle and leaving `fh` bound to a closed
                    # config file for the rest of the loop.
                    with open(conf_filepath, 'w') as conf_fh:
                        json.dump(status, conf_fh)
                    conf_count = 0
                received_total = sum(t[2] for t in status)
                self.emit('received', row[FSID_COL], received, received_total)
        except Exception:
            logger.error(traceback.format_exc())
            row[STATE_COL] = State.ERROR
        with lock:
            if not fh.closed:
                fh.close()
        for task in tasks:
            # Fix: Thread.isAlive() was removed in Python 3.9; use
            # is_alive().
            if task.is_alive():
                task.stop()
        with open(conf_filepath, 'w') as conf_fh:
            json.dump(status, conf_fh)

        if row[STATE_COL] == State.CANCELED:
            os.remove(tmp_filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
        elif row[STATE_COL] == State.ERROR:
            self.emit('network-error', row[FSID_COL])
        elif row[STATE_COL] == State.FINISHED:
            self.emit('downloaded', row[FSID_COL])
            os.rename(tmp_filepath, filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
コード例 #29
0
        def on_load_url(filelist, error=None):
            """Fill the liststore from a freshly loaded share page.

            Callback of the async share-url loader: ``filelist`` is a list
            of file dicts from the share page, ``error`` is set on failure.
            Stale responses (timestamp mismatch) are ignored.
            """
            self.url_entry.props.secondary_icon_name = REFRESH_ICON
            if timestamp != self.url_entry.timestamp:
                logger.debug('SharePage.load_url, dirname not match, ignored')
                return
            if error:
                self.app.toast(
                    _('Failed to get files, please reload this page'))
                logger.warn('SharePage.load_url: %s, %s, %s' %
                            (self.curr_url, filelist, error))
                self.has_next = False
                return
            checked = self.select_all_button.get_active()
            tree_iters = []

            # Insert a '..' row; activating it returns to the parent directory.
            if self.dirname and self.dirname != '/':
                parent_dirname = os.path.dirname(self.dirname)
                icon, icon_type = self.app.mime.get(parent_dirname, True,
                                                    icon_size=ICON_SIZE)
                large_icon, icon_type = self.app.mime.get(
                    parent_dirname, True, icon_size=LARGE_ICON_SIZE)
                self.liststore.append([checked, icon, large_icon, '..',
                                       parent_dirname, True, 0, '0', 0, ''])

            for file_ in filelist:
                # 'isdir' may arrive as the string '1' or the int 1.
                isdir = file_['isdir'] in ('1', 1)
                path = file_['path']
                icon, icon_type = self.app.mime.get(path, isdir,
                                                    icon_size=ICON_SIZE)
                large_icon, icon_type = self.app.mime.get(
                    path, isdir, icon_size=LARGE_ICON_SIZE)
                size = int(file_.get('size', 0))
                mtime = int(file_.get('server_mtime', 0))
                tree_iters.append(self.liststore.append([
                    checked,
                    icon,
                    large_icon,
                    file_['server_filename'],
                    path,
                    isdir,
                    size,
                    util.get_human_size(size)[0],
                    mtime,
                    time.ctime(mtime),
                ]))
            cache_path = Config.get_cache_path(self.app.profile['username'])
            # Fetch real thumbnails asynchronously and patch them in later.
            gutil.async_call(gutil.update_share_image, self.liststore,
                             tree_iters, ICON_COL, LARGE_ICON_COL, filelist,
                             cache_path, ICON_SIZE, LARGE_ICON_SIZE)
コード例 #30
0
    def download(self):
        """Download the file described by ``self.row`` into its save dir.

        Supports resuming: if both a conf file (JSON list of
        ``[start, end, received]`` per worker) and a partial tmp file
        exist, the transfer continues from the recorded offsets.
        Emits 'started' / 'received' / 'downloaded' / 'network-error' /
        'disk-error' signals as the transfer progresses.
        """
        row = self.row
        if not os.path.exists(row[SAVEDIR_COL]):
            os.makedirs(row[SAVEDIR_COL], exist_ok=True)
        filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
            row[SAVEDIR_COL], row[SAVENAME_COL])

        if os.path.exists(filepath):
            if self.download_mode == DownloadMode.IGNORE:
                self.emit('downloaded', row[FSID_COL])
                logger.debug('File exists, ignored!')
                return
            elif self.download_mode == DownloadMode.NEWCOPY:
                name, ext = os.path.splitext(filepath)
                filepath = '{0}_{1}{2}'.format(name, util.curr_time(), ext)

        url = pcs.get_download_link(self.cookie, self.tokens, row[PATH_COL])
        if not url:
            row[STATE_COL] = State.ERROR
            self.emit('network-error', row[FSID_COL])
            logger.warn('Failed to get url to download')
            return

        if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
            # Resume: reload per-worker offsets from the conf file.
            with open(conf_filepath) as conf_fh:
                status = json.load(conf_fh)
            threads = len(status)
            file_exists = True
            fh = open(tmp_filepath, 'rb+')
            fh.seek(0)
        else:
            req = net.urlopen_simple(url)
            if not req:
                logger.warn('Failed to get url to download')
                self.emit('network-error', row[FSID_COL])
                return
            content_length = req.getheader('Content-Length')
            # Fixed: baiduPCS using non iso-8859-1 codec in http headers
            if not content_length:
                match = re.search(r'\sContent-Length:\s*(\d+)',
                                  str(req.headers))
                if not match:
                    logger.warn('Failed to get url to download')
                    self.emit('network-error', row[FSID_COL])
                    return
                content_length = match.group(1)
            size = int(content_length)
            if size == 0:
                # Empty file: just create it and finish.
                open(filepath, 'a').close()
                self.emit('downloaded', row[FSID_COL])
                return
            elif size <= SMALL_FILE_SIZE:
                threads = 1
            else:
                threads = self.default_threads
            average_size, pad_size = divmod(size, threads)
            file_exists = False
            status = []
            fh = open(tmp_filepath, 'wb')
            try:
                # Pre-allocate the full size so workers can seek+write.
                fh.truncate(size)
            except (OSError, IOError):
                # FIX: was `truncate.format_exc()`, a NameError that masked
                # the real disk error.
                logger.error(traceback.format_exc())
                self.emit('disk-error', row[FSID_COL], tmp_filepath)
                return

        # task list
        tasks = []
        # message queue
        queue = Queue()
        # threads lock, guards the shared tmp-file handle
        lock = threading.RLock()
        for id_ in range(threads):
            if file_exists:
                start_size, end_size, received = status[id_]
                if start_size + received >= end_size:
                    # this part of the file has already been downloaded
                    continue
                start_size += received
            else:
                start_size = id_ * average_size
                end_size = start_size + average_size - 1
                if id_ == threads - 1:
                    end_size = end_size + pad_size + 1
                status.append([start_size, end_size, 0])
            task = DownloadBatch(id_, queue, url, lock, start_size, end_size,
                                 fh, self.timeout)
            tasks.append(task)

        for task in tasks:
            task.start()

        try:
            conf_count = 0
            done = 0
            self.emit('started', row[FSID_COL])
            while row[STATE_COL] == State.DOWNLOADING:
                id_, received = queue.get()
                # FINISHED
                if received == BATCH_FINISISHED:
                    done += 1
                    if done == len(tasks):
                        row[STATE_COL] = State.FINISHED
                        break
                    else:
                        continue
                # error occurs
                elif received == BATCH_ERROR:
                    row[STATE_COL] = State.ERROR
                    break
                status[id_][2] += received
                conf_count += 1
                # flush data and status to disk
                if conf_count > THRESHOLD_TO_FLUSH:
                    with lock:
                        if not fh.closed:
                            fh.flush()
                    # FIX: the original reused the name `fh` here, shadowing
                    # the data-file handle; later flush/close then acted on
                    # the closed conf handle and the tmp file was leaked.
                    with open(conf_filepath, 'w') as conf_fh:
                        json.dump(status, conf_fh)
                    conf_count = 0
                received_total = sum(t[2] for t in status)
                self.emit('received', row[FSID_COL], received, received_total)
        except Exception:
            logger.error(traceback.format_exc())
            row[STATE_COL] = State.ERROR
        with lock:
            if not fh.closed:
                fh.close()
        for task in tasks:
            if task.isAlive():
                task.stop()
        # Persist final per-worker status for a possible later resume.
        with open(conf_filepath, 'w') as conf_fh:
            json.dump(status, conf_fh)

        if row[STATE_COL] == State.CANCELED:
            if os.path.exists(tmp_filepath):
                os.remove(tmp_filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
        elif row[STATE_COL] == State.ERROR:
            self.emit('network-error', row[FSID_COL])
        elif row[STATE_COL] == State.FINISHED:
            self.emit('downloaded', row[FSID_COL])
            os.rename(tmp_filepath, filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
コード例 #31
0
import traceback

from bcloud import Config
from bcloud.log import logger
from bcloud import net
from bcloud import pcs
from bcloud import util
# Detect whether a working keyring backend is available.  Importing
# keyring can succeed while the backend is still unusable, so probe it
# with a full set/get/delete round trip.
try:
    import keyring
    keyring_available = True
    try:
        keyring.set_password("test", "utest", "ptest")
        keyring.get_password("test", "utest")
        keyring.delete_password("test", "utest")
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception is broad enough for backend probing.
        keyring_available = False
except (ImportError, ValueError):
    logger.warn(traceback.format_exc())
    keyring_available = False

DEFAULT_PROFILE = {
    'window-size': (960, 680),
    'use-status-icon': True,
    # 启动时自动最小化
    'startup-minimized': False,
    # 默认禁用深色主题
    'use-dark-theme': False,
    # 是否显示用户头像
    'display-avatar': True,
    'use-notify': True,
    'first-run': True,
    'username': '',
    'password': '',
コード例 #32
0
ファイル: decoder.py プロジェクト: 1060460048/bcloud
def decode_qqdl(link):
    """Decode a ``qqdl://`` link into its underlying URL.

    The payload after the 7-char ``qqdl://`` prefix is base64; it is
    normally UTF-8 text, with a GBK fallback for links produced by
    older clients.
    """
    # FIX: base64.decodestring was removed in Python 3.9; decodebytes is
    # its exact, long-standing replacement.
    try:
        return base64.decodebytes(link[7:].encode()).decode()
    except ValueError:
        # binascii.Error and UnicodeDecodeError are both ValueError
        # subclasses, so this covers bad base64 and bad UTF-8 alike.
        logger.warn(traceback.format_exc())
        return base64.decodebytes(link[7:].encode()).decode('gbk')