Example #1
    def add_remote_download_task(self, link):
        """
            Add an offline download link to Baidu Netdisk.
        """

        ruokuai_captcha_handler = functools.partial(
            self.ruokuai_captcha_handler, self.captcha_params)

        # Initialize PCS with the captcha handler and its parameters
        pcs = PCS(
            self.baidu_username,
            self.baidu_password,
            ruokuai_captcha_handler,
            None,
        )

        # List files already in the download path to avoid adding duplicates
        rsp = pcs.list_files(BASE_PATH)
        result = rsp.json()
        exist_list = []
        if result['errno'] == 0:
            exist_list = result['list']
        else:
            print(json.dumps(result))

        exist_names = [exist['server_filename'] for exist in exist_list]

        if link not in exist_names:
            # Only add the link if it is not already in the netdisk
            pcs.add_download_task(link, BASE_PATH)
        else:
            print(link + ' already exists in the netdisk')
Example #2
 def __init__(self):
     print("BaiduYun")
     self.username = input("Username: ")
     print("initialize BaiduYun for %s " % self.username)
     print("you may need to recognize a photo and input the message code")
     self.password = getpass.getpass()
     self.pcs = PCS(self.username, self.password)
Example #3
def upload(file):
    pcs = PCS('username', 'userpwd')
    remote_path = '/apps/raspi/' + '-'.join(file.split('-')[:3])
    with open(file, 'rb') as f:
        try:
            pcs.upload(remote_path, f, file)
        except:
            pass
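For instance, calling upload('sensor-2024-01-readings.csv') (a hypothetical file name) would send the file into the /apps/raspi/sensor-2024-01 directory under its original name, since the remote directory is built from the first three dash-separated parts of the name.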
Example #4
 def test_download(self):
     pcs = PCS(self.username, self.password)
     print 'Quota :'
     pprint(literal_eval( pcs.quota().content))
     headers = {'Range': 'bytes=0-99'}
     r = pcs.download('/test.txt', headers=headers)
     print '/test.txt content:'
     print r.content
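A fuller sketch of ranged downloading (the helper name, chunk size and paths are illustrative; it relies only on the meta() and download() calls shown in these examples):

import json

def download_in_ranges(pcs, remote_path, local_path, chunk=1024 * 1024):
    # total size comes from the file's meta info, as in the do_meta examples below
    size = json.loads(pcs.meta([remote_path]).content)['info'][0]['size']
    with open(local_path, 'wb') as out:
        offset = 0
        while offset < size:
            headers = {'Range': 'bytes=%d-%d' % (offset, min(offset + chunk, size) - 1)}
            out.write(pcs.download(remote_path, headers=headers).content)
            offset += chunk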
Example #5
 def __init__(self, filepath, username, password):
     """
     Login
     """
     (self.filepath, self.filename,
      self.dirname, self.filesize) = (filepath, os.path.basename(filepath),
                                      os.path.dirname(filepath),
                                      os.path.getsize(filepath))
     self.path = self.dirname + '\\' + self.filename.split('.')[0]
     self.pcs = PCS(username, password)  #Login
Example #6
class BaiduYun:
    def __init__(self):
        print("BaiduYun")
        self.username = input("Username: ")
        print("initialize BaiduYun for %s " % self.username)
        print("you may need to recognize a photo and input the message code")
        self.password = getpass.getpass()
        self.pcs = PCS(self.username, self.password)

    def byte2json(self, content):
        content = str(content, 'utf-8')
        json_str = json.loads(content)
        return json_str

    def upload(self, localFilename, remoteDir):
        if self.exists(os.path.join(remoteDir,
                                    os.path.basename(localFilename))):
            # print("file exists")
            return
        f = open(localFilename, 'rb')
        content = f.read()
        f.close()
        try:
            ret = self.pcs.upload(remoteDir, content,
                                  os.path.basename(localFilename))
        except:
            print("\nfailed to upload %s" % localFilename)
            pass

    def mkdir(self, remoteDir):
        try:
            if not self.exists(remoteDir):
                self.pcs.mkdir(remoteDir)
        except:
            pass

    def exists(self, remoteFilename):
        res = False
        try:
            remoteDir = os.path.dirname(remoteFilename)
            filelist = self.byte2json(self.pcs.list_files(remoteDir).content)
            for item in filelist["list"]:
                # print(item["path"])
                if remoteFilename == item["path"]:
                    return True
        except:
            pass
        return res
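A short usage sketch for the class above (folder and file names are placeholders):

yun = BaiduYun()                     # prompts for username and password
yun.mkdir('/backup')                 # no-op if the folder already exists
yun.upload('photo.jpg', '/backup')   # silently skipped if the file is already there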
Example #7
 def do_login(self, args, opts):
     print 'logging in, please wait ...'
     self.pcs = PCS(opts.username, opts.password, captcha_callback = handle_captcha)
     # self.pcs.get_fastest_pcs_server()
     res = {}
     for retry in range(3):
         res = json.loads(self.pcs.quota().content)
         if res['errno'] == 0:
             break
         else:
             res = {}
             time.sleep(retry+1)
     if res.get('errno') == 0:
         print 'Login success. storage used: %s/%s' % (readable_size(res['used']), readable_size(res['total']))
     else:
         print 'login failed: %r' % res
Example #8
def _get_pcs(conf):
    def _captcha_callback(img_url):
        code = _recognize_img(img_url)
        if code:
            return code

        logger.info('%s\nOpen url above, then input verify code.' % img_url)
        return raw_input('captcha> ')

    global _pcs
    if _pcs:
        return _pcs

    try:
        username = conf.get('baidupan', 'username')
        password = conf.get('baidupan', 'password')
        if not username or not password:
            logger.error('No username or password found.')
            sys.exit(-1)

        # _pcs = PCS(username, password,
        #            captcha_callback=_captcha_callback)
        _pcs = PCS(username, password)

        logger.info('Baidupan login successfully.')

        return _pcs
    except LoginFailed:
        import traceback
        traceback.print_exc()
        logger.error('Failed to login in baidupan.')
        sys.exit(-1)
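The helper above expects a ConfigParser-style object with a [baidupan] section; a minimal sketch of wiring it up (the config file name is hypothetical):

import ConfigParser

conf = ConfigParser.ConfigParser()
conf.read('baidupan.cfg')  # must define [baidupan] with username and password
pcs = _get_pcs(conf)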
Example #9
File: blend.py Project: xete/bdnd
	def __init__(self, mode='non-interactive'):
		'''
		mode: interactive or not
		'''
		try:
			username = raw_input('username: ')
			os.system('stty -echo')
			password = raw_input('password: ')
			print '\n'
			os.system('stty echo')
			self.env = PCS(username, password)
		except:
			return None
		if not self.env:
			return None
		self._parse = json_parser.parser().parse
		self._cwd = '/'
		self._dirs = {}
		self._commands = {
			'ls': self._list_files,
			'readlink': self._download_url,
			'cd': self._change_dir,
		}
		self._mode = mode
		if self._mode == 'interactive':
			self._loop()
Example #10
 def do_login(self, args, opts):
     print 'logging in, please wait ...'
     self.pcs = PCS(opts.username, opts.password, captcha_callback = handle_captcha)
     self.pcs.get_fastest_pcs_server()
     res = json.loads(self.pcs.quota().content)
     if res['errno'] != 0:
         self.pcs = None
         print 'login failed: %r' % res
         return
     print 'Login success. storage used: %s/%s' % (readable_size(res['used']), readable_size(res['total']))
Example #11
    def add_remote_download_task(self, link):
        """
            Add an offline download link to Baidu Netdisk.
        """

        ruokuai_captcha_handler = functools.partial(
            self.ruokuai_captcha_handler,
            self.captcha_params
        )

        # Initialize PCS with the captcha handler and its parameters
        pcs = PCS(
            self.baidu_username,
            self.baidu_password,
            ruokuai_captcha_handler,
            None,
        )

        # List files already in the download path to avoid adding duplicates
        rsp = pcs.list_files(BASE_PATH)
        result = rsp.json()
        exist_list = []
        if result['errno'] == 0:
            exist_list = result['list']
        else:
            print(json.dumps(result))

        exist_names = [exist['server_filename'] for exist in exist_list]

        if link not in exist_names:
            # Only add the link if it is not already in the netdisk
            pcs.add_download_task(
                link,
                BASE_PATH
            )
        else:
            print(link + ' already exists in the netdisk')
Example #12
    def __init__(self, username, password, *args, **kw):
        self.disk = PCS(username, password)
        self.buffer = {}
        self.traversed_folder = {}
        self.bufferLock = Lock()
        self.upload_blocks = {}  # md5 of each block recorded during upload, {PATH: {TMP: '', BLOCKS: ''}}
        self.create_tmp = {}  # {goutputstrem_path:file}
        self.upload_fails = {}  #
        self.fd = 3
        # Initialize the Baidu servers
        print 'selecting pcs server'
        pcs = self.disk.get_fastest_pcs_server()
        self.disk.set_pcs_server(pcs)
        print 'pcs api server:', pcs
        '''
        print 'selecting the Baidu netdisk server, this takes a while, please be patient'
        pan = self.disk.get_fastest_mirror()
        self.disk.set_pan_server(pan)
        print 'baidupan server',pan
        '''

        self.uploadLock = Lock()  # do not refresh the directory while a file is uploading
        self.readLock = Lock()
        self.downloading_files = []
Example #13
    def __init__(self, username, password, *args, **kw):
        self.disk = PCS(username, password,captcha_get)
        self.buffer = {}
        self.traversed_folder = {}
        self.bufferLock = Lock()
        self.upload_blocks = {} # md5 of each block recorded during upload, {PATH: {TMP: '', BLOCKS: ''}}
        self.create_tmp = {} # {goutputstrem_path:file}
        self.upload_fails = {} #
        self.fd = 3
        # Initialize the Baidu servers
        print 'selecting pcs server'
        pcs = self.disk.get_fastest_pcs_server()
        self.disk.set_pcs_server(pcs)
        print 'pcs api server:',pcs
        '''
        print 'selecting the Baidu netdisk server, this takes a while, please be patient'
        pan = self.disk.get_fastest_mirror()
        self.disk.set_pan_server(pan)
        print 'baidupan server',pan
        '''

        self.uploadLock = Lock() # do not refresh the directory while a file is uploading
        self.readLock = Lock()
        self.downloading_files = []
Example #14
class BaiduPan(Cmd):
    prompt = colored('dupan', 'yellow', attrs = ['bold']) + colored(' >> ', 'red', attrs = ['bold'])
    completekey = 'tab'
    editor = 'vim'
    timing = False
    debug = True

    download_root = os.path.join(CWD, 'download')
    cwd = '/'
    dirs = {}
    pcs = None

    def __init__(self):
        Cmd.__init__(self)
        try:
            import readline
            readline.set_completer_delims(' \t\n"') # initially it was ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?', but I don't want to break on too many
        except:
            pass

    @options([make_option('-u', '--username', help="specify username"),
              make_option('-p', '--password',help="specify password"),
             ])
    def do_login(self, args, opts):
        print 'logging in, please wait ...'
        self.pcs = PCS(opts.username, opts.password, captcha_callback = handle_captcha)
        self.pcs.get_fastest_pcs_server()
        res = json.loads(self.pcs.quota().content)
        if res['errno'] != 0:
            self.pcs = None
            print 'login failed: %r' % res
            return
        print 'Login success. storage used: %s/%s' % (readable_size(res['used']), readable_size(res['total']))

    def do_cd(self, args):
        if type(args) == type([]):
            args = args[0]

        if not isinstance(args, basestring) or not args:
            print 'cd /path/to/dir'
            return

        if args.startswith('/'):
            self.cwd = args
        else:
            self.cwd = os.path.join(self.cwd, args)
        self.cwd = os.path.normpath(self.cwd)

    def do_timing(self, args):

        if not args:
            print 'timing on|off'
            return

        if args.lower() == 'on':
            self.timing = True
        elif args.lower() == 'off':
            self.timing = False
        else:
            print 'timing on|off'
            return

    def do_pwd(self, args):
        print self.cwd

    def do_saveto(self, args):
        path = args

        if not path:
            print 'current download root: %s' % self.download_root
            return

        if not path.startswith('/'):
            path = os.path.normpath(os.path.join(os.getcwd(), path))

        self.download_root = path
        print 'will save to %s' % path

    def do_ls(self, args):
        if not self.pcs:
            print 'please login first'
            return

        if not args:
            path = self.cwd
        else:
            path = args

        path = os.path.normpath(os.path.join(self.cwd, path))

        print path
        res = json.loads(self.pcs.list_files(path).content)

        # print json.dumps(res, indent = 4)
        '''
        {
            "isdir": 1,
            "category": 6,
            "server_filename": "cs",
            "local_mtime": 1395372049,
            "server_ctime": 1395372049,
            "server_mtime": 1395372049,
            "fs_id": 640464281820244,
            "path": "/cs",
            "size": 0,
            "local_ctime": 1395372049
        }
        '''

        if res.get('errno', None) != 0:
            log('invalid response: %r' % res, 'yellow')
            return

        print 'total %d' % len(res.get('list', []))

        content = []
        cnt = 0
        lst = res.get('list', [])
        idxsz = len(str(len(lst)-1))
        sizes = []
        sizesz = 0
        for fsitem in lst:
            t = readable_size(fsitem.get('size'))
            if len(t) > sizesz:
                sizesz = len(t)
            sizes.append(t)
        for i, fsitem in enumerate(lst):
            print '[ %s ]  %s  %s  %s   %s' % (str(cnt).rjust(idxsz), fsitem.get('isdir', 0) and 'd' or '-', sizes[i].ljust(sizesz, ' '), datetime.datetime.fromtimestamp(fsitem.get('server_mtime', 0)).strftime('%Y-%m-%d_%H:%M:%S'), colored(fsitem.get('server_filename'), fsitem.get('isdir', 0) and 'cyan' or 'white', attrs = ['bold']) + (fsitem.get('isdir', 0) and '/' or ''))
            cnt += 1
            content.append(fsitem.get('server_filename'))

        self.dirs[path] = lst

    @options([make_option('-i', '--index', help="the file index to operate on, separated with commas, e.g. 3,5,2; ranges are also supported, e.g. 1-4,5,7"),
             ])
    def do_meta(self, args, opts):
        if not self.pcs:
            print 'please login first'
            return

        args = split_command_line(args)

        fps = []
        if opts.index:
            if not self.dirs.get(self.cwd):
                print 'please use `ls` to list dir first to let me know which files you want to operate'
                return
            try:
                indexes = parse_index_param(opts.index, len(self.dirs.get(self.cwd)))
                fps = [self.dirs.get(self.cwd)[i]['server_filename'] for i in indexes]
            except Exception, ex:
                print ex
                return

        final = fps + args

        for path in final:
            path = os.path.normpath(os.path.join(self.cwd, path))

            print path
            o = json.loads(self.pcs.meta([path]).content)

            if o.get('errno', None) != 0:
                print ('invalid request: %r' % o)
                return

            size = o['info'][0]['size']

            info = o['info'][0]
            for k in info:
                print colored(k + ': ', 'cyan'), colored(info[k], 'white')
Example #15
                i=suffixs.index(suffix)
                addSuffix(name)
                print(name)
            except :
                pass

#addSuffixInFolders("/home/guci/我的坚果云")
addSuffixInFolders("/home/guci/share/baidunetdisk")
addSuffixInFolders("/home/guci/projects")
addSuffixInFolders("/home/guci/gdrive/all/")
print('finished')


#%%
from baidupcsapi import PCS
pcs = PCS('*****@*****.**','guci700127')
print (pcs.quota().content)
#print (pcs.list_files('/').content)


#%%
def indexFile(file):
    print(file)
def listFolder(folder):
    print(folder)
    try:
        elements= json.loads(pcs.list_files(folder).content.decode())["list"] 
        #pcs.list_files('/',"name","asc").json().get("list")
        for element in elements:
            if element["isdir"]==1:
                listFolder(element["path"])
Example #16
class BaiduPan(Cmd):
    prompt = colored('dupan', 'yellow', attrs = ['bold']) + colored(' >> ', 'red', attrs = ['bold'])
    completekey = 'tab'
    editor = 'vim'
    timing = False
    debug = True

    download_root = ROOT
    cwd = '/'
    dirs = {}
    pcs = None

    def __init__(self):
        Cmd.__init__(self)
        try:
            import readline
            readline.set_completer_delims(' \t\n"') # initially it was ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?', but I don't want to break on too many
        except:
            pass

    @options([make_option('-u', '--username', help="specify username"),
              make_option('-p', '--password',help="specify password"),
             ])
    def do_login(self, args, opts):
        print 'logging in, please wait ...'
        self.pcs = PCS(opts.username, opts.password, captcha_callback = handle_captcha)
        # self.pcs.get_fastest_pcs_server()
        res = {}
        for retry in range(3):
            res = json.loads(self.pcs.quota().content)
            if res['errno'] == 0:
                break
            else:
                res = {}
                time.sleep(retry+1)
        if res.get('errno') == 0:
            print 'Login success. storage used: %s/%s' % (readable_size(res['used']), readable_size(res['total']))
        else:
            print 'login failed: %r' % res

    def do_cd(self, args):
        if type(args) == type([]):
            args = args[0]

        if not isinstance(args, basestring) or not args:
            print 'cd /path/to/dir'
            return

        if args.startswith('/'):
            self.cwd = args
        else:
            self.cwd = os.path.join(self.cwd, args)
        self.cwd = os.path.normpath(self.cwd)

    def do_timing(self, args):

        if not args:
            print 'timing on|off'
            return

        if args.lower() == 'on':
            self.timing = True
        elif args.lower() == 'off':
            self.timing = False
        else:
            print 'timing on|off'
            return

    def do_pwd(self, args):
        print self.cwd

    def do_saveto(self, args):
        path = args

        if not path:
            print 'current download root: %s' % self.download_root
            return

        if not path.startswith('/'):
            path = os.path.normpath(os.path.join(os.getcwd(), path))

        self.download_root = path
        print 'will save to %s' % path

    def do_ls(self, args):
        if not self.pcs:
            print 'please login first'
            return

        if not args:
            path = self.cwd
        else:
            path = args

        path = os.path.normpath(os.path.join(self.cwd, path))

        print path
        res = json.loads(self.pcs.list_files(path).content)

        # print json.dumps(res, indent = 4)
        '''
        {
            "isdir": 1,
            "category": 6,
            "server_filename": "cs",
            "local_mtime": 1395372049,
            "server_ctime": 1395372049,
            "server_mtime": 1395372049,
            "fs_id": 640464281820244,
            "path": "/cs",
            "size": 0,
            "local_ctime": 1395372049
        }
        '''

        if res.get('errno', None) != 0:
            log('invalid response: %r' % res, 'yellow')
            return

        print 'total %d' % len(res.get('list', []))

        content = []
        cnt = 0
        lst = res.get('list', [])
        idxsz = len(str(len(lst)-1))
        sizes = []
        sizesz = 0
        for fsitem in lst:
            t = readable_size(fsitem.get('size'))
            if len(t) > sizesz:
                sizesz = len(t)
            sizes.append(t)
        for i, fsitem in enumerate(lst):
            print '[ %s ]  %s  %s  %s   %s' % (str(cnt).rjust(idxsz), fsitem.get('isdir', 0) and 'd' or '-', sizes[i].ljust(sizesz, ' '), datetime.datetime.fromtimestamp(fsitem.get('server_mtime', 0)).strftime('%Y-%m-%d_%H:%M:%S'), colored(fsitem.get('server_filename'), fsitem.get('isdir', 0) and 'cyan' or 'white', attrs = ['bold']) + (fsitem.get('isdir', 0) and '/' or ''))
            cnt += 1
            content.append(fsitem.get('server_filename'))

        self.dirs[path] = lst

    @options([make_option('-i', '--index', help="the file index to operate on, separated with commas, e.g. 3,5,2; ranges are also supported, e.g. 1-4,5,7"),
             ])
    def do_meta(self, args, opts):
        if not self.pcs:
            print 'please login first'
            return

        args = split_command_line(args)

        fps = []
        if opts.index:
            if not self.dirs.get(self.cwd):
                print 'please use `ls` to list dir first to let me know which files you want to operate'
                return
            try:
                indexes = parse_index_param(opts.index, len(self.dirs.get(self.cwd)))
                fps = [self.dirs.get(self.cwd)[i]['server_filename'] for i in indexes]
            except Exception, ex:
                print ex
                return

        final = fps + args

        for path in final:
            path = os.path.normpath(os.path.join(self.cwd, path))

            print path
            o = json.loads(self.pcs.meta([path]).content)

            if o.get('errno', None) != 0:
                print ('invalid request: %r' % o)
                return

            size = o['info'][0]['size']

            info = o['info'][0]
            for k in info:
                print colored(k + ': ', 'cyan'), colored(info[k], 'white')
Example #17
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Upload a file to Baidu Netdisk
>>> python upload_to_baidu_wangpan.py data/finance_news/finance_news.2017-05-14.csv.bz2 /Crawler/stock/finance_news/
"""

import sys
import os, json, sys, tempfile
from baidupcsapi import PCS

pcs = PCS('username', 'password')
chinksize = 1024 * 1024 * 16
fid = 1
md5list = []
tmpdir = tempfile.mkdtemp('bdpcs')
with open(sys.argv[1], 'rb') as infile:
    while 1:
        data = infile.read(chinksize)
        if len(data) == 0: break
        smallfile = os.path.join(tmpdir, 'tmp%d' % fid)
        with open(smallfile, 'wb') as f:
            f.write(data)
        print('chunk%d size %d' % (fid, len(data)))
        fid += 1
        print('start uploading...')
        ret = pcs.upload_tmpfile(open(smallfile, 'rb'))
        md5list.append(json.loads(ret.content)['md5'])
        print('md5: %s' % (md5list[-1]))
        os.remove(smallfile)
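The loop above only collects the md5 of each chunk; as Example #20 further down shows, the chunks still have to be merged into the target file with upload_superfile. A sketch of the missing final step, assuming the remote directory is passed as the second command-line argument as the docstring suggests:

os.rmdir(tmpdir)
pcs.upload_superfile(os.path.join(sys.argv[2], os.path.basename(sys.argv[1])), md5list)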
Example #18
class BaiduPCS(object):
    """
    Baidu disk uploader.
    """
    def __init__(self, filepath, username, password):
        """
        Login
        """
        (self.filepath, self.filename,
         self.dirname, self.filesize) = (filepath, os.path.basename(filepath),
                                         os.path.dirname(filepath),
                                         os.path.getsize(filepath))
        self.path = self.dirname + '\\' + self.filename.split('.')[0]
        self.pcs = PCS(username, password)  #Login

    def create_upload(self, num):
        self.uplog['md5'][num] = (json.loads(
            self.pcs.upload_tmpfile(self.block(
                (num - 1) * self.chinksize)).content)['md5'])
        self.count += 1
        with open(self.dirname + '\\' + self.filename.split('.')[0] + '.json',
                  'w') as self.new_uplog:
            json.dump(self.uplog, self.new_uplog)
        print('[' + str(self.count) + '/' + str(self.fid) +
              ' Uploaded   BlockID: ' + str(num) + '   md5: ' +
              self.uplog['md5'][num] + ']')

    def read_uplog(self):
        if os.path.exists(self.dirname + '\\' + self.filename.split('.')[0] +
                          '.json'):
            with open(
                    self.dirname + '\\' + self.filename.split('.')[0] +
                    '.json', 'r') as self.uplog_file:
                self.uplog = json.load(self.uplog_file)
            tmp_dict = {}
            for i in sorted(self.uplog['md5'].keys()):
                tmp_dict[int(i)] = self.uplog['md5'][i]
            self.uplog['md5'] = tmp_dict
        else:
            self.uplog_file = open(
                self.dirname + '\\' + self.filename.split('.')[0] + '.json',
                'w')
            self.uplog = {'block': 0, 'size': 0, 'md5': {}}

    def block(self, location=None):
        if location == None:
            return math.ceil(os.path.getsize(self.filepath) / self.chinksize)
        file = open(self.filepath, 'rb')
        file.seek(location, 0)
        return io.BytesIO(file.read(self.chinksize))

    def upload(self):
        """
        Baidu upload module
        """
        self.read_uplog()

        if int(self.uplog['size']) == 0:
            self.chinksize = 1024 * 1024 * 24
            self.uplog['size'] = self.chinksize
        else:
            self.chinksize = self.uplog['size']

        self.thread_num = 25

        if int(self.uplog['block']) == 0:
            self.fid = self.block()

        self.count = len(self.uplog['md5'])

        with open(self.dirname + '\\' + self.filename.split('.')[0] + '.json',
                  'w') as self.new_uplog:
            json.dump(self.uplog, self.new_uplog)

        print('start uploading...')
        self.threads = []
        for i in range(self.fid):
            if not i + 1 in self.uplog['md5']:
                while len(threading.enumerate()) - 1 >= self.thread_num:
                    time.sleep(1)
                self.t = threading.Thread(target=self.create_upload,
                                          kwargs={'num': i + 1})
                self.t.setDaemon(True)
                self.t.start()
                self.threads.append(self.t)

        for self.thread in self.threads:
            self.thread.join()

    def superfile(self):
        self.pcs.upload_superfile('/' + self.filename,
                                  [(self.uplog['md5'][k])
                                   for k in sorted(self.uplog['md5'].keys())])

    def CheckUpload(self):
        """
        Check upload status.
        Retry if file not uploaded.
        """
        if not self.fid == len(self.uplog['md5']):
            return 0
        return 1

    def quota_remaining(self):
        self.quota_info = json.loads(self.pcs.quota().content.decode(
            "utf-8", "ignore"))
        self.remaining = self.quota_info['total'] - self.quota_info['used']
        return self.remaining
Example #19
from baidupcsapi import PCS
pcs = PCS('17624071108', '')
print(pcs.quota().content)
print(pcs.list_files('/').content)
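A slightly fuller sketch that decodes the responses instead of printing raw bytes (credentials are placeholders; the 'used'/'total' and 'list' fields match the responses handled in the other examples):

import json
from baidupcsapi import PCS

pcs = PCS('username', 'password')
quota = json.loads(pcs.quota().content)
print('used %s of %s bytes' % (quota['used'], quota['total']))
files = json.loads(pcs.list_files('/').content).get('list', [])
print([f['server_filename'] for f in files])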
Example #20
    with open(filePath, 'rb') as infile:
        while 1:
            data = infile.read(chunkSize)
            if len(data) == 0: break
            smallfile = os.path.join(tmpDir, 'tmp%d' % fid)
            with open(smallfile, 'wb') as f:
                f.write(data)
            print('chunk%d size %d' % (fid, len(data)))
            fid += 1
            print('start uploading...')
            ret = pcs.upload_tmpfile(open(smallfile, 'rb'),
                                     callback=ProgressBar())
            md5list.append(json.loads(ret.content)['md5'])
            print('md5: %s' % (md5list[-1]))
            os.remove(smallfile)

    os.rmdir(tmpDir)
    pcs.upload_superfile(os.path.join(remotePath, os.path.basename(filePath)),
                         md5list)


pcs = PCS('18661168001', 'Carl656693')
chunkSize = 1024 * 1024 * 256
tmpDir = tempfile.mkdtemp('bdpcs')
remotePath = "/yunbeifeng"

for each in os.listdir('upload'):
    print 'upload', each
    upload(os.path.join('upload', each), remotePath)
    print 'finish', each
Example #21
File: blend.py Project: xete/bdnd
class connect():
	def __init__(self, mode='non-interactive'):
		'''
		mode: interactive or not
		'''
		try:
			username = raw_input('username: ')
			os.system('stty -echo')
			password = raw_input('password: ')
			print '\n'
			os.system('stty echo')
			self.env = PCS(username, password)
		except:
			return None
		if not self.env:
			return None
		self._parse = json_parser.parser().parse
		self._cwd = '/'
		self._dirs = {}
		self._commands = {
			'ls': self._list_files,
			'readlink': self._download_url,
			'cd': self._change_dir,
		}
		self._mode = mode
		if self._mode == 'interactive':
			self._loop()

	def _loop(self):
		while True:
			input = raw_input(self._cwd+'>> ')
			if input in ['q', 'quit', 'exit', 'dis', 'disconnect']:
				break
			# need to type 'quit' more than once
			elif input in ['relogin']:
				self = connect(mode='interactive')
			arg_list = input.split(' ')
			if arg_list[0] in self._commands:
				args = arg_list[1:]
				# unpack arguments out of a list or tuple
				try:
					self._commands[arg_list[0]](*args)
				except:
					pass
	
	def attach(self):
		self._loop()
	
	def _list_files(self, *args):
		ret = {}
		if len(args) == 0:
			args = {self._cwd}
		for d in args:
			ret[d] = self._parse(self.env.list_files(d).content, type='list_files')
		if self._mode != 'interactive':
			return ret	
		else:
			for r in ret:
				for item in ret[r]:
					print '  '+item

	def _download_url(self, *args):
		ret = {}
		for f in args:
			try:
				ret[f] = self.env.download_url(f)
			except:
				ret[f] = None
		if self._mode != 'interactive':
			return ret	
		else:
			for r in ret:
				for item in ret[r]:
					print '  '+item if item else 'None'

	def _change_dir(self, arg):
		def format_arg(arg):
			if arg == '/':
				return arg
			if arg.endswith('/'):
				arg = arg[:-1]
			if arg == '..':
				arg = os.path.dirname(self._cwd)
				return arg
			if arg == '.':
				arg = self._cwd
				return arg
			# covert to absolute path
			if not arg.startswith('/'):
				if self._cwd == '/':
					arg = self._cwd + arg
				else:
					arg = self._cwd +'/'+ arg
			return arg
		arg = format_arg(arg)
		# cached directories
		if arg in self._dirs.keys():
			self._cwd = arg
		else:
			d = self._parse(self.env.list_files(arg).content, type='list_files')
			if d != []:
				self._cwd = arg	
				self._dirs[arg] = d
Example #22
class BaiduFS(Operations):
    '''Baidu netdisk filesystem'''
    def __init__(self, username, password, *args, **kw):
        self.disk = PCS(username, password)
        self.buffer = {}
        self.traversed_folder = {}
        self.bufferLock = Lock()
        self.upload_blocks = {}  # md5 of each block recorded during upload, {PATH: {TMP: '', BLOCKS: ''}}
        self.create_tmp = {}  # {goutputstrem_path:file}
        self.upload_fails = {}  #
        self.fd = 3
        # Initialize the Baidu servers
        print 'selecting pcs server'
        pcs = self.disk.get_fastest_pcs_server()
        self.disk.set_pcs_server(pcs)
        print 'pcs api server:', pcs
        '''
        print 'selecting the Baidu netdisk server, this takes a while, please be patient'
        pan = self.disk.get_fastest_mirror()
        self.disk.set_pan_server(pan)
        print 'baidupan server',pan
        '''

        self.uploadLock = Lock()  # do not refresh the directory while a file is uploading
        self.readLock = Lock()
        self.downloading_files = []

    def unlink(self, path):
        print '*' * 10, 'UNLINK CALLED', path
        self.disk.delete([path])

    def _add_file_to_buffer(self, path, file_info):
        foo = File()
        foo['st_ctime'] = file_info['local_ctime']
        foo['st_mtime'] = file_info['local_mtime']
        foo['st_mode'] = (stat.S_IFDIR | 0777) if file_info['isdir'] \
            else (stat.S_IFREG | 0777)
        foo['st_nlink'] = 2 if file_info['isdir'] else 1
        foo['st_size'] = file_info['size']
        self.buffer[path] = foo

    def _del_file_from_buffer(self, path):
        self.buffer.pop(path)

    def getattr(self, path, fh=None):
        #print 'getattr *',path
        # First check whether the file is already in the cache

        if not self.buffer.has_key(path):
            print path, 'cache miss'
            #print self.buffer
            #print self.traversed_folder
            jdata = json.loads(self.disk.meta([path]).content)
            try:
                if 'info' not in jdata:
                    raise FuseOSError(errno.ENOENT)
                if jdata['errno'] != 0:
                    raise FuseOSError(errno.ENOENT)
                file_info = jdata['info'][0]
                self._add_file_to_buffer(path, file_info)
                st = self.buffer[path].getDict()
                return st
            except:
                raise FuseOSError(errno.ENOENT)
        else:
            #print path, 'cache hit'
            return self.buffer[path].getDict()

    def readdir(self, path, offset):
        self.uploadLock.acquire()
        while True:
            try:
                foo = json.loads(self.disk.list_files(path).text)
                break
            except:
                print 'error'

        files = ['.', '..']
        abs_files = []  # absolute paths of the files in this folder
        for file in foo['list']:
            files.append(file['server_filename'])
            abs_files.append(file['path'])
        # Cache the file info for this folder, querying meta info in batches

        # Update: the meta API cannot return more than 100 records per query,
        # so split into ceil(file_num / 100.0) groups by index modulo the group count
        if not self.traversed_folder.has_key(
                path) or self.traversed_folder[path] == False:
            print 'caching', path
            file_num = len(abs_files)
            group = int(math.ceil(file_num / 100.0))
            for i in range(group):
                obj = [f for n, f in enumerate(abs_files)
                       if n % group == i]  # one group of paths
                while 1:
                    try:
                        ret = json.loads(self.disk.meta(obj).text)
                        break
                    except:
                        print 'error'

                for file_info in ret['info']:
                    if not self.buffer.has_key(file_info['path']):
                        self._add_file_to_buffer(file_info['path'], file_info)
            #print self.buffer
            print 'finished caching', path
            self.traversed_folder[path] = True
        for r in files:
            yield r
        self.uploadLock.release()

    def _update_file_manual(self, path):
        while 1:
            try:
                jdata = json.loads(self.disk.meta([path]).content)
                break
            except:
                print 'error'

        if 'info' not in jdata:
            raise FuseOSError(errno.ENOENT)
        if jdata['errno'] != 0:
            raise FuseOSError(errno.ENOENT)
        file_info = jdata['info'][0]
        self._add_file_to_buffer(path, file_info)

    def rename(self, old, new):
        #logging.debug('* rename',old,os.path.basename(new))
        print '*' * 10, 'RENAME CALLED', old, os.path.basename(new), type(
            old), type(new)
        while True:
            try:
                ret = self.disk.rename([(old, os.path.basename(new))]).content
                jdata = json.loads(ret)
                break
            except:
                print 'error'

        if jdata['errno'] != 0:
            # the target name already exists, delete the old file
            print self.disk.delete([new]).content
            print self.disk.rename([(old, os.path.basename(new))])
        self._update_file_manual(new)
        self.buffer.pop(old)

    def open(self, path, flags):
        self.readLock.acquire()
        print '*' * 10, 'OPEN CALLED', path, flags
        #print '[****]',path
        """
        Permission denied

        accmode = os.O_RDONLY | os.O_WRONLY | os.O_RDWR
        if (flags & accmode) != os.O_RDONLY:
            raise FuseOSError(errno.EACCES)
        """
        self.fd += 1
        self.readLock.release()

        return self.fd

    def create(self, path, mode, fh=None):
        # Create a file
        # there are problems with Chinese paths here
        print '*' * 10, 'CREATE CALLED', path, mode, type(path)
        #if 'outputstream' not in path:
        tmp_file = tempfile.TemporaryFile('r+w+b')
        foo = self.disk.upload(os.path.dirname(path), tmp_file,
                               os.path.basename(path)).content
        ret = json.loads(foo)
        print ret
        print 'create-not-outputstream', ret
        if ret['path'] != path:
            # the file already exists
            print 'file already exists'
            raise FuseOSError(errno.EEXIST)
        '''
        else:
            print 'create:',path
            foo = File()
            foo['st_ctime'] = int(time.time())
            foo['st_mtime'] = int(time.time())
            foo['st_mode'] = (stat.S_IFREG | 0777)
            foo['st_nlink'] = 1
            foo['st_size'] = 0
            self.buffer[path] = foo
        '''
        '''
        dict(st_mode=(stat.S_IFREG | mode), st_nlink=1,
                                st_size=0, st_ctime=time.time(), st_mtime=time.time(),
                                st_atime=time.time())
        '''
        self.fd += 1
        return 0

    def write(self, path, data, offset, fp):
        # Called when a file is being uploaded
        # 4 KB (4096 bytes) per block; data holds the bytes of this block
        # the last block is detected by len(data) < 4096
        # file size = offset of the last block + len(data)

        # 4 KB per call is too slow, so the data is accumulated and uploaded in larger chunks

        #print '*'*10,path,offset, len(data)

        def _block_size(stream):
            stream.seek(0, 2)
            return stream.tell()

        _BLOCK_SIZE = 16 * 2**20
        # Work done for the first block
        if offset == 0:
            #self.uploadLock.acquire()
            #self.readLock.acquire()
            # initialize the list of block md5s
            self.upload_blocks[path] = {'tmp': None, 'blocks': []}
            # create a temporary buffer file
            tmp_file = tempfile.TemporaryFile('r+w+b')
            self.upload_blocks[path]['tmp'] = tmp_file

        # Write data into the temp file; if it reaches _BLOCK_SIZE, upload the block and reset the temp file
        try:
            tmp = self.upload_blocks[path]['tmp']
        except KeyError:
            return 0
        tmp.write(data)

        if _block_size(tmp) > _BLOCK_SIZE:
            print path, 'uploading block'
            tmp.seek(0)
            try:
                foo = self.disk.upload_tmpfile(tmp,
                                               callback=ProgressBar()).content
                foofoo = json.loads(foo)
                block_md5 = foofoo['md5']
            except:
                print foo

            # record this block's md5 in upload_blocks
            self.upload_blocks[path]['blocks'].append(block_md5)
            # create a new temporary buffer file
            self.upload_blocks[path]['tmp'].close()
            tmp_file = tempfile.TemporaryFile('r+w+b')
            self.upload_blocks[path]['tmp'] = tmp_file
            print 'created temp file', tmp_file.name

        # Work done for the last block
        if len(data) < 4096:
            # check for an existing file with the same name and delete it if found
            while True:
                try:
                    foo = self.disk.meta([path]).content
                    foofoo = json.loads(foo)
                    break
                except:
                    print 'error'

            if foofoo['errno'] == 0:
                logging.debug('Deleted the file which has same name.')
                self.disk.delete([path])
            # check whether an upload is still needed
            if _block_size(tmp) != 0:
                # the temp file still holds data that must be uploaded
                print path, 'uploading final block, size', _block_size(tmp)
                tmp.seek(0)
                while True:
                    try:
                        foo = self.disk.upload_tmpfile(
                            tmp, callback=ProgressBar()).content
                        foofoo = json.loads(foo)
                        break
                    except:
                        print 'exception, retry.'

                block_md5 = foofoo['md5']
                # record this block's md5 in upload_blocks
                self.upload_blocks[path]['blocks'].append(block_md5)

            # call upload_superfile to merge the uploaded blocks
            print 'merging file', path, type(path)
            self.disk.upload_superfile(path,
                                       self.upload_blocks[path]['blocks'])
            # drop this path's data from upload_blocks
            self.upload_blocks.pop(path)
            # refresh the local file metadata cache
            self._update_file_manual(path)
            #self.readLock.release()
            #self.uploadLock.release()
        return len(data)

    def mkdir(self, path, mode):
        logger.debug("mkdir is:" + path)
        self.disk.mkdir(path)

    def rmdir(self, path):
        logger.debug("rmdir is:" + path)
        self.disk.delete([path])

    def read(self, path, size, offset, fh):
        #print '*'*10,'READ CALLED',path,size,offset
        #logger.debug("read is: " + path)
        paras = {'Range': 'bytes=%s-%s' % (offset, offset + size - 1)}
        while True:
            try:
                foo = self.disk.download(path, headers=paras).content
                return foo
            except:
                pass

    access = None
    statfs = None
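A minimal sketch of mounting the filesystem class above, assuming the surrounding script uses the fusepy package (which provides the Operations base class and FuseOSError seen in the code); the mount point and credentials are placeholders:

from fuse import FUSE  # fusepy
fs = BaiduFS('username', 'password')
FUSE(fs, '/mnt/baidupan', foreground=True)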
Example #23
from baidupcsapi import PCS

pcs = PCS('lpbirdueng', 'lupeng')
#pcs.info(verify=False)
print pcs.quota().content
print pcs.list_files('/').content
Example #24
                progressbar.Percentage(), ' ',
                progressbar.Bar(marker=progressbar.RotatingMarker('>')), ' ',
                progressbar.ETA()
            ]
            self.pbar = progressbar.ProgressBar(widgets=self.widgets,
                                                maxval=kwargs['size']).start()
            self.first_call = False

        if kwargs['size'] <= kwargs['progress']:
            self.pbar.finish()
        else:
            self.pbar.update(kwargs['progress'])


def backup():
    try:
        copy(
            './database/data.db', './database/data' +
            strftime("%Y-%m-%d %H:%M:%S", localtime()) + '.db')
    except IOError as e:
        print("Unable to copy file. %s" % e)


if __name__ == '__main__':

    pcs = PCS('wbj512291', 'ILY999@wbJ')
    print(pcs.quota().content)
    # test_file = open('data.db', 'rb').read()
    print(pcs.list_files('/').content)
    # ret = pcs.upload('/', test_file, 'data.db', callback=ProgressBar())