Example #1
0
 def testInterleavedHit1(self):
     """Checking in 'XaaaXaaaX' after 'aaa' must deduplicate into five
     pieces: three original 'X' fragments (stored concatenated as one
     'XXX' blob) interleaved with two references to the 'aaa' blob.

     Fix: removed the unused local x_blob and the dead commented-out
     debug call.
     """
     a_blob = self.addWorkdirFile("a.txt", "aaa")
     self.wd.checkin()
     b_blob = self.addWorkdirFile("b.txt", "XaaaXaaaX")
     self.wd.checkin()
     xxx_blob = md5sum("XXX") # All the original pieces joined
     recipe = self.repo.get_recipe(b_blob)
     self.assertEquals(len(recipe['pieces']), 5)
     self.assertEquals(recipe['pieces'][0], {
             'source': xxx_blob,
             'repeat': 1, 'original': True, 'offset': 0, 'size': 1})
     self.assertEquals(recipe['pieces'][1], {
             'source': a_blob,
             'repeat': 1, 'original': False, 'offset': 0, 'size': 3})
     self.assertEquals(recipe['pieces'][2], {
             'source': xxx_blob,
             'repeat': 1, 'original': True, 'offset': 1, 'size': 1})
     self.assertEquals(recipe['pieces'][3], {
             'source': a_blob,
             'repeat': 1, 'original': False, 'offset': 0, 'size': 3})
     self.assertEquals(recipe['pieces'][4], {
             'source': xxx_blob,
             'repeat': 1, 'original': True, 'offset': 2, 'size': 1})
     rebuilt_content = self.wd.front.get_blob(b_blob).read()
     self.assertEquals(md5sum(rebuilt_content), "e18585992d1ea79a30a34e015c49719e")
Example #2
0
    def testConcatenatedFragments(self):
        """Fragments left over from an earlier deduplicated checkin are
        stored as one concatenated blob and are reusable by later checkins."""
        self.addWorkdirFile("a.txt", "aaa")
        self.wd.checkin()
        self.addWorkdirFile("a.txt", "aaabbbaaaXaaaccc")
        self.wd.checkin()

        # Should now exist due to concatenation of original parts in previous commit
        self.assertTrue(
            md5sum("bbbXccc") in self.wd.get_front().get_all_raw_blobs())

        self.addWorkdirFile("a.txt", "cccbbb")
        self.wd.checkin()
        recipe = self.repo.get_recipe(md5sum("cccbbb"))
        fragment_blob = md5sum("bbbXccc")
        expected_pieces = [
            {'source': fragment_blob, 'repeat': 1, 'original': False,
             'offset': 4, 'size': 3},
            {'source': fragment_blob, 'repeat': 1, 'original': False,
             'offset': 0, 'size': 3},
        ]
        self.assertEquals(len(recipe['pieces']), 2)
        for piece, wanted in zip(recipe['pieces'], expected_pieces):
            self.assertEquals(piece, wanted)
        self.assertEquals(
            self.wd.front.get_blob(fragment_blob).read(), "bbbXccc")
Example #3
0
 def addWorkdirFile(self, path, content):
     """Create *path* (relative to the workdir) holding *content* and
     return the md5 checksum of the written data."""
     assert not os.path.isabs(path)
     digest = md5sum(content)
     with open(os.path.join(self.workdir, path), "w") as out:
         out.write(content)
     return digest
Example #4
0
 def testRepeatedHit(self):
     """Exercise dedup of 'XXXaaaXXXaaaXXX' checked in after 'aaa'.

     Fix: removed the unused locals a_blob and x_blob, and added a
     minimal assertion — the original fetched the recipe but asserted
     nothing at all.
     NOTE(review): the expected recipe pieces are still unpinned;
     consider asserting them as the sibling tests do.
     """
     self.addWorkdirFile("a.txt", "aaa")
     self.wd.checkin()
     b_blob = self.addWorkdirFile("b.txt", "XXXaaaXXXaaaXXX")
     self.wd.checkin()
     recipe = self.repo.get_recipe(b_blob)
     self.assertTrue(recipe is not None)
Example #5
0
 def _assertWorkdirEqualsTree(self, workdir, randtree):
     """Assert that *workdir* holds exactly the files of *randtree*
     (ignoring .boar and the manifest) with byte-identical content."""
     found = get_tree(os.path.abspath(workdir), skip = [".boar"], sep = "/")
     found.remove("manifest-md5.txt")
     expected = set(map(backslashes_to_slashes, randtree.files))
     self.assertEqual(set(found), expected)
     for fn in found:
         on_disk = os.path.join(workdir, fn)
         self.assertEqual(md5sum(randtree.get_file_data(fn)), md5sum_file(on_disk))
def is_not_uploaded(filename, db, flickr, verbose=False):
    """Return True when *filename* is neither in the local database nor
    on Flickr; False when it is already uploaded (or uploaded twice)."""
    digest = md5sum(filename)
    if verbose:
        print("filename was: " + filename)
        print("with md5 sum: " + digest)

    try:
        # Local database first; fall back to a Flickr checksum search.
        entries = db.find(md5=digest)
        if len(entries) > 1:
            raise MultiplePhotosFound()
        if len(entries) == 1:
            photo_id = entries[0]['photo_id']
        else:
            photo_id = get_photo_by_checksum(flickr, md5=digest).attrib['id']
    except MultiplePhotosFound:
        if verbose:
            print("  ... multiple copies uploaded")
    except PhotoNotFound:
        if verbose:
            print("  ... not uploaded")
        return True
    else:
        if verbose:
            print("  ... already uploaded")
            print("  " + photo_id)
    return False
Example #7
0
 def testSplitMatch(self):
     """A blob that is the concatenation of two known blobs must be
     stored as a recipe with one piece per source blob."""
     a_blob = self.addWorkdirFile("a.txt", "aaa")
     b_blob = self.addWorkdirFile("b.txt", "bbb")
     self.wd.checkin()
     c_blob = self.addWorkdirFile("c.txt", "aaabbb")
     self.wd.checkin()
     recipe = self.repo.get_recipe(c_blob)
     expected = [
         {'source': a_blob, 'repeat': 1, 'original': False,
          'offset': 0, 'size': 3},
         {'source': b_blob, 'repeat': 1, 'original': False,
          'offset': 0, 'size': 3},
     ]
     self.assertEquals(len(recipe['pieces']), 2)
     for piece, wanted in zip(recipe['pieces'], expected):
         self.assertEquals(piece, wanted)
     rebuilt_content = self.wd.front.get_blob(c_blob).read()
     self.assertEquals(md5sum(rebuilt_content),
                       "6547436690a26a399603a7096e876a2d")
Example #8
0
 def write_md5sum(self, destination, prefix = ""):
     """Write an md5sum-style manifest of every stored file to *destination*.

     Each line has the form '<md5> *<prefix/filename>'. The ' *' marker is
     the md5sum convention for binary-mode checksums.
     NOTE(review): the file is opened in binary mode but str values are
     written to it, so this appears to target Python 2 — confirm before
     running under Python 3.
     """
     with open(destination, "wb") as f:
         for fn in sorted(self.files):
             f.write(md5sum(self.get_file_data(fn)))
             f.write(" *")
             # File names are stored with the path UTF-8 encoded.
             f.write(os.path.join(prefix, fn.encode("utf-8")))
             f.write(os.linesep)
Example #9
0
 def testMultiplePossibleHits1(self):
     """Even when the repeated tail offers several candidate matches, the
     dedup must find the full original blob plus the new one-byte prefix."""
     self.addWorkdirFile("a.txt", "aaabbbcccaaabbbaaabbbaaabbb")
     self.wd.checkin()
     blob = self.addWorkdirFile("b.txt", "Xaaabbbcccaaabbbaaabbbaaabbb")
     self.wd.checkin()
     recipe = self.repo.get_recipe(blob)
     expected = [
         {'source': '02129bb861061d1a052c592e2dc6b383',
          'repeat': 1, 'original': True, 'offset': 0, 'size': 1},
         {'source': '00312b74e44d0712882387b8e0f0a57e',
          'repeat': 1, 'original': False, 'offset': 0, 'size': 27},
     ]
     self.assertEquals(len(recipe['pieces']), 2)
     for piece, wanted in zip(recipe['pieces'], expected):
         self.assertEquals(piece, wanted)
     rebuilt_content = self.wd.front.get_blob(blob).read()
     self.assertEquals(md5sum(rebuilt_content),
                       "407badd3ba116d47c556d1366343048c")
Example #10
0
 def addWorkdirFile(self, path, content):
     """Write *content* to the relative *path* inside the workdir;
     returns the content's md5 checksum."""
     assert not os.path.isabs(path)
     target = os.path.join(self.workdir, path)
     checksum = md5sum(content)
     with open(target, "w") as handle:
         handle.write(content)
     return checksum
Example #11
0
def write_file(directory, path, content):
    """Write *content* to the relative *path* inside *directory* and
    return the md5 checksum of the written data."""
    assert not os.path.isabs(path)
    target = os.path.join(directory, path)
    checksum = md5sum(content)
    with open(target, "w") as handle:
        handle.write(content)
    return checksum
Example #12
0
def write_file(directory, path, content):
    """Create the file *path* (relative) under *directory* with the given
    *content*; the md5 checksum of *content* is returned."""
    assert not os.path.isabs(path)
    digest = md5sum(content)
    destination = os.path.join(directory, path)
    with open(destination, "w") as fobj:
        fobj.write(content)
    return digest
Example #13
0
 def testTailMatchFalsePositive(self):
     """A partial tail match ('aaab...' vs 'aaabb') must not corrupt the
     stored blob: the rebuilt content must hash back to the blob id.

     Fix: the original left the `blob` local unused and duplicated its
     value as a magic hash; the id is now pinned once and reused.
     """
     self.addWorkdirFile("a.txt", "aaabc")
     self.wd.checkin()
     blob = self.addWorkdirFile("b.txt", "Xaaabb")
     self.wd.checkin()
     recipe = self.repo.get_recipe(blob)
     # Pin the expected checksum of "Xaaabb" (guards against helper drift).
     self.assertEquals(blob, "acd3e6fdfcd9e03e3f941c0ed516be81")
     rebuilt_content = self.wd.front.get_blob(blob).read()
     self.assertEquals(md5sum(rebuilt_content), blob)
Example #14
0
 def addWorkdirFile(self, path, content):
     """Write *content* (str values are converted to bytes) to the
     relative *path* under the workdir; returns the md5 of the bytes."""
     assert not os.path.isabs(path)
     if type(content) is not bytes:
         content = str2bytes(content)
     target = os.path.join(self.workdir, path)
     digest = md5sum(content)
     with open(target, "wb") as out:
         out.write(content)
     return digest
Example #15
0
 def testThatRepeatedHitsAreFound(self):
     """'aaaaaa' checked in after 'aaa' must collapse to a single
     (repeated) recipe piece.

     Fix: the original duplicated the blob id as a magic hash instead of
     using the `blob` local; the id is now pinned once and reused.
     """
     self.addWorkdirFile("a.txt", "aaa")
     self.wd.checkin()
     blob = self.addWorkdirFile("b.txt", "aaaaaa")
     self.wd.checkin()
     recipe = self.repo.get_recipe(blob)
     self.assertEquals(len(recipe['pieces']), 1)
     # Pin the expected checksum of "aaaaaa" (guards against helper drift).
     self.assertEquals(blob, "0b4e7a0e5fe84ad35fb5f95b9ceeac79")
     rebuilt_content = self.wd.front.get_blob(blob).read()
     self.assertEquals(md5sum(rebuilt_content), blob)
Example #16
0
def write_file(directory, path, content):
    """Write *content* (converted to bytes when given a str) to the
    relative *path* under *directory*; returns the md5 of the bytes."""
    assert not os.path.isabs(path)
    if type(content) is not bytes:
        content = str2bytes(content)
    destination = os.path.join(directory, path)
    digest = md5sum(content)
    with open(destination, "wb") as out:
        out.write(content)
    return digest
Example #17
0
 def testThatNonalignedEndingsAreDeduplicated(self):
     """'Xaaab' checked in after 'aaab' must be deduplicated into two
     pieces even though the match does not end on a block boundary.

     Fix: commented out the leftover debug print_recipe call — every
     sibling test keeps it commented out.
     """
     self.addWorkdirFile("a.txt", "aaab")
     self.wd.checkin()
     blob = self.addWorkdirFile("b.txt", "Xaaab")
     self.wd.checkin()
     recipe = self.repo.get_recipe(blob)
     #print_recipe(recipe)
     self.assertEquals(len(recipe['pieces']), 2)
     rebuilt_content = self.wd.front.get_blob("1446f760b3a89d261a13d8b37c20ef11").read()
     self.assertEquals(md5sum(rebuilt_content), "1446f760b3a89d261a13d8b37c20ef11")
Example #18
0
    def testConcatenatedFragments(self):
        """Left-over original fragments from a deduplicated checkin are
        concatenated into a single blob which later checkins can reference."""
        self.addWorkdirFile("a.txt", "aaa")
        self.wd.checkin()
        self.addWorkdirFile("a.txt", "aaabbbaaaXaaaccc")
        self.wd.checkin()

        # Should now exist due to concatenation of original parts in previous commit
        self.assertTrue(md5sum("bbbXccc") in self.wd.get_front().get_all_raw_blobs())

        self.addWorkdirFile("a.txt", "cccbbb")
        self.wd.checkin()
        recipe = self.repo.get_recipe(md5sum("cccbbb"))
        fragment_blob = md5sum("bbbXccc")
        wanted = [
            {'source': fragment_blob, 'repeat': 1, 'original': False,
             'offset': 4, 'size': 3},
            {'source': fragment_blob, 'repeat': 1, 'original': False,
             'offset': 0, 'size': 3},
        ]
        self.assertEquals(len(recipe['pieces']), 2)
        for index, piece in enumerate(recipe['pieces']):
            self.assertEquals(piece, wanted[index])
        self.assertEquals(self.wd.front.get_blob(fragment_blob).read(), "bbbXccc")
Example #19
0
 def testInterleavedHit2(self):
     """'aaaXaaa' checked in after 'aaa' becomes an original 'X' piece
     sandwiched between two references to the existing 'aaa' blob."""
     a_blob = self.addWorkdirFile("a.txt", "aaa")
     self.wd.checkin()
     b_blob = self.addWorkdirFile("b.txt", "aaaXaaa")
     self.wd.checkin()
     x_blob = md5sum("X")
     recipe = self.repo.get_recipe(b_blob)
     expected = [
         {'source': a_blob, 'repeat': 1, 'original': False,
          'offset': 0, 'size': 3},
         {'source': x_blob, 'repeat': 1, 'original': True,
          'offset': 0, 'size': 1},
         {'source': a_blob, 'repeat': 1, 'original': False,
          'offset': 0, 'size': 3},
     ]
     self.assertEquals(len(recipe['pieces']), 3)
     for piece, wanted in zip(recipe['pieces'], expected):
         self.assertEquals(piece, wanted)
     rebuilt_content = self.wd.front.get_blob(b_blob).read()
     self.assertEquals(md5sum(rebuilt_content), "78c011eeafaad0783eb1d90392e08b46")
Example #20
0
def upload_file(flickr, path):
    """Upload *path* to Flickr, tagged with machine tags carrying its
    sha1 and md5 checksums, and return the API result of the upload.

    Fix: `async` became a reserved keyword in Python 3.7, so passing it
    as a literal keyword argument is a SyntaxError; it is now supplied
    through **kwargs. The upload result is also returned instead of
    being silently discarded.
    """
    real_sha1 = sha1sum(path)
    real_md5 = md5sum(path)

    tags = sha1_machine_tag_prefix + real_sha1 + " " +\
           md5_machine_tag_prefix + real_md5

    result = flickr.upload(filename=path,
                           title=os.path.basename(path),
                           tags=tags,
                           **{"async": 1})
    return result
Example #21
0
 def prepare(self):
     """Check that the file exists, optionally downloads it.
     Checks that the file is indeed an SQLite3 database.
     Optionally check the MD5.

     Fix: replaced the Python-2-only print statement with the print()
     call form (valid with a single argument on both 2 and 3) and
     unfolded the one-line `else:`/`if:` suites.
     """
     if not os.path.exists(self.path):
         if self.retrieve:
             print("Downloading SQLite3 database...")
             download_from_url(self.retrieve, self.path, progress=True)
         else:
             raise Exception("The file '" + self.path + "' does not exist.")
     self.check_format()
     if self.md5:
         # Kept as assert for backward compatibility: callers may rely on
         # AssertionError. NOTE(review): stripped under python -O.
         assert self.md5 == md5sum(self.path)
     self.prepared = True
Example #22
0
def add_file_simple(front, filename, contents):
    """Adds a file with contents to a new snapshot. The front instance
    "create_session()" must have been called before this function is
    used, or an exception will be thrown."""
    checksum = md5sum(contents)
    if not front.has_blob(checksum):
        front.add_blob_data(checksum, base64.b64encode(contents))
    timestamp = int(time())
    front.add({
        'filename': filename,
        'md5sum': checksum,
        'ctime': timestamp,
        'mtime': timestamp,
        'size': len(contents),
    })
Example #23
0
 def testMultiplePossibleHits2(self):
     """Among several identical candidate matches, 'aaabbbccc' must be
     covered by exactly one piece of the first (longer) blob."""
     first_blob = self.addWorkdirFile("a.txt", "aaabbbaaabbbaaabbbaaabbbccc")
     self.wd.checkin()
     blob = self.addWorkdirFile("b.txt", "aaabbbccc")
     self.wd.checkin()
     recipe = self.repo.get_recipe(blob)
     pieces = recipe['pieces']
     self.assertEquals(len(pieces), 1)
     self.assertEquals(pieces[0], {
             'source': first_blob,
             'repeat': 1, 'original': False, 'offset': 18, 'size': 9})
     self.assertEquals(md5sum(self.wd.front.get_blob(blob).read()),
                       "d1aaf4767a3c10a473407a4e47b02da6")
Example #24
0
def add_file_simple(front, filename, contents):
    """Adds a file with contents to a new snapshot. The front instance
    "create_session()" must have been called before this function is
    used, or an exception will be thrown."""
    digest = md5sum(contents)
    if not front.has_blob(digest):
        front.add_blob_data(digest, base64.b64encode(contents))
    stamp = int(time())
    front.add({'filename': filename,
               'md5sum': digest,
               'ctime': stamp,
               'mtime': stamp,
               'size': len(contents)})
Example #25
0
 def testMultiplePossibleHits1(self):
     """A one-byte prefix plus a fully known blob must yield exactly two
     recipe pieces despite many candidate match positions."""
     self.addWorkdirFile("a.txt", "aaabbbcccaaabbbaaabbbaaabbb")
     self.wd.checkin()
     blob = self.addWorkdirFile("b.txt", "Xaaabbbcccaaabbbaaabbbaaabbb")
     self.wd.checkin()
     recipe = self.repo.get_recipe(blob)
     pieces = recipe['pieces']
     self.assertEquals(len(pieces), 2)
     self.assertEquals(pieces[0], {'source': '02129bb861061d1a052c592e2dc6b383',
                                   'repeat': 1, 'original': True,
                                   'offset': 0, 'size': 1})
     self.assertEquals(pieces[1], {'source': '00312b74e44d0712882387b8e0f0a57e',
                                   'repeat': 1, 'original': False,
                                   'offset': 0, 'size': 27})
     self.assertEquals(md5sum(self.wd.front.get_blob(blob).read()),
                       "407badd3ba116d47c556d1366343048c")
Example #26
0
 def testSplitMatch(self):
     """The concatenation of two existing blobs must dedup into exactly
     one piece per source blob."""
     a_blob = self.addWorkdirFile("a.txt", "aaa")
     b_blob = self.addWorkdirFile("b.txt", "bbb")
     self.wd.checkin()
     c_blob = self.addWorkdirFile("c.txt", "aaabbb")
     self.wd.checkin()
     recipe = self.repo.get_recipe(c_blob)
     wanted = [
         {'source': a_blob, 'repeat': 1, 'original': False,
          'offset': 0, 'size': 3},
         {'source': b_blob, 'repeat': 1, 'original': False,
          'offset': 0, 'size': 3},
     ]
     self.assertEquals(len(recipe['pieces']), 2)
     for index, piece in enumerate(recipe['pieces']):
         self.assertEquals(piece, wanted[index])
     self.assertEquals(md5sum(self.wd.front.get_blob(c_blob).read()),
                       "6547436690a26a399603a7096e876a2d")
Example #27
0
 def get(self, arg):
     """Serve a file download to the client.

     :param arg: file-pointer dict posted by the client
                 (keys: 'user', 'file', 'action')
     :return: None

     Fixes: the file handle is now closed via a context manager even if
     send() raises mid-transfer, and an empty read aborts the loop
     instead of spinning forever on a truncated file.
     """
     file = os.path.join(self.data_path, arg['user'],
                         arg['file'])  # absolute path of the requested file
     if os.path.exists(file):
         filename = os.path.basename(file)
         filesize = os.stat(file).st_size
         md5 = common.md5sum(file)
         server_response = {                  # file pointer sent back to the client
             'filename': filename,
             'filesize': filesize,
             'md5': md5,
         }
         self.request.send(json.dumps(server_response).encode())
         client_ack = self.request.recv(1024).decode().split(
             '|')  # receive the client's status
         if client_ack[0] == 'ok':  # client is ready; resume offset follows
             send_size = int(client_ack[1])
             with open(file, 'rb') as fp:
                 fp.seek(send_size)
                 while send_size < filesize:
                     data = fp.read(4096)
                     if not data:
                         break  # file shorter than advertised; stop sending
                     self.request.send(data)
                     send_size += len(data)
             msg = self.log_template % (arg['user'], arg['action'], file,
                                        'success', '文件发送至客户端')
             self.__write_log(msg, "info")
         else:
             msg = self.log_template % (arg['user'], arg['action'], file,
                                        'faild', "客户端问题")
             self.__write_log(msg, "warning")
             return None  # client-side problem, abort
     else:
         msg = self.log_template % (arg['user'], arg['action'], file,
                                    'faild', "文件不存在:%s" % file)
         self.__write_log(msg, "error")
         self.request.send('404'.encode())  # server does not have the file
Example #28
0
def already_uploaded(flickr, path):
    """Return True when a photo with this file's md5 checksum already
    exists on Flickr."""
    checksum = md5sum(path)
    throttle()
    matches = search_for_checksum(flickr, checksum)
    return len(matches) != 0
Example #29
0
def submit_pic_job(url):
    """Queue a background picture-processing job for *url* and return
    the job's unique id (the md5 of the url, used for deduplication)."""
    job_req = gm_client.submit_job(
        'worker_process_pic', url,
        unique=md5sum(url), background=True, wait_until_complete=False)
    return job_req.job.unique
Example #30
0
    def get(self, arg):
        """Download a file from the server.

        :param arg: (action, remote file, local target path)
        :return: None

        Fixes: removed a print(print(...)) double-print (it printed an
        extra "None"), and the temp-file handle is now closed via a
        context manager even if the connection drops mid-transfer.
        """
        point = {
            'action': arg[0],
            'file': arg[1],
            'user': self.isLogin,
        }
        self.conn.send(json.dumps(point).encode())  # send the file pointer: action + file name
        server_ack = self.conn.recv(1024).decode()
        if not server_ack == '404':
            server_ack = json.loads(server_ack)
            filename = server_ack['filename']
            filesize = server_ack['filesize']
            filemd5 = server_ack['md5']
            # Work out the local paths (temp file for the transfer, final file).
            if os.path.exists(arg[2]):  # target exists: no rename needed
                if os.path.isfile(arg[2]):  # the target file itself already exists
                    print("%s本地文件已存在" % arg[2])
                    self.conn.send('error'.encode())  # report client-side error
                    return None  # abort the download
                else:
                    tmp_filename = "%s_%s.tmp" % (filename, filemd5)  # temp file name
                    tmp_file = os.path.join(arg[2], tmp_filename)  # temp file
                    file = os.path.join(arg[2], filename)  # final file
            else:  # target missing: the last path component may be a new name
                upper_file_path = os.path.dirname(arg[2])  # parent of the local target
                if os.path.exists(upper_file_path):  # parent exists: rename requested
                    filename = os.path.basename(arg[2])
                    tmp_filename = "%s_%s.tmp" % (filename, filemd5)  # temp file name
                    tmp_file = os.path.join(upper_file_path, tmp_filename)
                    file = arg[2]  # final file: target path whose last part is the name
                else:  # parent missing: the supplied path really does not exist
                    # BUG FIX: was print(print(...)), which printed an extra None.
                    print("本地路径不存在:%s" % arg[2])
                    self.conn.send('error'.encode())  # report client-side error
                    return None  # abort the download

            # Resume support: ask the user when a partial download exists.
            if os.path.exists(tmp_file):  # a temp file is present
                while True:
                    res = input("本地文件%s已存在,是否断点续传?[y/n]" % file)
                    if res == 'y':
                        start_size = os.stat(tmp_file).st_size
                        open_file_type = 'ab'
                        break
                    elif res == 'n':
                        start_size = 0
                        open_file_type = 'wb'
                        break
                    else:
                        print("请输入y和n")
            else:
                start_size = 0
                open_file_type = 'wb'
            # Tell the server we are ready and where to start from.
            self.conn.send(bytes("ok|%s" % start_size, encoding='utf-8'))

            # Receive the payload; the context manager closes the handle
            # even when the connection drops mid-transfer.
            with open(tmp_file, open_file_type) as fw:
                fw.seek(start_size)
                recv_size = start_size
                while recv_size < filesize:
                    data = self.conn.recv(4096)
                    fw.write(data)
                    recv_size += len(data)
                    common.view_bar(recv_size, filesize)
            newmd5 = common.md5sum(tmp_file)  # md5 verification
            if newmd5 == filemd5:
                shutil.move(tmp_file, file)
                print("文件下载成功")
            else:
                print("文件下载失败,md5不一致,服务端md5:%s" % filemd5)
                res = input("是否删除临时文件? [y/n] ")
                if res == 'y':
                    os.remove(tmp_file)
        else:
            common.code_parsing(server_ack)
Example #31
0
# Stage the build into a local AppDir for linuxdeployqt to package.
# NOTE(review): c, linuxdeployqt_bin, dependencies_dir, app_version,
# ssl_dir, qt_dir, app_name, artifact_path and bin_name are defined
# earlier in this script (outside this excerpt).
install_dir = os.path.abspath('appdir')
c.recreate_dir(install_dir)
c.run('make INSTALL_ROOT={0} DESTDIR={0} install'.format(install_dir))

# Inside docker the AppImage cannot be mounted directly (presumably no
# FUSE — confirm), so extract it and use the unpacked AppRun instead.
if c.is_inside_docker():
    c.run('{}  --appimage-extract'.format(linuxdeployqt_bin))
    linuxdeployqt_bin = os.path.abspath('squashfs-root/AppRun')

os.environ['LD_LIBRARY_PATH'] = dependencies_dir + '/lib'
os.environ['VERSION'] = app_version
# debug flags: -unsupported-bundle-everything -unsupported-allow-new-glibc
flags = '' if os.getenv("DEBUG") is None else '-unsupported-allow-new-glibc'

# Bundle extra shared libraries (OpenSSL and NSS) into the AppDir's lib
# directory so linuxdeployqt packages them alongside the Qt libraries.
additional_files = glob(ssl_dir + '/lib/lib*.so.*') + \
    glob('/usr/lib/x86_64-linux-gnu/nss/*')
out_lib_dir = install_dir + '/usr/lib'
os.makedirs(out_lib_dir, exist_ok=True)
for f in additional_files:
    c.print('>> Copying {} to {}'.format(f, out_lib_dir))
    shutil.copy(f, out_lib_dir)

# Build the AppImage from the staged AppDir.
c.run(
    '{} {}/usr/share/applications/*.desktop {} -appimage -qmake={}/bin/qmake'.
    format(linuxdeployqt_bin, install_dir, flags, qt_dir))

# Move the produced AppImage to the artifact location.
c.run('mv {}-{}*.AppImage "{}"'.format(app_name, app_version, artifact_path))

# Log the checksum of the installed binary for traceability.
bin_path = install_dir + '/usr/bin/' + bin_name
c.print('>> Md5 {} {}'.format(bin_path, c.md5sum(bin_path)))
Example #32
0
    def put(self, arg):
        """Upload a file to the server.

        :param arg: (action, local file, server target path)
        :return: None

        Fix: the local file handle is now closed via a context manager
        even if the connection drops mid-transfer.
        """

        local_file = arg[1]  # absolute path of the file to upload
        file_target_path = arg[2]  # server-side target path (default: current dir)
        if os.path.exists(local_file):  # the file must exist locally
            filename = os.path.basename(local_file)
            filesize = os.stat(local_file).st_size
            md5 = common.md5sum(local_file)
            point = {
                'action': arg[0],
                'filename': filename,
                'filesize': filesize,
                'md5': md5,
                'target_path': file_target_path,
                'user': self.isLogin,
            }

            # Send the file pointer: action, file name, target path, size, md5.
            self.conn.send(
                bytes(json.dumps(point),
                      encoding="utf-8"))
            server_ack = self.conn.recv(1024).decode()  # receive server status
            code = server_ack.split('|')[0]
            if code == '211':  # server asks whether to resume
                common.code_parsing(server_ack)
                while True:
                    res = input('请选择:')
                    if res in ('y', 'n'):
                        self.conn.send(res.encode())
                        server_ack = self.conn.recv(1024).decode()  # server status
                        if server_ack.split('|')[0] == "200":  # server ready
                            start_send_size = int(server_ack.split('|')[1])
                            break
                        else:
                            common.code_parsing(server_ack)
                            return None  # server error, abort the upload
                    else:
                        print("输入错误,请输入y和n")
            elif code == "200":  # plain upload from the start
                start_send_size = 0
            else:
                common.code_parsing(server_ack)
                return None  # server error, abort the upload
        else:
            print("%s 不存在!!" % local_file)
            return None  # client error, abort the upload

        # Send the payload; the context manager guarantees the handle is
        # closed even when send() raises mid-transfer.
        with open(local_file, 'rb') as fp:
            fp.seek(start_send_size)
            while start_send_size < filesize:
                data = fp.read(4096)
                self.conn.send(data)
                start_send_size += len(data)
                common.view_bar(start_send_size, filesize)
        result = self.conn.recv(1024).decode()
        common.code_parsing(result)
Example #33
0
    def put(self, arg):
        """Handle a client upload.

        :param arg: file-pointer dict posted by the client (keys:
                    'filename', 'filesize', 'md5', 'target_path', 'user',
                    'action')
        :return: None

        Fixes: two branches logged the undefined name `file` (NameError);
        an unexpected resume answer left `seek`/`file_open_type` unbound;
        the temp-file handle is now closed via a context manager.
        """
        filename = arg['filename']
        filesize = arg['filesize']
        filemd5 = arg['md5']
        file_target_path = arg['target_path']  # requested target path
        user = arg['user']

        # Quota check: a per-user quota overrides the default.
        if 'quota_size' in self.configinfo['userinfo'][
                user]:
            quota_size = self.configinfo['userinfo'][user]['quota_size']
        else:
            quota_size = self.quota_size

        available_space = int(
            quota_size) * 1024 * 1024 - self.__get_total_size(user)  # free space
        if available_space > filesize:

            # Decide whether the client asked for a rename.
            abs_file_target_path = os.path.join(self.data_path, user,
                                                file_target_path)  # absolute target path
            if os.path.exists(
                    abs_file_target_path):  # path exists: client gave a directory, no rename
                if os.path.isfile(
                        abs_file_target_path):  # target is an existing file: conflict
                    self.request.send(
                        bytes("208|:%s" % file_target_path,
                              encoding='utf-8'))  # file exists, send transfer pointer
                    return None  # abort
                else:
                    tmp_filename = "%s_%s.tmp" % (filename, filemd5)
                    tmp_file = os.path.join(abs_file_target_path,
                                            tmp_filename)  # absolute temp-file path
                    file = os.path.join(abs_file_target_path,
                                        filename)  # absolute final-file path

            else:  # target missing: client may want a rename

                # Does the parent of the target path exist?
                upper_abs_target_path = os.path.dirname(
                    abs_file_target_path)  # absolute parent path
                if os.path.exists(upper_abs_target_path):  # yes: last component is the new name
                    tmp_filename = "%s_%s.tmp" % (
                        os.path.basename(abs_file_target_path), filemd5
                    )  # temp file name
                    tmp_file = os.path.join(upper_abs_target_path,
                                            tmp_filename)  # absolute temp-file path
                    file = os.path.join(
                        upper_abs_target_path,
                        os.path.basename(abs_file_target_path))  # absolute final-file path
                else:  # no: the supplied path really does not exist
                    # BUG FIX: logged the undefined name `file` here (NameError);
                    # log the requested target path instead.
                    msg = self.log_template % (
                        arg['user'], arg['action'], file_target_path, "faild",
                        "路径不存在:%s" % os.path.dirname(file_target_path))
                    self.__write_log(msg, "warning")
                    self.request.send(
                        bytes("204|:%s" % os.path.dirname(file_target_path),
                              encoding='utf-8'))
                    return None  # abort the put

            # Refuse the upload when the final file already exists.
            if os.path.exists(file):
                msg = self.log_template % (arg['user'], arg['action'], file,
                                           "faild",
                                           "文件已存在:%s" % os.path.basename(file))
                self.__write_log(msg, "warning")
                self.request.send(
                    bytes("208|:%s" % os.path.basename(file),
                          encoding='utf-8'))  # file exists, send transfer pointer
                return None  # abort
            else:
                # Resume support: a leftover temp file lets the client resume.
                if os.path.exists(tmp_file):  # temp file present: ask the client
                    self.request.send(
                        bytes('211|%s文件已存在' % filename, encoding='utf-8'))
                    client_ack = self.request.recv(1024).decode()
                    if client_ack == 'y':  # client wants to resume
                        file_open_type = 'ab'  # append mode
                        seek = int(os.stat(tmp_file).st_size)  # existing temp-file size
                        self.request.send(
                            bytes("200|%s" % seek,
                                  encoding='utf-8'))  # ok, send existing size
                    elif client_ack == 'n':  # client wants to start over
                        seek = 0
                        file_open_type = 'wb'
                        self.request.send(
                            bytes("200|%s" % seek,
                                  encoding='utf-8'))  # ok, send transfer pointer
                    else:
                        # BUG FIX: any other answer left `seek` and
                        # `file_open_type` unbound and crashed below;
                        # abort the transfer instead.
                        msg = self.log_template % (arg['user'], arg['action'],
                                                   file, 'faild', "客户端问题")
                        self.__write_log(msg, "warning")
                        return None
                else:  # no temp file
                    seek = 0
                    file_open_type = 'wb'
                    self.request.send(bytes("200|%s" % seek,
                                            encoding='utf-8'))  # ok, send transfer pointer

            # Receive the payload.
            recv_size = seek
            t_start = time.time()
            with open(tmp_file, file_open_type) as fw:
                while int(recv_size) < filesize:
                    data = self.request.recv(4096)
                    if not data:
                        break  # empty read: the client disconnected
                    fw.write(data)
                    recv_size += len(data)
            t_cost = time.time() - t_start
            newmd5 = common.md5sum(tmp_file)  # md5 verification
            if newmd5 == filemd5:
                msg = self.log_template % (arg['user'], arg['action'], file,
                                           "success", '上传成功')
                self.__write_log(msg, "info")
                shutil.move(tmp_file, file)  # rename the temp file to the final file
                self.request.send(bytes("201|耗时:%s" % t_cost,
                                        encoding='utf-8'))  # 201 = upload succeeded
            else:
                msg = self.log_template % (arg['user'], arg['action'], file,
                                           'faild', 'md5校验不一致')
                self.__write_log(msg, "error")
                self.request.send(
                    bytes("202|md5不一致!服务端md5:%s" % newmd5,
                          encoding='utf-8'))  # 202 = upload failed

        else:
            # BUG FIX: `file` is never assigned on this path; log the
            # requested target path instead of raising NameError.
            msg = self.log_template % (arg['user'], arg['action'],
                                       file_target_path, 'faild', "超过空间配额")
            self.__write_log(msg, "error")
            self.request.send(
                bytes("209|可用空间:%sM" % str(available_space / 1024 / 1024),
                      encoding='utf-8'))  # insufficient user quota