# Code example #1
    def downloadFile(self, savingPath, fileHandle):
        """Download, decrypt and reassemble a remote file under *savingPath*.

        ``fileHandle`` is a hex string: the first 64 characters are the file
        id, the remainder is the AES-GCM key used to decrypt both the file
        metadata and every content chunk.  Parts are fetched in parallel into
        a temporary ``tmp/<name>`` folder, then joined chunk by chunk into
        the final decrypted file, after which the temp folder is removed.
        """
        # Split the handle into its id half and its key half.
        fileId = fileHandle[:64]
        fileKey = fileHandle[64:]
        key = bytearray.fromhex(fileKey)

        # Ask the backend for the direct download URL of this file.
        payloadJson = json.dumps({"fileID": fileId})
        with requests.Session() as s:
            response = s.post(self._baseUrl + "download", data=payloadJson)

        url = response.content.decode()
        url = json.loads(url)["fileDownloadUrl"]

        # Get file metadata
        with requests.Session() as s:
            response = s.get(url + "/metadata")

        encryptedMetaData = response.content

        # Decrypt file metadata
        decryptedMetaData = AesGcm256.decrypt(encryptedMetaData, key)
        metaData = json.loads(decryptedMetaData)

        uploadSize = Helper.GetUploadSize(metaData["size"])
        partSize = 5245440  # 80 * (Constants.DEFAULT_BLOCK_SIZE + Constants.BLOCK_OVERHEAD)
        # NOTE(review): when uploadSize is an exact multiple of partSize this
        # schedules one extra (empty) part — presumably harmless; confirm
        # against downloadPart's handling.
        parts = int(uploadSize / partSize) + 1

        # NOTE(review): everything after the first "." is dropped, so the
        # restored file is saved without its extension (the commented-out
        # alternative below used metaData["name"]) — confirm this is intended.
        fileName = metaData["name"].split(".")[0]
        fileName = fileName.rstrip()
        #folderPath = os.path.normpath(savingPath + "/tmp/" + fileName)
        folderPath = os.path.join(savingPath, "tmp", fileName)
        os.makedirs(folderPath, exist_ok=True)
        '''
            Downloading all parts
        '''
        fileUrl = url + "/file"

        print("Downloading file: {}".format(fileName))
        # start_time = time.time()
        # Fetch all parts concurrently; each part ends up as
        # <folderPath>/<partNumber>.part (see the joining loop below).
        Parallel(n_jobs=5)(delayed(self.downloadPart)(
            partNumber, parts, partSize, uploadSize, fileUrl, folderPath)
                           for partNumber in range(parts))
        # print("--- %s seconds with parallel n = 5---" % (time.time() - start_time))
        '''
        start_time = time.time()
        for partNumber in range(parts):
            byteFrom = partNumber * partSize
            byteTo = (partNumber + 1) * partSize - 1
            if (byteTo > uploadSize - 1):
                byteTo = uploadSize - 1

            fileBytes = None
            with requests.Session() as s:
                temp = "bytes={}-{}".format(byteFrom, byteTo)
                s.headers.update({"range": temp})
                response = s.get(url=url)

                fileBytes = response.content

            fileToWriteTo = folderPath + "\\" + str(partNumber) + ".part"

            with open(fileToWriteTo, 'wb') as file:
                file.write(fileBytes)
        print("--- %s seconds --- with single " % (time.time() - start_time))
        '''
        '''
            Decrypt the chunks and restore the file
        '''
        print("Joining all parts together")
        # One encrypted chunk = blockSize payload bytes + AES-GCM overhead.
        chunkSize = metaData["p"]["blockSize"] + Constants.BLOCK_OVERHEAD
        chunksAmount = int(uploadSize / chunkSize) + 1

        #path = os.path.normpath(savingPath + "\\" + metaData["name"])
        path = os.path.join(savingPath, fileName)

        # Start from an empty target file (the file is opened in append mode).
        if os.path.exists(path=path):
            os.remove(path=path)

        with open(path, 'ab+') as saveFile:
            # fileIndex selects the current ".part" file; seek is the read
            # offset inside it.  When a read would hit the end of the current
            # part file, the remainder is read and the cursor moves to the
            # start of the next part file.
            fileIndex = 0
            seek = 0
            for chunkIndex in range(chunksAmount):
                chunkRawBytes = None
                with open(os.path.join(folderPath,
                                       str(fileIndex) + ".part"),
                          'rb') as partFile:
                    partFile.seek(seek)
                    toReadBytes = chunkSize
                    if seek + toReadBytes >= os.path.getsize(partFile.name):
                        # Last (possibly short) chunk of this part file.
                        toReadBytes = os.path.getsize(partFile.name) - seek

                        # if the bytes to read exceed the file in the next iteration of the for loop
                        # you need to go to the next partFile -> seek from start
                        seek = 0
                        fileIndex = fileIndex + 1
                    else:
                        seek = seek + chunkSize

                    chunkRawBytes = partFile.read(toReadBytes)

                # Each chunk is decrypted independently with the file key.
                decryptedChunk = AesGcm256.decrypt(chunkRawBytes, key)
                saveFile.write(decryptedChunk)

        # Delete this file's part folder; also remove the shared "tmp"
        # folder once it holds no other in-progress downloads.
        shutil.rmtree(folderPath)
        tempFolderPath = os.path.dirname(folderPath)
        if len(os.listdir(tempFolderPath)) == 0:
            shutil.rmtree(tempFolderPath)

        print("Finished download of {}".format(fileName))
# Code example #2
    def uploadFile(self, filePath, folder) -> bool:
        """Encrypt and upload a single local file into a remote folder.

        A fresh handle is generated per upload (``Helper.GenerateFileKeys``):
        its first 32 bytes act as the file id, the rest as the AES-GCM key
        that encrypts the metadata (chunks are encrypted by ``uploadPart``).
        Parts are uploaded in parallel, missing parts are retried for up to
        three status rounds, and finally the file is attached to the folder's
        metadata.

        :param filePath: path of the local file to upload
        :param folder: remote folder the file is attached to
        :return: True on success; False when the upload is skipped (empty
                 file, name already present) or fails (too many retries,
                 metadata attach error).  Previously these paths returned
                 None, contradicting the declared ``-> bool``.
        :raises Exception: if the ``init-upload`` request is rejected
        :raises AssertionError: on an unknown ``upload-status`` response
        """
        fd = dict()
        fd["fullName"] = os.path.normpath(filePath)
        fd["name"] = os.path.basename(filePath)
        size = os.path.getsize(filePath)
        if size == 0:
            # Zero-byte files are rejected up front.
            print(
                f"Couldn't upload: {fd['fullName']}\nBecause the filesize is equal to 0."
            )
            return False
        fd["size"] = size
        fd["type"] = mimetypes.guess_type(filePath)[0]
        # fd["type"] = "application/octet-stream"

        # Skip the upload if a file with this name is already listed in the
        # folder metadata.  (The original used for/else without a break, so
        # the else branch always ran — flattened here.)
        metadataToCheckIn = self.getFolderData(folder=folder)
        for file in metadataToCheckIn["metadata"].files:
            if file.name == fd["name"]:
                print("File: {} already exists".format(fd["name"]))
                return False
        print("Uploading file: {}".format(fd["name"]))

        metaData = FileMetaData(fd)
        uploadSize = Helper.GetUploadSize(fd["size"])
        endIndex = Helper.GetEndIndex(uploadSize, metaData.p)

        handle = Helper.GenerateFileKeys()
        hashBytes = handle[0:32]  # id half of the handle
        keyBytes = handle[32:]    # AES key half of the handle

        metaDataJson = Helper.GetJson(metaData.getDict())

        encryptedMetaData = AesGcm256.encryptString(metaDataJson, keyBytes)

        handleHex = handle.hex()
        fileId = hashBytes.hex()

        # Announce the upload (id, total size, part count) to the backend.
        requestBody = dict()
        requestBody["fileHandle"] = fileId
        requestBody["fileSizeInByte"] = uploadSize
        requestBody["endIndex"] = endIndex

        requestBodyJson = Helper.GetJson(requestBody)
        payload = self.SignPayloadForm(requestBodyJson,
                                       {"metadata": encryptedMetaData})
        with requests.Session() as s:
            response = s.post(self._baseUrl + "init-upload", files=payload)

        if response.status_code != 200:
            raise Exception("Error during init-upload\n{}".format(
                response.content.decode()))

        # Upload all parts in parallel.
        Parallel(n_jobs=8)(
            delayed(self.uploadPart)(fd, metaData, handle, index, endIndex)
            for index in range(endIndex))

        # Verify the upload and retry missing parts (up to three rounds).
        requestBody = dict()
        requestBody["fileHandle"] = fileId
        requestBodyJson = Helper.GetJson(requestBody)
        payload = self.signPayloadDict(requestBodyJson)
        payloadJson = Helper.GetJson(payload)

        with requests.Session() as s:
            response = s.post(self._baseUrl + "upload-status",
                              data=payloadJson)

        retries = 3
        content = json.loads(response.content.decode())
        if content["status"] != 'File is uploaded':
            if content["status"] != 'chunks missing':
                raise AssertionError("Unknown status of upload-status")
            missing_parts = content["missingIndexes"]
            while len(missing_parts) > 0 and retries > 0:
                amount = content["endIndex"]
                for missingPart in missing_parts:
                    print("Trying to re-upload part {} out of {}".format(
                        missingPart, amount))
                    # missingIndexes appear to be 1-based while uploadPart
                    # takes 0-based indexes, hence the -1.
                    self.uploadPart(fd, metaData, handle, missingPart - 1,
                                    endIndex)
                retries -= 1
                with requests.Session() as s:
                    response = s.post(self._baseUrl + "upload-status",
                                      data=payloadJson)
                content = json.loads(response.content.decode())
                if content["status"] == "File is uploaded":
                    break
                if retries == 0:
                    print(
                        f"Failed to upload the {fd['name']}\nReason: Too many retries"
                    )
                    return False
                missing_parts = content["missingIndexes"]

        # Attach the uploaded file to the folder metadata.
        fileInfo = FolderMetaFile()
        fileInfo.name = fd["name"]
        fileInfo.created = int(os.path.getctime(fd["fullName"]) * 1000)
        fileInfo.modified = int(os.path.getmtime(fd["fullName"]) * 1000)
        fileInfo.versions.append(
            FolderMetaFileVersion(
                size=fd["size"],
                handle=handleHex,
                modified=fileInfo.modified,
                created=fileInfo.created,
            ))
        try:
            self.AddFileToFolderMetaData(folder, fileInfo, isFile=True)
            print("Uploaded file: {}".format(fd["name"]))
            return True
        except Exception as e:
            print(
                "Failed to attach the file to the folder\nFilehandle: {}\nFolder: {}\nReason: {}"
                .format(handleHex, folder, e))
            return False