Example #1
0
    def get_folder_key(self, folder_id):
        """Extract the API key embedded in a Drive folder's "open" page.

        Fetches https://drive.google.com/open?id=<folder_id> and pulls the
        key out of the inline ``__initData`` JSON blob.

        :param folder_id: Google Drive folder identifier.
        :return: the key found at a fixed position inside the init data.
        """
        page = self.make_request(
            "GET", f"https://drive.google.com/open?id={folder_id}")

        marker = "__initData = "
        begin = page.text.index(marker) + len(marker)
        finish = page.text.index(";", begin)
        init_data = json_deserialize(page.text[begin:finish])
        # The key sits at this hard-coded position in the blob.  :nospies:
        return init_data[0][9][32][35]
Example #2
0
 def _apicall(
     self,
     request,
     maximum_backoff=32
 ):
     sleep_exponent_count = 0
     while True:
         success = True
         retry = False
         try:
             return request.execute()
         except HttpError as error:
             success = False
             try:
                 error_details = json_deserialize(
                     error.content.decode("utf-8"),
                 )["error"]
                 if "errors" in error_details:
                     if error_details["errors"][0]["reason"] in (
                         "dailyLimitExceeded",
                         "userRateLimitExceeded",
                         "rateLimitExceeded",
                         "backendError",
                         "sharingRateLimitExceeded",
                         "failedPrecondition",
                         "internalError",
                         "domainPolicy",
                         "insufficientFilePermissions",
                         "appNotAuthorizedToFile"
                     ):  # IF REQUEST IS RETRYABLE
                         retry = True
                 else:
                     raise error
             except JSONDecodeError:
                 retry = True
         except (TransportError, SocketTimeoutError, SocketTimeoutError):
             success = False
             retry = True
         if success:
             break
         if retry:
             sleep_time = 2 ^ sleep_exponent_count
             if sleep_time < maximum_backoff:
                 sleep(sleep_time)
                 sleep_exponent_count += 1
                 continue
             else:
                 raise Exception("Maximum Backoff Limit Exceeded.")
         else:
             raise Exception("Unretryable Error")
Example #3
0
    def get_files_in_folder_id(
        self,
        folder_id
    ):
        """List all files directly inside a Drive folder.

        Pages through the private clients6 Drive v2beta listing endpoint
        (500 results per page) and collects regular files only.

        :param folder_id: Google Drive folder identifier.
        :return: dict mapping file id -> {"name": title, "size": bytes}.
        """
        pbar = tqdm(desc="Files scanned", unit="file", unit_scale=True)
        files = {}
        page_token = None

        # LIMITS TO 100 PAGES MAXIMUM, SHOULD CHANGE THIS LATER
        for _ in range(100):
            url = "https://clients6.google.com/drive/v2beta/files?" + \
                "openDrive=false&reason=102&syncType=0&errorRecovery=false" + \
                f"&q=trashed%20%3D%20false%20and%20%27{folder_id}%27%20in" + \
                "%20parents&fields=kind%2CnextPageToken%2Citems(kind" + \
                "%2CfileSize%2Ctitle%2Cid)%2CincompleteSearch&" + \
                "appDataFilter=NO_APP_DATA&spaces=drive&maxResults=500&" + \
                "orderBy=folder%2Ctitle_natural%20asc&" + \
                f"key={self.get_folder_key(folder_id)}"

            if page_token is not None:
                url = f"{url}&pageToken={page_token}"

            ls_response = self.make_request(
                "GET",
                url,
                referer=f"https://drive.google.com/open?id={folder_id}"
            )
            ls_json = json_deserialize(ls_response.text)
            pbar.update(len(ls_json["items"]))

            for drive_file in ls_json["items"]:
                # BUG FIX: was "and", which let sizeless drive#file items
                # (e.g. Google Docs exports have no fileSize) fall through
                # to the int(...) below and raise KeyError. Skip anything
                # that is not a regular file with a size.
                if drive_file["kind"] != "drive#file" or "fileSize" not in \
                        drive_file:
                    continue

                files.update({
                    drive_file["id"]: {
                        "name": drive_file["title"],
                        "size": int(drive_file["fileSize"])
                    }
                })

            if "nextPageToken" not in ls_json:
                break

            page_token = ls_json["nextPageToken"]

        pbar.close()
        return files
Example #4
0
def read_index(index_path: Path, rsa_priv_key_path: Path = None) -> dict:
    """Read a tinfoil index file, decrypting/decompressing as its header
    flags dictate, and return the deserialized JSON payload as a dict.

    File layout: 7-byte magic "TINFOIL", 1 flag byte (high nibble =
    encryption, low nibble = compression), 0x100-byte RSA-wrapped AES
    session key, 8-byte little-endian payload size, then the payload.

    :param index_path: path to the index file.
    :param rsa_priv_key_path: PEM private key, required only when the
        index is encrypted.
    :raises RuntimeError: missing file, bad magic, missing key for an
        encrypted index, unknown compression, or undecodable payload.
    """
    if index_path is None or not index_path.is_file():
        raise RuntimeError(
            f"Unable to read non-existant index file \"{index_path}\"")

    encryption_flag = None
    compression_flag = None
    session_key = None
    data_size = None
    to_read_buffer = None

    with open(index_path, "rb") as index_stream:
        # BUG FIX: original did str(index_stream.read(7)), which produces
        # "b'TINFOIL'" and can never equal "TINFOIL" — every index was
        # rejected. Decode the bytes instead; errors="replace" keeps bad
        # magic on the RuntimeError path rather than UnicodeDecodeError.
        magic = index_stream.read(7).decode("utf-8", errors="replace")

        if magic != "TINFOIL":
            raise RuntimeError(
                "Invalid tinfoil index magic.\n\nExpected Magic = " +
                f"\"TINFOIL\"\nMagic in index file = \"{magic}\"")

        flags = index_stream.read(1)[0]
        encryption_flag = flags & 0xF0  # high nibble

        key_available = rsa_priv_key_path is not None and \
            rsa_priv_key_path.is_file()

        if encryption_flag == EncryptionFlag.ENCRYPT and not key_available:
            raise RuntimeError(
                "Unable to decrypt encrypted index without private key.")

        compression_flag = flags & 0x0F  # low nibble

        if compression_flag not in CompressionFlag:
            raise RuntimeError(
                "Unimplemented compression method encountered while reading " +
                "index header.")

        session_key = index_stream.read(0x100)
        data_size = int.from_bytes(index_stream.read(8), byteorder="little")
        to_read_buffer = index_stream.read()

    if encryption_flag == EncryptionFlag.ENCRYPT:
        # read_text() closes the key file (original leaked the handle).
        rsa_priv_key = import_rsa_key(rsa_priv_key_path.read_text())
        pkcs1_oaep_ctx = new_pkcs1_oaep_ctx(rsa_priv_key,
                                            hashAlgo=SHA256,
                                            label=b"")
        aes_key = pkcs1_oaep_ctx.decrypt(session_key)
        aes_ctx = new_aes_ctx(aes_key, MODE_ECB)
        to_read_buffer = aes_ctx.decrypt(to_read_buffer)

    if compression_flag == CompressionFlag.ZSTD_COMPRESSION:
        to_read_buffer = ZstdDecompressor().decompress(
            to_read_buffer[:data_size])

    elif compression_flag == CompressionFlag.ZLIB_COMPRESSION:
        to_read_buffer = zlib_decompress(to_read_buffer[:data_size])

    elif compression_flag == CompressionFlag.NO_COMPRESSION:
        to_read_buffer = to_read_buffer[:data_size]

    try:
        return json_deserialize(to_read_buffer)

    except JSONDecodeError as err:
        raise RuntimeError("Unable to deserialize index data.") from err