async def _internal_transfer_to_telegram(
        client: TelegramClient,
        response: BinaryIO,
        file_size,
        file_name,
        progress_callback: callable,
        max_connection=None) -> Tuple[TypeInputFile, int]:
    """Upload a binary stream to Telegram using parallel sender connections.

    :param client: connected ``TelegramClient`` used for the upload.
    :param response: binary stream the file contents are read from.
    :param file_size: expected total payload size in bytes (the stream may
        turn out shorter; the part count is corrected at the end).
    :param file_name: file name reported to Telegram.
    :param progress_callback: optional callable invoked as
        ``progress_callback(uploaded_bytes, file_size)`` after each chunk;
        may return an awaitable, which is awaited.
    :param max_connection: optional cap on the number of parallel senders.
    :return: ``(handle, file_size)`` where *handle* is ``InputFileBig`` for
        large uploads, otherwise ``InputFile`` carrying an MD5 checksum.
    """
    file_id = helpers.generate_random_long()
    # MD5 is only required by the small-file upload API.
    hash_md5 = hashlib.md5()
    uploader = ParallelTransferrer(client)
    part_size, part_count, is_large = await uploader.init_upload(
        file_id, file_size, max_connection=max_connection)
    buffer = bytearray()
    part_index = 0
    uploaded = 0
    async for data in stream_file(response, chunk_size=part_size):
        part_index += 1
        if len(data) == 0:
            # Empty read means EOF; undo the speculative increment above.
            part_index -= 1
            break
        if not is_large:
            hash_md5.update(data)
        uploaded += len(data)
        if len(buffer) == 0:
            # stream_file yields part_size-sized chunks, so upload directly
            # without copying through the buffer.
            await uploader.upload(data)
            if progress_callback:
                r = progress_callback(uploaded, file_size)
                if r is not None and hasattr(r, "__await__"):
                    await r
            if part_index >= part_count:
                break
            continue
        # NOTE(review): with the fast path above, `buffer` never becomes
        # non-empty, so this re-chunking path is effectively unreachable;
        # kept for safety in case stream_file yields odd-sized chunks.
        new_len = len(buffer) + len(data)
        if new_len >= part_size:
            cutoff = part_size - len(buffer)
            buffer.extend(data[:cutoff])
            await uploader.upload(bytes(buffer))
            buffer.clear()
            buffer.extend(data[cutoff:])
        else:
            buffer.extend(data)
        if progress_callback:
            r = progress_callback(uploaded, file_size)
            if r is not None and hasattr(r, "__await__"):
                await r
        if part_index >= part_count:
            break
    # The stream may have been shorter than file_size suggested; patch the
    # actual part total into every pending sender request before finishing.
    for u in uploader.senders:
        u.request.file_total_parts = part_index
        u.part_count = part_index
    part_count = part_index
    if len(buffer) > 0:
        await uploader.upload(bytes(buffer))
    await uploader.finish_upload()
    if is_large:
        return InputFileBig(file_id, part_count, file_name), file_size
    return InputFile(file_id, part_count, file_name, hash_md5.hexdigest()), file_size
def _upload_file(tc, filepath):
    """ Upload a file to Telegram cloud.
    Stolen from telethon.TelegramClient.upload_file().
    Specialised for upload sticker images.
    :param tc: A TelegramClient
    :param filepath: A path-like object
    :return: An InputFile handle.
    """
    from telethon.tl.types import InputFile
    from telethon.tl.functions.upload import SaveFilePartRequest

    src = Path(filepath)
    file_id = _get_random_id()
    # Hardcoded 32KB chunks: every sticker file is under 350KB anyways.
    chunk_size = 32 * 1024
    total_parts = (src.stat().st_size + chunk_size - 1) // chunk_size
    checksum = md5()
    with src.open('rb') as stream:
        for index in range(total_parts):
            chunk = stream.read(chunk_size)
            tc.invoke(SaveFilePartRequest(file_id, index, chunk))
            checksum.update(chunk)
    return InputFile(id=file_id, parts=total_parts,
                     name=src.name, md5_checksum=checksum.hexdigest())
async def _internal_transfer_to_telegram(
        client: MautrixTelegramClient,
        response: ClientResponse) -> tuple[TypeInputFile, int]:
    """Stream an HTTP response body into Telegram via parallel upload.

    Returns a tuple of the resulting input-file handle (``InputFileBig``
    for large uploads, otherwise ``InputFile`` with an MD5 checksum) and
    the file size in bytes as reported by the response headers.
    """
    file_id = helpers.generate_random_long()
    file_size = response.content_length
    md5_digest = hashlib.md5()
    transferrer = ParallelTransferrer(client)
    part_size, part_count, is_large = await transferrer.init_upload(
        file_id, file_size)
    pending = bytearray()
    async for chunk in response.content:
        if not is_large:
            # Small uploads must supply an MD5 of the whole payload.
            md5_digest.update(chunk)
        if not pending and len(chunk) == part_size:
            # Fast path: a full part arrived with nothing buffered.
            await transferrer.upload(chunk)
            continue
        if len(pending) + len(chunk) >= part_size:
            # Fill the buffer up to exactly one part, ship it, and keep
            # the remainder of this chunk for the next part.
            take = part_size - len(pending)
            pending.extend(chunk[:take])
            await transferrer.upload(bytes(pending))
            pending.clear()
            pending.extend(chunk[take:])
        else:
            pending.extend(chunk)
    if pending:
        await transferrer.upload(bytes(pending))
    await transferrer.finish_upload()
    if is_large:
        return InputFileBig(file_id, part_count, "upload"), file_size
    return InputFile(file_id, part_count, "upload", md5_digest.hexdigest()), file_size
async def _internal_transfer_to_telegram(client: TelegramClient,
                                         response: Union[BinaryIO, BytesIO, BufferedReader],
                                         progress_callback: callable,
                                         name: str = None
                                         ) -> Tuple[TypeInputFile, int]:
    """Upload a local stream to Telegram using parallel connections.

    :param client: connected ``TelegramClient``.
    :param response: source stream; either an in-memory ``BytesIO`` or a
        readable file object with a usable ``.name``/``.tell()``.
    :param progress_callback: called as ``(bytes_read, file_size)`` after
        each chunk; a returned awaitable is awaited.
    :param name: optional display name (also used to stat non-BytesIO input).
    :return: ``(handle, file_size)`` — ``InputFileBig`` for large uploads,
        otherwise ``InputFile`` with an MD5 checksum.
    """
    file_id = helpers.generate_random_long()
    # BytesIO has no backing file, so measure the in-memory payload;
    # otherwise stat the path on disk.
    if isinstance(response, BytesIO):
        file_size = len(response.getvalue())
    else:
        file_size = os.path.getsize(name or response.name)
    md5_sum = hashlib.md5()
    transferrer = ParallelTransferrer(client)
    part_size, part_count, is_large = await transferrer.init_upload(file_id, file_size)
    pending = bytearray()
    for chunk in stream_file(response):
        if progress_callback:
            result = progress_callback(response.tell(), file_size)
            if inspect.isawaitable(result):
                await result
        if not is_large:
            # Small-file API requires an MD5 of the full payload.
            md5_sum.update(chunk)
        if not pending and len(chunk) == part_size:
            # Fast path: full part available with nothing buffered.
            await transferrer.upload(chunk)
            continue
        if len(pending) + len(chunk) >= part_size:
            take = part_size - len(pending)
            pending.extend(chunk[:take])
            await transferrer.upload(bytes(pending))
            pending.clear()
            pending.extend(chunk[take:])
        else:
            pending.extend(chunk)
    if pending:
        await transferrer.upload(bytes(pending))
    await transferrer.finish_upload()
    display_name = name or "upload"
    if is_large:
        return InputFileBig(file_id, part_count, display_name), file_size
    return InputFile(file_id, part_count, display_name, md5_sum.hexdigest()), file_size
def upload_file(self, file_path, part_size_kb=None, file_name=None, progress_callback=None):
    """Uploads the specified file_path and returns a handle which can be later used

    :param file_path: The file path of the file that will be uploaded
    :param part_size_kb: The part size when uploading the file. None = Automatic
    :param file_name: The name of the uploaded file. None = Automatic
    :param progress_callback: A callback function which takes two parameters,
                              uploaded size (in bytes) and total file size
                              (in bytes). This is called every time a part
                              is uploaded
    """
    file_size = path.getsize(file_path)
    if not part_size_kb:
        part_size_kb = self.find_appropiate_part_size(file_size)
    if part_size_kb > 512:
        raise ValueError('The part size must be less or equal to 512KB')
    part_size = int(part_size_kb * 1024)
    if part_size % 1024 != 0:
        raise ValueError('The part size must be evenly divisible by 1024')
    # Telegram distinguishes "big" files (over 10MB): those go through
    # SaveBigFilePartRequest and skip the MD5 checksum.
    is_large = file_size > 10 * 1024 * 1024
    part_count = (file_size + part_size - 1) // part_size
    # Microsecond-resolution timestamp is very likely unique per upload.
    file_id = int(datetime.now().timestamp() * (10 ** 6))
    hash_md5 = md5()
    with open(file_path, 'rb') as file:
        for part_index in range(part_count):
            part = file.read(part_size)
            if is_large:
                request = SaveBigFilePartRequest(file_id, part_index, part_count, part)
            else:
                request = SaveFilePartRequest(file_id, part_index, part)
            result = self.invoke(request)
            if not result:
                raise ValueError('Could not upload file part #{}'.format(part_index))
            hash_md5.update(part)
            if progress_callback:
                progress_callback(file.tell(), file_size)
    # Fall back to the on-disk name when none was specified.
    if not file_name:
        file_name = path.basename(file_path)
    # NOTE(review): even for >10MB uploads this returns a plain InputFile;
    # confirm callers do not need an InputFileBig handle for the big-file API.
    return InputFile(id=file_id, parts=part_count,
                     name=file_name, md5_checksum=hash_md5.hexdigest())