async def progress(current, total, event, start, type_of_ps, file_name=None):
    """Generic progress callback for uploads and downloads.

    Edits *event* with a textual progress bar roughly every 10 seconds
    and once more on completion.

    Args:
        current: bytes transferred so far.
        total: total bytes expected.
        event: message-like object supporting ``await event.edit(text)``.
        start: ``time.time()`` timestamp taken when the transfer began.
        type_of_ps: header line describing the running operation.
        file_name: optional file name shown in the status message.
    """
    now = time.time()
    diff = now - start
    # Ensure the youtube-dl working directory exists for later steps.
    # (exist_ok avoids the racy isdir()-then-makedirs() check.)
    out_folder = TEMP_DOWNLOAD_DIRECTORY + "youtubedl/"
    os.makedirs(out_folder, exist_ok=True)
    # Throttle message edits: only update about every 10s, or when done.
    if round(diff % 10.00) == 0 or current == total:
        # Guard all divisions: diff may be 0 on the very first callback
        # and speed may be 0 while nothing has transferred yet.
        percentage = current * 100 / total if total else 0
        speed = current / diff if diff else 0
        elapsed_time = round(diff) * 1000
        time_to_completion = \
            round((total - current) / speed) * 1000 if speed else 0
        estimated_total_time = elapsed_time + time_to_completion
        progress_str = "{0}{1} {2}%\n".format(
            "█" * math.floor(percentage / 10),
            "░" * (10 - math.floor(percentage / 10)),
            round(percentage, 2))
        tmp = progress_str + "{0} of {1}\nETA: {2}".format(
            humanbytes(current),
            humanbytes(total),
            time_formatter(estimated_total_time))
        if file_name:
            await event.edit("{}\nFile Name: `{}`\n{}".format(
                type_of_ps, file_name, tmp))
        else:
            await event.edit("{}\n{}".format(type_of_ps, tmp))
async def progress(current, total, event, start, type_of_ps, file_name=None):
    """Generic progress callback for uploads and downloads.

    Edits *event* with a ▰/▱ progress bar roughly every 10 seconds and
    once more on completion.

    Args:
        current: bytes transferred so far.
        total: total bytes expected.
        event: message-like object supporting ``await event.edit(text)``.
        start: ``time.time()`` timestamp taken when the transfer began.
        type_of_ps: header line describing the running operation.
        file_name: optional file name shown in the status message.
    """
    now = time.time()
    diff = now - start
    # Throttle message edits: only update about every 10s, or when done.
    if round(diff % 10.00) == 0 or current == total:
        # Guard all divisions: diff may be 0 on the very first callback
        # and speed may be 0 while nothing has transferred yet.
        percentage = current * 100 / total if total else 0
        speed = current / diff if diff else 0
        elapsed_time = round(diff) * 1000
        time_to_completion = \
            round((total - current) / speed) * 1000 if speed else 0
        estimated_total_time = elapsed_time + time_to_completion
        progress_str = "[{0}{1}] {2}%\n".format(
            "▰" * math.floor(percentage / 10),
            "▱" * (10 - math.floor(percentage / 10)),
            round(percentage, 2))
        tmp = progress_str + "{0} of {1}\nETA: {2}".format(
            humanbytes(current),
            humanbytes(total),
            time_formatter(estimated_total_time))
        if file_name:
            await event.edit("{}\nFile Name: `{}`\n{}".format(
                type_of_ps, file_name, tmp))
        else:
            await event.edit("{}\n{}".format(type_of_ps, tmp))
def _get(self, file_id: str) -> str:
    """Fetch every metadata field of a Drive file as pretty-printed JSON.

    Args:
        file_id: ID of the Google Drive file.

    Returns:
        An indented, key-sorted JSON string of the file's metadata.
    """
    meta = self._service.files().get(
        fileId=file_id, fields='*', supportsTeamDrives=True).execute()
    # Swap the raw byte counts for human-readable strings before dumping.
    for key in ('size', 'quotaBytesUsed'):
        meta[key] = humanbytes(int(meta.get(key, 0)))
    details = dumps(meta, sort_keys=True, indent=4)
    _LOG.info("Getting Google-Drive File Details => %s", details)
    return details
async def combine_(message: Message) -> None:
    """Combine split parts (``name.001``-style chunks) back into one file.

    Expects the path of one numbered part in ``message.input_str``; the
    actual joining is delegated to the project's SCLib helper while this
    coroutine polls it and keeps the status message updated.
    """
    file_path = message.input_str
    if not file_path:
        await message.err("missing file path!")
        return
    if not isfile(file_path):
        await message.err("file path not exists!")
        return
    # Split parts are named with a purely numeric extension (e.g. ".001").
    _, ext = splitext(basename(file_path))
    if not ext.lstrip('.').isdigit():
        await message.err("unsupported file!")
        return
    await message.edit("`processing...`")
    start_t = datetime.now()
    c_obj = SCLib(file_path)
    c_obj.combine()  # runs in the background; polled below
    tmp = \
        "__Combining file path...__\n" + \
        "```{}({}%)```\n" + \
        "× **File Path** : `{}`\n" + \
        "× **Dest** : `{}`\n" + \
        "× **Completed** : `{}`\n" + \
        "× **Total** : `{}`\n" + \
        "× **Speed** : `{}/s`\n" + \
        "× **ETA** : `{}`\n" + \
        "× **Completed Files** : `{}/{}`"
    count = 0
    # Poll once per second, but only edit the message every 5th tick to
    # avoid hitting Telegram flood limits.
    while not c_obj.finished:
        if message.process_is_canceled:
            c_obj.cancel()
        count += 1
        if count >= 5:
            count = 0
            await message.try_to_edit(
                tmp.format(c_obj.progress, c_obj.percentage, file_path,
                           c_obj.final_file_path, humanbytes(c_obj.completed),
                           humanbytes(c_obj.total), humanbytes(c_obj.speed),
                           c_obj.eta, c_obj.completed_files,
                           c_obj.total_files))
        await sleep(1)
    # A non-empty `output` signals an error (or cancel) message from SCLib.
    if c_obj.output:
        await message.err(c_obj.output)
    else:
        end_t = datetime.now()
        m_s = (end_t - start_t).seconds
        await message.edit(
            f"**combined** `{file_path}` into `{c_obj.final_file_path}` "
            f"in `{m_s}` seconds.", log=True)
def _copy(self, file_id: str) -> None:
    """Copy a Drive file or folder into ``self._parent_id``.

    Stores a formatted link (or the error) in ``self._output`` instead of
    returning; always calls ``self._finish()`` so the waiting side can
    observe completion.
    """
    try:
        drive_file = self._service.files().get(
            fileId=file_id, fields="id, name, mimeType",
            supportsTeamDrives=True).execute()
        if drive_file['mimeType'] == G_DRIVE_DIR_MIME_TYPE:
            # Folders can't be copied directly: create a new dir and
            # recursively copy the contents into it.
            dir_id = self._create_drive_dir(drive_file['name'],
                                            self._parent_id)
            self._copy_dir(file_id, dir_id)
            ret_id = dir_id
        else:
            ret_id = self._copy_file(file_id, self._parent_id)
        # Re-fetch the copy to build the user-facing link (size included).
        drive_file = self._service.files().get(
            fileId=ret_id, fields="id, name, mimeType, size",
            supportsTeamDrives=True).execute()
        mime_type = drive_file['mimeType']
        file_name = drive_file['name']
        file_id = drive_file['id']
        if mime_type == G_DRIVE_DIR_MIME_TYPE:
            self._output = G_DRIVE_FOLDER_LINK.format(file_id, file_name)
        else:
            file_size = humanbytes(int(drive_file.get('size', 0)))
            self._output = G_DRIVE_FILE_LINK.format(
                file_id, file_name, file_size)
    except HttpError as h_e:
        # Surface the API error object itself; the caller inspects it.
        _LOG.exception(h_e)
        self._output = h_e
    except ProcessCanceled:
        self._output = "`Process Canceled!`"
    finally:
        self._finish()
def mega_dl(url: str) -> str:
    """MEGA.nz direct links generator.

    Using https://github.com/tonikelope/megadown

    Args:
        url: text that may contain a MEGA.nz link.

    Returns:
        A markdown ``[name (size)](direct_url)`` line, or an error string.
    """
    reply = ''
    try:
        link = re.findall(r'\bhttps?://.*mega.*\.nz\S+', url)[0]
    except IndexError:
        reply = "`No MEGA.nz links found`\n"
        return reply
    command = f'bin/megadown -q -m {link}'
    # Use a context manager so the pipe is always closed (the original
    # leaked the popen object).
    with popen(command) as pipe:
        result = pipe.read()
    try:
        data = json.loads(result)
    except json.JSONDecodeError:
        reply += "`Error: Can't extract the link`\n"
        return reply
    dl_url = data['url']
    name = data['file_name']
    size = humanbytes(int(data['file_size']))
    reply += f'[{name} ({size})]({dl_url})\n'
    return reply
def cm_ru(url: str) -> str:
    """cloud.mail.ru direct links generator.

    Using https://github.com/JrMasterModelBuilder/cmrudl.py

    Args:
        url: text that may contain a cloud.mail.ru link.

    Returns:
        A markdown ``[name (size)](direct_url)`` line, or an error string.
    """
    reply = ''
    try:
        link = re.findall(r'\bhttps?://.*cloud\.mail\.ru\S+', url)[0]
    except IndexError:
        reply = "`No cloud.mail.ru links found`\n"
        return reply
    command = f'bin/cmrudl -s {link}'
    # Use a context manager so the pipe is always closed (the original
    # leaked the popen object).
    with popen(command) as pipe:
        result = pipe.read()
    # Guard against empty tool output: splitlines()[-1] would raise
    # IndexError before the JSON error path could report anything useful.
    lines = result.splitlines()
    if not lines:
        reply += "`Error: Can't extract the link`\n"
        return reply
    try:
        data = json.loads(lines[-1])
    except json.decoder.JSONDecodeError:
        reply += "`Error: Can't extract the link`\n"
        return reply
    dl_url = data['download']
    name = data['file_name']
    size = humanbytes(int(data['file_size']))
    reply += f'[{name} ({size})]({dl_url})\n'
    return reply
async def split_(message: Message) -> None:
    """Split a file into fixed-size chunks via the project's SCLib helper.

    Pattern group(1) carries the chunk size, group(2) the source path;
    this coroutine polls SCLib and keeps the status message updated.
    """
    split_size = int(message.matches[0].group(1))
    file_path = str(message.matches[0].group(2))
    if not file_path:
        await message.err("missing file path!")
        return
    if not isfile(file_path):
        await message.err("file path not exists!")
        return
    await message.edit("`processing...`")
    start_t = datetime.now()
    s_obj = SCLib(file_path)
    s_obj.split(split_size)  # runs in the background; polled below
    tmp = \
        "__Splitting file path...__\n" + \
        "```{}({}%)```\n" + \
        "× **File Path** : `{}`\n" + \
        "× **Dest** : `{}`\n" + \
        "× **Completed** : `{}`\n" + \
        "× **Total** : `{}`\n" + \
        "× **Speed** : `{}/s`\n" + \
        "× **ETA** : `{}`\n" + \
        "× **Completed Files** : `{}/{}`"
    count = 0
    # Poll once per second, but only edit the message every 5th tick to
    # avoid hitting Telegram flood limits.
    while not s_obj.finished:
        if message.process_is_canceled:
            s_obj.cancel()
        count += 1
        if count >= 5:
            count = 0
            await message.try_to_edit(
                tmp.format(s_obj.progress, s_obj.percentage, file_path,
                           s_obj.final_file_path, humanbytes(s_obj.completed),
                           humanbytes(s_obj.total), humanbytes(s_obj.speed),
                           s_obj.eta, s_obj.completed_files,
                           s_obj.total_files))
        await sleep(1)
    # A non-empty `output` signals an error (or cancel) message from SCLib.
    if s_obj.output:
        await message.err(s_obj.output)
    else:
        end_t = datetime.now()
        m_s = (end_t - start_t).seconds
        await message.edit(
            f"**split** `{file_path}` into `{s_obj.final_file_path}` "
            f"in `{m_s}` seconds.", log=True)
def _search(self,
            search_query: str,
            flags: dict,  # annotation fixed: code below calls flags.get()
            parent_id: str = "",
            list_root: bool = False) -> str:
    """Search Google Drive and return a markdown list of matches.

    Args:
        search_query: substring to match against file names.
        flags: parsed command flags; '-f' forces a global (unscoped)
            search, '-l' caps the number of results (default 20).
        parent_id: folder to scope the search to (falls back to
            ``self._parent_id``).
        list_root: whether this call is listing the configured root
            folder (only affects the output header).

    Returns:
        A formatted result string, or "`Not Found!`".
    """
    force = '-f' in flags
    pid = parent_id or self._parent_id
    # Scope the query to the parent folder unless a global search is forced.
    if pid and not force:
        query = f"'{pid}' in parents and (name contains '{search_query}')"
    else:
        query = f"name contains '{search_query}'"
    page_token = None
    limit = int(flags.get('-l', 20))
    # Drive pages are fetched at most 50 at a time.
    page_size = limit if limit < 50 else 50
    fields = 'nextPageToken, files(id, name, mimeType, size)'
    results = []
    msg = ""
    while True:
        response = self._service.files().list(
            supportsTeamDrives=True,
            includeTeamDriveItems=True,
            q=query,
            spaces='drive',
            corpora='allDrives',
            fields=fields,
            pageSize=page_size,
            orderBy='modifiedTime desc',
            pageToken=page_token).execute()
        for file_ in response.get('files', []):
            if len(results) >= limit:
                break
            # Folders get a folder link; files get a link with size.
            if file_.get('mimeType') == G_DRIVE_DIR_MIME_TYPE:
                msg += G_DRIVE_FOLDER_LINK.format(file_.get('id'),
                                                  file_.get('name'))
            else:
                msg += G_DRIVE_FILE_LINK.format(
                    file_.get('id'), file_.get('name'),
                    humanbytes(int(file_.get('size', 0))))
            msg += '\n'
            results.append(file_)
        if len(results) >= limit:
            break
        page_token = response.get('nextPageToken', None)
        if page_token is None:
            break
    # `results` is only used for counting; free it explicitly.
    del results
    if not msg:
        return "`Not Found!`"
    # Pick the header matching how this search was invoked.
    if parent_id and not force:
        out = f"**List GDrive Folder** : `{parent_id}`\n"
    elif list_root and not force:
        out = f"**List GDrive Root Folder** : `{self._parent_id}`\n"
    else:
        out = f"**GDrive Search Query** : `{search_query}`\n"
    return out + f"**Limit** : `{limit}`\n\n__Results__ : \n\n" + msg
def _download_file(self, path: str, name: str, **kwargs) -> None:
    """Download one Drive file to ``path/name`` in 50 MiB chunks.

    Updates ``self._progress`` with a formatted status string after each
    chunk; raises ProcessCanceled if the user cancels mid-transfer.
    ``kwargs['id']`` must hold the Drive file ID.
    """
    request = self._service.files().get_media(fileId=kwargs['id'],
                                              supportsTeamDrives=True)
    with io.FileIO(os.path.join(path, name), 'wb') as d_f:
        d_file_obj = MediaIoBaseDownload(d_f, request,
                                         chunksize=50 * 1024 * 1024)
        c_time = time.time()
        done = False
        while done is False:
            status, done = d_file_obj.next_chunk()
            if self._is_canceled:
                raise ProcessCanceled
            if status:
                f_size = status.total_size
                diff = time.time() - c_time
                downloaded = status.resumable_progress
                percentage = downloaded / f_size * 100
                speed = round(downloaded / diff, 2)
                # NOTE(review): divides by `speed`; would raise if no
                # bytes arrived in the first chunk — confirm upstream.
                eta = round((f_size - downloaded) / speed)
                tmp = \
                    "__Downloading From GDrive...__\n" + \
                    "```[{}{}]({}%)```\n" + \
                    "× **File Name** : `{}`\n" + \
                    "× **File Size** : `{}`\n" + \
                    "× **Downloaded** : `{}`\n" + \
                    "× **Completed** : `{}/{}`\n" + \
                    "× **Speed** : `{}/s`\n" + \
                    "× **ETA** : `{}`"
                # 20-slot progress bar (one slot per 5%).
                self._progress = tmp.format(
                    "".join((Config.FINISHED_PROGRESS_STR
                             for i in range(math.floor(percentage / 5)))),
                    "".join((Config.UNFINISHED_PROGRESS_STR
                             for i in range(20 - math.floor(percentage / 5)))),
                    round(percentage, 2),
                    name,
                    humanbytes(f_size),
                    humanbytes(downloaded),
                    self._completed,
                    self._list,
                    humanbytes(speed),
                    time_formatter(eta))
    self._completed += 1
    _LOG.info("Downloaded Google-Drive File => Name: %s ID: %s",
              name, kwargs['id'])
async def gdrive_list_file_md(service, file_id):
    """Return Drive (v2) metadata of *file_id* as pretty-printed JSON.

    Any failure (API error, missing key) is returned as its string
    representation instead of raising.
    """
    try:
        info = service.files().get(fileId=file_id).execute()
        # logger.info(info)
        meta = {"title": info["title"]}
        kind = info["mimeType"]
        meta["createdDate"] = info["createdDate"]
        if kind == G_DRIVE_DIR_MIME_TYPE:
            # Directories: no size/checksum, expose the folder link.
            meta["mimeType"] = "directory"
            meta["previewURL"] = info["alternateLink"]
        else:
            # Regular files: include checksum, sizes and download link.
            meta["mimeType"] = info["mimeType"]
            meta["md5Checksum"] = info["md5Checksum"]
            meta["fileSize"] = str(humanbytes(int(info["fileSize"])))
            meta["quotaBytesUsed"] = str(
                humanbytes(int(info["quotaBytesUsed"])))
            meta["previewURL"] = info["downloadUrl"]
        return json.dumps(meta, sort_keys=True, indent=4)
    except Exception as e:
        return str(e)
def _set_perms(self, file_id: str) -> str:
    """Make a Drive item shareable and return its formatted link.

    Args:
        file_id: ID of the file or folder to open up.

    Returns:
        A folder link for directories, otherwise a file link with size.
    """
    self._set_permission(file_id)
    meta = self._service.files().get(
        fileId=file_id, supportsTeamDrives=True,
        fields="id, name, mimeType, size").execute()
    _LOG.info("Set Permission : for Google-Drive File : %s\n%s",
              file_id, meta)
    mime_type = meta['mimeType']
    file_name = meta['name']
    file_id = meta['id']
    if mime_type == G_DRIVE_DIR_MIME_TYPE:
        return G_DRIVE_FOLDER_LINK.format(file_id, file_name)
    return G_DRIVE_FILE_LINK.format(
        file_id, file_name, humanbytes(int(meta.get('size', 0))))
async def _(event):
    """Reply with the total media size per MIME type for a chat.

    Iterates every message of the target chat, sums file sizes grouped
    by MIME type and posts the totals as a YAML block.
    """
    # Ignore forwarded copies of the command.
    if event.fwd_from:
        return
    # An explicit argument overrides the current chat as the target.
    target = event.pattern_match.group(1) or event.chat_id
    status_message = await event.reply("... this might take some time "
                                       "depending on the number of messages "
                                       "in the chat ...")
    size_by_mime = {}
    async for msg in event.client.iter_messages(entity=target, limit=None):
        if msg and msg.file:
            mime = msg.file.mime_type
            size_by_mime[mime] = size_by_mime.get(mime, 0) + msg.file.size
    readable = {mime: humanbytes(total)
                for mime, total in size_by_mime.items()}
    await status_message.edit(yaml_format(readable), parse_mode=parse_pre)
    await event.delete()
def search_idop_se(search_query):
    """Search idope.se and return the torrents found.

    Args:
        search_query: term to search for.

    Returns:
        list[dict]: one dict per result with title/hash/age/size/seeds/peers.
    """
    url = "https://idope.se/search/{}/".format(search_query)
    raw_json = requests.get(url).json()
    results = raw_json["result"]["items"]
    # The content scrapped on 24.09.2018 22:56:45
    return [{
        "title": item["name"],
        # renamed local: the original shadowed the builtin hash()
        "hash": item["info_hash"],
        "age": item["create_time"],
        "size": humanbytes(item["length"]),
        "seeds": str(item["seeds"]),
        "peers": "NA",  # site does not expose peer counts
    } for item in results]
def _move(self, file_id: str) -> str:
    """Move a Drive item under ``self._parent_id`` and return its link.

    Args:
        file_id: ID of the file or folder to move.

    Returns:
        A folder link for directories, otherwise a file link with size.
    """
    # A file can have several parents; detach it from all of them.
    previous_parents = ",".join(self._service.files().get(
        fileId=file_id, fields='parents',
        supportsTeamDrives=True).execute()['parents'])
    drive_file = self._service.files().update(
        fileId=file_id,
        addParents=self._parent_id,
        removeParents=previous_parents,
        fields="id, name, mimeType, size, parents",
        supportsTeamDrives=True).execute()
    # BUG FIX: the old message mixed %-style args with a literal
    # "{drive_file['parents']}" f-string leftover, so the destination
    # parents were never actually logged.
    _LOG.info(
        "Moved file : %s => from : %s to : %s in Google-Drive",
        file_id, previous_parents, drive_file['parents'])
    mime_type = drive_file['mimeType']
    file_name = drive_file['name']
    file_id = drive_file['id']
    if mime_type == G_DRIVE_DIR_MIME_TYPE:
        return G_DRIVE_FOLDER_LINK.format(file_id, file_name)
    file_size = humanbytes(int(drive_file.get('size', 0)))
    return G_DRIVE_FILE_LINK.format(file_id, file_name, file_size)
def __progress(data: dict):
    """youtube-dl progress hook: mirror download status to a Telegram message.

    Closes over `startTime` and `message` from the enclosing scope.
    `data` is the dict youtube-dl passes to progress hooks.
    """
    # Throttle: only act in a ~0.1s window once every 4 seconds.
    if ((time() - startTime) % 4) > 3.9:
        if data['status'] == "downloading":
            eta = data.get('eta')
            speed = data.get('speed')
            # Both values can be None early in the download.
            if not (eta and speed):
                return
            out = "**Speed** >> {}/s\n**ETA** >> {}\n".format(
                humanbytes(speed), time_formatter(eta))
            out += f'**File Name** >> `{data["filename"]}`\n\n'
            current = data.get('downloaded_bytes')
            total = data.get("total_bytes")
            if current and total:
                percentage = int(current) * 100 / int(total)
                out += f"Progress >> {int(percentage)}%\n"
                # 20-slot progress bar (one slot per 5%).
                out += "[{}{}]".format(
                    ''.join((Config.FINISHED_PROGRESS_STR
                             for _ in range(floor(percentage / 5)))),
                    ''.join((Config.UNFINISHED_PROGRESS_STR
                             for _ in range(20 - floor(percentage / 5)))))
            # Only edit when the text changed; the hook runs in a worker
            # thread, hence the synchronous run_until_complete call.
            if message.text != out:
                asyncio.get_event_loop().run_until_complete(
                    message.edit(out))
async def uploadtotg(message: Message):
    """Upload a local file (or a file fetched from a URL) to Telegram.

    Input may be a filesystem path or an http/ftp URL, optionally
    followed by ``|custom_name``. URL downloads go through SmartDL with
    a live progress message and are deleted after upload.
    """
    flags = message.flags
    path_ = message.filtered_input_str
    if not path_:
        await message.edit("invalid input!, check `.help .upload`", del_in=5)
        return
    is_url = re.search(r"(?:https?|ftp)://[^|\s]+\.[^|\s]+", path_)
    del_path = False  # whether to delete the file after uploading
    if is_url:
        del_path = True
        await message.edit("`Downloading From URL...`")
        if not os.path.isdir(Config.DOWN_PATH):
            os.mkdir(Config.DOWN_PATH)
        url = is_url[0]
        file_name = unquote_plus(os.path.basename(url))
        # "url|name" lets the user pick the saved file name.
        if "|" in path_:
            file_name = path_.split("|")[1].strip()
        path_ = os.path.join(Config.DOWN_PATH, file_name)
        try:
            downloader = SmartDL(url, path_, progress_bar=False)
            downloader.start(blocking=False)
            count = 0
            while not downloader.isFinished():
                if message.process_is_canceled:
                    downloader.stop()
                    raise Exception('Process Canceled!')
                total_length = downloader.filesize if downloader.filesize else 0
                downloaded = downloader.get_dl_size()
                percentage = downloader.get_progress() * 100
                speed = downloader.get_speed(human=True)
                estimated_total_time = downloader.get_eta(human=True)
                progress_str = \
                    "__{}__\n" + \
                    "```[{}{}]```\n" + \
                    "× **Progress** : `{}%`\n" + \
                    "× **URL** : `{}`\n" + \
                    "× **FILENAME** : `{}`\n" + \
                    "× **Completed** : `{}`\n" + \
                    "× **Total** : `{}`\n" + \
                    "× **Speed** : `{}`\n" + \
                    "× **ETA** : `{}`"
                # 20-slot progress bar (one slot per 5%).
                progress_str = progress_str.format(
                    "trying to download",
                    ''.join((Config.FINISHED_PROGRESS_STR
                             for i in range(math.floor(percentage / 5)))),
                    ''.join((Config.UNFINISHED_PROGRESS_STR
                             for i in range(20 - math.floor(percentage / 5)))),
                    round(percentage, 2),
                    url,
                    file_name,
                    humanbytes(downloaded),
                    humanbytes(total_length),
                    speed,
                    estimated_total_time)
                count += 1
                # Edit only every 5th tick to avoid Telegram flood limits.
                if count >= 5:
                    count = 0
                    await message.try_to_edit(progress_str,
                                              disable_web_page_preview=True)
                await asyncio.sleep(1)
        except Exception as d_e:
            await message.err(d_e)
            return
    # "path|name" on a local file: rename it into DOWN_PATH first.
    if "|" in path_:
        path_, file_name = path_.split("|")
        path_ = path_.strip()
        if os.path.isfile(path_):
            new_path = os.path.join(Config.DOWN_PATH, file_name.strip())
            os.rename(path_, new_path)
            path_ = new_path
    try:
        string = Path(path_)
    except IndexError:
        await message.edit("wrong syntax\n`.upload [path]`")
    else:
        await message.delete()
        await explorer(string, message.chat.id, flags, del_path)
async def down_load_media(message: Message):
    """Download replied-to Telegram media, or a URL, into Config.DOWN_PATH.

    Replied media goes through the client's download_media with a
    progress callback; URLs (optionally ``url|custom_name``) go through
    SmartDL with a live progress message.
    """
    await message.edit("Trying to Download...")
    if not os.path.isdir(Config.DOWN_PATH):
        os.mkdir(Config.DOWN_PATH)
    if message.reply_to_message and message.reply_to_message.media:
        # Case 1: download the media of the replied-to message.
        start_t = datetime.now()
        c_time = time.time()
        dl_loc = await userbot.download_media(
            message=message.reply_to_message,
            file_name=Config.DOWN_PATH,
            progress=progress,
            progress_args=("trying to download", userbot, message, c_time))
        if message.process_is_canceled:
            await message.edit("`Process Canceled!`", del_in=5)
        else:
            dl_loc = os.path.join(Config.DOWN_PATH,
                                  os.path.basename(dl_loc))
            end_t = datetime.now()
            m_s = (end_t - start_t).seconds
            await message.edit(f"Downloaded to `{dl_loc}` in {m_s} seconds")
    elif message.input_str:
        # Case 2: download from a URL given as argument.
        start_t = datetime.now()
        url = message.input_str
        custom_file_name = unquote_plus(os.path.basename(url))
        # "url|name" lets the user pick the saved file name.
        if "|" in url:
            url, custom_file_name = url.split("|")
            url = url.strip()
            custom_file_name = custom_file_name.strip()
        download_file_path = os.path.join(Config.DOWN_PATH,
                                          custom_file_name)
        try:
            downloader = SmartDL(url, download_file_path,
                                 progress_bar=False)
            downloader.start(blocking=False)
            count = 0
            while not downloader.isFinished():
                if message.process_is_canceled:
                    downloader.stop()
                    raise Exception('Process Canceled!')
                total_length = downloader.filesize if downloader.filesize else 0
                downloaded = downloader.get_dl_size()
                percentage = downloader.get_progress() * 100
                speed = downloader.get_speed(human=True)
                estimated_total_time = downloader.get_eta(human=True)
                progress_str = \
                    "__{}__\n" + \
                    "```[{}{}]```\n" + \
                    "× **Progress** : `{}%`\n" + \
                    "× **URL** : `{}`\n" + \
                    "× **FILENAME** : `{}`\n" + \
                    "× **Completed** : `{}`\n" + \
                    "× **Total** : `{}`\n" + \
                    "× **Speed** : `{}`\n" + \
                    "× **ETA** : `{}`"
                # 20-slot progress bar (one slot per 5%).
                progress_str = progress_str.format(
                    "trying to download",
                    ''.join((Config.FINISHED_PROGRESS_STR
                             for i in range(math.floor(percentage / 5)))),
                    ''.join((Config.UNFINISHED_PROGRESS_STR
                             for i in range(20 - math.floor(percentage / 5)))),
                    round(percentage, 2),
                    url,
                    custom_file_name,
                    humanbytes(downloaded),
                    humanbytes(total_length),
                    speed,
                    estimated_total_time)
                count += 1
                # Edit only every 5th tick to avoid Telegram flood limits.
                if count >= 5:
                    count = 0
                    await message.try_to_edit(progress_str,
                                              disable_web_page_preview=True)
                await asyncio.sleep(1)
        except Exception as e:
            await message.err(e)
        else:
            end_t = datetime.now()
            m_s = (end_t - start_t).seconds
            await message.edit(
                f"Downloaded to `{download_file_path}` in {m_s} seconds")
    else:
        await message.edit("Please read `.help download`", del_in=5)
async def upload(self) -> None:
    """Upload from file/folder/link/tg file to GDrive.

    The source is taken from the replied-to message's media, a URL in
    the input string, or a local path; downloads are removed again after
    the GDrive upload finishes. The actual upload runs on a worker
    thread (``self._upload``) while this coroutine polls its progress.
    """
    if not os.path.isdir(Config.DOWN_PATH):
        os.mkdir(Config.DOWN_PATH)
    replied = self._message.reply_to_message
    is_url = re.search(r"(?:https?|ftp)://[^\|\s]+\.[^\|\s]+",
                       self._message.input_str)
    dl_loc = None  # set when the source had to be downloaded first
    if replied and replied.media:
        # Case 1: fetch the replied-to Telegram media.
        await self._message.edit("`Downloading From TG...`")
        c_time = time.time()
        file_name = Config.DOWN_PATH
        if self._message.input_str:
            file_name = os.path.join(Config.DOWN_PATH,
                                     self._message.input_str)
        dl_loc = await userbot.download_media(
            message=replied,
            file_name=file_name,
            progress=progress,
            progress_args=("trying to download", userbot,
                           self._message, c_time))
        if self._message.process_is_canceled:
            await self._message.edit("`Process Canceled!`", del_in=5)
            return
        dl_loc = os.path.join(Config.DOWN_PATH, os.path.basename(dl_loc))
    elif is_url:
        # Case 2: fetch the file from a URL via SmartDL.
        await self._message.edit("`Downloading From URL...`")
        url = is_url[0]
        file_name = unquote_plus(os.path.basename(url))
        # "url|name" lets the user pick the saved file name.
        if "|" in self._message.input_str:
            file_name = self._message.input_str.split("|")[1].strip()
        dl_loc = os.path.join(Config.DOWN_PATH, file_name)
        try:
            downloader = SmartDL(url, dl_loc, progress_bar=False)
            downloader.start(blocking=False)
            count = 0
            while not downloader.isFinished():
                if self._message.process_is_canceled:
                    downloader.stop()
                    raise Exception('Process Canceled!')
                total_length = downloader.filesize if downloader.filesize else 0
                downloaded = downloader.get_dl_size()
                percentage = downloader.get_progress() * 100
                speed = downloader.get_speed(human=True)
                estimated_total_time = downloader.get_eta(human=True)
                progress_str = \
                    "__{}__\n" + \
                    "```[{}{}]```\n" + \
                    "× **Progress** : `{}%`\n" + \
                    "× **URL** : `{}`\n" + \
                    "× **FILENAME** : `{}`\n" + \
                    "× **Completed** : `{}`\n" + \
                    "× **Total** : `{}`\n" + \
                    "× **Speed** : `{}`\n" + \
                    "× **ETA** : `{}`"
                # 20-slot progress bar (one slot per 5%).
                progress_str = progress_str.format(
                    "trying to download",
                    ''.join((Config.FINISHED_PROGRESS_STR
                             for i in range(math.floor(percentage / 5)))),
                    ''.join((Config.UNFINISHED_PROGRESS_STR
                             for i in range(20 - math.floor(percentage / 5)))),
                    round(percentage, 2),
                    url,
                    file_name,
                    humanbytes(downloaded),
                    humanbytes(total_length),
                    speed,
                    estimated_total_time)
                count += 1
                # Edit only every 5th tick to avoid Telegram flood limits.
                if count >= 5:
                    count = 0
                    await self._message.try_to_edit(
                        progress_str, disable_web_page_preview=True)
                await asyncio.sleep(1)
        except Exception as d_e:
            await self._message.err(d_e)
            return
    # Case 3 (fallthrough): the input string is already a local path.
    file_path = dl_loc if dl_loc else self._message.input_str
    if not os.path.exists(file_path):
        await self._message.err("invalid file path provided?")
        return
    # "path|name": rename the file before uploading.
    if "|" in file_path:
        file_path, file_name = file_path.split("|")
        new_path = os.path.join(os.path.dirname(file_path.strip()),
                                file_name.strip())
        os.rename(file_path.strip(), new_path)
        file_path = new_path
    await self._message.edit("`Loading GDrive Upload...`")
    # The blocking upload runs on the thread pool; poll it below.
    pool.submit_thread(self._upload, file_path)
    start_t = datetime.now()
    count = 0
    while not self._is_finished:
        count += 1
        if self._message.process_is_canceled:
            self._cancel()
        if self._progress is not None and count >= 5:
            count = 0
            await self._message.try_to_edit(self._progress)
        await asyncio.sleep(1)
    # Clean up the temporary download, if any.
    if dl_loc and os.path.exists(dl_loc):
        os.remove(dl_loc)
    end_t = datetime.now()
    m_s = (end_t - start_t).seconds
    if isinstance(self._output, HttpError):
        out = f"**ERROR** : `{self._output._get_reason()}`"
    elif self._output is not None and not self._is_canceled:
        out = f"**Uploaded Successfully** __in {m_s} seconds__\n\n{self._output}"
    elif self._output is not None and self._is_canceled:
        out = self._output
    else:
        out = "`failed to upload.. check logs?`"
    await self._message.edit(out, disable_web_page_preview=True, log=True)
async def _(event):  # sourcery no-metrics
    """Show the complete media/file summary of a group.

    Counts and sizes every media message (grouped by media type from
    TYPES), tracks the largest item per type, and posts a PrettyTable
    report with totals and runtime statistics.
    """
    entity = event.chat_id
    input_str = event.pattern_match.group(1)
    # An argument may be a numeric chat id or a username.
    if input_str:
        try:
            entity = int(input_str)
        except ValueError:
            entity = input_str
    starttime = int(time.monotonic())
    x = PrettyTable()
    totalcount = totalsize = msg_count = 0
    x.title = "File Summary"
    x.field_names = ["Media", "Count", "File size"]
    largest = " <b>Largest Size</b>\n"
    try:
        chatdata = await event.client.get_entity(entity)
    except Exception as e:
        return await edit_delete(
            event,
            f"<b>Error : </b><code>{e}</code>",
            time=5,
            parse_mode="HTML",
        )
    # Build an HTML link to the chat (channel vs. private user).
    if type(chatdata).__name__ == "Channel":
        if chatdata.username:
            link = f"<a href='t.me/{chatdata.username}'>{chatdata.title}</a>"
        else:
            link = chatdata.title
    else:
        link = f"<a href='tg://user?id={chatdata.id}'>{chatdata.first_name}</a>"
    event = await edit_or_reply(
        event,
        f"<code>Counting files and file size of </code><b>{link}</b>\n<code>This may take some time also depends on number of group messages</code>",
        parse_mode="HTML",
    )
    # Per-media-type accumulator: total size, count, and largest item.
    media_dict = {
        m: {
            "file_size": 0,
            "count": 0,
            "max_size": 0,
            "max_file_link": ""
        }
        for m in TYPES
    }
    async for message in event.client.iter_messages(entity=entity,
                                                    limit=None):
        msg_count += 1
        media = media_type(message)
        if media is not None:
            media_dict[media]["file_size"] += message.file.size
            media_dict[media]["count"] += 1
            # Remember a deep link to the largest file of this type.
            if message.file.size > media_dict[media]["max_size"]:
                media_dict[media]["max_size"] = message.file.size
                if type(chatdata).__name__ == "Channel":
                    media_dict[media][
                        "max_file_link"
                    ] = f"https://t.me/c/{chatdata.id}/{message.id}"  # pylint: disable=line-too-long
                else:
                    media_dict[media][
                        "max_file_link"
                    ] = f"tg://openmessage?user_id={chatdata.id}&message_id={message.id}"  # pylint: disable=line-too-long
            totalsize += message.file.size
            totalcount += 1
    for mediax in TYPES:
        x.add_row([
            mediax,
            media_dict[mediax]["count"],
            humanbytes(media_dict[mediax]["file_size"]),
        ])
        if media_dict[mediax]["count"] != 0:
            largest += f" • <b><a href='{media_dict[mediax]['max_file_link']}'>{mediax}</a> : </b><code>{humanbytes(media_dict[mediax]['max_size'])}</code>\n"
    endtime = int(time.monotonic())
    # Report runtime in minutes past two minutes, else in seconds.
    if endtime - starttime >= 120:
        runtime = str(round(((endtime - starttime) / 60), 2)) + " minutes"
    else:
        runtime = str(endtime - starttime) + " seconds"
    avghubytes = humanbytes(weird_division(totalsize, totalcount))
    avgruntime = (str(
        round((weird_division(
            (endtime - starttime), totalcount)) * 1000, 2)) + " ms")
    totalstring = f"<code><b>Total files : </b> | {totalcount} \nTotal file size : | {humanbytes(totalsize)} \nAvg. file size : | {avghubytes} \n</code>"
    runtimestring = f"<code>Runtime : | {runtime}\
    \nRuntime per file : | {avgruntime}\
    \n</code>"
    line = "<code>----------------------------------</code>\n"
    result = f"<b>Group : {link}</b>\n\n"
    result += f"<code>Total Messages: {msg_count}</code>\n"
    result += "<b>File Summary : </b>\n"
    result += f"<code>{x}</code>\n"
    result += f"{largest}"
    result += line + totalstring + line + runtimestring + line
    await event.edit(result, parse_mode="HTML", link_preview=False)
def _upload_file(self, file_path: str, parent_id: str) -> str:
    """Upload one local file to Google Drive and return its link.

    Empty files are uploaded non-resumably (the resumable API rejects
    zero-byte media); everything else streams in 50 MiB chunks while
    ``self._progress`` is updated. Raises ProcessCanceled on cancel.
    """
    if self._is_canceled:
        raise ProcessCanceled
    mime_type = guess_type(file_path)[0] or "text/plain"
    file_name = os.path.basename(file_path)
    body = {
        "name": file_name,
        "mimeType": mime_type,
        "description": "Uploaded using Userbot"
    }
    if parent_id:
        body["parents"] = [parent_id]
    if os.path.getsize(file_path) == 0:
        # Zero-byte file: one-shot, non-resumable upload.
        media_body = MediaFileUpload(file_path,
                                     mimetype=mime_type,
                                     resumable=False)
        u_file_obj = self._service.files().create(
            body=body, media_body=media_body,
            supportsTeamDrives=True).execute()
        file_id = u_file_obj.get("id")
    else:
        media_body = MediaFileUpload(file_path,
                                     mimetype=mime_type,
                                     chunksize=50 * 1024 * 1024,
                                     resumable=True)
        u_file_obj = self._service.files().create(
            body=body, media_body=media_body, supportsTeamDrives=True)
        c_time = time.time()
        response = None
        while response is None:
            status, response = u_file_obj.next_chunk()
            if self._is_canceled:
                raise ProcessCanceled
            if status:
                f_size = status.total_size
                diff = time.time() - c_time
                uploaded = status.resumable_progress
                percentage = uploaded / f_size * 100
                speed = round(uploaded / diff, 2)
                # NOTE(review): divides by `speed`; would raise if no
                # bytes were sent in the first chunk — confirm upstream.
                eta = round((f_size - uploaded) / speed)
                tmp = \
                    "__Uploading to GDrive...__\n" + \
                    "```[{}{}]({}%)```\n" + \
                    "× **File Name** : `{}`\n" + \
                    "× **File Size** : `{}`\n" + \
                    "× **Uploaded** : `{}`\n" + \
                    "× **Completed** : `{}/{}`\n" + \
                    "× **Speed** : `{}/s`\n" + \
                    "× **ETA** : `{}`"
                # 20-slot progress bar (one slot per 5%).
                self._progress = tmp.format(
                    "".join((Config.FINISHED_PROGRESS_STR
                             for i in range(math.floor(percentage / 5)))),
                    "".join((Config.UNFINISHED_PROGRESS_STR
                             for i in range(20 - math.floor(percentage / 5)))),
                    round(percentage, 2),
                    file_name,
                    humanbytes(f_size),
                    humanbytes(uploaded),
                    self._completed,
                    self._list,
                    humanbytes(speed),
                    time_formatter(eta))
        file_id = response.get("id")
    # Non-TeamDrive uploads need explicit sharing permissions.
    if not Config.G_DRIVE_IS_TD:
        self._set_permission(file_id)
    self._completed += 1
    # Re-fetch to get the authoritative name/size for the link.
    drive_file = self._service.files().get(
        fileId=file_id, fields='id, name, size',
        supportsTeamDrives=True).execute()
    file_id = drive_file.get('id')
    file_name = drive_file.get("name")
    file_size = humanbytes(int(drive_file.get('size', 0)))
    _LOG.info("Created Google-Drive File => Name: %s ID: %s Size: %s",
              file_name, file_id, file_size)
    return G_DRIVE_FILE_LINK.format(file_id, file_name, file_size)