def clone(self, link, status, ignoreList=None):
    """Copy the Drive file/folder pointed to by *link* into the configured
    destination folder (``self.gparentid``).

    Args:
        link: Google Drive URL to clone.
        status: progress object updated via ``SetMainFolder`` etc.
        ignoreList: optional list of names to skip when cloning a folder.

    Returns:
        An HTML result message on success, or an error string.
    """
    # Fixed: mutable default argument ([]) was shared across calls.
    if ignoreList is None:
        ignoreList = []
    self.transferred_size = 0
    try:
        file_id = self.getIdFromUrl(link)
    except (KeyError, IndexError):
        msg = "Google drive ID could not be found in the provided link"
        return msg
    msg = ""
    LOGGER.info(f"File ID: {file_id}")
    try:
        meta = self.__service.files().get(
            supportsAllDrives=True, fileId=file_id,
            fields="name,id,mimeType,size").execute()
        dest_meta = self.__service.files().get(
            supportsAllDrives=True, fileId=self.gparentid,
            fields="name,id,size").execute()
        status.SetMainFolder(meta.get('name'),
                             self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(meta.get('id')))
        status.SetDestinationFolder(dest_meta.get('name'),
                                    self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dest_meta.get('id')))
    except Exception as e:
        # Angle brackets stripped so the message survives HTML parse mode.
        return f"{str(e).replace('>', '').replace('<', '')}"
    if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE:
        # Folder clone: reuse an existing destination folder when present.
        dir_id = self.check_folder_exists(meta.get('name'), self.gparentid)
        if not dir_id:
            dir_id = self.create_directory(meta.get('name'), self.gparentid)
        try:
            self.cloneFolder(meta.get('name'), meta.get('name'),
                             meta.get('id'), dir_id, status, ignoreList)
        except Exception as e:
            if isinstance(e, RetryError):
                LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
                err = e.last_attempt.exception()
            else:
                err = str(e).replace('>', '').replace('<', '')
            LOGGER.error(err)
            return err
        status.set_status(True)
        msg += f'<a href="{self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id)}">{meta.get("name")}</a>' \
               f' ({get_readable_file_size(self.transferred_size)})'
        if INDEX_URL:
            url = requests.utils.requote_uri(f'{INDEX_URL}/{meta.get("name")}/')
            msg += f' | <a href="{url}"> Index URL</a>'
    else:
        # Single file: skip the copy when it already exists at destination.
        try:
            file = self.check_file_exists(meta.get('id'), self.gparentid)
            if file:
                status.checkFileExist(True)
            else:
                # Fixed: collapsed the duplicated `if file / if not file`
                # pair into one if/else.
                status.checkFileExist(False)
                file = self.copyFile(meta.get('id'), self.gparentid, status)
        except Exception as e:
            if isinstance(e, RetryError):
                LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
                err = e.last_attempt.exception()
            else:
                err = str(e).replace('>', '').replace('<', '')
            LOGGER.error(err)
            return err
        msg += f'<a href="{self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id"))}">{file.get("name")}</a>'
        try:
            msg += f' ({get_readable_file_size(int(meta.get("size")))}) '
            if INDEX_URL is not None:
                url = requests.utils.requote_uri(f'{INDEX_URL}/{file.get("name")}')
                msg += f' | <a href="{url}"> Index URL</a>'
        except TypeError:
            # size may be absent/non-numeric for some Drive entries.
            pass
    return msg
def cancel_download(self):
    """Abort the in-flight MEGA transfer identified by this helper's gid."""
    gid = self.gid
    LOGGER.info(f"Cancelling download on user request: {gid}")
    self.__mega_client.cancelDl(gid)
# NOTE(review): this is the body of the module-discovery helper
# (``__list_all_modules`` — its ``def`` line is outside this chunk, which is
# why ``return`` appears below). TODO confirm against the full file.
mod_paths = glob.glob(dirname(__file__) + "/*.py")
# Candidate module names: every *.py in this directory except __init__.py.
all_modules = [
    basename(f)[:-3] for f in mod_paths
    if isfile(f) and f.endswith(".py") and not f.endswith('__init__.py')
]
if LOAD or NO_LOAD:
    to_load = LOAD
    if to_load:
        # Every explicitly requested module must exist, else abort startup.
        if not all(
                any(mod == module_name for module_name in all_modules)
                for mod in to_load):
            LOGGER.error("Invalid loadorder names. Quitting.")
            quit(1)
    else:
        to_load = all_modules
    if NO_LOAD:
        # NO_LOAD filters modules out of the computed load list.
        LOGGER.info("Not loading: {}".format(NO_LOAD))
        return [item for item in to_load if item not in NO_LOAD]
    return to_load
return all_modules

ALL_MODULES = sorted(__list_all_modules())
LOGGER.info("Modules to load: %s", str(ALL_MODULES))
__all__ = ALL_MODULES + ["ALL_MODULES"]
# NOTE(review): interior of the bot entry point — the enclosing function
# (presumably ``def main():``) and handler definitions are outside this chunk.
dispatcher.add_handler(source_handler)
dispatcher.add_handler(M_CONNECT_BTN_HANDLER)
dispatcher.add_handler(IMDB_HANDLER)
dispatcher.add_handler(IMDB_SEARCHDATAHANDLER)
# dispatcher.add_error_handler(error_callback)
if WEBHOOK:
    LOGGER.info("Using webhooks.")
    updater.start_webhook(listen="127.0.0.1", port=PORT, url_path=TOKEN)
    if CERT_PATH:
        # Explicit certificate upload for a self-signed webhook endpoint.
        updater.bot.set_webhook(url=URL + TOKEN,
                                certificate=open(CERT_PATH, 'rb'))
    else:
        updater.bot.set_webhook(url=URL + TOKEN)
else:
    # No webhook configured: fall back to long polling.
    LOGGER.info("bot running...")
    updater.start_polling(timeout=15, read_latency=4)
# Block until the process receives a stop signal.
updater.idle()

if __name__ == '__main__':
    LOGGER.info("Successfully loaded modules: " + str(ALL_MODULES))
    main()
def onRequestStart(self, api, request):
    """MEGA SDK listener callback fired when *request* begins."""
    message = 'Request start ({})'.format(request)
    LOGGER.info(message)
def _restart(client, message):
    """Wipe the download directory and re-exec the bot process.

    Replies to the requesting user first, because ``execl`` replaces the
    current process image and never returns.
    """
    # Fixed: ignore_errors so a missing/partially-removed directory cannot
    # raise and block the restart.
    shutil.rmtree(DOWNLOAD_DIRECTORY, ignore_errors=True)
    LOGGER.info('Deleted DOWNLOAD_DIRECTORY successfully.')
    message.reply_text('**♻️Restarted Successfully !**', quote=True)
    LOGGER.info(f'{message.from_user.id}: Restarting...')
    execl(executable, executable, "-m", "bot")
def setup(bot):
    """discord.py extension entry point: register the RSRole cog."""
    cog = RSRole(bot)
    bot.add_cog(cog)
    LOGGER.info('RS Role System loaded')
def upload(self, file_name: str):
    """Upload *file_name* (file or directory) from this listener's download
    directory to Google Drive.

    Returns:
        The share link on success, or None after reporting an upload error.
    """
    self.__listener.onUploadStarted()
    file_dir = f"{DOWNLOAD_DIR}{self.__listener.message.message_id}"
    file_path = f"{file_dir}/{file_name}"
    LOGGER.info("Uploading File: " + file_path)
    self.start_time = time.time()
    # Periodic callback driving status-message updates.
    self.updater = setInterval(self.update_interval, self._on_upload_progress)
    if os.path.isfile(file_path):
        try:
            mime_type = get_mime_type(file_path)
            link = self.upload_file(file_path, file_name, mime_type, parent_id)
            if link is None:
                raise Exception('Upload has been manually cancelled')
            LOGGER.info("Uploaded To G-Drive: " + file_path)
        except Exception as e:
            # Fixed: only retry-style errors carry .last_attempt; the old code
            # assumed it on every Exception and crashed the error path.
            if hasattr(e, 'last_attempt'):
                LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
                LOGGER.error(e.last_attempt.exception())
            else:
                LOGGER.error(e)
            self.__listener.onUploadError(e)
            return
        finally:
            self.updater.cancel()
    else:
        try:
            dir_id = self.create_directory(
                os.path.basename(os.path.abspath(file_name)), parent_id)
            result = self.upload_dir(file_path, dir_id)
            if result is None:
                raise Exception('Upload has been manually cancelled!')
            LOGGER.info("Uploaded To G-Drive: " + file_name)
            link = f"https://drive.google.com/folderview?id={dir_id}"
        except Exception as e:
            if hasattr(e, 'last_attempt'):
                LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
                LOGGER.error(e.last_attempt.exception())
            else:
                LOGGER.error(e)
            self.__listener.onUploadError(e)
            return
        finally:
            self.updater.cancel()
    LOGGER.info(download_dict)
    self.__listener.onUploadComplete(link)
    LOGGER.info("Deleting downloaded file/folder..")
    return link
def _watch(bot, update, isZip=False, isLeech=False, pswd=None, tag=None):
    """Handle a /watch-style command: parse link/name/password from the
    message, extract yt-dlp metadata and offer quality-selection buttons.
    """
    mssg = update.message.text
    message_args = mssg.split(' ')
    name_args = mssg.split('|', maxsplit=1)
    user_id = update.message.from_user.id
    msg_id = update.message.message_id
    try:
        link = message_args[1].strip()
        # A leading "|" or "pswd: " means no link was actually supplied.
        if link.startswith("|") or link.startswith("pswd: "):
            link = ''
    except IndexError:
        link = ''
    try:
        name = name_args[1]
        name = name.split(' pswd: ')[0]
        name = name.strip()
    except IndexError:
        name = ''
    pswdMsg = mssg.split(' pswd: ')
    if len(pswdMsg) > 1:
        pswd = pswdMsg[1]
    if update.message.from_user.username:
        tag = f"@{update.message.from_user.username}"
    else:
        tag = update.message.from_user.mention_html(update.message.from_user.first_name)
    reply_to = update.message.reply_to_message
    if reply_to is not None:
        # Replied-to message overrides both the link and the tag.
        link = reply_to.text.strip()
        if reply_to.from_user.username:
            tag = f"@{reply_to.from_user.username}"
        else:
            tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
    if not is_url(link):
        help_msg = "<b>Send link along with command line:</b>"
        help_msg += "\n<code>/command</code> {link} |newname pswd: mypassword [𝚣𝚒𝚙]"
        help_msg += "\n\n<b>By replying to link:</b>"
        help_msg += "\n<code>/command</code> |newname pswd: mypassword [𝚣𝚒𝚙]"
        return sendMessage(help_msg, bot, update)
    LOGGER.info(link)
    listener = MirrorListener(bot, update, isZip, isLeech=isLeech, pswd=pswd, tag=tag)
    buttons = button_build.ButtonMaker()
    best_video = "bv*+ba/b"
    best_audio = "ba/b"
    ydl = YoutubeDLHelper(listener)
    try:
        result = ydl.extractMetaData(link, name, True)
    except Exception as e:
        return sendMessage(str(e), bot, update)
    if 'entries' in result:
        # Playlist: fixed resolution ladder, one button per (height, ext).
        for i in ['144', '240', '360', '480', '720', '1080', '1440', '2160']:
            video_format = f"bv*[height<={i}][ext=mp4]+ba/b"
            buttons.sbutton(f"{i}-mp4", f"qu {msg_id} {video_format} t")
            video_format = f"bv*[height<={i}][ext=webm]+ba/b"
            buttons.sbutton(f"{i}-webm", f"qu {msg_id} {video_format} t")
        buttons.sbutton("Audios", f"qu {msg_id} audio t")
        buttons.sbutton("Best Videos", f"qu {msg_id} {best_video} t")
        buttons.sbutton("Best Audios", f"qu {msg_id} {best_audio} t")
        buttons.sbutton("Cancel", f"qu {msg_id} cancel")
        YTBUTTONS = InlineKeyboardMarkup(buttons.build_menu(3))
        listener_dict[msg_id] = [listener, user_id, link, name, YTBUTTONS]
        bmsg = sendMarkup('Choose Playlist Videos Quality:', bot, update, YTBUTTONS)
    else:
        # Single video: group available formats by "heightpFPS-ext" quality
        # key, keeping per-bitrate sizes.
        formats = result.get('formats')
        formats_dict = {}
        if formats is not None:
            for frmt in formats:
                if not frmt.get('tbr') or not frmt.get('height'):
                    continue
                if frmt.get('fps'):
                    quality = f"{frmt['height']}p{frmt['fps']}-{frmt['ext']}"
                else:
                    quality = f"{frmt['height']}p-{frmt['ext']}"
                if frmt.get('filesize'):
                    size = frmt['filesize']
                elif frmt.get('filesize_approx'):
                    size = frmt['filesize_approx']
                else:
                    size = 0
                if quality in formats_dict:
                    formats_dict[quality][frmt['tbr']] = size
                else:
                    subformat = {}
                    subformat[frmt['tbr']] = size
                    formats_dict[quality] = subformat
            for forDict in formats_dict:
                if len(formats_dict[forDict]) == 1:
                    # Exactly one bitrate: button selects the format directly.
                    qual_fps_ext = resplit(r'p|-', forDict, maxsplit=2)
                    height = qual_fps_ext[0]
                    fps = qual_fps_ext[1]
                    ext = qual_fps_ext[2]
                    if fps != '':
                        video_format = f"bv*[height={height}][fps={fps}][ext={ext}]+ba/b"
                    else:
                        video_format = f"bv*[height={height}][ext={ext}]+ba/b"
                    size = list(formats_dict[forDict].values())[0]
                    buttonName = f"{forDict} ({get_readable_file_size(size)})"
                    buttons.sbutton(str(buttonName), f"qu {msg_id} {video_format}")
                else:
                    # Multiple bitrates: defer to a second selection step.
                    buttons.sbutton(str(forDict), f"qu {msg_id} dict {forDict}")
        buttons.sbutton("Audios", f"qu {msg_id} audio")
        buttons.sbutton("Best Video", f"qu {msg_id} {best_video}")
        buttons.sbutton("Best Audio", f"qu {msg_id} {best_audio}")
        buttons.sbutton("Cancel", f"qu {msg_id} cancel")
        YTBUTTONS = InlineKeyboardMarkup(buttons.build_menu(2))
        listener_dict[msg_id] = [listener, user_id, link, name, YTBUTTONS, formats_dict]
        bmsg = sendMarkup('Choose Video Quality:', bot, update, YTBUTTONS)
    # Auto-dismiss the selection menu if the user never answers.
    Thread(target=_auto_cancel, args=(bmsg, msg_id)).start()
def cancel_download(self):
    """Pause, report, and delete the qBittorrent download for this torrent."""
    LOGGER.info(f"Cancelling Download: {self.name()}")
    torrent_hash = self.__hash
    self.client.torrents_pause(torrent_hashes=torrent_hash)
    # Short delay between pausing and deleting the torrent.
    sleep(0.3)
    self.listener.onDownloadError('Download stopped by user!')
    self.client.torrents_delete(torrent_hashes=torrent_hash)
from bot import DOWNLOAD_DIRECTORY, LOGGER
from bot.config import Messages, BotCommands
from pyrogram.errors import FloodWait, RPCError


# Fixed: the decorator was missing its closing parenthesis (SyntaxError).
@Client.on_message(filters.incoming & filters.text
                   & (filters.command(BotCommands.Download)
                      | filters.regex('^(ht|f)tp*')))
def _download(client, message):
    """Handle a download command or a bare URL message: clone Drive links,
    otherwise download the file to disk (optionally renamed via "url|name").
    """
    user_id = message.from_user.id
    if not message.media:
        sent_message = message.reply_text('🕵️**Checking link...**', quote=True)
        if message.command:
            link = message.command[1]
        else:
            link = message.text
        if 'drive.google.com' in link:
            sent_message.edit(Messages.CLONING.format(link))
            LOGGER.info(f'Copy:{user_id}: {link}')
            msg = GoogleDrive(user_id).clone(link)
            sent_message.edit(msg)
        else:
            if '|' in link:
                # "url|custom_name" syntax.
                link, filename = link.split('|')
                link = link.strip()
                # Fixed: str.strip() returns a new string; the result was
                # previously discarded.
                filename = filename.strip()
                # Fixed: the destination used a garbled literal instead of
                # the requested filename.
                dl_path = os.path.join(DOWNLOAD_DIRECTORY, filename)
            else:
                link = link.strip()
                filename = os.path.basename(link)
                dl_path = DOWNLOAD_DIRECTORY
            LOGGER.info(f'Download:{user_id}: {link}')
            sent_message.edit(Messages.DOWNLOADING.format(link))
            result, file_path = download_file(link, dl_path)
async def _schedule(ctx):
    """Run the daily scheduled 'getclass' job and log completion."""
    await getclass(ctx, is_scheduler=True, args="now")
    LOGGER.info("Daily task complete")
"""Create a ctx from a channel message placeholder""" channel = bot.get_channel(channel) message = await channel.fetch_message(id=message_id) return await bot.get_context(message) async def _schedule(ctx): """Get schedule for daily task.""" await getclass(ctx, args="now", is_scheduler=True) LOGGER.info("Daily task complete") async def daily_task(): """Create a daily task.""" while True: now = datetime.utcnow() date = now.date() if now.time() > DT_TIME: date = now.date() + timedelta(days=1) then = datetime.combine(date, DT_TIME) await discord.utils.sleep_until(then) LOGGER.info("Running daily schedule task") ctx = await _create_context(SCHEDULE_CHANNEL, TASK_MSG_PLACEHOLDER) await _schedule(ctx) if __name__ == "__main__": LOGGER.info("Modules Loaded: %s", str(ALL_MODULES)) bot.loop.create_task(startup()) bot.run(BOT_TOKEN)
def _mirror(bot, update, isTar=False, extract=False):
    """Parse a /mirror command (link, optional rename, optional credentials
    and password) and dispatch to the Telegram / MEGA / aria2 downloader.
    """
    mesg = update.message.text.split('\n')
    message_args = mesg[0].split(' ')
    name_args = mesg[0].split('|')
    try:
        link = message_args[1]
        print(link)
        if link.startswith("|") or link.startswith("pswd: "):
            link = ''
    except IndexError:
        link = ''
    try:
        name = name_args[1]
        name = name.strip()
        if name.startswith("pswd: "):
            name = ''
    except IndexError:
        name = ''
    # Optional second/third lines carry HTTP auth credentials.
    try:
        ussr = urllib.parse.quote(mesg[1], safe='')
        pssw = urllib.parse.quote(mesg[2], safe='')
    except:
        ussr = ''
        pssw = ''
    if ussr != '' and pssw != '':
        link = link.split("://", maxsplit=1)
        link = f'{link[0]}://{ussr}:{pssw}@{link[1]}'
    pswd = re.search('(?<=pswd: )(.*)', update.message.text)
    if pswd is not None:
        pswd = pswd.groups()
        pswd = " ".join(pswd)
    LOGGER.info(link)
    link = link.strip()
    reply_to = update.message.reply_to_message
    if reply_to is not None:
        file = None
        tag = reply_to.from_user.username
        media_array = [reply_to.document, reply_to.video, reply_to.audio]
        for i in media_array:
            if i is not None:
                file = i
                break
        if not bot_utils.is_url(link) and not bot_utils.is_magnet(link) or len(
                link) == 0:
            if file is not None:
                if file.mime_type != "application/x-bittorrent":
                    # Plain Telegram file: download it directly.
                    listener = MirrorListener(bot, update, pswd, isTar, tag, extract)
                    tg_downloader = TelegramDownloadHelper(listener)
                    tg_downloader.add_download(
                        reply_to, f'{DOWNLOAD_DIR}{listener.uid}/', name)
                    sendStatusMessage(update, bot)
                    if len(Interval) == 0:
                        Interval.append(
                            setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL,
                                        update_all_messages))
                    return
                else:
                    # .torrent attachment: mirror its file path as the link.
                    link = file.get_file().file_path
    else:
        tag = None
    if not bot_utils.is_url(link) and not bot_utils.is_magnet(link):
        sendMessage('No download source provided', bot, update)
        return
    try:
        link = direct_link_generator(link)
    except DirectDownloadLinkException as e:
        # Not a supported direct-link host; proceed with the original link.
        LOGGER.info(f'{link}: {e}')
    listener = MirrorListener(bot, update, pswd, isTar, tag, extract)
    if bot_utils.is_mega_link(
            link) and MEGA_KEY is not None and not BLOCK_MEGA_LINKS:
        mega_dl = MegaDownloader(listener)
        mega_dl.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}/')
        sendStatusMessage(update, bot)
    elif bot_utils.is_mega_link(link) and BLOCK_MEGA_LINKS:
        sendMessage("Mega links are blocked. Dont try to mirror mega links.", bot, update)
    else:
        ariaDlManager.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener, name)
        sendStatusMessage(update, bot)
        if len(Interval) == 0:
            Interval.append(
                setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
def _emptyTrash(client, message):
    """Empty the requesting user's Drive trash and reply with the result."""
    uid = message.from_user.id
    LOGGER.info(f'EmptyTrash: {uid}')
    result = GoogleDrive(uid).emptyTrash()
    message.reply_text(result, quote=True)
def cloneNode(update, context):
    """Clone a Google Drive (or gdtot) link server-side, with duplicate and
    size-limit checks, reporting progress for large folders.
    """
    args = update.message.text.split(" ", maxsplit=1)
    reply_to = update.message.reply_to_message
    link = ''
    if len(args) > 1:
        link = args[1]
    if update.message.from_user.username:
        tag = f"@{update.message.from_user.username}"
    else:
        tag = update.message.from_user.mention_html(
            update.message.from_user.first_name)
    if reply_to is not None:
        if len(link) == 0:
            link = reply_to.text
        if reply_to.from_user.username:
            tag = f"@{reply_to.from_user.username}"
        else:
            tag = reply_to.from_user.mention_html(
                reply_to.from_user.first_name)
    is_gdtot = is_gdtot_link(link)
    if is_gdtot:
        # Resolve the gdtot wrapper into a direct Drive link first.
        try:
            msg = sendMessage(f"Processing: <code>{link}</code>", context.bot, update)
            link = gdtot(link)
            deleteMessage(context.bot, msg)
        except DirectDownloadLinkException as e:
            deleteMessage(context.bot, msg)
            return sendMessage(str(e), context.bot, update)
    if is_gdrive_link(link):
        gd = GoogleDriveHelper()
        res, size, name, files = gd.helper(link)
        if res != "":
            return sendMessage(res, context.bot, update)
        if STOP_DUPLICATE:
            LOGGER.info('Checking File/Folder if already in Drive...')
            smsg, button = gd.drive_list(name, True, True)
            if smsg:
                msg3 = "File/Folder is already available in Drive.\nHere are the search results:"
                return sendMarkup(msg3, context.bot, update, button)
        if CLONE_LIMIT is not None:
            # CLONE_LIMIT is expressed in GiB here.
            LOGGER.info('Checking File/Folder Size...')
            if size > CLONE_LIMIT * 1024**3:
                msg2 = f'Failed, Clone limit is {CLONE_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(size)}.'
                return sendMessage(msg2, context.bot, update)
        if files <= 20:
            # Small clone: do it inline without a status entry.
            msg = sendMessage(f"Cloning: <code>{link}</code>", context.bot, update)
            result, button = gd.clone(link)
            deleteMessage(context.bot, msg)
        else:
            # Large clone: track progress via the shared download dict.
            drive = GoogleDriveHelper(name)
            gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
            clone_status = CloneStatus(drive, size, update, gid)
            with download_dict_lock:
                download_dict[update.message.message_id] = clone_status
            sendStatusMessage(update, context.bot)
            result, button = drive.clone(link)
            with download_dict_lock:
                del download_dict[update.message.message_id]
                count = len(download_dict)
            try:
                if count == 0:
                    Interval[0].cancel()
                    del Interval[0]
                    delete_all_messages()
                else:
                    update_all_messages()
            except IndexError:
                pass
        cc = f'\n\n<b>cc: </b>{tag}'
        if button in ["cancelled", ""]:
            sendMessage(f"{tag} {result}", context.bot, update)
        else:
            sendMarkup(result + cc, context.bot, update, button)
        if is_gdtot:
            # Drop the temporary file gdtot created in our Drive.
            gd.deletefile(link)
    else:
        sendMessage(
            'Send Gdrive or gdtot link along with command or by replying to the link by command',
            context.bot, update)
def tar(orig_path: str):
    """Archive *orig_path* as a .tar beside it and return the archive path."""
    src = pathlib.PurePath(orig_path)
    base = src.name
    root = pathlib.Path(src.parent.as_posix()).absolute().as_posix()
    LOGGER.info(f'Tar: orig_path: {orig_path}, base: {base}, root: {root}')
    # make_archive changes into *root* and archives *base* inside it.
    return shutil.make_archive(orig_path, 'tar', root, base)
def log_input(update):
    """Log an incoming update's text with its originating user and chat ids."""
    user = update.effective_user.id
    chat = update.effective_chat.id
    text = update.effective_message.text
    LOGGER.info(f"IN: {text} (user={user}, chat={chat})")
def add_download(mega_link: str, path: str, listener):
    """Start a MEGA download of *mega_link* into *path*, after duplicate and
    size-limit checks, registering progress with the shared download dict.
    """
    if MEGA_API_KEY is None:
        raise MegaDownloaderException(
            'Mega API KEY not provided! Cannot mirror Mega links')
    executor = AsyncExecutor()
    api = MegaApi(MEGA_API_KEY, None, None, 'telegram-mirror-bot')
    global listeners
    mega_listener = MegaAppListener(executor.continue_event, listener)
    # Kept in a module-level list so the SDK listener isn't GC'd mid-transfer.
    listeners.append(mega_listener)
    api.addListener(mega_listener)
    if MEGA_EMAIL_ID is not None and MEGA_PASSWORD is not None:
        executor.do(api.login, (MEGA_EMAIL_ID, MEGA_PASSWORD))
    link_type = get_mega_link_type(mega_link)
    if link_type == "file":
        LOGGER.info(
            "File. If your download didn't start, then check your link if it's available to download"
        )
        executor.do(api.getPublicNode, (mega_link, ))
        node = mega_listener.public_node
    else:
        LOGGER.info(
            "Folder. If your download didn't start, then check your link if it's available to download"
        )
        # Folder links need a dedicated API instance logged into the folder.
        folder_api = MegaApi(MEGA_API_KEY, None, None, 'TgBot')
        folder_api.addListener(mega_listener)
        executor.do(folder_api.loginToFolder, (mega_link, ))
        node = folder_api.authorizeNode(mega_listener.node)
    if mega_listener.error is not None:
        return listener.onDownloadError(str(mega_listener.error))
    if STOP_DUPLICATE_MEGA:
        LOGGER.info(f'Checking File/Folder if already in Drive')
        mname = node.getName()
        if listener.isTar:
            # The uploaded artifact will carry a .tar suffix; search for that.
            mname = mname + ".tar"
        if listener.extract:
            smsg = None
        else:
            gd = GoogleDriveHelper()
            smsg, button = gd.drive_list(mname)
        if smsg:
            msg1 = "File/Folder is already available in Drive.\nHere are the search results:"
            sendMarkup(msg1, listener.bot, listener.update, button)
            return
    if MEGA_LIMIT is not None or TAR_UNZIP_LIMIT is not None:
        # Limits are strings like "10 GB"; tar/unzip limit takes precedence.
        limit = None
        LOGGER.info(f'Checking File/Folder Size')
        if TAR_UNZIP_LIMIT is not None and (listener.isTar or listener.extract):
            limit = TAR_UNZIP_LIMIT
            msg3 = f'Failed, Tar/Unzip limit is {TAR_UNZIP_LIMIT}.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.'
        if MEGA_LIMIT is not None and limit is None:
            limit = MEGA_LIMIT
            msg3 = f'Failed, Mega limit is {MEGA_LIMIT}.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.'
        if limit is not None:
            limit = limit.split(' ', maxsplit=1)
            limitint = int(limit[0])
            if 'G' in limit[1] or 'g' in limit[1]:
                if api.getSize(node) > limitint * 1024**3:
                    sendMessage(msg3, listener.bot, listener.update)
                    return
            elif 'T' in limit[1] or 't' in limit[1]:
                if api.getSize(node) > limitint * 1024**4:
                    sendMessage(msg3, listener.bot, listener.update)
                    return
    with download_dict_lock:
        download_dict[listener.uid] = MegaDownloadStatus(
            mega_listener, listener)
    os.makedirs(path)
    gid = ''.join(random.SystemRandom().choices(string.ascii_letters +
                                                string.digits, k=8))
    mega_listener.setValues(node.getName(), api.getSize(node), gid)
    sendStatusMessage(listener.update, listener.bot)
    executor.do(api.startDownload, (node, path))
def onTransferStart(self, api: MegaApi, transfer: MegaTransfer):
    """MEGA SDK listener callback invoked when a transfer begins."""
    filename = transfer.getFileName()
    LOGGER.info(f"Transfer Started: {filename}")
def cloneNode(update, context):
    """Clone a Drive shareable link, with duplicate and size-limit checks;
    large folders (>=15 files) get a tracked status entry.
    """
    args = update.message.text.split(" ", maxsplit=1)
    if len(args) > 1:
        link = args[1]
        gd = gdriveTools.GoogleDriveHelper()
        res, clonesize, name, files = gd.clonehelper(link)
        if res != "":
            sendMessage(res, context.bot, update)
            return
        if STOP_DUPLICATE_CLONE:
            LOGGER.info(f"Checking File/Folder if already in Drive...")
            smsg, button = gd.drive_list(name)
            if smsg:
                msg3 = "File/Folder is already available in Drive.\nHere are the search results:"
                sendMarkup(msg3, context.bot, update, button)
                return
        if CLONE_LIMIT is not None:
            # CLONE_LIMIT is a string like "10 GB" / "1 TB".
            LOGGER.info(f"Checking File/Folder Size...")
            limit = CLONE_LIMIT
            limit = limit.split(' ', maxsplit=1)
            limitint = int(limit[0])
            msg2 = f'Failed, Clone limit is {CLONE_LIMIT}.\nYour File/Folder size is {get_readable_file_size(clonesize)}.'
            if 'G' in limit[1] or 'g' in limit[1]:
                if clonesize > limitint * 1024**3:
                    sendMessage(msg2, context.bot, update)
                    return
            elif 'T' in limit[1] or 't' in limit[1]:
                if clonesize > limitint * 1024**4:
                    sendMessage(msg2, context.bot, update)
                    return
        if files < 15:
            # Small clone: inline, no status tracking.
            msg = sendMessage(f"Cloning: <code>{link}</code>", context.bot, update)
            result, button = gd.clone(link)
            deleteMessage(context.bot, msg)
        else:
            # Large clone: register a CloneStatus for progress messages.
            drive = gdriveTools.GoogleDriveHelper(name)
            gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
            clone_status = CloneStatus(drive, clonesize, update, gid)
            with download_dict_lock:
                download_dict[update.message.message_id] = clone_status
            if len(Interval) == 0:
                Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
            sendStatusMessage(update, context.bot)
            result, button = drive.clone(link)
        if update.message.from_user.username:
            uname = f'@{update.message.from_user.username}'
        else:
            uname = f'<a href="tg://user?id={update.message.from_user.id}">{update.message.from_user.first_name}</a>'
        if uname is not None:
            cc = f'\n\ncc: {uname}'
            men = f'{uname} '
        if button == "cancelled":
            sendMessage(men + result, context.bot, update)
        elif button == "":
            sendMessage(men + result, context.bot, update)
        else:
            sendMarkup(result + cc, context.bot, update, button)
        # Clean up the status entry; IndexError can occur if the interval
        # list was already emptied elsewhere.
        try:
            with download_dict_lock:
                del download_dict[update.message.message_id]
                count = len(download_dict)
            if count == 0:
                Interval[0].cancel()
                del Interval[0]
                delete_all_messages()
            else:
                update_all_messages()
        except IndexError:
            pass
    else:
        sendMessage('Provide G-Drive Shareable Link to Clone.', context.bot, update)
def clean_download(path: str):
    """Recursively delete *path* if it exists.

    ``ignore_errors=True`` guards the TOCTOU race where the directory
    disappears (e.g. a concurrent cleanup) between the existence check and
    the removal, which previously raised.
    """
    if os.path.exists(path):
        LOGGER.info(f"Cleaning download: {path}")
        shutil.rmtree(path, ignore_errors=True)
def _mirror(bot, update, isZip=False, extract=False):
    """Parse a /mirror command and dispatch to the Telegram / Drive-clone /
    MEGA / aria2 download path.
    """
    mesg = update.message.text.split("\n")
    message_args = mesg[0].split(" ")
    name_args = mesg[0].split("|")
    try:
        link = message_args[1]
        print(link)
        if link.startswith("|") or link.startswith("pswd: "):
            link = ""
    except IndexError:
        link = ""
    try:
        name = name_args[1]
        name = name.strip()
        if name.startswith("pswd: "):
            name = ""
    except IndexError:
        name = ""
    # Optional second/third lines carry HTTP auth credentials.
    try:
        ussr = urllib.parse.quote(mesg[1], safe="")
        pssw = urllib.parse.quote(mesg[2], safe="")
    except:
        ussr = ""
        pssw = ""
    if ussr != "" and pssw != "":
        link = link.split("://", maxsplit=1)
        link = f"{link[0]}://{ussr}:{pssw}@{link[1]}"
    pswd = re.search("(?<=pswd: )(.*)", update.message.text)
    if pswd is not None:
        pswd = pswd.groups()
        pswd = " ".join(pswd)
    LOGGER.info(link)
    link = link.strip()
    reply_to = update.message.reply_to_message
    if reply_to is not None:
        file = None
        tag = reply_to.from_user.username
        media_array = [reply_to.document, reply_to.video, reply_to.audio]
        for i in media_array:
            if i is not None:
                file = i
                break
        if (not bot_utils.is_url(link) and not bot_utils.is_magnet(link)
                or len(link) == 0) and file is not None:
            if file.mime_type != "application/x-bittorrent":
                # Plain Telegram file: download it directly.
                listener = MirrorListener(bot, update, pswd, isZip, tag, extract)
                tg_downloader = TelegramDownloadHelper(listener)
                tg_downloader.add_download(reply_to,
                                           f"{DOWNLOAD_DIR}{listener.uid}/", name)
                sendStatusMessage(update, bot)
                if len(Interval) == 0:
                    Interval.append(
                        setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL,
                                    update_all_messages))
                return
            else:
                # .torrent attachment: mirror its file path as the link.
                link = file.get_file().file_path
    else:
        tag = None
    if not bot_utils.is_url(link) and not bot_utils.is_magnet(link):
        sendMessage("No download source provided", bot, update)
        return
    try:
        link = direct_link_generator(link)
    except DirectDownloadLinkException as e:
        # Not a supported direct-link host; proceed with the original link.
        LOGGER.info(f"{link}: {e}")
    listener = MirrorListener(bot, update, pswd, isZip, tag, extract)
    if bot_utils.is_gdrive_link(link):
        if not isZip and not extract:
            # Plain copies belong to the clone command, not mirror.
            sendMessage(f"Use /{BotCommands.CloneCommand} To Copy File/Folder", bot, update)
            return
        res, size, name = gdriveTools.GoogleDriveHelper().clonehelper(link)
        if res != "":
            sendMessage(res, bot, update)
            return
        LOGGER.info(f"Download Name : {name}")
        drive = gdriveTools.GoogleDriveHelper(name, listener)
        gid = "".join(random.SystemRandom().choices(string.ascii_letters +
                                                    string.digits, k=12))
        download_status = DownloadStatus(drive, size, listener, gid)
        with download_dict_lock:
            download_dict[listener.uid] = download_status
        if len(Interval) == 0:
            Interval.append(
                setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
        sendStatusMessage(update, bot)
        drive.download(link)
    elif bot_utils.is_mega_link(
            link) and MEGA_KEY is not None and not BLOCK_MEGA_LINKS:
        mega_dl = MegaDownloader(listener)
        mega_dl.add_download(link, f"{DOWNLOAD_DIR}{listener.uid}/")
        sendStatusMessage(update, bot)
    elif bot_utils.is_mega_link(link) and BLOCK_MEGA_LINKS:
        sendMessage("Mega links are blocked. Dont try to mirror mega links.", bot, update)
    else:
        ariaDlManager.add_download(link, f"{DOWNLOAD_DIR}{listener.uid}/", listener, name)
        sendStatusMessage(update, bot)
        if len(Interval) == 0:
            Interval.append(
                setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
def onDownloadComplete(self):
    """Post-download hook: optionally tar or extract the payload, then hand
    it to the Drive uploader with a fresh UploadStatus entry.
    """
    with download_dict_lock:
        LOGGER.info(
            f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
    elif self.extract:
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"Extracting : {name} ")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            # "extract" is an external helper script on PATH.
            archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Remove the archive asynchronously once extraction succeeds.
                threading.Thread(target=os.remove, args=(m_path, )).start()
                LOGGER.info(f"Deleting archive : {m_path}")
            else:
                LOGGER.warning(
                    'Unable to extract archive! Uploading anyway')
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(f'got path : {path}')
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    if size == 0:
        # Fallback when the downloader reported no size.
        size = fs_utils.get_path_size(m_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: optionally zip or (password-aware) extract the
    payload, then hand it to the Drive uploader.
    """
    with download_dict_lock:
        LOGGER.info(
            f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    if name is None:  # when pyrogram's media.file_name is of NoneType
        name = os.listdir(f"{DOWNLOAD_DIR}{self.uid}")[0]
    m_path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
    if self.isZip:
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = ZipStatus(name, m_path, size)
            path = fs_utils.zip(name, m_path)
        except FileNotFoundError:
            LOGGER.info("File to archive not found!")
            self.onUploadError("Internal error occurred!!")
            return
    elif self.extract:
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"Extracting : {name} ")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            # "pextract"/"extract" are external helper scripts on PATH.
            if pswd is not None:
                archive_result = subprocess.run(["pextract", m_path, pswd])
            else:
                archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Remove the archive asynchronously once extraction succeeds.
                threading.Thread(target=os.remove, args=(m_path, )).start()
                LOGGER.info(f"Deleting archive : {m_path}")
            else:
                LOGGER.warning(
                    "Unable to extract archive! Uploading anyway")
                path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
            LOGGER.info(f"got path : {path}")
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
    else:
        path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
    up_name = pathlib.PurePath(path).name
    if up_name == "None":
        # Degenerate name: fall back to whatever is in the download dir.
        up_name = "".join(os.listdir(f"{DOWNLOAD_DIR}{self.uid}/"))
    up_path = f"{DOWNLOAD_DIR}{self.uid}/{up_name}"
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    size = fs_utils.get_path_size(up_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def cancel_download(self):
    """Mark this download cancelled on user request (cooperative flag —
    presumably polled by the download loop; confirm against the worker)."""
    self.__is_cancelled = True
    LOGGER.info(f'Cancelling download on user request: {self.gid}')
"RS5": "<@&806018333517938688>", "RS6": "<@&806018337804910592>", "RS7": "<@&806018340203397140>", "RS8": "<@&806018343696990208>", "RS9": "<@&806018346269016084>", "RS10": "<@&806018349183139890>", "RS11": "<@&806261118158372936>" } }, } if TESTING: LOGGER.debug('Loading testing settings.') RS_GROUPS = RS_GROUPS.get('testing') else: LOGGER.info('Loading PRODUCTION settings.') RS_GROUPS = RS_GROUPS.get('prod') class RSQueue(commands.Cog, name='Queue'): def __init__(self, bot): self.bot = bot self.index = 0 self.check_people.start() self.current_mods = [ "croid", "influence", "nosanc", "notele", "rse", "suppress", "unity", "veng", "barrage" ] self.rs_channel = { "rs5-club": 5, "rs6-club": 6,