def cloneNode(update, context):
    """Handle the clone command: copy a Google Drive file/folder to the bot's Drive.

    Expects the Drive link as the first argument after the command. Small
    clones (< 15 files) run inline with a temporary "Cloning" message; larger
    ones get a tracked CloneStatus entry in ``download_dict`` so the status
    updater can report progress.
    """
    args = update.message.text.split(" ", maxsplit=1)
    if len(args) > 1:
        link = args[1]
        gd = gdriveTools.GoogleDriveHelper()
        # clonehelper returns (error_message, total_size, name, file_count);
        # a non-empty first element is a user-facing error.
        res, size, name, files = gd.clonehelper(link)
        if res != "":
            sendMessage(res, context.bot, update)
            return
        if STOP_DUPLICATE:
            # Refuse to clone if a same-named item is already in Drive.
            LOGGER.info('Checking File/Folder if already in Drive...')
            smsg, button = gd.drive_list(name)
            if smsg:
                msg3 = "File/Folder is already available in Drive.\nHere are the search results:"
                sendMarkup(msg3, context.bot, update, button)
                return
        if CLONE_LIMIT is not None:
            # check_limit is truthy when size exceeds the configured cap.
            result = check_limit(size, CLONE_LIMIT)
            if result:
                msg2 = f'Failed, Clone limit is {CLONE_LIMIT}.\nYour File/Folder size is {get_readable_file_size(size)}.'
                sendMessage(msg2, context.bot, update)
                return
        if files < 15:
            # Small clone: no progress tracking, just a placeholder message.
            msg = sendMessage(f"Cloning: <code>{link}</code>", context.bot, update)
            result, button = gd.clone(link)
            deleteMessage(context.bot, msg)
        else:
            # Large clone: register a CloneStatus keyed by the command
            # message id so the periodic status updater can show progress.
            drive = gdriveTools.GoogleDriveHelper(name)
            gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
            clone_status = CloneStatus(drive, size, update, gid)
            with download_dict_lock:
                download_dict[update.message.message_id] = clone_status
            sendStatusMessage(update, context.bot)
            result, button = drive.clone(link)
            with download_dict_lock:
                del download_dict[update.message.message_id]
                count = len(download_dict)
            try:
                if count == 0:
                    # Last active task: stop the status-refresh timer and
                    # clear the pinned status messages.
                    Interval[0].cancel()
                    del Interval[0]
                    delete_all_messages()
                else:
                    update_all_messages()
            except IndexError:
                # Interval may already have been cancelled elsewhere.
                pass
        # Build a mention of the requester for the final reply.
        if update.message.from_user.username:
            uname = f'@{update.message.from_user.username}'
        else:
            uname = f'<a href="tg://user?id={update.message.from_user.id}">{update.message.from_user.first_name}</a>'
        if uname is not None:
            cc = f'\n\ncc: {uname}'
            men = f'{uname} '
        # "cancelled"/"" means there is no result button to attach.
        if button in ["cancelled", ""]:
            sendMessage(men + result, context.bot, update)
        else:
            sendMarkup(result + cc, context.bot, update, button)
    else:
        sendMessage('Provide G-Drive Shareable Link to Clone.', context.bot, update)
def onDownloadComplete(self):
    """Post-download hook: optionally archive/extract the payload, then upload it.

    Fixes vs. original:
    - ``threading.Thread(..., args=(m_path))`` passed a *string* as ``args``,
      so the thread called ``os.remove(*m_path)`` with one argument per
      character and crashed; ``args`` must be a one-tuple ``(m_path,)``.
    - the bare ``except:`` around the ``shutil.rmtree`` fallback is narrowed
      to ``OSError`` (rmtree on a plain file raises NotADirectoryError).
    """
    with download_dict_lock:
        LOGGER.info(
            f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        gid = download.gid()
        size = download.size_raw()
    if name is None or self.isQbit:  # when pyrogram's media.file_name is of NoneType
        name = os.listdir(f'{DOWNLOAD_DIR}{self.uid}')[0]
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    if self.isTar:
        # Archive branch: zip or tar the download, then delete the source.
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.zip(name, m_path) if self.isZip else fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
        try:
            shutil.rmtree(m_path)
        except OSError:
            # m_path is a single file, not a directory.
            os.remove(m_path)
    elif self.extract:
        # Extract branch: unpack the archive with the external
        # extract/pextract helpers, delete the archive on success.
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"Extracting: {name}")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            if pswd is not None:
                archive_result = subprocess.run(["pextract", m_path, pswd])
            else:
                archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # args must be a one-element tuple, not a bare string.
                threading.Thread(target=os.remove, args=(m_path,)).start()
                LOGGER.info(f"Deleting archive: {m_path}")
            else:
                LOGGER.warning(
                    'Unable to extract archive! Uploading anyway')
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(f'got path: {path}')
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
    LOGGER.info(f"Upload Name: {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    size = fs_utils.get_path_size(up_path)
    upload_status = UploadStatus(drive, size, gid, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: optionally tar the payload, then hand it to Drive upload."""
    with download_dict_lock:
        LOGGER.info(
            f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        # Archive branch: replace the status entry and tar the download.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
    up_name = pathlib.PurePath(path).name
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    if size == 0:
        # Some downloaders report size 0; measure it from disk instead.
        size = fs_utils.get_path_size(m_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: tar or extract the payload as requested, then upload it."""
    with download_dict_lock:
        LOGGER.info(
            f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    if name is None:  # when pyrogram's media.file_name is of NoneType
        name = os.listdir(f"{DOWNLOAD_DIR}{self.uid}")[0]
    m_path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
    if self.isTar:
        # Archive branch: tar the downloaded directory/file.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info("File to archive not found!")
            self.onUploadError("Internal error occurred!!")
            return
    elif self.extract:
        # Extract branch: unpack via the external extract/pextract helpers.
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"Extracting : {name} ")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            if pswd is not None:
                archive_result = subprocess.run(["pextract", m_path, pswd])
            else:
                archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Delete the archive asynchronously once extraction succeeds.
                threading.Thread(target=os.remove, args=(m_path, )).start()
                LOGGER.info(f"Deleting archive : {m_path}")
            else:
                LOGGER.warning(
                    "Unable to extract archive! Uploading anyway")
                path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
            LOGGER.info(f"got path : {path}")
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
    else:
        path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
    up_name = pathlib.PurePath(path).name
    if up_name == "None":
        # PurePath turned a literal None into the string "None";
        # recover the real name from the download directory.
        up_name = "".join(os.listdir(f"{DOWNLOAD_DIR}{self.uid}/"))
    up_path = f"{DOWNLOAD_DIR}{self.uid}/{up_name}"
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    size = fs_utils.get_path_size(up_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: tar or extract the payload as requested, then upload it.

    Fix vs. original: ``up_path`` was computed *before* the ``up_name ==
    "None"`` correction, so when pathlib yielded the literal string "None"
    the size lookup pointed at a nonexistent path. The correction now runs
    first (matching the sibling variant of this handler).
    """
    with download_dict_lock:
        LOGGER.info(f"✅ : {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        # Archive branch: tar the downloaded directory/file.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
    elif self.extract:
        # Extract branch: unpack via the external extract/pextract helpers.
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"📁🔓 : {name} ")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            if pswd is not None:
                archive_result = subprocess.run(["pextract", m_path, pswd])
            else:
                archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Delete the archive asynchronously once extraction succeeds.
                threading.Thread(target=os.remove, args=(m_path,)).start()
                LOGGER.info(f"🗑🔒 : {m_path}")
            else:
                LOGGER.warning('Unable to extract archive! Uploading anyway')
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(f'got path : {path}')
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    if up_name == "None":
        # Recover the real name from disk BEFORE building up_path,
        # otherwise the size below is taken from ".../None".
        up_name = "".join(os.listdir(f'{DOWNLOAD_DIR}{self.uid}/'))
    up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
    LOGGER.info(f"📄 : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    size = fs_utils.get_path_size(up_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: tar or extract the payload as requested, then upload it.

    NOTE: user-facing log strings are intentionally in Indonesian and must
    not be changed.
    """
    with download_dict_lock:
        LOGGER.info(
            f"File yang kamu download berhasil: {download_dict[self.uid].name()}"
        )
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        # Archive branch: tar the downloaded directory/file.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File yang pengen di arsip gk ada')
            self.onUploadError('Ada masalah internal nih')
            return
    elif self.extract:
        # Extract branch: unpack via the external extract/pextract helpers.
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"Extracting : {name} ")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            if pswd is not None:
                archive_result = subprocess.run(["pextract", m_path, pswd])
            else:
                archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Delete the archive asynchronously once extraction succeeds.
                threading.Thread(target=os.remove, args=(m_path, )).start()
                LOGGER.info(f"Arsip lagi didelete: {m_path}")
            else:
                LOGGER.warning('Nggk bisa diarsip, aku upload aja ya')
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(f'got path : {path}')
        except NotSupportedExtractionArchive:
            LOGGER.info("Arsip yang kamu kirim gk bisa, aku kirim aja ya")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    if up_name == "None":
        # PurePath turned a literal None into the string "None";
        # recover the real name from the download directory.
        up_name = "".join(os.listdir(f'{DOWNLOAD_DIR}{self.uid}/'))
    up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
    LOGGER.info(f"Bentar lagi upload file yang kamu minta : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    size = fs_utils.get_path_size(up_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: tar or extract the payload as requested, then upload it.

    Fixes vs. original:
    - ``os.system(f"extract '{m_path}'")`` built a shell command from an
      untrusted download filename (a single quote in the name breaks out of
      the quoting — shell injection). Replaced with
      ``subprocess.run(["extract", m_path])``, the list form the sibling
      handlers in this file already use; success is still judged by whether
      the extracted path exists.
    - the ``ExtractStatus`` assignment into the shared ``download_dict`` now
      happens under ``download_dict_lock``, matching every other mutation of
      that dict in this file.
    """
    with download_dict_lock:
        LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        # Archive branch: tar the downloaded directory/file.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
    elif self.extract:
        download.is_extracting = True
        path = fs_utils.get_base_name(m_path)
        if path != "unsupported":
            LOGGER.info(
                f"Extracting : {download_dict[self.uid].name()} "
            )
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            # List form: the filename is passed as a single argv entry,
            # never interpreted by a shell.
            subprocess.run(["extract", m_path])
            if not os.path.exists(path):
                self.onUploadError("Cannot extract file, check integrity of the file")
                return
            LOGGER.info(
                f'got path : {path}'
            )
            try:
                os.remove(m_path)
                LOGGER.info(f"Deleting archive : {m_path}")
            except Exception as e:
                # Best-effort cleanup; upload proceeds regardless.
                LOGGER.error(str(e))
        else:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
    up_name = pathlib.PurePath(path).name
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    if size == 0:
        # Some downloaders report size 0; measure it from disk instead.
        size = fs_utils.get_path_size(m_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: tar or extract the payload as requested, then upload it."""
    with download_dict_lock:
        LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        # Archive branch: tar the downloaded directory/file.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('<b>🛣️Iɴᴛᴇʀɴᴀʟ Eʀʀᴏʀ Oᴄᴄᴜʀʀᴇᴅ!!</b>')
            return
    elif self.extract:
        # Extract branch: unpack via the external "extract" helper
        # (no password support in this variant).
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(
                f"Extracting : {name} "
            )
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Delete the archive asynchronously once extraction succeeds.
                threading.Thread(target=os.remove, args=(m_path,)).start()
                LOGGER.info(f"Deleting archive : {m_path}")
            else:
                LOGGER.warning('Unable to extract archive! Uploading anyway')
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(
                f'got path : {path}'
            )
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    size = fs_utils.get_path_size(up_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def onDownloadComplete(self):
    """Post-download hook: tar or extract the payload as requested, then upload it.

    NOTE: log strings use styled Unicode glyphs on purpose; leave them as-is.
    """
    with download_dict_lock:
        LOGGER.info(
            f"⬢𝗗𝗼𝘄𝗻𝗹𝗼𝗮𝗱 𝗰𝗼𝗺𝗽𝗹𝗲𝘁𝗲𝗱: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        # Archive branch: tar the downloaded directory/file.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('𝗙𝗶𝗹𝗲 𝘁𝗼 𝗮𝗿𝗰𝗵𝗶𝘃𝗲 𝗻𝗼𝘁 𝗳𝗼𝘂𝗻𝗱!')
            self.onUploadError('Internal error occurred!!')
            return
    elif self.extract:
        # Extract branch: unpack via the external "extract" helper.
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"⬢𝗘𝘅𝘁𝗿𝗮𝗰𝘁𝗶𝗻𝗴 : {name} ")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Delete the archive asynchronously once extraction succeeds.
                threading.Thread(target=os.remove, args=(m_path, )).start()
                LOGGER.info(f"⬢𝗗𝗲𝗹𝗲𝘁𝗶𝗻𝗴 𝗮𝗿𝗰𝗵𝗶𝘃𝗲 : {m_path}")
            else:
                LOGGER.warning(
                    '𝗨𝗻𝗮𝗯𝗹𝗲 𝘁𝗼 𝗲𝘅𝘁𝗿𝗮𝗰𝘁 𝗮𝗿𝗰𝗵𝗶𝘃𝗲! 𝗨𝗽𝗹𝗼𝗮𝗱𝗶𝗻𝗴 𝗮𝗻𝘆𝘄𝗮𝘆')
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(f'got path : {path}')
        except NotSupportedExtractionArchive:
            LOGGER.info("𝗡𝗼𝘁 𝗮𝗻𝘆 𝘃𝗮𝗹𝗶𝗱 𝗮𝗿𝗰𝗵𝗶𝘃𝗲, 𝘂𝗽𝗹𝗼𝗮𝗱𝗶𝗻𝗴 𝗳𝗶𝗹𝗲 𝗮𝘀 𝗶𝘁 𝗶𝘀.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    LOGGER.info(f"⬢𝗨𝗽𝗹𝗼𝗮𝗱 𝗡𝗮𝗺𝗲 : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    if size == 0:
        # Some downloaders report size 0; measure it from disk instead.
        size = fs_utils.get_path_size(m_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def deletefile(update, context):
    """Delete the Drive file whose link follows the command, then reply and
    schedule auto-deletion of both messages."""
    parts = update.message.text.split(None, 1)
    if len(parts) > 1:
        target = parts[1]
        LOGGER.info(target)
        msg = gdriveTools.GoogleDriveHelper().deletefile(target)
    else:
        # No link supplied after the command.
        msg = '𝚂𝚎𝚗𝚍 𝚊 𝚕𝚒𝚗𝚔 𝚊𝚕𝚘𝚗𝚐 𝚠𝚒𝚝𝚑 𝚌𝚘𝚖𝚖𝚊𝚗𝚍😏'
    LOGGER.info(f"DeleteFileCmd : {msg}")
    reply = sendMessage(msg, context.bot, update)
    cleaner = threading.Thread(target=auto_delete_message,
                               args=(context.bot, update.message, reply))
    cleaner.start()
def deletefile(update, context):
    """Delete the Drive file whose link follows the command, then reply and
    schedule auto-deletion of both messages."""
    parts = update.message.text.split(None, 1)
    if len(parts) > 1:
        target = parts[1]
        LOGGER.info(target)
        msg = gdriveTools.GoogleDriveHelper().deletefile(target)
    else:
        # No link supplied after the command.
        msg = 'send a link along with command'
    LOGGER.info(f"this is msg : {msg}")
    reply = sendMessage(msg, context.bot, update)
    cleaner = threading.Thread(target=auto_delete_message,
                               args=(context.bot, update.message, reply))
    cleaner.start()
def deletefile(update, context):
    """Delete the Drive file whose link follows the command, then reply and
    schedule auto-deletion of both messages.

    Fix vs. original: the usage message was a plain string, so the user saw
    the literal text ``/{BotCommands.deleteCommand}``; it is now an f-string
    so the actual command name is interpolated.
    """
    msg_args = update.message.text.split(None, 1)
    msg = ''
    try:
        link = msg_args[1]
        LOGGER.info(msg_args[1])
    except IndexError:
        # f-string so the command placeholder actually expands.
        msg = f'<b>➼ Sᴇɴᴅ ᴀ Gᴏᴏɢʟᴇ Dʀɪᴠᴇ Lɪɴᴋ Aʟᴏɴɢ Wɪᴛʜ</b> /{BotCommands.deleteCommand} <b>Cᴏᴍᴍᴀɴᴅ Tᴏ ♻️Dᴇʟᴇᴛᴇ Fɪʟᴇ Fʀᴏᴍ G-Dʀɪᴠᴇ Oʀ Tᴇᴀᴍ Dʀɪᴠᴇ</b>'
    if msg == '':
        drive = gdriveTools.GoogleDriveHelper()
        msg = drive.deletefile(link)
    LOGGER.info(f"this is msg : {msg}")
    reply_message = sendMessage(msg, context.bot, update)
    threading.Thread(target=auto_delete_message,
                     args=(context.bot, update.message, reply_message)).start()
def deletefile(update, context):
    """Delete the Drive file whose link follows the command, then reply and
    schedule auto-deletion of both messages."""
    parts = update.message.text.split(None, 1)
    if len(parts) > 1:
        target = parts[1]
        LOGGER.info(target)
        msg = gdriveTools.GoogleDriveHelper().deletefile(target)
    else:
        # No link supplied after the command.
        msg = '𝐓𝐨 𝐃𝐞𝐥𝐞𝐭𝐞 𝐅𝐢𝐥𝐞 𝐟𝐫𝐨𝐦 𝐆-𝐃𝐫𝐢𝐯𝐞 𝐔 𝐌𝐮𝐬𝐭 𝐒𝐞𝐧𝐝 𝐚 𝐆𝐨𝐨𝐠𝐥𝐞 𝐃𝐫𝐢𝐯𝐞 𝐋𝐢𝐧𝐤 𝐀𝐥𝐨𝐧𝐠 𝐖𝐢𝐭𝐡 𝐁𝐨𝐭 𝐂𝐨𝐦𝐦𝐚𝐧𝐝'
    LOGGER.info(f"this is msg : {msg}")
    reply = sendMessage(msg, context.bot, update)
    cleaner = threading.Thread(target=auto_delete_message,
                               args=(context.bot, update.message, reply))
    cleaner.start()
def deletefile(update, context):
    """Delete a Drive file given either as a command argument or via a
    replied-to message; reply with the result and schedule auto-deletion."""
    parts = update.message.text.split(" ", maxsplit=1)
    replied = update.message.reply_to_message
    # Prefer the inline argument, then the replied message's text.
    if len(parts) > 1:
        link = parts[1]
    else:
        link = replied.text if replied is not None else ''
    if is_gdrive_link(link):
        LOGGER.info(link)
        msg = gdriveTools.GoogleDriveHelper().deletefile(link)
    else:
        msg = 'Send Gdrive link along with command or by replying to the link by command'
    reply = sendMessage(msg, context.bot, update)
    Thread(target=auto_delete_message,
           args=(context.bot, update.message, reply)).start()
def onDownloadComplete(self):
    """Post-download hook: tar or unzip the payload as requested, then upload it.

    This variant validates archives with tarfile/zipfile instead of relying
    on an external extract helper.
    """
    with download_dict_lock:
        LOGGER.info(
            f"📥Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
    if self.isTar:
        # Archive branch: tar the downloaded directory/file.
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to 🗂archive not found!')
            self.onUploadError('Internal ⚠️error occurred!!')
            return
    elif self.extract:
        # Only tar and zip archives are recognized here.
        if tarfile.is_tarfile(m_path) or zipfile.is_zipfile(m_path):
            LOGGER.info(f"Extracting : {download_dict[self.uid].name()} ")
            path = fs_utils.unzip(m_path)
            LOGGER.info(f'got path : {path}')
            try:
                os.remove(m_path)
            except Exception as e:
                # Best-effort cleanup; upload proceeds regardless.
                LOGGER.error(str(e))
                pass
            LOGGER.info(f"Deleting archive : {m_path}")
        else:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
    up_name = pathlib.PurePath(path).name
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    if size == 0:
        # Some downloaders report size 0; measure it from disk instead.
        size = fs_utils.get_path_size(m_path)
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
def _mirror(bot, update, isTar=False, extract=False):
    """Entry point for mirror commands: parse the message, pick a download
    backend (Telegram file, Google Drive, Mega, aria2) and start the job.

    Message format (first line): ``/cmd <link> |<custom name> pswd: <pw>``;
    optional second/third lines carry HTTP auth user/password.
    """
    mesg = update.message.text.split('\n')
    message_args = mesg[0].split(' ')
    name_args = mesg[0].split('|')
    try:
        link = message_args[1]
        print(link)
        # A leading "|" or "pswd: " means no link was actually given.
        if link.startswith("|") or link.startswith("pswd: "):
            link = ''
    except IndexError:
        link = ''
    try:
        name = name_args[1]
        name = name.strip()
        if name.startswith("pswd: "):
            name = ''
    except IndexError:
        name = ''
    try:
        # Optional HTTP auth on lines 2 and 3, URL-quoted for embedding.
        ussr = urllib.parse.quote(mesg[1], safe='')
        pssw = urllib.parse.quote(mesg[2], safe='')
    except:
        # NOTE(review): bare except mirrors the other _mirror variants;
        # an IndexError here just means no credentials were supplied.
        ussr = ''
        pssw = ''
    if ussr != '' and pssw != '':
        # Embed credentials: scheme://user:pass@host...
        link = link.split("://", maxsplit=1)
        link = f'{link[0]}://{ussr}:{pssw}@{link[1]}'
    pswd = re.search('(?<=pswd: )(.*)', update.message.text)
    if pswd is not None:
        pswd = pswd.groups()
        pswd = " ".join(pswd)
    LOGGER.info(link)
    link = link.strip()
    reply_to = update.message.reply_to_message
    if reply_to is not None:
        # Mirroring a replied-to Telegram document/video/audio.
        file = None
        tag = reply_to.from_user.username
        media_array = [reply_to.document, reply_to.video, reply_to.audio]
        for i in media_array:
            if i is not None:
                file = i
                break
        if not bot_utils.is_url(link) and not bot_utils.is_magnet(link) or len(
                link) == 0:
            if file is not None:
                if file.mime_type != "application/x-bittorrent":
                    # Plain media file: download it from Telegram directly.
                    listener = MirrorListener(bot, update, pswd, isTar, tag, extract)
                    tg_downloader = TelegramDownloadHelper(listener)
                    tg_downloader.add_download(
                        reply_to, f'{DOWNLOAD_DIR}{listener.uid}/', name)
                    sendStatusMessage(update, bot)
                    if len(Interval) == 0:
                        Interval.append(
                            setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL,
                                        update_all_messages))
                    return
                else:
                    # .torrent file: hand its direct path to the aria2 path below.
                    link = file.get_file().file_path
    else:
        tag = None
    if not bot_utils.is_url(link) and not bot_utils.is_magnet(link):
        sendMessage('No download source provided', bot, update)
        return
    try:
        # Resolve supported hosters to direct-download URLs.
        link = direct_link_generator(link)
    except DirectDownloadLinkException as e:
        LOGGER.info(f'{link}: {e}')
    listener = MirrorListener(bot, update, pswd, isTar, tag, extract)
    if bot_utils.is_gdrive_link(link):
        # Plain gdrive mirror should use the clone command instead.
        if not isTar and not extract:
            sendMessage(f"Use /{BotCommands.CloneCommand} To Copy File/Folder",
                        bot, update)
            return
        res, size, name = gdriveTools.GoogleDriveHelper().clonehelper(link)
        if res != "":
            sendMessage(res, bot, update)
            return
        LOGGER.info(f"Download Name : {name}")
        drive = gdriveTools.GoogleDriveHelper(name, listener)
        gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
        download_status = DownloadStatus(drive, size, listener, gid)
        with download_dict_lock:
            download_dict[listener.uid] = download_status
        if len(Interval) == 0:
            Interval.append(
                setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
        sendStatusMessage(update, bot)
        drive.download(link)
    elif bot_utils.is_mega_link(link) and MEGA_KEY is not None:
        if BLOCK_MEGA_LINKS:
            sendMessage("Mega Links Are Blocked.", bot, update)
        else:
            mega_dl = MegaDownloader(listener)
            mega_dl.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}/')
            sendStatusMessage(update, bot)
    else:
        # Everything else (HTTP(S), magnet, .torrent) goes to aria2.
        ariaDlManager.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}/',
                                   listener, name)
        sendStatusMessage(update, bot)
        if len(Interval) == 0:
            Interval.append(
                setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
def _mirror(bot, update, isTar=False, extract=False):
    """Entry point for mirror commands: parse the message, pick a download
    backend (Telegram file, Google Drive, Mega, aria2) and start the job.

    This variant surfaces direct-link generator errors to the user and
    enforces TAR_UNZIP_LIMIT on Drive tar/unzip jobs.
    """
    mesg = update.message.text.split('\n')
    message_args = mesg[0].split(' ')
    name_args = mesg[0].split('|')
    try:
        link = message_args[1]
        print(link)
        # A leading "|" or "pswd: " means no link was actually given.
        if link.startswith("|") or link.startswith("pswd: "):
            link = ''
    except IndexError:
        link = ''
    try:
        name = name_args[1]
        name = name.strip()
        if name.startswith("pswd: "):
            name = ''
    except IndexError:
        name = ''
    try:
        # Optional HTTP auth on lines 2 and 3, URL-quoted for embedding.
        ussr = urllib.parse.quote(mesg[1], safe='')
        pssw = urllib.parse.quote(mesg[2], safe='')
    except:
        # NOTE(review): bare except mirrors the other _mirror variants;
        # an IndexError here just means no credentials were supplied.
        ussr = ''
        pssw = ''
    if ussr != '' and pssw != '':
        link = link.split("://", maxsplit=1)
        link = f'{link[0]}://{ussr}:{pssw}@{link[1]}'
    pswd = re.search('(?<=pswd: )(.*)', update.message.text)
    if pswd is not None:
        pswd = pswd.groups()
        pswd = " ".join(pswd)
    LOGGER.info(link)
    link = link.strip()
    reply_to = update.message.reply_to_message
    if reply_to is not None:
        # Mirroring a replied-to Telegram document/video/audio.
        file = None
        tag = reply_to.from_user.username
        media_array = [reply_to.document, reply_to.video, reply_to.audio]
        for i in media_array:
            if i is not None:
                file = i
                break
        if not bot_utils.is_url(link) and not bot_utils.is_magnet(link) or len(
                link) == 0:
            if file is not None:
                if file.mime_type != "application/x-bittorrent":
                    # Plain media file: download it from Telegram directly.
                    listener = MirrorListener(bot, update, pswd, isTar, tag, extract)
                    tg_downloader = TelegramDownloadHelper(listener)
                    tg_downloader.add_download(
                        reply_to, f'{DOWNLOAD_DIR}{listener.uid}/', name)
                    if len(Interval) == 0:
                        Interval.append(
                            setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL,
                                        update_all_messages))
                    return
                else:
                    # .torrent file: hand its direct path to the aria2 path below.
                    link = file.get_file().file_path
    else:
        tag = None
    if not bot_utils.is_url(link) and not bot_utils.is_magnet(link):
        sendMessage('No download source provided', bot, update)
        return
    try:
        # Resolve supported hosters to direct-download URLs.
        link = direct_link_generator(link)
    except DirectDownloadLinkException as e:
        LOGGER.info(f'{link}: {e}')
        # Forward generator errors the user should see, then stop.
        if "ERROR:" in str(e):
            sendMessage(f"{e}", bot, update)
            return
        if "Youtube" in str(e):
            sendMessage(f"{e}", bot, update)
            return
    listener = MirrorListener(bot, update, pswd, isTar, tag, extract)
    if bot_utils.is_gdrive_link(link):
        # Plain gdrive mirror should use the clone command instead.
        if not isTar and not extract:
            sendMessage(
                f"Use /{BotCommands.CloneCommand} to clone Google Drive file/folder\nUse /{BotCommands.TarMirrorCommand} to make tar of Google Drive folder\nUse /{BotCommands.UnzipMirrorCommand} to extracts archive Google Drive file",
                bot, update)
            return
        res, size, name = gdriveTools.GoogleDriveHelper().clonehelper(link)
        if res != "":
            sendMessage(res, bot, update)
            return
        if TAR_UNZIP_LIMIT is not None:
            # TAR_UNZIP_LIMIT format: "<number> <unit>", unit G/g or T/t.
            LOGGER.info(f'Checking File/Folder Size')
            limit = TAR_UNZIP_LIMIT
            limit = limit.split(' ', maxsplit=1)
            limitint = int(limit[0])
            msg = f'Failed, Tar/Unzip limit is {TAR_UNZIP_LIMIT}.\nYour File/Folder size is {get_readable_file_size(size)}.'
            if 'G' in limit[1] or 'g' in limit[1]:
                if size > limitint * 1024**3:
                    sendMessage(msg, listener.bot, listener.update)
                    return
            elif 'T' in limit[1] or 't' in limit[1]:
                if size > limitint * 1024**4:
                    sendMessage(msg, listener.bot, listener.update)
                    return
        LOGGER.info(f"Download Name : {name}")
        drive = gdriveTools.GoogleDriveHelper(name, listener)
        gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
        download_status = DownloadStatus(drive, size, listener, gid)
        with download_dict_lock:
            download_dict[listener.uid] = download_status
        if len(Interval) == 0:
            Interval.append(
                setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
        sendStatusMessage(update, bot)
        drive.download(link)
    elif bot_utils.is_mega_link(link):
        link_type = get_mega_link_type(link)
        if link_type == "folder" and BLOCK_MEGA_FOLDER:
            sendMessage("Mega folder are blocked!", bot, update)
        elif BLOCK_MEGA_LINKS:
            sendMessage("Mega links are blocked!", bot, update)
        else:
            mega_dl = MegaDownloadHelper()
            mega_dl.add_download(link, f'{DOWNLOAD_DIR}/{listener.uid}/', listener)
    else:
        # Everything else (HTTP(S), magnet, .torrent) goes to aria2.
        ariaDlManager.add_download(link, f'{DOWNLOAD_DIR}/{listener.uid}/',
                                   listener, name)
        sendStatusMessage(update, bot)
        if len(Interval) == 0:
            Interval.append(
                setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL,
                            update_all_messages))
def _mirror(bot, update, isTar=False, extract=False, isZip=False, isQbit=False):
    """Entry point for mirror commands: parse the message, pick a download
    backend (Telegram file, Google Drive, Mega, qBittorrent, aria2) and start
    the job.

    Fix vs. original: the non-200 HTTP branch did
    ``"ERROR: link got HTTP response:" + resp.status_code`` — string + int
    raises TypeError, so the user never saw the error. The status code is now
    interpolated with an f-string.
    """
    mesg = update.message.text.split('\n')
    message_args = mesg[0].split(' ')
    name_args = mesg[0].split('|')
    qbitsel = False
    try:
        link = message_args[1]
        # "qb"/"qbs" prefix routes the job through qBittorrent
        # ("qbs" additionally enables file selection).
        if link in ["qb", "qbs"]:
            isQbit = True
            if link == "qbs":
                qbitsel = True
            link = message_args[2]
            if bot_utils.is_url(link) and not bot_utils.is_magnet(link):
                # Fetch the remote .torrent into a local file for qBittorrent.
                resp = requests.get(link)
                if resp.status_code == 200:
                    file_name = str(time.time()).replace(".", "") + ".torrent"
                    with open(file_name, "wb") as f:
                        f.write(resp.content)
                    link = f"{file_name}"
                else:
                    # status_code is an int; must be interpolated, not concatenated.
                    sendMessage(
                        f"ERROR: link got HTTP response:{resp.status_code}",
                        bot, update)
                    return
        # A leading "|" or "pswd: " means no link was actually given.
        if link.startswith("|") or link.startswith("pswd: "):
            link = ''
    except IndexError:
        link = ''
    try:
        name = name_args[1]
        name = name.strip()
        if name.startswith("pswd: "):
            name = ''
    except IndexError:
        name = ''
    try:
        # Optional HTTP auth on lines 2 and 3, URL-quoted for embedding.
        ussr = urllib.parse.quote(mesg[1], safe='')
        pssw = urllib.parse.quote(mesg[2], safe='')
    except:
        ussr = ''
        pssw = ''
    if ussr != '' and pssw != '':
        link = link.split("://", maxsplit=1)
        link = f'{link[0]}://{ussr}:{pssw}@{link[1]}'
    pswd = re.search('(?<=pswd: )(.*)', update.message.text)
    if pswd is not None:
        pswd = pswd.groups()
        pswd = " ".join(pswd)
    if link != '':
        LOGGER.info(link)
    link = link.strip()
    reply_to = update.message.reply_to_message
    if reply_to is not None:
        # Mirroring a replied-to Telegram document/video/audio.
        file = None
        media_array = [reply_to.document, reply_to.video, reply_to.audio]
        for i in media_array:
            if i is not None:
                file = i
                break
        if (not bot_utils.is_url(link) and not bot_utils.is_magnet(link)
                or len(link) == 0) and file is not None:
            if isQbit:
                # Save the replied torrent locally for qBittorrent.
                file_name = str(time.time()).replace(".", "") + ".torrent"
                file.get_file().download(custom_path=f"{file_name}")
                link = f"{file_name}"
            elif file.mime_type != "application/x-bittorrent":
                # Plain media file: download it from Telegram directly.
                listener = MirrorListener(bot, update, pswd, isTar, extract, isZip)
                tg_downloader = TelegramDownloadHelper(listener)
                ms = update.message
                tg_downloader.add_download(ms, f'{DOWNLOAD_DIR}{listener.uid}/', name)
                return
            else:
                # .torrent file: hand its direct path to the aria2 path below.
                link = file.get_file().file_path
    if not bot_utils.is_url(link) and not bot_utils.is_magnet(link):
        sendMessage('No download source provided', bot, update)
        return
    if not os.path.exists(link) and not bot_utils.is_mega_link(
            link) and not bot_utils.is_gdrive_link(
            link) and not bot_utils.is_magnet(link):
        try:
            # Resolve supported hosters to direct-download URLs.
            link = direct_link_generator(link)
        except DirectDownloadLinkException as e:
            LOGGER.info(e)
            if "ERROR:" in str(e):
                sendMessage(f"{e}", bot, update)
                return
            if "Youtube" in str(e):
                sendMessage(f"{e}", bot, update)
                return
    listener = MirrorListener(bot, update, pswd, isTar, extract, isZip, isQbit)
    if bot_utils.is_gdrive_link(link):
        # Plain gdrive mirror should use the clone command instead.
        if not isTar and not extract:
            sendMessage(
                f"Use /{BotCommands.CloneCommand} to clone Google Drive file/folder\nUse /{BotCommands.TarMirrorCommand} to make tar of Google Drive folder\nUse /{BotCommands.UnzipMirrorCommand} to extracts archive Google Drive file",
                bot, update)
            return
        res, size, name, files = gdriveTools.GoogleDriveHelper().clonehelper(
            link)
        if res != "":
            sendMessage(res, bot, update)
            return
        if TAR_UNZIP_LIMIT is not None:
            # check_limit is truthy when size exceeds the configured cap.
            result = bot_utils.check_limit(size, TAR_UNZIP_LIMIT)
            if result:
                msg = f'Failed, Tar/Unzip limit is {TAR_UNZIP_LIMIT}.\nYour File/Folder size is {get_readable_file_size(size)}.'
                sendMessage(msg, listener.bot, listener.update)
                return
        LOGGER.info(f"Download Name : {name}")
        drive = gdriveTools.GoogleDriveHelper(name, listener)
        gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
        download_status = DownloadStatus(drive, size, listener, gid)
        with download_dict_lock:
            download_dict[listener.uid] = download_status
        sendStatusMessage(update, bot)
        drive.download(link)
    elif bot_utils.is_mega_link(link):
        if BLOCK_MEGA_LINKS:
            sendMessage("Mega links are blocked!", bot, update)
            return
        link_type = bot_utils.get_mega_link_type(link)
        if link_type == "folder" and BLOCK_MEGA_FOLDER:
            sendMessage("Mega folder are blocked!", bot, update)
        else:
            mega_dl = MegaDownloadHelper()
            mega_dl.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener)
    elif isQbit and (bot_utils.is_magnet(link) or os.path.exists(link)):
        qbit = qbittorrent()
        qbit.add_torrent(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener, qbitsel)
    else:
        # Everything else (HTTP(S), magnet, .torrent) goes to aria2.
        ariaDlManager.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}/',
                                   listener, name)
        sendStatusMessage(update, bot)
def cloneNode(update, context): args = update.message.text.split(" ", maxsplit=1) if len(args) > 1: link = args[1] gd = gdriveTools.GoogleDriveHelper() res, clonesize, name, files = gd.clonehelper(link) if res != "": sendMessage(res, context.bot, update) return if STOP_DUPLICATE_CLONE: LOGGER.info(f"Checking File/Folder if already in Drive...") smsg, button = gd.drive_list(name) if smsg: msg3 = "File/Folder is already available in Drive.\nHere are the search results:" sendMarkup(msg3, context.bot, update, button) return if CLONE_LIMIT is not None: LOGGER.info(f"Checking File/Folder Size...") limit = CLONE_LIMIT limit = limit.split(' ', maxsplit=1) limitint = int(limit[0]) msg2 = f'Failed, Clone limit is {CLONE_LIMIT}.\nYour File/Folder size is {get_readable_file_size(clonesize)}.' if 'G' in limit[1] or 'g' in limit[1]: if clonesize > limitint * 1024**3: sendMessage(msg2, context.bot, update) return elif 'T' in limit[1] or 't' in limit[1]: if clonesize > limitint * 1024**4: sendMessage(msg2, context.bot, update) return if files < 15: msg = sendMessage(f"Cloning: <code>{link}</code>", context.bot, update) result, button = gd.clone(link) deleteMessage(context.bot, msg) else: drive = gdriveTools.GoogleDriveHelper(name) gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12)) clone_status = CloneStatus(drive, clonesize, update, gid) with download_dict_lock: download_dict[update.message.message_id] = clone_status if len(Interval) == 0: Interval.append( setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages)) sendStatusMessage(update, context.bot) result, button = drive.clone(link) if update.message.from_user.username: uname = f'@{update.message.from_user.username}' else: uname = f'<a href="tg://user?id={update.message.from_user.id}">{update.message.from_user.first_name}</a>' if uname is not None: cc = f'\n\ncc: {uname}' men = f'{uname} ' if button == "cancelled": sendMessage(men + result, context.bot, update) elif button == "": 
sendMessage(men + result, context.bot, update) else: sendMarkup(result + cc, context.bot, update, button) try: with download_dict_lock: del download_dict[update.message.message_id] count = len(download_dict) if count == 0: Interval[0].cancel() del Interval[0] delete_all_messages() else: update_all_messages() except IndexError, KeyError: pass
def onDownloadComplete(self):
    """Post-download pipeline for a finished transfer.

    Depending on the listener flags, the download is either archived
    (zip/tar), extracted, or left as-is, then handed to the Telegram leech
    uploader or the Google Drive uploader. The download_dict entry for this
    uid is swapped to the matching status object at every stage.
    """
    with download_dict_lock:
        LOGGER.info(
            f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        # Strip '/' so the name is safe to use as a single path component.
        name = f"{download.name()}".replace('/', '')
        gid = download.gid()
        size = download.size_raw()
    if name == "None" or self.isQbit:  # when pyrogram's media.file_name is of NoneType
        # Fall back to the first entry inside this uid's download directory.
        name = os.listdir(f'{DOWNLOAD_DIR}{self.uid}')[0]
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    if self.isTar:
        # Archive mode: pack the download into a .zip via 7z (optionally
        # password-protected) or a tar built by fs_utils.
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            if self.isZip:
                pswd = self.pswd
                path = m_path + ".zip"
                LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
                if pswd is not None:
                    subprocess.run(["7z", "a", f"-p{pswd}", path, m_path])
                else:
                    subprocess.run(["7z", "a", path, m_path])
            else:
                path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
        # Delete the unarchived original: rmtree for directories,
        # os.remove as the fallback when m_path is a plain file.
        try:
            shutil.rmtree(m_path)
        except:
            os.remove(m_path)
    elif self.extract:
        try:
            LOGGER.info(f"Extracting: {name}")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            if os.path.isdir(m_path):
                # Directory download: walk bottom-up and 7z-extract every
                # standalone .rar, or the first part of a multi-part set.
                for dirpath, subdir, files in os.walk(m_path, topdown=False):
                    for file in files:
                        suffixes = (".part1.rar", ".part01.rar",
                                    ".part001.rar", ".part0001.rar")
                        if (file.endswith(".rar") and "part" not in file) or file.endswith(suffixes):
                            # NOTE: m_path is rebound to the archive here;
                            # the original download root is no longer needed.
                            m_path = os.path.join(dirpath, file)
                            if pswd is not None:
                                result = subprocess.run([
                                    "7z", "x", f"-p{pswd}", m_path,
                                    f"-o{dirpath}"
                                ])
                            else:
                                result = subprocess.run(
                                    ["7z", "x", m_path, f"-o{dirpath}"])
                            if result.returncode != 0:
                                LOGGER.warning(
                                    'Unable to extract archive!')
                                break
                    for file in files:
                        # Clean up rar volumes (.rar, .r00, .r01, ...) after extraction.
                        if file.endswith(".rar") or fnmatch(
                                file, "*.r[0-9]") or fnmatch(
                                file, "*.r[0-9]*"):
                            del_path = os.path.join(dirpath, file)
                            os.remove(del_path)
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            else:
                # Single archive file: upload path is the name without the
                # archive extension (may raise NotSupportedExtractionArchive).
                path = fs_utils.get_base_name(m_path)
                # "pextract"/"extract" are presumably external helper scripts
                # wrapping the extraction tool — TODO confirm they exist on PATH.
                if pswd is not None:
                    result = subprocess.run(["pextract", m_path, pswd])
                else:
                    result = subprocess.run(["extract", m_path])
                if result.returncode == 0:
                    os.remove(m_path)
                    LOGGER.info(f"Deleting archive: {m_path}")
                else:
                    # Extraction failed: fall back to uploading the archive itself.
                    LOGGER.warning(
                        'Unable to extract archive! Uploading anyway')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(f'got path: {path}')
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        # Plain mirror: upload the download as-is.
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
    # Recompute size: archiving/extraction changed what is being uploaded.
    size = fs_utils.get_path_size(up_path)
    if self.isLeech:
        # Telegram upload path: split any file exceeding TG_SPLIT_SIZE first.
        checked = False
        for dirpath, subdir, files in os.walk(f'{DOWNLOAD_DIR}{self.uid}',
                                              topdown=False):
            for file in files:
                f_path = os.path.join(dirpath, file)
                f_size = os.path.getsize(f_path)
                if int(f_size) > TG_SPLIT_SIZE:
                    if not checked:
                        # Switch the status entry to "splitting" only once.
                        checked = True
                        with download_dict_lock:
                            download_dict[self.uid] = SplitStatus(
                                up_name, up_path, size)
                        LOGGER.info(f"Splitting: {up_name}")
                    fs_utils.split(f_path, f_size, file, dirpath,
                                   TG_SPLIT_SIZE)
                    os.remove(f_path)
        LOGGER.info(f"Leech Name: {up_name}")
        tg = pyrogramEngine.TgUploader(up_name, self)
        tg_upload_status = TgUploadStatus(tg, size, gid, self)
        with download_dict_lock:
            download_dict[self.uid] = tg_upload_status
        update_all_messages()
        tg.upload()
    else:
        # Google Drive upload path.
        LOGGER.info(f"Upload Name: {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        upload_status = UploadStatus(drive, size, gid, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)