 def onUploadComplete(self, link: str):
     with download_dict_lock:
         msg = f'<a href="{link}">{download_dict[self.uid].name()}</a> ({download_dict[self.uid].size()})'
         LOGGER.info(f'Done Uploading {download_dict[self.uid].name()}')
         if INDEX_URL is not None:
             share_url = requests.utils.requote_uri(
                 f'{INDEX_URL}/{download_dict[self.uid].name()}')
             if os.path.isdir(
                     f'{DOWNLOAD_DIR}/{self.uid}/{download_dict[self.uid].name()}'
             ):
                 share_url += '/'
             msg += f'\n\nShareable link: <a href="{share_url}">here</a>'
         if self.tag is not None:
             msg += f'\ncc: @{self.tag}'
         try:
             fs_utils.clean_download(download_dict[self.uid].path())
         except FileNotFoundError:
             pass
         del download_dict[self.uid]
         count = len(download_dict)
     sendMessage(msg, self.bot, self.update)
     if count == 0:
         self.clean()
     else:
         update_all_messages()
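
# The listener methods in these snippets share module-level state that is
# assumed but never defined here. A minimal sketch of that state (an
# assumption about the surrounding module, not its actual code):
import threading

download_dict = {}                     # uid -> status object for each task
download_dict_lock = threading.Lock()  # guards all access to download_dict
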
 def onDownloadError(self, error):
     error = error.replace('<', ' ')
     error = error.replace('>', ' ')
     LOGGER.info(f"Chat id: {self.update.effective_chat.id}")
     with download_dict_lock:
         try:
             download = download_dict[self.uid]
             del download_dict[self.uid]
             LOGGER.info(f"Deleting folder: {download.path()}")
             fs_utils.clean_download(download.path())
             LOGGER.info(str(download_dict))
         except Exception as e:
             LOGGER.error(str(e))
         count = len(download_dict)
     if self.message.from_user.username:
         uname = f"@{self.message.from_user.username}"
     else:
         uname = f'<a href="tg://user?id={self.message.from_user.id}">{self.message.from_user.first_name}</a>'
     msg = f"{uname} your download has been stopped due to: {error}"
     sendMessage(msg, self.bot, self.update)
     if count == 0:
         self.clean()
     else:
         update_all_messages()
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(
                f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            if name is None:  # when pyrogram's media.file_name is None
                name = os.listdir(f'{DOWNLOAD_DIR}{self.uid}')[0]
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!!')
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(f"Extracting : {name} ")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path, )).start()
                    LOGGER.info(f"Deleting archive : {m_path}")
                else:
                    LOGGER.warning(
                        'Unable to extract archive! Uploading anyway')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(f'got path : {path}')

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            if self.root:
                path = f'{DOWNLOAD_DIR}{self.uid}/{self.root}'
            else:
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = pathlib.PurePath(path).name
        LOGGER.info(f"Upload Name : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        if self.root:
            size = fs_utils.get_path_size(path)
        else:
            if size == 0:
                size = fs_utils.get_path_size(m_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
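
The extract branch above relies on fs_utils.get_base_name and catches NotSupportedExtractionArchive. A hedged sketch of that helper's contract (the bot's real implementation lives in fs_utils and may differ):

class NotSupportedExtractionArchive(Exception):
    """Stand-in for the bot's exception of the same name."""

def get_base_name(orig_path: str) -> str:
    # Strip a known archive extension; raise for anything unrecognized.
    for ext in (".zip", ".rar", ".7z", ".tar", ".tar.gz", ".tar.bz2"):
        if orig_path.lower().endswith(ext):
            return orig_path[: -len(ext)]
    raise NotSupportedExtractionArchive(f"unsupported archive: {orig_path}")
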
Example #4
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(
                f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            if name is None:  # when pyrogram's media.file_name is None
                name = os.listdir(f"{DOWNLOAD_DIR}{self.uid}")[0]
            m_path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
        if self.isZip:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = ZipStatus(name, m_path, size)
                path = fs_utils.zip(name, m_path)
            except FileNotFoundError:
                LOGGER.info("File to archive not found!")
                self.onUploadError("Internal error occurred!!")
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(f"Extracting : {name} ")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                pswd = self.pswd
                if pswd is not None:
                    archive_result = subprocess.run(["pextract", m_path, pswd])
                else:
                    archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path, )).start()
                    LOGGER.info(f"Deleting archive : {m_path}")
                else:
                    LOGGER.warning(
                        "Unable to extract archive! Uploading anyway")
                    path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
                LOGGER.info(f"got path : {path}")

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
        else:
            path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
        up_name = pathlib.PurePath(path).name
        if up_name == "None":
            up_name = "".join(os.listdir(f"{DOWNLOAD_DIR}{self.uid}/"))
        up_path = f"{DOWNLOAD_DIR}{self.uid}/{up_name}"
        LOGGER.info(f"Upload Name : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        size = fs_utils.get_path_size(up_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
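
This variant mirrors the tar-based onDownloadComplete above, with two differences: it zips via fs_utils.zip instead of tarring, and when self.pswd is set it extracts through a pextract helper rather than extract. It also re-derives the upload name from the download directory when the resolved name is the literal string "None".
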
Example #5
def status_pages(update, context):
    query = update.callback_query
    data = query.data
    data = data.split(' ')
    query.answer()
    done = turn(data)
    if done:
        update_all_messages()
    else:
        query.message.delete()
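
status_pages is a python-telegram-bot (v13-style) callback-query handler, and turn is assumed to advance the status page encoded in query.data. A hedged registration sketch (the dispatcher variable and the pattern string are assumptions):

from telegram.ext import CallbackQueryHandler

dispatcher.add_handler(CallbackQueryHandler(status_pages, pattern="status"))
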
Example #6
 def onUploadComplete(self, link: str, size):
     with download_dict_lock:
         msg = f"<b>Filename : </b><code>{download_dict[self.uid].name()}</code>\n<b>Size : </b><code>{size}</code>"
         buttons = button_build.ButtonMaker()
         if SHORTENER is not None and SHORTENER_API is not None:
             surl = requests.get(
                 f"https://{SHORTENER}/api?api={SHORTENER_API}&url={link}&format=text"
             ).text
             buttons.buildbutton("Drive Link", surl)
         else:
             buttons.buildbutton("Drive Link", link)
         LOGGER.info(f"Done Uploading {download_dict[self.uid].name()}")
         if INDEX_URL is not None:
             url_path = requests.utils.quote(
                 f"{download_dict[self.uid].name()}")
             share_url = f"{INDEX_URL}/{url_path}"
             if os.path.isdir(
                     f"{DOWNLOAD_DIR}/{self.uid}/{download_dict[self.uid].name()}"
             ):
                 share_url += "/"
             if SHORTENER is not None and SHORTENER_API is not None:
                 siurl = requests.get(
                     f"https://{SHORTENER}/api?api={SHORTENER_API}&url={share_url}&format=text"
                 ).text
                 buttons.buildbutton("Index Link", siurl)
             else:
                 buttons.buildbutton("Index Link", share_url)
         if BUTTON_THREE_NAME is not None and BUTTON_THREE_URL is not None:
             buttons.buildbutton(f"{BUTTON_THREE_NAME}",
                                 f"{BUTTON_THREE_URL}")
         if BUTTON_FOUR_NAME is not None and BUTTON_FOUR_URL is not None:
             buttons.buildbutton(f"{BUTTON_FOUR_NAME}",
                                 f"{BUTTON_FOUR_URL}")
         if BUTTON_FIVE_NAME is not None and BUTTON_FIVE_URL is not None:
             buttons.buildbutton(f"{BUTTON_FIVE_NAME}",
                                 f"{BUTTON_FIVE_URL}")
         if self.message.from_user.username:
             uname = f"@{self.message.from_user.username}"
         else:
             uname = f'<a href="tg://user?id={self.message.from_user.id}">{self.message.from_user.first_name}</a>'
         if uname is not None:
             msg += f"\n\ncc : {uname}"
         try:
             fs_utils.clean_download(download_dict[self.uid].path())
         except FileNotFoundError:
             pass
         del download_dict[self.uid]
         count = len(download_dict)
     sendMarkup(msg, self.bot, self.update,
                InlineKeyboardMarkup(buttons.build_menu(2)))
     if count == 0:
         self.clean()
     else:
         update_all_messages()
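
The shortener requests above carry no timeout or error handling, so a slow or failing shortener stalls the upload-complete path. A hedged hardening sketch (try_shorten is a hypothetical helper, not part of the bot):

def try_shorten(long_url: str) -> str:
    # Fall back to the original link if the shortener is slow or errors out.
    try:
        resp = requests.get(
            f"https://{SHORTENER}/api?api={SHORTENER_API}&url={long_url}&format=text",
            timeout=10,
        )
        resp.raise_for_status()
        return resp.text.strip()
    except requests.RequestException:
        return long_url
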
Example #7
 def onUploadError(self, error):
     e_str = error.replace("<", "").replace(">", "")
     with download_dict_lock:
         try:
             fs_utils.clean_download(download_dict[self.uid].path())
         except FileNotFoundError:
             pass
         del download_dict[self.uid]
         count = len(download_dict)
     sendMessage(e_str, self.bot, self.update)
     if count == 0:
         self.clean()
     else:
         update_all_messages()
Example #8
 def onUploadError(self, error):
     e_str = error.replace('<', '').replace('>', '')
     with download_dict_lock:
         try:
             clean_download(download_dict[self.uid].path())
         except FileNotFoundError:
             pass
         del download_dict[self.uid]
         count = len(download_dict)
     sendMessage(f"{self.tag} {e_str}", self.bot, self.update)
     if count == 0:
         self.clean()
     else:
         update_all_messages()
Example #9
 def onDownloadError(self, error):
     error = error.replace('<', ' ').replace('>', ' ')
     with download_dict_lock:
         try:
             download = download_dict[self.uid]
             del download_dict[self.uid]
             clean_download(download.path())
         except Exception as e:
             LOGGER.error(str(e))
         count = len(download_dict)
     msg = f"{self.tag} your download has been stopped due to: {error}"
     sendMessage(msg, self.bot, self.update)
     if count == 0:
         self.clean()
     else:
         update_all_messages()
Example #10
def flip(update, context):
    query = update.callback_query
    query.answer()
    global COUNT, PAGE_NO
    if query.data == "nex":
        if PAGE_NO == pages:
            COUNT = 0
            PAGE_NO = 1
        else:
            COUNT += STATUS_LIMIT
            PAGE_NO += 1
    elif query.data == "pre":
        if PAGE_NO == 1:
            COUNT = STATUS_LIMIT * (pages - 1)
            PAGE_NO = pages
        else:
            COUNT -= STATUS_LIMIT
            PAGE_NO -= 1
    message_utils.update_all_messages()
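
flip pages through the status list by mutating module-level counters; a sketch of the state it assumes (values are illustrative, not the bot's defaults):

COUNT = 0         # index of the first entry shown on the current page
PAGE_NO = 1       # 1-based page number
STATUS_LIMIT = 4  # entries per status page
pages = 1         # total page count, recomputed elsewhere from the task list
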
Example #11
 def __onDownloadComplete(self, api: API, gid):
     LOGGER.info(f"onDownloadComplete: {gid}")
     dl = getDownloadByGid(gid)
     download = api.get_download(gid)
     if download.followed_by_ids:
         new_gid = download.followed_by_ids[0]
         new_download = api.get_download(new_gid)
         if dl is None:
             dl = getDownloadByGid(new_gid)
         with download_dict_lock:
             download_dict[dl.uid()] = AriaDownloadStatus(
                 new_gid, dl.getListener())
             if new_download.is_torrent:
                 download_dict[dl.uid()].is_torrent = True
         update_all_messages()
         LOGGER.info(f"Changed gid from {gid} to {new_gid}")
     elif dl:
         threading.Thread(
             target=dl.getListener().onDownloadComplete).start()
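
 # aria2 note: for magnets and torrents the metadata download finishes first
 # and is replaced by a follow-up download (followed_by_ids), so the handler
 # above re-keys download_dict from the old gid's status to the new gid before
 # the real payload starts.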
 def add_download(self, mega_link: str, path: str, listener):
     executor = AsyncExecutor()
     api = MegaApi(MEGA_API_KEY, None, None, 'telegram-mirror-bot')
     mega_listener = MegaAppListener(executor.continue_event, listener)
     os.makedirs(path)
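     # note: os.makedirs raises FileExistsError if the directory already exists
     # (e.g. left over from an earlier attempt); os.makedirs(path, exist_ok=True)
     # is a hedged alternative when re-using the directory is acceptable.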
     api.addListener(mega_listener)
     executor.do(api.getPublicNode, (mega_link, ))
     node = mega_listener.node
     if node is None:
         executor.do(api.loginToFolder, (mega_link, ))
         node = mega_listener.node
     if mega_listener.error is not None:
         return listener.onDownloadError(str(mega_listener.error))
     mega_listener.setValues(node.getName(), api.getSize(node),
                             mega_link.split("!", 1)[-1].split("!", 1)[0])
     with download_dict_lock:
         download_dict[listener.uid] = MegaDownloadStatus(
             mega_listener, listener)
     threading.Thread(target=executor.do,
                      args=(api.startDownload, (node, path))).start()
     update_all_messages()
 def __onDownloadComplete(self, api: API, gid):
     LOGGER.info(f"onDownloadComplete: {gid}")
     dl = getDownloadByGid(gid)
     download = api.get_download(gid)
     if download.followed_by_ids:
         new_gid = download.followed_by_ids[0]
         new_download = api.get_download(new_gid)
         with download_dict_lock:
             download_dict[dl.uid()] = AriaDownloadStatus(
                 new_gid, dl.getListener(), self)
             if new_download.is_torrent:
                 download_dict[dl.uid()].is_torrent = True
         update_all_messages()
         LOGGER.info(f'Changed gid from {gid} to {new_gid}')
         return
     if dl:
         queue = self.queue_dict[dl.uid()]
         if queue.current_download != queue.queue_length:
             self.__startNextDownload(dl.uid())
             return
         threading.Thread(
             target=dl.getListener().onDownloadComplete).start()
Example #14
 def add_download(mega_link: str, path: str, listener):
     if MEGA_API_KEY is None:
         raise MegaDownloaderException('Mega API KEY not provided! Cannot mirror mega links')
     executor = AsyncExecutor()
     api = MegaApi(MEGA_API_KEY, None, None, 'telegram-mirror-bot')
     mega_listener = MegaAppListener(executor.continue_event, listener)
     os.makedirs(path)
     api.addListener(mega_listener)
     if MEGA_EMAIL_ID is not None and MEGA_PASSWORD is not None:
         executor.do(api.login, (MEGA_EMAIL_ID, MEGA_PASSWORD))
     executor.do(api.getPublicNode, (mega_link,))
     node = mega_listener.node
     if node is None:
         executor.do(api.loginToFolder, (mega_link,))
         node = mega_listener.node
     if mega_listener.error is not None:
         return listener.onDownloadError(str(mega_listener.error))
     gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=8))
     mega_listener.setValues(node.getName(), api.getSize(node), gid)
     with download_dict_lock:
         download_dict[listener.uid] = MegaDownloadStatus(mega_listener, listener)
     threading.Thread(target=executor.do, args=(api.startDownload, (node, path))).start()
     update_all_messages()
Example #15
 def onDownloadComplete(self):
     with download_dict_lock:
         LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
         download = download_dict[self.uid]
         name = str(download.name()).replace('/', '')
         gid = download.gid()
         size = download.size_raw()
         if name == "None" or self.isQbit:
             name = listdir(f'{DOWNLOAD_DIR}{self.uid}')[-1]
         m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     if self.isZip:
         try:
             with download_dict_lock:
                 download_dict[self.uid] = ZipStatus(name, m_path, size)
             pswd = self.pswd
             path = m_path + ".zip"
             LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
             if pswd is not None:
                 if self.isLeech and int(size) > TG_SPLIT_SIZE:
                     path = m_path + ".zip"
                     srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{pswd}", path, m_path])
                 else:
                     srun(["7z", "a", "-mx=0", f"-p{pswd}", path, m_path])
             elif self.isLeech and int(size) > TG_SPLIT_SIZE:
                 path = m_path + ".zip"
                 srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
             else:
                 srun(["7z", "a", "-mx=0", path, m_path])
         except FileNotFoundError:
             LOGGER.info('File to archive not found!')
             self.onUploadError('Internal error occurred!!')
             return
         try:
             rmtree(m_path)
         except OSError:
             # fall back when m_path is a single file rather than a directory
             osremove(m_path)
     elif self.extract:
         try:
             if ospath.isfile(m_path):
                 path = get_base_name(m_path)
             LOGGER.info(f"Extracting: {name}")
             with download_dict_lock:
                 download_dict[self.uid] = ExtractStatus(name, m_path, size)
             pswd = self.pswd
             if ospath.isdir(m_path):
                 for dirpath, subdir, files in walk(m_path, topdown=False):
                     for file_ in files:
                         if search(r'\.part0*1\.rar$', file_) or search(r'\.7z\.0*1$', file_) \
                            or (file_.endswith(".rar") and not search(r'\.part\d+\.rar$', file_)) \
                            or file_.endswith(".zip") or search(r'\.zip\.0*1$', file_):
                             m_path = ospath.join(dirpath, file_)
                             if pswd is not None:
                                 result = srun(["7z", "x", f"-p{pswd}", m_path, f"-o{dirpath}", "-aot"])
                             else:
                                 result = srun(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
                             if result.returncode != 0:
                                 LOGGER.error('Unable to extract archive!')
                     for file_ in files:
                         if file_.endswith(".rar") or search(r'\.r\d+$', file_) \
                            or search(r'\.7z.\d+$', file_) or search(r'\.z\d+$', file_) \
                            or search(r'\.zip.\d+$', file_) or file_.endswith(".zip"):
                             del_path = ospath.join(dirpath, file_)
                             osremove(del_path)
                 path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
             else:
                 if pswd is not None:
                     result = srun(["bash", "pextract", m_path, pswd])
                 else:
                     result = srun(["bash", "extract", m_path])
                 if result.returncode == 0:
                     LOGGER.info(f"Extract Path: {path}")
                     osremove(m_path)
                     LOGGER.info(f"Deleting archive: {m_path}")
                 else:
                     LOGGER.error('Unable to extract archive! Uploading anyway')
                     path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
         except NotSupportedExtractionArchive:
             LOGGER.info("Not any valid archive, uploading file as it is.")
             path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     else:
         path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     up_name = PurePath(path).name
     up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
     size = get_path_size(f'{DOWNLOAD_DIR}{self.uid}')
     if self.isLeech and not self.isZip:
         checked = False
         for dirpath, subdir, files in walk(f'{DOWNLOAD_DIR}{self.uid}', topdown=False):
             for file_ in files:
                 f_path = ospath.join(dirpath, file_)
                 f_size = ospath.getsize(f_path)
                 if int(f_size) > TG_SPLIT_SIZE:
                     if not checked:
                         checked = True
                         with download_dict_lock:
                             download_dict[self.uid] = SplitStatus(up_name, up_path, size)
                         LOGGER.info(f"Splitting: {up_name}")
                     fssplit(f_path, f_size, file_, dirpath, TG_SPLIT_SIZE)
                     osremove(f_path)
     if self.isLeech:
         LOGGER.info(f"Leech Name: {up_name}")
         tg = TgUploader(up_name, self)
         tg_upload_status = TgUploadStatus(tg, size, gid, self)
         with download_dict_lock:
             download_dict[self.uid] = tg_upload_status
         update_all_messages()
         tg.upload()
     else:
         LOGGER.info(f"Upload Name: {up_name}")
         drive = GoogleDriveHelper(up_name, self)
         upload_status = UploadStatus(drive, size, gid, self)
         with download_dict_lock:
             download_dict[self.uid] = upload_status
         update_all_messages()
         drive.upload(up_name)
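
A worked example of the split step above: with TG_SPLIT_SIZE set to 2 GiB (2 * 1024**3 bytes), a single 5 GiB file in a leech run is cut by fssplit into three parts of at most 2 GiB each, and the oversized original is removed before the Telegram upload.
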
Example #16
    def onUploadComplete(self, link: str, size, files, folders, typ):
        if self.isLeech:
            if not (self.isQbit and QB_SEED):
                with download_dict_lock:
                    try:
                        clean_download(download_dict[self.uid].path())
                    except FileNotFoundError:
                        pass
                    del download_dict[self.uid]
                    dcount = len(download_dict)
                if dcount == 0:
                    self.clean()
                else:
                    update_all_messages()
            count = len(files)
            msg = f'<b>Name: </b><code>{link}</code>\n\n'
            msg += f'<b>Size: </b>{get_readable_file_size(size)}\n'
            msg += f'<b>Total Files: </b>{count}'
            if typ != 0:
                msg += f'\n<b>Corrupted Files: </b>{typ}'
            if self.message.chat.type == 'private':
                sendMessage(msg, self.bot, self.update)
            else:
                chat_id = str(self.message.chat.id)[4:]
                msg += f'\n<b>cc: </b>{self.tag}\n\n'
                fmsg = ''
                for index, item in enumerate(list(files), start=1):
                    msg_id = files[item]
                    link = f"https://t.me/c/{chat_id}/{msg_id}"
                    fmsg += f"{index}. <a href='{link}'>{item}</a>\n"
                    if len(fmsg.encode('utf-8') + msg.encode('utf-8')) > 4000:
                        sleep(2)
                        sendMessage(msg + fmsg, self.bot, self.update)
                        fmsg = ''
                if fmsg != '':
                    sleep(2)
                    sendMessage(msg + fmsg, self.bot, self.update)
            return

        with download_dict_lock:
            msg = f'<b>Name: </b><code>{download_dict[self.uid].name()}</code>\n\n<b>Size: </b>{size}'
            msg += f'\n\n<b>Type: </b>{typ}'
            if ospath.isdir(f'{DOWNLOAD_DIR}/{self.uid}/{download_dict[self.uid].name()}'):
                msg += f'\n<b>SubFolders: </b>{folders}'
                msg += f'\n<b>Files: </b>{files}'
            buttons = ButtonMaker()
            link = short_url(link)
            buttons.buildbutton("☁️ Drive Link", link)
            LOGGER.info(f'Done Uploading {download_dict[self.uid].name()}')
            if INDEX_URL is not None:
                url_path = requests.utils.quote(f'{download_dict[self.uid].name()}')
                share_url = f'{INDEX_URL}/{url_path}'
                if ospath.isdir(f'{DOWNLOAD_DIR}/{self.uid}/{download_dict[self.uid].name()}'):
                    share_url += '/'
                    share_url = short_url(share_url)
                    buttons.buildbutton("⚡ Index Link", share_url)
                else:
                    share_url = short_url(share_url)
                    buttons.buildbutton("⚡ Index Link", share_url)
                    if VIEW_LINK:
                        share_urls = f'{INDEX_URL}/{url_path}?a=view'
                        share_urls = short_url(share_urls)
                        buttons.buildbutton("🌐 View Link", share_urls)
            if BUTTON_FOUR_NAME is not None and BUTTON_FOUR_URL is not None:
                buttons.buildbutton(f"{BUTTON_FOUR_NAME}", f"{BUTTON_FOUR_URL}")
            if BUTTON_FIVE_NAME is not None and BUTTON_FIVE_URL is not None:
                buttons.buildbutton(f"{BUTTON_FIVE_NAME}", f"{BUTTON_FIVE_URL}")
            if BUTTON_SIX_NAME is not None and BUTTON_SIX_URL is not None:
                buttons.buildbutton(f"{BUTTON_SIX_NAME}", f"{BUTTON_SIX_URL}")
        msg += f'\n\n<b>cc: </b>{self.tag}'
        if self.isQbit and QB_SEED:
            return sendMarkup(msg, self.bot, self.update, InlineKeyboardMarkup(buttons.build_menu(2)))
        else:
            with download_dict_lock:
                try:
                    clean_download(download_dict[self.uid].path())
                except FileNotFoundError:
                    pass
                del download_dict[self.uid]
                count = len(download_dict)
            sendMarkup(msg, self.bot, self.update, InlineKeyboardMarkup(buttons.build_menu(2)))
            if count == 0:
                self.clean()
            else:
                update_all_messages()
Example #17
 def __onDownloadStarted(self, api, gid):
     sleep(1)
     LOGGER.info(f"onDownloadStart: {gid}")
     download = api.get_download(gid)
     self.name = download.name
     update_all_messages()
Example #18
def cloneNode(update, context):
    args = update.message.text.split(" ", maxsplit=1)
    reply_to = update.message.reply_to_message
    if len(args) > 1:
        link = args[1]
        if update.message.from_user.username:
            tag = f"@{update.message.from_user.username}"
        else:
            tag = update.message.from_user.mention_html(
                update.message.from_user.first_name)
    elif reply_to is not None:
        link = reply_to.text
        if reply_to.from_user.username:
            tag = f"@{reply_to.from_user.username}"
        else:
            tag = reply_to.from_user.mention_html(
                reply_to.from_user.first_name)
    else:
        link = ''
    gdtot_link = is_gdtot_link(link)
    if gdtot_link:
        try:
            msg = sendMessage(f"Processing: <code>{link}</code>", context.bot,
                              update)
            link = gdtot(link)
            deleteMessage(context.bot, msg)
        except DirectDownloadLinkException as e:
            deleteMessage(context.bot, msg)
            return sendMessage(str(e), context.bot, update)
    if is_gdrive_link(link):
        gd = GoogleDriveHelper()
        res, size, name, files = gd.helper(link)
        if res != "":
            return sendMessage(res, context.bot, update)
        if STOP_DUPLICATE:
            LOGGER.info('Checking File/Folder if already in Drive...')
            smsg, button = gd.drive_list(name, True, True)
            if smsg:
                msg3 = "File/Folder is already available in Drive.\nHere are the search results:"
                sendMarkup(msg3, context.bot, update, button)
                if gdtot_link:
                    gd.deletefile(link)
                return
        if CLONE_LIMIT is not None:
            LOGGER.info('Checking File/Folder Size...')
            if size > CLONE_LIMIT * 1024**3:
                msg2 = f'Failed, Clone limit is {CLONE_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(size)}.'
                return sendMessage(msg2, context.bot, update)
        if files <= 10:
            msg = sendMessage(f"Cloning: <code>{link}</code>", context.bot,
                              update)
            result, button = gd.clone(link)
            deleteMessage(context.bot, msg)
        else:
            drive = GoogleDriveHelper(name)
            gid = ''.join(random.SystemRandom().choices(string.ascii_letters +
                                                        string.digits,
                                                        k=12))
            clone_status = CloneStatus(drive, size, update, gid)
            with download_dict_lock:
                download_dict[update.message.message_id] = clone_status
            sendStatusMessage(update, context.bot)
            result, button = drive.clone(link)
            with download_dict_lock:
                del download_dict[update.message.message_id]
                count = len(download_dict)
            try:
                if count == 0:
                    Interval[0].cancel()
                    del Interval[0]
                    delete_all_messages()
                else:
                    update_all_messages()
            except IndexError:
                pass
        cc = f'\n\n<b>cc: </b>{tag}'
        if button in ["cancelled", ""]:
            sendMessage(f"{tag} {result}", context.bot, update)
        else:
            sendMarkup(result + cc, context.bot, update, button)
        if gdtot_link:
            gd.deletefile(link)
    else:
        sendMessage(
            'Send a Gdrive or gdtot link along with the command, or reply to the link with the command',
            context.bot, update)
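
cloneNode is a python-telegram-bot command handler; a hedged registration sketch (the command name "clone" and the dispatcher variable are assumptions):

from telegram.ext import CommandHandler

dispatcher.add_handler(CommandHandler("clone", cloneNode))
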
 def __onDownloadStarted(self, api, gid):
     LOGGER.info(f"onDownloadStart: {gid}")
     dl = getDownloadByGid(gid)
     if dl:
         self.queue_dict[dl.uid()].name = api.get_download(gid).name
     update_all_messages()
Example #20
def _qb_listener(listener, client, gid, ext_hash, select, meta_time, path):
    stalled_time = time()
    uploaded = False
    sizeChecked = False
    dupChecked = False
    rechecked = False
    get_info = 0
    while True:
        sleep(4)
        tor_info = client.torrents_info(torrent_hashes=ext_hash)
        if len(tor_info) == 0:
            with download_dict_lock:
                if listener.uid not in list(download_dict.keys()):
                    client.auth_log_out()
                    break
            get_info += 1
            if get_info > 10:
                client.auth_log_out()
                break
            continue
        get_info = 0
        try:
            tor_info = tor_info[0]
            if tor_info.state == "metaDL":
                stalled_time = time()
                if time() - meta_time >= 999999999:  # timeout while downloading metadata
                    client.torrents_pause(torrent_hashes=ext_hash)
                    sleep(0.3)
                    listener.onDownloadError("Dead Torrent!")
                    client.torrents_delete(torrent_hashes=ext_hash,
                                           delete_files=True)
                    client.auth_log_out()
                    break
            elif tor_info.state == "downloading":
                stalled_time = time()
                if STOP_DUPLICATE and not listener.isLeech and not dupChecked and ospath.isdir(path):
                    LOGGER.info('Checking File/Folder if already in Drive')
                    qbname = str(listdir(f'{path}')[-1])
                    if qbname.endswith('.!qB'):
                        qbname = ospath.splitext(qbname)[0]
                    if listener.isZip:
                        qbname = qbname + ".zip"
                    if not listener.extract:
                        qbmsg, button = GoogleDriveHelper().drive_list(
                            qbname, True)
                        if qbmsg:
                            msg = "File/Folder is already available in Drive."
                            client.torrents_pause(torrent_hashes=ext_hash)
                            sleep(0.3)
                            listener.onDownloadError(msg)
                            sendMarkup("Here are the search results:",
                                       listener.bot, listener.update, button)
                            client.torrents_delete(torrent_hashes=ext_hash,
                                                   delete_files=True)
                            client.auth_log_out()
                            break
                    dupChecked = True
                if not sizeChecked:
                    limit = None
                    if ZIP_UNZIP_LIMIT is not None and (listener.isZip
                                                        or listener.extract):
                        mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
                        limit = ZIP_UNZIP_LIMIT
                    elif TORRENT_DIRECT_LIMIT is not None:
                        mssg = f'Torrent limit is {TORRENT_DIRECT_LIMIT}GB'
                        limit = TORRENT_DIRECT_LIMIT
                    if limit is not None:
                        LOGGER.info('Checking File/Folder Size...')
                        sleep(1)
                        size = tor_info.size
                        if size > limit * 1024**3:
                            client.torrents_pause(torrent_hashes=ext_hash)
                            sleep(0.3)
                            listener.onDownloadError(
                                f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}"
                            )
                            client.torrents_delete(torrent_hashes=ext_hash,
                                                   delete_files=True)
                            client.auth_log_out()
                            break
                    sizeChecked = True
            elif tor_info.state == "stalledDL":
                if not rechecked and 0.9999 < tor_info.progress < 1:
                    LOGGER.info(
                        f"Force recheck - Name: {tor_info.name} Hash: {ext_hash} Downloaded Bytes: {tor_info.downloaded} Size: {tor_info.size} Total Size: {tor_info.total_size}"
                    )
                    client.torrents_recheck(torrent_hashes=ext_hash)
                    rechecked = True
                elif time() - stalled_time >= 999999999:  # timeout after downloading metadata
                    client.torrents_pause(torrent_hashes=ext_hash)
                    sleep(0.3)
                    listener.onDownloadError("Dead Torrent!")
                    client.torrents_delete(torrent_hashes=ext_hash,
                                           delete_files=True)
                    client.auth_log_out()
                    break
            elif tor_info.state == "missingFiles":
                client.torrents_recheck(torrent_hashes=ext_hash)
            elif tor_info.state == "error":
                client.torrents_pause(torrent_hashes=ext_hash)
                sleep(0.3)
                listener.onDownloadError(
                    "Not enough space for this torrent on the device")
                client.torrents_delete(torrent_hashes=ext_hash,
                                       delete_files=True)
                client.auth_log_out()
                break
            elif tor_info.state in [
                    "uploading", "queuedUP", "stalledUP", "forcedUP"
            ] and not uploaded:
                uploaded = True
                if not QB_SEED:
                    client.torrents_pause(torrent_hashes=ext_hash)
                if select:
                    for dirpath, subdir, files in walk(f"{path}",
                                                       topdown=False):
                        for filee in files:
                            if filee.endswith(".!qB") or filee.endswith(
                                    '.parts') and filee.startswith('.'):
                                osremove(ospath.join(dirpath, filee))
                        for folder in subdir:
                            if folder == ".unwanted":
                                rmtree(ospath.join(dirpath, folder))
                    for dirpath, subdir, files in walk(f"{path}",
                                                       topdown=False):
                        if not listdir(dirpath):
                            rmdir(dirpath)
                listener.onDownloadComplete()
                if QB_SEED:
                    with download_dict_lock:
                        if listener.uid not in list(download_dict.keys()):
                            client.torrents_delete(torrent_hashes=ext_hash,
                                                   delete_files=True)
                            client.auth_log_out()
                            break
                        download_dict[listener.uid] = QbDownloadStatus(
                            listener, client, gid, ext_hash, select)
                    update_all_messages()
                    LOGGER.info(f"Seeding started: {tor_info.name}")
                else:
                    client.torrents_delete(torrent_hashes=ext_hash,
                                           delete_files=True)
                    client.auth_log_out()
                    break
            elif tor_info.state == 'pausedUP' and QB_SEED:
                listener.onUploadError(
                    f"Seeding stopped with Ratio: {round(tor_info.ratio, 3)} and Time: {get_readable_time(tor_info.seeding_time)}"
                )
                client.torrents_delete(torrent_hashes=ext_hash,
                                       delete_files=True)
                client.auth_log_out()
                break
        except Exception as e:
            LOGGER.error(str(e))
Example #21
def _qb_listener(listener, client, ext_hash, select, path):
    stalled_time = time()
    uploaded = False
    sizeChecked = False
    dupChecked = False
    rechecked = False
    while True:
        sleep(4)
        try:
            tor_info = client.torrents_info(torrent_hashes=ext_hash)
            if len(tor_info) == 0:
                with download_dict_lock:
                    if listener.uid not in list(download_dict.keys()):
                        client.auth_log_out()
                        break
                continue
            tor_info = tor_info[0]
            if tor_info.state == "metaDL":
                stalled_time = time()
                if QB_TIMEOUT is not None and time() - tor_info.added_on >= QB_TIMEOUT:  # timeout while downloading metadata
                    _onDownloadError("Dead Torrent!", client, ext_hash,
                                     listener)
                    break
            elif tor_info.state == "downloading":
                stalled_time = time()
                if STOP_DUPLICATE and not dupChecked and ospath.isdir(path) and not listener.isLeech:
                    LOGGER.info('Checking File/Folder if already in Drive')
                    qbname = str(listdir(f'{path}')[-1])
                    if qbname.endswith('.!qB'):
                        qbname = ospath.splitext(qbname)[0]
                    if listener.isZip:
                        qbname = qbname + ".zip"
                    elif listener.extract:
                        try:
                            qbname = get_base_name(qbname)
                        except Exception:
                            qbname = None
                    if qbname is not None:
                        qbmsg, button = GoogleDriveHelper().drive_list(
                            qbname, True)
                        if qbmsg:
                            msg = "File/Folder is already available in Drive."
                            _onDownloadError(msg, client, ext_hash, listener)
                            sendMarkup("Here are the search results:",
                                       listener.bot, listener.update, button)
                            break
                    dupChecked = True
                if not sizeChecked:
                    sleep(1)
                    size = tor_info.size
                    arch = any([listener.isZip, listener.extract])
                    if STORAGE_THRESHOLD is not None:
                        acpt = check_storage_threshold(size, arch)
                        if not acpt:
                            msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
                            msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
                            _onDownloadError(msg, client, ext_hash, listener)
                            break
                    limit = None
                    if ZIP_UNZIP_LIMIT is not None and arch:
                        mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
                        limit = ZIP_UNZIP_LIMIT
                    elif TORRENT_DIRECT_LIMIT is not None:
                        mssg = f'Torrent limit is {TORRENT_DIRECT_LIMIT}GB'
                        limit = TORRENT_DIRECT_LIMIT
                    if limit is not None:
                        LOGGER.info('Checking File/Folder Size...')
                        if size > limit * 1024**3:
                            fmsg = f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}"
                            _onDownloadError(fmsg, client, ext_hash, listener)
                            break
                    sizeChecked = True
            elif tor_info.state == "stalledDL":
                if not rechecked and 0.9999 < tor_info.progress < 1:
                    msg = f"Force recheck - Name: {tor_info.name} Hash: "
                    msg += f"{ext_hash} Downloaded Bytes: {tor_info.downloaded} "
                    msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}"
                    LOGGER.info(msg)
                    client.torrents_recheck(torrent_hashes=ext_hash)
                    rechecked = True
                elif QB_TIMEOUT is not None and time() - stalled_time >= QB_TIMEOUT:  # timeout after downloading metadata
                    _onDownloadError("Dead Torrent!", client, ext_hash,
                                     listener)
                    break
            elif tor_info.state == "missingFiles":
                client.torrents_recheck(torrent_hashes=ext_hash)
            elif tor_info.state == "error":
                _onDownloadError("No enough space for this torrent on device",
                                 client, ext_hash, listener)
                break
            elif (tor_info.state.lower().endswith("up")
                  or tor_info.state == "uploading") and not uploaded:
                LOGGER.info(f"onQbDownloadComplete: {ext_hash}")
                uploaded = True
                if not QB_SEED:
                    client.torrents_pause(torrent_hashes=ext_hash)
                if select:
                    clean_unwanted(path)
                listener.onDownloadComplete()
                if QB_SEED and not listener.isLeech and not listener.extract:
                    with download_dict_lock:
                        if listener.uid not in list(download_dict.keys()):
                            client.torrents_delete(torrent_hashes=ext_hash,
                                                   delete_files=True)
                            client.auth_log_out()
                            break
                        download_dict[listener.uid] = QbDownloadStatus(
                            listener, client, ext_hash, select)
                    update_all_messages()
                    LOGGER.info(f"Seeding started: {tor_info.name}")
                else:
                    client.torrents_delete(torrent_hashes=ext_hash,
                                           delete_files=True)
                    client.auth_log_out()
                    break
            elif tor_info.state == 'pausedUP' and QB_SEED:
                listener.onUploadError(
                    f"Seeding stopped with Ratio: {round(tor_info.ratio, 3)} and Time: {get_readable_time(tor_info.seeding_time)}"
                )
                client.torrents_delete(torrent_hashes=ext_hash,
                                       delete_files=True)
                client.auth_log_out()
                update_all_messages()
                break
        except Exception as e:
            LOGGER.error(str(e))
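
A hedged sketch of how this listener loop would be started (the argument values are assumptions inferred from the signature above):

import threading

threading.Thread(
    target=_qb_listener,
    args=(listener, client, ext_hash, select, path),  # assumed call-site values
    daemon=True,  # don't block interpreter shutdown on a seeding torrent
).start()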