def sendStatusMessage(msg, bot):
    progress = get_readable_message()
    progress += f"<b>CPU:</b> {psutil.cpu_percent()}%" \
           f" <b>DISK:</b> {psutil.disk_usage('/').percent}%" \
           f" <b>RAM:</b> {psutil.virtual_memory().percent}%"
    with download_dict_lock:
        dlspeed_bytes = 0
        uldl_bytes = 0
        for download in list(download_dict.values()):
            speedy = download.speed()
            if download.status() == MirrorStatus.STATUS_DOWNLOADING:
                if 'KiB/s' in speedy:
                    dlspeed_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MiB/s' in speedy:
                    dlspeed_bytes += float(speedy.split('M')[0]) * 1048576
            if download.status() == MirrorStatus.STATUS_UPLOADING:
                if 'KB/s' in speedy:
                    uldl_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MB/s' in speedy:
                    uldl_bytes += float(speedy.split('M')[0]) * 1048576
        dlspeed = get_readable_file_size(dlspeed_bytes)
        ulspeed = get_readable_file_size(uldl_bytes)
        progress += f"\n<b>DL:</b>{dlspeed}ps 🔻| <b>UL:</b>{ulspeed}ps 🔺\n"
    with status_reply_dict_lock:
        if msg.message.chat.id in list(status_reply_dict.keys()):
            try:
                message = status_reply_dict[msg.message.chat.id]
                deleteMessage(bot, message)
                del status_reply_dict[msg.message.chat.id]
            except Exception as e:
                LOGGER.error(str(e))
                del status_reply_dict[msg.message.chat.id]
        if len(progress) == 0:
            progress = "Starting DL"
        message = sendMessage(progress, bot, msg)
        status_reply_dict[msg.message.chat.id] = message
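
The four-branch speed parsing above (repeated in several of the examples below) converts human-readable speed strings by hand; a minimal sketch of the same conversion factored into one helper. The function name is an assumption, not part of the original bot.

def readable_speed_to_bytes(speed: str) -> float:
    # Convert a human-readable speed such as '1.5MiB/s' or '300KB/s'
    # into bytes per second, mirroring the branches used above.
    if 'K' in speed:
        return float(speed.split('K')[0]) * 1024
    if 'M' in speed:
        return float(speed.split('M')[0]) * 1048576
    return 0.0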
Example 2
def update_all_messages():
    msg = get_readable_message()
    with download_dict_lock:
        dlspeed_bytes = 0
        uldl_bytes = 0
        for download in list(download_dict.values()):
            speedy = download.speed()
            if download.status() == MirrorStatus.STATUS_DOWNLOADING:
                if 'K' in speedy:
                    dlspeed_bytes += float(speedy.split('K')[0]) * 1024
                elif 'M' in speedy:
                    dlspeed_bytes += float(speedy.split('M')[0]) * 1048576
            if download.status() == MirrorStatus.STATUS_UPLOADING:
                if 'KB/s' in speedy:
                    uldl_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MB/s' in speedy:
                    uldl_bytes += float(speedy.split('M')[0]) * 1048576
        dlspeed = get_readable_file_size(dlspeed_bytes)
        ulspeed = get_readable_file_size(uldl_bytes)
        msg += f"<b>DL:</b> {dlspeed}ps 🔻 | <b>UL:</b> {ulspeed}ps 🔺"
    with status_reply_dict_lock:
        for chat_id in list(status_reply_dict.keys()):
            if status_reply_dict[
                    chat_id] and msg != status_reply_dict[chat_id].text:
                if len(msg) == 0:
                    msg = "Starting DL"
                try:
                    keyboard = [[
                        InlineKeyboardButton("♻️ Stats",
                                             callback_data="stats_")
                    ]]
                    editMessage(msg,
                                status_reply_dict[chat_id],
                                reply_markup=InlineKeyboardMarkup(keyboard))
                except Exception as e:
                    LOGGER.error(str(e))
                status_reply_dict[chat_id].text = msg
Example 3
def update_all_messages():
    total, used, free = shutil.disk_usage('.')
    used = get_readable_file_size(used)
    free = get_readable_file_size(free)
    msg = get_readable_message()
    msg += f"<b>📟𝙲𝙿𝚄:</b> {psutil.cpu_percent()}%" \
           f" <b>💾𝚁𝙰𝙼:</b> {psutil.virtual_memory().percent}%" \
           f" <b>💿𝙳𝙸𝚂𝙺:</b> {psutil.disk_usage('/').percent}%"
    with download_dict_lock:
        dlspeed_bytes = 0
        uldl_bytes = 0
        for download in list(download_dict.values()):
            speedy = download.speed()
            if download.status() == MirrorStatus.STATUS_DOWNLOADING:
                if 'KiB/s' in speedy:
                    dlspeed_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MiB/s' in speedy:
                    dlspeed_bytes += float(speedy.split('M')[0]) * 1048576
            if download.status() == MirrorStatus.STATUS_UPLOADING:
                if 'KB/s' in speedy:
                    uldl_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MB/s' in speedy:
                    uldl_bytes += float(speedy.split('M')[0]) * 1048576
        dlspeed = get_readable_file_size(dlspeed_bytes)
        ulspeed = get_readable_file_size(uldl_bytes)
        msg += f"\n<b>📮𝚄𝚂𝙴𝙳8:</b> {used} | <b>🏮𝙵𝚁𝙴𝙴:</b> {free}\n<b>𝗗𝗢𝗪𝗡:</b> {dlspeed}ps 🔻 | <b>𝗨𝗣:</b> {ulspeed}ps 🔺\n"
    with status_reply_dict_lock:
        for chat_id in list(status_reply_dict.keys()):
            if status_reply_dict[
                    chat_id] and msg != status_reply_dict[chat_id].text:
                if len(msg) == 0:
                    msg = "Starting DL"
                try:
                    editMessage(msg, status_reply_dict[chat_id])
                except Exception as e:
                    LOGGER.error(str(e))
                status_reply_dict[chat_id].text = msg
Example 4
 def onDownloadError(self, error):
     error = error.replace('<', ' ')
     error = error.replace('>', ' ')
     LOGGER.info(self.update.effective_chat.id)
     with download_dict_lock:
         try:
             download = download_dict[self.uid]
             del download_dict[self.uid]
             LOGGER.info(f"Deleting folder: {download.path()}")
             fs_utils.clean_download(download.path())
             LOGGER.info(str(download_dict))
         except Exception as e:
             LOGGER.error(str(e))
         count = len(download_dict)
     if self.message.from_user.username:
         uname = f"@{self.message.from_user.username}"
     else:
         uname = f'<a href="tg://user?id={self.message.from_user.id}">{self.message.from_user.first_name}</a>'
     msg = f"{uname} your download has been stopped due to: {error}"
     sendMessage(msg, self.bot, self.update)
     if count == 0:
         self.clean()
     else:
         update_all_messages()
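
fs_utils.clean_download() is called above but not shown; a hedged sketch of what such a helper might do, assuming it simply removes the download folder.

import shutil
from os import path as ospath

def clean_download(path: str):
    # Hypothetical helper consistent with its use above: remove the
    # downloaded folder if it still exists.
    if ospath.exists(path):
        LOGGER.info(f"Cleaning download: {path}")
        shutil.rmtree(path, ignore_errors=True)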
Example 5
def shellExecute(bot: Bot, update: Update):
    cmd = update.message.text.split(' ',maxsplit=1)
    if len(cmd) == 1:
        sendMessage("No command provided!", bot, update)
        return
    LOGGER.info(cmd)
    output = shell(cmd[1])
    if output[1].decode():
        LOGGER.error(f"Shell: {output[1].decode()}")
    if len(output[0].decode()) > 4000:
        with open("shell.txt",'w') as f:
            f.write(f"Output\n-----------\n{output[0].decode()}\n")
            if output[1]:
                f.write(f"STDError\n-----------\n{output[1].decode()}\n")
        with open("shell.txt",'rb') as f:
            bot.send_document(document=f, filename=f.name,
                                  reply_to_message_id=update.message.message_id,
                                  chat_id=update.message.chat_id)  
    else:
        if output[1].decode():
            sendMessage(f"<code>{output[1].decode()}</code>", bot, update)
            return
        else:
            sendMessage(f"<code>{output[0].decode()}</code>", bot, update)
Example 6
def update_all_messages():
    msg = get_readable_message()
    msg += f"<b>📊 Performance Meter 📊</b>\n\n" \
           f"<b>🖥️ CPU  : {psutil.cpu_percent()}%</b>\n" \
           f"<b>🗃️ DISK : {psutil.disk_usage('/').percent}%</b>\n" \
           f"<b>⚙️ RAM : {psutil.virtual_memory().percent}%</b>"
    with download_dict_lock:
        dlspeed_bytes = 0
        uldl_bytes = 0
        for download in list(download_dict.values()):
            speedy = download.speed()
            if download.status() == MirrorStatus.STATUS_DOWNLOADING:
                if 'KiB/s' in speedy:
                    dlspeed_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MiB/s' in speedy:
                    dlspeed_bytes += float(speedy.split('M')[0]) * 1048576
            if download.status() == MirrorStatus.STATUS_UPLOADING:
                if 'KB/s' in speedy:
                    uldl_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MB/s' in speedy:
                    uldl_bytes += float(speedy.split('M')[0]) * 1048576
        dlspeed = get_readable_file_size(dlspeed_bytes)
        ulspeed = get_readable_file_size(uldl_bytes)
        msg += f"<b>\n\n⚡️ Speed Meter ⚡️</b>\n" \
                    f"<b>D : {dlspeed}/s 🔻 </b> | <b>U : {ulspeed}/s 🔺</b>"
    with status_reply_dict_lock:
        for chat_id in list(status_reply_dict.keys()):
            if status_reply_dict[
                    chat_id] and msg != status_reply_dict[chat_id].text:
                if len(msg) == 0:
                    msg = "Starting DL"
                try:
                    editMessage(msg, status_reply_dict[chat_id])
                except Exception as e:
                    LOGGER.error(str(e))
                status_reply_dict[chat_id].text = msg
Example 7
def shellExecute(bot: Bot, update: Update):
    cmd = update.message.text.split(' ',maxsplit=1)
    if len(cmd) == 1:
        sendMessage("Heç bir əmr verilməyib!", bot, update)
        return
    LOGGER.info(cmd)
    output = shell(cmd[1])
    if output[1].decode():
        LOGGER.error(f"Qabıq: {output[1].decode()}")
    if len(output[0].decode()) > 4000:
        with open("qabıq.txt",'w') as f:
            f.write(f"Çıxış\n-----------\n{output[0].decode()}\n")
            if output[1]:
                f.write(f"STDXəta\n-----------\n{output[1].decode()}\n")
        with open("qabıq.txt",'rb') as f:
            bot.send_document(document=f, filename=f.name,
                                  reply_to_message_id=update.message.message_id,
                                  chat_id=update.message.chat_id)  
    else:
        if output[1].decode():
            sendMessage(f"<code>{output[1].decode()}</code>", bot, update)
            return
        else:
            sendMessage(f"<code>{output[0].decode()}</code>", bot, update)
Example 8
def update_all_messages():
    total, used, free = shutil.disk_usage('.')
    free = get_readable_file_size(free)
    currentTime = get_readable_time(time.time() - botStartTime)
    msg = get_readable_message()
    msg += f"<b>CPU:</b> {psutil.cpu_percent()}%" \
           f" <b>RAM:</b> {psutil.virtual_memory().percent}%" \
           f" <b>DISK:</b> {psutil.disk_usage('/').percent}%"
    with download_dict_lock:
        dlspeed_bytes = 0
        uldl_bytes = 0
        for download in list(download_dict.values()):
            speedy = download.speed()
            if download.status() == MirrorStatus.STATUS_DOWNLOADING:
                if 'K' in speedy:
                    dlspeed_bytes += float(speedy.split('K')[0]) * 1024
                elif 'M' in speedy:
                    dlspeed_bytes += float(speedy.split('M')[0]) * 1048576 
            if download.status() == MirrorStatus.STATUS_UPLOADING:
                if 'KB/s' in speedy:
                    uldl_bytes += float(speedy.split('K')[0]) * 1024
                elif 'MB/s' in speedy:
                    uldl_bytes += float(speedy.split('M')[0]) * 1048576
        dlspeed = get_readable_file_size(dlspeed_bytes)
        ulspeed = get_readable_file_size(uldl_bytes)
        msg += f"\n<b>FREE:</b> {free} | <b>UPTIME:</b> {currentTime}\n<b>DL:</b> {dlspeed}ps 🔻 | <b>UL:</b> {ulspeed}ps 🔺\n"
    with status_reply_dict_lock:
        for chat_id in list(status_reply_dict.keys()):
            if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id].text:
                if len(msg) == 0:
                    msg = "Starting DL"
                try:
                    editMessage(msg, status_reply_dict[chat_id])
                except Exception as e:
                    LOGGER.error(str(e))
                status_reply_dict[chat_id].text = msg
Example 9
 def onDownloadComplete(self):
     with download_dict_lock:
         LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
         download = download_dict[self.uid]
         name = str(download.name()).replace('/', '')
         gid = download.gid()
         size = download.size_raw()
         if name == "None" or self.isQbit:
             name = listdir(f'{DOWNLOAD_DIR}{self.uid}')[-1]
         m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     if self.isZip:
         try:
             with download_dict_lock:
                 download_dict[self.uid] = ZipStatus(name, m_path, size)
             pswd = self.pswd
             path = m_path + ".zip"
             LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
             if pswd is not None:
                 if self.isLeech and int(size) > TG_SPLIT_SIZE:
                     path = m_path + ".zip"
                     srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{pswd}", path, m_path])
                 else:
                     srun(["7z", "a", "-mx=0", f"-p{pswd}", path, m_path])
             elif self.isLeech and int(size) > TG_SPLIT_SIZE:
                 path = m_path + ".zip"
                 srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
             else:
                 srun(["7z", "a", "-mx=0", path, m_path])
         except FileNotFoundError:
             LOGGER.info('File to archive not found!')
             self.onUploadError('Internal error occurred!!')
             return
         try:
             rmtree(m_path)
         except:
             osremove(m_path)
     elif self.extract:
         try:
             if ospath.isfile(m_path):
                 path = get_base_name(m_path)
             LOGGER.info(f"Extracting: {name}")
             with download_dict_lock:
                 download_dict[self.uid] = ExtractStatus(name, m_path, size)
             pswd = self.pswd
             if ospath.isdir(m_path):
                 for dirpath, subdir, files in walk(m_path, topdown=False):
                     for file_ in files:
                         if search(r'\.part0*1.rar$', file_) or search(r'\.7z.0*1$', file_) \
                            or (file_.endswith(".rar") and not search(r'\.part\d+.rar$', file_)) \
                            or file_.endswith(".zip") or search(r'\.zip.0*1$', file_):
                             m_path = ospath.join(dirpath, file_)
                             if pswd is not None:
                                 result = srun(["7z", "x", f"-p{pswd}", m_path, f"-o{dirpath}", "-aot"])
                             else:
                                 result = srun(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
                             if result.returncode != 0:
                                 LOGGER.error('Unable to extract archive!')
                     for file_ in files:
                         if file_.endswith(".rar") or search(r'\.r\d+$', file_) \
                            or search(r'\.7z.\d+$', file_) or search(r'\.z\d+$', file_) \
                            or search(r'\.zip.\d+$', file_) or file_.endswith(".zip"):
                             del_path = ospath.join(dirpath, file_)
                             osremove(del_path)
                 path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
             else:
                 if pswd is not None:
                     result = srun(["bash", "pextract", m_path, pswd])
                 else:
                     result = srun(["bash", "extract", m_path])
                 if result.returncode == 0:
                     LOGGER.info(f"Extract Path: {path}")
                     osremove(m_path)
                     LOGGER.info(f"Deleting archive: {m_path}")
                 else:
                     LOGGER.error('Unable to extract archive! Uploading anyway')
                     path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
         except NotSupportedExtractionArchive:
             LOGGER.info("Not any valid archive, uploading file as it is.")
             path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     else:
         path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     up_name = PurePath(path).name
     up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
     size = get_path_size(f'{DOWNLOAD_DIR}{self.uid}')
     if self.isLeech and not self.isZip:
         checked = False
         for dirpath, subdir, files in walk(f'{DOWNLOAD_DIR}{self.uid}', topdown=False):
             for file_ in files:
                 f_path = ospath.join(dirpath, file_)
                 f_size = ospath.getsize(f_path)
                 if int(f_size) > TG_SPLIT_SIZE:
                     if not checked:
                         checked = True
                         with download_dict_lock:
                             download_dict[self.uid] = SplitStatus(up_name, up_path, size)
                         LOGGER.info(f"Splitting: {up_name}")
                     fssplit(f_path, f_size, file_, dirpath, TG_SPLIT_SIZE)
                     osremove(f_path)
     if self.isLeech:
         LOGGER.info(f"Leech Name: {up_name}")
         tg = TgUploader(up_name, self)
         tg_upload_status = TgUploadStatus(tg, size, gid, self)
         with download_dict_lock:
             download_dict[self.uid] = tg_upload_status
         update_all_messages()
         tg.upload()
     else:
         LOGGER.info(f"Upload Name: {up_name}")
         drive = GoogleDriveHelper(up_name, self)
         upload_status = UploadStatus(drive, size, gid, self)
         with download_dict_lock:
             download_dict[self.uid] = upload_status
         update_all_messages()
         drive.upload(up_name)
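
get_base_name() above is expected to strip an archive extension and raise NotSupportedExtractionArchive for anything else; a hedged sketch of that behavior (the extension list and the exception definition here are assumptions).

class NotSupportedExtractionArchive(Exception):
    pass

def get_base_name(orig_path: str) -> str:
    # Strip a known archive extension; raise if the file is not an archive.
    extensions = ('.tar.gz', '.tar.bz2', '.zip', '.rar', '.7z', '.tar', '.gz', '.bz2')
    for ext in extensions:
        if orig_path.lower().endswith(ext):
            return orig_path[: -len(ext)]
    raise NotSupportedExtractionArchive(f"Unsupported archive: {orig_path}")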
Example 10
def _mirror(bot, update, isZip=False, extract=False, isQbit=False, isLeech=False, pswd=None):
    mesg = update.message.text.split('\n')
    message_args = mesg[0].split(' ', maxsplit=1)
    name_args = mesg[0].split('|', maxsplit=1)
    qbitsel = False
    try:
        link = message_args[1]
        if link.startswith("s ") or link == "s":
            qbitsel = True
            message_args = mesg[0].split(' ', maxsplit=2)
            link = message_args[2].strip()
        if link.startswith("|") or link.startswith("pswd: "):
            link = ''
    except IndexError:
        link = ''
    try:
        name = name_args[1]
        name = name.split(' pswd: ')[0]
        name = name.strip()
    except IndexError:
        name = ''
    link = resplit(r"pswd:| \|", link)[0]
    link = link.strip()
    pswdMsg = mesg[0].split(' pswd: ')
    if len(pswdMsg) > 1:
        pswd = pswdMsg[1]

    if update.message.from_user.username:
        tag = f"@{update.message.from_user.username}"
    else:
        tag = update.message.from_user.mention_html(update.message.from_user.first_name)

    reply_to = update.message.reply_to_message
    if reply_to is not None:
        file = None
        media_array = [reply_to.document, reply_to.video, reply_to.audio]
        for i in media_array:
            if i is not None:
                file = i
                break
        if (
            not is_url(link)
            and not is_magnet(link)
            or len(link) == 0
        ):
            if not reply_to.from_user.is_bot:
                if reply_to.from_user.username:
                    tag = f"@{reply_to.from_user.username}"
                else:
                    tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)

            if file is None:
                reply_text = reply_to.text
                if is_url(reply_text) or is_magnet(reply_text):
                    link = reply_text.strip()
            elif isQbit:
                file_name = str(time()).replace(".", "") + ".torrent"
                link = file.get_file().download(custom_path=file_name)
            elif file.mime_type != "application/x-bittorrent":
                listener = MirrorListener(bot, update, isZip, extract, isQbit, isLeech, pswd, tag)
                tg_downloader = TelegramDownloadHelper(listener)
                ms = update.message
                tg_downloader.add_download(ms, f'{DOWNLOAD_DIR}{listener.uid}/', name)
                return
            else:
                link = file.get_file().file_path

    if len(mesg) > 1:
        try:
            ussr = quote(mesg[1], safe='')
            pssw = quote(mesg[2], safe='')
            link = link.split("://", maxsplit=1)
            link = f'{link[0]}://{ussr}:{pssw}@{link[1]}'
        except IndexError:
            pass

    if not is_url(link) and not is_magnet(link) and not ospath.exists(link):
        help_msg = "<b>Send link along with command line:</b>"
        help_msg += "\n<code>/command</code> {link} |newname pswd: mypassword [𝚣𝚒𝚙/𝚞𝚗𝚣𝚒𝚙]"
        help_msg += "\n\n<b>By replying to link or file:</b>"
        help_msg += "\n<code>/command</code> |newname pswd: mypassword [𝚣𝚒𝚙/𝚞𝚗𝚣𝚒𝚙]"
        help_msg += "\n\n<b>Direct link authorization:</b>"
        help_msg += "\n<code>/command</code> {link} |newname pswd: mypassword\nusername\npassword"
        help_msg += "\n\n<b>Qbittorrent selection:</b>"
        help_msg += "\n<code>/qbcommand</code> <b>s</b> {link} or by replying to {file}"
        return sendMessage(help_msg, bot, update)

    LOGGER.info(link)
    gdtot_link = is_gdtot_link(link)

    if not is_mega_link(link) and not isQbit and not is_magnet(link) \
       and not ospath.exists(link) and not is_gdrive_link(link) and not link.endswith('.torrent'):
        content_type = get_content_type(link)
        if content_type is None or match(r'text/html|text/plain', content_type):
            try:
                link = direct_link_generator(link)
                LOGGER.info(f"Generated link: {link}")
            except DirectDownloadLinkException as e:
                LOGGER.info(str(e))
                if str(e).startswith('ERROR:'):
                    return sendMessage(str(e), bot, update)
    elif isQbit and not is_magnet(link) and not ospath.exists(link):
        content_type = get_content_type(link)
        if content_type is None  or link.endswith('.torrent') \
           or match(r'application/x-bittorrent|application/octet-stream', content_type):
            try:
                resp = requests.get(link, timeout=10)
                if resp.status_code == 200:
                    file_name = str(time()).replace(".", "") + ".torrent"
                    with open(file_name, "wb") as torrent_file:
                        torrent_file.write(resp.content)
                    link = f"{file_name}"
                else:
                    return sendMessage(f"ERROR: link got HTTP response: {resp.status_code}", bot, update)
            except Exception as e:
                LOGGER.error(str(e))
                error = str(e).replace('<', ' ').replace('>', ' ')
                return sendMessage(error, bot, update)
        else:
            msg = "Qb commands for torrents only. if you are trying to dowload torrent then report."
            return sendMessage(msg, bot, update)

    listener = MirrorListener(bot, update, isZip, extract, isQbit, isLeech, pswd, tag)

    if is_gdrive_link(link):
        if not isZip and not extract and not isLeech:
            gmsg = f"Use /{BotCommands.CloneCommand} to clone Google Drive file/folder\n\n"
            gmsg += f"Use /{BotCommands.ZipMirrorCommand} to make zip of Google Drive folder\n\n"
            gmsg += f"Use /{BotCommands.UnzipMirrorCommand} to extracts Google Drive archive file"
            return sendMessage(gmsg, bot, update)
        Thread(target=add_gd_download, args=(link, listener, gdtot_link)).start()

    elif is_mega_link(link):
        if BLOCK_MEGA_LINKS:
            sendMessage("Mega links are blocked!", bot, update)
            return
        link_type = get_mega_link_type(link)
        if link_type == "folder" and BLOCK_MEGA_FOLDER:
            sendMessage("Mega folder are blocked!", bot, update)
        else:
            Thread(target=add_mega_download, args=(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener)).start()

    elif isQbit and (is_magnet(link) or ospath.exists(link)):
        Thread(target=add_qb_torrent, args=(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener, qbitsel)).start()

    else:
        Thread(target=add_aria2c_download, args=(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener, name)).start()
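
is_url() and is_magnet() are used throughout _mirror() but not shown; a rough sketch of how such checks are typically written with re. The exact patterns are assumptions.

import re

URL_RE = re.compile(r'^(?:https?|ftp)://\S+$')
MAGNET_RE = re.compile(r'^magnet:\?xt=urn:btih:[a-zA-Z0-9]+')

def is_url(text: str) -> bool:
    return bool(URL_RE.match(text or ''))

def is_magnet(text: str) -> bool:
    return bool(MAGNET_RE.match(text or ''))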
Example 11
 def clone(self, link, status):
     self.transferred_size = 0
     try:
         file_id = self.getIdFromUrl(link)
     except (KeyError, IndexError):
         msg = "Google drive ID could not be found in the provided link"
         return msg
     msg = ""
     LOGGER.info(f"File ID: {file_id}")
     try:
         meta = self.__service.files().get(
             supportsAllDrives=True,
             fileId=file_id,
             fields="name,id,mimeType,size").execute()
     except Exception as e:
         return f"{str(e).replace('>', '').replace('<', '')}"
     if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE:
         dir_id = self.check_folder_exists(meta.get('name'),
                                           GDRIVE_FOLDER_ID)
         if not dir_id:
             dir_id = self.create_directory(meta.get('name'),
                                            GDRIVE_FOLDER_ID)
         try:
             result = self.cloneFolder(meta.get('name'), meta.get('name'),
                                       meta.get('id'), dir_id, status)
         except Exception as e:
             if isinstance(e, RetryError):
                 LOGGER.info(
                     f"Total Attempts: {e.last_attempt.attempt_number}")
                 err = e.last_attempt.exception()
             else:
                 err = str(e).replace('>', '').replace('<', '')
             LOGGER.error(err)
             return err
         status.set_status(True)
         msg += f'<a href="{self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id)}">{meta.get("name")}</a>' \
                f' ({get_readable_file_size(self.transferred_size)})'
         if INDEX_URL:
             url = requests.utils.requote_uri(
                 f'{INDEX_URL}/{meta.get("name")}/')
             msg += f' | <a href="{url}"> Index URL</a>'
     else:
         try:
             file = self.check_file_exists(meta.get('id'), GDRIVE_FOLDER_ID)
             if file:
                 status.checkFileExist(True)
             if not file:
                 status.checkFileExist(False)
                 file = self.copyFile(meta.get('id'), GDRIVE_FOLDER_ID,
                                      status)
         except Exception as e:
             if isinstance(e, RetryError):
                 LOGGER.info(
                     f"Total Attempts: {e.last_attempt.attempt_number}")
                 err = e.last_attempt.exception()
             else:
                 err = str(e).replace('>', '').replace('<', '')
             LOGGER.error(err)
             return err
         msg += f'<a href="{self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id"))}">{file.get("name")}</a>'
         try:
             msg += f' ({get_readable_file_size(int(meta.get("size")))}) '
             if INDEX_URL is not None:
                 url = requests.utils.requote_uri(
                     f'{INDEX_URL}/{file.get("name")}')
                 msg += f' | <a href="{url}"> Index URL</a>'
         except TypeError:
             pass
     return msg
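
check_folder_exists() is consulted above before creating a destination directory; a hedged sketch of such a lookup written against the Drive v3 files().list query syntax (the method name and return convention are assumptions).

 def check_folder_exists(self, name, parent_id):
     # Return the ID of an existing sub-folder with this name, or None.
     query = (f"name='{name}' and '{parent_id}' in parents and "
              f"mimeType='application/vnd.google-apps.folder' and trashed=false")
     response = self.__service.files().list(supportsAllDrives=True,
                                            includeItemsFromAllDrives=True,
                                            q=query,
                                            spaces='drive',
                                            fields='files(id, name)').execute()
     folders = response.get('files', [])
     return folders[0].get('id') if folders else None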
Example 12
def deleteMessage(message: Message):
    try:
        message.delete()
    except Exception as e:
        LOGGER.error(str(e))
Example 13
 def cloneFolder(self,
                 name,
                 local_path,
                 folder_id,
                 parent_id,
                 status,
                 ignoreList=[]):
     page_token = None
     q = f"'{folder_id}' in parents"
     files = []
     LOGGER.info(f"Syncing: {local_path}")
     new_id = None
     while True:
         response = self.__service.files().list(
             supportsTeamDrives=True,
             includeTeamDriveItems=True,
             q=q,
             spaces='drive',
             fields='nextPageToken, files(id, name, mimeType,size)',
             pageToken=page_token).execute()
         for file in response.get('files', []):
             files.append(file)
         page_token = response.get('nextPageToken', None)
         if page_token is None:
             break
     if len(files) == 0:
         return parent_id
     for file in files:
         if file.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE:
             file_path = os.path.join(local_path, file.get('name'))
             current_dir_id = self.check_folder_exists(
                 file.get('name'), parent_id)
             if not current_dir_id:
                 current_dir_id = self.create_directory(
                     file.get('name'), parent_id)
             if not str(file.get('id')) in ignoreList:
                 new_id = self.cloneFolder(file.get('name'), file_path,
                                           file.get('id'), current_dir_id,
                                           status, ignoreList)
             else:
                 LOGGER.info("Ignoring FolderID from clone: " +
                             str(file.get('id')))
         else:
             try:
                 if not self.check_file_exists(file.get('name'), parent_id):
                     status.checkFileExist(False)
                     self.transferred_size += int(file.get('size'))
                     status.add_size(int(file.get('size')))
                     status.set_name(file.get('name'))
                 else:
                     status.checkFileExist(True)
             except TypeError:
                 pass
             try:
                 if not self.check_file_exists(file.get('name'), parent_id):
                     self.copyFile(file.get('id'), parent_id, status)
                     status.checkFileExist(False)
                 else:
                     status.checkFileExist(True)
                 new_id = parent_id
             except Exception as e:
                 if isinstance(e, RetryError):
                     LOGGER.info(
                         f"Total Attempts: {e.last_attempt.attempt_number}")
                     err = e.last_attempt.exception()
                 else:
                     err = e
                 LOGGER.error(err)
     return new_id
Example 14
def _qb_listener(listener, client, ext_hash, select, path):
    stalled_time = time()
    uploaded = False
    sizeChecked = False
    dupChecked = False
    rechecked = False
    while True:
        sleep(4)
        try:
            tor_info = client.torrents_info(torrent_hashes=ext_hash)
            if len(tor_info) == 0:
                with download_dict_lock:
                    if listener.uid not in list(download_dict.keys()):
                        client.auth_log_out()
                        break
                continue
            tor_info = tor_info[0]
            if tor_info.state == "metaDL":
                stalled_time = time()
                if QB_TIMEOUT is not None and time() - tor_info.added_on >= QB_TIMEOUT:
                    # timeout while downloading metadata
                    _onDownloadError("Dead Torrent!", client, ext_hash, listener)
                    break
            elif tor_info.state == "downloading":
                stalled_time = time()
                if STOP_DUPLICATE and not dupChecked and ospath.isdir(path) and not listener.isLeech:
                    LOGGER.info('Checking if File/Folder is already in Drive')
                    qbname = str(listdir(path)[-1])
                    if qbname.endswith('.!qB'):
                        qbname = ospath.splitext(qbname)[0]
                    if listener.isZip:
                        qbname = qbname + ".zip"
                    elif listener.extract:
                        try:
                            qbname = get_base_name(qbname)
                        except:
                            qbname = None
                    if qbname is not None:
                        qbmsg, button = GoogleDriveHelper().drive_list(
                            qbname, True)
                        if qbmsg:
                            msg = "File/Folder is already available in Drive."
                            _onDownloadError(msg, client, ext_hash, listener)
                            sendMarkup("Here are the search results:",
                                       listener.bot, listener.update, button)
                            break
                    dupChecked = True
                if not sizeChecked:
                    sleep(1)
                    size = tor_info.size
                    arch = any([listener.isZip, listener.extract])
                    if STORAGE_THRESHOLD is not None:
                        acpt = check_storage_threshold(size, arch)
                        if not acpt:
                            msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
                            msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
                            _onDownloadError(msg, client, ext_hash, listener)
                            break
                    limit = None
                    if ZIP_UNZIP_LIMIT is not None and arch:
                        mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
                        limit = ZIP_UNZIP_LIMIT
                    elif TORRENT_DIRECT_LIMIT is not None:
                        mssg = f'Torrent limit is {TORRENT_DIRECT_LIMIT}GB'
                        limit = TORRENT_DIRECT_LIMIT
                    if limit is not None:
                        LOGGER.info('Checking File/Folder Size...')
                        if size > limit * 1024**3:
                            fmsg = f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}"
                            _onDownloadError(fmsg, client, ext_hash, listener)
                            break
                    sizeChecked = True
            elif tor_info.state == "stalledDL":
                if not rechecked and 0.9999 < tor_info.progress < 1:
                    msg = f"Force recheck - Name: {tor_info.name} Hash: "
                    msg += f"{ext_hash} Downloaded Bytes: {tor_info.downloaded} "
                    msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}"
                    LOGGER.info(msg)
                    client.torrents_recheck(torrent_hashes=ext_hash)
                    rechecked = True
                elif QB_TIMEOUT is not None and time() - stalled_time >= QB_TIMEOUT:
                    # timeout after downloading metadata
                    _onDownloadError("Dead Torrent!", client, ext_hash, listener)
                    break
            elif tor_info.state == "missingFiles":
                client.torrents_recheck(torrent_hashes=ext_hash)
            elif tor_info.state == "error":
                _onDownloadError("No enough space for this torrent on device",
                                 client, ext_hash, listener)
                break
            elif (tor_info.state.lower().endswith("up")
                  or tor_info.state == "uploading") and not uploaded:
                LOGGER.info(f"onQbDownloadComplete: {ext_hash}")
                uploaded = True
                if not QB_SEED:
                    client.torrents_pause(torrent_hashes=ext_hash)
                if select:
                    clean_unwanted(path)
                listener.onDownloadComplete()
                if QB_SEED and not listener.isLeech and not listener.extract:
                    with download_dict_lock:
                        if listener.uid not in list(download_dict.keys()):
                            client.torrents_delete(torrent_hashes=ext_hash,
                                                   delete_files=True)
                            client.auth_log_out()
                            break
                        download_dict[listener.uid] = QbDownloadStatus(
                            listener, client, ext_hash, select)
                    update_all_messages()
                    LOGGER.info(f"Seeding started: {tor_info.name}")
                else:
                    client.torrents_delete(torrent_hashes=ext_hash,
                                           delete_files=True)
                    client.auth_log_out()
                    break
            elif tor_info.state == 'pausedUP' and QB_SEED:
                listener.onUploadError(
                    f"Seeding stopped with Ratio: {round(tor_info.ratio, 3)} and Time: {get_readable_time(tor_info.seeding_time)}"
                )
                client.torrents_delete(torrent_hashes=ext_hash,
                                       delete_files=True)
                client.auth_log_out()
                update_all_messages()
                break
        except Exception as e:
            LOGGER.error(str(e))
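
check_storage_threshold() is called above before accepting a torrent; a simplified sketch of such a check, assuming STORAGE_THRESHOLD is expressed in GB and that a zip/extract job roughly doubles the space needed.

from shutil import disk_usage

def check_storage_threshold(size, arch=False):
    # Accept the download only if at least STORAGE_THRESHOLD GB would
    # remain free after downloading (and, if archiving, repacking) it.
    free = disk_usage(DOWNLOAD_DIR).free
    needed = size * 2 if arch else size
    return free - needed >= STORAGE_THRESHOLD * 1024**3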
Example 15
 def clone(self, link, status, ignoreList=[]):
     self.transferred_size = 0
     try:
         file_id = self.getIdFromUrl(link)
     except (KeyError, IndexError):
         msg = "No se pudo encontrar el ID de la unidad de Google en el enlace proporcionado"
         return msg
     msg = ""
     LOGGER.info(f"File ID: {file_id}")
     try:
         meta = self.__service.files().get(
             supportsAllDrives=True,
             fileId=file_id,
             fields="name,id,mimeType,size").execute()
         dest_meta = self.__service.files().get(
             supportsAllDrives=True,
             fileId=self.gparentid,
             fields="name,id,size").execute()
         status.SetMainFolder(
             meta.get('name'),
             self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(meta.get('id')))
         status.SetDestinationFolder(
             dest_meta.get('name'),
             self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(
                 dest_meta.get('id')))
     except Exception as e:
         return f"{str(e).replace('>', '').replace('<', '')}"
     if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE:
         dir_id = self.check_folder_exists(meta.get('name'), self.gparentid)
         if not dir_id:
             dir_id = self.create_directory(meta.get('name'),
                                            self.gparentid)
         try:
             self.cloneFolder(meta.get('name'), meta.get('name'),
                              meta.get('id'), dir_id, status, ignoreList)
         except Exception as e:
             if isinstance(e, RetryError):
                 LOGGER.info(
                     f"Total Attempts: {e.last_attempt.attempt_number}")
                 err = e.last_attempt.exception()
             else:
                 err = str(e).replace('>', '').replace('<', '')
             LOGGER.error(err)
             return err
         status.set_status(True)
         msg += f'<a href="{self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id)}">{meta.get("name")}</a>' \
                f' ({get_readable_file_size(self.transferred_size)})'
         if INDEX_URL:
             url = requests.utils.requote_uri(
                 f'{INDEX_URL}/{meta.get("name")}/')
             msg += f' | <a href="{url}"> Index URL</a>'
     else:
         try:
             file = self.check_file_exists(meta.get('id'), self.gparentid)
             if file:
                 status.checkFileExist(True)
             if not file:
                 status.checkFileExist(False)
                 file = self.copyFile(meta.get('id'), self.gparentid,
                                      status)
         except Exception as e:
             if isinstance(e, RetryError):
                 LOGGER.info(
                     f"Intentos totales: {e.last_attempt.attempt_number}")
                 err = e.last_attempt.exception()
             else:
                 err = str(e).replace('>', '').replace('<', '')
             LOGGER.error(err)
             return err
         msg += f'<a href="{self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id"))}">{file.get("name")}</a>'
         try:
             msg += f' ({get_readable_file_size(int(meta.get("size")))}) '
             if INDEX_URL is not None:
                 url = requests.utils.requote_uri(
                     f'{INDEX_URL}/{file.get("name")}')
                 msg += f' | <a href="{url}"> Index URL</a>'
         except TypeError:
             pass
     return msg
Example 16
def rss_sub(update, context):
    try:
        args = update.message.text.split(" ", 3)
        title = str(args[1])
        feed_link = str(args[2])
        f_lists = []
        try:
            filters = str(args[3]).lower()
            if filters.startswith('f: '):
                filters = filters.split('f: ', 1)[1]
                filters_list = filters.split('|')
                for x in filters_list:
                    y = x.split(' or ')
                    f_lists.append(y)
            else:
                filters = None
        except:
            filters = None
        exists = rss_dict.get(title)
        if exists is not None:
            LOGGER.error(
                "This title is already subscribed! Choose another title!")
            return sendMessage(
                "This title is already subscribed! Choose another title!",
                context.bot, update)
        try:
            rss_d = feedparse(feed_link)
            sub_msg = "<b>Subscribed!</b>"
            sub_msg += f"\n\n<b>Title: </b><code>{title}</code>\n<b>Feed Url: </b>{feed_link}"
            sub_msg += f"\n\n<b>latest record for </b>{rss_d.feed.title}:"
            sub_msg += f"\n\n<b>Name: </b><code>{rss_d.entries[0]['title']}</code>"
            try:
                link = rss_d.entries[0]['links'][1]['href']
            except IndexError:
                link = rss_d.entries[0]['link']
            sub_msg += f"\n\n<b>Link: </b><code>{link}</code>"
            sub_msg += f"\n\n<b>Filters: </b><code>{filters}</code>"
            last_link = str(rss_d.entries[0]['link'])
            last_title = str(rss_d.entries[0]['title'])
            DbManger().rss_add(title, feed_link, last_link, last_title,
                               filters)
            with rss_dict_lock:
                if len(rss_dict) == 0:
                    rss_job.enabled = True
                rss_dict[title] = [feed_link, last_link, last_title, f_lists]
            sendMessage(sub_msg, context.bot, update)
            LOGGER.info(f"Rss Feed Added: {title} - {feed_link} - {filters}")
        except (IndexError, AttributeError) as e:
            LOGGER.error(str(e))
            msg = "The link doesn't seem to be a RSS feed or it's region-blocked!"
            sendMessage(msg, context.bot, update)
        except Exception as e:
            LOGGER.error(str(e))
            sendMessage(str(e), context.bot, update)
    except IndexError:
        msg = f"Use this format to add feed url:\n/{BotCommands.RssSubCommand} Title https://www.rss-url.com"
        msg += " f: 1080 or 720 or 144p|mkv or mp4|hevc (optional)\n\nThis filter will parse links that it's titles"
        msg += " contains `(1080 or 720 or 144p) and (mkv or mp4) and hevc` words. You can add whatever you want.\n\n"
        msg += "Another example: f:  1080  or 720p|.web. or .webrip.|hvec or x264 .. This will parse titles that contains"
        msg += " ( 1080  or 720p) and (.web. or .webrip.) and (hvec or x264). I have added space before and after 1080"
        msg += " to avoid wrong matching. If this `10805695` number in title it will match 1080 if added 1080 without"
        msg += " spaces after it."
        msg += "\n\nFilters Notes:\n\n1. | means and.\n\n2. Add `or` between similar keys, you can add it"
        msg += " between qualities or between extensions, so don't add filter like this f: 1080|mp4 or 720|web"
        msg += " because this will parse 1080 and (mp4 or 720) and web ... not (1080 and mp4) or (720 and web)."
        msg += "\n\n3. You can add `or` and `|` as much as you want."
        msg += "\n\n4. Take look on title if it has static special character after or before the qualities or extensions"
        msg += " or whatever and use them in filter to avoid wrong match"
        sendMessage(msg, context.bot, update)
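
The nested f_lists built above encode the AND-of-ORs described in the help text ('|' separates required groups, 'or' separates alternatives); a hypothetical sketch of matching a feed title against it.

def title_matches(title: str, f_lists) -> bool:
    # Every '|' group must be satisfied by at least one of its 'or' alternatives.
    title = title.lower()
    return all(any(alt in title for alt in group) for group in f_lists)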
Example 17
def deleteMessage(bot, message: Message):
    try:
        bot.delete_message(chat_id=message.chat.id,
                           message_id=message.message_id)
    except Exception as e:
        LOGGER.error(str(e))
Example 18
def editMessage(text: str, message: Message):
    try:
        message.edit_text(text)
    except Exception as e:
        LOGGER.error(str(e))
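
Example 2 calls editMessage() with a reply_markup argument as well; a hedged variant consistent with that call, since python-telegram-bot's Message.edit_text accepts reply_markup.

def editMessage(text: str, message: Message, reply_markup=None):
    try:
        message.edit_text(text=text, reply_markup=reply_markup)
    except Exception as e:
        LOGGER.error(str(e))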