Example no. 1
def __onDownloadStarted(api, gid):
    try:
        if any([
                STOP_DUPLICATE, TORRENT_DIRECT_LIMIT, ZIP_UNZIP_LIMIT,
                STORAGE_THRESHOLD
        ]):
            sleep(1.5)
            dl = getDownloadByGid(gid)
            if dl is None:
                return
            download = api.get_download(gid)
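            # With STOP_DUPLICATE set, look for a same-named file/folder in Drive before mirroring.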
            if STOP_DUPLICATE and not dl.getListener().isLeech:
                LOGGER.info('Checking File/Folder if already in Drive...')
                sname = download.name
                if dl.getListener().isZip:
                    sname = sname + ".zip"
                elif dl.getListener().extract:
                    try:
                        sname = get_base_name(sname)
                    except Exception:
                        sname = None
                if sname is not None:
                    smsg, button = GoogleDriveHelper().drive_list(sname, True)
                    if smsg:
                        dl.getListener().onDownloadError(
                            'File/Folder already available in Drive.\n\n')
                        api.remove([download], force=True, files=True)
                        return sendMarkup("Here are the search results:",
                                          dl.getListener().bot,
                                          dl.getListener().update, button)
            if any([ZIP_UNZIP_LIMIT, TORRENT_DIRECT_LIMIT, STORAGE_THRESHOLD]):
                sleep(1)
                limit = None
                size = api.get_download(gid).total_length
                arch = any([dl.getListener().isZip, dl.getListener().extract])
                if STORAGE_THRESHOLD is not None:
                    acpt = check_storage_threshold(size, arch, True)
                    # third arg is True because aria2 pre-allocates files; drop it if allocation is disabled
                    if not acpt:
                        msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
                        msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
                        dl.getListener().onDownloadError(msg)
                        return api.remove([download], force=True, files=True)
                if ZIP_UNZIP_LIMIT is not None and arch:
                    mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
                    limit = ZIP_UNZIP_LIMIT
                elif TORRENT_DIRECT_LIMIT is not None:
                    mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}GB'
                    limit = TORRENT_DIRECT_LIMIT
                if limit is not None:
                    LOGGER.info('Checking File/Folder Size...')
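                    # limit is configured in GB; compare against the size in bytes (1 GB here = 1024**3 bytes)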
                    if size > limit * 1024**3:
                        dl.getListener().onDownloadError(
                            f'{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}'
                        )
                        return api.remove([download], force=True, files=True)
    except Exception as e:
        LOGGER.error(
            f"onDownloadStarted: {gid} stop duplicate and size check didn't pass. Error: {e}"
        )
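
Note: check_storage_threshold is imported from the bots' helper modules and is never shown in these examples. The sketch below is only a guess at its behavior, assuming free space is measured on the downloads volume and that zip/unzip work temporarily needs about one extra copy of the data; the third argument in Example no. 1 marks files aria2 has already allocated on disk.

from shutil import disk_usage

DOWNLOAD_DIR = '/usr/src/app/downloads/'  # assumed download root
STORAGE_THRESHOLD = 4                     # GB of free space to keep, from config

def check_storage_threshold(size, arch=False, alloc=False):
    free = disk_usage(DOWNLOAD_DIR).free
    if not alloc:
        free -= size  # the download itself still has to land on disk
    if arch:
        free -= size  # assumed: archiving/extracting needs roughly another copy
    return free > STORAGE_THRESHOLD * 1024**3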
Example no. 2
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(
                f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            gid = download.gid()
            size = download.size_raw()
            if name is None or self.isQbit:  # when pyrogram's media.file_name is of NoneType
                name = os.listdir(f'{DOWNLOAD_DIR}{self.uid}')[0]
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
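        # Post-processing branches: archive (tar/zip), extract, or upload the path untouched.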
        if self.isTar:
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.zip(name, m_path) if self.isZip else fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!!')
                return
            try:
                shutil.rmtree(m_path)
            except Exception:
                os.remove(m_path)
        elif self.extract:
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(f"Extracting: {name}")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                pswd = self.pswd
                if pswd is not None:
                    archive_result = subprocess.run(["pextract", m_path, pswd])
                else:
                    archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path,)).start()  # args must be a tuple
                    LOGGER.info(f"Deleting archive: {m_path}")
                else:
                    LOGGER.warning(
                        'Unable to extract archive! Uploading anyway')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(f'got path: {path}')

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = pathlib.PurePath(path).name
        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
        LOGGER.info(f"Upload Name: {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        size = fs_utils.get_path_size(up_path)
        upload_status = UploadStatus(drive, size, gid, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
Example no. 3
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(
                f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            if name is None:  # when pyrogram's media.file_name is of NoneType
                name = os.listdir(f"{DOWNLOAD_DIR}{self.uid}")[0]
            m_path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info("File to archive not found!")
                self.onUploadError("Internal error occurred!!")
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(f"Extracting : {name} ")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                pswd = self.pswd
                if pswd is not None:
                    archive_result = subprocess.run(["pextract", m_path, pswd])
                else:
                    archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path, )).start()
                    LOGGER.info(f"Deleting archive : {m_path}")
                else:
                    LOGGER.warning(
                        "Unable to extract archive! Uploading anyway")
                    path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
                LOGGER.info(f"got path : {path}")

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
        else:
            path = f"{DOWNLOAD_DIR}{self.uid}/{name}"
        up_name = pathlib.PurePath(path).name
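        # name() can stringify to "None" (pyrogram gave no file_name); fall back to the directory listing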
        if up_name == "None":
            up_name = "".join(os.listdir(f"{DOWNLOAD_DIR}{self.uid}/"))
        up_path = f"{DOWNLOAD_DIR}{self.uid}/{up_name}"
        LOGGER.info(f"Upload Name : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        size = fs_utils.get_path_size(up_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
Example no. 4
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(f"โœ… : {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!!')
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(f"📁🔓 : {name}")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                pswd = self.pswd
                if pswd is not None:
                    archive_result = subprocess.run(["pextract", m_path, pswd])
                else:
                    archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path,)).start()
                    LOGGER.info(f"๐Ÿ—‘๐Ÿ”’ : {m_path}")
                else:
                    LOGGER.warning('Unable to extract archive! Uploading anyway')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(f'got path : {path}')

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = pathlib.PurePath(path).name
        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
        if up_name == "None":
            up_name = "".join(os.listdir(f'{DOWNLOAD_DIR}{self.uid}/'))
        LOGGER.info(f"๐Ÿ“„ : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        size = fs_utils.get_path_size(up_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
Example no. 5
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(f"Download completed successfully: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!')
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(f"Extracting : {name} ")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                pswd = self.pswd
                if pswd is not None:
                    archive_result = subprocess.run(["pextract", m_path, pswd])
                else:
                    archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path, )).start()
                    LOGGER.info(f"Arsip lagi didelete: {m_path}")
                else:
                    LOGGER.warning("Unable to extract archive, uploading it anyway")
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(f'got path : {path}')

            except NotSupportedExtractionArchive:
                LOGGER.info("Arsip yang kamu kirim gk bisa, aku kirim aja ya")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = pathlib.PurePath(path).name
        if up_name == "None":
            up_name = "".join(os.listdir(f'{DOWNLOAD_DIR}{self.uid}/'))
        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
        LOGGER.info(f"Bentar lagi upload file yang kamu minta : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        size = fs_utils.get_path_size(up_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
Example no. 6
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!!')
                return
        elif self.extract:
            download.is_extracting = True

            path = fs_utils.get_base_name(m_path)
            if path != "unsupported":
                LOGGER.info(
                    f"Extracting : {download_dict[self.uid].name()} "
                )
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                # shells out to the external "extract" helper; the quotes guard spaces in the path
                os.system(f"extract '{m_path}'")
                if not os.path.exists(path):
                    self.onUploadError("Cannot extract file, check integrity of the file")
                    return
                LOGGER.info(
                    f'got path : {path}'
                )
                try:
                    os.remove(m_path)
                    LOGGER.info(f"Deleting archive : {m_path}")
                except Exception as e:
                    LOGGER.error(str(e))
            else:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
        up_name = pathlib.PurePath(path).name
        LOGGER.info(f"Upload Name : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        if size == 0:
            size = fs_utils.get_path_size(m_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
Example no. 7
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('<b>🛣️Iɴᴛᴇʀɴᴀʟ Eʀʀᴏʀ Oᴄᴄᴜʀʀᴇᴅ!!</b>')
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(
                    f"Extracting : {name} "
                )
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path,)).start()
                    LOGGER.info(f"Deleting archive : {m_path}")
                else:
                    LOGGER.warning('Unable to extract archive! Uploading anyway')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(
                    f'got path : {path}'
                )

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = pathlib.PurePath(path).name
        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
        LOGGER.info(f"Upload Name : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        size = fs_utils.get_path_size(up_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
Example no. 8
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!!')
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(
                    f"Extracting : {name} "
                )
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path,)).start()
                    LOGGER.info(f"Deleting archive : {m_path}")
                else:
                    LOGGER.warning('Unable to extract archive! Uploading anyway')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(
                    f'got path : {path}'
                )

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            
        upload_path = ju_file(path, self.bot, self.update)
        if AUTO_RCLONE:
            upload_message = rc_upload(upload_path)
            if upload_message is not None:
                sendMessage(upload_message, self.bot, self.update)
            LOGGER.info(upload_message)
        update_all_messages()
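
ju_file and rc_upload are fork-specific helpers that never appear in this corpus. A rough sketch of what rc_upload might look like, assuming the rclone CLI is installed and a remote has already been set up with rclone config (the remote name below is invented for illustration):

import subprocess

def rc_upload(path, remote='gdrive:mirror'):  # 'gdrive:mirror' is a placeholder remote
    # rclone copy uploads the file/folder at path into the remote directory
    result = subprocess.run(['rclone', 'copy', path, remote],
                            capture_output=True, text=True)
    if result.returncode != 0:
        return f'rclone upload failed: {result.stderr.strip()}'
    return f'Uploaded {path} to {remote}'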
Example no. 9
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(f"⬢𝗗𝗼𝘄𝗻𝗹𝗼𝗮𝗱 𝗰𝗼𝗺𝗽𝗹𝗲𝘁𝗲𝗱: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = download.name()
            size = download.size_raw()
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{download.name()}'
        if self.isTar:
            download.is_archiving = True
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('𝗙𝗶𝗹𝗲 𝘁𝗼 𝗮𝗿𝗰𝗵𝗶𝘃𝗲 𝗻𝗼𝘁 𝗳𝗼𝘂𝗻𝗱!')
                self.onUploadError('Internal error occurred!!')
                return
        elif self.extract:
            download.is_extracting = True
            try:
                path = fs_utils.get_base_name(m_path)
                LOGGER.info(f"โฌข๐—˜๐˜…๐˜๐—ฟ๐—ฎ๐—ฐ๐˜๐—ถ๐—ป๐—ด : {name} ")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                archive_result = subprocess.run(["extract", m_path])
                if archive_result.returncode == 0:
                    threading.Thread(target=os.remove, args=(m_path, )).start()
                    LOGGER.info(f"โฌข๐——๐—ฒ๐—น๐—ฒ๐˜๐—ถ๐—ป๐—ด ๐—ฎ๐—ฟ๐—ฐ๐—ต๐—ถ๐˜ƒ๐—ฒ : {m_path}")
                else:
                    LOGGER.warning('𝗨𝗻𝗮𝗯𝗹𝗲 𝘁𝗼 𝗲𝘅𝘁𝗿𝗮𝗰𝘁 𝗮𝗿𝗰𝗵𝗶𝘃𝗲! 𝗨𝗽𝗹𝗼𝗮𝗱𝗶𝗻𝗴 𝗮𝗻𝘆𝘄𝗮𝘆')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(f'got path : {path}')

            except NotSupportedExtractionArchive:
                LOGGER.info("๐—ก๐—ผ๐˜ ๐—ฎ๐—ป๐˜† ๐˜ƒ๐—ฎ๐—น๐—ถ๐—ฑ ๐—ฎ๐—ฟ๐—ฐ๐—ต๐—ถ๐˜ƒ๐—ฒ, ๐˜‚๐—ฝ๐—น๐—ผ๐—ฎ๐—ฑ๐—ถ๐—ป๐—ด ๐—ณ๐—ถ๐—น๐—ฒ ๐—ฎ๐˜€ ๐—ถ๐˜ ๐—ถ๐˜€.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = pathlib.PurePath(path).name
        LOGGER.info(f"โฌข๐—จ๐—ฝ๐—น๐—ผ๐—ฎ๐—ฑ ๐—ก๐—ฎ๐—บ๐—ฒ : {up_name}")
        drive = gdriveTools.GoogleDriveHelper(up_name, self)
        if size == 0:
            size = fs_utils.get_path_size(m_path)
        upload_status = UploadStatus(drive, size, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
Example no. 10
def add_gd_download(link, listener, is_gdtot):
    res, size, name, files = GoogleDriveHelper().helper(link)
    if res != "":
        return sendMessage(res, listener.bot, listener.update)
    if STOP_DUPLICATE and not listener.isLeech:
        LOGGER.info('Checking File/Folder if already in Drive...')
        gname = name  # default so gname is always bound (a plain mirror keeps the original name)
        if listener.isZip:
            gname = name + ".zip"
        elif listener.extract:
            try:
                gname = get_base_name(name)
            except Exception:
                gname = None
        if gname is not None:
            gmsg, button = GoogleDriveHelper().drive_list(gname, True)
            if gmsg:
                msg = "File/Folder is already available in Drive.\nHere are the search results:"
                return sendMarkup(msg, listener.bot, listener.update, button)
    if STORAGE_THRESHOLD is not None:
        acpt = check_storage_threshold(size, True)
        if not acpt:
            msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
            msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
            return sendMessage(msg, listener.bot, listener.update)
    if ZIP_UNZIP_LIMIT is not None:
        LOGGER.info('Checking File/Folder Size...')
        if size > ZIP_UNZIP_LIMIT * 1024**3:
            msg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(size)}.'
            return sendMessage(msg, listener.bot, listener.update)
    LOGGER.info(f"Download Name: {name}")
    drive = GoogleDriveHelper(name, listener)
    gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
    download_status = GdDownloadStatus(drive, size, listener, gid)
    with download_dict_lock:
        download_dict[listener.uid] = download_status
    sendStatusMessage(listener.update, listener.bot)
    drive.download(link)
    if is_gdtot:
        drive.deletefile(link)
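
get_base_name and NotSupportedExtractionArchive live in the bots' fs_utils module, which is not part of this listing. A minimal sketch under the assumption that it simply strips a known archive extension and rejects everything else:

import os

ARCH_EXT = ('.tar.bz2', '.tar.gz', '.tar.xz', '.tar', '.zip', '.7z', '.rar', '.gz', '.bz2')

class NotSupportedExtractionArchive(Exception):
    pass

def get_base_name(orig_path):
    # "file.tar.gz" -> "file"; raise if no recognized archive extension matches
    lower = orig_path.lower()
    for ext in ARCH_EXT:
        if lower.endswith(ext):
            return orig_path[:-len(ext)]
    raise NotSupportedExtractionArchive(f'{os.path.basename(orig_path)} is not an archive')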
Example no. 11
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = str(download.name()).replace('/', '')
            gid = download.gid()
            size = download.size_raw()
            if name == "None" or self.isQbit:
                name = listdir(f'{DOWNLOAD_DIR}{self.uid}')[-1]
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        if self.isZip:
            try:
                with download_dict_lock:
                    download_dict[self.uid] = ZipStatus(name, m_path, size)
                pswd = self.pswd
                path = m_path + ".zip"
                LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
                if pswd is not None:
                    if self.isLeech and int(size) > TG_SPLIT_SIZE:
                        srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{pswd}", path, m_path])
                    else:
                        srun(["7z", "a", "-mx=0", f"-p{pswd}", path, m_path])
                elif self.isLeech and int(size) > TG_SPLIT_SIZE:
                    srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
                else:
                    srun(["7z", "a", "-mx=0", path, m_path])
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!!')
                return
            try:
                rmtree(m_path)
            except Exception:
                osremove(m_path)
        elif self.extract:
            try:
                if ospath.isfile(m_path):
                    path = get_base_name(m_path)
                LOGGER.info(f"Extracting: {name}")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                pswd = self.pswd
                if ospath.isdir(m_path):
                    for dirpath, subdir, files in walk(m_path, topdown=False):
                        for file_ in files:
                            # First volume of a multi-part archive, or a standalone rar/zip.
                            if search(r'\.part0*1\.rar$', file_) or search(r'\.7z\.0*1$', file_) \
                               or (file_.endswith(".rar") and not search(r'\.part\d+\.rar$', file_)) \
                               or file_.endswith(".zip") or search(r'\.zip\.0*1$', file_):
                                m_path = ospath.join(dirpath, file_)
                                if pswd is not None:
                                    result = srun(["7z", "x", f"-p{pswd}", m_path, f"-o{dirpath}", "-aot"])
                                else:
                                    result = srun(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
                                if result.returncode != 0:
                                    LOGGER.error('Unable to extract archive!')
                        for file_ in files:
                            # Clean up archive volumes once extraction is done.
                            if file_.endswith(".rar") or search(r'\.r\d+$', file_) \
                               or search(r'\.7z\.\d+$', file_) or search(r'\.z\d+$', file_) \
                               or search(r'\.zip\.\d+$', file_) or file_.endswith(".zip"):
                                del_path = ospath.join(dirpath, file_)
                                osremove(del_path)
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                else:
                    if pswd is not None:
                        result = srun(["bash", "pextract", m_path, pswd])
                    else:
                        result = srun(["bash", "extract", m_path])
                    if result.returncode == 0:
                        LOGGER.info(f"Extract Path: {path}")
                        osremove(m_path)
                        LOGGER.info(f"Deleting archive: {m_path}")
                    else:
                        LOGGER.error('Unable to extract archive! Uploading anyway')
                        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = PurePath(path).name
        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
        size = get_path_size(f'{DOWNLOAD_DIR}{self.uid}')
        if self.isLeech and not self.isZip:
            checked = False
            for dirpath, subdir, files in walk(f'{DOWNLOAD_DIR}{self.uid}', topdown=False):
                for file_ in files:
                    f_path = ospath.join(dirpath, file_)
                    f_size = ospath.getsize(f_path)
                    if int(f_size) > TG_SPLIT_SIZE:
                        if not checked:
                            checked = True
                            with download_dict_lock:
                                download_dict[self.uid] = SplitStatus(up_name, up_path, size)
                            LOGGER.info(f"Splitting: {up_name}")
                        fssplit(f_path, f_size, file_, dirpath, TG_SPLIT_SIZE)
                        osremove(f_path)
        if self.isLeech:
            LOGGER.info(f"Leech Name: {up_name}")
            tg = TgUploader(up_name, self)
            tg_upload_status = TgUploadStatus(tg, size, gid, self)
            with download_dict_lock:
                download_dict[self.uid] = tg_upload_status
            update_all_messages()
            tg.upload()
        else:
            LOGGER.info(f"Upload Name: {up_name}")
            drive = GoogleDriveHelper(up_name, self)
            upload_status = UploadStatus(drive, size, gid, self)
            with download_dict_lock:
                download_dict[self.uid] = upload_status
            update_all_messages()
            drive.upload(up_name)
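
fssplit (fs_utils.split in other forks) is likewise external to this listing. A simplified sketch, assuming it cuts an oversized file into numbered split_size-byte parts next to the original so each part fits under Telegram's upload cap:

import os

def fssplit(path, size, file_, dirpath, split_size):
    # Write path out as file_.01, file_.02, ... each at most split_size bytes.
    parts = -(-size // split_size)  # ceiling division
    with open(path, 'rb') as src:
        for i in range(1, parts + 1):
            with open(os.path.join(dirpath, f'{file_}.{i:02d}'), 'wb') as dst:
                remaining = split_size
                while remaining > 0:
                    chunk = src.read(min(1024 * 1024, remaining))
                    if not chunk:
                        break
                    dst.write(chunk)
                    remaining -= len(chunk)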
Example no. 12
    def onDownloadComplete(self):
        with download_dict_lock:
            LOGGER.info(
                f"Download completed: {download_dict[self.uid].name()}")
            download = download_dict[self.uid]
            name = f"{download.name()}".replace('/', '')
            gid = download.gid()
            size = download.size_raw()
            if name == "None" or self.isQbit:  # when pyrogram's media.file_name is of NoneType
                name = os.listdir(f'{DOWNLOAD_DIR}{self.uid}')[0]
            m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        if self.isTar:
            try:
                with download_dict_lock:
                    download_dict[self.uid] = TarStatus(name, m_path, size)
                if self.isZip:
                    pswd = self.pswd
                    path = m_path + ".zip"
                    LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
                    if pswd is not None:
                        subprocess.run(["7z", "a", f"-p{pswd}", path, m_path])
                    else:
                        subprocess.run(["7z", "a", path, m_path])
                else:
                    path = fs_utils.tar(m_path)
            except FileNotFoundError:
                LOGGER.info('File to archive not found!')
                self.onUploadError('Internal error occurred!!')
                return
            try:
                shutil.rmtree(m_path)
            except:
                os.remove(m_path)
        elif self.extract:
            try:
                LOGGER.info(f"Extracting: {name}")
                with download_dict_lock:
                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
                pswd = self.pswd
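                # A directory may hold multi-part archives: find each first volume and extract it in place.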
                if os.path.isdir(m_path):
                    for dirpath, subdir, files in os.walk(m_path,
                                                          topdown=False):
                        for file in files:
                            suffixes = (".part1.rar", ".part01.rar",
                                        ".part001.rar", ".part0001.rar")
                            if (file.endswith(".rar") and "part"
                                    not in file) or file.endswith(suffixes):
                                m_path = os.path.join(dirpath, file)
                                if pswd is not None:
                                    result = subprocess.run([
                                        "7z", "x", f"-p{pswd}", m_path,
                                        f"-o{dirpath}"
                                    ])
                                else:
                                    result = subprocess.run(
                                        ["7z", "x", m_path, f"-o{dirpath}"])
                                if result.returncode != 0:
                                    LOGGER.warning(
                                        'Unable to extract archive!')
                                break
                        for file in files:
                            # Remove leftover rar volumes once extraction is done.
                            if file.endswith(".rar") or fnmatch(file, "*.r[0-9]*"):
                                del_path = os.path.join(dirpath, file)
                                os.remove(del_path)
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                else:
                    path = fs_utils.get_base_name(m_path)
                    if pswd is not None:
                        result = subprocess.run(["pextract", m_path, pswd])
                    else:
                        result = subprocess.run(["extract", m_path])
                    if result.returncode == 0:
                        os.remove(m_path)
                        LOGGER.info(f"Deleting archive: {m_path}")
                    else:
                        LOGGER.warning(
                            'Unable to extract archive! Uploading anyway')
                        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
                LOGGER.info(f'got path: {path}')

            except NotSupportedExtractionArchive:
                LOGGER.info("Not any valid archive, uploading file as it is.")
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        else:
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        up_name = pathlib.PurePath(path).name
        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
        size = fs_utils.get_path_size(up_path)
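        # Telegram caps single-file uploads, so for leech jobs any file over TG_SPLIT_SIZE is split first.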
        if self.isLeech:
            checked = False
            for dirpath, subdir, files in os.walk(f'{DOWNLOAD_DIR}{self.uid}',
                                                  topdown=False):
                for file in files:
                    f_path = os.path.join(dirpath, file)
                    f_size = os.path.getsize(f_path)
                    if int(f_size) > TG_SPLIT_SIZE:
                        if not checked:
                            checked = True
                            with download_dict_lock:
                                download_dict[self.uid] = SplitStatus(
                                    up_name, up_path, size)
                            LOGGER.info(f"Splitting: {up_name}")
                        fs_utils.split(f_path, f_size, file, dirpath,
                                       TG_SPLIT_SIZE)
                        os.remove(f_path)
            LOGGER.info(f"Leech Name: {up_name}")
            tg = pyrogramEngine.TgUploader(up_name, self)
            tg_upload_status = TgUploadStatus(tg, size, gid, self)
            with download_dict_lock:
                download_dict[self.uid] = tg_upload_status
            update_all_messages()
            tg.upload()
        else:
            LOGGER.info(f"Upload Name: {up_name}")
            drive = gdriveTools.GoogleDriveHelper(up_name, self)
            upload_status = UploadStatus(drive, size, gid, self)
            with download_dict_lock:
                download_dict[self.uid] = upload_status
            update_all_messages()
            drive.upload(up_name)
Example no. 13
def add_mega_download(mega_link: str, path: str, listener):
    executor = AsyncExecutor()
    api = MegaApi(MEGA_API_KEY, None, None, 'mirror-leech-telegram-bot')
    mega_listener = MegaAppListener(executor.continue_event, listener)
    global listeners
    api.addListener(mega_listener)
    listeners.append(mega_listener)
    if MEGA_EMAIL_ID is not None and MEGA_PASSWORD is not None:
        executor.do(api.login, (MEGA_EMAIL_ID, MEGA_PASSWORD))
    link_type = get_mega_link_type(mega_link)
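    # Public file links resolve to a node directly; folder links need their own folder session to authorize.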
    if link_type == "file":
        LOGGER.info("File link. If the download doesn't start, check whether the link is still available.")
        executor.do(api.getPublicNode, (mega_link, ))
        node = mega_listener.public_node
    else:
        LOGGER.info("Folder link. If the download doesn't start, check whether the link is still available.")
        folder_api = MegaApi(MEGA_API_KEY, None, None, 'mltb')
        folder_api.addListener(mega_listener)
        executor.do(folder_api.loginToFolder, (mega_link, ))
        node = folder_api.authorizeNode(mega_listener.node)
    if mega_listener.error is not None:
        return sendMessage(str(mega_listener.error), listener.bot,
                           listener.update)
    if STOP_DUPLICATE and not listener.isLeech:
        LOGGER.info('Checking File/Folder if already in Drive')
        mname = node.getName()
        if listener.isZip:
            mname = mname + ".zip"
        elif listener.extract:
            try:
                mname = get_base_name(mname)
        except Exception:
                mname = None
        if mname is not None:
            smsg, button = GoogleDriveHelper().drive_list(mname, True)
            if smsg:
                msg1 = "File/Folder is already available in Drive.\nHere are the search results:"
                return sendMarkup(msg1, listener.bot, listener.update, button)
    if any([STORAGE_THRESHOLD, ZIP_UNZIP_LIMIT, MEGA_LIMIT]):
        size = api.getSize(node)
        arch = any([listener.isZip, listener.extract])
        if STORAGE_THRESHOLD is not None:
            acpt = check_storage_threshold(size, arch)
            if not acpt:
                msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
                msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
                return sendMessage(msg, listener.bot, listener.update)
        limit = None
        if ZIP_UNZIP_LIMIT is not None and arch:
            msg3 = f'Failed, Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.'
            limit = ZIP_UNZIP_LIMIT
        elif MEGA_LIMIT is not None:
            msg3 = f'Failed, Mega limit is {MEGA_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.'
            limit = MEGA_LIMIT
        if limit is not None:
            LOGGER.info('Checking File/Folder Size...')
            if size > limit * 1024**3:
                return sendMessage(msg3, listener.bot, listener.update)
    with download_dict_lock:
        download_dict[listener.uid] = MegaDownloadStatus(
            mega_listener, listener)
    makedirs(path)
    gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=8))
    mega_listener.setValues(node.getName(), api.getSize(node), gid)
    sendStatusMessage(listener.update, listener.bot)
    executor.do(api.startDownload, (node, path))
Example no. 14
def _qb_listener(listener, client, ext_hash, select, path):
    stalled_time = time()
    uploaded = False
    sizeChecked = False
    dupChecked = False
    rechecked = False
    while True:
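        # Poll qBittorrent every few seconds and react to the torrent's state transitions.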
        sleep(4)
        try:
            tor_info = client.torrents_info(torrent_hashes=ext_hash)
            if len(tor_info) == 0:
                with download_dict_lock:
                    if listener.uid not in list(download_dict.keys()):
                        client.auth_log_out()
                        break
                continue
            tor_info = tor_info[0]
            if tor_info.state == "metaDL":
                stalled_time = time()
                # timeout while downloading metadata
                if QB_TIMEOUT is not None and time() - tor_info.added_on >= QB_TIMEOUT:
                    _onDownloadError("Dead Torrent!", client, ext_hash, listener)
                    break
            elif tor_info.state == "downloading":
                stalled_time = time()
                if STOP_DUPLICATE and not dupChecked and not listener.isLeech and ospath.isdir(path):
                    LOGGER.info('Checking File/Folder if already in Drive')
                    qbname = str(listdir(f'{path}')[-1])
                    if qbname.endswith('.!qB'):
                        qbname = ospath.splitext(qbname)[0]
                    if listener.isZip:
                        qbname = qbname + ".zip"
                    elif listener.extract:
                        try:
                            qbname = get_base_name(qbname)
                        except:
                            qbname = None
                    if qbname is not None:
                        qbmsg, button = GoogleDriveHelper().drive_list(
                            qbname, True)
                        if qbmsg:
                            msg = "File/Folder is already available in Drive."
                            _onDownloadError(msg, client, ext_hash, listener)
                            sendMarkup("Here are the search results:",
                                       listener.bot, listener.update, button)
                            break
                    dupChecked = True
                if not sizeChecked:
                    sleep(1)
                    size = tor_info.size
                    arch = any([listener.isZip, listener.extract])
                    if STORAGE_THRESHOLD is not None:
                        acpt = check_storage_threshold(size, arch)
                        if not acpt:
                            msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
                            msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
                            _onDownloadError(msg, client, ext_hash, listener)
                            break
                    limit = None
                    if ZIP_UNZIP_LIMIT is not None and arch:
                        mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
                        limit = ZIP_UNZIP_LIMIT
                    elif TORRENT_DIRECT_LIMIT is not None:
                        mssg = f'Torrent limit is {TORRENT_DIRECT_LIMIT}GB'
                        limit = TORRENT_DIRECT_LIMIT
                    if limit is not None:
                        LOGGER.info('Checking File/Folder Size...')
                        if size > limit * 1024**3:
                            fmsg = f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}"
                            _onDownloadError(fmsg, client, ext_hash, listener)
                            break
                    sizeChecked = True
            elif tor_info.state == "stalledDL":
                if not rechecked and 0.9999 < tor_info.progress < 1:
                    msg = f"Force recheck - Name: {tor_info.name} Hash: "
                    msg += f"{ext_hash} Downloaded Bytes: {tor_info.downloaded} "
                    msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}"
                    LOGGER.info(msg)
                    client.torrents_recheck(torrent_hashes=ext_hash)
                    rechecked = True
                # timeout after downloading metadata
                elif QB_TIMEOUT is not None and time() - stalled_time >= QB_TIMEOUT:
                    _onDownloadError("Dead Torrent!", client, ext_hash, listener)
                    break
                    break
            elif tor_info.state == "missingFiles":
                client.torrents_recheck(torrent_hashes=ext_hash)
            elif tor_info.state == "error":
                _onDownloadError("No enough space for this torrent on device",
                                 client, ext_hash, listener)
                break
            elif (tor_info.state.lower().endswith("up")
                  or tor_info.state == "uploading") and not uploaded:
                LOGGER.info(f"onQbDownloadComplete: {ext_hash}")
                uploaded = True
                if not QB_SEED:
                    client.torrents_pause(torrent_hashes=ext_hash)
                if select:
                    clean_unwanted(path)
                listener.onDownloadComplete()
                if QB_SEED and not listener.isLeech and not listener.extract:
                    with download_dict_lock:
                        if listener.uid not in list(download_dict.keys()):
                            client.torrents_delete(torrent_hashes=ext_hash,
                                                   delete_files=True)
                            client.auth_log_out()
                            break
                        download_dict[listener.uid] = QbDownloadStatus(
                            listener, client, ext_hash, select)
                    update_all_messages()
                    LOGGER.info(f"Seeding started: {tor_info.name}")
                else:
                    client.torrents_delete(torrent_hashes=ext_hash,
                                           delete_files=True)
                    client.auth_log_out()
                    break
            elif tor_info.state == 'pausedUP' and QB_SEED:
                listener.onUploadError(
                    f"Seeding stopped with Ratio: {round(tor_info.ratio, 3)} and Time: {get_readable_time(tor_info.seeding_time)}"
                )
                client.torrents_delete(torrent_hashes=ext_hash,
                                       delete_files=True)
                client.auth_log_out()
                update_all_messages()
                break
        except Exception as e:
            LOGGER.error(str(e))
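
get_readable_file_size appears throughout these examples but is defined elsewhere. A sketch close to the helper these bots typically ship, assuming binary (1024-based) units:

SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']

def get_readable_file_size(size_in_bytes):
    # 1536 -> "1.5KB"; sizes step by factors of 1024
    if size_in_bytes is None:
        return '0B'
    index = 0
    while size_in_bytes >= 1024 and index < len(SIZE_UNITS) - 1:
        size_in_bytes /= 1024
        index += 1
    return f'{round(size_in_bytes, 2)}{SIZE_UNITS[index]}'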