def onDownloadComplete(self):
    with download_dict_lock:
        LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = download.name()
        size = download.size_raw()
        if name is None:  # when pyrogram's media.file_name is of NoneType
            name = os.listdir(f'{DOWNLOAD_DIR}{self.uid}')[0]
        m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    if self.isTar:
        # Archive the finished download before uploading
        download.is_archiving = True
        try:
            with download_dict_lock:
                download_dict[self.uid] = TarStatus(name, m_path, size)
            path = fs_utils.tar(m_path)
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
    elif self.extract:
        # Extract the downloaded archive before uploading
        download.is_extracting = True
        try:
            path = fs_utils.get_base_name(m_path)
            LOGGER.info(f"Extracting : {name}")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            if pswd is not None:
                archive_result = subprocess.run(["pextract", m_path, pswd])
            else:
                archive_result = subprocess.run(["extract", m_path])
            if archive_result.returncode == 0:
                # Remove the archive in the background once extraction succeeds
                threading.Thread(target=os.remove, args=(m_path,)).start()
                LOGGER.info(f"Deleting archive : {m_path}")
            else:
                LOGGER.warning('Unable to extract archive! Uploading anyway')
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            LOGGER.info(f'got path : {path}')
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        # No post-processing requested; upload the download as-is
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = pathlib.PurePath(path).name
    up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
    if up_name == "None":
        up_name = "".join(os.listdir(f'{DOWNLOAD_DIR}{self.uid}/'))
    LOGGER.info(f"Upload Name : {up_name}")
    drive = gdriveTools.GoogleDriveHelper(up_name, self)
    size = fs_utils.get_path_size(up_path)
    # Switch the tracked status to uploading and refresh all status messages
    upload_status = UploadStatus(drive, size, self)
    with download_dict_lock:
        download_dict[self.uid] = upload_status
    update_all_messages()
    drive.upload(up_name)
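Each stage above swaps a new status object into download_dict so that update_all_messages() can render the current state of the task. Below is a minimal sketch of what such a status class could look like, assuming only the name()/size_raw() interface used in this excerpt; the class name and fields are hypothetical illustrations, not the bot's actual TarStatus/ExtractStatus/UploadStatus implementations.

class StageStatusSketch:
    """Hypothetical status holder; the real status classes differ."""

    def __init__(self, name, path, size):
        self._name = name
        self._path = path
        self._size = size

    def name(self):
        return self._name

    def size_raw(self):
        return self._size

    def status(self):
        # Text shown in the periodically refreshed progress message
        return "Processing..."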
def upload(self, file_name: str):
    if USE_SERVICE_ACCOUNTS:
        self.service_account_count = len(os.listdir("accounts"))
    self.__listener.onUploadStarted()
    file_dir = f"{DOWNLOAD_DIR}{self.__listener.message.message_id}"
    file_path = f"{file_dir}/{file_name}"
    size = get_readable_file_size(get_path_size(file_path))
    LOGGER.info("Uploading File: " + file_path)
    self.start_time = time.time()
    # Periodically push upload progress to the status message
    self.updater = setInterval(self.update_interval, self._on_upload_progress)
    if os.path.isfile(file_path):
        # Single file: upload it directly into the configured parent folder
        try:
            mime_type = get_mime_type(file_path)
            link = self.upload_file(file_path, file_name, mime_type, parent_id)
            if link is None:
                raise Exception('Upload has been manually cancelled')
            LOGGER.info("Uploaded To G-Drive: " + file_path)
        except Exception as e:
            if isinstance(e, RetryError):
                LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
                err = e.last_attempt.exception()
            else:
                err = e
            LOGGER.error(err)
            self.__listener.onUploadError(str(err))
            return
        finally:
            self.updater.cancel()
    else:
        # Directory: create a Drive folder, then upload its contents recursively
        try:
            dir_id = self.create_directory(os.path.basename(os.path.abspath(file_name)), parent_id)
            result = self.upload_dir(file_path, dir_id)
            if result is None:
                raise Exception('Upload has been manually cancelled!')
            LOGGER.info("Uploaded To G-Drive: " + file_name)
            link = f"https://drive.google.com/folderview?id={dir_id}"
        except Exception as e:
            if isinstance(e, RetryError):
                LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
                err = e.last_attempt.exception()
            else:
                err = e
            LOGGER.error(err)
            self.__listener.onUploadError(str(err))
            return
        finally:
            self.updater.cancel()
    LOGGER.info(download_dict)
    self.__listener.onUploadComplete(link, size)
    LOGGER.info("Deleting downloaded file/folder..")
    return link
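upload() drives its progress reporting through a setInterval() helper that keeps calling _on_upload_progress until cancel() is invoked in the finally blocks. A minimal sketch of such a helper follows, assuming a simple threading.Event-based loop; the project's actual helper may be implemented differently.

import threading
import time

class setInterval:
    def __init__(self, interval, action):
        self.interval = interval          # seconds between callback invocations
        self.action = action              # callback, e.g. _on_upload_progress
        self.stopEvent = threading.Event()
        threading.Thread(target=self.__run, daemon=True).start()

    def __run(self):
        next_time = time.time() + self.interval
        # wait() returns True once cancel() sets the event, ending the loop
        while not self.stopEvent.wait(next_time - time.time()):
            next_time += self.interval
            self.action()

    def cancel(self):
        self.stopEvent.set()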
def processed_bytes(self):
    if self.obj.downloaded_bytes != 0:
        return self.obj.downloaded_bytes
    else:
        return get_path_size(f"{DOWNLOAD_DIR}{self.uid}")
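When the download client has not yet reported any bytes, processed_bytes() falls back to measuring whatever is already on disk. A minimal sketch of a get_path_size() helper under that assumption, summing file sizes with os.walk; the project's real helper may handle symlinks or missing paths differently.

import os

def get_path_size(path: str) -> int:
    # Single file: return its size directly
    if os.path.isfile(path):
        return os.path.getsize(path)
    # Directory: sum the sizes of all files beneath it
    total = 0
    for root, _dirs, files in os.walk(path):
        for f in files:
            total += os.path.getsize(os.path.join(root, f))
    return total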