async def copy(self) -> None:
    """ Copy file/folder in GDrive """
    # A parent folder must be configured before anything can be copied.
    if not self._parent_id:
        await self._message.edit("First set parent path by `.gset`", del_in=5)
        return
    await self._message.try_to_edit("`Loading GDrive Copy...`")
    file_id, _ = self._get_file_id()
    # Run the blocking copy in a worker thread and poll for completion here.
    pool.submit_thread(self._copy, file_id)
    started = datetime.now()
    ticks = 0
    while not self._is_finished:
        ticks += 1
        if self._message.process_is_canceled:
            self._cancel()
        # Throttle progress edits so the chat message isn't updated too often.
        if self._progress is not None and ticks >= Config.EDIT_SLEEP_TIMEOUT:
            ticks = 0
            await self._message.try_to_edit(self._progress)
        await asyncio.sleep(1)
    elapsed = (datetime.now() - started).seconds
    if isinstance(self._output, HttpError):
        out = f"**ERROR** : `{self._output._get_reason()}`"  # pylint: disable=protected-access
    elif self._output is None:
        out = "`failed to copy.. check logs?`"
    elif self._is_canceled:
        out = self._output
    else:
        out = f"**Copied Successfully** __in {elapsed} seconds__\n\n{self._output}"
    await self._message.edit(out, disable_web_page_preview=True, log=__name__)
async def download(self) -> None:
    """ Download file/folder from GDrive """
    await self._message.try_to_edit("`Loading GDrive Download...`")
    file_id, _ = self._get_file_id()
    # The actual transfer runs in a worker thread; this coroutine only polls.
    pool.submit_thread(self._download, file_id)
    started = datetime.now()
    ticks = 0
    while not self._is_finished:
        ticks += 1
        if self._message.process_is_canceled:
            self._cancel()
        # Only refresh the progress message every EDIT_SLEEP_TIMEOUT seconds.
        if self._progress is not None and ticks >= Config.EDIT_SLEEP_TIMEOUT:
            ticks = 0
            await self._message.try_to_edit(self._progress)
        await asyncio.sleep(1)
    elapsed = (datetime.now() - started).seconds
    if isinstance(self._output, HttpError):
        out = f"**ERROR** : `{self._output._get_reason()}`"  # pylint: disable=protected-access
    elif self._output is None:
        out = "`failed to download.. check logs?`"
    elif self._is_canceled:
        out = self._output
    else:
        out = f"**Downloaded Successfully** __in {elapsed} seconds__\n\n`{self._output}`"
    await self._message.edit(out, disable_web_page_preview=True, log=__name__)
def combine(self) -> None:
    """ Combine Split files """
    # Split parts share the base name and carry a purely numeric extension
    # (e.g. ".000"): match one [0-9] per character of the counter.
    stem, counter_ext = splitext(basename(self._path))
    self._final_file_path = join(dirname(self._path), stem)
    digits = len(counter_ext.lstrip('.'))
    pattern = self._final_file_path + "." + "[0-9]" * digits
    parts = sorted(glob(pattern))
    self._total = len(parts)
    self._file_size = sum(os.stat(part).st_size for part in parts)
    # Hand the ordered part list to the worker thread for concatenation.
    pool.submit_thread(self._combine_worker, parts)
def split(self, split_size: int) -> None:
    """ Split files """
    # Caller passes the part size in MiB; work in bytes from here on.
    size_limit = int(split_size) * 1024 * 1024
    self._file_size = os.stat(self._path).st_size
    # A read chunk can never exceed one output part.
    if self._chunk_size > size_limit:
        self._chunk_size = size_limit
    chunks_per_part = int(ceil(size_limit / self._chunk_size))
    self._total = int(ceil(self._file_size / size_limit))
    # Parts are written into a sibling directory derived from the file name.
    dir_name = f"split_{basename(self._path).replace('.', '_')}"
    self._final_file_path = join(dirname(self._path), dir_name)
    if not isdir(self._final_file_path):
        os.makedirs(self._final_file_path)
    pool.submit_thread(self._split_worker, chunks_per_part)
def unpack_path(self) -> None:
    """ UNPACK file path """
    # Group archive members into ~10 MiB batches so extraction work can be
    # spread across several worker threads.
    batch_limit = 1024 * 1024 * 10
    batches = []
    current_batch = []
    current_size = 0
    for member_name, member_size in self.get_info():
        self._total += 1
        current_batch.append(member_name)
        current_size += member_size
        if current_size >= batch_limit:
            batches.append(current_batch)
            current_batch = []
            current_size = 0
    # Flush the trailing partial batch, if any.
    if current_batch:
        batches.append(current_batch)
    stem = splitext(basename(self._file_path))[0]
    self._final_file_path = join(
        Config.DOWN_PATH, stem.replace('.tar', '').replace('.', '_'))
    for batch in batches:
        pool.submit_thread(self._unpack, batch)
def pack_path(self, tar: bool) -> None:
    """ PACK file path """
    file_paths = []

    def _walk(node: Path) -> None:
        # Depth-first collection of every regular file under *node*,
        # counting them so progress can be reported later.
        if node.is_file():
            self._total += 1
            file_paths.append(str(node))
        elif node.is_dir():
            for child in node.iterdir():
                _walk(child)

    _walk(Path(self._file_path))
    # Select archive format: tarball or zip, with the matching opener.
    suffix, opener = ('.tar', tar_open) if tar else ('.zip', ZipFile)
    archive_name = basename(self._file_path) + suffix
    self._final_file_path = join(Config.DOWN_PATH, archive_name)
    pool.submit_thread(self._zip, opener, file_paths, self._final_file_path)
async def upload(self) -> None:
    """ Upload from file/folder/link/tg file to GDrive.

    Resolves the source in priority order: a replied-to Telegram media
    message, a URL found in the command input, or a local file path.
    Supports the "path|new_name" syntax to rename the file on disk
    before uploading.  Progress is polled from a worker thread and
    edited into the command message; a downloaded temp file is removed
    once the upload finishes.
    """
    replied = self._message.reply_to_message
    is_url = re.search(
        r"(?:https?|ftp)://[^|\s]+\.[^|\s]+", self._message.input_str)
    dl_loc = ""
    if replied and replied.media:
        try:
            dl_loc, _ = await tg_download(self._message, replied)
        except ProcessCanceled:
            await self._message.edit("`Process Canceled!`", del_in=5)
            return
        except Exception as e_e:  # pylint: disable=broad-except
            await self._message.err(e_e)
            return
    elif is_url:
        try:
            dl_loc, _ = await url_download(self._message, self._message.input_str)
        except ProcessCanceled:
            await self._message.edit("`Process Canceled!`", del_in=5)
            return
        except Exception as e_e:  # pylint: disable=broad-except
            await self._message.err(e_e)
            return
    file_path = dl_loc if dl_loc else self._message.input_str
    # BUG FIX: the "path|new_name" rename syntax must be parsed BEFORE the
    # existence check — os.path.exists() on the raw "a|b" string is always
    # False, so in the old order the rename branch was unreachable.
    # maxsplit=1 tolerates a '|' inside the new name.
    if "|" in file_path:
        file_path, file_name = file_path.split("|", 1)
        file_path = file_path.strip()
        if not os.path.exists(file_path):
            await self._message.err("invalid file path provided?")
            return
        new_path = os.path.join(os.path.dirname(file_path), file_name.strip())
        os.rename(file_path, new_path)
        file_path = new_path
    elif not os.path.exists(file_path):
        await self._message.err("invalid file path provided?")
        return
    await self._message.try_to_edit("`Loading GDrive Upload...`")
    # Blocking upload runs in a worker thread; this coroutine only polls.
    pool.submit_thread(self._upload, file_path)
    start_t = datetime.now()
    count = 0
    while not self._is_finished:
        count += 1
        if self._message.process_is_canceled:
            self._cancel()
        # Throttle progress edits to once per EDIT_SLEEP_TIMEOUT seconds.
        if self._progress is not None and count >= Config.EDIT_SLEEP_TIMEOUT:
            count = 0
            await self._message.try_to_edit(self._progress)
        await asyncio.sleep(1)
    # Clean up the temp file created by tg_download / url_download.
    if dl_loc and os.path.exists(dl_loc):
        os.remove(dl_loc)
    end_t = datetime.now()
    m_s = (end_t - start_t).seconds
    if isinstance(self._output, HttpError):
        out = f"**ERROR** : `{self._output._get_reason()}`"  # pylint: disable=protected-access
    elif self._output is not None and not self._is_canceled:
        out = f"**Uploaded Successfully** __in {m_s} seconds__\n\n{self._output}"
    elif self._output is not None and self._is_canceled:
        out = self._output
    else:
        out = "`failed to upload.. check logs?`"
    await self._message.edit(out, disable_web_page_preview=True, log=__name__)