def download_picture(self, picture_url: str, album_folder: Path) -> None:
    """
    Download picture.

    :param picture_url: picture url
    :param album_folder: album folder path
    """
    try:
        picture_url = normalize_url(picture_url)
        picture_name = picture_url.rsplit('/', 1)[1]
        picture_path = Path.joinpath(album_folder, picture_name)
        if not Path.exists(picture_path):
            logger.info(f'Start downloading: {picture_url}')
            retry = 1
            response = requests.get(picture_url, stream=True, timeout=self.timeout)
            while response.status_code != 200 and retry <= self.retries:
                logger.warning(f'{retry}º Retry: {picture_name}')
                response = requests.get(picture_url, stream=True, timeout=self.timeout)
                retry += 1
            # Check the final response status, not the retry counter: a
            # success on the last allowed retry leaves retry > self.retries,
            # and the old counter check raised even though the download
            # succeeded.
            if response.status_code != 200:
                raise Exception('Reached maximum number of retries')
            if len(response.content) > 0:
                with picture_path.open('wb') as image:
                    image.write(response.content)
                logger.log(5, f'Completed download of: {picture_name}')
            else:
                raise Exception('Zero content')
        else:
            logger.warning(f'Picture already exists: {picture_name} ')
    except Exception as e:
        logger.error(f'Failed to download picture: {picture_url}\n{e}')
def search_albums(search_query: str, sorting: str = 'date_trending', page: int = 1, max_pages: int = 1) -> List[Album]:
    """
    Search for albums.

    :param search_query: keyword
    :param sorting: result ordering key sent to the API
    :param page: initial search page
    :param max_pages: maximum search page
    :return: Album list
    """
    logger.log(
        5,
        f'Searching albums with keyword: {search_query} | Page: {page} | Max pages: {max_pages}'
    )
    albums = []
    while True:
        response = requests.post(
            'https://members.luscious.net/graphql/nobatch/?operationName=AlbumList',
            json=album_search_query(search_query, sorting, page)).json()
        data = response['data']['album']['list']
        page += 1
        albums.extend(
            Album(item['id'], item['title'],
                  item['created_by']['display_name'],
                  item['number_of_pictures'],
                  item['number_of_animated_pictures'])
            for item in data['items'])
        info = data['info']
        # Stop when the API reports no further pages or the caller's cap is hit.
        if not info['has_next_page'] or info['page'] == max_pages:
            break
    return albums
def download(function: Callable[[List[int], Downloader], None], inputs: List[str], extractor: Callable[[str], int], downloader: Downloader, prefix: str):
    """
    Run *function* over the ids extracted from *inputs*, then archive the inputs.

    :param function: download routine taking (ids, downloader)
    :param inputs: raw URL/ID strings supplied by the user
    :param extractor: converts one input string into a numeric id
    :param downloader: shared Downloader instance
    :param prefix: label used when moving inputs to the completed list
    """
    extracted_ids = extract_ids_from_list(inputs, extractor)
    function(extracted_ids, downloader)
    list_txt_organizer(inputs, prefix)
    logger.log(5, 'URLs/IDs added to completed list.')
def get_pictures_urls(album_id):
    """
    Fetch every original-picture URL of an album, paging until the API is exhausted.

    :param album_id: numeric album id
    :return: flat list of `url_to_original` strings
    """
    logger.log(5, 'Fetching album pictures...')
    page = 1
    pages = []
    while True:
        payload = {
            "id": 7,
            "operationName": "AlbumListOwnPictures",
            "query": "query AlbumListOwnPictures($input: PictureListInput!) {picture {list(input: $input) {info "
                     "{...FacetCollectionInfo} items {...PictureStandardWithoutAlbum}}}} fragment FacetCollectionInfo on "
                     "FacetCollectionInfo {page has_next_page has_previous_page total_items total_pages items_per_page "
                     "url_complete} fragment PictureStandardWithoutAlbum on Picture {url_to_original url_to_video url}",
            "variables": {
                "input": {
                    "filters": [{"name": "album_id", "value": album_id}],
                    "display": "rating_all_time",
                    "page": page
                }
            }
        }
        response = requests.post(
            'https://members.luscious.net/graphql/nobatch/?operationName=AlbumListOwnPictures',
            json=payload).json()
        pages.append(response['data']['picture']['list']['items'])
        page += 1
        if not response['data']['picture']['list']['info']['has_next_page']:
            break
    urls = [item['url_to_original'] for batch in pages for item in batch]
    logger.info(f'Total of {len(urls)} links found.')
    return urls
def fetch_info(self) -> bool:
    """
    Fetch album information.

    :return: bool - true if there are no error otherwise false
    """
    logger.log(5, 'Fetching album information...')
    response = requests.post(
        'https://members.luscious.net/graphql/nobatch/?operationName=AlbumGet',
        json=album_info_query(str(self.id_))).json()
    data = response['data']['album']['get']
    if 'errors' in data:
        logger.error(
            f'Something wrong with album: {self.id_}\nErrors: {data["errors"]}'
        )
        logger.warning('Skipping...')
        return False
    self.title = data['title']
    self.author = data['created_by']['display_name']
    self.number_of_pictures = data['number_of_pictures']
    self.number_of_animated_pictures = data['number_of_animated_pictures']
    # `or {}` / `or []` guards: the API may return null for these fields
    # (a plain .get default does not help when the key exists with a None
    # value), and list fields should fall back to a list, not a dict.
    # NOTE(review): null-ability of these fields inferred from defensive
    # .get usage — confirm against the API schema.
    self.info.update({
        'slug': data.get('slug', self.title),
        'language': (data.get('language') or {}).get('title', ''),
        'tags': [tag.get('text', '') for tag in data.get('tags') or []],
        'genres': [genre.get('title', '') for genre in data.get('genres') or []],
        'audiences': [
            audience.get('title', '')
            for audience in data.get('audiences') or []
        ],
    })
    return True
def download_picture(self, picture_url: str, album_folder: str) -> None:
    """
    Download picture.

    :param picture_url: picture url (may be protocol-relative, i.e. '//host/...')
    :param album_folder: folder path
    """
    try:
        if picture_url.startswith('//'):
            # A protocol-relative URL has no scheme; requests cannot fetch
            # 'host/path', so prepend a scheme instead of stripping the
            # slashes (the old replace('//', '', 1) produced an unfetchable
            # scheme-less URL).
            picture_url = 'https:' + picture_url
        picture_name = picture_url.rsplit('/', 1)[1]
        picture_path = os.path.join(album_folder, picture_name)
        if not os.path.exists(picture_path):
            logger.info(f'Start downloading: {picture_url}')
            retry = 1
            response = requests.get(picture_url, stream=True, timeout=self.timeout)
            while response.status_code != 200 and retry <= self.retries:
                logger.warning(f'{retry}º Retry: {picture_name}')
                response = requests.get(picture_url, stream=True, timeout=self.timeout)
                retry += 1
            # Without this check, an error-page body would be written to
            # disk once the retries were exhausted.
            if response.status_code != 200:
                raise Exception('Reached maximum number of retries')
            if len(response.content) > 0:
                with open(picture_path, 'wb') as image:
                    image.write(response.content)
                logger.log(5, f'Completed download of: {picture_name}')
            else:
                raise Exception('Zero content')
        else:
            logger.warning(f'Picture already exists: {picture_name} ')
    except Exception as e:
        logger.error(f'Failed to download picture: {picture_url}\n{e}')
def show(self) -> None:
    """Show album information."""
    rows = [
        ['ID ', self.id_],
        ['Title', self.title],
        ['Author', self.author],
        ['Pictures', self.number_of_pictures],
        ['Gifs', self.number_of_animated_pictures],
    ]
    logger.log(5, f'Album information:\n{tabulate(rows, tablefmt="jira")}')
def show_album_info(album):
    """
    Log the album title and picture count.

    :param album: mapping with 'title' and 'number_of_pictures' keys
    """
    try:
        logger.log(
            5,
            f'Album Name: {album["title"]} - with {album["number_of_pictures"]} pictures.'
        )
    # Only dict-access failures are expected here; a bare `except Exception`
    # would also hide unrelated bugs (e.g. a broken logger).
    except (KeyError, TypeError) as e:
        logger.warning(f'Failed to print album information.\n{e}')
def get_album_id(album_url):
    """
    Extract the numeric album id from an album URL.

    :param album_url: album URL whose last path segment ends with '_<id>'
    :return: the album id as a string, or False when it cannot be resolved
    """
    try:
        logger.log(5, 'Resolving album id...')
        # Trailing slash shifts the segment of interest one position back.
        split = 2 if album_url.endswith('/') else 1
        album_id = album_url.rsplit('/', split)[1].rsplit('_', 1)[1]
        # Validate that the extracted segment is numeric; raises ValueError
        # otherwise. (The old `isinstance(int(x), int)` check was always True.)
        int(album_id)
        return album_id
    except Exception as e:
        logger.critical(f"Couldn't resolve album ID of {album_url}\n{e}")
    return False
def print_search(results: List[Album]) -> None:
    """
    Shows information of the searched albums.

    :param results: Album list
    """
    headers = ('ID', 'Title', 'Pictures', 'Gifs', 'Author')
    rows = []
    for album in results:
        rows.append([
            album.id_, album.title, album.number_of_pictures,
            album.number_of_animated_pictures, album.author
        ])
    logger.log(
        5, f'Search Result Total: {len(results)}\n{tabulate(rows, headers)}')
def add(string: str):
    """
    Add string to list_completed.txt

    :param string: Mostly URL or ID of Album or User
    """
    # Read first so we know whether a separating newline is needed.
    with open('./list_completed.txt') as completed:
        existing = completed.read()
    with open('./list_completed.txt', 'a') as completed:
        if not existing.endswith('\n'):
            completed.write('\n')
        completed.write(string)
    logger.log(5, f'Added to completed list: {string}')
def list_organizer(album_url):
    """
    Remove *album_url* from list.txt and append it to list_completed.txt.

    :param album_url: url to move between the two list files
    """
    # Blank out (rather than drop) any line containing the url, then rewrite.
    with open('./list.txt') as list_txt:
        kept_lines = ['' if album_url in line else line for line in list_txt]
    with open('./list.txt', 'w') as list_txt:
        list_txt.writelines(kept_lines)
    with open('./list_completed.txt') as completed:
        text = completed.read()
    with open('./list_completed.txt', 'a') as completed:
        if not text.endswith("\n"):
            completed.write('\n')
        completed.write(album_url)
    logger.log(5, 'Album url added to completed list.')
def read_list() -> List[str]:
    """
    Read text file content.

    :return: list content; an empty list when the file cannot be read
    """
    try:
        logger.log(5, 'Reading list...')
        with open('./list.txt') as file:
            list_txt = file.read().split('\n')
        logger.log(5, f'Total of Links: {len(list_txt)}.')
        return list_txt
    except Exception as e:
        print(f'Failed to read the list.txt.\n{e}')
        # Honor the List[str] annotation: the old implicit None return made
        # callers that iterate the result crash.
        return []
def add(string: str) -> None:
    """
    Add string to list_completed.txt

    :param string: Mostly URL or ID of Album or User
    """
    completed_path = get_root_path().joinpath('list_completed.txt')
    # Read first so we know whether a separating newline is needed.
    with completed_path.open() as completed:
        existing = completed.read()
    with completed_path.open('a') as completed:
        if not existing.endswith('\n'):
            completed.write('\n')
        completed.write(string)
    logger.log(5, f'Added to completed list: {string}')
def generate_pdf(output_dir: Path, formmatted_name: str, album_folder: Path, rm_origin_dir=False) -> None:
    """
    Create pdf file containing album pictures [jpg,jpeg].

    :param output_dir: output folder path
    :param formmatted_name: formmatted album name
    :param album_folder: album folder path
    :param rm_origin_dir: indicates whether the source folder will be deleted
    """
    try:
        from PIL import Image
        logger.info('Generating album pdf file...')
        # iterdir() already yields full paths; the old
        # Path.joinpath(album_folder, file_name) duplicated the folder
        # component when album_folder was a relative path. Sort so page
        # order is deterministic instead of filesystem-dependent.
        pictures_path_list = sorted(
            entry for entry in album_folder.iterdir()
            if entry.is_file() and entry.suffix.lower() in ('.jpg', '.jpeg', '.png'))
        pictures = []
        for picture_path in pictures_path_list:
            img = Image.open(picture_path)
            # PDF pages cannot carry an alpha channel; flatten to RGB.
            if picture_path.suffix.lower() == '.png' or img.mode == 'RGBA':
                img = img.convert('RGB')
            pictures.append(img)
        if len(pictures) == 0:
            raise Exception('Pictures list is empty, probably has no valid images [jpg, jpeg, png]')
        pdf_filename = f'{formmatted_name}.pdf'
        pdf_path = Path.joinpath(output_dir, pdf_filename)
        logger.info(f'Adding {len(pictures)} pictures to pdf...')
        pictures[0].save(pdf_path, save_all=True, append_images=pictures[1:])
        logger.log(5, f'Album PDF saved to: {output_dir}')
        for img in pictures:
            img.close()
        if rm_origin_dir:
            shutil.rmtree(album_folder, ignore_errors=True)
            logger.log(5, f'Album {formmatted_name} folder deleted.')
    except ImportError:
        logger.error('Please install Pillow package by using pip.')
    except Exception as e:
        logger.error(f'Failed to generate album pdf: {e}')
def read_list() -> list[str]:
    """
    Read list.txt file content.

    :return: list.txt content; an empty list when the file is empty or unreadable
    """
    try:
        logger.log(5, 'Reading list...')
        with get_root_path().joinpath('list.txt').open() as list_file:
            content = list_file.read()
        # An empty file must yield an empty list: the old code skipped the
        # split and returned the raw '' string, violating the list[str]
        # contract (and iterating it would yield characters).
        items = content.split('\n') if content else []
        logger.log(5, f'Total of Items: {len(items)}.')
        return items
    except Exception as e:
        print(f'Failed to read the list.txt.\n{e}')
        # Honor the annotation instead of implicitly returning None.
        return []
def fetch_pictures(self) -> None:
    """Fetch album pictures."""
    logger.log(5, 'Fetching album pictures...')
    current_page = 1
    while True:
        response = requests.post(
            'https://members.luscious.net/graphql/nobatch/?operationName=AlbumListOwnPictures',
            json=album_list_pictures_query(str(self.id_), current_page)).json()
        picture_list = response['data']['picture']['list']
        for picture in picture_list['items']:
            self.pictures.append(picture['url_to_original'])
        current_page += 1
        if not picture_list['info']['has_next_page']:
            break
    logger.info(f'Total of {len(self.pictures)} links found.')
def get_album_info(album_id):
    """
    Fetch basic album information (id, title, picture count) from the API.

    :param album_id: numeric album id
    :return: the album payload from the GraphQL response
    """
    logger.log(5, 'Fetching album information...')
    payload = {
        "id": 6,
        "operationName": "AlbumGet",
        "query": "query AlbumGet($id: ID!) {album {get(id: $id) {... on Album {...AlbumStandard} ... on MutationError "
                 "{errors {code message}}}}} fragment AlbumStandard on Album {id title number_of_pictures}",
        "variables": {"id": album_id}
    }
    response = requests.post(
        'https://members.luscious.net/graphql/nobatch/?operationName=AlbumGet',
        json=payload).json()
    return response['data']['album']['get']
def fetch_info(self) -> bool:
    """
    Fetch user information.

    :return: bool - true if there are no error otherwise false
    """
    logger.log(5, 'Fetching user information...')
    data = requests.post(
        'https://members.luscious.net/graphql/nobatch/?operationName=ProfileGet',
        json=user_info_query(str(self.id_))).json()['data']['userprofile']['get']
    if "errors" in data:
        logger.error(
            f'Something wrong with user: {self.id_}\nErrors: {data["errors"]}'
        )
        logger.warning('Skipping...')
        return False
    self.name = data['user']['name']
    self.number_of_albums = data['number_of_albums']
    self.number_of_favorites = data['number_of_favorite_albums']
    return True
def download_picture(picture_url, directory, album_name):
    """
    Download a single picture into directory/album_name/.

    :param picture_url: direct picture url
    :param directory: base output directory (joined by string formatting,
        so it is expected to end with a path separator)
    :param album_name: album folder name
    """
    try:
        picture_name = picture_url.rsplit('/', 1)[1]
        picture_path = f'{directory}{album_name}/{picture_name}'
        if not os.path.exists(picture_path):
            logger.info(f'Start downloading: {picture_url}')
            retries = 1
            res = requests.get(picture_url, stream=True)
            while res.status_code != 200 and retries <= 5:
                logger.warning(f'{retries}º Retry: {picture_name}')
                res = requests.get(picture_url, stream=True)
                # The counter was never incremented before, so a persistent
                # non-200 response spun this loop forever.
                retries += 1
            if len(res.content) > 0:
                with open(picture_path, 'wb') as image:
                    image.write(res.content)
                logger.log(5, f'Completed download of: {picture_name}')
            else:
                raise Exception('Zero content')
        else:
            logger.warning(f'Picture: {picture_name} already exist.')
    except Exception as e:
        logger.error(f'Failed to download picture: {picture_url}\n{e}')
def fetch_info(self) -> bool:
    """
    Fetch album information.

    :return: bool - true if there are no error otherwise false
    """
    logger.log(5, 'Fetching album information...')
    data = requests.post(
        'https://members.luscious.net/graphql/nobatch/?operationName=AlbumGet',
        json=album_info_query(str(self.id_))).json()['data']['album']['get']
    if 'errors' in data:
        logger.error(
            f'Something wrong with album: {self.id_}\nErrors: {data["errors"]}'
        )
        logger.warning('Skipping...')
        return False
    self.title = data['title']
    self.author = data['created_by']['display_name']
    self.number_of_pictures = data['number_of_pictures']
    self.number_of_animated_pictures = data['number_of_animated_pictures']
    return True
def fetch_albums(self, only_favorites=False):
    """Fetch user albums."""
    what = "favorites" if only_favorites else "albums"
    logger.log(5, f'Fetching user {what}...')
    # The query builder does not change across pages; pick it once.
    query_builder = user_favorites_query if only_favorites else user_albums_query
    page = 1
    while True:
        logger.info(f'Fetching user {what} page: {page}...')
        response = requests.post(
            'https://members.luscious.net/graphql/nobatch/?operationName=AlbumList',
            json=query_builder(str(self.id_), page)).json()
        album_list = response['data']['album']['list']
        self.albums_ids.extend(album['id'] for album in album_list['items'])
        page += 1
        if not album_list['info']['has_next_page']:
            break
    logger.info(f'Total of {len(self.albums_ids)} ids found.')
def main():
    """Legacy console loop: single album download, list.txt batch, settings."""
    create_default_files()
    while True:
        option = input('Options:\n'
                       '1 - Enter album URL.\n'
                       '2 - Download from list.txt.\n'
                       '3 - Settings.\n'
                       '0 - Exit.\n'
                       '> ')
        cls()
        if option == '0':
            exit()
        elif option == '1':
            input_url = input('0 - Back.\n'
                              'Album URL: ')
            if input_url == '0':
                cls()
            else:
                cls()
                start(input_url)
        elif option == '2':
            logger.log(5, 'Checking List.')
            with open('./list.txt') as list_file:
                url_list = list_file.readlines()
            logger.log(5, f'Total of Links: {len(url_list)}.')
            for url in url_list:
                start(url.rstrip('\n'))
        elif option == '3':
            open_config_menu()
        else:
            print('Invalid Option.\n')
def menu() -> None:
    """
    Interactive entry point.

    Builds a base argument Namespace from the saved config and dispatches
    the user's choice: download by URL/ID, user albums/favorites, keyword
    search, list.txt batch, or settings.
    """
    info()
    create_default_files()
    logger_file_handler()
    configs = get_config_data()
    base_namespace = Namespace(
        output_dir=Path(os.path.normcase(configs.get('directory', './albums/'))).resolve(),
        threads=configs.get('pool', os.cpu_count() or 1),
        retries=configs.get('retries', 5),
        timeout=configs.get('timeout', 30),
        delay=configs.get('delay', 0),
        foldername_format=configs.get('foldername_format', '%t'),
        gen_pdf=configs.get('gen_pdf', False),
        rm_origin_dir=configs.get('rm_origin_dir', False),
        album_inputs=None,
        user_inputs=None,
        only_favorites=False,
        keyword=None,
        search_download=False,
        sorting='date_trending',
        page=1,
        max_pages=1)
    while True:
        option = input('Options:\n'
                       '1 - Download albums by URL or ID.\n'
                       '2 - Download all user albums.\n'
                       '3 - Download all user favorites.\n'
                       '4 - Search albums by keyword.\n'
                       '5 - Download albums from list.txt.\n'
                       '6 - Settings.\n'
                       '0 - Exit.\n'
                       '> ')
        if option in ('1', '2', '3'):
            inputs = input('\n0 - Back.\n'
                           f'Enter {"album" if option == "1" else "user"} URL or ID.\n> ')
            cls()
            if inputs != '0':
                args = copy(base_namespace)
                args.album_inputs = inputs if option == '1' else None
                args.user_inputs = inputs if option in ('2', '3') else None
                args.only_favorites = option == '3'
                start(args)
                list_txt_organizer(inputs_string_to_list(inputs),
                                   'album' if option == '1' else 'user')
                logger.log(5, 'URLs/IDs added to completed list.')
        elif option == '4':
            keyword = input('Enter keyword\n> ')
            if not keyword:
                print('Please enter a keyword.\n')
                # `continue`, not `return`: returning here killed the whole
                # menu loop on a merely-empty keyword.
                continue
            page = input('Enter starting page number or leave blank\n> ')
            page = int(page) if is_a_valid_integer(page) else 1
            max_pages = input('Enter max page or leave blank\n> ')
            max_pages = int(max_pages) if is_a_valid_integer(max_pages) else 1
            # Explicit y/Y comparison: the old substring test (`x in 'yY'`)
            # treated a blank answer as "yes" because '' is a substring of
            # every string.
            search_download = input('Download search results?\n'
                                    '("Y/N") ').strip() in ('y', 'Y')
            args = copy(base_namespace)
            args.keyword = keyword
            args.search_download = search_download
            args.page = page
            args.max_pages = max_pages
            start(args)
        elif option == '5':
            list_txt = list(set(read_list()))
            args = copy(base_namespace)
            args.album_inputs = ','.join(list_txt)
            start(args)
            list_txt_organizer(list_txt, 'album')
            logger.log(5, 'URLs/IDs added to completed list.')
        elif option == '6':
            open_config_menu()
        elif option == '0':
            exit()
        else:
            print('Invalid Option.\n')
def show(self) -> None:
    """Show user information."""
    rows = [
        ['ID', self.id_],
        ['Name', self.name],
        ['Albums', self.number_of_albums],
        ['Favorites', self.number_of_favorites],
    ]
    logger.log(5, f'User information:\n{tabulate(rows, tablefmt="jira")}')