def website_validator(self, url):
    """Return the download manager matching *url*.

    Recognized sites: g.e-hentai.org galleries, exhentai.org galleries
    (requires valid Ex credentials), and panda.chaika.moe archive/gallery
    pages. Returns None when Ex credentials are missing; raises
    app_constants.WrongURL when no pattern matches.
    """
    # http:// or https:// + optional www. — automatically prepended to
    # every pattern passed to regex_validate below.
    match_prefix = r"^(http\:\/\/|https\:\/\/)?(www\.)?([^\.]?)"
    end = r"/?$"
    # NOTE: removed unused locals match_base / match_tld (never referenced).

    def regex_validate(r):
        # Full-string match against prefix + site pattern + optional slash.
        return re.fullmatch(match_prefix + r + end, url) is not None

    if regex_validate(r"((g\.e-hentai)\.org\/g\/[0-9]+\/[a-z0-9]+)"):
        manager = pewnet.HenManager()
    elif regex_validate(r"((exhentai)\.org\/g\/[0-9]+\/[a-z0-9]+)"):
        exprops = settings.ExProperties()
        if exprops.check():
            manager = pewnet.ExHenManager()
        else:
            return
    elif regex_validate(r"(panda\.chaika\.moe\/(archive|gallery)\/[0-9]+)"):
        manager = pewnet.ChaikaManager()
    else:
        raise app_constants.WrongURL
    return manager
def login(cls, user, password):
    """Log into nhentai and cache/persist the session cookies.

    Reuses cached class cookies or previously saved cookies when they
    still pass check_login. Returns the cookie dict on success; raises
    app_constants.WrongLogin when the credentials are rejected.
    """
    exprops = settings.ExProperties(settings.ExProperties.NHENTAI)
    # Fast path: reuse cookies that still authenticate.
    if cls.COOKIES:
        if cls.check_login(cls.COOKIES):
            return cls.COOKIES
    elif exprops.cookies:
        if cls.check_login(exprops.cookies):
            cls.COOKIES.update(exprops.cookies)
            return cls.COOKIES
    # Perform a fresh form-based login.
    cls._browser.open(cls.LOGIN_URL)
    login_form = cls._browser.get_form()
    if login_form:
        login_form['username'].value = user
        login_form['password'].value = password
        cls._browser.submit_form(login_form)
    n_c = cls._browser.session.cookies.get_dict()
    if not cls.check_login(n_c):
        log_w("NH login failed")
        raise app_constants.WrongLogin
    log_i("NH login success")  # fixed typo: "succes"
    # Persist credentials and cookies for the next session.
    exprops.cookies = n_c
    exprops.username = user
    exprops.password = password
    exprops.save()
    cls.COOKIES.update(n_c)
    return n_c
def add_download_entry(self, url=None):
    """Resolve *url* (or the text in the url input box) to a download
    manager, fetch the item, and add it to the download list.

    Shows an error label instead of raising when the url is unsupported.
    """
    log_i('Adding download entry: {}'.format(url))
    self.info_lbl.hide()
    h_item = None
    try:
        if not url:
            url = self.url_inserter.text().lower()
            if not url:
                return
            self.url_inserter.clear()
        if 'g.e-hentai.org' in url:
            manager = pewnet.HenManager()
        elif 'exhentai.org' in url:
            exprops = settings.ExProperties()
            if exprops.check():
                manager = pewnet.ExHenManager(exprops.ipb_id, exprops.ipb_pass)
            else:
                return
        elif 'panda.chaika.moe' in url and ('/archive/' in url or '/gallery/' in url):
            manager = pewnet.ChaikaManager()
        else:
            raise pewnet.WrongURL
        h_item = manager.from_gallery_url(url)
    except pewnet.WrongURL:
        # Fixed duplicated words in the user-facing message
        # ("Failed to add to add" -> "Failed to add").
        self.info_lbl.setText("<font color='red'>Failed to add:\n{}</font>".format(url))
        self.info_lbl.show()
        return
    if h_item:
        log_i('Successfully added download entry')
        self.download_list.add_entry(h_item)
def __init__(self):
    """Initialise the manager session.

    Loads saved cookies; when none exist, logs in with the stored
    username/password. Raises app_constants.NeedLogin when neither
    cookies nor credentials are available.
    """
    super().__init__()
    self.e_url = 'http://g.e-hentai.org/'
    props = settings.ExProperties()
    session_cookies = props.cookies
    if not session_cookies:
        # No saved cookies — fall back to a credential login.
        if not (props.username and props.password):
            raise app_constants.NeedLogin
        session_cookies = EHen.login(props.username, props.password)
    self._browser.session.cookies.update(session_cookies)
def login(cls, user, password):
    """Log into g.e-hentai / exhentai and cache/persist the cookies.

    Reuses cached or saved cookies when they still pass check_login.
    Returns the combined cookie dict; raises app_constants.WrongLogin
    when the credentials are rejected.
    """
    log_i("Attempting EH Login")
    exprops = settings.ExProperties()
    # Fast path: reuse cookies that still authenticate.
    if cls.COOKIES:
        if cls.check_login(cls.COOKIES):
            return cls.COOKIES
    elif exprops.cookies:
        if cls.check_login(exprops.cookies):
            cls.COOKIES.update(exprops.cookies)
            return cls.COOKIES
    p = {
        'CookieDate': '1',
        'b': 'd',
        'bt': '1-1',
        'UserName': user,
        'PassWord': password
    }
    # Removed dead initial assignment `eh_c = {}` (was overwritten here).
    eh_c = requests.post(
        'https://forums.e-hentai.org/index.php?act=Login&CODE=01',
        data=p).cookies.get_dict()
    # Visit exhentai to pick up its domain cookies as well.
    exh_c = requests.get('http://exhentai.org', cookies=eh_c).cookies.get_dict()
    eh_c.update(exh_c)
    if not cls.check_login(eh_c):
        log_w("EH login failed")
        raise app_constants.WrongLogin
    log_i("EH login success")  # fixed typo: "succes"
    # Persist credentials and cookies for the next session.
    exprops.cookies = eh_c
    exprops.username = user
    exprops.password = password
    exprops.save()
    cls.COOKIES.update(eh_c)
    return eh_c
def from_gallery_url(self, g_url):
    """
    Finds gallery download url and puts it in download queue.

    Fetches gallery metadata via the e-hentai API, builds a HenItem,
    then either resolves the archive download link (self.ARCHIVE) or a
    torrent link (self.TORRENT). Returns the queued HenItem, or False
    when no download could be resolved. Raises app_constants.WrongURL
    when the metadata lookup does not contain *g_url*.
    """
    if 'exhentai' in g_url:
        hen = ExHen(settings.ExProperties().cookies)
    else:
        hen = EHen()
    log_d("Using {}".format(hen.__repr__()))
    api_metadata, gallery_gid_dict = hen.add_to_queue(g_url, True, False)
    gallery = api_metadata['gmetadata'][0]
    h_item = HenItem(self._browser.session)
    h_item.gallery_url = g_url
    h_item.metadata = EHen.parse_metadata(api_metadata, gallery_gid_dict)
    try:
        h_item.metadata = h_item.metadata[g_url]
    except KeyError:
        raise app_constants.WrongURL
    h_item.thumb_url = gallery['thumb']
    h_item.gallery_name = gallery['title']
    h_item.size = "{0:.2f} MB".format(gallery['filesize'] / 1048576)
    if self.ARCHIVE:
        h_item.download_type = 0
        d_url = self._archive_url_d(gallery['gid'], gallery['token'],
                                    gallery['archiver_key'])
        # ex/g.e
        self._browser.open(d_url)
        log_d("Opening {}".format(d_url))
        download_btn = self._browser.get_form()
        if download_btn:
            log_d("Parsing download button!")
            f_div = self._browser.find('div', id='db')
            divs = f_div.find_all('div')
            # Fixed: the cost assignment was duplicated in the original.
            h_item.cost = divs[0].find('strong').text
            h_item.size = divs[1].find('strong').text
            self._browser.submit_form(download_btn)
            log_d("Submitted download button!")
        if self._browser.response.status_code == 302:
            self._browser.open(self._browser.response.headers['location'], "post")
        # get dl link
        log_d("Getting download URL!")
        continue_p = self._browser.find("p", id="continue")
        if continue_p:
            dl = continue_p.a.get('href')
        else:
            dl_a = self._browser.find('a')
            dl = dl_a.get('href')
        self._browser.open(dl)
        succes_test = self._browser.find('p')
        if succes_test and 'successfully' in succes_test.text:
            gallery_dl = self._browser.find('a').get('href')
            gallery_dl = self._browser.url.split('/archive')[0] + gallery_dl
            f_name = succes_test.find('strong').text
            h_item.download_url = gallery_dl
            h_item.fetch_thumb()
            h_item.name = f_name
            Downloader.add_to_queue(h_item, self._browser.session)
            return h_item
    elif self.TORRENT:
        h_item.download_type = 1
        h_item.torrents_found = int(gallery['torrentcount'])
        h_item.fetch_thumb()
        if h_item.torrents_found > 0:
            g_id_token = EHen.parse_url(g_url)
            if g_id_token:
                url_and_file = self._torrent_url_d(g_id_token[0], g_id_token[1])
                if url_and_file:
                    h_item.download_url = url_and_file[0]
                    h_item.name = url_and_file[1]
                    Downloader.add_to_queue(h_item, self._browser.session)
                    return h_item
        else:
            # No torrents listed — return the item without queueing.
            return h_item
    return False
def auto_web_metadata(self):
    """
    Auto fetches metadata for the provided list of galleries.
    Appends or replaces metadata with the new fetched metadata.
    """
    log_i('Initiating auto metadata fetcher')
    self._hen_list = pewnet.hen_list_init()
    if self.galleries and not app_constants.GLOBAL_EHEN_LOCK:
        log_i('Auto metadata fetcher is now running')
        app_constants.GLOBAL_EHEN_LOCK = True

        def fetch_cancelled(rsn=''):
            # Notify the user, release the global lock and signal failure.
            if rsn:
                self.AUTO_METADATA_PROGRESS.emit(
                    "Metadata fetching cancelled: {}".format(rsn))
                app_constants.SYSTEM_TRAY.showMessage(
                    "Metadata",
                    "Metadata fetching cancelled: {}".format(rsn),
                    minimized=True)
            else:
                self.AUTO_METADATA_PROGRESS.emit(
                    "Metadata fetching cancelled!")
                app_constants.SYSTEM_TRAY.showMessage(
                    "Metadata", "Metadata fetching cancelled!",
                    minimized=True)
            app_constants.GLOBAL_EHEN_LOCK = False
            self.FINISHED.emit(False)

        # Pick the source: ExHen when configured & logged in, else EHen.
        if 'exhentai' in self._default_ehen_url:
            try:
                exprops = settings.ExProperties()
                hen = pewnet.ExHen(exprops.cookies)
                if hen.check_login(exprops.cookies):
                    valid_url = 'exhen'
                    log_i("using exhen")
                else:
                    raise ValueError
            except ValueError:
                hen = pewnet.EHen()
                valid_url = 'ehen'
                log_i("using ehen")
        else:
            hen = pewnet.EHen()
            valid_url = 'ehen'
            log_i("using ehen")  # fixed: previously logged "Using Exhentai"
        try:
            self._auto_metadata_process(self.galleries, hen, valid_url,
                                        color=True)
        except app_constants.MetadataFetchFail as err:
            fetch_cancelled(err)
            return
        # Retry failed galleries against the fallback sources.
        if self.error_galleries:
            if self._hen_list:
                log_i("Using fallback source")
                self.AUTO_METADATA_PROGRESS.emit("Using fallback source")
                for hen in self._hen_list:
                    if not self.error_galleries:
                        break
                    galleries = [x[0] for x in self.error_galleries]
                    self.error_galleries.clear()
                    valid_url = ""
                    if hen == pewnet.ChaikaHen:
                        valid_url = "chaikahen"
                        log_i("using chaika hen")
                    try:
                        self._auto_metadata_process(
                            galleries, hen(), valid_url)
                    except app_constants.MetadataFetchFail as err:
                        fetch_cancelled(err)
                        return
        if not self.error_galleries:
            self.AUTO_METADATA_PROGRESS.emit(
                'Successfully fetched metadata! '
                'Went through {} galleries successfully!'
                .format(len(self.galleries)))
            app_constants.SYSTEM_TRAY.showMessage(
                'Successfully fetched metadata',
                'Went through {} galleries successfully!'.format(
                    len(self.galleries)), minimized=True)
            self.FINISHED.emit(True)
        else:
            self.AUTO_METADATA_PROGRESS.emit(
                'Finished fetching metadata! Could not fetch metadata for {} galleries. Check happypanda.log for more details!'
                .format(len(self.error_galleries)))
            app_constants.SYSTEM_TRAY.showMessage(
                'Finished fetching metadata',
                'Could not fetch metadata for {} galleries. Check happypanda.log for more details!'
                .format(len(self.error_galleries)), minimized=True)
            for tup in self.error_galleries:
                log_e("{}: {}".format(
                    tup[1], tup[0].title.encode(errors='ignore')))
            self.FINISHED.emit(self.error_galleries)
        log_i('Auto metadata fetcher is done')
        app_constants.GLOBAL_EHEN_LOCK = False
    else:
        log_e('Auto metadata fetcher is already running')
        self.AUTO_METADATA_PROGRESS.emit(
            'Auto metadata fetcher is already running!')
        self.FINISHED.emit(False)
def auto_web_metadata(self):
    """
    Auto fetches metadata for the provided list of galleries.
    Appends or replaces metadata with the new fetched metadata.
    """
    log_i('Initiating auto metadata fetcher')
    if self.galleries and not app_constants.GLOBAL_EHEN_LOCK:
        log_i('Auto metadata fetcher is now running')
        app_constants.GLOBAL_EHEN_LOCK = True
        # Pick the source: ExHen when credentials exist, else EHen.
        if 'exhentai' in self._default_ehen_url:
            try:
                exprops = settings.ExProperties()
                if exprops.ipb_id and exprops.ipb_pass:
                    hen = pewnet.ExHen(exprops.ipb_id, exprops.ipb_pass)
                    valid_url = 'exhen'
                else:
                    raise ValueError
            except ValueError:
                hen = pewnet.EHen()
                valid_url = 'ehen'
        else:
            hen = pewnet.EHen()
            valid_url = 'ehen'
        hen.LAST_USED = time.time()
        self.AUTO_METADATA_PROGRESS.emit("Checking gallery urls...")

        # NOTE: removed unused local `fetched_galleries` (never referenced).
        checked_pre_url_galleries = []
        for x, gallery in enumerate(self.galleries, 1):
            self.AUTO_METADATA_PROGRESS.emit(
                "({}/{}) Generating gallery hash: {}".format(
                    x, len(self.galleries), gallery.title))
            log_i("Generating gallery hash: {}".format(
                gallery.title.encode(errors='ignore')))
            g_hash = None  # renamed from `hash` (shadowed the builtin)
            try:
                if not gallery.hashes:
                    hash_dict = add_method_queue(HashDB.gen_gallery_hash,
                                                 False, gallery, 0, 'mid')
                    g_hash = hash_dict['mid']
                else:
                    g_hash = gallery.hashes[random.randint(
                        0, len(gallery.hashes) - 1)]
            # Fixed: original said app_constants.app_constants.CreateArchiveFail,
            # which would raise AttributeError instead of catching the error.
            except app_constants.CreateArchiveFail:
                pass
            if not g_hash:
                self.error_galleries.append(
                    (gallery, "Could not generate hash"))
                log_e("Could not generate hash for gallery: {}".format(
                    gallery.title.encode(errors='ignore')))
                continue
            gallery.hash = g_hash

            # Prefer the gallery's existing link when it points at the
            # currently valid source.
            log_i("Checking gallery url")
            if gallery.link and app_constants.USE_GALLERY_LINK:
                check = self._website_checker(gallery.link)
                if check == valid_url:
                    gallery.temp_url = gallery.link
                    checked_pre_url_galleries.append(gallery)
                    continue

            # dict -> hash:[list of title,url tuples] or None
            self.AUTO_METADATA_PROGRESS.emit(
                "({}/{}) Finding url for gallery: {}".format(
                    x, len(self.galleries), gallery.title))
            found_url = hen.eh_hash_search(gallery.hash)
            if found_url == 'error':
                app_constants.GLOBAL_EHEN_LOCK = False
                self.FINISHED.emit(True)
                return
            if gallery.hash not in found_url:
                self.error_galleries.append(
                    (gallery, "Could not find url for gallery"))
                self.AUTO_METADATA_PROGRESS.emit(
                    "Could not find url for gallery: {}".format(
                        gallery.title))
                log_w('Could not find url for gallery: {}'.format(
                    gallery.title.encode(errors='ignore')))
                continue
            title_url_list = found_url[gallery.hash]
            if app_constants.ALWAYS_CHOOSE_FIRST_HIT:
                title = title_url_list[0][0]
                url = title_url_list[0][1]
            else:
                if len(title_url_list) > 1:
                    # Several hits — ask the user to pick one.
                    self.AUTO_METADATA_PROGRESS.emit(
                        "Multiple galleries found for gallery: {}".format(
                            gallery.title))
                    app_constants.SYSTEM_TRAY.showMessage(
                        'Happypanda',
                        'Multiple galleries found for gallery:\n{}'.format(
                            gallery.title), minimized=True)
                    log_w(
                        "Multiple galleries found for gallery: {}".format(
                            gallery.title.encode(errors='ignore')))
                    self.GALLERY_PICKER.emit(gallery, title_url_list,
                                             self.GALLERY_PICKER_QUEUE)
                    user_choice = self.GALLERY_PICKER_QUEUE.get()
                else:
                    user_choice = title_url_list[0]
                if not user_choice:
                    continue
                title = user_choice[0]
                url = user_choice[1]
            if not gallery.link:
                gallery.link = url
                self.GALLERY_EMITTER.emit(gallery)
            gallery.temp_url = url
            self.AUTO_METADATA_PROGRESS.emit(
                "({}/{}) Adding to queue: {}".format(
                    x, len(self.galleries), gallery.title))
            # The last gallery flushes the metadata queue.
            if x == len(self.galleries):
                self.fetch_metadata(gallery, hen, True)
            else:
                self.fetch_metadata(gallery, hen)

        if checked_pre_url_galleries:
            for x, gallery in enumerate(checked_pre_url_galleries, 1):
                self.AUTO_METADATA_PROGRESS.emit(
                    "({}/{}) Adding to queue: {}".format(
                        x, len(checked_pre_url_galleries), gallery.title))
                if x == len(checked_pre_url_galleries):
                    self.fetch_metadata(gallery, hen, True)
                else:
                    self.fetch_metadata(gallery, hen)

        log_d('Auto metadata fetcher is done')
        app_constants.GLOBAL_EHEN_LOCK = False
        if not self.error_galleries:
            self.AUTO_METADATA_PROGRESS.emit(
                'Done! Went through {} galleries successfully!'.format(
                    len(self.galleries)))
            # Fixed: tray message was missing its .format(...) call and
            # displayed a literal "{}".
            app_constants.SYSTEM_TRAY.showMessage(
                'Done',
                'Went through {} galleries successfully!'.format(
                    len(self.galleries)),
                minimized=True)
            self.FINISHED.emit(True)
        else:
            self.AUTO_METADATA_PROGRESS.emit(
                'Done! Could not fetch metadata for {} galleries. Check happypanda.log for more details!'
                .format(len(self.error_galleries)))
            app_constants.SYSTEM_TRAY.showMessage(
                'Done!',
                'Could not fetch metadata for {} galleries. Check happypanda.log for more details!'
                .format(len(self.error_galleries)), minimized=True)
            for tup in self.error_galleries:
                log_e("{}: {}".format(
                    tup[1], tup[0].title.encode(errors='ignore')))
            self.FINISHED.emit(self.error_galleries)
    else:
        log_e('Auto metadata fetcher is already running')
        self.AUTO_METADATA_PROGRESS.emit(
            'Auto metadata fetcher is already running!')
        self.FINISHED.emit(False)