def _real_extract(self, url):
    """Extract every video attached to a single OnlyFans post as a playlist.

    Spins up a browsermob-proxy instance (port chosen from the shared
    class counter), drives a selenium browser through the post page, and
    pulls the post JSON out of the captured HAR.

    Raises ExtractorError on a 404 post or when no videos are found.
    """
    # Pre-bind so the finally-block cleanup is safe even when setup fails
    # part-way (the original code raised NameError from `finally` if e.g.
    # Server() or the "mobproxy max tries" path fired before assignment).
    _server = _harproxy = driver = None
    try:
        # Serialize port allocation: _NUM is shared class state.
        with OnlyFansPostIE._LOCK:
            while True:
                _server_port = 18080 + 100 * OnlyFansPostIE._NUM
                _server = Server(
                    path="/Users/antoniotorres/Projects/async_downloader/browsermob-proxy-2.1.4/bin/browsermob-proxy",
                    options={'port': _server_port})
                if _server._is_listening():
                    # Port already in use: try the next slot.
                    OnlyFansPostIE._NUM += 1
                    if OnlyFansPostIE._NUM == 25:
                        raise Exception("mobproxy max tries")
                else:
                    _server.start({'log_path': '/dev', 'log_file': 'null'})
                    OnlyFansPostIE._NUM += 1
                    break

        _host = 'localhost'
        _port = _server_port + 1
        _harproxy = _server.create_proxy({'port': _port})
        driver = self.get_driver(host=_host, port=_port)

        # Load the site first so the session cookies can be installed.
        self.send_driver_request(driver, self._SITE_URL)
        for cookie in OnlyFansPostIE._COOKIES:
            driver.add_cookie(cookie)

        self.report_extraction(url)
        post, account = re.search(self._VALID_URL, url).group("post", "account")
        self.to_screen("post:" + post + ":" + "account:" + account)

        entries = {}
        _harproxy.new_har(
            options={'captureHeaders': False, 'captureContent': True},
            ref=f"har_{post}", title=f"har_{post}")
        self.send_driver_request(driver, url)
        res = self.wait_until(driver, 30, error404_or_found())
        if not res or res[0] == "error404":
            raise ExtractorError("Error 404: Post doesnt exists")

        har = _harproxy.har
        data_json = self.scan_for_request(har, f"har_{post}", f"/api2/v2/posts/{post}")
        if data_json:
            self.write_debug(data_json)
            _entry = self._extract_from_json(data_json, user_profile=account)
            if _entry:
                # Deduplicate by video id, keeping the longest variant.
                for _video in _entry:
                    if _video['id'] not in entries:
                        entries[_video['id']] = _video
                    elif _video['duration'] > entries[_video['id']]['duration']:
                        entries[_video['id']] = _video

        if entries:
            return self.playlist_result(
                list(entries.values()),
                "Onlyfans:" + account, "Onlyfans:" + account)
        raise ExtractorError("No entries")
    except ExtractorError:
        raise
    except Exception as e:
        lines = traceback.format_exception(*sys.exc_info())
        self.to_screen(f'{repr(e)} \n{"!!".join(lines)}')
        raise ExtractorError(repr(e))
    finally:
        # Guarded cleanup: any of these may not have been created.
        if _harproxy:
            _harproxy.close()
        if _server:
            _server.stop()
        if driver:
            self.rm_driver(driver)
def _real_extract(self, url):
    """Extract an OnlyFans account's videos as a playlist.

    Supported modes (from the URL, default "latest"): "all", "latest",
    "favorites", "tips" scrape the account's /videos page; "chat" scrapes
    the chat room with that account. Video metadata is recovered from the
    API responses captured in the browsermob-proxy HAR.

    Raises ExtractorError on a missing profile or when no videos are found.
    """
    # Pre-bind so the finally-block cleanup is safe even when setup fails
    # before these are assigned (previously raised NameError from finally).
    _server = _harproxy = driver = None
    try:
        self.report_extraction(url)
        # NOTE(review): lock and port counter are shared with OnlyFansPostIE
        # while the cookies come from OnlyFansPlaylistIE — confirm intentional.
        with OnlyFansPostIE._LOCK:
            while True:
                _server_port = 18080 + 100 * OnlyFansPostIE._NUM
                _server = Server(
                    path="/Users/antoniotorres/Projects/async_downloader/browsermob-proxy-2.1.4/bin/browsermob-proxy",
                    options={'port': _server_port})
                if _server._is_listening():
                    # Port already in use: try the next slot.
                    OnlyFansPostIE._NUM += 1
                    if OnlyFansPostIE._NUM == 25:
                        raise Exception("mobproxy max tries")
                else:
                    _server.start({'log_path': '/dev', 'log_file': 'null'})
                    OnlyFansPostIE._NUM += 1
                    break

        _host = 'localhost'
        _port = _server_port + 1
        _harproxy = _server.create_proxy({'port': _port})
        # BUG FIX: get_driver was called twice here, leaking one webdriver.
        driver = self.get_driver(host=_host, port=_port)

        # Load the site first so the session cookies can be installed.
        self.send_driver_request(driver, self._SITE_URL)
        for cookie in OnlyFansPlaylistIE._COOKIES:
            driver.add_cookie(cookie)

        account, mode = re.search(self._VALID_URL, url).group("account", "mode")
        if not mode:
            mode = "latest"

        entries = {}

        def _merge(video_list):
            # Deduplicate by video id, keeping the longest variant.
            for _video in video_list:
                _id = _video['id']
                if (_id not in entries
                        or _video.get('duration', 1) > entries[_id].get('duration', 0)):
                    entries[_id] = _video

        if mode in ("all", "latest", "favorites", "tips"):
            self.send_driver_request(driver, f"{self._SITE_URL}/{account}")
            res = self.wait_until(driver, 60, error404_or_found())
            if not res or res[0] == "error404":
                raise ExtractorError("Error 404: User profile doesnt exists")
            _url = f"{self._SITE_URL}/{account}/videos{self._MODE_DICT[mode]}"
            _harproxy.new_har(
                options={'captureHeaders': False, 'captureContent': True},
                ref=f"har_{account}_{mode}", title=f"har_{account}_{mode}")
            self.send_driver_request(driver, _url)
            self.wait_until(
                driver, 60,
                ec.presence_of_all_elements_located(
                    (By.CLASS_NAME, "b-photos__item.m-video-item")))
            # BUG FIX: `mode in ("latest")` was a substring test on a plain
            # string, not tuple membership.
            if mode == "latest":
                har = _harproxy.har
                data_json = self.scan_for_request(
                    har, f"har_{account}_{mode}", "posts/videos?")
                if data_json:
                    self.write_debug(data_json)
                    list_json = data_json.get('list')
                    if list_json:
                        for info_json in list_json:
                            _entry = self._extract_from_json(info_json, user_profile=account)
                            if _entry:
                                _merge(_entry)
            else:
                # Scroll to the bottom of the videos page so every
                # lazy-loaded batch of posts ends up in the HAR.
                self.wait_until(driver, 600, scroll(10))
                har = _harproxy.har
                _reg_str = r'/api2/v2/users/\d+/posts/videos\?'
                data_json = self.scan_for_all_requests(
                    har, f"har_{account}_{mode}", _reg_str)
                if data_json:
                    self.write_debug(data_json)
                    list_json = []
                    for el in data_json:
                        list_json += el.get('list')
                    self.write_debug(list_json)
                    for info_json in list_json:
                        _entry = self._extract_from_json(info_json, user_profile=account)
                        if _entry:
                            _merge(_entry)
        elif mode == "chat":  # BUG FIX: was `mode in ("chat")`, a substring test
            _harproxy.new_har(
                options={'captureHeaders': False, 'captureContent': True},
                ref=f"har_{account}_{mode}", title=f"har_{account}_{mode}")
            _url = f"{self._SITE_URL}/{account}"
            self.send_driver_request(driver, _url)
            res = self.wait_until(driver, 60, error404_or_found())
            if not res or res[0] == "error404":
                raise ExtractorError("User profile doesnt exists")
            har = _harproxy.har
            data_json = self.scan_for_request(
                har, f"har_{account}_{mode}", f"users/{account}")
            userid = try_get(data_json, lambda x: x['id'])
            if not userid:
                raise ExtractorError("couldnt get id user for chat room")
            url_chat = f"https://onlyfans.com/my/chats/chat/{userid}/"
            self.to_screen(url_chat)
            self.send_driver_request(driver, url_chat)
            # The chat opens scrolled to the newest message with history
            # above; send HOME to jump to the start of the chat so older
            # messages load into the HAR.
            el_chat_scroll = self.wait_until(
                driver, 60,
                ec.presence_of_element_located((
                    By.CSS_SELECTOR,
                    "div.b-chats__scrollbar.m-custom-scrollbar.b-chat__messages.m-native-custom-scrollbar.m-scrollbar-y.m-scroll-behavior-auto"
                )))
            self.wait_until(driver, 1)
            el_chat_scroll.send_keys(Keys.HOME)
            self.wait_until(driver, 5)
            har = _harproxy.har
            _reg_str = r'/api2/v2/chats/\d+/messages'
            data_json = self.scan_for_all_requests(
                har, f"har_{account}_{mode}", _reg_str)
            if data_json:
                self.write_debug(data_json)
                list_json = []
                for el in data_json:
                    list_json += el.get('list')
                for info_json in list_json:
                    _entry = self._extract_from_json(info_json, user_profile=account)
                    if _entry:
                        _merge(_entry)

        if entries:
            return self.playlist_result(
                list(entries.values()),
                "Onlyfans:" + account, "Onlyfans:" + account)
        raise ExtractorError("no entries")
    except ExtractorError:
        raise
    except Exception as e:
        lines = traceback.format_exception(*sys.exc_info())
        self.to_screen(f'{repr(e)} \n{"!!".join(lines)}')
        raise ExtractorError(repr(e))
    finally:
        # Guarded cleanup: any of these may not have been created.
        if _harproxy:
            _harproxy.close()
        if _server:
            _server.stop()
        if driver:
            self.rm_driver(driver)
def _real_extract(self, url):
    """Extract a Videovard video.

    Drives the /v/ page in selenium, clicking through the player's
    overlay ads, and recovers the HLS master.m3u8 URL from the
    browsermob-proxy HAR, then parses it into formats.
    """
    # Pre-bind so the finally-block cleanup is safe even when setup fails
    # part-way. Previously cleanup only started after get_driver(), so a
    # failure in create_proxy()/get_driver() leaked a started server.
    _server = _harproxy = driver = None
    try:
        self.report_extraction(url)
        videoid = self._match_id(url)
        # Serialize port allocation: _NUM is shared class state.
        with VideovardIE._LOCK:
            while True:
                _server_port = 18080 + VideovardIE._NUM * 100
                _server = Server(
                    path="/Users/antoniotorres/Projects/async_downloader/browsermob-proxy-2.1.4/bin/browsermob-proxy",
                    options={'port': _server_port})
                try:
                    if _server._is_listening():
                        # Port already in use: try the next slot.
                        VideovardIE._NUM += 1
                        if VideovardIE._NUM == 25:
                            raise Exception("mobproxy max tries")
                    else:
                        _server.start({"log_path": "/dev", "log_file": "null"})
                        self.to_screen(
                            f"[{url}] browsermob-proxy start OK on port {_server_port}")
                        VideovardIE._NUM += 1
                        break
                except Exception as e:
                    lines = traceback.format_exception(*sys.exc_info())
                    self.to_screen(f'[{url}] {repr(e)} \n{"!!".join(lines)}')
                    if _server.process:
                        _server.stop()
                    raise ExtractorError(
                        f"[{url}] browsermob-proxy start error - {repr(e)}")

        _host = 'localhost'
        _port = _server_port + 1
        _harproxy = _server.create_proxy({'port': _port})
        driver = self.get_driver(host=_host, port=_port)

        _harproxy.new_har(
            options={'captureHeaders': True, 'captureContent': True},
            ref=f"har_{videoid}", title=f"har_{videoid}")
        # Use the /v/ watch page rather than the /e/ embed page.
        self.send_multi_request(driver, url.replace('/e/', '/v/'))
        title = try_get(
            self.wait_until(
                driver, 60, ec.presence_of_element_located((By.TAG_NAME, "h1"))),
            lambda x: x.text)
        vpl = self.wait_until(
            driver, 60, ec.presence_of_element_located((By.ID, "vplayer")))
        # The first click is usually intercepted by overlay ads
        # ("kalamana" / "rulezco" divs); dismiss them and retry once.
        for _ in range(2):
            try:
                vpl.click()
                self.wait_until(driver, 1)
                vpl.click()
                break
            except Exception:
                el_kal = self.wait_until(
                    driver, 60,
                    ec.presence_of_element_located((By.CSS_SELECTOR, "div.kalamana")))
                if el_kal:
                    el_kal.click()
                    self.wait_until(driver, 1)
                el_rul = self.wait_until(
                    driver, 60,
                    ec.presence_of_element_located((By.CSS_SELECTOR, "div.rulezco")))
                if el_rul:
                    el_rul.click()
                    self.wait_until(driver, 1)
                continue

        har = _harproxy.har
        m3u8_url = self.scan_for_request(har, f"har_{videoid}", "master.m3u8")
        if m3u8_url:
            self.write_debug(f"[{url}] m3u8 url - {m3u8_url}")
            res = self.send_multi_request(None, m3u8_url)
            if not res:
                raise ExtractorError(f"[{url}] no m3u8 doc")
            m3u8_doc = (res.content).decode('utf-8', 'replace')
            self.write_debug(f"[{url}] \n{m3u8_doc}")
            formats_m3u8, _ = self._parse_m3u8_formats_and_subtitles(
                m3u8_doc, m3u8_url, ext="mp4",
                entry_protocol='m3u8_native', m3u8_id="hls")
            if not formats_m3u8:
                raise ExtractorError(f"[{url}] Can't find any M3U8 format")
            self._sort_formats(formats_m3u8)
            return {
                "id": videoid,
                "title": sanitize_filename(title, restricted=True),
                "formats": formats_m3u8,
                "ext": "mp4",
            }
        # NOTE(review): falls through returning None when no master.m3u8 was
        # captured, mirroring the original behavior — consider raising here.
    except ExtractorError:
        # BUG FIX: previously the outer blanket handler re-caught these and
        # double-wrapped them as ExtractorError(repr(ExtractorError(...))).
        raise
    except Exception as e:
        lines = traceback.format_exception(*sys.exc_info())
        self.to_screen(f'{repr(e)} \n{"!!".join(lines)}')
        raise ExtractorError(repr(e))
    finally:
        # Guarded cleanup: any of these may not have been created.
        if _harproxy:
            _harproxy.close()
        if _server and _server.process:
            _server.stop()
        if driver:
            self.rm_driver(driver)