def search_newznab(self, url_base, apikey, **params): ''' Searches Newznab for imdbid url_base: str base url for all requests (https://indexer.com/) apikey: str api key for indexer params: parameters to url encode and append to url Creates url based off url_base. Appends url-encoded **params to url. Returns list of dicts of search results ''' url = u'{}api?apikey={}&{}'.format(url_base, apikey, urllib.urlencode(params)) logging.info(u'SEARCHING: {}api?apikey=APIKEY&{}'.format( url_base, urllib.urlencode(params))) proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] request = Url.request(url) try: if proxy_enabled and Proxy.whitelist(url) is True: response = Proxy.bypass(request) else: response = Url.open(request) return self.parse_newznab_xml(response) except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'Newz/TorzNab backlog search.', exc_info=True) return []
def search(imdbid, title, year): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info('Searching Torrentz2 for {}'.format(title)) url = u'https://torrentz2.eu/feed?f={}+{}'.format(title, year).replace( ' ', '+') request = urllib2.Request(url, headers={'User-Agent': 'Mozilla/5.0'}) try: if proxy_enabled and Proxy.whitelist( 'https://torrentz2.eu') is True: response = Proxy.bypass(request) else: response = urllib2.urlopen(request) response = urllib2.urlopen(request, timeout=60).read() if response: results = Torrentz2.parse(response, imdbid) return results else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'Torrentz2 search.', exc_info=True) return []
def search(imdbid, title, year): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Searching BitSnoop for {}'.format(title)) url = u'https://bitsnoop.com/search/video/{}+{}/c/d/1/?fmt=rss'.format( title, year).replace(' ', '+').encode('ascii', 'ignore') request = urllib2.Request(url, headers={'User-Agent': 'Mozilla/5.0'}) try: if proxy_enabled and Proxy.whitelist( 'https://bitsnoop.com') is True: response = Proxy.bypass(request) else: response = urllib2.urlopen(request) response = urllib2.urlopen(request, timeout=60).read() if response: results = BitSnoop.parse(response, imdbid) return results else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'BitSnoop search.', exc_info=True) return []
def search(imdbid, title, year): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Searching ExtraTorrent for {}'.format(title)) url = u'https://extratorrent.cc/rss.xml?type=search&cid=4&search={}+{}'.format( title, year).replace(' ', '+').encode('ascii', 'ignore') request = urllib2.Request(url, headers={'User-Agent': 'Mozilla/5.0'}) try: if proxy_enabled and Proxy.whitelist( 'https://www.limetorrents.cc') is True: response = Proxy.bypass(request) else: response = urllib2.urlopen(request) response = urllib2.urlopen(request, timeout=60).read() if response: results = ExtraTorrent.parse(response, imdbid) return results else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'ExtraTorrent search.', exc_info=True) return []
def search_all(self, imdbid):
    ''' Search all enabled Newznab indexers for a movie.

    :param imdbid: string imdb movie id. tt123456

    Returns list of dicts with sorted nzb information.
    '''
    proxy_enabled = core.CONFIG['Server']['Proxy']['enabled']
    indexers = core.CONFIG['Indexers']['NewzNab'].values()
    self.imdbid = imdbid
    results = []
    imdbid_s = imdbid[2:]  # just imdbid numbers (strip the 'tt' prefix)
    for indexer in indexers:
        # indexer is [url, apikey, enabled]
        if indexer[2] is False:
            continue
        url = indexer[0]
        # normalize the base url to always end with a slash
        if url[-1] != u'/':
            url = url + '/'
        apikey = indexer[1]
        search_string = u'{}api?apikey={}&t=movie&imdbid={}'.format(
            url, apikey, imdbid_s)
        # log a redacted copy of the url -- never the real api key
        logging.info(
            u'SEARCHING: {}api?apikey=APIKEY&t=movie&imdbid={}'.format(
                url, imdbid_s))
        request = urllib2.Request(search_string,
                                  headers={'User-Agent': 'Mozilla/5.0'})
        try:
            if proxy_enabled and Proxy.whitelist(url) is True:
                # presumably Proxy.bypass returns a file-like object
                # (read() below) -- TODO confirm
                response = Proxy.bypass(request)
            else:
                response = urllib2.urlopen(request)
            results_xml = response.read()
            nn_results = self.parse_newznab_xml(results_xml)
            for result in nn_results:
                results.append(result)
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception, e:  # noqa
            # one failing indexer should not abort the others
            logging.error(u'NewzNab search_all get xml', exc_info=True)
    # NOTE(review): no `return results` is visible in this view -- the
    # function tail may be truncated; confirm the accumulated list is returned.
def search(imdbid):
    ''' Search ThePirateBay for a movie by imdb id

    imdbid: str imdb id #

    Returns list of dicts of parsed results ([] on failure)
    '''
    logging.info('Searching ThePirateBay for {}.'.format(imdbid))
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    url = 'https://www.thepiratebay.org/search/{}/0/99/200'.format(imdbid)
    headers = {'Cookie': 'lw=s'}

    try:
        if use_proxy and Proxy.whitelist('https://www.thepiratebay.org') is True:
            html = Url.open(url, proxy_bypass=True, headers=headers).text
        else:
            html = Url.open(url, headers=headers).text

        if not html:
            return []
        return ThePirateBay.parse(html, imdbid)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception as e:  # noqa
        logging.error('ThePirateBay search failed.', exc_info=True)
        return []
def search_potato(self, imdbid):
    ''' Search all enabled TorrentPotato providers.

    imdbid: str imdb id #

    Returns list of dicts with movie info
    '''
    proxy_enabled = core.CONFIG['Server']['Proxy']['enabled']
    indexers = core.CONFIG['Indexers']['TorrentPotato'].values()
    results = []
    for indexer in indexers:
        # indexer is [url, passkey, enabled]
        if indexer[2] is False:
            continue
        url = indexer[0]
        # normalize the base url to never end with a slash
        if url[-1] == u'/':
            url = url[:-1]
        passkey = indexer[1]
        search_string = u'{}?passkey={}&t=movie&imdbid={}'.format(
            url, passkey, imdbid)
        # log a redacted copy of the url -- never the real passkey
        logging.info(
            u'SEARCHING: {}?passkey=PASSKEY&t=movie&imdbid={}'.format(
                url, imdbid))
        request = urllib2.Request(search_string,
                                  headers={'User-Agent': 'Mozilla/5.0'})
        try:
            if proxy_enabled and Proxy.whitelist(url) is True:
                # presumably Proxy.bypass returns a file-like object
                # (read() below) -- TODO confirm
                response = Proxy.bypass(request)
            else:
                response = urllib2.urlopen(request)
            torrent_results = json.loads(response.read()).get('results')
            if torrent_results:
                for i in torrent_results:
                    results.append(i)
            else:
                continue
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception, e:  # noqa
            # one failing provider should not abort the others
            logging.error(u'Torrent search_potato.', exc_info=True)
            continue
    # NOTE(review): no `return results` is visible in this view -- the
    # function tail may be truncated; confirm the accumulated list is returned.
def _get_rss(self): ''' Get latest uploads from all indexers Returns list of dicts with parsed nzb info ''' self.imdbid = None proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] if self.feed_type == 'nzb': indexers = core.CONFIG['Indexers']['NewzNab'].values() else: indexers = core.CONFIG['Indexers']['TorzNab'].values() results = [] for indexer in indexers: if indexer[2] is False: continue url_base = indexer[0] if url_base[-1] != u'/': url_base = url_base + '/' apikey = indexer[1] url = u'{}api?t=movie&cat=2000&extended=1&offset=0&apikey={}'.format( url_base, apikey) logging.info( u'RSS_SYNC: {}api?t=movie&cat=2000&extended=1&offset=0&apikey=APIKEY' .format(url_base)) request = Url.request(url) try: if proxy_enabled and Proxy.whitelist(url) is True: response = Proxy.bypass(request) else: response = Url.open(request) return self.parse_newznab_xml(response) except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'Newz/TorzNab rss get xml.', exc_info=True)
def search(imdbid): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Searching Rarbg for {}'.format(imdbid)) if Rarbg.timeout: now = datetime.datetime.now() while Rarbg.timeout > now: time.sleep(1) now = datetime.datetime.now() if not Rarbg.token: Rarbg.token = Rarbg.get_token() if Rarbg.token is None: logging.error(u'Unable to get rarbg token.') return [] url = u'https://torrentapi.org/pubapi_v2.php?token={}&mode=search&search_imdb={}&category=movies&format=json_extended'.format( Rarbg.token, imdbid) request = urllib2.Request(url, headers={'User-Agent': 'Mozilla/5.0'}) Rarbg.timeout = datetime.datetime.now() + datetime.timedelta(seconds=2) try: if proxy_enabled and Proxy.whitelist( 'https://torrentapi.org') is True: response = Proxy.bypass(request) else: response = urllib2.urlopen(request) response = urllib2.urlopen(request, timeout=60).read() response = json.loads(response).get('torrent_results') if response: results = Rarbg.parse(response) return results else: logging.info(u'Nothing found on rarbg.to') return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'Rarbg search.', exc_info=True) return []
def get_rss(): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Fetching latest RSS from Rarbg.') if Rarbg.timeout: now = datetime.datetime.now() while Rarbg.timeout > now: time.sleep(1) now = datetime.datetime.now() if not Rarbg.token: Rarbg.token = Rarbg.get_token() if Rarbg.token is None: logging.error(u'Unable to get Rarbg token.') return [] url = u'https://www.torrentapi.org/pubapi_v2.php?token={}&mode=list&category=movies&format=json_extended&app_id=Watcher'.format(Rarbg.token) request = Url.request(url) Rarbg.timeout = datetime.datetime.now() + datetime.timedelta(seconds=2) try: if proxy_enabled and Proxy.whitelist('https://www.torrentapi.org') is True: response = Proxy.bypass(request) else: response = Url.open(request) results = json.loads(response).get('torrent_results') if results: return Rarbg.parse(results) else: logging.info(u'Nothing found in Rarbg RSS.') return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'Rarbg RSS fetch failed.', exc_info=True) return []
def get_rss(): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Fetching latest RSS from BitSnoop.') url = u'https://www.bitsnoop.com/browse/video-movies/?sort=dt_reg&fmt=rss' request = Url.request(url) try: if proxy_enabled and Proxy.whitelist('https://www.bitsnoop.com') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return BitSnoop.parse(response, None) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'BitSnoop RSS fetch failed.', exc_info=True) return []
def search(imdbid, term): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Searching Torrentz2 for {}.'.format(term)) url = u'https://www.torrentz2.eu/feed?f={}'.format(term) request = Url.request(url) try: if proxy_enabled and Proxy.whitelist('https://www.torrentz2.eu') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return Torrentz2.parse(response, imdbid) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'Torrentz2 search failed.', exc_info=True) return []
def get_rss(): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Fetching latest RSS from ThePirateBay.') url = u'https://www.thepiratebay.org/browse/201/0/3/0' request = Url.request(url) try: if proxy_enabled and Proxy.whitelist('https://www.thepiratebay.org') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return ThePirateBay.parse(response, None) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'ThePirateBay RSS fetch failed.', exc_info=True) return []
def get_rss(): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Fetching latest RSS from Torrentz2.') url = u'https://www.torrentz2.eu/feed?f=movies' request = Url.request(url) try: if proxy_enabled and Proxy.whitelist('https://www.torrentz2.eu') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return Torrentz2.parse(response, None) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'Torrentz2 RSS fetch failed.', exc_info=True) return []
def search(imdbid, term): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Searching BitSnoop for {}.'.format(term)) url = u'https://www.bitsnoop.com/search/video/{}/c/d/1/?fmt=rss'.format(term) request = Url.request(url) try: if proxy_enabled and Proxy.whitelist('https://www.bitsnoop.com') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return BitSnoop.parse(response, imdbid) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'BitSnoop search failed.', exc_info=True) return []
def get_rss(): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Fetching latest RSS from ExtraTorrent.') url = u'https://www.extratorrent.cc/rss.xml?cid=4&type=today' request = Url.request(url) try: if proxy_enabled and Proxy.whitelist('https://www.extratorrent.cc') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return ExtraTorrent.parse(response, None) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'ExtraTorrent RSS fetch failed.', exc_info=True) return []
def search(imdbid, term): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Searching ExtraTorrent for {}.'.format(term)) url = u'https://www.extratorrent.cc/rss.xml?type=search&cid=4&search={}'.format(term) request = Url.request(url) try: if proxy_enabled and Proxy.whitelist('https://www.extratorrent.cc') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return ExtraTorrent.parse(response, imdbid) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'ExtraTorrent search failed.', exc_info=True) return []
def __init__(self, settings):
    ''' Build the Qt application: web page/view, timeout timer, the JS
    bridge object and the task queue.

    settings: mapping of dotted option names, e.g. 'application.visible',
              'application.settings.load_images'
    '''
    super(Application, self).__init__([])
    self.settings = settings
    # per-frame bookkeeping, keyed by generated frame object name
    self._frames_data = {}
    self._visible = int(self.settings['application.visible'])
    self.web_page = WebPage()
    self.web_page.onLog.connect(self.onLog)
    self.web_page.frameCreated.connect(self._on_frame_created)
    frame = self.web_page.mainFrame()
    # frame_data is unused here; _register_frame stores it internally
    frame_data = self._register_frame(frame)
    # re-inject the bridge each time the frame's JS window object is reset
    frame.javaScriptWindowObjectCleared.connect(
        partial(self._on_pageload_finished, frame=frame))
    st = self.web_page.settings()
    st.setAttribute(st.AutoLoadImages,
                    int(self.settings['application.settings.load_images']))
    self.web_view = QWebView()
    self.web_view.setPage(self.web_page)
    self.timer = QTimer()
    self.timer.timeout.connect(self._on_timeout)
    # Proxy here is the QObject exposed to in-page javascript as `bot`
    # (not an http proxy) -- confirm against the Proxy class definition
    self.proxy = Proxy()
    self.proxy.onLog.connect(self.onLog)
    self.proxy.onAddQueue.connect(self.add_queue)
    self.proxy.onTrigger.connect(self._on_trigger)
    self.proxy.onCallHandler.connect(self._on_call_handler)
    self._queue = Queue()
    self.clear_handlers()
def search(imdbid, term): proxy_enabled = core.CONFIG['Server']['Proxy']['enabled'] logging.info(u'Searching ThePirateBay for {}.'.format(term)) url = u'https://www.thepiratebay.org/search/{}/0/99/200'.format(term) request = Url.request(url) request.add_header('Cookie', 'lw=s') try: if proxy_enabled and Proxy.whitelist('https://www.thepiratebay.org') is True: response = Proxy.bypass(request) else: response = Url.open(request) if response: return ThePirateBay.parse(response, imdbid) else: return [] except (SystemExit, KeyboardInterrupt): raise except Exception, e: # noqa logging.error(u'ThePirateBay search failed.', exc_info=True) return []
def search(imdbid):
    ''' Search the Rarbg api for a movie by imdb id

    imdbid: str imdb id #

    Waits out Rarbg's rate-limit cooldown and re-uses the cached api token.

    Returns list of dicts of parsed results ([] on failure)
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Searching Rarbg for {}.'.format(imdbid))

    # rate-limit: wait until the previous request's cooldown has expired
    if Rarbg.timeout:
        now = datetime.datetime.now()
        while Rarbg.timeout > now:
            time.sleep(1)
            now = datetime.datetime.now()

    if not Rarbg.token:
        Rarbg.token = Rarbg.get_token()
        if Rarbg.token is None:
            logging.error('Unable to get Rarbg token.')
            return []

    url = 'https://www.torrentapi.org/pubapi_v2.php?token={}&mode=search&search_imdb={}&category=movies&format=json_extended&app_id=Watcher'.format(
        Rarbg.token, imdbid)

    # start the next cooldown window
    Rarbg.timeout = datetime.datetime.now() + datetime.timedelta(seconds=2)

    try:
        if use_proxy and Proxy.whitelist('https://www.torrentapi.org') is True:
            body = Url.open(url, proxy_bypass=True).text
        else:
            body = Url.open(url).text

        torrents = json.loads(body).get('torrent_results')
        if not torrents:
            logging.info('Nothing found on Rarbg.')
            return []
        return Rarbg.parse(torrents)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception as e:  # noqa
        logging.error('Rarbg search failed.', exc_info=True)
        return []
def search(imdbid, term):
    ''' Search SkyTorrents' rss feed for a movie

    imdbid: str imdb id #
    term: str url-safe search term

    Returns list of dicts of parsed results ([] on failure)
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Searching SkyTorrents for {}.'.format(term))

    url = 'https://www.skytorrents.in/rss/all/ed/1/{}'.format(term)

    try:
        if use_proxy and Proxy.whitelist('https://www.skytorrents.in') is True:
            feed = Url.open(url, proxy_bypass=True).text
        else:
            feed = Url.open(url).text

        if not feed:
            return []
        return SkyTorrents.parse(feed, imdbid)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception as e:  # noqa
        logging.error('SkyTorrents search failed.', exc_info=True)
        return []
def get_rss():
    ''' Fetch the latest movie uploads from the LimeTorrents rss feed

    Returns list of dicts of parsed results ([] on failure)
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Fetching latest RSS from LimeTorrents.')

    url = 'https://www.limetorrents.cc/rss/16/'

    try:
        if use_proxy and Proxy.whitelist('https://www.limetorrents.cc') is True:
            feed = Url.open(url, proxy_bypass=True).text
        else:
            feed = Url.open(url).text

        if not feed:
            return []
        return LimeTorrents.parse(feed, None)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception as e:  # noqa
        logging.error('LimeTorrent RSS fetch failed.', exc_info=True)
        return []
def test_build_fetch_with_proxy(self):
    ''' build_fetch should route requests through the given proxy
    and still return an html document. (Hits the network.) '''
    proxy = Proxy('http', '113.31.46.205', '81')
    fetch = build_fetch(proxy=proxy)
    page = fetch('http://www.baidu.com')
    self.assertTrue('html' in page)
class Application(QApplication):
    ''' Qt application that drives a QtWebKit page from a queue of
    navigation tasks.

    Each task ({'goto': url, 'expects': [...]}) loads a page; a javascript
    snippet injected into every frame polls the declared "expects"
    (host/path/hash regexes and CSS selector conditions) and fires named
    triggers back into python via the `bot` bridge object (self.proxy).
    '''
    _queue = None            # Queue of pending navigation tasks
    _handlers = None         # trigger/handler name -> python callable
    _active_task = None      # task currently being processed
    _timeout_expects = None  # expects installed when the timeout timer fires
    _visible = False         # show the QWebView window when truthy
    _frames_data = None      # frame object name -> FrameData
    onLog = pyqtSignal(int, str)

    def __init__(self, settings):
        ''' Build the Qt application: web page/view, timeout timer, the JS
        bridge object and the task queue.

        settings: mapping of dotted option names, e.g. 'application.visible'
        '''
        super(Application, self).__init__([])
        self.settings = settings
        self._frames_data = {}
        self._visible = int(self.settings['application.visible'])
        self.web_page = WebPage()
        self.web_page.onLog.connect(self.onLog)
        self.web_page.frameCreated.connect(self._on_frame_created)
        frame = self.web_page.mainFrame()
        # frame_data is unused here; _register_frame stores it internally
        frame_data = self._register_frame(frame)
        # re-inject the bridge each time the frame's JS window object is reset
        frame.javaScriptWindowObjectCleared.connect(
            partial(self._on_pageload_finished, frame=frame))
        st = self.web_page.settings()
        st.setAttribute(st.AutoLoadImages,
                        int(self.settings['application.settings.load_images']))
        self.web_view = QWebView()
        self.web_view.setPage(self.web_page)
        self.timer = QTimer()
        self.timer.timeout.connect(self._on_timeout)
        # Proxy here is the QObject exposed to in-page javascript as `bot`
        # (not an http proxy) -- confirm against the Proxy class definition
        self.proxy = Proxy()
        self.proxy.onLog.connect(self.onLog)
        self.proxy.onAddQueue.connect(self.add_queue)
        self.proxy.onTrigger.connect(self._on_trigger)
        self.proxy.onCallHandler.connect(self._on_call_handler)
        self._queue = Queue()
        self.clear_handlers()

    def start(self):
        ''' Load the first queued task and enter the Qt event loop.
        Returns the event loop's exit code. '''
        self._on_next_queue()
        if self._visible:
            self.show()
        return self.exec_()

    def _on_next_queue(self, app=None):
        ''' Mark the current task done (if any) and start the next one.
        Exits the application when the queue stays empty for 15s. '''
        if not self._active_task is None:
            self._queue.task_done()
        try:
            self._active_task = task = self._queue.get(timeout=15)
        except Empty:
            self.onLog.emit(INFO, 'No more task in queue')
            self.exit(0)
            return
        self.proxy.set_expects(task['expects'])
        # suppress expect polling until the new page has loaded
        self.proxy._trigger_wait_page_load = True
        self.web_page.mainFrame().load(QUrl(task['goto']))

    def show(self):
        # display the browser window (only used when _visible is set)
        self.web_view.show()

    def add_queue(self, task):
        # enqueue a navigation task (also connected to proxy.onAddQueue)
        self._queue.put(task)

    def _on_timeout(self):
        ''' Timeout timer fired: install the timeout expects and stop the
        current page load. '''
        self.onLog.emit(DEBUG, 'Expects timeout')
        self.set_expects(self._timeout_expects or [])
        self.web_page.triggerAction(QWebPage.Stop)

    def _register_frame(self, frame):
        ''' Give the frame a unique object name and track its FrameData.
        Returns the new FrameData. '''
        data = FrameData()
        data.set_name('frame-' + uuid4().hex)
        frame.setObjectName(data.name)
        self._frames_data[data.name] = data
        return data

    def _on_frame_created(self, frame):
        ''' A sub-frame appeared: register it and hook its page-load signal. '''
        frame_data = self._register_frame(frame)
        frame.javaScriptWindowObjectCleared.connect(
            partial(self._on_pageload_finished, frame=frame))
        frame.destroyed.connect(self._on_frame_destroyed)

    def _on_frame_destroyed(self, frame):
        # drop the bookkeeping entry for a destroyed frame
        del self._frames_data[str(frame.objectName())]

    def _on_pageload_finished(self, frame=None):
        ''' Expose the python bridge objects to the frame's javascript and
        inject the expect-polling script. '''
        if frame is None:
            # this happens if the frame was forced stop, probably, not sure
            self.error('Problem while loading the page.')
            self.exit(-1)
            return
        # page is ready; allow expect polling again
        self.proxy._trigger_wait_page_load = False
        self.onLog.emit(DEBUG,
                        'DOMContentLoaded ' + frame.baseUrl().toString())
        frame.addToJavaScriptWindowObject('bot', self.proxy)
        frame.addToJavaScriptWindowObject(
            'botFrameData', self._frames_data[str(frame.objectName())])
        # polling script: every 3s, evaluate each expect; when all of an
        # expect's conditions hold, fire its trigger back into python
        frame.evaluateJavaScript("""
            (function() {
                function process_expectation(expect) {
                    var selectors;
                    if (expect.host) {
                        var host = document.location.host;
                        if (!expect.host.match(host)) {
                            bot.debug(expect.trigger + ' location.host: "' +
                                      host + '" "' + expect.host + '"');
                            return;
                        }
                    }
                    if (expect.path) {
                        var path = document.location.pathname;
                        if (!expect.path.match(path)) {
                            bot.debug(expect.trigger + ' location.pathname: "' +
                                      path + '" "' + expect.path + '"');
                            return;
                        }
                    }
                    if (expect.hash) {
                        var hash = document.location.hash;
                        if (!expect.hash.match(hash)) {
                            bot.debug(expect.trigger + ' location.hash: "' +
                                      hash + '" "' + expect.hash + '"');
                            return;
                        }
                    }
                    selectors = expect.selectorNotExists || [];
                    for (var ii = 0; ii < selectors.length; ii++) {
                        if (document.querySelector(selectors[ii])) {
                            bot.debug(expect.trigger + ' selector-not-exists: ' +
                                      selectors[ii]);
                            return;
                        }
                    }
                    selectors = expect.selectorExists || [];
                    for (var ii = 0; ii < selectors.length; ii++) {
                        if (!document.querySelector(selectors[ii])) {
                            bot.debug(expect.trigger + ' selector-exists: ' +
                                      selectors[ii]);
                            return;
                        }
                    }
                    bot.info(expect.trigger + ' triggered.');
                    bot.trigger(expect.trigger, expect.triggerArgs || {},
                                expect.triggerDelay || 0, botFrameData);
                }
                window.bothelp_clickElement = function(el) {
                    if (el.fireEvent) {
                        el.fireEvent('onclick');
                    } else {
                        var evt = document.createEvent('Events');
                        evt.initEvent('click', true, false);
                        el.dispatchEvent(evt);
                    }
                };
                document.addEventListener('DOMContentLoaded', function() {
                    window.setInterval(function() {
                        if (!bot.active || bot.trigger_wait_page_load) return;
                        for (var ii = 0; ii < bot.expects.length; ii++) {
                            process_expectation(bot.expects[ii]);
                        }
                    }, 3000);
                });
            }());
        """)

    def execjs(self, text):
        # evaluate javascript in the frame of the last trigger/call
        return self.frame.evaluateJavaScript(text)

    def set_expects(self, expects):
        ''' Validate/normalize expect dicts and hand them to the JS bridge.
        Unknown fields are logged as warnings, not rejected. '''
        newlist = make_list(expects)
        for item in newlist:
            for key in item:
                if not key in ('path', 'hash', 'host', 'selectorExists',
                               'selectorNotExists', 'trigger', 'triggerArgs',
                               'triggerDelay'):
                    self.onLog.emit(WARNING,
                                    '%s is not a valid expect field' % key)
            item['selectorExists'] = make_list(item.get('selectorExists'))
            item['selectorNotExists'] = make_list(
                item.get('selectorNotExists'))
        self.proxy.set_expects(newlist)

    def set_timeout_expects(self, timeout, expects):
        ''' Arm the timeout timer (seconds) with fallback expects. '''
        self.timer.start(timeout * 1000)
        # NOTE(review): `timeout_expects` is not defined in this scope --
        # this looks like it should be `expects`; as written this raises
        # NameError when called. Confirm and fix.
        self._timeout_expects = timeout_expects

    def add_handler(self, name, value):
        # register a python callable for a named trigger/call
        self._handlers[name] = value

    def clear_handlers(self):
        # clear handlers registration
        self._handlers = {'bot.nextQueue': self._on_next_queue}

    def _on_trigger(self, trigger_name, trigger_args, frame_data):
        ''' A JS expect fired: dispatch to the registered python handler. '''
        # in python2 QString can't be used as dictionary keys
        trigger_name = str(trigger_name)
        trigger_args = dict((str(key), trigger_args[key]) for \
            key in trigger_args)
        if trigger_name in self._handlers:
            # remember which frame fired so handlers can execjs() into it
            self.frame = self.web_page.findChild(QWebFrame, frame_data.name)
            self._handlers[trigger_name](self, **trigger_args)
        else:
            self.onLog.emit(ERROR,
                            'no handler for trigger %s' % trigger_name)
            self.exit(-1)

    def _on_call_handler(self, handler_name, handler_args, frame_data):
        ''' Direct call from JS into a registered python handler. '''
        # in python2 QString can't be used as dictionary keys
        handler_name = str(handler_name)
        handler_args = dict((str(key), handler_args[key]) for \
            key in handler_args)
        if handler_name in self._handlers:
            self.frame = self.web_page.findChild(QWebFrame, frame_data.name)
            self._handlers[handler_name](self, **handler_args)
        else:
            self.onLog.emit(ERROR, 'no handler for call %s' % handler_name)
            self.exit(-1)

    # logging conveniences: forward through the onLog signal
    def info(self, message):
        self.onLog.emit(INFO, message)

    def debug(self, message):
        self.onLog.emit(DEBUG, message)

    def error(self, message):
        self.onLog.emit(ERROR, message)

    def warn(self, message):
        self.onLog.emit(WARNING, message)
bruteforce_parser.add_argument("-m", metavar="MASK", dest="mask", help="a masked, 10-digit US phone number as in: 555XXX1234") bruteforce_parser.add_argument("-d", metavar="DICTIONARY", dest="file", help="a file with a list of numbers to try") bruteforce_parser.add_argument("-p", metavar="PROXYLIST", dest="proxies", help="a file with a list of HTTPS proxies to use. Format: https://127.0.0.1:8080") bruteforce_parser.add_argument("-q", dest="quiet", action="store_true", help="use services that do not alert the victim") bruteforce_parser.add_argument("-v", dest="verbose", action="store_true", help="verbose output") return parser.parse_args() if __name__ == '__main__': args = parse_arguments() settings = Settings(args) colors = Colors() proxy_instance = Proxy(settings, colors) user_agents_instance = UserAgentsCycle(settings) if args.action == Action.SCRAPE: start_scraping(args.email, args.quiet, user_agents_instance, proxy_instance, colors) elif args.action == Action.GENERATE: phonenumber_supplier = AgnosticSupplier.get_supplier(PHONE_NUMBER)(settings, user_agents_instance, proxy_instance, colors, args.mask) possible_phone_numbers = phonenumber_supplier.get() phonenumber_supplier.dump_supplied_phones(args.file, possible_phone_numbers) elif args.action == Action.BRUTE_FORCE: bruteforce(args, colors, user_agents_instance, proxy_instance, settings)
def test_assemble(self):
    ''' assemble() should join scheme, host and port into a url string. '''
    proxy = Proxy('http', 'localhost', '80')
    assembled = proxy.assemble()
    self.assertEqual(assembled, 'http://localhost:80')