def getPosts(self, uri, min=None, max=None, text=None):
    """Get a mailing list's posts, (re)building the cache when needed.

    @param uri: mailing list's uri
    @param min: min date filter, or None for no lower bound
    @param max: max date filter, or None for no upper bound
    @param text: text to search for, or None for no text filter
    @return: list of posts, or None when the URI cannot be parsed
    """
    # (Re)build the cache on first use, or when the URI changed / last parse failed.
    if self.cache is None:
        pb = LoadProgressBar()
        self.cache = Cache(uri, pb)
        pb.destroy()
    elif uri != self.cache.uri or self.cache.bad:
        pb = LoadProgressBar()
        self.cache = Cache(uri, pb)
        pb.destroy()
        min, max = self.getDates()
    if self.cache.bad:
        self.alert('An exception ocurred parsing this URI')
        return None
    posts = self.cache.query()
    if posts is None:
        # BUG FIX: original used `self.uri`, which is not set by this method;
        # the URI being queried is the `uri` parameter.
        self.messageBar('unknow problem parsing RDF at ' + uri)
        return None
    if min is not None or max is not None or text is not None:
        posts = self.cache.filterPosts(posts, min, max, text)
    return posts
def __init__(self, args):
    """Assemble the run options and shared state containers from parsed CLI args."""
    self.options = dict(
        url=args.url,
        prefix='',
        user_agent=args.user_agent,
        proxy=args.proxy,
        verbosity=args.verbosity,
        threads=10,
        chunk_size=10,  # same as threads
        run_all=args.run_all,
        match_all=args.match_all,
        stop_after=args.stop_after,
        no_cache_load=args.no_cache_load,
        no_cache_save=args.no_cache_save,
    )
    self.data = dict(
        cache=Cache(),
        results=Results(self.options),
        fingerprints=Fingerprints(),
        matcher=Match(),
        colorizer=Color(),
        printer=Printer(args.verbosity, Color()),
        detected_cms=set(),
        error_pages=set(),
        queue=queue.Queue(),
        requested=queue.Queue(),
    )
    # Wire the shared printer into the results sink, then build the requester
    # on top of the fully-populated option/data tables.
    self.data['results'].set_printer(self.data['printer'])
    self.data['requester'] = Requester(self.options, self.data)
def __init__(self, args):
    """Set up options, cache, printer and shared state from parsed CLI args."""
    urls = None
    if args.input_file is not None:
        # Batch mode: silence interactive output and load one target per line,
        # defaulting the scheme to http:// when none is given.
        args.quiet = True
        urls = []
        with open(args.input_file, 'r') as fh:
            for line in fh:
                target = line.strip()
                if '://' not in target:
                    target = 'http://' + target
                urls.append(target)
    else:
        # Single-target mode: normalize case and default the scheme.
        args.url = args.url.lower()
        if '://' not in args.url:
            args.url = 'http://' + args.url
    printer = Printer(args.verbosity)
    cache = Cache()
    cache.printer = printer
    self.options = {
        'url': args.url,
        'urls': urls,
        'quiet': args.quiet,
        'prefix': '',
        'user_agent': args.user_agent,
        'proxy': args.proxy,
        'verbosity': args.verbosity,
        'threads': 10,
        'batch_size': 20,
        'run_all': args.run_all,
        'match_all': args.match_all,
        'stop_after': args.stop_after,
        'no_cache_load': args.no_cache_load,
        'no_cache_save': args.no_cache_save,
        'write_file': args.output_file,
        'subdomains': args.subdomains,
    }
    self.data = {
        'cache': cache,
        'results': Results(self.options),
        'fingerprints': Fingerprints(),
        'matcher': Match(),
        'printer': printer,
        'detected_cms': set(),
        'error_pages': set(),
        'requested': queue.Queue(),
    }
    if self.options['write_file'] is not None:
        self.json_outputter = OutputJSON(self.options, self.data)
    printer.print_logo()
    self.results = None
def getPosts(self, uri, min=None, max=None, text=None):
    """Get a mailing list's posts, (re)building the cache when needed.

    @param uri: mailing list's uri
    @param min: min date filter, or None for no lower bound
    @param max: max date filter, or None for no upper bound
    @param text: text to search for, or None for no text filter
    @return: list of posts, or None when the URI cannot be parsed
    """
    # (Re)build the cache on first use, or when the URI changed / last parse failed.
    if self.cache is None:
        pb = LoadProgressBar()
        self.cache = Cache(uri, pb)
        pb.destroy()
    elif uri != self.cache.uri or self.cache.bad:
        pb = LoadProgressBar()
        self.cache = Cache(uri, pb)
        pb.destroy()
        min, max = self.getDates()
    if self.cache.bad:
        self.alert('An exception ocurred parsing this URI')
        return None
    posts = self.cache.query()
    # Robustness fix: query() can fail and yield None; filtering None would
    # blow up inside filterPosts, so bail out early instead.
    if posts is None:
        return None
    if min is not None or max is not None or text is not None:
        posts = self.cache.filterPosts(posts, min, max, text)
    return posts
def __init__(self, args):
    """Initialize run options, shared data and optional JSON output from CLI args."""
    urls = None
    interactive = True
    if args.input_file is not None:
        # Batch mode: one target per line, defaulting the scheme to http://.
        interactive = False
        urls = []
        with open(args.input_file, 'r') as fh:
            for line in fh:
                target = line.strip()
                urls.append(target if '://' in target else 'http://' + target)
    elif '://' not in args.url:
        args.url = 'http://' + args.url
    self.options = {
        'url': args.url,
        'urls': urls,
        'interactive': interactive,
        'prefix': '',
        'user_agent': args.user_agent,
        'proxy': args.proxy,
        'verbosity': args.verbosity,
        'threads': 10,
        'batch_size': 20,
        'run_all': args.run_all,
        'match_all': args.match_all,
        'stop_after': args.stop_after,
        'no_cache_load': args.no_cache_load,
        'no_cache_save': args.no_cache_save,
        'write_file': args.output_file,
    }
    self.data = {
        'cache': Cache(),
        'results': Results(self.options),
        'fingerprints': Fingerprints(),
        'matcher': Match(),
        'colorizer': Color(),
        'printer': Printer(args.verbosity, Color()),
        'detected_cms': set(),
        'error_pages': set(),
        'requested': queue.Queue(),
    }
    # Only create the JSON outputter when an output file was requested.
    if self.options['write_file'] is not None:
        self.json_outputter = OutputJSON(self.options, self.data)
def __init__(self, host, cache, results):
    """Prepare the request machinery for *host*, defaulting cache and results."""
    super().__init__(requests)
    self.host = host
    self.threads = 10
    self.workers = []
    self.queue = queue.Queue()
    self.requested = queue.Queue()
    # Fall back to fresh instances when no cache/results were supplied.
    self.cache = cache if cache else Cache()
    self.results = results if results else Results()
def __init__(self, host, profile, verbose, desperate):
    """Wire up plugins, profile, cache and logging for a scan of *host*."""
    self.plugins = self.load_plugins()
    self.host = host
    self.verbose = verbose
    self.results = Results()
    self.cache = Cache()
    self.profile = Profile(profile)
    self.colorizer = Color()
    self.logs = Log()
    # Run URL checks and redirect handling first, then point the cache at
    # whatever self.host ends up being afterwards.
    self.check_url()
    self.redirect()
    self.cache.set_host(self.host)
    self.desperate = Desperate() if desperate else None
def reset(self):
    """Discard accumulated scan state: fresh cache and empty results."""
    self.data['cache'] = Cache()
    self.data['results'] = Results(self.options)
ap.add_argument("-d", "--debug", dest="log_level", action="store_const", const=logging.DEBUG, default=logging.INFO, help="Show debug log messages") args = ap.parse_args() logging.getLogger().setLevel(args.log_level) ngrok_service = NgrokService() ngrok_service.start(settings.API_PORT) ngrok_url = ngrok_service.get_url() postback_api = PostbackApi(ngrok_url) catappult_api = CatappultApi(settings.CATAPPULT_API_HOST, Cache()) one_step_validator = OneStepPaymentValidator(postback_api, catappult_api) logging.info("Testing One-Step Payment...") purchase_info = get_purchase_info(ngrok_url) try: is_verified = one_step_validator.validate(purchase_info) except ValidatorException as e: logging.error(e) is_verified = False if is_verified: logging.info("Transaction: '{}' was successfully verified".format( purchase_info)) else: logging.error(