def _getQuote(self, source, searchString, searchNickname, index):
    """Say one quote from the log for *source* that matches *searchString*.

    If searchNickname is true the regex is applied to the nick portion of
    each logged line, otherwise to the message body. An out-of-range
    *index* picks a random match instead.
    """
    if not self.storage or source not in self.storage:
        return IRCResponse(ResponseType.Say, "There are no quotes in the log.", source)
    pattern = re2.compile(searchString, re2.IGNORECASE)
    matches = []
    for line in self.storage[source]:
        if searchNickname:
            # A "*" at column 21 marks a /me action line: the nick runs up
            # to the next space; otherwise it sits between "<" and ">".
            if line[21] == "*":
                haystack = line[:line.find(" ", 23)]
            else:
                haystack = line[line.find("<") + 1:line.find(">")]
        else:
            haystack = line[line.find(">") + 1:]
        if re2.search(pattern, haystack):
            matches.append(line)
    if not matches:
        return IRCResponse(ResponseType.Say, f"No matches for '{searchString}' found.", source)
    if not 0 <= index < len(matches):
        index = random.randint(0, len(matches) - 1)
    return IRCResponse(
        ResponseType.Say,
        f"Quote #{index + 1}/{len(matches)}: {matches[index]}",
        source)
def on_call(self, call, process):
    """Flag processes spawned with a hidden window.

    Covers powershell "-win/-windowstyle hidden" command lines, the
    CREATE_NO_WINDOW creation flag, and ShellExecuteExW with SW_HIDE.
    """
    if call["api"] == "CreateProcessInternalW":
        cmdline = self.get_argument(call, "CommandLine").lower()
        flags = int(self.get_argument(call, "CreationFlags"), 16)
        hidden_ps = "powershell" in cmdline and (
            re.search("-win[ ]+hidden", cmdline) or
            re.search("-windowstyle[ ]+hidden", cmdline))
        # CREATE_NO_WINDOW (0x08000000) is ignored when CREATE_NEW_CONSOLE
        # (0x10) or DETACHED_PROCESS (0x8) is also set.
        no_window = flags & 0x08000000 and not (flags & 0x10 or flags & 0x8)
        if hidden_ps or no_window:
            parent = process["process_name"]
            child = self.get_argument(call, "ApplicationName")
            self.hidden.append((parent, child))
            self.data.append({"Process": parent + " -> " + child})
    elif call["api"] == "ShellExecuteExW":
        # SW_HIDE == 0
        if int(self.get_argument(call, "Show"), 10) == 0:
            parent = process["process_name"]
            child = self.get_argument(call, "FilePath")
            self.hidden.append((parent, child))
            self.data.append({"Process": parent + " -> " + child})
def impfuzzy_comp(self, list, list_new):
    """Compute pairwise impfuzzy similarity scores.

    Compares every valid hash in *list_new* against the later entries of
    *list_new*, and (when *list* is non-empty) against every valid hash in
    *list*. Each item is a sequence whose [0] is an identifier and [2] is
    the impfuzzy/ssdeep-style hash. Returns [[id_a, id_b, score], ...].

    Note: the parameter name ``list`` shadows the builtin; it is kept for
    interface compatibility with existing callers.
    """
    # Raw string: the original "\/" and "\+" are invalid escape sequences
    # in a normal string literal (SyntaxWarning on modern Python); the
    # pattern value itself is unchanged.
    ssdeep = re.compile(r"^[0-9]{1,5}:[0-9a-zA-Z\/\+]+:[0-9a-zA-Z\/\+]+$", re.DOTALL)

    def _valid(h):
        # Well-formed hash, and short enough to be worth comparing.
        return re.search(ssdeep, h) and len(h) < 150

    complist = []
    list_len = len(list_new)
    i = 0
    for item_new in list_new:
        i += 1
        if _valid(item_new[2]):
            for j in range(i, list_len):
                if _valid(list_new[j][2]):
                    complist.append([
                        item_new[0], list_new[j][0],
                        pyimpfuzzy.hash_compare(item_new[2], list_new[j][2])
                    ])
    if list:
        for item_new in list_new:
            if _valid(item_new[2]):
                for item in list:
                    if _valid(item[2]):
                        complist.append([
                            item_new[0], item[0],
                            pyimpfuzzy.hash_compare(item_new[2], item[2])
                        ])
    return complist
def on_call(self, call, process):
    """Mark processes created with a hidden window as IOCs.

    Detects hidden powershell command lines, the CREATE_NO_WINDOW creation
    flag, and ShellExecuteExW invoked with SW_HIDE.
    """
    api = call["api"]
    if api == "CreateProcessInternalW":
        args = call["arguments"]
        cmdline = args["command_line"].lower()
        flags = args["creation_flags"]
        hidden_ps = "powershell" in cmdline and (
            re.search("-win[ ]+hidden", cmdline) or
            re.search("-windowstyle[ ]+hidden", cmdline))
        # CREATE_NO_WINDOW is ignored when CREATE_NEW_CONSOLE (0x10) or
        # DETACHED_PROCESS (0x8) is also present.
        no_window = flags & 0x08000000 and not (flags & 0x10 or flags & 0x8)
        if hidden_ps or no_window:
            parent = process["process_name"]
            child = args["command_line"]
            self.hidden.append((parent, child))
            self.mark_ioc("Process", parent + " -> " + child)
    elif api == "ShellExecuteExW":
        # SW_HIDE == 0
        if call["arguments"]["show_type"] == 0:
            parent = process["process_name"]
            child = call["arguments"]["filepath"]
            self.hidden.append((parent, child))
            self.mark_ioc("Process", parent + " -> " + child)
def _route(user, jid, msg):
    # Route an incoming XMPP message body to a view function.
    # NOTE(review): 'self' and 'route' are free variables here — this is
    # presumably a closure defined inside a method. 'route' stays unbound
    # (UnboundLocalError) if no resource pattern matches; confirm callers
    # always provide a matching resource.
    session = env.user.session()
    try:
        # An active interactive session consumes the message directly.
        return session(msg['body'])
    except SessionCallError:
        pass
    message = env.user.resolve_aliases(msg['body'])
    args = {}
    # Select the route table matching the JID resource; named groups from
    # the resource regex seed the view's keyword arguments.
    for r in self.route:
        m = re.search(r['resource'], msg['resource'])
        if m:
            args = m.groupdict()
            route = r['route']
            break
    # First view whose message regex matches wins; its named groups are
    # merged over the resource groups.
    for regex, view in route:
        match = re.search(regex, message)
        if match:
            for g, val in match.groupdict().iteritems():
                args[g] = val
            log.debug(">>> %s routed to %s(%s) via '%s'" % \
                (jid, view.__name__, str(args), regex.pattern))
            return view(**args)
def _vrfy_response(self, r): """ szuka stringów występujących tylko w poprawnych odpowiedziach, jeśli występują przekazuje odpowiedź, jeśli nie -> False ----------------------------------------------------------------""" return r if search('userAcceptedTradeIDs',r) and search('tradeOffers',r)\ and search('defaultGame.Communication.VO.TradeWindow.dTradeWindowResultVO',r)\ else False
def response(context, flow):
    """Called by mitmproxy when a server response has been received.

    Only responses are hooked, because request/response pairs are needed:
    the captured response together with its request forms one HTTP
    transaction, represented by mitmproxy.models.HTTPFlow() ("HTTPFlow is
    collection of objects representing a single HTTP transaction").
    More info: http://docs.mitmproxy.org/en/stable/dev/models.html
    """
    if flow.request.host.endswith('.thesettlersonline.pl'):
        if "application/x-amf" in flow.response.headers.get("Content-Type", "_"):
            with decoded(flow.response):
                res = flow.response.content
                req = flow.request.content
                # Only trade-window offer transactions are of interest.
                if search( 'defaultGame.Communication.VO.TradeWindow.dTradeWindowResultVO', res )\
                and search( 'userAcceptedTradeIDs', res ) and search( 'tradeOffers', res )\
                and search( 'GetAvailableOffers', req ):
                    log.debug("got trade REQ/RESP pair, feeding TDD thread...")
                    try:
                        # Hand the pair off to the trade-data thread.
                        t = Thread(target=ttd._incoming_traffic_handler, args=(context, flow,))
                        t.setDaemon(True)
                        t.start()
                    except (KeyboardInterrupt, SystemExit):
                        log.info('caught either KeyboardInterrupt or SystemExit, quitting threads')
                        # NOTE(review): Thread.__stop() is a private CPython 2
                        # internal; confirm this cleanup path is ever exercised.
                        t.__stop()
                        import thread
                        thread.interrupt_main()
def find_competitor_list(search_text):
    # Heuristically extract a numbered competitor list ("1. name", "2. name",
    # ...) from free-form event text; returns the list text or None.
    processed_text = grammar_matcher.StringProcessor(search_text)
    # Require at least four numbered lines, starting with 1 and 2.
    results_match = re.search(r'\n0*1[^\d].+\n^0*2[^\d].+\n(?:^\d+.+\n){2,}', processed_text.text, re.MULTILINE)
    if results_match:
        numbered_list = results_match.group(0)
        num_lines = numbered_list.count('\n')
        # Many time-like tokens -> this is a schedule, not a competitor list.
        if len(re.findall(r'\d ?[.:h] ?\d\d|\bam\b|\bpm\b', numbered_list)) > num_lines / 4:
            return None # good list of times! workshops, etc! performance/shows/club-set times!
        processed_numbered_list = grammar_matcher.StringProcessor(numbered_list, processed_text.match_on_word_boundaries)
        event_keywords = processed_numbered_list.get_tokens(rules.EVENT)
        # Too many event keywords -> a list of events, not competitors.
        if len(event_keywords) > num_lines / 8:
            return None
        if processed_text.has_token(keywords.WRONG_NUMBERED_LIST):
            return None
        if num_lines > 10:
            return numbered_list
        else:
            lines = numbered_list.split('\n')
            # Lines shaped like "name - crew" or "name (crew)" qualify.
            qualified_lines = len([x for x in lines if re.search(r'[^\d\W].*[-(]', x)])
            if qualified_lines > num_lines / 2:
                return numbered_list
            # Dance-style keywords also qualify a line.
            for type in ['crew', 'pop|boog', 'lock', 'b\W?(?:boy|girl)']:
                qualified_lines = len([x for x in lines if re.search(type, x)])
                if qualified_lines > num_lines / 8:
                    return numbered_list
            if processed_text.match_on_word_boundaries == regex_keywords.WORD_BOUNDARIES:
                # maybe separate on kana vs kanji?
                # Short average line length suggests bare names.
                avg_words = 1.0 * sum([len([y for y in x.split(' ')]) for x in lines]) / num_lines
                if avg_words < 3:
                    return numbered_list
    return None
def on_call(self, call, process):
    """Record (parent, child) pairs for processes spawned with a hidden
    window: hidden powershell command lines, CREATE_NO_WINDOW, and
    ShellExecuteExW with SW_HIDE."""
    if call["api"] == "CreateProcessInternalW":
        cmdline = self.get_argument(call, "CommandLine").lower()
        flags = int(self.get_argument(call, "CreationFlags"), 16)
        hidden_ps = "powershell" in cmdline and (
            re.search("-win[ ]+hidden", cmdline) or
            re.search("-windowstyle[ ]+hidden", cmdline))
        # CREATE_NO_WINDOW is ignored with CREATE_NEW_CONSOLE (0x10) or
        # DETACHED_PROCESS (0x8).
        no_window = flags & 0x08000000 and not (flags & 0x10 or flags & 0x8)
        if hidden_ps or no_window:
            parent = process["process_name"]
            # ApplicationName may be empty; fall back to the command line.
            child = (self.get_argument(call, "ApplicationName") or
                     self.get_argument(call, "CommandLine"))
            self.hidden.append((parent, child))
    elif call["api"] == "ShellExecuteExW":
        # SW_HIDE == 0
        if int(self.get_argument(call, "Show"), 10) == 0:
            self.hidden.append((process["process_name"],
                                self.get_argument(call, "FilePath")))
def _postList(self, source, searchString, searchNickname):
    """Upload every quote matching *searchString* to the paste service and
    reply with a short-lived link.

    When searchNickname is true the regex is matched against the nick
    portion of each logged line, otherwise against the message body.
    """
    if not self.storage or source not in self.storage:
        return IRCResponse(ResponseType.Say, "There are no quotes in the log.", source)
    pattern = re2.compile(searchString, re2.IGNORECASE)
    matches = []
    for line in self.storage[source]:
        if searchNickname:
            # "*" at column 21 marks a /me action line: the nick runs to the
            # next space; otherwise it sits between "<" and ">".
            if line[21] == "*":
                haystack = line[:line.find(" ", 23)]
            else:
                haystack = line[line.find("<") + 1:line.find(">")]
        else:
            haystack = line[line.find(">") + 1:]
        if re2.search(pattern, haystack):
            matches.append(line)
    if not matches:
        return IRCResponse(ResponseType.Say, f"No matches for '{searchString}' found.", source)
    # The paste expires after ten minutes (600 seconds).
    pasteLink = self.bot.moduleHandler.runActionUntilValue(
        'upload-dbco',
        string.stripFormatting("\n".join(matches)),
        10 * 60)
    return IRCResponse(
        ResponseType.Say,
        f"Link posted! (Expires in 10 minutes) {pasteLink}.",
        source)
def find_enc_function(macro):
    """Locate the VBA decoding function inside *macro* source text.

    Returns (function_name, "xor") when an Xor-based decoder is found,
    (function_name, "sub") for a subtraction-based one, else (None, None).
    """
    match = re.search(r"(?ims)Public Function (\w+).+? Xor .+?End Function", macro)
    enc_type = "xor"
    if not match:
        match = re.search(r"(?ims)Public Function (\w+).+?\d+\s*-\s*\d+.+?End Function", macro)
        enc_type = "sub"
    if match:
        return match.group(1), enc_type
    return None, None
def extract_config(file_path, decomp_jar):
    """Extract the QRat configuration from a sample jar.

    The embedded "e-data" entry begins with an 8-byte big-endian seed for a
    java.util.Random keystream that encodes an inner jar. The inner jar is
    decoded, decompiled with *decomp_jar*, and the Utils.* settings are
    scraped from the decompiled source. Returns a dict or None.
    """
    enckey = coded_jar = False
    if not decomp_jar:
        return None
    ret = {}
    try:
        with ZipFile(file_path, "r") as zip:
            for name in zip.namelist():
                if name == "e-data":
                    coded_data = zip.read(name)
                    seed = coded_data[:8]
                    enckey = unpack(">Q", seed)[0]
                    if enckey and coded_data:
                        # Reproduce Java's Random.nextInt(255) keystream.
                        java_rand = JavaRandom(enckey)
                        coded_data = coded_data[8:]
                        decoded_data = ""
                        for i in range(len(coded_data)):
                            key = java_rand.nextInt(255)
                            dec_byte = chr((ord(coded_data[i]) - key + 256) % 256)
                            decoded_data += dec_byte
                        decoded_path = store_temp_file(decoded_data, "qrat.jar")
                        try:
                            p = Popen(["java", "-jar", decomp_jar, decoded_path], stdout=PIPE)
                            decompiled_data = p.stdout.read()
                        except:
                            pass
                        # Scrape configuration values from the decompiled source.
                        match = re.search("Utils\.serverHost = new String\[\] \{(?P<stringlist>[^};\r\n]*)\};", decompiled_data)
                        if match:
                            hostlist = match.group("stringlist").split(",")
                            serverhosts = [x.strip(' "') for x in hostlist]
                            for i in range(len(serverhosts)):
                                ret["ServerHost" + str(i)] = serverhosts[i]
                        match = re.search("Utils\.serverPort = (?P<portnum>\d+);", decompiled_data)
                        if match:
                            ret["ServerPort"] = int(match.group("portnum"))
                        match = re.search("Utils\.instanceControlPortAgent = (?P<portnum>\d+);", decompiled_data)
                        if match:
                            ret["InstanceControlPortAgent"] = int(match.group("portnum"))
                        match = re.search("Utils\.instanceControlPortClient = (?P<portnum>\d+);", decompiled_data)
                        if match:
                            ret["InstanceControlPortClient"] = int(match.group("portnum"))
                        try:
                            os.unlink(decoded_path)
                        except:
                            pass
                        return ret
    except:
        pass
    return None
def extract_config(file_path, decomp_jar):
    """Extract the QRat configuration from a sample jar (Python 2 variant).

    The "e-data" zip entry starts with an 8-byte big-endian seed for a
    java.util.Random keystream used to encode an inner jar; the inner jar
    is decoded, decompiled with *decomp_jar*, and Utils.* settings are
    scraped from the result. Returns a dict or None.
    """
    enckey = coded_jar = False
    if not decomp_jar:
        return None
    ret = { }
    try:
        with ZipFile(file_path, 'r') as zip:
            for name in zip.namelist():
                if name == 'e-data':
                    coded_data = zip.read(name)
                    seed = coded_data[:8]
                    enckey = unpack('>Q', seed)[0]
                    if enckey and coded_data:
                        # Replicate Java's Random.nextInt(255) keystream.
                        java_rand = JavaRandom(enckey)
                        coded_data = coded_data[8:]
                        decoded_data = ""
                        for i in range(len(coded_data)):
                            key = java_rand.nextInt(255)
                            dec_byte = chr((ord(coded_data[i]) - key + 256) % 256)
                            decoded_data += dec_byte
                        decoded_path = store_temp_file(decoded_data, "qrat.jar")
                        try:
                            p = Popen(["java", "-jar", decomp_jar, decoded_path], stdout=PIPE)
                            decompiled_data = p.stdout.read()
                        except:
                            pass
                        # Scrape configuration values from the decompiled source.
                        match = re.search("Utils\.serverHost = new String\[\] \{(?P<stringlist>[^};\r\n]*)\};", decompiled_data)
                        if match:
                            hostlist = match.group('stringlist').split(',')
                            serverhosts = [x.strip(" \"") for x in hostlist]
                            for i in xrange(len(serverhosts)):
                                ret["ServerHost" + str(i)] = serverhosts[i]
                        match = re.search("Utils\.serverPort = (?P<portnum>\d+);", decompiled_data)
                        if match:
                            ret["ServerPort"] = int(match.group('portnum'))
                        match = re.search("Utils\.instanceControlPortAgent = (?P<portnum>\d+);", decompiled_data)
                        if match:
                            ret["InstanceControlPortAgent"] = int(match.group('portnum'))
                        match = re.search("Utils\.instanceControlPortClient = (?P<portnum>\d+);", decompiled_data)
                        if match:
                            ret["InstanceControlPortClient"] = int(match.group('portnum'))
                        try:
                            os.unlink(decoded_path)
                        except:
                            pass
                        return ret
    except:
        pass
    return None
def on_call(self, call, process):
    # Track copies of known developer tools and suspicious (suspended /
    # windowless) launches of those tools or their copies.
    if call["api"].startswith("CopyFile"):
        self.sname = self.get_argument(call, "ExistingFileName")
        if self.sname:
            # Remember the destination when a dev tool is being copied.
            for tool in self.devtools:
                if re.search(tool, self.sname.lower()):
                    self.dname = self.get_argument(call, "NewFileName")
    if call["api"] == "CreateProcessInternalA" or call[
            "api"] == "CreateProcessInternalW":
        cmdline = self.get_argument(call, "CommandLine").lower()
        appname = self.get_argument(call, "ApplicationName")
        if cmdline:
            flags = int(self.get_argument(call, "CreationFlags"), 16)
            # CREATE_SUSPENDED or CREATE_SUSPENDED|CREATE_NO_WINDOW
            if flags & 0x4 or flags & 0x08000004:
                for tool in self.devtools:
                    if "{path}" in cmdline:
                        # Command line still holds an unexpanded {path}
                        # placeholder; match on ApplicationName instead.
                        appname = self.get_argument(
                            call, "ApplicationName")
                        if appname:
                            if re.search(tool, appname):
                                procname = process["process_name"]
                                self.data.append({
                                    "Process": procname + " > " + appname
                                })
                    elif self.dname and self.dname.lower() in cmdline:
                        # A previously-copied dev tool is being executed.
                        self.executecopy = True
                        procname = process["process_name"]
                        self.data.append({
                            "Copy": self.sname.lower() + " > " + self.dname.lower()
                        })
                        self.data.append({
                            "Process": procname + " > " + self.dname.lower()
                        })
                    elif re.search(tool, cmdline):
                        procname = process["process_name"]
                        spawnapp = self.get_argument(
                            call, "ApplicationName")
                        # Fall back to the command line when no app name.
                        if not spawnapp:
                            spawnapp = cmdline
                        self.data.append(
                            {"Process": procname + " > " + spawnapp})
        # Handle cases were CommandLine is null
        elif appname:
            flags = int(self.get_argument(call, "CreationFlags"), 16)
            # CREATE_SUSPENDED or CREATE_SUSPENDED|CREATE_NO_WINDOW
            if flags & 0x4 or flags & 0x08000004:
                for tool in self.devtools:
                    if re.search(tool, appname):
                        procname = process["process_name"]
                        self.data.append(
                            {"Process": procname + " > " + appname})
def _ignore_link_pattern(url: Optional[str]) -> bool: """Return true if the url or redirect_url matches the ignore link pattern.""" if url is None: return False p = IGNORE_LINK_PATTERN nu = normalize_url_lossy(url) return re2.search(p, url, re2.I) or re2.search(p, nu, re2.I)
def parsesections(pattern, pattern_replace, section):
    """Repeatedly rewrite spans of *section* matched by *pattern*.

    Each match's span (from the start of group 1 to the end of group 2) is
    rewritten with re.sub using pattern_replace[0] as the pattern and
    pattern_replace[1] %% group(2) as the replacement, until *pattern* no
    longer matches. Returns the rewritten section.
    """
    found = re.search(pattern, section)
    while found:
        start = found.start(1)
        end = found.end(2)
        rewritten = re.sub(pattern_replace[0],
                           pattern_replace[1] % found.group(2),
                           section[start:end])
        # NOTE(review): "1 + end" drops the character immediately after the
        # matched span — presumably a separator; confirm intent.
        section = section[:start] + rewritten + section[1 + end:]
        found = re.search(pattern, section)
    return section
def _ignore_link_pattern(url: typing.Optional[str]) -> bool: """Return true if the url or redirect_url matches the ignore link pattern.""" if url is None: return False p = mediawords.tm.extract_story_links.IGNORE_LINK_PATTERN nu = mediawords.util.url.normalize_url_lossy(url) return re2.search(p, url, re2.I) or re2.search(p, nu, re2.I)
def _ignore_link_pattern(url: typing.Optional[str]) -> bool: """Return true if the url or redirect_url matches the ignore link pattern.""" if url is None: return False p = mediawords.tm.extract_story_links.IGNORE_LINK_PATTERN nu = mediawords.util.url.normalize_url_lossy(url) return re2.search(p, url, flags=re2.I) or re2.search(p, nu, flags=re2.I)
def run(self, info):
    # Probe every URL and POST parameter of *info* with command-injection
    # payloads; a response matching a case's 'target' regex is a hit.
    m_return = []
    if info.has_url_params:
        #param_dict = info.url_params
        for k, v in info.url_params.iteritems():
            # NOTE(review): key/value are computed but never used.
            key = to_utf8(k)
            value = to_utf8(v)
            for cmd_inject_case in cmd_inject_detect_test_cases:
                # Mutate the request, injecting the payload into parameter k.
                p = payload_muntants(info, payload={
                    'k': k,
                    'pos': 1,
                    'payload': cmd_inject_case['input'],
                    'type': 0
                }, bmethod=info.method, timeout=15.0)
                if cmd_inject_case['target'] is not None:
                    if p is not None:
                        __ = re.search(cmd_inject_case['target'], p.data)
                        if __ is not None:
                            Logger.log_verbose('[+] found cmd inject!')
                            # NOTE(review): returns the still-empty m_return
                            # even on a hit — nothing is ever appended to it;
                            # confirm the finding should not be recorded.
                            return m_return
    if info.has_post_params:
        #param_dict = info.post_params
        for k, v in info.post_params.iteritems():
            key = to_utf8(k)
            value = to_utf8(v)
            for cmd_inject_case in cmd_inject_detect_test_cases:
                p = payload_muntants(info, payload={
                    'k': k,
                    'pos': 1,
                    'payload': cmd_inject_case['input'],
                    'type': 0
                }, bmethod=info.method, timeout=15.0)
                if cmd_inject_case['target'] is not None:
                    if p is not None:
                        __ = re.search(cmd_inject_case['target'], p.data)
                        if __ is not None:
                            Logger.log_verbose('[+] found cmd inject!')
                            return m_return
    # Send the results
    return m_return
def _filter(source): """Extracts and decode payload (original file) from `source`""" try: varname = re.search(r'eval\(\w+\(\w+\((\w+)\)\)\);', source).group(1) reverse = re.search(r"var +%s *\= *'(.*)';" % varname, source).group(1) except AttributeError: raise UnpackingError('Malformed MyObfuscate data.') try: return base64.b64decode(reverse[::-1].encode('utf8')).decode('utf8') except TypeError: raise UnpackingError('MyObfuscate payload is not base64-encoded.')
def test_search_star_plus(self): self.assertEqual(re.search('x*', 'axx').span(0), (0, 0)) self.assertEqual(re.search('x*', 'axx').span(), (0, 0)) self.assertEqual(re.search('x+', 'axx').span(0), (1, 3)) self.assertEqual(re.search('x+', 'axx').span(), (1, 3)) self.assertEqual(re.search('x', 'aaa'), None) self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0)) self.assertEqual(re.match('a*', 'xxx').span(), (0, 0)) self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3)) self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3)) self.assertEqual(re.match('a+', 'xxx'), None)
def on_call(self, call, process):
    """Mark API calls that spawn processes with a hidden window."""
    api = call["api"]
    if api == "CreateProcessInternalW":
        cmdline = call["arguments"]["command_line"].lower()
        # Powershell launched with a hidden window style.
        if "powershell" in cmdline and (
                re.search("-win[ ]+hidden", cmdline) or
                re.search("-windowstyle[ ]+hidden", cmdline)):
            self.mark_call()
        # CREATE_NO_WINDOW flag
        elif call["flags"]["creation_flags"] == "CREATE_NO_WINDOW":
            self.mark_call()
    elif api == "ShellExecuteExW":
        # SW_HIDE
        if call["arguments"]["show_type"] == 0:
            self.mark_call()
def on_call(self, call, process):
    """Mark calls that create a process with a hidden window."""
    if call["api"] == "ShellExecuteExW":
        # SW_HIDE
        if call["arguments"]["show_type"] == 0:
            self.mark_call()
        return
    if call["api"] != "CreateProcessInternalW":
        return
    cmdline = call["arguments"]["command_line"].lower()
    hidden_powershell = "powershell" in cmdline and (
        re.search("-win[ ]+hidden", cmdline) or
        re.search("-windowstyle[ ]+hidden", cmdline))
    if hidden_powershell:
        self.mark_call()
    # CREATE_NO_WINDOW flag
    elif call["flags"]["creation_flags"] == "CREATE_NO_WINDOW":
        self.mark_call()
def getMatches(regex):
    # Generator: scan every cached onion page under ./public/rawonions for
    # *regex* and yield (url, matched_text, surrounding_context) tuples.
    # Python 2 code (print statements).
    # print "getMatches called"
    # print os.path
    for root, dirs, files in os.walk("./public/rawonions", topdown=False):
        for name in files:
            # filename = os.path.join("./public/rawonions", name)
            filename = os.path.join(root, name)
            print "fname = " + filename
            # print filename
            # Filenames encode URLs with '+' in place of '/'.
            url = name.replace('+', '/')
            try:
                with open(filename, 'r') as f:
                    text = f.read()
                    # print text
                    match = re2.search(regex, text)
                    if not match is None:
                        # print match.group()
                        # 'width' is a module-level context radius (chars).
                        context = text[max(0, (match.span()[0]-width)) : (match.span()[1]+width)]
                        # print context
                        # print url
                        yield (url, match.group(), context)
            except re2.RegexError:
                print "Had a regex error."
                pass
def add_post():
    # Create a new post from the current request: parse tags, direct
    # recipients ("@login" prefixes in the text) and attached files, then
    # redirect to the new post's URL. Python 2 code (str.decode).
    text = env.request.args('text', '').strip()
    tags = env.request.args('tags', '').strip(' \t*,;')
    # Decode byte strings so tag handling is unicode-clean.
    if isinstance(tags, str):
        tags = tags.decode('utf-8')
    # Tags are separated by commas/semicolons/asterisks; NBSPs normalized.
    tags = [t.replace(u"\xa0", " ") for t in re.split(r'\s*[,;*]\s*', tags)]
    private = bool(env.request.args('private'))
    # A leading "@login, @other ..." block addresses the post to those users.
    m = re.search(r'^\s*(?P<to>(?:@[a-z0-9_-]+[,\s]*)+)', text)
    to = parse_logins(m.group('to')) if m else []
    files = _files([])
    # Remember to clear the post form on the next page render.
    sess = Session()
    sess['clear_post_input'] = True
    sess.save()
    try:
        id = posts.add_post(text, tags=tags, to=to, private=private, files=files)
    except PostTextError:
        return render('/post-error.html')
    return Response(redirect='%s://%s.%s/%s' % \
        (env.request.protocol, env.user.login, settings.domain, id))
def assign_date_guess_tag(
        db: DatabaseHandler,
        story: dict,
        date_guess: GuessDateResult,
        fallback_date: Optional[str]) -> None:
    """Assign a guess-method tag to the story based on the date_guess result.

    When date_guess found a date, attach date_guess_method:guess_by_url,
    guess_by_tag_<html_tag>, or guess_by_unknown. Otherwise attach
    date_guess_method:fallback_date when a fallback date exists, else
    date_invalid:date_invalid. Any existing stories_tags_map rows for the
    story are removed first.
    """
    if date_guess.found:
        tag_set = GUESS_METHOD_TAG_SET
        method = date_guess.guess_method
        if method.startswith('Extracted from url'):
            tag = 'guess_by_url'
        elif method.startswith('Extracted from tag'):
            # Pull the html tag name out of e.g. "Extracted from tag <meta ...>".
            m = re2.search(r'\<(\w+)', method)
            html_tag = m.group(1) if m is not None else 'unknown'
            tag = 'guess_by_tag_' + str(html_tag)
        else:
            tag = 'guess_by_unknown'
    elif fallback_date is not None:
        tag_set = GUESS_METHOD_TAG_SET
        tag = 'fallback_date'
    else:
        tag_set = INVALID_TAG_SET
        tag = INVALID_TAG

    tag_set_row = db.find_or_create('tag_sets', {'name': tag_set})
    tag_row = db.find_or_create('tags', {'tag': tag, 'tag_sets_id': tag_set_row['tag_sets_id']})

    stories_id = story['stories_id']
    tags_id = tag_row['tags_id']

    # Replace any existing tags on the story with the single guess tag.
    db.query(
        """
        DELETE FROM stories_tags_map
        WHERE stories_id = %(stories_id)s
        """,
        {'stories_id': stories_id}
    )
    db.query("""
        INSERT INTO stories_tags_map (stories_id, tags_id)
        VALUES (%(stories_id)s, %(tags_id)s)
        ON CONFLICT (stories_id, tags_id) DO NOTHING
    """, {
        'stories_id': stories_id,
        'tags_id': tags_id,
    })
def set_param(param, value):
    """Set profile/info parameter, add/remove account """
    value = value.strip()
    try:
        # Parameter names may only contain [a-z0-9_.]; reject anything else.
        if re.search(r'[^a-z0-9_\.]', param, re.I):
            raise KeyError
        #if param in ('passwd', 'password'):
        #    env.user.set_password(value)
        elif param.startswith('info.') and param[5:] in fields['info']:
            env.user.set_info(param[5:], value)
        elif param in fields['account']:
            # "-value" removes an account; "+value" (or bare value) adds one.
            if value.startswith('-'):
                return del_account(param, value[1:].strip())
            if value.startswith('+'):
                value = value[1:]
            return add_account(param, value.strip())
        else:
            env.user.set_profile(param, value)
        env.user.save()
    except ValueError, e:
        # Python 2 except syntax; report the offending value to the user.
        v = e.message if e.message else value
        return xmpp_template('profile_value_err', value=v)
def on_call(self, call, process):
    """Detect JavaScript that generates a CSS style for a div hiding an
    iframe redirect; records the redirect target on a hit."""
    if call["api"] == "CDocument_write":
        buf = self.get_argument(call, "Buffer")
    elif call["api"] == "JsEval":
        buf = self.get_argument(call, "Javascript")
    else:
        buf = self.get_argument(call, "Script")
    buf = buf.strip()
    lowered = buf.lower()
    # Only short snippets that define both a style and an iframe matter.
    if "style" not in lowered or "iframe" not in lowered or len(buf) >= 500:
        return
    styled = re.match(self.styleRE, buf)
    if not styled:
        return
    style_name = styled.group("styleName")
    # The hidden style must actually be applied to an element.
    applies_style = ('class="{0}"'.format(style_name) in buf or
                     "class='{0}'".format(style_name) in buf)
    if not applies_style:
        return
    redirect = re.search(self.iframeRE, buf)
    if redirect:
        self.ret = True
        self.severity = 3
        self.data.append({
            "Info": "Javascript generated CSS styling for a div "
                    "containing an iframe redirect."
        })
        self.data.append({"Redirect": redirect.group("redir")})
def content_matches_topic(content: str, topic: dict, assume_match: bool = False) -> bool:
    """Test whether the content matches the topic['pattern'] regex.

    Only the first megabyte of the string is checked, to bound the cost of
    the occasional very long regex match.

    Arguments:
    content - text content
    topic - topic dict from db
    assume_match - assume that the content matches

    Return:
    True if the content matches the topic pattern
    """
    if assume_match:
        return True
    if content is None:
        return False
    snippet = content[:1024 * 1024]
    # For some reason not reproducible in dev, a small number of production
    # values arrive as bytes objects, which re2.search chokes on.
    if isinstance(snippet, bytes):
        snippet = snippet.decode('utf8', 'backslashreplace')
    match = re2.search(topic['pattern'], snippet, re2.I | re2.X | re2.S)
    return match is not None
def getBestMessage(lines, codeStr):
    """Extract the assertion message from one AssertionLocation.lines entry.

    Args:
        lines: list of contiguous C++ source lines (or a single string)
        codeStr: assertion code found in the first line
    """
    joined = lines if isinstance(lines, str) else " ".join(lines)
    _, _, err = joined.partition(codeStr)
    if not err:
        return ""

    # Trim to the outer quote pair.
    quoted = re.search(r'"(.*)"', err)
    if quoted is None:
        return ""
    err = quoted.group(1)

    # Collapse stream-style concatenation inside the message, drop escaped
    # quotes, then cut at any remaining quote and trailing text.
    for pattern, repl in (
            (r'" +"', ''),
            (r'" *<< *"', ''),
            (r'" *<<[^<]+<< *"', '<X>'),
            (r'" *\+[^+]+\+ *"', '<X>'),
            (r'\\"', ''),
            (r'".*$', ''),
    ):
        err = re.sub(pattern, repl, err)
    return err.strip()
def search(self, regex, flags=0, all=False):
    """Search every mapped chunk of the process dump for *regex*.

    With all=False, returns {"match": Match, "chunk": chunk} for the first
    hit (or None). With all=True, returns {"matches": [...], "detail":
    [{"match": findall_result, "chunk": chunk}, ...]} over all chunks.
    """
    if all:
        result = {"detail": [], "matches": []}
        for mapping in self.address_space:
            for chunk in mapping["chunks"]:
                self.dumpfile.seek(chunk["offset"])
                data = self.dumpfile.read(chunk["end"] - chunk["start"])
                found = re.findall(regex, data, flags)
                if found:
                    result["matches"].extend(found)
                    result["detail"].append({"match": found, "chunk": chunk})
        return result
    for mapping in self.address_space:
        for chunk in mapping["chunks"]:
            self.dumpfile.seek(chunk["offset"])
            data = self.dumpfile.read(chunk["end"] - chunk["start"])
            found = re.search(regex, data, flags)
            if found:
                return {"match": found, "chunk": chunk}
def search(self, regex, flags=0, all=False):
    """Search every mapped chunk of the process dump for *regex*.

    With all=False, returns {"match": Match, "chunk": chunk} for the first
    hit (or None). With all=True, returns {"matches": [group(0), ...],
    "detail": [{"match": [Match, ...], "chunk": chunk}, ...]}.
    """
    if all:
        result = dict()
        result["detail"] = []
        matches = []
        for map in self.address_space:
            for chunk in map["chunks"]:
                self.dumpfile.seek(chunk["offset"])
                match = re.finditer(regex, self.dumpfile.read(chunk["end"] - chunk["start"]), flags)
                thismatch = []
                try:
                    while True:
                        # next() works on Python 2 and 3; the original
                        # match.next() is Python-2 only and raises
                        # AttributeError on Python 3.
                        m = next(match)
                        thismatch.append(m)
                        matches.append(m.group(0))
                except StopIteration:
                    pass
                if thismatch:
                    result["detail"].append({"match": thismatch, "chunk": chunk})
        result["matches"] = matches
        return result
    else:
        for map in self.address_space:
            for chunk in map["chunks"]:
                self.dumpfile.seek(chunk["offset"])
                match = re.search(regex, self.dumpfile.read(chunk["end"] - chunk["start"]), flags)
                if match:
                    result = dict()
                    result["match"] = match
                    result["chunk"] = chunk
                    return result
def on_call(self, call, process):
    # Track Dridex behavioral markers across API calls: computer/user name
    # reads from the registry, CryptHashData inputs, and socket
    # connect/send/recv correlation to find the payload-download IP.
    if call["api"] == "RegQueryValueExA":
        # There are many more ways to get the computer name, this is the
        # pattern observed with all Dridex varients 08/14 - 03/15 so far.
        testkey = self.get_argument(call, "FullName").lower()
        if testkey == "hkey_local_machine\\system\\controlset001\\control\\computername\\computername\\computername":
            buf = self.get_argument(call, "Data")
            if buf:
                self.compname = buf.lower()
        if testkey == "hkey_current_user\\volatile environment\\username":
            if call["status"]:
                buf = self.get_argument(call, "Data")
                if buf:
                    self.username = buf.lower()
            else:
                # The volatile-environment username key is absent on XP.
                self.is_xp = True
    if call["api"] == "CryptHashData":
        self.crypted.append(self.get_argument(call, "Buffer").lower())
    if call["api"] == "connect":
        if not self.extract:
            return None
        # Remember the last IP each socket connected to.
        socknum = str(self.get_argument(call, "socket"))
        if socknum and socknum not in self.sockmon.keys():
            self.sockmon[socknum] = ""
        lastip = self.get_argument(call, "ip")
        self.sockmon[socknum] = lastip
    if call["api"] == "send":
        if not self.extract:
            return None
        socknum = str(self.get_argument(call, "socket"))
        if socknum and socknum in self.sockmon.keys():
            buf = self.get_argument(call, "buffer")
            # POST is a stable indicator observed so far
            if buf and buf[:4] == "POST":
                self.payloadip["send"] = self.sockmon[socknum]
    if call["api"] == "recv":
        if not self.extract:
            return None
        socknum = str(self.get_argument(call, "socket"))
        if socknum and socknum in self.sockmon.keys():
            buf = self.get_argument(call, "buffer")
            if buf:
                # A large Content-Length on the same socket that sent the
                # POST marks the payload download.
                clen = re.search(r"Content-Length:\s([^\s]+)", buf)
                if clen:
                    length = int(clen.group(1))
                    if length > 100000:
                        if "send" in self.payloadip and self.sockmon[socknum] == self.payloadip["send"]:
                            # Just a sanity check to make sure the IP hasn't changed
                            # since this is a primitive send/recv monitor
                            self.payloadip["recv"] = self.sockmon[socknum]
    return None
def check(self, msg):
    """Return True when *msg* should fire this trigger.

    Applies, in order: the active flag, per-guild channel restrictions,
    server match, case-(in)sensitive substring or regex match, and the
    cooldown window. Updates last_triggered when the trigger fires.
    """
    if not self.active:
        return False
    allowed = self.channels.get(str(msg.guild.id), [])
    if allowed and msg.channel.id not in allowed:
        return False
    if not (self.server == msg.guild.id or self.server is None):
        return False
    content = msg.content
    needle = self.triggered_by
    if not self.case_sensitive:
        needle = needle.lower()
        content = content.lower()
    if self.regex:
        if not re.search(needle, content):
            return False
    elif needle not in content:
        return False
    now = datetime.datetime.now()
    if (now - self.last_triggered).seconds > self.cooldown:
        self.last_triggered = now
        return True
    return False
def run(self, results):
    """Run Moloch to import pcap
    @return: nothing
    """
    self.key = "moloch"
    self.alerthash = {}
    # Moloch binary/config and credentials come from the reporting options.
    self.MOLOCH_CAPTURE_BIN = self.options.get("capture", None)
    self.MOLOCH_CAPTURE_CONF = self.options.get("captureconf", None)
    self.CUCKOO_INSTANCE_TAG = self.options.get("node", None)
    self.MOLOCH_USER = self.options.get("user", None)
    self.MOLOCH_PASSWORD = self.options.get("pass", None)
    self.MOLOCH_REALM = self.options.get("realm", None)
    self.pcap_path = os.path.join(self.analysis_path, "dump.pcap")
    self.MOLOCH_URL = self.options.get("base", None)
    # The task id is encoded in the pcap path: .../<task_id>/dump.pcap
    m = re.search(r"/(?P<task_id>\d+)/dump.pcap$", self.pcap_path)
    if m == None:
        log.warning("Unable to find task id from %s" % (self.pcap_path))
        return results
    else:
        self.task_id = m.group("task_id")
    if not os.path.exists(self.MOLOCH_CAPTURE_BIN):
        log.warning("Unable to Run moloch-capture: BIN File %s Does Not Exist" % (self.MOLOCH_CAPTURE_BIN))
        return
    if not os.path.exists(self.MOLOCH_CAPTURE_CONF):
        log.warning("Unable to Run moloch-capture Conf File %s Does Not Exist" % (self.MOLOCH_CAPTURE_CONF))
        return
    try:
        # Tag the import with the cuckoo node and task id.
        cmd = "%s -c %s -r %s -n %s -t %s:%s" % (self.MOLOCH_CAPTURE_BIN, self.MOLOCH_CAPTURE_CONF, self.pcap_path, self.CUCKOO_INSTANCE_TAG, self.CUCKOO_INSTANCE_TAG, self.task_id)
    except Exception,e:
        log.warning("Unable to Build Basic Moloch CMD: %s" % e)
def add_post():
    # Create a new post from the current request: parse tags, direct
    # recipients ("@login" prefixes in the text) and attached files, then
    # redirect to the new post's URL. Python 2 code (str.decode).
    text = env.request.args('text', '').strip()
    tags = env.request.args('tags', '').strip(' \t*,;')
    # Decode byte strings so tag handling is unicode-clean.
    if isinstance(tags, str):
        tags = tags.decode('utf-8')
    # Tags are separated by commas/semicolons/asterisks; NBSPs normalized.
    tags = [t.replace(u"\xa0", " ") for t in re.split(r'\s*[,;*]\s*', tags)]
    private = bool(env.request.args('private'))
    # A leading "@login, @other ..." block addresses the post to those users.
    m = re.search(r'^\s*(?P<to>(?:@[a-z0-9_-]+[,\s]*)+)', text)
    to = parse_logins(m.group('to')) if m else []
    files = _files([])
    try:
        id = posts.add_post(text, tags=tags, to=to, private=private, files=files)
    except PostTextError:
        return render('/post-error.html')
    return Response(redirect='%s://%s.%s/%s' % \
        (env.request.protocol, env.user.login, settings.domain, id))
def search(self, table, line):
    """Search entries of *table* for *line*, case-insensitively.

    A trailing integer in *line* selects that numbered match; otherwise a
    random match is returned. Replies with a human-readable result string.
    """
    entries = {}
    matches = []
    # A trailing integer selects a specific match ("foo 2" -> 2nd match of "foo").
    try:
        choice = int(line.split(" ")[-1])
        line = line.replace(line.split(" ")[-1], "", 1).strip()
        specific = True
    except Exception:
        choice = None
        specific = False
    with sqlite3.connect(self.bot.database_file) as conn:
        cursor = conn.cursor()
        for row in cursor.execute("SELECT * FROM {}".format(table)):
            entries[row[0]] = row[1]
    if len(entries) == 0:
        return "There are no entries in this list!"
    for number, text in entries.items():
        match = re.search(line, text, re.IGNORECASE)
        if match:
            # Recover the entry number for the matched text.
            for nr, txt in entries.items():
                if txt == match.string:
                    found_number = nr
            matches.append("{}. {}".format(found_number, match.string))
    if not matches:
        return "Could not find '{}'!".format(line)
    if not specific:
        picked = random.choice(matches)
        return "Match #{}/{} - {}".format(matches.index(picked) + 1, len(matches), picked)
    if choice <= len(matches):
        return "Match #{}/{} - {}".format(choice, len(matches), matches[choice - 1])
    return "There is no '#{}' entry of '{}'!".format(choice, line)
def create_blacklist_ip_trie():
    """Download the blacklist zip, build a CharTrie of blacklisted IPs,
    and persist it to BLACKLISTED_IP_TRIE_JOBLIB.

    Exits the process (status 1) when the download or build fails.
    """
    print('* Creating Blacklist IP Trie.')
    try:
        # Report when the on-disk trie was last rebuilt, if it exists.
        mtime = os.path.getmtime(BLACKLISTED_IP_TRIE_JOBLIB)
        print('* Last update: {}'.format(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(mtime))))
    except OSError:  # FIX: narrowed from a bare `except:`; only "file missing" is expected
        pass
    blacklisted_ip_trie = trie.CharTrie()
    z = download_zipfile()
    if z is None:  # FIX: was `!= None` comparison
        print('* UNABLE TO UPDATE Blacklist IP Trie!!')
        sys.exit(1)
    try:
        # The zip's first member is the newline-separated blacklist feed.
        with z.open(z.filelist[0].filename) as myfile:
            rows = myfile.read().decode().split('\n')
        for row in rows:
            re_obj = re.search(IP_PATTERN, row)
            if re_obj is not None:
                # Keep only the IP column (lines are tab-separated).
                matched_ip = row[re_obj.span()[0]:re_obj.span()[1]].split('\t')[0]
                blacklisted_ip_trie[matched_ip] = True
        dump(blacklisted_ip_trie, BLACKLISTED_IP_TRIE_JOBLIB)
        print('* Blacklist IP Trie created.')
    except Exception as e:
        print('* ERROR IN CREATING BLACKLIST IP TRIE : ', e)
        sys.exit(1)
def render_pony(name, text, balloonstyle, width=80, center=False, centertext=False):
    """Render the named pony, optionally splicing a speech balloon for *text*.

    Returns the assembled ANSI-art string with a colour reset after each line.
    """
    pony = load_pony(name)
    balloon = link_l = link_r = ''
    if text:
        # The last two balloonstyle entries are the left/right link characters.
        [link_l, link_r] = balloonstyle[-2:]
        for i,line in enumerate(pony):
            # "$balloon<minwidth>$" marks where the balloon lines get spliced in.
            match = re.search('\$balloon([0-9]*)\$', line)
            if match:
                minwidth = int(match.group(1) or '0')
                pony[i:i+1] = render_balloon(text, balloonstyle, minwidth=minwidth, maxwidth=int(width/2), pad=str.center if centertext else str.ljust)
                break
    # Drop the trailing section delimited by a pair of '$$$' marker lines.
    try:
        first = pony.index('$$$')
        second = pony[first+1:].index('$$$')
        pony[first:] = pony[first+1+second+1:]
    except:  # NOTE(review): bare except also hides unrelated errors; only ValueError from .index() is expected
        pass
    # Substitute the balloon link markers (empty strings when there is no text).
    pony = [ line.replace('$\\$', link_l).replace('$/$', link_r) for line in pony ]
    indent = ''
    if center:
        # Widest visible line, ignoring ANSI colour codes and $...$ markers.
        ponywidth = max([ len(re.sub(r'\x1B\[[0-9;]+m|\$.*\$', '', line)) for line in pony ])
        indent = ' '*int((width-ponywidth)/2)
    # Truncate each line to *width* visible characters while keeping colour codes.
    wre = re.compile('((\x1B\[[0-9;]+m)*.){0,%s}' % width)
    # NOTE(review): this literal looks like it should begin with ESC (\x1b) to
    # form an SGR reset; the control byte may have been lost in transit - confirm.
    reset = '[39;49m\n'
    return indent+(reset+indent).join([ wre.search(line).group() for line in pony ])+reset
def on_complete(self):
    """Finalize the signature: emit collected C2s, carve extra C2s from the
    matched process's memory dump when enabled, and emit payment URLs.

    Returns True when the signature matched (or config already changed),
    False otherwise.
    """
    if self.sigchanged:
        return True
    ret = False
    if self.found:
        ret = True
        if self.c2s:
            for c2 in self.c2s:
                self.data.append({"C2": c2})
        if self.carve_mem:
            if "procmemory" in self.results and self.results["procmemory"]:
                # Locate the memory dump of the process that triggered the hit.
                dump_path = ""
                for process in self.results["procmemory"]:
                    if process["pid"] == int(self.found):
                        dump_path = process["file"]
                        break
                if dump_path:
                    with open(dump_path, "rb") as dump_file:
                        mem = dump_file.read()
                    # FIX: the dump is bytes, so the pattern must be a bytes
                    # pattern (a str pattern raises TypeError on Python 3).
                    hit = re.search(rb"(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3},[\d.,]+)\x00", mem)
                    if hit:
                        # Comma-separated IP list, NUL-terminated in memory.
                        for c2 in hit.group(1).split(b","):
                            entry = {"C2": c2.decode("ascii", "replace")}
                            if entry not in self.data:
                                self.data.append(entry)
    if self.payment:
        for url in self.payment:
            self.data.append({"Payment": url})
    return ret
def _add_http(self, conn, tcpdata):
    """Adds an HTTP flow.
    @param conn: TCP connection info.
    @param tcpdata: TCP data flow.
    @return: True when the flow was recorded (or already counted),
        False when it was passlisted or could not be parsed.
    """
    # Duplicate payloads are only counted, not re-parsed.
    if tcpdata in self.http_requests:
        self.http_requests[tcpdata]["count"] += 1
        return True
    try:
        http = dpkt.http.Request()
        http.unpack(tcpdata)
    except dpkt.dpkt.UnpackError:
        # Deliberately tolerated: keep whatever dpkt managed to parse;
        # missing attributes are caught by the broad try below.
        pass
    try:
        entry = {"count": 1}
        # Accept the Host header only if it looks like a valid hostname
        # (RFC 1123 labels, optional port); otherwise fall back to dst IP.
        if "host" in http.headers and re.match(
            "^([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9])(\.([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9]))+(:[0-9]{1,5})?$",
            http.headers["host"],
            re.IGNORECASE,
        ):
            entry["host"] = convert_to_printable(http.headers["host"])
        else:
            entry["host"] = conn["dst"]
        # Drop flows whose host matches the configured domain passlist.
        if enabled_passlist:
            for reject in domain_passlist_re:
                if re.search(reject, entry["host"]):
                    return False
        entry["port"] = conn["dport"]
        # Manually deal with cases when destination port is not the default one,
        # and it is not included in host header.
        netloc = entry["host"]
        if entry["port"] != 80 and ":" not in netloc:
            netloc += ":" + str(entry["port"])
        entry["data"] = convert_to_printable(tcpdata)
        entry["uri"] = convert_to_printable(
            urlunparse(("http", netloc, http.uri, None, None, None)))
        entry["body"] = convert_to_printable(http.body)
        entry["path"] = convert_to_printable(http.uri)
        if "user-agent" in http.headers:
            entry["user-agent"] = convert_to_printable(
                http.headers["user-agent"])
        else:
            entry["user-agent"] = ""
        entry["version"] = convert_to_printable(http.version)
        entry["method"] = convert_to_printable(http.method)
        self.http_requests[tcpdata] = entry
    except Exception:
        # Broad on purpose: a failed unpack above leaves `http` without
        # headers/uri/etc.; any such flow is simply not recorded.
        return False
    return True
def search(self, regex, flags=0, all=False):
    """Search every mapped chunk of the memory dump for *regex*.

    With all=False, return {"match": <Match>, "chunk": <chunk>} for the
    first hit, or None when nothing matches.  With all=True, return a
    dict with "detail" (per-chunk match-object lists) and "matches"
    (every matched substring).
    """
    if not all:
        # First-hit mode: stop scanning as soon as any chunk matches.
        for region in self.address_space:
            for chunk in region["chunks"]:
                self.dumpfile.seek(chunk["offset"])
                blob = self.dumpfile.read(chunk["end"] - chunk["start"])
                hit = re.search(regex, blob, flags)
                if hit:
                    return {"match": hit, "chunk": chunk}
        return None
    # Exhaustive mode: gather every hit across every chunk.
    detail = []
    captured = []
    for region in self.address_space:
        for chunk in region["chunks"]:
            self.dumpfile.seek(chunk["offset"])
            blob = self.dumpfile.read(chunk["end"] - chunk["start"])
            chunk_hits = list(re.finditer(regex, blob, flags))
            captured.extend(m.group(0) for m in chunk_hits)
            if chunk_hits:
                detail.append({"match": chunk_hits, "chunk": chunk})
    return {"detail": detail, "matches": captured}
def _content_matches_topic(content: str, topic: dict, assume_match: bool = False) -> bool: """Test whether the content matches the topic['pattern'] regex. Only check the first megabyte of the string to avoid the occasional very long regex check. Arguments: content - text content topic - topic dict from db assume_match - assume that the content matches Return: True if the content matches the topic pattern """ if assume_match: return True if content is None: return False content = content[0:1024 * 1024] # for some reason I can't reproduce in dev, in production a small number of fields come from # the database into the stories fields or the text value produced in the query below in _story_matches_topic # as bytes objects, which re2.search chokes on if isinstance(content, bytes): content = content.decode('utf8', 'backslashreplace') return re2.search(topic['pattern'], content, flags=re2.I | re2.X | re2.S) is not None
def run(self, info, **kwargs):
    """Probe each URL parameter of *info* for arbitrary-file-read flaws.

    For every case in any_file_read_detect_test_cases, mutate the target
    parameter with the case's input payload, replay the request, and grep
    the response body for the case's target pattern.  Hits are wrapped in
    a WebVulnerability and pushed onto the shared vulresult queue.

    Raises LalascanValueError when kwargs lacks a valid 'method' (str) or
    'param' (dict).  Always returns an empty list, per the plugin API.
    """
    m_return = []
    if info.has_url_params:
        method = kwargs.get('method', None)
        if method is None or not isinstance(method, str):
            raise LalascanValueError("run plugin param has not method!")
        param = kwargs.get('param', None)
        if param is None or not isinstance(param, dict):
            raise LalascanValueError("run plugin param has not param!")
        for case in any_file_read_detect_test_cases:
            p, payload_resource = payload_muntants(
                info,
                payload={
                    'k': param['param_key'],
                    'pos': 1,
                    'payload': case['input'],
                    'type': 1
                },
                bmethod=method)
            if p is None:
                continue
            if re.search(case['target'], p.data) is not None:
                vul = WebVulnerability(
                    target=payload_resource,
                    vulparam_point=param['param_key'],
                    method=method,
                    payload=case['input'],
                    injection_type="ANY_FILE_READ")
                vulresult.put_nowait(vul)
                # FIX: the old message claimed "reflect xss"; this plugin
                # detects arbitrary file reads.
                logger.log_success(
                    '[!+>>>] found %s any file read vulnerable!'
                    % payload_resource.url)
        return m_return
    # Send the results
    return m_return
def on_call(self, call, process):
    """Flag scripts that set Flash's allowScriptAccess to "always".

    Pulls the script buffer from a JsEval or generic script hook and
    matches `allowscriptaccess = always` (value optionally quoted).
    Returns True on a match, otherwise None (no detection).
    """
    if call["api"] == "JsEval":
        buf = self.get_argument(call, "Javascript")
    else:
        buf = self.get_argument(call, "Script")
    # FIX: raw string - the original non-raw literal relied on the invalid
    # escape sequence "\s" (SyntaxWarning on modern Python).
    if re.search(r"allowscriptaccess\s*?=\s*?[\x22\x27]?always", buf, re.I | re.M):
        return True
def on_call(self, call, process):
    """Flag script buffers embedding a Java applet tag with code= and archive=.

    Returns True on a match, otherwise None (no detection).
    """
    if call["api"] == "JsEval":
        buf = self.get_argument(call, "Javascript")
    else:
        buf = self.get_argument(call, "Script")
    # FIX: raw string - the original literal relied on invalid escape
    # sequences such as "\<" (SyntaxWarning on modern Python).
    if re.search(r".*\<applet.*code[ \t\n]*=.*archive[ \t\n]*=.*\<\/applet\>.*",
                 buf, re.IGNORECASE | re.DOTALL):
        return True
def on_call(self, call, process):
    """Flag script buffers embedding a Silverlight object/param element.

    Returns True on a match, otherwise None (no detection).
    """
    if call["api"] == "JsEval":
        buf = self.get_argument(call, "Javascript")
    else:
        buf = self.get_argument(call, "Script")
    # FIX: raw string - the original literal relied on invalid escape
    # sequences such as "\/" and "\-" (SyntaxWarning on modern Python).
    if re.search(r"application\/x\-silverlight.*?\<param name[ \t\n]*=.*?value[ \t\n]*=.*?\<\/object\>.*",
                 buf, re.IGNORECASE | re.DOTALL):
        return True
def check_stoplist(text):
    """Return True when *text* matches any stoplist pattern (case-insensitive)."""
    # Cached list first; fall back to loading it from storage.
    patterns = cache_get('stoplist') or load_stoplist()
    return any(re.search(pattern, text, re.I) for pattern in patterns)
def on_call(self, call, process):
    """Flag scripts that write a .exe to disk and then execute it
    ((Save|Write)ToFile followed by Run on a ".exe" path).

    Returns True on a match, otherwise None (no detection).
    """
    if call["api"] == "JsEval":
        buf = self.get_argument(call, "Javascript")
    else:
        buf = self.get_argument(call, "Script")
    # FIX: raw string - the original literal relied on invalid escape
    # sequences such as "\(" and "\." (SyntaxWarning on modern Python).
    if re.search(r'(Save|Write)ToFile(\(|\/).*?\.exe".*?Run(\(|\/).*?\.exe"',
                 buf, re.IGNORECASE | re.DOTALL):
        return True
def check_user_conditions(self, item): """Checks an item's author against the defined requirements.""" # if no user conditions are set, no need to check at all if not self.user_conditions: return True must_satisfy = self.user_conditions.get('must_satisfy', 'all') user = item.author for attr, compare in self.user_conditions.iteritems(): if attr == 'must_satisfy': continue # extract the comparison operator operator = '=' if not isinstance(compare, bool): operator_regex = '^(==?|<|>)' match = re.search(operator_regex, compare) if match: operator = match.group(1) compare = compare[len(operator):].strip() if operator == '==': operator = '=' # convert rank to a numerical value if attr == 'rank': rank_values = {'user': 0, 'contributor': 1, 'moderator': 2} compare = rank_values[compare] if user: if attr == 'rank': value = rank_values[get_user_rank(user, item.subreddit)] elif attr == 'account_age': user_date = datetime.utcfromtimestamp(user.created_utc) value = (datetime.utcnow() - user_date).days elif attr == 'combined_karma': value = user.link_karma + user.comment_karma else: value = getattr(user, attr, 0) else: value = 0 if operator == '<': result = int(value) < int(compare) elif operator == '>': result = int(value) > int(compare) elif operator == '=': result = int(value) == int(compare) if result and must_satisfy == 'any': return True elif not result and must_satisfy == 'all': return False # if we reached this point, success depends on if this is any/all if must_satisfy == 'any' and not result: return False return True
def filtercomments(source):
    """NOT USED: strips leading comments and puts them at the top.

    Repeatedly peels /* ... */ and // comments off the front of *source*
    and returns them newline-joined, followed by the remaining code.
    (Docstring fixed: the original said "trailing" but the code strips
    leading comments.)
    """
    trailing_comments = []
    comment = True
    while comment:
        if re.search(r'^\s*\/\*', source):
            # FIX: the original used source[0, idx] - a tuple index, which
            # raises TypeError on str; a slice [0:idx] was intended.
            comment = source[0:source.index('*/') + 2]
        elif re.search(r'^\s*\/\/', source):
            comment = re.search(r'^\s*\/\/', source).group(0)
        else:
            comment = None
        if comment:
            # Drop the comment plus any whitespace that followed it.
            source = re.sub(r'^\s+', '', source[len(comment):])
            trailing_comments.append(comment)
    return '\n'.join(trailing_comments) + source
def on_call(self, call, process):
    """Flag scripts that drop a .exe via (Save|Write)ToFile and record the
    offending buffer under "dropper_script".

    Returns True on a match, otherwise None (no detection).
    """
    if call["api"] == "JsEval":
        buf = self.get_argument(call, "Javascript")
    else:
        buf = self.get_argument(call, "Script")
    # FIX: raw string - the original literal relied on invalid escape
    # sequences such as "\(" and "\." (SyntaxWarning on modern Python).
    if re.search(r'(Save|Write)ToFile(\(|\/).*?\.exe"', buf, re.IGNORECASE | re.DOTALL):
        self.data.append({"dropper_script": buf})
        return True
def unpack(source):
    """Unpacks js code packed with MyObfuscate.com.

    Returns *source* untouched when it is not MyObfuscate output;
    otherwise returns the extracted script prefixed with CAVEAT.
    """
    if not detect(source):
        return source
    decoded = unquote(_filter(source))
    found = re.search(r"^var _escape\='<script>(.*)<\/script>'", decoded, re.DOTALL)
    body = found.group(1) if found else source
    return CAVEAT + body