def parseua(uaobject, uas_parser):
    """Populate *uaobject*.data with details parsed from its uaString.

    Runs two parsers over the user-agent string: *uas_parser* (dict result)
    and the project's UserAgent pretty-parser, then fills in family,
    version, type and OS fields. Returns the mutated *uaobject*.
    """
    x = uaobject
    result = uas_parser.parse(x.uaString)
    ostype, osversion = osparse(result['os_name'])
    result2 = UserAgent.factory(x.uaString).pretty()
    result2 = UserAgent.parse_pretty(result2)
    version = result2[1:4]
    # Join the version components, skipping empty parts.
    # fix: initialize s — it was unbound (NameError at `if s:`) whenever
    # the `version` slice came back empty
    s = ""
    for (j, k) in enumerate(version):
        if j == 0:
            s = version[0]
        elif k:
            s = ".".join([s, k])
    if s:
        x.data['Family Version'] = s
    if result['typ'] != 'unknown':
        x.data['Type'] = result['typ']
    if result['ua_family'] != 'unknown':
        x.data['Family'] = result['ua_family']
    elif result2[0] != "Other":
        x.data['Family'] = result2[0]
    if ostype != 'unknown':
        x.data['OS'] = ostype
    if osversion != '':
        x.data['OS Version'] = osversion
    return x
def parseua(uaobject, uas_parser):
    """Populate *uaobject*.data with details parsed from its uaString.

    Runs two parsers over the user-agent string: *uas_parser* (dict result)
    and the project's UserAgent pretty-parser, then fills in family,
    version, type and OS fields. Returns the mutated *uaobject*.
    """
    x = uaobject
    result = uas_parser.parse(x.uaString)
    ostype, osversion = osparse(result['os_name'])
    result2 = UserAgent.factory(x.uaString).pretty()
    result2 = UserAgent.parse_pretty(result2)
    version = result2[1:4]
    # Join the version components, skipping empty parts.
    # fix: initialize s — it was unbound (NameError at `if s:`) whenever
    # the `version` slice came back empty
    s = ""
    for (j, k) in enumerate(version):
        if j == 0:
            s = version[0]
        elif k:
            s = ".".join([s, k])
    if s:
        x.data['Family Version'] = s
    if result['typ'] != 'unknown':
        x.data['Type'] = result['typ']
    if result['ua_family'] != 'unknown':
        x.data['Family'] = result['ua_family']
    elif result2[0] != "Other":
        x.data['Family'] = result2[0]
    if ostype != 'unknown':
        x.data['OS'] = ostype
    if osversion != '':
        x.data['OS Version'] = osversion
    return x
def __init__(self, project_id, connection, thread_semaphore, database_semaphore, url):
    """
    Parameters:
    ===========
    :param project_id: Identifier of the project this scan belongs to
    :param connection: MySQL database connection object
    :param thread_semaphore: This semaphore is used to control the running threads
    :param database_semaphore: This semaphore is used to control the threads
        which add information to the database
    :param url: The url for which the information is to be gathered
    :return: None
    """
    self.__project_id = project_id
    self.__connection = connection
    self.__thread_semaphore = thread_semaphore
    self.__database_semaphore = database_semaphore
    self.__url = url
    # get the ip address of the url
    with ThreadPoolExecutor(max_workers=1) as executor:
        ip = executor.submit(URL().get_ip, self.__url)
        self.__ip = ip.result()
    # we will get the headers of the request
    # fix: issue the HEAD request once instead of twice (the original
    # fired the same request for the None-check and again for .headers)
    head_response = URL().get_head_request(
        url=self.__url, user_agent=UserAgent.get_user_agent())
    self.__headers = head_response.headers if head_response is not None else ""
async def requestChallenge(pin, client):
    """Fetch challenge data for *pin* from kahoot.it, honoring the client's
    proxy hook, and return a dict of challenge/kahoot data with the
    challenge's game_options merged in."""
    options = {
        "headers": {
            "User-Agent": UserAgent(),
            "Origin": "kahoot.it",
            "Referer": "https://kahoot.it",
            "Accept-Language": "en-US,en;q=0.8",
            "Accept": "*/*"
        },
        "host": "kahoot.it",
        "protocol": "https:",
        "path": f"/rest/challenges/pin/{pin}"
    }
    proxyOptions = await client.defaults["proxy"](options)
    # proxy options either returns the options listed above
    # or returns an object with:
    # - headers (list of headers)
    # - text (text response)
    r = None
    try:
        if proxyOptions.get("headers") and proxyOptions.get("text"):
            # Proxied request: the proxy already performed the HTTP call,
            # so emulate a response object by attaching a json() callable.
            # NOTE(review): setting an attribute on a plain dict would raise
            # — presumably the proxy hook returns an attribute-capable
            # object; confirm against the proxy contract.
            r = proxyOptions

            def json():
                return JSON.loads(r["text"])
            r.json = json
        else:
            # Non-proxied (or option-merging) path: build the url from the
            # possibly-overridden options and perform the request directly.
            if proxyOptions:
                options.update(proxyOptions)
            url = (options.get("protocol") or "https:") + "//" + (options.get("host") or "kahoot.it") + (
                options.get("port") or "") + options.get("path")
            r = requests.request(options.get("method") or "GET", url, headers=options.get("headers"))
    except Exception:
        # Fall back to a direct request if the proxied path failed.
        if proxyOptions:
            options.update(proxyOptions)
        url = (options.get("protocol") or "https:") + "//" + (
            options.get("host") or "kahoot.it") + (options.get("port") or "") + options.get("path")
        r = requests.request(options.get("method") or "GET", url, headers=options.get("headers"))
    try:
        data = r.json()
        out = {
            "data": {
                "isChallenge": True,
                "twoFactorAuth": False,
                "kahootData": data.get("kahoot"),
                "rawChallengeData": data["challenge"]
            }
        }
        # Flatten the challenge's game options into the returned data dict.
        out["data"].update(data["challenge"]["game_options"])
        return out
    except Exception as e:
        raise e
def relog(self, cid):
    """Re-login to the current game with controller id *cid*.

    Blocks (polling every 0.5 s) until the client is ready, then sends a
    "relogin" packet on the controller channel.

    :param cid: the controller id to re-login with
    """
    # fix: poll with a loop instead of unbounded recursion, which could
    # exhaust the stack if the client stays not-ready for a long time
    while not self.ready:
        time.sleep(0.5)
    self.msgID += 1
    packet = {
        "channel": "/service/controller",
        "clientId": self.clientID,
        "data": {
            "cid": cid,
            "content": JSON.dumps({
                "device": {
                    "userAgent": UserAgent(),
                    "screen": {
                        "width": 2000,
                        "height": 1000
                    }
                }
            }),
            "gameid": self.gameID,
            "host": "kahoot.it",
            "type": "relogin"
        },
        "ext": {},
        "id": str(self.msgID)
    }
    self.send([packet])
def __get_programming_language(self):
    """
    Try to determine the server-side programming language, first from the
    X-Powered-By response header, then from well-known session cookies.
    :return: None
    """
    self.__thread_semaphore.acquire()
    try:  # fix: guarantee the semaphore is released even if a step raises
        try:
            self.__programming_language_used = self.__headers['X-Powered-By']
        except KeyError:
            self.__programming_language_used = None
        except Exception as e:
            print(e)
            self.__programming_language_used = None
        # If we didn't get the programming language we will try to get
        # it from the cookies
        if self.__programming_language_used is None:
            r = URL().get_request(url=self.__url, user_agent=UserAgent.get_user_agent())
            cookies = r.cookies if r is not None else ""
            session_id = requests.utils.dict_from_cookiejar(cookies)
            # session_id contains the session id of the targetted url
            if "PHPSESSID" in session_id:
                self.__programming_language_used = "PHP"
            elif "JSESSIONID" in session_id:
                self.__programming_language_used = "J2EE"
            elif "ASP.NET_SessionId" in session_id:
                self.__programming_language_used = "ASP.NET"
            elif "CFID" in session_id and "CFTOKEN" in session_id:
                # fix: the literal key "CFID & CFTOKEN" can never appear in
                # a cookie dict; ColdFusion sets CFID and CFTOKEN separately
                self.__programming_language_used = "COLDFUSION"
            else:
                self.__programming_language_used = "None"
    finally:
        self.__thread_semaphore.release()
async def requestChallenge(pin, client):
    """Request the challenge data for *pin* directly from kahoot.it and
    return it with the challenge's game_options merged in."""
    request_headers = {
        "User-Agent": UserAgent(),
        "Origin": "kahoot.it",
        "Referer": "https://kahoot.it",
        "Accept-Language": "en-US,en;q=0.8",
        "Accept": "*/*"
    }
    options = {
        "headers": request_headers,
        "host": "kahoot.it",
        "protocol": "https:",
        "path": f"/rest/challenges/pin/{pin}"
    }
    # Assemble the request url piece by piece from the option dict.
    scheme = options.get("protocol") or "https:"
    host = options.get("host") or "kahoot.it"
    port = options.get("port") or ""
    url = scheme + "//" + host + port + options.get("path")
    method = options.get("method") or "GET"
    r = requests.request(method, url, headers=options.get("headers"))
    try:
        data = r.json()
        payload = {
            "isChallenge": True,
            "twoFactorAuth": False,
            "kahootData": data.get("kahoot"),
            "rawChallengeData": data["challenge"]
        }
        payload.update(data["challenge"]["game_options"])
        return {"data": payload}
    except Exception as e:
        raise e
async def requestToken(pin, client):
    """Reserve a session for *pin* on kahoot.it and return the decoded
    session token together with the response payload; raises on a bad PIN."""
    options = {
        "headers": {
            "User-Agent": UserAgent(),
            "Origin": "kahoot.it",
            "Referer": "https://kahoot.it",
            "Accept-Language": "en-US,en;q=0.8",
            "Accept": "*/*"
        },
        "host": "kahoot.it",
        "protocol": "https:",
        "path": f"/reserve/session/{pin}/?{int(time.time() * 1000)}"
    }
    # Assemble the request url from its components.
    url = "".join([
        options.get("protocol") or "https:",
        "//",
        options.get("host") or "kahoot.it",
        options.get("port") or "",
        options.get("path"),
    ])
    r = requests.request(options.get("method") or "GET", url,
                         headers=options.get("headers"))
    # A missing session-token header means the PIN was not accepted.
    if not r.headers.get("x-kahoot-session-token"):
        raise Exception("Invalid PIN")
    try:
        data = r.json()
        token = decodeBase64(r.headers.get("x-kahoot-session-token"))
        return {
            "token": token,
            "data": data
        }
    except Exception as e:
        raise e
def __check_numerical_vulnerability(self):
    """
    Description:
    -----------
    This method is used to check the numerical SQL vulnerability in the
    give url.

    See:
    -----
    Numerical Vulnerability in references.txt

    :return: None
    """
    self.__thread_semaphore.acquire()
    try:  # fix: release the semaphore even if a request/parse raises
        payloaded_urls = Query.add_one(self.__url)
        for payloaded_url in payloaded_urls:
            r = URL().get_request(
                url=payloaded_url, user_agent=UserAgent.get_user_agent())
            if r is not None:
                new_soup_object = BeautifulSoup(r.content, "html.parser")
                # An identical page despite the altered numeric parameter
                # suggests the injected arithmetic was evaluated server-side.
                if self.__soup_object == new_soup_object:
                    print("[+] NUMERICAL VULNERABILITY FOUND IN THE DATABASE")
                    print("[+] PAYLOAD: ", payloaded_url)
                    SivaDB.update_analysis(
                        connection=self.__connection,
                        database_semaphore=self.__database_semaphore,
                        project_id=self.__project_id,
                        method="GET",
                        source=self.__url,
                        payload=payloaded_url,
                        description="NUMERICAL VULNERABILITY")
    finally:
        self.__thread_semaphore.release()
def __check_escape_sequence_vulnerability(self):
    """
    Description:
    ------------
    We will append a single quote (') to check if the sql vulnerability
    is happended or not
    :return:
    """
    # We will append ' to all the individual parameters and store it to payloaded urls
    self.__thread_semaphore.acquire()
    try:  # fix: release the semaphore even if a request/parse raises
        payloaded_urls = Query().append_payload_to_all_queries(
            url=self.__url, payload="'")
        for payloaded_url in payloaded_urls:
            print(payloaded_url)
            r = URL().get_request(
                url=payloaded_url, user_agent=UserAgent.get_user_agent())
            if r is not None:
                new_soup_object = BeautifulSoup(r.content, "html.parser")
                # Now compare bot soup objects
                SQLErrorIdentifier(
                    project_id=self.__project_id,
                    thread_semaphore=self.__thread_semaphore,
                    database_semaphore=self.__database_semaphore,
                    original_soup_object=self.__soup_object,
                    payloaded_soup_object=new_soup_object,
                    original_url=self.__url,
                    payloaded_url=payloaded_url,
                    connection=self.__connection,
                    poc_object=self.__poc_object)
    finally:
        self.__thread_semaphore.release()
async def requestToken(pin, client):
    """Reserve a session for *pin*, honoring the client's proxy hook, and
    return the decoded session token together with the response data.

    :raises Exception: when the PIN is not accepted (no session token header)
    """
    options = {
        "headers": {
            "User-Agent": UserAgent(),
            "Origin": "kahoot.it",
            "Referer": "https://kahoot.it",
            "Accept-Language": "en-US,en;q=0.8",
            "Accept": "*/*"
        },
        "host": "kahoot.it",
        "protocol": "https:",
        "path": f"/reserve/session/{pin}/?{int(time.time() * 1000)}"
    }
    proxyOptions = await client.defaults["proxy"](options)
    # proxy options either returns the options listed above
    # or returns an object with:
    # - headers (list of headers)
    # - text (text response)
    r = None
    try:
        if proxyOptions.get("headers") and proxyOptions.get("text"):
            # Proxied request: the proxy already performed the HTTP call,
            # so emulate a response object by attaching a json() callable.
            # NOTE(review): attribute assignment on a plain dict would raise
            # — presumably the proxy hook returns an attribute-capable
            # object; confirm against the proxy contract.
            r = proxyOptions

            def json():
                return JSON.loads(r["text"])
            r.json = json
        else:
            if proxyOptions:
                options.update(proxyOptions)
            url = (options.get("protocol") or "https:") + "//" + (options.get("host") or "kahoot.it") + (
                options.get("port") or "") + options.get("path")
            r = requests.request(options.get("method") or "GET", url, headers=options.get("headers"))
    except Exception:
        # Fall back to a direct request if the proxied path failed.
        if proxyOptions:
            options.update(proxyOptions)
        url = (options.get("protocol") or "https:") + "//" + (
            options.get("host") or "kahoot.it") + (options.get("port") or "") + options.get("path")
        r = requests.request(options.get("method") or "GET", url, headers=options.get("headers"))
    if not r.headers.get("x-kahoot-session-token"):
        # fix: `raise "Invalid PIN"` raises a TypeError in Python 3
        # (exceptions must derive from BaseException); raise a real
        # exception, consistent with the non-proxied requestToken
        raise Exception("Invalid PIN")
    try:
        data = r.json()
        token = r.headers.get("x-kahoot-session-token")
        token = decodeBase64(token)
        return {"token": token, "data": data}
    except Exception as e:
        raise e
def run(self):
    """Fetch the target url, build its soup object, then run the
    SQL-injection test on a background thread while the HTML
    vulnerability checks execute on this one."""
    agent = UserAgent.get_user_agent()
    self.__requests_object = URL().get_request(url=self.__url, user_agent=agent)
    page_content = self.__requests_object.content
    self.__soup_object = BeautifulSoup(page_content, "html.parser")
    # By now we have got the requests object and soup object
    # ================== SQL Injection Test ====================
    sqli_thread = Thread(target=self.check_sql_injection)
    sqli_thread.start()
    # ================= HTML VULNERABILITIES ============
    self.check_html_vulnerabilities()
def __init__(self, options=None):
    """Build a client, merging *options* over the class-level defaults.

    :param options: optional dict with "options", "modules", "proxy" and
        "wsproxy" keys that override the corresponding defaults
    """
    super().__init__()
    # fix: a mutable default argument ({}) is shared across all calls;
    # use None and substitute a fresh empty dict per call
    options = options if options is not None else {}
    self.defaults = {}
    # Assign the default values
    for i in client._defaults:
        if callable(client._defaults[i]):
            # TODO: Check if this is needed.
            self.defaults[i] = copy_func(client._defaults[i])
            continue
        self.defaults[i] = copy.deepcopy(client._defaults[i])
    # Assign values from options
    self.defaults["options"].update(options.get("options") or {})
    self.defaults["modules"].update(options.get("modules") or {})
    self.defaults["proxy"] = options.get("proxy") or self.defaults["proxy"]
    self.defaults["wsproxy"] = options.get("wsproxy") or self.defaults["wsproxy"]
    # Set up some data
    self.classes = {}
    self.handlers = {}
    self.waiting = {}
    self.data = {}
    self.cid = None
    self.gameid = None
    self.socket = None
    self.settings = None
    self.questionStartTime = None
    self.reconnectRecovery = None
    self.feedbackTime = None
    self.connected = False
    self.name = None
    self.quiz = None
    self.clientId = None
    self.loggingMode = False
    self.lastEvent = (None, None)
    self._timesync = {}
    self.twoFactorResetTime = None
    self.disconnectReason = None
    # Import modules: a module runs when enabled (truthy) or unset (None);
    # an explicit False disables it.
    for mod in self.defaults["modules"]:
        # fix idiom: compare to None with `is`, not `==`
        if self.defaults["modules"].get(mod) or self.defaults["modules"].get(mod) is None:
            try:
                f = getattr(
                    importlib.import_module(".src.modules." + mod, "kahootpy"),
                    "main")
                f(self)
            except Exception:
                pass
    m = getattr(importlib.import_module(".src.modules.main", "kahootpy"), "main")
    m(self)
    self.userAgent = UserAgent()
    self.messageId = 0
def __set_response_time_of_fastest_website(self):
    """
    Description:
    ------------
    This method will calculate the response time of the fastest website
    :return:
    """
    began = time.time()
    response = URL().get_request(
        url=self.__fastest_website,
        user_agent=UserAgent.get_user_agent())
    finished = time.time()
    # Record the elapsed time only when the request actually succeeded.
    if response is not None:
        self.__response_time_of_fastest_website = finished - began
def login(self, name, team):
    """Join the game as *name*.

    Waits until the client is ready, sends the login packet, and in team
    mode also sends the team-member list (four placeholder players when
    *team* is empty).

    :param name: display name to join with
    :param team: list of team member names (team mode only)
    """
    # fix: poll with a loop instead of unbounded recursion, which could
    # exhaust the stack if the client stays not-ready for a long time
    while not self.ready:
        time.sleep(0.5)
    self.name = name
    self.msgID += 1
    joinPacket = [{
        "channel": "/service/controller",
        "clientId": self.clientID,
        "data": {
            # fix: build the device descriptor with JSON.dumps (as relog
            # does) instead of hand-concatenating a JSON string, which
            # produces invalid JSON if the user agent contains a quote
            "content": JSON.dumps({
                "device": {
                    "userAgent": UserAgent(),
                    "screen": {"width": 1280, "height": 800}
                }
            }),
            "gameid": self.gameID,
            "host": consts.ENDPOINT_URI,
            "name": self.name,
            "type": "login"
        },
        "ext": {},
        "participantUserId": None,
        "id": str(self.msgID)
    }]
    time.sleep(0.5)
    self.send(joinPacket)
    if self.kahoot.gamemode == "team":
        joinPacket2 = [{
            "channel": "/service/controller",
            "clientId": self.clientID,
            "data": {
                "content": JSON.dumps(
                    team if type(team) == type(list()) else team if len(team) else ["Player 1", "Player 2", "Player 3", "Player 4"]),
                "gameid": self.gameID,
                "host": consts.ENDPOINT_URI,
                "id": 18,
                "type": "message"
            },
            "ext": {},
            "participantUserId": None,
            "id": str(self.msgID)
        }]
        self.msgID += 1
        time.sleep(0.5)
        self.send(joinPacket2)
def __check_programming_language(self, url):
    """
    Description:
    ============
    This method will try its level best to get the name of the programming
    language used to build the website, by inspecting file extensions of
    the links found on the page.

    Notes:
    ======
    This method will heavily used URL class from url package
    :return:
    """
    self.__thread_semaphore.acquire()
    try:  # fix: guarantee the semaphore is released even on an exception
        print("[+] ANALYSING PROGRAMMING LANGUAGE")
        # These are the popular programming languages used for designing websites
        language_names = {
            ".php": "PHP",
            ".jsp": "JSP",
            ".asp": "ASP",
            ".aspx": "ASPX",
            ".py": "PYTHON",
            ".pl": "PERL"
        }
        user_agent = UserAgent.get_user_agent()
        r = URL().get_request(url=url, user_agent=user_agent)
        if r is not None:
            soup = BeautifulSoup(r.content, "html.parser")
            for anchor in soup.find_all("a"):
                try:
                    partial_url = anchor.get("href")
                    if "http" not in partial_url:
                        new_url = URL.join_urls(url, partial_url)
                    else:
                        # fix: the original tested is_same_domain(url, new_url)
                        # before new_url was assigned — the resulting NameError
                        # was silently swallowed, skipping every absolute link
                        new_url = partial_url if URL.is_same_domain(url, partial_url) else ""
                    file_name = URL.get_file_name(new_url)
                    # fix: the inner loop reused the outer loop variable `i`,
                    # corrupting the outer iteration and its break condition
                    found = False
                    for extension, language in language_names.items():
                        if extension in file_name:
                            self.__programming_language_used = language
                            # Now we will update the programming language used into the database
                            InfoGatheringPhaseOneDatabase.update_programming_language(
                                self.__database_semaphore, self.__connection,
                                self.__project_id,
                                self.__programming_language_used)
                            found = True
                            break
                    if found:
                        break
                except Exception:
                    pass
    finally:
        self.__thread_semaphore.release()
def crawl(self, url):
    """
    Description:
    ------------
    This will crawl the urls completely

    :param url: The url to be crawled
    :return: None
    """
    # Time the fetch and feed the duration to the bob predictor
    # (presumably a rate/anomaly model — confirm its contract).
    start_time = time.time()
    r = URL().get_request(url=url, user_agent=UserAgent.get_user_agent())
    end_time = time.time()
    total_time = end_time - start_time
    self.__bob_object.predict(total_time)
    if r is not None:
        soup = BeautifulSoup(r.content, "html.parser")
        # At this stage we have got the beautiful soup objects
        # First find all the href links
        for i in soup.find_all("a"):
            try:
                partial_url = i.get("href")
                url_to_be_scanned = None  # we will scan this urls
                # Check if the partial url is actually a partial url
                if "http" in partial_url:
                    # Absolute link: only follow same-domain urls not yet seen.
                    if URL.is_same_domain(self.__base_url, partial_url):
                        if partial_url not in self.__crawled_urls:
                            self.__urls.put(partial_url)
                            self.__crawled_urls.append(partial_url)
                            url_to_be_scanned = partial_url
                else:
                    # Relative link: resolve against the base url first.
                    full_url = URL.join_urls(self.__base_url, partial_url)
                    if full_url not in self.__crawled_urls:
                        self.__urls.put(full_url)
                        self.__crawled_urls.append(full_url)
                        url_to_be_scanned = full_url
                # run a simple scan in the url
                if url_to_be_scanned is not None:
                    print("[i] CURRENTLY SCANNING [GET]: ", url_to_be_scanned)
                    # Make the scanning as a new process
                    SimpleScan(
                        project_id=self.__project_id,
                        thread_semaphore=self.__thread_semaphore,
                        database_semaphore=self.__database_semaphore,
                        url=url_to_be_scanned,
                        connection=self.__connection,
                        poc_object=self.__poc_object)
            except Exception as e:
                # NOTE(review): a None href makes `"http" in partial_url`
                # raise TypeError, which lands here and skips the link.
                print("[-] EXCEPTION OCCURED ", e)
    # Recursively drain the queue of discovered urls.
    # NOTE(review): recursion depth grows with the number of queued urls;
    # deep sites could hit the recursion limit — consider an iterative loop.
    while not self.__urls.empty():
        self.crawl(self.__urls.get())
async def main():
    """Wire the redis signal bus and broker to the agents, start them,
    then idle forever."""
    redis_dsn = 'redis://localhost/7'
    bus = AIORedisSignalBus(redis_dsn)
    broker = AIORedisBroker(redis_dsn)
    await bus.started.send('user_agent')
    # Start each agent in order: user, comment, then email.
    user_agent = UserAgent(bus=bus, broker=broker)
    comment_agent = CommentAgent(bus=bus, broker=broker)
    email_agent = EmailAgent(broker=broker)
    for agent in (user_agent, comment_agent, email_agent):
        await agent.start()
    # Keep the event loop alive.
    while True:
        await asyncio.sleep(60)
def add_if_page_found(self, url):
    """
    Description:
    ------------
    This will add the information to the database if admin page is found

    :param url: The url to be added to the database
    :return: None
    """
    # NOTE(review): the thread semaphore appears to be acquired by the
    # caller and released here — confirm against the call sites.
    try:  # fix: guarantee the release even if the request itself raises
        r = URL().get_head_request(url=url, user_agent=UserAgent.get_user_agent())
        try:
            if r.status_code == 200:
                if url not in self.__admin_pages:
                    self.__admin_pages.append(url)
        except AttributeError:
            # get_head_request returned None (request failed) — not an admin page
            pass
    finally:
        self.__thread_semaphore.release()
def add_new_users(self):
    """Create a random number of new users (0 to MAX_NUMBER_OF_NEW_USERS - 1),
    register them with the scheduler, and seed their social connections."""
    # fix idiom: random.choice over a freshly-built range list is just
    # random.randrange — same distribution, no throwaway list
    num_agents = random.randrange(MAX_NUMBER_OF_NEW_USERS)
    # Create users
    for _ in range(num_agents):
        user = UserAgent(
            self.increment_curr_user_id(),
            define_user_interests(),
            define_user_actions_probabilities(self.exp_normalized),
            self.define_user_influence(),
            self)
        self.schedule.add(user)
        self.users.append(user)
        user.add_random_friends(10)
        user.expand_influence()
def requestChallenge(sessionID, callback, proxy):
    """Fetch challenge info for *sessionID*, optionally through *proxy*.

    On success calls ``callback(True, info_dict)``; on failure calls
    ``callback(None, error, None)``.

    :param sessionID: the game pin to look up
    :param callback: result callback
    :param proxy: "" / proxy-url string, or a dict with "proxy", "options",
        "nopath" keys
    """
    proxyOptions = None
    nopath = None
    if type(proxy) == type(str()):
        proxy = proxy or ""
    elif proxy and proxy.get("proxy"):
        proxyOptions = proxy.get("options") or {}
        nopath = proxy.get("nopath")
        proxy = proxy.get("proxy")
    else:
        proxy = ""
    uri = None
    if not nopath:
        base = "https://" + consts.ENDPOINT_URI + consts.CHALLENGE_ENDPOINT + "/pin/" + sessionID
        # fix: guard the empty-proxy case before indexing proxy[-1]
        # (IndexError on ""), and add the direct no-proxy branch that
        # requestToken already has
        if proxy and proxy[-1] == "/":
            uri = proxy + base
        elif proxy:
            uri = proxy + "/" + base
        else:
            uri = base
    _uri = urllib.parse.urlparse(uri)
    options = {
        "port": consts.ENDPOINT_PORT,
        "headers": {
            "user-agent": UserAgent(),
            # fix: hostname is an attribute of the parsed result (_uri),
            # not of the uri string
            "host": (proxy and _uri.hostname) or "kahoot.it",
            "referer": "https://kahoot.it/",
            "accept-language": "en-US,en;q=0.8",
            "accept": "*/*"
        }
    }
    if proxyOptions:
        options.update(proxyOptions)
    r = requests.get(uri)
    try:
        data = r.json()
    except Exception as e:
        return callback(None, e, None)
    try:
        inf = {
            "twoFactorAuth": False,
            "gameMode": data["challenge"]["type"],
            "kahootData": data["kahoot"],
            "rawChallengeData": data["challenge"]
        }
        # fix: dict.update() returns None, so the original passed None to
        # the callback; update in place and pass the dict itself
        inf.update(data["challenge"]["game_options"])
        return callback(True, inf)
    except Exception as e:
        return callback(None, e, None)
def __init__(self, project_id, url, thread_semaphore, database_semaphore, soup_object, connection, poc_object):
    """Store the scan context and, when the url carries a query string,
    immediately run the escape-sequence and numerical SQL checks."""
    self.__project_id = project_id
    self.__url = url
    self.__thread_semaphore = thread_semaphore
    self.__database_semaphore = database_semaphore
    self.__connection = connection
    self.__poc_object = poc_object
    # NOTE: self.__soup_object is the original unaltered BeautifulSoup object
    if soup_object is None:
        response = URL().get_request(
            url=self.__url, user_agent=UserAgent.get_user_agent())
        self.__soup_object = BeautifulSoup(response.content, "html.parser")
    else:
        self.__soup_object = soup_object
    if URL.is_query_present(self.__url):
        self.__check_escape_sequence_vulnerability()
        self.__check_numerical_vulnerability()
def requestToken(sessionID, callback, proxy):
    """Request a session token for *sessionID*, optionally through *proxy*.

    On success calls ``callback(token, challenge, data)``; on failure calls
    ``callback(None, error_or_None, None)``.

    :param sessionID: the game pin to reserve a session for
    :param callback: result callback
    :param proxy: "" / proxy-url string, or a dict with "proxy", "options",
        "nopath" keys
    """
    proxyOptions = None
    nopath = None
    if type(proxy) == type(str()):
        proxy = proxy or ""
    elif proxy and proxy.get("proxy"):
        proxyOptions = proxy.get("options") or {}
        nopath = proxy.get("nopath")
        proxy = proxy.get("proxy")
    else:
        proxy = ""
    uri = None
    if not nopath:
        if proxy and proxy[-1] == "/":
            uri = proxy + "https://" + consts.ENDPOINT_URI + consts.TOKEN_ENDPOINT + str(
                sessionID) + "/?" + str(math.floor(time.time() * 1000))
        elif proxy:
            uri = proxy + "/https://" + consts.ENDPOINT_URI + consts.TOKEN_ENDPOINT + str(
                sessionID) + "/?" + str(math.floor(time.time() * 1000))
        else:
            uri = "https://" + consts.ENDPOINT_URI + consts.TOKEN_ENDPOINT + str(
                sessionID) + "/?" + str(math.floor(time.time() * 1000))
    _uri = urllib.parse.urlparse(uri)
    options = {
        "port": consts.ENDPOINT_PORT,
        "headers": {
            "user-agent": UserAgent(),
            # fix: hostname is an attribute of the parsed result (_uri);
            # the original read .hostname off the uri *string*, which
            # raises AttributeError whenever a proxy is set
            "host": (proxy and _uri.hostname) or "kahoot.it",
            "referer": "https://kahoot.it/",
            "accept-language": "en-US,en;q=0.8",
            "accept": "*/*"
        }
    }
    if proxyOptions:
        options.update(proxyOptions)
    r = requests.get(uri)
    if not r.headers.get("x-kahoot-session-token"):
        return callback(None, None, None)
    try:
        data = r.json()
    except Exception as e:
        return callback(None, e, None)
    callback(r.headers.get("x-kahoot-session-token"), data["challenge"], data)
def __init__(self, num_agents):
    """Create the agent-based model with *num_agents* initial users.

    :param num_agents: number of UserAgent instances created up front
    """
    super().__init__()
    self.running = True
    self.num_agents = num_agents
    # Separate activation schedules for users, groups and role agents.
    self.schedule = RandomActivation(self)
    self.schedule_groups = RandomActivation(self)
    self.schedule_roles = RandomActivation(self)
    # Exponential sample normalized to [0, 1]; used as the pool from which
    # user action probabilities / influence values are drawn.
    self.exp = np.random.exponential(1, num_agents * 2)
    self.exp_normalized = [
        float(value) / max(self.exp) for value in self.exp
    ]
    self.influence_values = deepcopy(self.exp_normalized)
    self.users = []
    self.role_agents = []
    self.groups = []
    self.curr_user_id = 0
    self.negotiations = {}
    self.roles_count = defaultdict(list)
    self.curr_iteration = 0
    self.role_changes_from = {}
    # Pre-seed an empty history list per known role.
    for role in roles:
        self.roles_count[role] = []
    # Create users
    for i in range(num_agents):
        user = UserAgent(
            self.increment_curr_user_id(),
            define_user_interests(),
            define_user_actions_probabilities(self.exp_normalized),
            self.define_user_influence(),
            self)
        self.schedule.add(user)
        self.users.append(user)
    # Wire up the social graph: friend count scales with a random
    # normalized-influence draw (at least 1 friend each).
    for user in self.users:
        user.add_random_friends(
            round(
                math.ceil(
                    random.choice(self.exp_normalized) * num_agents / 3)) + 1)
        user.expand_influence()
    # Collectors for per-agent and per-model (group-level) statistics.
    self.data_collector = DataCollector(
        agent_reporters={"Post_written": get_number_of_post_written})
    self.data_group_collector = DataCollector(
        model_reporters={
            "biggestGroup": biggest_group,
            "groupSizeDistribution": group_size_dist,
            "number_of_groups": get_number_of_groups
        })
def main():
    """Look up the closest store to the address given on the command line
    and print the distance, formatted per the CLI arguments.

    Exits with an error message when the API_KEY environment variable is
    not set.
    """
    key = os.environ.get('API_KEY', None)
    if key is None:
        sys.exit('Please provide an API key provided for lookup [env=API_KEY]')
    args = parse_args(sys.argv[1:])
    # fix: Python 2 `print` statements are syntax errors under Python 3
    # (the surrounding codebase uses Python 3 features such as f-strings);
    # converted to print() calls
    print('Looking for the closest store near the provided address...')
    client = GoogleClient(UserAgent(), key)
    current_location = client.get_coords(args.address)
    closest_store = get_closest_store(current_location)
    output = client.get_distance(args.address, closest_store, units=args.units)
    print('...Done!')
    print('==============================================================')
    print(format_output(output, args.output))
    print('==============================================================')
def __get_robots(self):
    """
    Description:
    ------------
    This method is used to get the robots.txt file from the remote server
    :return:
    """
    self.__thread_semaphore.acquire()
    try:  # fix: guarantee the semaphore is released even on an exception
        robots_url = URL.join_urls(self.__url, "/robots.txt")
        print("[+] GETTING ROBOTS.TXT AT ", robots_url)
        # fix: probe robots_url itself — the original sent the HEAD request
        # to the site root, so the 200 check never reflected robots.txt
        r = URL().get_head_request(url=robots_url,
                                   user_agent=UserAgent.get_user_agent())
        if r is not None:
            if r.status_code == 200:
                robots_file_location = "projects/project-" + str(
                    self.__project_id) + "/robots.txt"
                File.download_file(local_file_location=robots_file_location,
                                   remote_file_location=robots_url)
            else:
                print("[-] NO robots.txt FOUND IN THE SERVER")
    finally:
        self.__thread_semaphore.release()
def __init__(self):
    """Initialize the client's default state and load every bundled
    event-handler module."""
    super().__init__()
    # Default (pre-connection) state.
    self.classes = {}
    self.connected = False
    self.data = {}
    self.gameid = None
    self.handlers = {}
    self.loggingMode = False
    self.name = None
    self.reconnectRecovery = None
    self.settings = {}
    self.socket = None
    self.twoFactorResetTime = None
    self.userAgent = UserAgent()
    self.quiz = {}
    # Each module exposes a main(client) hook that registers its handlers.
    module_names = ("answer", "backup", "extraData", "feedback", "gameReset",
                    "main", "nameAccept", "podium", "questionEnd",
                    "questionReady", "questionStart", "quizEnd", "quizStart",
                    "teamAccept", "teamTalk", "timeOver")
    for module_name in module_names:
        entry_point = getattr(
            importlib.import_module(f".src.modules.{module_name}", "KahootPY"),
            "main")
        entry_point(self)