def parse(self, response):
    """Entry point: replay collected parameter-less URLs, or log in first.

    When no login is required, every in-domain URL without parameters is
    re-requested so the original response can be saved; otherwise the login
    form on *response* is filled and submitted.
    """
    #print self.urls
    #print self.loginRequired
    #------------attempt login attacks
    print str(len(self.actual_payloads)) + " " + str(len(self.urls))
    if self.loginRequired == "false":
        for temp in self.urls:
            login = "******"
            # Skip ignored links, but only when targeting the "app4" app.
            if any ( term in temp["url"] for term in self.ignoreLinks ) and "app4" in temp["url"]:
                print "**********" + temp["url"]
                continue
            if self.domain in temp["url"]:
                start_time = time.time()*1000  # ms timestamp, used downstream for response timing
                if len(temp["param"]) == 0:
                    metaobj = {'temp':str(temp["url"]), 'login':login, 'form':"false", "params":[], 'method':"", 'start_time':start_time}
                    yield Request(url=str(temp["url"]), meta=metaobj , callback=self.save_original_resp)
                # Disabled else-branch left behind as a no-op string literal.
                '''
                else:
                '''
    else:
        # Login required: fill the form with the first configured credentials.
        #print response.url + ": " + self.login_user[0] + ": " + self.login_pass[0]
        args, url, method, self.loginid, self.passid = fill_login_form(response.url, response.body, self.login_user[0], self.login_pass[0])
        print "IDs: " + self.loginid + ", " + self.passid
        metaobj = {'temp':url, 'login':self.loginRequired, 'form':"true", "params":args, 'method':method}
        yield FormRequest(url, method=method, meta=metaobj, formdata=args, callback=self.after_login)
        #time.sleep(5)
    return
def parse(self, response): if self.loginRequired == "false": print self.domain for temp in self.urls: login = "******" if self.domain in temp: yield Request(url=temp, meta={ 'temp': temp, 'login': login }, callback=self.save_original_resp) else: #print response args, url, method = fill_login_form(response.url, response.body, self.login_user[0], self.login_pass[0]) print str(args) + " , " + url self.login_reqd = "true" for a in args: if a[1] == self.login_user[0]: self.loginid = a[0] if a[1] == self.login_pass[0]: self.passid = a[0] print "IDs: " + self.loginid + ", " + self.passid yield FormRequest(url, method=method, meta={'login': self.login_reqd}, formdata=args, callback=self.after_login) return
def parse(self,response): print "Status:",response.status #print "Request Headers" #print response.request.headers.items() #print "\n\n" #print "Response Headers" #print response.headers.items() #print "\n\n" login_user ="******" #[email protected]"#self.credentials[response.request.url][0] login_pass ="******" #"admin"#self.credentials[response.request.url][1] main_file = open("Singleinput.json",'r') infoList = json.load(main_file) start_urls = infoList.get("starturl") login_url = infoList.get("loginurl") login_user = infoList.get("username") login_pass = infoList.get("password") urlDomain = login_url[login_url.find("//"):] urlDomain = urlDomain[2:] if (urlDomain.find("/") != -1): allowed_domains = urlDomain[0:urlDomain.find("/")] else: allowed_domains = urlDomain args, url, method = fill_login_form(response.url, response.body, login_user, login_pass) #print "args",args #self.firstloginscrape() #yield FormRequest(start_urls[0], method=method, formdata=args,dont_filter=True,callback=self.firstpagescrape) #yield FormRequest(self.start_urls[0], meta={'url':self.start_urls[0]},callback=self.firstpagescrape) yield FormRequest(response.url, method=method, formdata=args,dont_filter=True,callback=self.after_login) """
def parse(self, response): # deleting files: try: if os.path.exists("newPost.txt"): os.remove("newPost.txt") if os.path.exists("newgetLinks.txt"): os.remove("newgetLinks.txt") if os.path.exists("scrappedurls.txt"): os.remove("scrappedurls.txt") except: pass # print "Status:",response.status # print "Request Headers" # print response.request.headers.items() # print "\n\n" # print "Response Headers" # print response.headers.items() # print "\n\n" login_user = self.credentials[response.request.url][0] print login_user login_pass = self.credentials[response.request.url][1] print login_pass args, url, method, name, number = fill_login_form(response.url, response.body, login_user, login_pass) if name: yield FormRequest.from_response( response, method=method, formdata=args, formname=name, callback=self.after_login ) else: yield FormRequest.from_response( response, method=method, formdata=args, formnumber=number, callback=self.after_login )
def login(self, response):
    """Fill the DVWA login form and submit it."""
    form_data, action, http_method = fill_login_form(response.url, response.body, DVWA_USER_NAME, DVWA_PASSWORD)
    yield scrapy.FormRequest(action, formdata=dict(form_data), method=http_method, callback=self.post_login)
def parse_login(self, response):
    """Fill the login form, handling optional browser-side challenge pages.

    NOTE(review): this mixes Scrapy (fill_login_form/FormRequest) with live
    browser automation (self.browser, click/prepare helpers) — the browser
    steps run before the FormRequest is scheduled; confirm that ordering is
    intended.
    """
    # got the login page, let's fill the login form...
    data, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_password)
    # Handle an optional "Device authorization" challenge in the live browser.
    device_auth = self.try_get_element_by_xpath("//h1[contains(text(),'Device authorization')]")
    if device_auth is not None:
        self.prepare_input_by_name('deviceAuth[answer]', self.secret_answer)
        self.click_button_by_xpath("//button[@button-role='save']")
    # If not yet on the contact-info page, a "Re-enter password" gate may appear.
    if self.contact_info_url not in self.browser.current_url:
        re_enter_password_header = self.try_get_element_by_xpath("//h1[contains(text(),'Re-enter password')]")
        if re_enter_password_header is not None:
            self.prepare_input_by_name('sensitiveZone[password]', self.password)
            self.click_button_by_xpath("//button[@button-role='continue']")
    # yield req
    return FormRequest(url, formdata=dict(data), method=method, callback=self.after_login)
def parse(self, response): # args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass) #return FormRequest(url, method=method, formdata=args, dont_filter=True,callback=self.after_login) args, url, method, name , number = fill_login_form(response.url, response.body, self.login_user, self.login_pass) credentials = list() tmpparam = dict() print args for a in args: if a[0].find("user") > -1 or a[0].find("admin") > -1: tmpparam["userid"] = a[0] if a[0].find("password") > -1: tmpparam["passwordid"] = a[0] # tmpparam["submit"] = "submit" tmpparam["url"] = self.start_urls[0] tmpparam["login"] = "" credentials.append(tmpparam) f = open("json/credentials.json", 'w') f.write(json.dumps(credentials,indent= 4, sort_keys = True)) f.close() if name: yield FormRequest.from_response(response, method=method, formdata=args, formname=name, dont_filter=True,callback=self.after_login) else: yield FormRequest.from_response(response, method=method, formdata=args, formnumber=number, dont_filter=True,callback=self.after_login)
def parse(self, response): if parameter.login==True: args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass) #return FormRequest(url, method=method, formdata=args, callback=self.after_login) #args={'username':'******', 'password':'******'} #print args argsdict={} print response.headers for i in args: argsdict[i[0]]=i[1] print argsdict #print response.request return FormRequest.from_response( response, formdata=argsdict, dont_filter=True, meta = {'dont_merge_cookies': True}, callback=self.after_login ) else: #args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass) return Request( response.url, meta = {'dont_merge_cookies': True}, callback=self.parse_page )
def parse_login_page(self, response):
    """Submit the login form using the credentials carried in the request meta."""
    user = response.request.meta["username"]
    secret = response.request.meta["password"]
    form_data, action, http_method = fill_login_form(response.url, response.body, user, secret)
    return FormRequest(action, method=http_method, formdata=form_data, callback=self.after_login, dont_filter=True)
def parse(self, response):
    """Fill the page's login form and submit it."""
    form_data, action, http_method = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    return FormRequest(action, method=http_method, formdata=form_data, callback=self.after_login)
def login(self, response):
    """Fill and submit the login form, then continue with parse()."""
    form_data, action, http_method = fill_login_form(response.url, response.body, self.login_user, self.login_password)
    return scrapy.FormRequest(action, formdata=dict(form_data), method=http_method, callback=self.parse)
def parse_login(self, response):
    """Perform authentication for urls that require it."""
    form_data, action, http_method = fill_login_form(response.url, response.body, self.userid, self.password)
    return FormRequest(action, formdata=dict(form_data), method=http_method, callback=self.start_crawl)
def check_sample(name):
    """Assert fill_login_form output for a stored sample matches its expected JSON."""
    with open(sample_json(name), 'rb') as fh:
        url, expected = json.loads(fh.read().decode('utf8'))
    with open(sample_html(name), 'rb') as fh:
        page = fh.read().decode('utf-8')
    # Round-trip through JSON to normalize tuples into lists before comparing.
    got = json.loads(json.dumps(fill_login_form(url, page, "USER", "PASS")))
    assert got == expected
def parse(self, response):
    """Locate the login form and submit it by name, or by index as a fallback."""
    form_data, action, http_method, form_name, form_number = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    if form_name:
        yield FormRequest.from_response(response, method=http_method, formdata=form_data, formname=form_name, dont_filter=True, callback=self.after_login)
    else:
        yield FormRequest.from_response(response, method=http_method, formdata=form_data, formnumber=form_number, dont_filter=True, callback=self.after_login)
def parse(self, response):
    """Fill the login form and submit it, confirming the result afterwards."""
    form_data, action, http_method = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    return FormRequest(action, method=http_method, formdata=form_data, callback=self.confirm_login, dont_filter=True)
def __are_cookies_the_same_after_login(self, url): r = requests.get(url, verify=False) # User login form to get input name and POST url response = fill_login_form(url, r.text, "user", "pass") user_input_box_name = '' pass_input_box_name = '' post_url = '' for input_tuple in response[0]: if input_tuple[1] == 'user': user_input_box_name = input_tuple[0] if input_tuple[1] == 'pass': pass_input_box_name = input_tuple[0] post_url = response[1] if not user_input_box_name or not pass_input_box_name or not post_url: return payload = {} for res in response[0]: if res[1] == 'user': payload[res[0]] = Scanner.accounts[0][0] elif res[1] == 'pass': payload[res[0]] = Scanner.accounts[0][1] else: payload[res[0]] = res[1] account_index = 1 is_cookie_the_same = False # Actual login using requests s = requests.Session() login_request = s.post(post_url, data=payload, verify=False) while not self.__has_logout_link_in_page( login_request.text) and account_index < len(Scanner.accounts): for res in response[0]: if res[1] == 'user': payload[res[0]] = Scanner.accounts[account_index][0] elif res[1] == 'pass': payload[res[0]] = Scanner.accounts[account_index][1] else: payload[res[0]] = res[1] login_request = s.post(post_url, data=payload, verify=False) account_index += 1 first = s.cookies.get_dict() second_request = s.get(url, verify=False) second = s.cookies.get_dict() for k1, v1 in first.iteritems(): for k2, v2 in second.iteritems(): if k1 == k2 and v1 == v2: is_cookie_the_same = True return is_cookie_the_same
def login(self, response):
    """Fill out the login form and return the request."""
    form_data, action, http_method = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    self.log('Logging in...')
    return FormRequest(action, method=http_method, formdata=form_data, callback=self.confirm_login, dont_filter=True)
def check_sample(name):
    """Assert that filling *name*'s sample form yields the recorded expected values.

    Fixes: drops the dead `nose` dependency (plain assert is equivalent and
    works under pytest), and opens both files in binary mode before decoding
    so the code behaves identically on Python 2 and 3 — matching the sibling
    check_sample implementation elsewhere in this file.
    """
    with open(sample_json(name), 'rb') as f:
        url, expected_values = json.loads(f.read().decode('utf8'))
    with open(sample_html(name), 'rb') as f:
        body = f.read().decode('utf-8')
    values = fill_login_form(url, body, "USER", "PASS")
    values = json.loads(json.dumps(values))  # normalize tuple -> list
    assert values == expected_values
def parse_login(self, response):
    """Fill the login form on the fetched page and send the authentication request."""
    form_data, action, http_method = fill_login_form(response.url, response.body, self.login_user, self.login_password)
    return FormRequest(action, formdata=dict(form_data), method=http_method, callback=self.start_crawl)
def login(self, response):
    """Generate a login request."""
    logging.log(logging.WARNING, "---------------Logging in---------------")
    form_data, action, http_method = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    return FormRequest(action, method=http_method, formdata=form_data, callback=self.check_login_response)
def parse(self,response): print "Status:",response.status args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass) #print "args",args #self.firstloginscrape() #yield FormRequest(start_urls[0], method=method, formdata=args,dont_filter=True,callback=self.firstpagescrape) #yield FormRequest(self.start_urls[0], meta={'url':self.start_urls[0]},callback=self.firstpagescrape) yield FormRequest(response.url, method=method, formdata=args,dont_filter=True,callback=self.after_login) """
def parse(self, response): print "beginning authentication" args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass) return FormRequest(url, method=method, formdata=args, callback=self.auth_login)
def login(self,response): args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass) print args,url,method #print args argsdict={} for i in args: argsdict[i[0]]=i[1] #print argsdict return FormRequest(url,formdata=args, dont_filter=True,callback=self.after_login)
def login(self, response):
    """Fill the fetched login page's form and submit our credentials."""
    form_data, action, http_method = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    return scrapy.FormRequest(action, formdata=dict(form_data), dont_filter=True, method=http_method, callback=self.set_exam_category)
def parse(self, response):
    """Log in with the configured credentials unless a session is already active."""
    self.logger.info('O usuário esta logado: ' + str(self.is_logged))
    if self.is_logged:
        return  # already authenticated; nothing to submit (returns None, as before)
    form_data, action, http_method = fill_login_form(response.url, response.body, cfg.pythonbt['username'], cfg.pythonbt['password'])
    return FormRequest(action, method=http_method, formdata=form_data, callback=self.after_login, dont_filter=False)
def parse(self, response):
    """Entry point: replay collected parameter-less URLs, or log in first.

    When no login is required, every in-domain URL without parameters is
    re-requested so the original response can be saved; otherwise the login
    form on *response* is filled and submitted.
    """
    #print self.urls
    #print self.loginRequired
    #------------attempt login attacks
    print str(len(self.actual_payloads)) + " " + str(len(self.urls))
    if self.loginRequired == "false":
        for temp in self.urls:
            login = "******"
            # Skip ignored links, but only when targeting the "app4" app.
            if any(term in temp["url"] for term in self.ignoreLinks) and "app4" in temp["url"]:
                print "**********" + temp["url"]
                continue
            if self.domain in temp["url"]:
                start_time = time.time() * 1000  # ms timestamp, used downstream for response timing
                if len(temp["param"]) == 0:
                    metaobj = {
                        'temp': str(temp["url"]),
                        'login': login,
                        'form': "false",
                        "params": [],
                        'method': "",
                        'start_time': start_time
                    }
                    yield Request(url=str(temp["url"]), meta=metaobj, callback=self.save_original_resp)
                # Disabled else-branch left behind as a no-op string literal.
                '''
                else:
                '''
    else:
        # Login required: fill the form with the first configured credentials.
        #print response.url + ": " + self.login_user[0] + ": " + self.login_pass[0]
        args, url, method, self.loginid, self.passid = fill_login_form(
            response.url, response.body, self.login_user[0],
            self.login_pass[0])
        print "IDs: " + self.loginid + ", " + self.passid
        metaobj = {
            'temp': url,
            'login': self.loginRequired,
            'form': "true",
            "params": args,
            'method': method
        }
        yield FormRequest(url, method=method, meta=metaobj, formdata=args, callback=self.after_login)
        #time.sleep(5)
    return
def parse(self,response): #print "Status:",response.status #print "Request Headers" #print response.request.headers.items() #print "\n\n" #print "Response Headers" #print response.headers.items() #print "\n\n" login_user = "******"#self.credentials[response.request.url][0] login_pass = "******"#self.credentials[response.request.url][1] args, url, method = fill_login_form(response.url, response.body, login_user, login_pass) yield FormRequest(response.url, method=method, formdata=args,dont_filter=True,callback=self.after_login) """
def login(self, response): args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass) print args, url, method #print args argsdict = {} for i in args: argsdict[i[0]] = i[1] #print argsdict return FormRequest(url, formdata=args, dont_filter=True, callback=self.after_login)
def main():
    """CLI driver: list stored samples, or fetch a URL and dump its filled login form."""
    opts, args = parse_opts()
    if opts.list:
        print("\n".join(list_samples()))
        return
    url = args[0]
    page = requests.get(url)
    result = (url, fill_login_form(url, page.text, "USER", "PASS"))
    print(json.dumps(result, indent=3))
    if opts.write:
        # Optionally persist both the raw HTML and the expected-values JSON.
        with open(sample_html(opts.write), 'wb') as fh:
            fh.write(page.text.encode('utf-8'))
        with open(sample_json(opts.write), 'wb') as fh:
            json.dump(result, fh, indent=3)
def parse_start_url(self, response):
    """Store the meta credentials on the spider and log in if the host resolves."""
    self.user = response.meta['user']
    self.club = response.meta['club']
    self.password = response.meta['password']
    self.logger.info('Logging in to: %s', response.url)
    if not self.url_exists(response.url.split('/')[2]):
        # TODO: Nori returns 404 so let´s add that later
        raise CloseSpider('Url not found :-(')
    self.logger.info('User: %s', response.meta['user'])
    self.logger.info('Password: %s', response.meta['password'])
    form_data, action, http_method = fill_login_form(response.url, response.body, response.meta['user'], response.meta['password'])
    return FormRequest(action, method=http_method, formdata=form_data, callback=self.logged_in)
def get_login_form_attributes(url): print url r = requests.get(url, verify=False) try: response = fill_login_form(url, r.text, "user", "pass") user_input_box_name = '' pass_input_box_name = '' for input_tuple in response[0]: if input_tuple[1] == 'user': user_input_box_name = input_tuple[0] if input_tuple[1] == 'pass': pass_input_box_name = input_tuple[0] except: print('\033[91m Cannot parse form... Skipping exploit... \033[0m') return ('', '') return (user_input_box_name, pass_input_box_name)
def main():
    """CLI tool: fetch *url* and print the action, method, and filled payload.

    Fix: when ``requests`` is missing, the original printed a message and then
    fell through to ``requests.get`` anyway, crashing with a NameError; it now
    exits cleanly after the message.
    """
    ap = ArgumentParser()
    ap.add_argument('-u', '--username', default='username')
    ap.add_argument('-p', '--password', default='secret')
    ap.add_argument('url')
    args = ap.parse_args()
    try:
        import requests
    except ImportError:
        print('requests library is required to use loginform as a tool')
        raise SystemExit(1)
    r = requests.get(args.url)
    values, action, method = fill_login_form(args.url, r.text, args.username, args.password)
    print('url: {0}\nmethod: {1}\npayload:'.format(action, method))
    for k, v in values:
        print('- {0}: {1}'.format(k, v))
def parse(self, response):
    """Fill the login form, inspect the Set-Cookie header, and submit the form.

    NOTE(review): ``values`` and ``cookieHolder`` are computed (and partially
    printed) but never used by the returned request — they look like leftover
    debugging/experiment code; confirm before relying on them.
    """
    print "responce is", response.url
    args, url, method = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    foo = response.headers['Set-Cookie']
    print "foo is", foo
    # Parse "k=v;" pairs out of the raw Set-Cookie header for inspection.
    values = {k.strip(): v for k, v in re.findall(r'(.*?)=(.*?);', foo)}
    print "values", values
    #self.driver.add_cookie(values)
    # Extract the value of the first cookie (text before the first ';', after '=').
    tmpCookie = response.headers.getlist('Set-Cookie')[0].split(";")[0].split("=")[1]
    print 'cookie from login', tmpCookie
    cookieHolder = dict(SESSION_ID=tmpCookie)
    return FormRequest(url, method=method, formdata=args, callback=self.after_login)
def parse(self, response): print self.domain for temp in self.urls: login = "******" if self.domain in temp: yield Request(url=temp, meta={'temp':temp, 'login':login} , callback=self.save_original_resp) #print response args, url, method = fill_login_form(self.start_urls[0], response.body, self.login_user[0], self.login_pass[0]) #print args self.login_reqd = "true" for a in args: if a[1] == self.login_user[0]: self.loginid = a[0] if a[1] == self.login_pass[0]: self.passid = a[0] print "IDs: " + self.loginid + ", " + self.passid yield FormRequest(url, method=method, meta={'login':self.login_reqd}, formdata=args, callback=self.after_login) return
def parse(self,response): print "Status:",response.status #print "Request Headers" #print response.request.headers.items() #print "\n\n" #print "Response Headers" #print response.headers.items() #print "\n\n" print "url:" + response.url login_user ="******" #[email protected]"#self.credentials[response.request.url][0] login_pass ="******" #"admin"#self.credentials[response.request.url][1] args, url, method = fill_login_form(response.url, response.body, login_user, login_pass) print "args" + str(args) + ": " + url #self.firstloginscrape() #yield FormRequest(start_urls[0], method=method, formdata=args,dont_filter=True,callback=self.firstpagescrape) #yield FormRequest(self.start_urls[0], meta={'url':self.start_urls[0]},callback=self.firstpagescrape) yield FormRequest(response.url, method=method, formdata=args,dont_filter=True,callback=self.after_login) """
def parse(self, response):
    """Authenticate: app8 uses hard-coded field names; others use form detection."""
    if self.start_urls[0].find('app8.com') > -1:
        self.printText(self.login_user)
        self.printText(self.login_pass)
        return scrapy.FormRequest.from_response(response, formdata={'email': self.login_user, 'password': self.login_pass}, method="POST", dont_filter=True, callback=self.after_login)
    form_data, action, http_method = fill_login_form(get_base_url(response), response.body, self.login_user, self.login_pass)
    self.printText("args " + str(form_data))
    self.printText("url " + str(action))
    self.printText("Method " + str(http_method))
    self.printText("Printing login response")
    # Open the JSON envelope and record the discovered form before submitting.
    self.requestwriter.write("{\"data\":[")
    self.loginFormFileWriter(self.start_urls[0], action, http_method, form_data)
    self.printText(response)
    return scrapy.FormRequest(action, method=http_method, formdata=form_data, dont_filter=True, callback=self.after_login)
def parse(self,response): main_file = open("Singleinput.json",'r') infoList = json.load(main_file) start_urls = infoList.get("starturl") login_url = infoList.get("loginurl") login_user = infoList.get("username") login_pass = infoList.get("password") urlDomain = login_url[login_url.find("//"):] urlDomain = urlDomain[2:] if (urlDomain.find("/") != -1): allowed_domains = urlDomain[0:urlDomain.find("/")] else: allowed_domains = urlDomain # login_user = "******"#self.credentials[response.request.url][0] # login_pass = "******"#self.credentials[response.request.url][1] args, url, method = fill_login_form(response.url, response.body, login_user, login_pass) yield FormRequest(response.url, method=method, formdata=args,dont_filter=True,callback=self.after_login) """
def parse(self, response):
    """Submit the detected login form, addressing it by name when available."""
    form_data, action, http_method, form_name, form_number = fill_login_form(response.url, response.body, self.login_user, self.login_pass)
    kwargs = dict(method=http_method, formdata=form_data, dont_filter=True, callback=self.after_login)
    if form_name:
        kwargs['formname'] = form_name
    else:
        kwargs['formnumber'] = form_number
    yield FormRequest.from_response(response, **kwargs)
def main():
    """CLI driver: list samples, or fetch a URL (with an MSIE user agent) and dump the filled form."""
    opts, args = parse_opts()
    if opts.list:
        print("\n".join(list_samples()))
        return
    print(args)
    url = args[0]
    headers = {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'}
    page = requests.get(url, headers=headers)
    result = (url, fill_login_form(url, page.text, "USER", "PASS"))
    print(json.dumps(result, indent=3))
    if opts.write:
        # Optionally persist both the raw HTML and the expected-values JSON.
        with open(sample_html(opts.write), 'wb') as fh:
            fh.write(page.text.encode('utf-8'))
        with open(sample_json(opts.write), 'wb') as fh:
            json.dump(result, fh, indent=3)
def parse(self, response): #deleting files: try: if os.path.exists("newPost.txt"): os.remove("newPost.txt") if os.path.exists("newgetLinks.txt"): os.remove("newgetLinks.txt") if os.path.exists("scrappedurls.txt"): os.remove("scrappedurls.txt") except: pass #print "Status:",response.status #print "Request Headers" #print response.request.headers.items() #print "\n\n" #print "Response Headers" #print response.headers.items() #print "\n\n" login_user = self.credentials[response.request.url][0] print login_user login_pass = self.credentials[response.request.url][1] print login_pass args, url, method, name, number = fill_login_form( response.url, response.body, login_user, login_pass) if name: yield FormRequest.from_response(response, method=method, formdata=args, formname=name, callback=self.after_login) else: yield FormRequest.from_response(response, method=method, formdata=args, formnumber=number, callback=self.after_login)
def parse(self, response): print "Status:", response.status #print "Request Headers" #print response.request.headers.items() #print "\n\n" #print "Response Headers" #print response.headers.items() #print "\n\n" print "url:" + response.url login_user = "******" #[email protected]"#self.credentials[response.request.url][0] login_pass = "******" #"admin"#self.credentials[response.request.url][1] args, url, method = fill_login_form(response.url, response.body, login_user, login_pass) print "args" + str(args) + ": " + url #self.firstloginscrape() #yield FormRequest(start_urls[0], method=method, formdata=args,dont_filter=True,callback=self.firstpagescrape) #yield FormRequest(self.start_urls[0], meta={'url':self.start_urls[0]},callback=self.firstpagescrape) yield FormRequest(response.url, method=method, formdata=args, dont_filter=True, callback=self.after_login) """
def parse(self, response): print "Status:", response.status #print "Request Headers" #print response.request.headers.items() #print "\n\n" #print "Response Headers" #print response.headers.items() #print "\n\n" login_user = "******" #[email protected]"#self.credentials[response.request.url][0] login_pass = "******" #"admin"#self.credentials[response.request.url][1] main_file = open("Singleinput.json", 'r') infoList = json.load(main_file) start_urls = infoList.get("starturl") login_url = infoList.get("loginurl") login_user = infoList.get("username") login_pass = infoList.get("password") urlDomain = login_url[login_url.find("//"):] urlDomain = urlDomain[2:] if (urlDomain.find("/") != -1): allowed_domains = urlDomain[0:urlDomain.find("/")] else: allowed_domains = urlDomain args, url, method = fill_login_form(response.url, response.body, login_user, login_pass) #print "args",args #self.firstloginscrape() #yield FormRequest(start_urls[0], method=method, formdata=args,dont_filter=True,callback=self.firstpagescrape) #yield FormRequest(self.start_urls[0], meta={'url':self.start_urls[0]},callback=self.firstpagescrape) yield FormRequest(response.url, method=method, formdata=args, dont_filter=True, callback=self.after_login) """
def parse(self, response):
    """Load scraped form data and, when login is required, launch login attacks.

    For the 'bm3.com' target each payload is injected directly into the stored
    form parameters; for every other target the payload is appended to the
    username and the login form is re-filled per payload.
    """
    #print self.urls
    #print self.loginRequired
    #------------attempt login attacks
    #print self.domain
    with open('data.json') as data_file:
        data = json.load(data_file)
    length = len(data)
    # Round-trip each record through JSON before collecting it in self.urls.
    for i in range(0,length):
        jsonval = json.dumps(data[i])
        data2 = json.loads(jsonval)  # NOTE(review): unused local
        self.urls.append(json.loads(jsonval))
    if self.loginRequired == "false":
        self.log("nothing to do here")
    else:
        #attempt login attack
        print self.domain
        if 'bm3.com' in self.domain:
            # bm3: inject each payload straight into the stored form params.
            for temp in self.urls:
                for payload in actual_payloads:
                    params = temp["param"]
                    params["username"] = payload
                    params["password"] = "******"
                    metaobj = {'temp':response.url, 'actualuser':self.login_user[0], 'form':"true", "params":params, 'method':temp["method"]}
                    yield FormRequest(response.url, method=temp["method"], meta=metaobj, formdata=params, dont_filter=True, callback=self.check_login_attack)
        else:
            # Other targets: append the payload to the username and let
            # fill_login_form locate the form fields each time.
            for payload in actual_payloads:
                inject_uname = self.login_user[0] + payload
                print response.url
                args, url, method, self.loginid, self.passid = fill_login_form(response.url, response.body, inject_uname, "dummy")
                print url + "-----------" + inject_uname
                metaobj = {'temp':url, 'actualuser':self.login_user[0], 'form':"true", "params":args, 'method':method}
                #print "attack: " + str(args)
                yield FormRequest(str(url), method=method, meta=metaobj, formdata=args, dont_filter=True, callback=self.check_login_attack)
    return
# Example: fetch GitHub's login page and fill its form with dummy credentials.
from loginform import fill_login_form
import requests

login_url = "https://github.com/login"
page = requests.get(login_url)
fill_login_form(login_url, page.text, "john", "secret")
def parse(self, response):
    """Load attack payloads, then either replay/attack collected URLs or log in.

    Parameter-less URLs are replayed as plain Requests; URLs with parameters
    are re-submitted as forms with values taken from the query string where
    available, unless the parameters look destructive (version/delete).
    """
    # Select payload set: full payloads for known targets, step2 otherwise.
    doms = ["app1", "app4", "app5", "bm1", "bm2", "bm3", "bm4", "bm5"]
    if any (d in self.domain for d in doms):
        f = open("payloads.json","r")
        self.actual_payloads = json.load(f)
        f.close()
    else:
        f = open("step2.json","r")
        self.actual_payloads = json.load(f)
        f.close()
    #print self.urls
    #print self.loginRequired
    #------------attempt login attacks
    print str(len(self.actual_payloads)) + " " + str(len(self.urls))
    if self.loginRequired == "false":
        #print self.urls
        for temp in self.urls:
            login = "******"
            # Skip ignored links, but only when targeting the "app4" app.
            if any ( term in temp["url"] for term in self.ignoreLinks ) and "app4" in temp["url"]:
                #print "**********" + temp["url"]
                continue
            if self.domain in temp["url"]:
                start_time = time.time()*1000  # ms timestamp for response timing
                if len(temp["param"]) == 0:
                    metaobj = {'temp':str(temp["url"]), 'login':login, 'form':"false", "params":[], 'method':"", 'start_time':start_time}
                    yield Request(url=str(temp["url"]), meta=metaobj , callback=self.save_original_resp)
                else:
                    #print "Asdasdasd"
                    # bm* targets get a fixed set of SQL-injection payloads.
                    if "bm1.com" in self.domain or "bm2.com" in self.domain or "bm3.com" in self.domain or "bm4.com" in self.domain or "bm5.com" in self.domain:
                        #print "ertertasdauysdgasdn"
                        self.actual_payloads = []
                        self.actual_payloads.append("'#")
                        self.actual_payloads.append("' or '1=1' #")
                        self.actual_payloads.append("' and SLEEP(10) #")
                    skip = 0
                    keyl = []
                    keyval = {}
                    # Pull parameter values out of the URL's query string, if any.
                    urlsplit2 = temp["url"].split("?")
                    if len(urlsplit2) > 1:
                        print urlsplit2
                        #print "urlsplit2",urlsplit2
                        urlsplit1 = urlsplit2[1].split("&")
                        for urls in urlsplit1:
                            p = urls.split("=")
                            keyl.append(str(p[0]))
                            keyval[str(p[0])] = str(p[1])
                    # Build formdata, preferring query-string values over stored ones.
                    args = []
                    for k,v in temp["param"].iteritems():
                        #print k,v
                        if(str(k ) in keyl):
                            args.append( (str(k), keyval[str(k)]) )
                        else:
                            args.append((str(k), v))
                        # BUG(review): ("version" or "ver") evaluates to just
                        # "version" (likewise ("delete" or "del") -> "delete"),
                        # so "ver"/"del" are never actually checked.
                        if (("version" or "ver") in k.lower()) or (("delete" or "del") in v.lower()):
                            skip = 1
                    print args
                    metaobj = {'temp':temp["url"], 'login':self.loginRequired, 'form':"true", "params":args, 'method':temp["method"], 'start_time':start_time}
                    # Only submit forms that did not look destructive.
                    if skip == 0:
                        yield FormRequest(temp["url"], method=temp["method"], meta=metaobj, formdata=args, callback=self.save_original_resp)
    else:
        # Login required: fill the form with the first configured credentials.
        #print response.url + ": " + self.login_user[0] + ": " + self.login_pass[0]
        args, url, method, self.loginid, self.passid = fill_login_form(response.url, response.body, self.login_user[0], self.login_pass[0])
        print "IDs: " + self.loginid + ", " + self.passid
        print "login"
        metaobj = {'temp':url, 'login':self.loginRequired, 'form':"true", "params":args, 'method':method}
        yield FormRequest(url, method=method, meta=metaobj, formdata=args, callback=self.after_login)
        #time.sleep(5)
    return
def do_login(self, response):
    """Fill and submit the login form using the module-level credentials."""
    form_data, action, http_method = fill_login_form(response.url, response.body, USER, PASSWORD)
    return scrapy.FormRequest(action, method=http_method, formdata=form_data, callback=self.parse_dashboard)
def parse(self, response):
    """Capture the raw response as an item, log in when on a login page, and
    follow extracted links up to the configured max depth.
    """
    self._logger.debug("crawled url {}".format(response.request.url))
    self._increment_status_code_stat(response)
    if 'curdepth' in response.meta:
        cur_depth = response.meta['curdepth']
        # capture raw response
        item = RawResponseItem()
        # populated from response.meta
        item['appid'] = response.meta['appid']
        item['crawlid'] = response.meta['crawlid']
        item['attrs'] = response.meta['attrs']
        # populated from raw HTTP response
        item["url"] = response.request.url
        item["response_url"] = response.url
        item["status_code"] = response.status
        item["status_msg"] = "OK"
        item["response_headers"] = self.reconstruct_headers(response)
        item["request_headers"] = response.request.headers
        item["body"] = response.body
        item["links"] = []
        # Splash-rendered responses may carry a PNG screenshot.
        if isinstance(response, (SplashResponse, SplashTextResponse)):
            if "png" in response.data:
                print " @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ "
                print " @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ "
                print " @@@@@@@@@@@@@@@@@@@@ image @@@@@@@@@@@@@@@@@@@@@@ "
                print " @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ "
                print " @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ "
                item["image"] = response.data['png']
        # login: if this URL is one of the configured login pages, fill and
        # submit its login form with the credentials from meta.
        # if response.url == 'http://fyqe73pativ7vdif.onion/login/':
        # if response.url == 'http://mt3plrzdiyqf6jim.onion/renewal/login.php':
        if response.url in response.meta['login'] and response.status == 200:
            _id = response.meta['login'][response.url]['loginid']
            _pass = response.meta['login'][response.url]['password']
            # print response.body
            # data, url, method = fill_login_form(response.url, response.body, 'w-_-w', '1234567890')
            # data, url, method = fill_login_form(response.url, response.body, '0x0', '1234567890')
            data, url, method = fill_login_form(response.url, response.body, _id, _pass)
            yield FormRequest(url, formdata=dict(data), method=method, callback=self.parse, meta=make_splash_meta(response.meta))
    else:
        cur_depth = 0
        # NOTE(review): ``item`` is only created in the branch above; this
        # path reaches the link-extraction/yield code below without it —
        # confirm 'curdepth' is always present in practice.
    # determine whether to continue spidering
    if response.meta['maxdepth'] != -1 and cur_depth >= response.meta['maxdepth']:
        self._logger.debug("Not spidering links in '{}' because" \
            " cur_depth={} >= maxdepth={}".format(
            response.url, cur_depth, response.meta['maxdepth']))
    else:
        # we are spidering -- yield Request for each discovered link
        link_extractor = LinkExtractor(
            deny_domains=response.meta['denied_domains'],
            allow_domains=response.meta['allowed_domains'],
            allow=response.meta['allow_regex'],
            deny=response.meta['deny_regex'],
            deny_extensions=response.meta['deny_extensions'])
        for link in link_extractor.extract_links(response):
            # link that was discovered
            item["links"].append({"url": link.url, "text": link.text, })
            req = Request(link.url, callback=self.parse, meta=make_splash_meta({}))
            # pass along all known meta fields (except splash bookkeeping)
            for key in response.meta.keys():
                if key != 'splash' and key != 'request':
                    req.meta[key] = response.meta[key]
            if '_splash_processed' in req.meta:
                req.meta.pop("_splash_processed")
            # deeper links get lower priority and an incremented depth
            req.meta['priority'] = response.meta['priority'] - 10
            req.meta['curdepth'] = response.meta['curdepth'] + 1
            if 'useragent' in response.meta and \
                    response.meta['useragent'] is not None:
                req.headers['User-Agent'] = response.meta['useragent']
            self._logger.debug("Trying to follow link '{}'".format(req.url))
            yield req
    # raw response has been processed, yield to item pipeline
    yield item
pprint("reading output from phase1") with open('../output/phase1_output.json') as data_file: data = json.load(data_file) pprint("start processing phase3") client = requests.Session() start_urls = parameter.login_urls login_user = parameter.username login_pass = parameter.password login_flag = parameter.login response = client.get(start_urls[0],verify=False) if login_flag == False: loginResponse = client.get(start_urls[0],verify=False) else: args, url, method = fill_login_form(response.url, response.content, login_user, login_pass) loginResponse = client.post(url, data=args, headers=dict(Referer=start_urls)) pprint(loginResponse) jsonform = [] if "Invalid" in response.content: pprint("Login failed") else: pprint("login successful") for formDetails in data: url = formDetails["url"] action = formDetails["action"] if checkStringContainKey(action,negKeywords)==False:#check the Negative keywords to filter out non-sensitive data if formDetails["method"].lower() == "get":# form is a get form, it cannot csrfForm = Form(url,formDetails) valid_parameters = dict(csrfForm.fill_entries())