crawler = Crawler.Crawler(start, opener1)
print "Scanning..."
try:
    # For each URL in the crawl queue
    while crawler.hasNext():
        url = crawler.next()
        print url  # display the URL being scanned
        soup = crawler.process(url)  # fetch and parse the current page
        if not soup:
            continue
        # Process the page's forms
        for m in Form.getAllForms(soup):  # take all forms which have an action attribute
            action = uri.buildAction(url, m['action'])
            if action not in actionDone and action != '':
                try:
                    formResults = form.prepareFormInputs(m)          # get inputs for form m
                    r1 = request(url, action, formResults, opener1)  # send the request as user 1
                    formResults = form.prepareFormInputs(m)          # regenerate the data
                    r2 = request(url, action, formResults, opener2)  # send the same request as user 2
                    if csrf:
                        if not re.search(csrf, r2):  # anti-CSRF marker missing: we got a CSRF!
                            try:
                                print '=CSRF=' + "\n\t" + 'Name: ' + m['name'] + "\n\t" + 'Action: ' + m['action'] + "\n\t" + 'From: ' + url + "\n"
                            except KeyError:  # the form has no name attribute
                                print '=CSRF=' + "\n\t" + 'Action: ' + m['action'] + "\n\t" + 'From: ' + url
                            print "\t" + urllib.urlencode(formResults)
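The request helper called above is defined elsewhere in the tool and isn't shown in this listing. A minimal sketch of what it is assumed to do (the name, the Referer header, and the exact signature are assumptions, not the tool's confirmed code): urlencode the prepared inputs, POST them to the form's action through the given opener so the call is made inside that user's authenticated session, and return the response body for the re.search check above.

import urllib
import urllib2

def request(referer, action, inputs, opener):
    # Hypothetical sketch of the helper used above, not the tool's own code.
    # POST the urlencoded form inputs to the action URL through the given
    # opener (each opener carries one logged-in user's cookies) and return
    # the response body so the caller can grep it for the anti-CSRF marker.
    data = urllib.urlencode(inputs)
    req = urllib2.Request(action, data, {'Referer': referer})
    return opener.open(req).read()

Sending the same form once per opener is the core of the check: the form (and its token) was harvested from a page fetched as user 1, so if replaying it under user 2's session still succeeds and the csrf pattern never appears in r2, the token is not being tied to the session and the form is flagged.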