def get_media(self, media_id):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXMPAPI2, self).get_media(media_id)
    socket.socket = default_socket
    return r
def scan():
    SOCKS_PROXY_HOST = '127.0.0.1'
    SOCKS_PROXY_PORT = 1234
    HEADER = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.75 Safari/537.36'}
    ip_port = [22, 80, 443, 3389, 6379, 7001, 8080, 27017]
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, SOCKS_PROXY_HOST, SOCKS_PROXY_PORT)
    socket.socket = socks.socksocket
    while True:
        if queue.empty():
            break
        reqIp = queue.get_nowait()
        for port in ip_port:
            reqUrl = 'http://' + str(reqIp) + ':' + str(port)
            print 'scanning ' + reqUrl
            try:
                res = requests.get(reqUrl, headers=HEADER, timeout=5)
                html = res.text
            except Exception, e:
                print 'get html error'
                continue
            if html:
                print 'ok'
                success_list.append(reqUrl)
def send_message(self, data):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXMPAPI2, self).send_message(data)
    socket.socket = default_socket
    return r
def get_user_by_openid(self, openid):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXMPAPI2, self).get_user_by_openid(openid)
    socket.socket = default_socket
    return r
def request_info(self, appid):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXOpenAPI2, self).request_info(appid)
    socket.socket = default_socket
    return r
def refresh_auth(self, auth_appid, refresh_token):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXOpenAPI2, self).refresh_auth(auth_appid, refresh_token)
    socket.socket = default_socket
    return r
def request_auth(self, authorization_code):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXOpenAPI2, self).request_auth(authorization_code)
    socket.socket = default_socket
    return r
def web_page(self, mode, string, public_mode):
    '''Test that the web page contains a string'''
    (socks_address, socks_port) = self.gui.app.onion.get_tor_socks_port()
    socks.set_default_proxy(socks.SOCKS5, socks_address, socks_port)
    s = socks.socksocket()
    s.settimeout(60)
    s.connect((self.gui.app.onion_host, 80))
    if not public_mode:
        path = '/{}'.format(mode.server_status.web.slug)
    else:
        path = '/'
    http_request = 'GET {} HTTP/1.0\r\n'.format(path)
    http_request += 'Host: {}\r\n'.format(self.gui.app.onion_host)
    http_request += '\r\n'
    s.sendall(http_request.encode('utf-8'))
    with open('/tmp/webpage', 'wb') as file_to_write:
        while True:
            data = s.recv(1024)
            if not data:
                break
            file_to_write.write(data)
    f = open('/tmp/webpage')
    self.assertTrue(string in f.read())
    f.close()
def proxy(self, proxy):
    """
    Default proxy for all socket connections.

    Accepts either a string (hostname or IP address) for a SOCKS5 proxy on the
    default port, **or** a ``tuple`` passed to ``socks.set_default_proxy``,
    e.g. ``(socks.SOCKS4, 'localhost', 1234)``.

    >>> context.proxy = 'localhost' #doctest: +ELLIPSIS
    >>> r = remote('google.com', 80)
    Traceback (most recent call last):
    ...
    ProxyConnectionError: Error connecting to SOCKS5 proxy localhost:1080: [Errno 111] Connection refused

    >>> context.proxy = None
    >>> r = remote('google.com', 80, level='error')
    """
    if not proxy:
        socket.socket = _original_socket
        return None
    if isinstance(proxy, str):
        proxy = (socks.SOCKS5, proxy)
    if not isinstance(proxy, collections.Iterable):
        raise AttributeError('proxy must be a string hostname, or tuple of arguments for socks.set_default_proxy')
    socks.set_default_proxy(*proxy)
    socket.socket = socks.socksocket
    return proxy
def request_token(self, ticket):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXOpenAPI2, self).request_token(ticket)
    socket.socket = default_socket
    return r
def proxy(self, proxy):
    """
    Default proxy for all socket connections.

    Examples:

        >>> context.proxy = 'localhost' #doctest: +ELLIPSIS
        >>> r = remote('google.com', 80)
        Traceback (most recent call last):
        ...
        pwnlib.exception.PwnlibException: Could not connect to google.com on port 80

        >>> context.proxy = None
        >>> r = remote('google.com', 80, level='error')
    """
    if not proxy:
        socket.socket = _original_socket
        return None
    if isinstance(proxy, str):
        proxy = (socks.SOCKS5, proxy)
    if not isinstance(proxy, collections.Iterable):
        raise AttributeError('proxy must be a string hostname, or tuple of arguments for socks.set_default_proxy')
    socks.set_default_proxy(*proxy)
    socket.socket = socks.socksocket
    return proxy
def getRequestsResponse(self, url, timeout=5):
    hostname = self.hostname
    if self.protocol == "http":
        http_proxy = "http://" + hostname
        https_proxy = "https://" + hostname
        ftp_proxy = "ftp://" + hostname
        proxyDict = {
            "http": http_proxy,
            "https": https_proxy,
            "ftp": ftp_proxy
        }
        return requests.get(url, timeout=timeout, proxies=proxyDict)
    elif self.protocol == "socks":
        default_socket = socket.socket
        ip, port = hostname.split(':')
        SOCKS5_PROXY_HOST = ip
        SOCKS5_PROXY_PORT = int(port)
        socks.set_default_proxy(socks.SOCKS5, SOCKS5_PROXY_HOST, SOCKS5_PROXY_PORT)
        socket.socket = socks.socksocket
        result = requests.get(url, timeout=timeout)
        # reset socket
        socket.socket = default_socket
        return result
def getSource(url, headers={}, proxy=None, maxRetrys=2, timeWait=2):
    if proxy:
        if PY_SOCKS:
            proxy = proxy.split(':')
            socks.set_default_proxy(socks.SOCKS4, proxy[0], int(proxy[1]))
            socket.socket = socks.socksocket
        else:
            print "\nCan't use proxy because pysocks is missing !?"
    socket.setdefaulttimeout(40)
    source = None
    while not source:
        try:
            request = urllib2.Request(url, headers=headers)
            respond = urllib2.urlopen(request)
            encoding = respond.headers.get('Content-Encoding')
            if not encoding:
                source = respond.read()
            elif encoding.lower() == 'gzip':
                pageBytes = io.BytesIO(respond.read())
                gzibFile = gzip.GzipFile(fileobj=pageBytes)
                source = gzibFile.read()
            else:
                raise urllib2.URLError('unknown encoding !!')
            break
        except (urllib2.URLError, IOError):
            if maxRetrys == 0:
                raise urllib2.URLError("Can't connect !!")
                break
            else:
                time.sleep(random.uniform(0.5 * timeWait, 1.5 * timeWait))
                maxRetrys -= 1
    socket.setdefaulttimeout(None)
    return source
def __init__(self, master, worker_name, bot_name, server, access_token, port=6667, proxy=None):
    self.master = master
    self.worker_name = worker_name
    self.command = "!" + self.master.bot_name
    self.started = False
    self.is_connected = False
    if proxy is not None:
        logging.info('[%s] Proxy set: %s:%s', self.worker_name, proxy["address"], proxy["port"])
        socks.set_default_proxy(socks.HTTP, proxy["address"], proxy["port"])
        socket.socket = socks.socksocket
    SingleServerIRCBot.__init__(self, [(server, port, access_token)], bot_name, bot_name)
    # keep ip for logging
    self.proxy_name = socket.gethostbyname(socket.getfqdn())
    # Channels set up
    self.channels = IRCDict()
    self.channel_join_queue = Queue.Queue()
    self.channel_list = []
    # Messages set up
    self.user_message_queue = Queue.Queue()
    self.log('Chat worker bot initialized.')
def check(args):
    if args.username is not None:
        if args.users:
            error("Contradicting Args", "Please use --users in combination with -s.")
        if args.verified:
            error("Contradicting Args", "Please use --verified in combination with -s.")
        if args.userid:
            error("Contradicting Args", "--userid and -u cannot be used together.")
    if args.tweets and args.users:
        error("Contradicting Args", "--users and --tweets cannot be used together.")
    if args.csv and args.output is None:
        error("Error", "Please specify an output file (Example: -o file.csv).")
    if args.proxy_host is not None:
        if args.proxy_host.lower() == "tor":
            import socks, socket
            socks.set_default_proxy(socks.SOCKS5, "localhost", 9050)
            socket.socket = socks.socksocket
        elif args.proxy_port and args.proxy_type:
            if args.proxy_type.lower() == "socks5":
                _type = socks.SOCKS5
            elif args.proxy_type.lower() == "socks4":
                _type = socks.SOCKS4
            elif args.proxy_type.lower() == "http":
                _type = socks.HTTP
            else:
                error("Error", "Allowed proxy types are: socks5, socks4 and http.")
            import socks, socket
            socks.set_default_proxy(_type, args.proxy_host, int(args.proxy_port))
            socket.socket = socks.socksocket
        else:
            error("Error", "Please specify --proxy-host, --proxy-port and --proxy-type")
    else:
        if args.proxy_port or args.proxy_type:
            error("Error", "Please specify --proxy-host, --proxy-port and --proxy-type")
def global_override_HTTP_test():
    socks.set_default_proxy(socks.HTTP, "127.0.0.1", 8081)
    good = socket.socket
    socket.socket = socks.socksocket
    status = urllib2.urlopen("http://ifconfig.me/ip").getcode()
    socket.socket = good
    assert status == 200
def get_soup_html(url):
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    # Pretend to be an IE 6.0 browser; without the disguise Google returns a 403 error
    browser = {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201'}
    # proxy_support = urllib2.ProxyHandler({'http': 'http://212.98.137.34:8080'})
    # opener = urllib2.build_opener(proxy_support, urllib2.HTTPHandler)
    # urllib2.install_opener(opener)
    # content = urllib2.urlopen("http://www.baidu.com").read()
    # print(content)
    req = urllib2.Request(url=url, headers=browser)
    # Send the request to Google Translate
    count_number = 100
    while count_number:
        try:
            # print 1
            response = urllib2.urlopen(req, timeout=3)
            # print 2
            html = response.read()
            soup = BeautifulSoup(html, "html5lib")
            print(html)
            print(soup.prettify())
            # print('-------------------')
            count_number = 0
            return soup
        except:
            count_number -= 1
            print(url, 'download error')
    return BeautifulSoup('', "html5lib")
def main():
    args = parse_argument()
    try:
        if args.socks5[0] and args.socks5[1]:
            if args.proxy:
                logger.error('invalid proxy protocol count.')
                raise SystemExit
            socks.set_default_proxy(socks.SOCKS5, args.socks5[0], int(args.socks5[1]),
                                    True, args.socks5[2], args.socks5[3])
            socket.socket = socks.socksocket
    except Exception as e:
        logger.error('invalid socks5 proxy arguments.')
        raise SystemExit
    t = Thread(args.board, args.thread)
    if not args.downloading:
        thread_info = t.thread_info()
        logger.info('/{}/ - {} - {}'.format(args.board, thread_info['sub'], const.BOARDS[args.board]))
        logger.info('total images - {}'.format(thread_info['images']))
    else:
        downloader = Downloader(path=args.path, threads=args.threads,
                                timeout=args.timeout, is_thumb=args.thumb)
        q = t.detail_queue()
        downloader.fetch(q)
def getProxy():
    # Access pachong.org through a SOCKS5 proxy
    socks.set_default_proxy(socks.SOCKS5, '127.0.0.1', 1080)
    socket.socket = socks.socksocket
    r = requesocks.get(proxyUrl)
    html = r.text.encode('utf-8')
    # Match the js declarations defined in the page
    reg_script_head = '<script type.*?>(.*?)</script>'
    pattern_script_head = re.compile(reg_script_head, re.S)
    result_of_script_head = re.findall(pattern_script_head, html)
    # Match the port
    reg_port = '<td><script>(.*?)</script>'
    pattern_port = re.compile(reg_port, re.S)
    result_of_port = re.findall(pattern_port, html)
    # Match the IP address
    reg_ip = '<td>([0-9]+(?:\.[0-9]+){0,3})</td>'
    pattern_ip = re.compile(reg_ip, re.S)
    result_of_ip = re.findall(pattern_ip, html)
    for i, item in enumerate(result_of_ip):
        jsevalPort = result_of_script_head[2] + result_of_port[i]
        js = '''
        function add(){
            %s
        }
        add()''' % jsevalPort.replace('document.write', 'return')
        result = js2py.eval_js(js)
        ip_port[item] = result
def get_coordinates(query, from_sensor=False):
    if isinstance(query, unicode):
        query = query.encode("utf-8")
    # query = query.encode('utf-8')
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    params = {
        'address': query,
        'sensor': "true" if from_sensor else "false"
    }
    url = googleGeocodeUrl + urllib.urlencode(params)
    count_number = 5
    while count_number:
        try:
            json_response = urllib.urlopen(url)
            response = json.loads(json_response.read())
            count_number = 0
        except:
            count_number -= 1
    if response['results']:
        location = response['results'][0]['geometry']['location']
        latitude, longitude = location['lat'], location['lng']
        print query, latitude, longitude
    else:
        latitude, longitude = None, None
        print query, "<no results>"
    return latitude, longitude
def request_pre_auth_code(self):
    default_socket = socket.socket
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    r = super(WXOpenAPI2, self).request_pre_auth_code()
    socket.socket = default_socket
    return r
def global_override_HTTP_test():
    socks.set_default_proxy(socks.HTTP, "127.0.0.1", 8080)
    good = socket.socket
    socket.socket = socks.socksocket
    status = urllib2.urlopen("http://api.externalip.net/ip/").getcode()
    socket.socket = good
    assert status == 200
def global_override_SOCKS5_test():
    default_proxy = (socks.SOCKS5, "127.0.0.1", 1081)
    socks.set_default_proxy(*default_proxy)
    good = socket.socket
    socket.socket = socks.socksocket
    status = urllib2.urlopen("http://api.externalip.net/ip/").getcode()
    socket.socket = good
    assert status == 200
    assert socks.get_default_proxy()[1].decode() == default_proxy[1]
def __enter__(self):
    # Save original methods
    self._socket = socket.socket
    self._getaddrinfo = socket.getaddrinfo
    # Configure socks proxy
    socks.set_default_proxy(socks.SOCKS5, self.host, self.port)
    socket.socket = socks.socksocket
    # Patch domain lookup
    socket.getaddrinfo = _getaddrinfo
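The snippet above only shows the __enter__ half of the context manager; a matching __exit__ would need to restore the attributes saved in self._socket and self._getaddrinfo. A minimal sketch of what that restore could look like, assuming the class exists purely to scope the monkey-patch (not taken from the original source):

def __exit__(self, exc_type, exc_value, traceback):
    # Restore the original implementations saved in __enter__
    socket.socket = self._socket
    socket.getaddrinfo = self._getaddrinfo
    # Do not suppress exceptions raised inside the with-block
    return False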
def set_proxy(host, port):
    import socks
    import socket
    socket.socket = socks.socksocket
    socks.set_default_proxy(
        proxy_type=socks.PROXY_TYPE_HTTP,
        addr=host,
        port=port if isinstance(port, int) else int(port)
    )
def __init__(self):
    # Use google.co.jp, Google Japan's search engine
    self.url = 'http://www.google.com'
    self.url0 = 'http://ip.cn'
    self.keyword = 'Anka9080'
    self.spider()
    # Set up the SOCKS5 proxy
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    socket.socket = socks.socksocket
    print len(requests.get(url=self.url, headers=HEADERS).text)
def reset_or_initial_ip_address(reset=False):
    import os
    import socket
    import socks
    if reset:
        os.system("""(echo authenticate '"mypassword"'; echo signal newnym; echo quit) | nc localhost 9051""")
    else:
        pass
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 9050)
    socket.socket = socks.socksocket
def connect_tor():
    # connect TOR
    socks.set_default_proxy(socks.SOCKS5, "localhost", 9150)
    socket.socket = socks.socksocket

    def getaddrinfo(*args):
        return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]

    socket.getaddrinfo = getaddrinfo
def init_tor(header_type):
    try:
        rotate_ip()
        imitate_user()
        socks.set_default_proxy(socks.SOCKS5, "localhost", 9050)
        socket.socket = socks.socksocket
        r = Request('http://icanhazip.com', headers=define_headers(header_type))
        test = urlopen(r).read()
        print("Tor network accessed and using ip: %s" % test.rstrip())  # check ip
    except:
        print("There was an error using the TOR network on localhost:9050")
def set_socks_default():
    # TOR SETUP GLOBAL Vars
    SOCKS_PORT = 9050  # TOR proxy port that is default from torrc, change to whatever torrc is configured to
    socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", SOCKS_PORT)
    socket.socket = socks.socksocket

    # Perform DNS resolution through the socket
    def getaddrinfo(*args):
        return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]

    socket.getaddrinfo = getaddrinfo
    return "success"
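A note on the getaddrinfo stub used in these Tor snippets: it hands the unresolved hostname straight back in the sockaddr tuple, so the name ends up being resolved by the Tor SOCKS server rather than the local resolver (PySocks' rdns flag, True by default, is what makes socksocket forward the hostname to the proxy). One way to confirm traffic really leaves through Tor after calling set_socks_default() is sketched below, assuming a local Tor SOCKS listener on 127.0.0.1:9050; the checkip URL is simply the one other snippets here already use:

import urllib.request

def tor_exit_ip():
    # With socket.socket and socket.getaddrinfo patched by set_socks_default(),
    # this request is carried over the Tor SOCKS proxy, so the echoed address
    # should be a Tor exit node rather than the machine's own public IP.
    with urllib.request.urlopen("http://checkip.amazonaws.com", timeout=10) as resp:
        return resp.read().decode().strip()

print(tor_exit_ip())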
args = parser.parse_args()
hosts = set(args.hosts)
if args.debug:
    h = logging.StreamHandler()
    h.setFormatter(
        logging.Formatter(
            '[%(levelname)s] %(filename)s:%(lineno)s %(message)s'))
    for n in [__name__, 'plugins', 'lib']:
        l = logging.getLogger(n)
        l.setLevel(logging.DEBUG)
        l.addHandler(h)
if args.proxy:
    proxy_host, proxy_port = args.proxy.split(':')
    socks.set_default_proxy(socks.SOCKS5, proxy_host, int(proxy_port))
    socket.socket = socks.socksocket
    dns.query.socket_factory = socks.socksocket
if args.nmap:
    scan = ET.parse(args.nmap).getroot()
    if not scan.tag == 'nmaprun':
        print('File is not nmap xml: ' + args.nmap)
        sys.exit()
    for host in scan.findall('./host'):
        ports = [
            int(p.get('portid')) for p in host.findall('./ports/port')
            if p.find('state').get('state') == 'open'
        ]
        if args.smb_port in ports:
            hosts.add([
if sserver.createSocket(port) == False:
    Log.error(u'----已经有一个实例在实行')
    sys.exit(-1)
# Determine the operating system type
if 'nt' in os.name:
    OSTYPE = 'windows'
elif 'posix' in os.name:
    OSTYPE = 'linux'
else:
    Log.info('不识别的系统类型')
    sys.exit(-1)
# Configure proxy settings; if the server cannot reach the internet directly, connect through a proxy
if 'ON' in use_proxy:
    socks.set_default_proxy(socks.SOCKS5, PROXY_HOST, PROXY_PORT, True, PROXY_USER, PROXY_PAWD)
    socket.socket = socks.socksocket
# Read the configured thread count and start the thread pool; orders are processed in parallel by time and ID
threadNum = inCfg['TOOL']['thread']
pool = threadpool.ThreadPool(int(threadNum))
# A manual run takes 2 arguments: the satellite name and the time range
if len(args) == 2:
    Log.info(u'手动运行订购程序 -----------------------------')
    satID = args[0]  # full satellite name
    str_time = args[1]  # time range for this run
    # Parse the time range: convert YYYYMMDD-YYYYMMDD into datetime start and end times
    date_s, date_e = pb_time.arg_str2date(str_time)
def process_queue(q, quiet=False): if not quiet: print("Processing: %s" % q) if q.socks_proxy_type and q.socks_proxy_host and q.socks_proxy_port: try: import socks except ImportError: raise ImportError( "Queue has been configured with proxy settings, but no socks library was installed. Try to install PySocks via pypi." ) proxy_type = { 'socks4': socks.SOCKS4, 'socks5': socks.SOCKS5, }.get(q.socks_proxy_type) socks.set_default_proxy(proxy_type=proxy_type, addr=q.socks_proxy_host, port=q.socks_proxy_port) socket.socket = socks.socksocket else: socket.socket = socket._socketobject email_box_type = settings.QUEUE_EMAIL_BOX_TYPE if settings.QUEUE_EMAIL_BOX_TYPE else q.email_box_type if email_box_type == 'pop3': if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL: if not q.email_box_port: q.email_box_port = 995 server = poplib.POP3_SSL( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) else: if not q.email_box_port: q.email_box_port = 110 server = poplib.POP3( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) server.getwelcome() server.user(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER) server.pass_(q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD) messagesInfo = server.list()[1] for msg in messagesInfo: msgNum = msg.split(" ")[0] msgSize = msg.split(" ")[1] full_message = "\n".join(server.retr(msgNum)[1]) ticket = ticket_from_message(message=full_message, queue=q, quiet=quiet) if ticket: server.dele(msgNum) server.quit() elif email_box_type == 'imap': if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL: if not q.email_box_port: q.email_box_port = 993 server = imaplib.IMAP4_SSL( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) else: if not q.email_box_port: q.email_box_port = 143 server = imaplib.IMAP4( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) server.login(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER, q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD) server.select(q.email_box_imap_folder) status, data = server.search(None, 'NOT', 'DELETED') if data: msgnums = data[0].split() for num in msgnums: status, data = server.fetch(num, '(RFC822)') ticket = ticket_from_message(message=data[0][1], queue=q, quiet=quiet) if ticket: server.store(num, '+FLAGS', '\\Deleted') server.expunge() server.close() server.logout()
def discover_by_port(host, port, timeout, send_data, socks_proxy, external_run=False): """ request a port to scan and check for existing signatures to discover the service Args: host: host to scan port: port to scan timeout: timeout second send_data: data to send to port socks_proxy: socks proxy external_run: if you run this from other module or not calling it from discovery function, you must set external_run as True Returns: discovered services and ports in JSON dict """ ssl_flag = False if socks_proxy is not None: socks_version = socks.SOCKS5 if socks_proxy.startswith( 'socks5://') else socks.SOCKS4 socks_proxy = socks_proxy.rsplit('://')[1] if '@' in socks_proxy: socks_username = socks_proxy.rsplit(':')[0] socks_password = socks_proxy.rsplit(':')[1].rsplit('@')[0] socks.set_default_proxy( socks_version, str(socks_proxy.rsplit('@')[1].rsplit(':')[0]), int(socks_proxy.rsplit(':')[-1]), username=socks_username, password=socks_password) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo else: socks.set_default_proxy(socks_version, str(socks_proxy.rsplit(':')[0]), int(socks_proxy.rsplit(':')[1])) socket.socket = socks.socksocket try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(timeout) sock.connect((host, port)) except Exception as _: return None try: sock = ssl.wrap_socket(sock) ssl_flag = True except Exception as _: # No SSL Support for Service try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(timeout) sock.connect((host, port)) except Exception: return None data1 = recv_all(sock) try: sock.send(send_data) except Exception as _: pass final_data = recv_all( sock) + data1 # print( "PORT : " + str(port) +final_data) for service in ports_services_and_condition: FLAG = True c = 0 for signature in ports_services_and_condition[service]: if isinstance(signature, list): OFLAG = True for s in ports_services_and_condition[service][c]: if s in final_data: OFLAG = False if OFLAG: FLAG = False else: if signature not in final_data: FLAG = False if FLAG: if ssl_flag: result_dict[port] = service + "/ssl" else: result_dict[port] = service c += 1 for service in ports_services_or_condition: FLAG = False c = 0 for signature in ports_services_or_condition[service]: if isinstance(signature, list): OFLAG = True for s in ports_services_or_condition[service][c]: if s not in final_data: OFLAG = False if OFLAG: FLAG = True else: if signature in final_data: FLAG = True if FLAG: if ssl_flag: result_dict[port] = service + "/ssl" else: result_dict[port] = service c += 1 for service in ports_services_regex: for signature in ports_services_regex[service]: try: pattern = re.compile(signature) if pattern.match(final_data): if ssl_flag: result_dict[port] = service + "/ssl" else: result_dict[port] = service except Exception as _: pass try: result_dict[port] except Exception as _: result_dict[port] = "UNKNOWN" if external_run and port not in external_run_values: external_run_values.append(port) return result_dict[port]
def process_request(self, request, spider): # Called for each request that goes through the downloader # middleware. # Must either: # - return None: continue processing this request # - or return a Response object # - or return a Request object # - or raise IgnoreRequest: process_exception() methods of # installed downloader middleware will be called # cookies = self.read_cookies() # cookie_jar = cookies # # print("cookie_jar", cookie_jar) # request.cookies = cookie_jar ua = UserAgent() USER_AGENT = ua.chrome # 任意头文件 # print(USER_AGENT) request.headers['User-Agent'] = USER_AGENT '''使用socket5 作为代理''' socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 9150) # 8123是在浏览器中设置的, 9150 socket.socket = socks.socksocket #使用Tor代理,只能保存为csv文件 ip = requests.get("http://checkip.amazonaws.com").text print("使用IP:", ip) # 查看包装的IP是 61.238.105.146 本地 # proxy='127.0.0.1:9150' # proxies={ # 'http:':'socks5://'+proxy, # 'https:':'socks5://'+proxy, # # } # request.meta["proxy"] = proxies # socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 9150) # 8123是在浏览器中设置的, 9150 # socket.socket = socks.socksocket #从selenium中手动改邮编之后复制所得,直接复制的也可以 # Cookie_Copy = 'session-id=355-6614492-8783445; i18n-prefs=JPY; ubid-acbjp=358-1525158-7840910; x-wl-uid=11Z8nB/Fz6E8zJUCKjQ423L8B/IaHm3Xh2Pw2guxNW5/wthdeyUf1Qa5xXpYGOxxB1XUNAhp2lV0=; cdn-session=AK-91120dacb76bac627037851815990142; skin=noskin; session-token=GtxLMDrlp1disK4TJayvmAd+8k41b33X+G8okC+UCaFbZm7O9YNew+citRQ8M5u0isrxLKQMf7gTxA2BoYroQJUySB25Pec0i/kBryZTBhMTzdvTU22Npz7uRS2GALyVhkZZ+3VbGH1zdfNHM79jtV8Ertz/1xRs5hD1CGkpMklOH5oXbTMrlbWjaaU2MEyi; x-amz-captcha-1=1586602191538115; x-amz-captcha-2=BMl9V5WnZDQ0cdOVxGEmPQ==; session-id-time=2082726001l; csm-hit=tb:7Y2KVD4W2ESV5WJHWRGP+s-EK0GQRPS7HGR63FGQV51|1586596026704&t:1586596026704&adb:adblk_no' # cookie_dict = {i.split("=")[0]: i.split("=")[1] for i in Cookie_Copy.split("; ")} # request.cookies = cookie_dict request.cookies = { 'session-id': '355-6614492-8783445', 'session-id-time': '2082726001l', } logging.debug('Using headers:%s' % request) '''增加阿布云代理''' # request.meta["proxy"] = proxyServer # print("正常增加IP", proxyAuth) # request.headers["Proxy-Authorization"] = proxyAuth # logging.debug('Using Proxy:%s' % proxyServer) '''使用芝麻代理''' # ip_dict=crawl_xdaili() # proxies = { # 'http': 'http://' + ip_dict['ip']+':'+ip_dict['port'], # 'https': 'https://' + ip_dict['ip']+':'+ip_dict['port'] # } # print(proxies) # proxies='https://' +'58.218.92.150:5238' # request.meta["proxy"] = proxies print('request.header', request.headers) return None
# pip3 install PySocks
import requests
import socks
import socket

socks.set_default_proxy(socks.SOCKS5, '127.0.0.1', 9742)
socket.socket = socks.socksocket
try:
    response = requests.get('http://httpbin.org/get')
    print(response.text)
except requests.exceptions.ConnectionError as e:
    print('Error', e.args)
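Patching socket.socket works, but it reroutes every connection in the process. requests can also be pointed at a SOCKS proxy per request or per session through its proxies dict, which needs the extra dependency installed via pip install requests[socks]. A small sketch, reusing the 127.0.0.1:9742 proxy address from the snippet above:

import requests

# socks5h:// (instead of socks5://) asks the proxy to perform DNS resolution.
proxies = {
    'http': 'socks5h://127.0.0.1:9742',
    'https': 'socks5h://127.0.0.1:9742',
}
response = requests.get('http://httpbin.org/get', proxies=proxies, timeout=10)
print(response.text)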
def login(user, passwd, target, port, timeout_sec, log_in_file, language, retries, time_sleep, thread_tmp_filename, socks_proxy, scan_id, scan_cmd): exit = 0 if socks_proxy is not None: socks_version = socks.SOCKS5 if socks_proxy.startswith( 'socks5://') else socks.SOCKS4 socks_proxy = socks_proxy.rsplit('://')[1] if '@' in socks_proxy: socks_username = socks_proxy.rsplit(':')[0] socks_password = socks_proxy.rsplit(':')[1].rsplit('@')[0] socks.set_default_proxy( socks_version, str(socks_proxy.rsplit('@')[1].rsplit(':')[0]), int(socks_proxy.rsplit(':')[-1]), username=socks_username, password=socks_password) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo else: socks.set_default_proxy(socks_version, str(socks_proxy.rsplit(':')[0]), int(socks_proxy.rsplit(':')[1])) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo while 1: try: if timeout_sec is not None: my_ftp = FTP(timeout=timeout_sec) else: my_ftp = FTP() my_ftp.connect(target, port) exit = 0 break except: exit += 1 if exit == retries: warn( messages(language, "ftp_connection_timeout").format( target, port, user, passwd)) return 1 time.sleep(time_sleep) flag = 1 try: my_ftp.login(user, passwd) flag = 0 except Exception: pass if flag == 0: try: tmpl = [] tmp = my_ftp.retrlines('LIST', tmpl.append) info( messages(language, "user_pass_found").format(user, passwd, target, port)) data = json.dumps( { 'HOST': target, 'USERNAME': user, 'PASSWORD': passwd, 'PORT': port, 'TYPE': 'ftp_brute', 'DESCRIPTION': messages(language, "login_successful"), 'TIME': now(), 'CATEGORY': "brute", 'SCAN_ID': scan_id, 'SCAN_CMD': scan_cmd }) + "\n" __log_into_file(log_in_file, 'a', data, language) except: info( messages(language, "user_pass_found").format( user, passwd, target, port) + ' ' + messages(language, "file_listing_error")) data = json.dumps( { 'HOST': target, 'USERNAME': user, 'PASSWORD': passwd, 'PORT': port, 'TYPE': 'FTP', 'DESCRIPTION': messages(language, "login_list_error"), 'TIME': now(), 'CATEGORY': "brute", 'SCAN_ID': scan_id, 'SCAN_CMD': scan_cmd }) + "\n" __log_into_file(log_in_file, 'a', data, language) __log_into_file(thread_tmp_filename, 'w', '0', language) else: pass return flag
def set_proxy(port):
    import socks
    global socket
    socks.set_default_proxy(socks.SOCKS5, "localhost", port)
    socket.socket = socks.socksocket
def handle_common_options(args, parser): args.parser = parser args.verbosity = args.verbose - args.quiet if args.cache_path is not None: args.cache_path = os.path.normpath(os.path.expanduser(args.cache_path)) args.cache = otsclient.cache.TimestampCache(args.cache_path) if args.whitelist is not None: if not args.whitelist: args.whitelist = DEFAULT_WHITELIST whitelist = opentimestamps.calendar.UrlWhitelist() for url in args.whitelist: whitelist.add(url) args.whitelist = whitelist if args.socks5_proxy is not None: try: import socks except ImportError as exp: logging.error("Can not use SOCKS5 proxy: %s" % exp) sys.exit(1) e = args.socks5_proxy.split(':') s5_hostname = e[0] if len(e) > 1: if e[1].isdigit(): s5_port = int(e[1]) else: args.parser.error("SOCKS5 proxy port must be an integer; got %s" % e[1]) else: s5_port = 1080 socks.set_default_proxy(socks.SOCKS5, s5_hostname, s5_port) # Monkey patch socket to use SOCKS5 proxy socket.socket = socks.socksocket # This should prevent DNS leaks def create_connection(address, timeout=None, source_address=None): sock = socks.socksocket() sock.connect(address) return sock socket.create_connection = create_connection def setup_bitcoin(): """Setup Bitcoin-related functionality Sets mainnet/testnet and returns a RPC proxy. """ if args.btc_net == 'testnet': bitcoin.SelectParams('testnet') elif args.btc_net == 'regtest': bitcoin.SelectParams('regtest') elif args.btc_net == 'mainnet': bitcoin.SelectParams('mainnet') else: assert False try: return bitcoin.rpc.Proxy() except Exception as exp: logging.error("Could not connect to local Bitcoin node: %s" % exp) sys.exit(1) args.setup_bitcoin = setup_bitcoin return args
def changeIp(controller):
    controller.signal(Signal.NEWNYM)
    socks.set_default_proxy(socks.SOCKS5, "localhost", 9150)
    socket.socket = socks.socksocket
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@File : SeverTor.py
@Author: Xinzhe.Pang
@Date : 2019/7/15 23:14
@Desc :
"""
import socket
import socks
import requests

# Tor uses port 9150 as its default socks port
socks.set_default_proxy(socks.SOCKS4, "127.0.0.1", 9150)
socket.socket = socks.socksocket

# Get the IP address used for this crawl
a = requests.get("http://checkip.amazonaws.com").text
print(a)
def start(target, users, passwds, ports, timeout_sec, thread_number, num, total, log_in_file, time_sleep, language, verbose_level, show_version, check_update, socks_proxy, retries, ping_flag, methods_args, scan_id, scan_cmd): # Main function from core.targets import target_type from core.targets import target_to_host if target_type(target) != 'SINGLE_IPv4' or target_type( target) != 'DOMAIN' or target_type(target) != 'HTTP': # requirements check new_extra_requirements = extra_requirements_dict() if methods_args is not None: for extra_requirement in extra_requirements_dict(): if extra_requirement in methods_args: new_extra_requirements[extra_requirement] = methods_args[ extra_requirement] extra_requirements = new_extra_requirements if target_type(target) == 'HTTP': target = target_to_host(target) if ping_flag: if socks_proxy is not None: socks_version = socks.SOCKS5 if socks_proxy.startswith( 'socks5://') else socks.SOCKS4 socks_proxy = socks_proxy.rsplit('://')[1] if '@' in socks_proxy: socks_username = socks_proxy.rsplit(':')[0] socks_password = socks_proxy.rsplit(':')[1].rsplit('@')[0] socks.set_default_proxy( socks_version, str(socks_proxy.rsplit('@')[1].rsplit(':')[0]), int(socks_proxy.rsplit(':')[-1]), username=socks_username, password=socks_password) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo else: socks.set_default_proxy(socks_version, str(socks_proxy.rsplit(':')[0]), int(socks_proxy.rsplit(':')[1])) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo warn(messages(language, 100).format(target, 'heartbleed_vuln')) if do_one_ping(target, timeout_sec, 8) is None: return None subs = __get_subs(target, timeout_sec, log_in_file, time_sleep, language, verbose_level, socks_proxy, retries, num, total, extra_requirements=extra_requirements) info( messages(language, 135).format(len(subs), ', '.join(subs) if len(subs) > 0 else 'None')) if len(subs) is not 0: save = open(log_in_file, 'a') save.write( json.dumps({ 'HOST': target, 'USERNAME': '', 'PASSWORD': '', 'PORT': '', 'TYPE': 'subdomain_scan', 'DESCRIPTION': messages(language, 135).format( len(subs), ', '.join(subs) if len(subs) > 0 else 'None'), 'TIME': now(), 'CATEGORY': "scan", 'SCAN_ID': scan_id, 'SCAN_CMD': scan_cmd }) + '\n') save.close() if len(subs) is 0 and verbose_level is not 0: save = open(log_in_file, 'a') save.write( json.dumps({ 'HOST': target, 'USERNAME': '', 'PASSWORD': '', 'PORT': '', 'TYPE': 'subdomain_scan', 'DESCRIPTION': messages(language, 135).format( len(subs), ', '.join(subs) if len(subs) > 0 else 'None'), 'TIME': now(), 'CATEGORY': "scan", 'SCAN_ID': scan_id, 'SCAN_CMD': scan_cmd }) + '\n') save.close() return subs else: warn(messages(language, 69).format('subdomain_scan', target)) return []
def rutorLinks(filmID): print("Загрузка торрент-ссылок для filmID " + filmID + ".") if SOCKS_IP: default_socket = socket.socket socks.set_default_proxy(socks.SOCKS5, SOCKS_IP, SOCKS_PORT) socket.socket = socks.socksocket request = urllib.request.Request(RUTOR_BASE_URL + filmID) request.add_header("Accept-encoding", "gzip") request.add_header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:65.0) Gecko/20100101 Firefox/65.0") try: response = urllib.request.urlopen(request) except Exception: print("Ошибка соединения при загрузке торрент-ссылок для filmID " + filmID + ". Даём второй шанс.") response = urllib.request.urlopen(request) if response.info().get('Content-Encoding') == 'gzip': gzipFile = gzip.GzipFile(fileobj=response) content = gzipFile.read().decode("utf-8") else: content = response.read().decode("utf-8") if SOCKS_IP: socket.socket = default_socket strIndex = content.find("<div id=\"index\">") if strIndex != -1: content = content[strIndex:] else: raise IndexError("Ошибка загрузки торрент-ссылок для filmID " + filmID + ". Не найден блок с торрентами. Возможно, сайт rutor заблокирован.") strIndex = content.find("</div>") if strIndex != -1: content = content[:-(len(content) - strIndex)] else: raise IndexError("Ошибка загрузки торрент-ссылок для filmID " + filmID + ". Не найден блок с торрентами. Возможно, сайт rutor заблокирован.") patternLink = re.compile("<a class=\"downgif\" href=\"(.*?)\">") matches1 = re.findall(patternLink, content) patternName = re.compile("<a href=\"/torrent/(.*?)\">(.*?)</a>") matches2 = re.findall(patternName, content) patternSeeders = re.compile("alt=\"S\" />(.*?)</span>") matches3 = re.findall(patternSeeders, content) patternMagnet = re.compile("<a href=\"magnet:(.*?)\">") matches4 = re.findall(patternMagnet, content) patternDate = re.compile("<td>(.*?)</td><td ") #09 Мар 19 matches5 = re.findall(patternDate, content) #print(str(len(matches1)) + " " + str(len(matches2)) + " " +str(len(matches3)) + " " + str(len(matches4)) + " " + str(len(matches5))) if (len(matches1) != len(matches2)) or (len(matches1) != len(matches3)) or (len(matches1) != len(matches4)) or (len(matches1) != len(matches4)) or (len(matches1) != len(matches5)): raise IndexError("Ошибка загрузки торрент-ссылок для filmID " + filmID + ". Неверный формат блока с торрентами.") months = {"Янв": 1, "Фев": 2, "Мар": 3, "Апр": 4, "Май": 5, "Июн": 6, "Июл": 7, "Авг": 8, "Сен": 9, "Окт": 10, "Ноя": 11, "Дек": 12} allTorrents = [] for i in range(len(matches1)): link = matches1[i].strip() if not link.startswith("http"): link = urljoin("http://rutor.info", link) dateStr = matches5[i].strip() components = dateStr.split(" ") if (len(components) != 3): raise ValueError("Ошибка загрузки торрент-ссылок для filmID " + filmID + ". 
Неверный формат даты.") torrentDate = datetime.date((int(components[2]) + 2000) if int(components[2]) < 2000 else int(components[2]), months[components[1]], int(components[0])) #print(torrentDate) tmpDict = {"link": link, "name": html.unescape(matches2[i][1]).strip(), "seeders": int(html.unescape(matches3[i]).strip()), "magnet": "magnet:" + (matches4[i]).strip(), "date": torrentDate} allTorrents.append(tmpDict) result = {} for item in allTorrents: tmpParts = item["name"].split("|") if len(tmpParts) == 1: continue realName = tmpParts[0].strip().upper() tags = set() for i in range(1, len(tmpParts)): tmpParts2 = tmpParts[i].split(",") for tmpPart in tmpParts2: tags.add(tmpPart.strip().upper()) if ("LINE" in tags) or ("UKR" in tags) or ("3D-VIDEO" in tags) or ("60 FPS" in tags): continue if not (("ЛИЦЕНЗИЯ" in tags) or ("ITUNES" in tags) or ("D" in tags) or ("D2" in tags)): continue if "BDRIP-HEVC 1080" in realName: if result.get("BDRip-HEVC 1080p") != None: if item["seeders"] > result["BDRip-HEVC 1080p"]["seeders"]: result["BDRip-HEVC 1080p"] = item else: result["BDRip-HEVC 1080p"] = item #print("!BDRip-HEVC 1080p: " + tmpParts[0]) elif "BDRIP 1080" in realName: if result.get("BDRip 1080p") != None: if item["seeders"] > result["BDRip 1080p"]["seeders"]: result["BDRip 1080p"] = item else: result["BDRip 1080p"] = item #print("!BDRip 1080p: " + tmpParts[0]) elif "WEB-DL 1080" in realName: if result.get("WEB-DL 1080p") != None: if item["seeders"] > result["WEB-DL 1080p"]["seeders"]: result["WEB-DL 1080p"] = item else: result["WEB-DL 1080p"] = item #print("!WEB-DL 1080p: " + tmpParts[0]) elif "BDRIP-HEVC 720" in realName: if result.get("BDRip-HEVC 720p") != None: if item["seeders"] > result["BDRip-HEVC 720p"]["seeders"]: result["BDRip-HEVC 720p"] = item else: result["BDRip-HEVC 720p"] = item #print("!BDRip-HEVC 720p: " + tmpParts[0]) elif "BDRIP 720" in realName: if result.get("BDRip 720p") != None: if item["seeders"] > result["BDRip 720p"]["seeders"]: result["BDRip 720p"] = item else: result["BDRip 720p"] = item #print("!BDRip 720p: " + tmpParts[0]) elif "WEB-DL 720" in realName: if result.get("WEB-DL 720p") != None: if item["seeders"] > result["WEB-DL 720p"]["seeders"]: result["WEB-DL 720p"] = item else: result["WEB-DL 720p"] = item #print("!WEB-DL 720p: " + tmpParts[0]) # if result.get("UHD BDRemux HDR") or result.get("UHD BDRemux SDR") or result.get("BDRip-HEVC 1080p") or result.get("BDRip 1080p"): # result.pop("WEB-DL 2160p HDR", None) # result.pop("WEB-DL 720p", None) # result.pop("WEB-DL 1080p", None) finalResult = [] if result.get("WEB-DL 1080p"): finalResult.append({"link": result["WEB-DL 1080p"]["link"], "magnet": result["WEB-DL 1080p"]["magnet"], "date": result["WEB-DL 1080p"]["date"], "type": "WEB-DL 1080p"}) if result.get("WEB-DL 720p"): finalResult.append({"link": result["WEB-DL 720p"]["link"], "magnet": result["WEB-DL 720p"]["magnet"], "date": result["WEB-DL 720p"]["date"], "type": "WEB-DL 720p"}) if result.get("BDRip 1080p"): finalResult.append({"link": result["BDRip 1080p"]["link"], "magnet": result["BDRip 1080p"]["magnet"], "date": result["BDRip 1080p"]["date"], "type": "BDRip 1080p"}) if result.get("BDRip-HEVC 1080p"): finalResult.append({"link": result["BDRip-HEVC 1080p"]["link"], "magnet": result["BDRip-HEVC 1080p"]["magnet"], "date": result["BDRip-HEVC 1080p"]["date"], "type": "BDRip-HEVC 1080p"}) if result.get("BDRip 720p"): finalResult.append({"link": result["BDRip 720p"]["link"], "magnet": result["BDRip 720p"]["magnet"], "date": result["BDRip 720p"]["date"], "type": "BDRip 
720p"}) if result.get("BDRip-HEVC 720p"): finalResult.append({"link": result["BDRip-HEVC 720p"]["link"], "magnet": result["BDRip-HEVC 720p"]["magnet"], "date": result["BDRip-HEVC 720p"]["date"], "type": "BDRip-HEVC 720p"}) #print(finalResult) return finalResult
def start(target, users, passwds, ports, timeout_sec, thread_number, num, total, log_in_file, time_sleep, language, verbose_level, show_version, check_update, socks_proxy, retries, ping_flag, methods_args, scan_id, scan_cmd): # Main function if target_type(target) != 'SINGLE_IPv4' or target_type( target) != 'DOMAIN' or target_type(target) != 'HTTP': # requirements check new_extra_requirements = extra_requirements_dict() if methods_args is not None: for extra_requirement in extra_requirements_dict(): if extra_requirement in methods_args: new_extra_requirements[extra_requirement] = methods_args[ extra_requirement] extra_requirements = new_extra_requirements if users is None: users = extra_requirements["smtp_brute_users"] if passwds is None: passwds = extra_requirements["smtp_brute_passwds"] if ports is None: ports = extra_requirements["smtp_brute_ports"] if extra_requirements["smtp_brute_split_user_set_pass"][0] not in [ "False", "True" ]: extra_requirements["smtp_brute_split_user_set_pass"][0] = "False" if target_type(target) == 'HTTP': target = target_to_host(target) if ping_flag: if socks_proxy is not None: socks_version = socks.SOCKS5 if socks_proxy.startswith( 'socks5://') else socks.SOCKS4 socks_proxy = socks_proxy.rsplit('://')[1] if '@' in socks_proxy: socks_username = socks_proxy.rsplit(':')[0] socks_password = socks_proxy.rsplit(':')[1].rsplit('@')[0] socks.set_default_proxy( socks_version, str(socks_proxy.rsplit('@')[1].rsplit(':')[0]), int(socks_proxy.rsplit(':')[-1]), username=socks_username, password=socks_password) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo else: socks.set_default_proxy(socks_version, str(socks_proxy.rsplit(':')[0]), int(socks_proxy.rsplit(':')[1])) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo warn(messages(language, 100).format(target, 'heartbleed_vuln')) if do_one_ping(target, timeout_sec, 8) is None: return None threads = [] max = thread_number total_req = int( len(users) * len(passwds) * len(ports) * len(extra_requirements["smtp_brute_split_user_set_pass_prefix"])) \ if extra_requirements["smtp_brute_split_user_set_pass"][0] == "False" \ else int(len(users) * len(ports) * len(extra_requirements["smtp_brute_split_user_set_pass_prefix"])) thread_tmp_filename = 'tmp/thread_tmp_' + ''.join( random.choice(string.ascii_letters + string.digits) for _ in range(20)) ports_tmp_filename = 'tmp/ports_tmp_' + ''.join( random.choice(string.ascii_letters + string.digits) for _ in range(20)) thread_write = open(thread_tmp_filename, 'w') thread_write.write('1') thread_write.close() ports_write = open(ports_tmp_filename, 'w') ports_write.write('') ports_write.close() ports = test_ports(ports, timeout_sec, target, retries, language, num, total, time_sleep, ports_tmp_filename, thread_number, total_req, verbose_level, socks_proxy) trying = 0 if extra_requirements["smtp_brute_split_user_set_pass"][0] == "False": for port in ports: for user in users: for passwd in passwds: t = threading.Thread( target=login, args=(user, passwd, target, port, timeout_sec, log_in_file, language, retries, time_sleep, thread_tmp_filename, socks_proxy, scan_id, scan_cmd)) threads.append(t) t.start() trying += 1 if verbose_level is not 0: info( messages(language, 72).format(trying, total_req, num, total, target, port, 'smtp_brute')) while 1: n = 0 for thread in threads: if thread.isAlive(): n += 1 else: threads.remove(thread) if n >= max: time.sleep(0.01) else: break else: for port in ports: for user in users: for prefix in extra_requirements[ 
"smtp_brute_split_user_set_pass_prefix"]: t = threading.Thread( target=login, args=(user, user.rsplit('@')[0] + prefix, target, port, timeout_sec, log_in_file, language, retries, time_sleep, thread_tmp_filename)) threads.append(t) t.start() trying += 1 if verbose_level is not 0: info( messages(language, 72).format(trying, total_req, num, total, target, port, 'smtp_brute')) while 1: try: if threading.activeCount() >= max: time.sleep(0.01) else: break except KeyboardInterrupt: break break # wait for threads kill_switch = 0 kill_time = int(timeout_sec / 0.1) if int(timeout_sec / 0.1) is not 0 else 1 while 1: time.sleep(0.1) kill_switch += 1 try: if threading.activeCount() is 1 or kill_switch is kill_time: break except KeyboardInterrupt: break thread_write = int(open(thread_tmp_filename).read().rsplit()[0]) if thread_write is 1 and verbose_level is not 0: save = open(log_in_file, 'a') save.write( json.dumps({ 'HOST': target, 'USERNAME': '', 'PASSWORD': '', 'PORT': '', 'TYPE': 'smtp_brute', 'DESCRIPTION': messages(language, 95), 'TIME': now(), 'CATEGORY': "brute", 'SCAN_ID': scan_id, 'SCAN_CMD': scan_cmd }) + '\n') save.close() os.remove(thread_tmp_filename) else: warn(messages(language, 69).format(target))
else:
    sys.stderr.write("\x1b[2J\x1b[H")

def println(s, file=sys.stderr):
    assert type(s) is type(u'')
    file.write(s.encode(sys.getfilesystemencoding(), 'replace') + os.linesep)

try:
    socket.create_connection(('127.0.0.1', 1080), timeout=0.5).close()
    sys.path.append('PySocks')
    import socks
    if os.name == 'nt':
        import win_inet_pton
    socks.set_default_proxy(socks.SOCKS5, '127.0.0.1', port=1080)
    socket.socket = socks.socksocket
    println(u'使用 SOCKS5 代理:127.0.0.1:1080')
except socket.error:
    try:
        socket.create_connection(('127.0.0.1', 8087), timeout=0.5).close()
        os.environ['HTTP_PROXY'] = 'http://127.0.0.1:8087'
        os.environ['HTTPS_PROXY'] = 'http://127.0.0.1:8087'
        println(u'使用 HTTP 代理:127.0.0.1:8087')
    except socket.error:
        println(u'''\
警告:检测到本机没有在指定端口监听的 HTTP 代理 (8087) 或 SOCKS5 代理 (1080),
建议先启动 GoProxy 客户端或者其它代理,并根据代理类型设定监听的端口。
如果你使用的是 VPN 并且已经正常工作的话,请忽略此警告,按回车键继续。''')
        raw_input()
        Trade (fetch depth)
        Update state
        :return:
        '''
        pass

if __name__ == '__main__':
    # Proxy settings
    if socket.gethostname() == 'DESKTOP-****':
        print(socket.gethostname())
        socks.set_default_proxy(socks.SOCKS5, '127.0.0.1', 1088)
        socket.socket = socks.socksocket
    # Trading settings
    trade_symbols = [
        'BTCUSDT', 'ETHUSDT', 'BCHUSDT', 'ETCUSDT',
        'LINKUSDT', 'EOSUSDT', 'XRPUSDT', 'IOSTUSDT'
    ]
    init_balance = 99999
    set_leverage = 10
    _trade = trade_obj(trade_symbols=trade_symbols, init_balance=init_balance, leverage=set_leverage)
    _trade.run()
def disable_proxy():
    set_http_proxy(None)
    socks.set_default_proxy()
    socket.socket = socks.socksocket
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Request a tunnel proxy server using a raw socket.
Works for both http and https pages.
"""
import socket
import socks  # pip install PySocks

# Set the proxy type to HTTP
socks.set_default_proxy(socks.HTTP, addr='tpsXXX.kdlapi.com', port=15818, username='******', password='******')
# socks.set_default_proxy(socks.SOCKS5, addr='tpsXXX.kdlapi.com', port=20818)  # set the proxy type to socks
socket.socket = socks.socksocket  # route sockets through the proxy

def main():
    sock = socket.socket()
    sock.connect(('dev.kdlapi.com', 80))  # connect
    # Build a complete HTTP request by hand: method, url and headers
    request = (
        'GET https://dev.kdlapi.com/testproxy HTTP/1.1\r\n'
        'Host: dev.kdlapi.com\r\n'
        'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36\r\n'
        'Connection: close\r\n'
        '\r\n'
    )
    response = b''  # received data
    sock.send(request.encode())  # send the request
    chunk = sock.recv(1024)  # receive 1024 bytes at a time
    while chunk:  # keep reading until the server stops sending data
        response += chunk
        chunk = sock.recv(1024)
    print(response.decode())
from config import config

if config.PROXY_ENABLE:
    import socks
    if config.PROXY_TYPE == "HTTP":
        proxy_type = socks.HTTP
    elif config.PROXY_TYPE == "SOCKS4":
        proxy_type = socks.SOCKS4
    elif config.PROXY_TYPE == "SOCKS5":
        proxy_type = socks.SOCKS5
    else:
        logging.error("proxy type %s unknown, disable proxy", config.PROXY_TYPE)
        config.PROXY_ENABLE = 0
if config.PROXY_ENABLE:
    socks.set_default_proxy(proxy_type, config.PROXY_HOST, config.PROXY_PORT,
                            config.PROXY_USER, config.PROXY_PASSWD)

from google_ip import google_ip
from openssl_wrap import SSLConnection

NetWorkIOError = (socket.error, SSLError, OpenSSL.SSL.Error, OSError)
g_cacertfile = os.path.join(current_path, "cacert.pem")

from connect_manager import Connect_pool, random_hostname
import connect_control

class Direct_connect_manager(object):
    thread_num_lock = threading.Lock()
import socks
import socket
from simplegmail import Gmail
import re

# A proxy is needed here to get past the firewall and reach Gmail; you may not need this.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
    'Connection': 'close'
}
socks.set_default_proxy(socks.SOCKS5, "127.0.0.1")
socket.socket = socks.socksocket

gmail = Gmail()

# Unread messages in your inbox
messages = gmail.get_unread_messages(include_spam_trash=True)
# messages = gmail.get_spam_messages()
# ...and many more easy to use functions can be found in gmail.py!

# Print them out!
for message in messages:
    if message.attachments:
        for attm in message.attachments:
            print('File: ' + attm.filename)
            matches = re.match(r'^(\d{2})[_-](\d+)[_-](.+).ipynb$', attm.filename)
            if matches:
                attm.save(filepath=f'{matches.group(1)}/作业/{matches.group(1)}_{matches.group(2)}.ipynb', overwrite=True)
                message.mark_as_read()
def process_queue(q, logger): logger.info("***** %s: Begin processing mail for django-helpdesk" % ctime()) if q.socks_proxy_type and q.socks_proxy_host and q.socks_proxy_port: try: import socks except ImportError: no_socks_msg = "Queue has been configured with proxy settings, " \ "but no socks library was installed. Try to " \ "install PySocks via PyPI." logger.error(no_socks_msg) raise ImportError(no_socks_msg) proxy_type = { 'socks4': socks.SOCKS4, 'socks5': socks.SOCKS5, }.get(q.socks_proxy_type) socks.set_default_proxy(proxy_type=proxy_type, addr=q.socks_proxy_host, port=q.socks_proxy_port) socket.socket = socks.socksocket email_box_type = settings.QUEUE_EMAIL_BOX_TYPE or q.email_box_type mail_defaults = { 'pop3': { 'ssl': { 'port': 995, 'init': poplib.POP3_SSL, }, 'insecure': { 'port': 110, 'init': poplib.POP3, }, 'sync': pop3_sync, }, 'imap': { 'ssl': { 'port': 993, 'init': imaplib.IMAP4_SSL, }, 'insecure': { 'port': 143, 'init': imaplib.IMAP4, }, 'sync': imap_sync } } if email_box_type in mail_defaults: encryption = 'insecure' if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL: encryption = 'ssl' if not q.email_box_port: q.email_box_port = mail_defaults[email_box_type][encryption][ 'port'] server = mail_defaults[email_box_type][encryption]['init']( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) logger.info("Attempting %s server login" % email_box_type.upper()) mail_defaults[email_box_type]['sync'](q, logger, server) elif email_box_type == 'local': mail_dir = q.email_box_local_dir or '/var/lib/mail/helpdesk/' mail = [ join(mail_dir, f) for f in listdir(mail_dir) if isfile(join(mail_dir, f)) ] logger.info("Found %d messages in local mailbox directory" % len(mail)) logger.info("Found %d messages in local mailbox directory" % len(mail)) for i, m in enumerate(mail, 1): logger.info("Processing message %d" % i) with open(m, 'r') as f: full_message = encoding.force_text(f.read(), errors='replace') ticket = object_from_message(message=full_message, queue=q, logger=logger) if ticket: logger.info( "Successfully processed message %d, ticket/comment created." % i) try: unlink(m) # delete message file if ticket was successful except OSError: logger.error("Unable to delete message %d." % i) else: logger.info("Successfully deleted message %d." % i) else: logger.warn( "Message %d was not successfully processed, and will be left in local directory" % i)
def process_queue(q, logger): logger.info("***** %s: Begin processing mail for autotriage" % ctime()) if q.socks_proxy_type and q.socks_proxy_host and q.socks_proxy_port: try: import socks except ImportError: no_socks_msg = "Queue has been configured with proxy settings, " \ "but no socks library was installed. Try to " \ "install PySocks via PyPI." logger.error(no_socks_msg) raise ImportError(no_socks_msg) proxy_type = { 'socks4': socks.SOCKS4, 'socks5': socks.SOCKS5, }.get(q.socks_proxy_type) socks.set_default_proxy(proxy_type=proxy_type, addr=q.socks_proxy_host, port=q.socks_proxy_port) socket.socket = socks.socksocket elif six.PY2: socket.socket = socket._socketobject email_box_type = settings.QUEUE_EMAIL_BOX_TYPE or q.email_box_type if email_box_type == 'pop3': if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL: if not q.email_box_port: q.email_box_port = 995 server = poplib.POP3_SSL( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) else: if not q.email_box_port: q.email_box_port = 110 server = poplib.POP3( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) logger.info("Attempting POP3 server login") server.getwelcome() server.user(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER) server.pass_(q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD) messagesInfo = server.list()[1] logger.info("Received %d messages from POP3 server" % len(messagesInfo)) for msg in messagesInfo: msgNum = msg.split(" ")[0] logger.info("Processing message %s" % msgNum) full_message = encoding.force_text("\n".join( server.retr(msgNum)[1]), errors='replace') ticket = ticket_from_message(message=full_message, queue=q, logger=logger) if ticket: server.dele(msgNum) logger.info( "Successfully processed message %s, deleted from POP3 server" % msgNum) else: logger.warn( "Message %s was not successfully processed, and will be left on POP3 server" % msgNum) server.quit() elif email_box_type == 'imap': if q.email_box_ssl or settings.QUEUE_EMAIL_BOX_SSL: if not q.email_box_port: q.email_box_port = 993 server = imaplib.IMAP4_SSL( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) else: if not q.email_box_port: q.email_box_port = 143 server = imaplib.IMAP4( q.email_box_host or settings.QUEUE_EMAIL_BOX_HOST, int(q.email_box_port)) logger.info("Attempting IMAP server login") server.login(q.email_box_user or settings.QUEUE_EMAIL_BOX_USER, q.email_box_pass or settings.QUEUE_EMAIL_BOX_PASSWORD) server.select(q.email_box_imap_folder) status, data = server.search(None, 'NOT', 'DELETED') if data: msgnums = data[0].split() logger.info("Received %d messages from IMAP server" % len(msgnums)) for num in msgnums: logger.info("Processing message %s" % num) status, data = server.fetch(num, '(RFC822)') full_message = encoding.force_text(data[0][1], errors='replace') ticket = ticket_from_message(message=full_message, queue=q, logger=logger) if ticket: server.store(num, '+FLAGS', '\\Deleted') logger.info( "Successfully processed message %s, deleted from IMAP server" % num) else: logger.warn( "Message %s was not successfully processed, and will be left on IMAP server" % num) server.expunge() server.close() server.logout() elif email_box_type == 'local': mail_dir = q.email_box_local_dir or '/var/lib/mail/autotriage/' mail = [ join(mail_dir, f) for f in listdir(mail_dir) if isfile(join(mail_dir, f)) ] logger.info("Found %d messages in local mailbox directory" % len(mail)) logger.info("Found %d messages in local mailbox directory" % len(mail)) for i, m in enumerate(mail, 
1): logger.info("Processing message %d" % i) with open(m, 'r') as f: full_message = encoding.force_text(f.read(), errors='replace') ticket = ticket_from_message(message=full_message, queue=q, logger=logger) if ticket: logger.info( "Successfully processed message %d, ticket/comment created." % i) try: unlink(m) # delete message file if ticket was successful except: logger.error("Unable to delete message %d." % i) else: logger.info("Successfully deleted message %d." % i) else: logger.warn( "Message %d was not successfully processed, and will be left in local directory" % i)
from bs4 import BeautifulSoup
import requests
from fake_useragent import UserAgent
import xlwt
import time
import random
import socks
import socket

socks.set_default_proxy(socks.SOCKS5, "localhost", 9150)
socket.socket = socks.socksocket

useragent = UserAgent()
links = []
artikul = []
name = []
price = []
price_discount = []
brand = []

def checkIP():
    ip = requests.get('http://checkip.dyndns.org').content
    soup = BeautifulSoup(ip, 'html.parser')
    print(soup.find('body').text)

for i in range(1, 51):  # 50 pages
    url = 'https://lapsi.ru/detskaya_komnata/tekstil/?PAGEN_5=' + str(i)
    response = requests.get(url, headers={'User-Agent':
    h, t = x[1].split('+')
else:
    if opts.proxy_auth:
        un = raw_input("  Proxy Username: "******"  Proxy Password: ")
    else:
        un, pw = False, False
    h, t = opts.proxy_dict.split('+')
if t == 'socks':
    import socks
    h, p = h.split(':')
    socks.set_default_proxy(socks.SOCKS5, h, int(p), True, un, pw)
    socket.socket = socks.socksocket
    opts.proxy_dict = {}
    opts.proxy_auth = {}
    try:
        res = requests.get(proxy_test_url).content
        if not (opts.json or opts.json_min):
            print('\n %s[i]%s Your proxied IP is: %s' % (TC.BLUE, TC.END,
                  res.split('Add to Fav')[0].split('<b>')[1].split('<br>')[0].strip()))
    except Exception, ex:
        print ex
        quit(1)
elif t in ('http', 'basic', 'digest'):
def login(user, passwd, target, port, timeout_sec, log_in_file, language, retries, time_sleep, thread_tmp_filename, socks_proxy, scan_id, scan_cmd): exit = 0 if socks_proxy is not None: socks_version = socks.SOCKS5 if socks_proxy.startswith( 'socks5://') else socks.SOCKS4 socks_proxy = socks_proxy.rsplit('://')[1] if '@' in socks_proxy: socks_username = socks_proxy.rsplit(':')[0] socks_password = socks_proxy.rsplit(':')[1].rsplit('@')[0] socks.set_default_proxy( socks_version, str(socks_proxy.rsplit('@')[1].rsplit(':')[0]), int(socks_proxy.rsplit(':')[-1]), username=socks_username, password=socks_password) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo else: socks.set_default_proxy(socks_version, str(socks_proxy.rsplit(':')[0]), int(socks_proxy.rsplit(':')[1])) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo while 1: try: if timeout_sec is not None: server = smtplib.SMTP(target, int(port), timeout=timeout_sec) else: server = smtplib.SMTP(target, int(port)) server.starttls() exit = 0 break except: exit += 1 if exit is retries: warn(messages(language, 73).format(target, port, user, passwd)) return 1 time.sleep(time_sleep) flag = 1 try: server.login(user, passwd) flag = 0 except smtplib.SMTPException as err: pass if flag is 0: info(messages(language, 70).format(user, passwd, target, port)) save = open(log_in_file, 'a') save.write( json.dumps({ 'HOST': target, 'USERNAME': user, 'PASSWORD': passwd, 'PORT': port, 'TYPE': 'smtp_brute', 'DESCRIPTION': messages(language, 66), 'TIME': now(), 'CATEGORY': "brute", 'SCAN_ID': scan_id, 'SCAN_CMD': scan_cmd }) + '\n') save.close() thread_write = open(thread_tmp_filename, 'w') thread_write.write('0') thread_write.close() else: pass try: server.quit() except: pass return flag
def __netcraft(target, timeout_sec, log_in_file, time_sleep, language, verbose_level, socks_proxy, retries, headers, thread_tmp_filename): try: from core.targets import target_to_host if socks_proxy is not None: socks_version = socks.SOCKS5 if socks_proxy.startswith( 'socks5://') else socks.SOCKS4 socks_proxy = socks_proxy.rsplit('://')[1] if '@' in socks_proxy: socks_username = socks_proxy.rsplit(':')[0] socks_password = socks_proxy.rsplit(':')[1].rsplit('@')[0] socks.set_default_proxy( socks_version, str(socks_proxy.rsplit('@')[1].rsplit(':')[0]), int(socks_proxy.rsplit(':')[-1]), username=socks_username, password=socks_password) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo else: socks.set_default_proxy(socks_version, str(socks_proxy.rsplit(':')[0]), int(socks_proxy.rsplit(':')[1])) socket.socket = socks.socksocket socket.getaddrinfo = getaddrinfo n = 0 results = '' url = 'https://searchdns.netcraft.com/?restriction=site+contains&host=*.{0}' \ '&lookup=wait..&position=limited'.format(target) subs = [] while '<b>Next page</b></a>' not in results: while 1: try: results = requests.get(url, headers=headers) break except: n += 1 if n is 3: break break if results.status_code is 200: for l in re.compile( '<a href="http://toolbar.netcraft.com/site_report\?url=(.*)">' ).findall(results.content): if target_to_host(l).endswith(target) and target_to_host( l) not in subs: subs.append(target_to_host(l)) else: # warn 403 break try: url = 'http://searchdns.netcraft.com' + re.compile( '<A href="(.*?)"><b>Next page</b></a>').findall( results.content)[0] except: break f = open(thread_tmp_filename, 'a') f.write('\n'.join(subs) + '\n') f.close() return subs except: return []
import socket
import socks
import requests

socks.set_default_proxy(socks.HTTP, "127.0.0.1", 8080)
temp = socket.socket

socket.socket = socks.socksocket
print(requests.get('http://myip.ipip.net/').text)

socket.socket = temp
print(requests.get('http://myip.ipip.net/').text)

socket.socket = socks.socksocket
print(requests.get('http://myip.ipip.net/').text)
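Several snippets here (this one, the WXMPAPI2/WXOpenAPI2 wrappers, getRequestsResponse) repeat the same save / patch / restore dance around socket.socket. One way to make that pattern harder to get wrong is a small context manager that restores the original socket even when the proxied call raises. A sketch, not taken from any of the sources above:

import socket
from contextlib import contextmanager

import socks

@contextmanager
def socks_proxy(host, port, proxy_type=socks.SOCKS5):
    """Temporarily route new sockets through a SOCKS/HTTP proxy."""
    original_socket = socket.socket
    socks.set_default_proxy(proxy_type, host, port)
    socket.socket = socks.socksocket
    try:
        yield
    finally:
        # Always restore the real socket class, even on exceptions.
        socket.socket = original_socket

# Usage: only the request inside the block goes through the proxy.
# with socks_proxy("127.0.0.1", 8080, proxy_type=socks.HTTP):
#     print(requests.get('http://myip.ipip.net/').text)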
def m_set_socks_proxy(address, port):
    socks.set_default_proxy(socks.SOCKS5, address, int(port))
    socket.socket = socks.socksocket
try:
    result = requests.get("http://www.youtube.com/", proxies=proxies_socks5, verify=False, timeout=20)
    print(result.text)
except Exception as e:
    print('-------使用requests-socks5-Failed-------')
    print(e)

if SCRAPY_HTTP:
    print('--------使用Scrapy-http代理-------')
    try:
        r = Request(youtube_url, meta={'proxy': proxies_http['http']})
        # fetch(r)
    except Exception as e:
        print('-------使用Scrapy-http-Failed-------')
        print(e)

if PYSOCKS_SOCKS5:
    print('--------使用PySocks-socks5代理-------')
    try:
        socks.set_default_proxy(socks.SOCKS5, host, port=port)
        socket.socket = socks.socksocket
        response = request.urlopen(youtube_url, timeout=20)
        print(response.read().decode('utf-8'))
    except Exception as e:
        print('-------使用PySocks-socks5-Failed-------')
        print(e)
        pageSize=10,
        fields="nextPageToken, files(id, name,mimeType,md5Checksum,size)"
    ).execute()
    items = results.get('files', [])
    if not items:
        print('No files found.')
    else:
        print('Files:')
        for item in items:
            print(u'name={0} (id={1} , mime={2})'.format(item['name'], item['id'], item['mimeType']))

socks.set_default_proxy(proxy_type=socks.SOCKS5, addr="127.0.0.1", port=1081, rdns="1.1.1.1")
socket.socket = socks.socksocket

if __name__ == '__main__':
    # main()
    from google_drive_api.gigu import Gigu, transform_mime
    gigu = Gigu(credential_file_path="credentials.json")
    drive_service = gigu.get_service()
    # https://developers.google.com/drive/api/v3/search-parameters
    results = drive_service.files().list(
        q="mimeType='application/vnd.google-apps.folder' and name contains 'XXX'",
        pageSize=200,
        pageToken=None,