    return
    # Login bootstrap: when no cached server/token pair was supplied, create a
    # WebUser session and publish the credentials into the shared globalVar.
    # NOTE(review): chunk starts mid-function (the 'return' above closes a
    # branch cut off before this view); indentation reconstructed from syntax.
    if _server and _token:
        pass
    else:
        try:
            user = WebUser()
            if user.server and user.token:
                _server = user.server
                _token = user.token
                # init global var
                globalVar.server = _server
                globalVar.token = _token
            if _auto_proxy:
                # Pre-fill the shared proxy pool (requests up to 1000 proxies).
                ps = ProxyScraper()
                ps.proxies_get(1000)
                globalVar.proxyRequest.add_proxies(ps.format_proxie(type=1))
        except Exception,e:
            # NOTE(review): Python 2 'except Exception,e' syntax; e[0] assumes
            # the exception args start with a tag string such as
            # 'WebUser.loginfail' -- confirm against WebUser's raise sites.
            print 'Exception',e
            if e[0] == 'WebUser.loginfail':
                usage()
                return
    # pprint(globalVar.proxyRequest.proxies)
    # globalVar.proxyRequest  # alternative approach
    if _server and _token:
        show()
proxy = None if url.startswith('https://'): porxy = self.rand_proxy('https') elif url.startswith('http://'): porxy = self.rand_proxy('all') else: print 'unknow type of url' return requests.get(url, proxies=proxy, **kwargs) def post(self, url, **kwargs): proxy = None if url.startswith('https://'): porxy = self.rand_proxy('https') elif url.startswith('http://'): porxy = self.rand_proxy('all') else: print 'unknow type of url' return requests.post(url, proxies=proxy, **kwargs) # ---------------------------------------------------------------------------------------------------- # # ---------------------------------------------------------------------------------------------------- if __name__ == '__main__': from proxyScraper_class import ProxyScraper # apr = AutoProxyRequests() # a = apr.get('http://www.baidu.com') # print a.text ps = ProxyScraper() ps.scrap_proxies_1() ps.check_proxies()
def get(self, url, **kwargs): proxy = None if url.startswith('https://'): porxy = self.rand_proxy('https') elif url.startswith('http://'): porxy = self.rand_proxy('all') else: print 'unknow type of url' return requests.get(url,proxies=proxy,**kwargs) def post(self, url, **kwargs): proxy = None if url.startswith('https://'): porxy = self.rand_proxy('https') elif url.startswith('http://'): porxy = self.rand_proxy('all') else: print 'unknow type of url' return requests.post(url,proxies=proxy,**kwargs) # ---------------------------------------------------------------------------------------------------- # # ---------------------------------------------------------------------------------------------------- if __name__ == '__main__': from proxyScraper_class import ProxyScraper # apr = AutoProxyRequests() # a = apr.get('http://www.baidu.com') # print a.text ps = ProxyScraper() ps.scrap_proxies_1() ps.check_proxies()
    return
    # Login bootstrap: when no cached server/token pair was supplied, create a
    # WebUser session and publish the credentials (and, optionally, a scraped
    # proxy pool) into the shared globalVar state.
    # NOTE(review): chunk starts mid-function (the 'return' above closes a
    # branch cut off before this view); indentation reconstructed from syntax.
    if _server and _token:
        pass
    else:
        try:
            user = WebUser()
            if user.server and user.token:
                _server = user.server
                _token = user.token
                # init global var
                globalVar.server = _server
                globalVar.token = _token
            if _auto_proxy:
                # Pre-fill the shared proxy pool (requests up to 1000 proxies).
                ps = ProxyScraper()
                ps.proxies_get(1000)
                globalVar.proxyRequest.add_proxies(ps.format_proxie(type=1))
        except Exception, e:
            # NOTE(review): Python 2 'except Exception, e' syntax; e[0] assumes
            # the exception args start with a tag string such as
            # 'WebUser.loginfail' -- confirm against WebUser's raise sites.
            print 'Exception', e
            if e[0] == 'WebUser.loginfail':
                usage()
                return
    # pprint(globalVar.proxyRequest.proxies)
    # globalVar.proxyRequest  # alternative approach
    # Mode dispatch: logged-in plugin listing, target scan, listener mode,
    # proxy-list refresh, or usage help as the fallback.
    if _server and _token:
        show()
        # print '_token=',_token
        loadPlugins(_pluginpath,_server,_token)
    elif '_target' in dir():
        # plugin type scan
        if '_plugin' in dir():
            sn = PluginMultiRunner(server=_server,token=_token,target=_target,loglevel=_vv,threads=_threads,pluginfilepath=_plugin,pluginargs=_plugin_arg)
            sn.initInfo()
            sn.scan()
        else:
            # Full scan: gather host info to _gather_depth, then scan.
            sn = Scanner(server=_server,token=_token,target=_target,threads=_threads,loglevel=_vv,gatherdepth=_gather_depth)
            sn.initInfo()
            sn.infoGather(depth=_gather_depth)
            sn.scan()
    elif _listen:
        li = Listener(server=_server, token=_token, loglevel=_vv, maxsize=_maxsize)
        li.run()
    elif _update_proxy:
        # Refresh the proxy list and submit it to the server.
        ps = ProxyScraper()
        ps.scrap_proxies_1()
        ps.proxies_submit()
    else:
        usage()

# ----------------------------------------------------------------------------------------------------
#
# ----------------------------------------------------------------------------------------------------
if __name__=='__main__':
    main()