def testDefaultTimeout(self): # Testing default timeout # The default timeout should initially be None self.assertEqual(socket.getdefaulttimeout(), None) s = socket.socket() self.assertEqual(s.gettimeout(), None) s.close() # Set the default timeout to 10, and see if it propagates socket.setdefaulttimeout(10) self.assertEqual(socket.getdefaulttimeout(), 10) s = socket.socket() self.assertEqual(s.gettimeout(), 10) s.close() # Reset the default timeout to None, and see if it propagates socket.setdefaulttimeout(None) self.assertEqual(socket.getdefaulttimeout(), None) s = socket.socket() self.assertEqual(s.gettimeout(), None) s.close() # Check that setting it to an invalid value raises ValueError self.assertRaises(ValueError, socket.setdefaulttimeout, -1) # Check that setting it to an invalid type raises TypeError self.assertRaises(TypeError, socket.setdefaulttimeout, "spam")
def test_proxy_ip(ip, port, _type='http'):
    """Proxy-IP liveness check.

    Parameters:
        ip: proxy IP address.
        port: proxy port number (string).
        _type: proxy type ('http', 'https', 'socket'); this module only
            collects HTTP proxies.

    Returns:
        ((ip, port, _type), flag) — the proxy info as given, and a flag
        that is True when the proxy is usable, False otherwise.
    """
    usable = False
    try:
        socket.setdefaulttimeout(10)
        # Route a request through the candidate proxy to an echo page that
        # reports the caller's IP; a working proxy echoes its own address.
        handler = request.ProxyHandler({_type.lower(): ip + ':' + port})
        opener = request.build_opener(handler)
        reply = opener.open('http://14.17.127.67:81/t.php')
        echoed_ip = reply.read().decode()
        if echoed_ip == ip:
            usable = True
    except Exception:
        # Any failure (connect error, timeout, bad body) marks the proxy
        # as unusable.
        pass
    return (ip, port, _type), usable
def main(bitHopper): backdoor_port = bitHopper.config.getint('backdoor', 'port') try: lastDefaultTimeout = socket.getdefaulttimeout() socket.setdefaulttimeout(None) bitHopper.pile.spawn(backdoor.backdoor_server, eventlet.listen(('127.0.0.1', backdoor_port)), locals={'bh':bitHopper}) socket.setdefaulttimeout(lastDefaultTimeout) except Exception, e: bitHopper.log_msg("Unable to start up backdoor: %s") % (e)
def main(bitHopper): backdoor_port = bitHopper.config.getint('backdoor', 'port') try: lastDefaultTimeout = socket.getdefaulttimeout() socket.setdefaulttimeout(None) bitHopper.pile.spawn(backdoor.backdoor_server, eventlet.listen(('127.0.0.1', backdoor_port)), locals={'bh': bitHopper}) socket.setdefaulttimeout(lastDefaultTimeout) except Exception, e: logging.info("Unable to start up backdoor: %s") % (e)
def main(bitHopper): lastDefaultTimeout = socket.getdefaulttimeout() options = bitHopper.options config = bitHopper.config log = None if options.debug: backdoor_port = config.getint('backdoor', 'port') backdoor_enabled = config.getboolean('backdoor', 'enabled') if backdoor_enabled: try: socket.setdefaulttimeout(None) bitHopper.pile.spawn(backdoor.backdoor_server, eventlet.listen(('127.0.0.1', backdoor_port)), locals={'bh':bitHopper}) socket.setdefaulttimeout(lastDefaultTimeout) except Exception, e: print e
'&Operation=Upload&DestUpFile=endpointconfig.cfg&' + '\r\n' +\ '--' + boundary + '\r\n' +\ 'Content-Disposition: form-data; name="importConfigFile"; filename="endpointconfig.cfg"\r\n' +\ 'Content-Type: application/octet-stream\r\n' +\ '\r\n' +\ cfgcontent + '\r\n' +\ '--' + boundary + '--\r\n' request = urllib2.Request( 'http://' + self._ip + '/fcgi/do?id=6&id=2', postdata, {'Content-Type': ' multipart/form-data; boundary=' + boundary}) # The phone configuration restore is known to hang for 25-30 seconds oldtimeout = socket.getdefaulttimeout() socket.setdefaulttimeout(40) try: response = opener.open(request) finally: socket.setdefaulttimeout(oldtimeout) body = response.read() if not 'reboot' in body.lower(): logging.error('Endpoint %s@%s failed to maintain authentication (POST)' % (self._vendorname, self._ip)) os.remove(sConfigPath) return False except socket.error, e: logging.error('Endpoint %s@%s failed to connect - %s' % (self._vendorname, self._ip, str(e))) return False
def main(): parser = optparse.OptionParser(description='bitHopper') parser.add_option('--debug', action= 'store_true', default = False, help='Extra error output. Basically print all caught errors') parser.add_option('--trace', action= 'store_true', default = False, help='Extra debugging output') parser.add_option('--listschedulers', action='store_true', default = False, help='List alternate schedulers available') parser.add_option('--port', type = int, default=8337, help='Port to listen on') parser.add_option('--scheduler', type=str, default='DefaultScheduler', help='Select an alternate scheduler') parser.add_option('--threshold', type=float, default=None, help='Override difficulty threshold (default 0.43)') parser.add_option('--altslicesize', type=int, default=900, help='Override Default AltSliceScheduler Slice Size of 900') parser.add_option('--altminslicesize', type=int, default=60, help='Override Default Minimum Pool Slice Size of 60 (AltSliceScheduler only)') parser.add_option('--altslicejitter', type=int, default=0, help='Add some random variance to slice size, disabled by default (AltSliceScheduler only)') parser.add_option('--altsliceroundtimebias', action='store_true', default=False, help='Bias slicing slightly by round time duration with respect to round time target (default false)') parser.add_option('--altsliceroundtimetarget', type=int, default=1000, help='Round time target based on GHash/s (default 1000 Ghash/s)') parser.add_option('--altsliceroundtimemagic', type=int, default=10, help='Round time magic number, increase to bias towards round time over shares') parser.add_option('--config', type=str, default='bh.cfg', help='Select an alternate main config file from bh.cfg') parser.add_option('--p2pLP', action='store_true', default=False, help='Starts up an IRC bot to validate LP based hopping.') parser.add_option('--ip', type = str, default='', help='IP to listen on') parser.add_option('--auth', type = str, default=None, help='User,Password') 
parser.add_option('--logconnections', default = False, action='store_true', help='show connection log') parser.add_option('--simple_logging', default = False, action='store_true', help='remove RCP logging from output') options = parser.parse_args()[0] if options.trace == True: options.debug = True if options.listschedulers: schedulers = "" for s in scheduler.Scheduler.__subclasses__(): schedulers += ", " + s.__name__ print "Available Schedulers: " + schedulers[2:] return config = ConfigParser.ConfigParser() try: # determine if application is a script file or frozen exe if hasattr(sys, 'frozen'): application_path = os.path.dirname(sys.executable) elif __file__: application_path = os.path.dirname(__file__) if not os.path.exists(os.path.join(application_path, options.config)): print "Missing " + options.config + " may need to rename bh.cfg.default" os._exit(-1) config.read(os.path.join(application_path, options.config)) except: if not os.path.exists(options.config): print "Missing " + options.config + " may need to rename bh.cfg.default" os._exit(-1) config.read(options.config) bithopper_instance = BitHopper(options, config) if options.auth: auth = options.auth.split(',') bithopper_instance.auth = auth if len(auth) != 2: print 'User,Password. 
Not whatever you just entered' return # auth from config try: c = config.get('auth', 'username'), config.get('auth', 'password') bithopper_instance.auth = c except: pass override_scheduler = False if options.scheduler != None: scheduler_name = options.scheduler override_scheduler = True try: sched = config.get('main', 'scheduler') if sched != None: override_scheduler = True scheduler_name = sched except: pass if override_scheduler: bithopper_instance.log_msg("Selecting scheduler: " + scheduler_name) foundScheduler = False for s in scheduler.Scheduler.__subclasses__(): if s.__name__ == scheduler_name: bithopper_instance.scheduler = s(bithopper_instance) foundScheduler = True break if not foundScheduler: bithopper_instance.log_msg("Error couldn't find: " + scheduler_name + ". Using default scheduler.") bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance) else: bithopper_instance.log_msg("Using default scheduler.") bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance) bithopper_instance.select_best_server() if options.p2pLP: bithopper_instance.log_msg('Starting p2p LP') bithopper_instance.lpBot = LpBot(bithopper_instance) lastDefaultTimeout = socket.getdefaulttimeout() if options.logconnections: log = None else: log = open(os.devnull, 'wb') while True: try: listen_port = options.port try: listen_port = config.getint('main', 'port') except ConfigParser.Error: bithopper_instance.log_dbg("Unable to load main listening port from config file") pass socket.setdefaulttimeout(None) wsgi.server(eventlet.listen((options.ip,listen_port)),bithopper_instance.website.handle_start, log=log) socket.setdefaulttimeout(lastDefaultTimeout) break except Exception, e: bithopper_instance.log_msg("Exception in wsgi server loop, restarting wsgi in 60 seconds\n%s") % (e) eventlet.sleep(60)
def main():
    """Command-line entry point: parse options, load the config file,
    select a scheduler, fire the startup plugin hook, and run the wsgi
    server loop until it exits cleanly (restarting 60 s after a crash)."""
    parser = optparse.OptionParser(description="bitHopper")
    parser.add_option("--debug", action="store_true", default=False, help="Extra error output. Basically print all caught errors")
    parser.add_option("--trace", action="store_true", default=False, help="Extra debugging output")
    parser.add_option("--listschedulers", action="store_true", default=False, help="List alternate schedulers available")
    parser.add_option("--port", type=int, default=8337, help="Port to listen on")
    parser.add_option("--scheduler", type=str, default="DefaultScheduler", help="Select an alternate scheduler")
    parser.add_option("--threshold", type=float, default=None, help="Override difficulty threshold (default 0.43)")
    parser.add_option("--config", type=str, default="bh.cfg", help="Select an alternate main config file from bh.cfg")
    parser.add_option("--ip", type=str, default="", help="IP to listen on")
    parser.add_option("--auth", type=str, default=None, help="User,Password")
    parser.add_option("--logconnections", default=False, action="store_true", help="show connection log")
    # parser.add_option('--simple_logging', default = False, action='store_true', help='remove RCP logging from output')
    options = parser.parse_args()[0]
    # --trace implies --debug.
    if options.trace == True:
        options.debug = True
    # --listschedulers: print available Scheduler subclasses and exit.
    if options.listschedulers:
        schedulers = ""
        for s in scheduler.Scheduler.__subclasses__():
            schedulers += ", " + s.__name__
        print "Available Schedulers: " + schedulers[2:]
        return
    config = ConfigParser.ConfigParser()
    try:
        # determine if application is a script file or frozen exe
        if hasattr(sys, "frozen"):
            application_path = os.path.dirname(sys.executable)
        elif __file__:
            application_path = os.path.dirname(__file__)
        if not os.path.exists(os.path.join(application_path, options.config)):
            print "Missing " + options.config + " may need to rename bh.cfg.default"
            os._exit(1)
        config.read(os.path.join(application_path, options.config))
    except:
        # Fall back to looking for the config in the current directory.
        if not os.path.exists(options.config):
            print "Missing " + options.config + " may need to rename bh.cfg.default"
            os._exit(1)
        config.read(options.config)
    bithopper_instance = BitHopper(options, config)
    # Credentials given as --auth user,password.
    if options.auth:
        auth = options.auth.split(",")
        bithopper_instance.auth = auth
        if len(auth) != 2:
            print "User,Password. Not whatever you just entered"
            return
    # auth from config
    try:
        c = config.get("auth", "username"), config.get("auth", "password")
        bithopper_instance.auth = c
    except:
        pass
    # Scheduler selection: a value in the config file wins over --scheduler.
    override_scheduler = False
    if options.scheduler != None:
        scheduler_name = options.scheduler
        override_scheduler = True
    try:
        sched = config.get("main", "scheduler")
        if sched != None:
            override_scheduler = True
            scheduler_name = sched
    except:
        pass
    if override_scheduler:
        bithopper_instance.log_msg("Selecting scheduler: " + scheduler_name)
        foundScheduler = False
        for s in scheduler.Scheduler.__subclasses__():
            if s.__name__ == scheduler_name:
                bithopper_instance.scheduler = s(bithopper_instance)
                foundScheduler = True
                break
        if not foundScheduler:
            # Unknown name: fall back to the default scheduler.
            bithopper_instance.log_msg("Error couldn't find: " + scheduler_name + ". Using default scheduler.")
            bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance)
    else:
        bithopper_instance.log_msg("Using default scheduler.")
        bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance)
    bithopper_instance.select_best_server()
    # Remember the global socket timeout so it can be restored after
    # the wsgi server returns.
    lastDefaultTimeout = socket.getdefaulttimeout()
    if options.logconnections:
        log = None
    else:
        # Discard the connection log unless --logconnections was given.
        log = open(os.devnull, "wb")
    # Let plugins run their startup hooks before serving.
    hook = plugins.Hook("plugins.bithopper.startup")
    hook.notify(bithopper_instance, config, options)
    # Serve forever; on any crash, wait a minute and restart the server.
    while True:
        try:
            listen_port = options.port
            try:
                listen_port = config.getint("main", "port")
            except ConfigParser.Error:
                bithopper_instance.log_dbg("Unable to load main listening port from config file")
                pass
            socket.setdefaulttimeout(None)
            wsgi.server(eventlet.listen((options.ip, listen_port)), bithopper_instance.website.handle_start, log=log)
            socket.setdefaulttimeout(lastDefaultTimeout)
            break
        except Exception, e:
            bithopper_instance.log_msg("Exception in wsgi server loop, restarting wsgi in 60 seconds\n%s" % (str(e)))
            eventlet.sleep(60)
def main(): parser = optparse.OptionParser(description="bitHopper") parser.add_option( "--debug", action="store_true", default=False, help="Extra error output. Basically print all caught errors" ) parser.add_option("--trace", action="store_true", default=False, help="Extra debugging output") parser.add_option( "--listschedulers", action="store_true", default=False, help="List alternate schedulers available" ) parser.add_option("--port", type=int, default=8337, help="Port to listen on") parser.add_option("--scheduler", type=str, default="OldDefaultScheduler", help="Select an alternate scheduler") parser.add_option("--threshold", type=float, default=None, help="Override difficulty threshold (default 0.43)") parser.add_option( "--altslicesize", type=int, default=900, help="Override Default AltSliceScheduler Slice Size of 900" ) parser.add_option( "--altminslicesize", type=int, default=60, help="Override Default Minimum Pool Slice Size of 60 (AltSliceScheduler only)", ) parser.add_option( "--altslicejitter", type=int, default=0, help="Add some random variance to slice size, disabled by default (AltSliceScheduler only)", ) parser.add_option( "--altsliceroundtimebias", action="store_true", default=False, help="Bias slicing slightly by round time duration with respect to round time target (default false)", ) parser.add_option( "--altsliceroundtimetarget", type=int, default=1000, help="Round time target based on GHash/s (default 1000 Ghash/s)", ) parser.add_option( "--altsliceroundtimemagic", type=int, default=10, help="Round time magic number, increase to bias towards round time over shares", ) parser.add_option("--config", type=str, default="bh.cfg", help="Select an alternate main config file from bh.cfg") parser.add_option( "--p2pLP", action="store_true", default=False, help="Starts up an IRC bot to validate LP based hopping." 
) parser.add_option("--ip", type=str, default="", help="IP to listen on") parser.add_option("--auth", type=str, default=None, help="User,Password") parser.add_option("--logconnections", default=False, action="store_true", help="show connection log") options = parser.parse_args()[0] if options.trace == True: options.debug = True if options.listschedulers: schedulers = "" for s in scheduler.Scheduler.__subclasses__(): schedulers += ", " + s.__name__ print "Available Schedulers: " + schedulers[2:] return config = ConfigParser.ConfigParser() try: # determine if application is a script file or frozen exe if hasattr(sys, "frozen"): application_path = os.path.dirname(sys.executable) elif __file__: application_path = os.path.dirname(__file__) if not os.path.exists(os.path.join(application_path, options.config)): print "Missing " + options.config + " may need to rename bh.cfg.default" os._exit(-1) config.read(os.path.join(application_path, options.config)) except: if not os.path.exists(options.config): print "Missing " + options.config + " may need to rename bh.cfg.default" os._exit(-1) config.read(options.config) bithopper_instance = BitHopper(options, config) if options.auth: auth = options.auth.split(",") bithopper_instance.auth = auth if len(auth) != 2: print "User,Password. 
Not whatever you just entered" return # auth from config try: c = config.get("auth", "username"), config.get("auth", "password") bithopper_instance.auth = c except: pass override_scheduler = False if options.scheduler != None: scheduler_name = options.scheduler override_scheduler = True try: sched = config.get("main", "scheduler") if sched != None: override_scheduler = True scheduler_name = sched except: pass if override_scheduler: bithopper_instance.log_msg("Selecting scheduler: " + scheduler_name) foundScheduler = False for s in scheduler.Scheduler.__subclasses__(): if s.__name__ == scheduler_name: bithopper_instance.scheduler = s(bithopper_instance) foundScheduler = True break if not foundScheduler: bithopper_instance.log_msg("Error couldn't find: " + scheduler_name + ". Using default scheduler.") bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance) else: bithopper_instance.log_msg("Using default scheduler.") bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance) bithopper_instance.select_best_server() if options.p2pLP: bithopper_instance.log_msg("Starting p2p LP") bithopper_instance.lpBot = LpBot(bithopper_instance) lastDefaultTimeout = socket.getdefaulttimeout() if options.logconnections: log = None else: log = open(os.devnull, "wb") while True: try: listen_port = options.port try: listen_port = config.getint("main", "port") except ConfigParser.Error: pass socket.setdefaulttimeout(None) wsgi.server(eventlet.listen((options.ip, listen_port)), bithopper_instance.website.handle_start, log=log) socket.setdefaulttimeout(lastDefaultTimeout) break except Exception, e: print e eventlet.sleep(60)
import eventlet eventlet.monkey_patch(os=False) from eventlet.green import httplib, socket import traceback import urlparse import sys from mongoengine import * connect('pi', host='pi-mongo01-int.dal.monitorengine.com', port=27017) from models.mongo_model import * import socket socket.setdefaulttimeout(10.) import urllib2 offset = 200000 count = 10000 num_updated = 0 num_seen = 0 i = 0 counts = {'na': 0, 'eu': 0, 'sa': 0, 'af': 0, 'au': 0, 'as': 0} for d in Domain.objects.filter(continent=None): #[offset:offset+count]: num_seen += 1 i += 1 if i == 1000: i = 0 print "%s: %s" % (num_seen, counts)
def _sendPhoneConfiguration(self, xmlcontent): try: # Login into interface opener = urllib2.build_opener(urllib2.HTTPCookieProcessor()) response = opener.open( 'http://' + self._ip + '/console/j_security_check', urllib.urlencode({ 'submit': 'Login', 'j_username': self._http_username, 'j_password': self._http_password })) body = response.read() if not '/console/start' in body: logging.error( 'Endpoint %s@%s - j_security_check failed login' % (self._vendorname, self._ip)) return False # Build a custom request with form data boundary = '------------------ENDPOINTCONFIG' postdata = '--' + boundary + '\r\n' +\ 'Content-Disposition: form-data; name="COMMAND"\r\n' +\ '\r\n' +\ 'RX' + '\r\n' +\ '--' + boundary + '\r\n' +\ 'Content-Disposition: form-data; name="RX"; filename="config.xml"\r\n' +\ 'Content-Type: text/xml\r\n' +\ '\r\n' +\ xmlcontent + '\r\n' +\ '--' + boundary + '--\r\n' filerequest = urllib2.Request( 'http://' + self._ip + '/console/configuration', postdata, {'Content-Type': 'multipart/form-data; boundary=' + boundary}) # The phone configuration restore is known to hang for 25-30 seconds oldtimeout = socket.getdefaulttimeout() socket.setdefaulttimeout(40) try: response = opener.open(filerequest) finally: socket.setdefaulttimeout(oldtimeout) body = response.read() if not 'Configuration restore complete' in body: logging.error('Endpoint %s@%s - configuration post failed' % (self._vendorname, self._ip)) return False # Attempt to set just the provisioning server response = opener.open( 'http://' + self._ip + '/console/general', urllib.urlencode({ 'COMMAND': 'AP', '@p.provisioningServer': self._serverip, '@dhcp_option_protocol': 'TFTP' })) body = response.read() # Since the web interface will NOT immediately apply the network # changes, we need to go raw and ssh into the phone. Additionally, # if we are changing the network setting from DHCP to static or # viceversa, we expect the SSH connection to be disconnected in the # middle of the update. 
A timeout of 5 seconds should do it. if self._dhcp: command = '/root/dhcp-configure.sh' else: dns2 = 'none' if self._static_dns2 != None: dns2 = self._static_dns2 command = '/root/staticip-configure.sh %s %s %s %s %s' %\ (self._static_ip, self._static_mask, self._static_gw, self._static_dns1, dns2) ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.WarningPolicy()) ssh.connect(self._ip, username=self._ssh_username, password=self._ssh_password, timeout=5) stdin, stdout, stderr = ssh.exec_command(command) logging.info( 'Endpoint %s@%s - about to set timeout of %d on stdout' % ( self._vendorname, self._ip, oldtimeout, )) stdout.channel.settimeout(5) try: s = stdout.read() logging.info('Endpoint %s@%s - answer follows:\n%s' % ( self._vendorname, self._ip, s, )) except socket.error, e: pass ssh.close() return True
def main():
    """Command-line entry point: parse options, configure log verbosity,
    load the config file, select a scheduler, fire the startup plugin hook,
    and run the wsgi server loop (restarting 60 s after any crash)."""
    parser = optparse.OptionParser(description='bitHopper')
    parser.add_option('--debug', action='store_true', default=False, help='Extra error output. Basically print all caught errors')
    parser.add_option('--trace', action='store_true', default=False, help='Extra debugging output')
    parser.add_option('--listschedulers', action='store_true', default=False, help='List alternate schedulers available')
    parser.add_option('--port', type=int, default=8337, help='Port to listen on')
    parser.add_option('--scheduler', type=str, default='DefaultScheduler', help='Select an alternate scheduler')
    parser.add_option('--threshold', type=float, default=None, help='Override difficulty threshold (default 0.43)')
    parser.add_option('--config', type=str, default='bh.cfg', help='Select an alternate main config file from bh.cfg')
    parser.add_option('--ip', type=str, default='', help='IP to listen on')
    parser.add_option('--auth', type=str, default=None, help='User,Password')
    parser.add_option('--logconnections', default=False, action='store_true', help='show connection log')
    # parser.add_option('--simple_logging', default = False, action='store_true', help='remove RCP logging from output')
    options = parser.parse_args()[0]
    # Root-logger verbosity: DEBUG for --debug, everything (level 0) for
    # --trace, INFO otherwise.
    if options.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    elif options.trace:
        logging.getLogger().setLevel(0)
    else:
        logging.getLogger().setLevel(logging.INFO)
    # --listschedulers: print available Scheduler subclasses and exit.
    if options.listschedulers:
        schedulers = ""
        for s in scheduler.Scheduler.__subclasses__():
            schedulers += ", " + s.__name__
        print "Available Schedulers: " + schedulers[2:]
        return
    config = ConfigParser.ConfigParser()
    try:
        # determine if application is a script file or frozen exe
        if hasattr(sys, 'frozen'):
            application_path = os.path.dirname(sys.executable)
        elif __file__:
            application_path = os.path.dirname(__file__)
        if not os.path.exists(os.path.join(application_path, options.config)):
            print "Missing " + options.config + " may need to rename bh.cfg.default"
            os._exit(1)
        config.read(os.path.join(application_path, options.config))
    except:
        # Fall back to looking for the config in the current directory.
        if not os.path.exists(options.config):
            print "Missing " + options.config + " may need to rename bh.cfg.default"
            os._exit(1)
        config.read(options.config)
    bithopper_instance = BitHopper(options, config)
    # Credentials given as --auth user,password.
    if options.auth:
        auth = options.auth.split(',')
        bithopper_instance.auth = auth
        if len(auth) != 2:
            print 'User,Password. Not whatever you just entered'
            return
    # auth from config
    try:
        c = config.get('auth', 'username'), config.get('auth', 'password')
        bithopper_instance.auth = c
    except:
        pass
    # Scheduler selection: a value in the config file wins over --scheduler.
    override_scheduler = False
    if options.scheduler != None:
        scheduler_name = options.scheduler
        override_scheduler = True
    try:
        sched = config.get('main', 'scheduler')
        if sched != None:
            override_scheduler = True
            scheduler_name = sched
    except:
        pass
    if override_scheduler:
        logging.info("Selecting scheduler: " + scheduler_name)
        foundScheduler = False
        for s in scheduler.Scheduler.__subclasses__():
            if s.__name__ == scheduler_name:
                bithopper_instance.scheduler = s(bithopper_instance)
                foundScheduler = True
                break
        if not foundScheduler:
            # Unknown name: fall back to the default scheduler.
            logging.info("Error couldn't find: " + scheduler_name + ". Using default scheduler.")
            bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance)
    else:
        logging.info("Using default scheduler.")
        bithopper_instance.scheduler = scheduler.DefaultScheduler(bithopper_instance)
    bithopper_instance.select_best_server()
    # Remember the global socket timeout so it can be restored after the
    # wsgi server returns.
    lastDefaultTimeout = socket.getdefaulttimeout()
    if options.logconnections:
        log = None
    else:
        # Discard the connection log unless --logconnections was given.
        log = open(os.devnull, 'wb')
    # Let plugins run their startup hooks before serving.
    hook = plugins.Hook('plugins.bithopper.startup')
    hook.notify(bithopper_instance, config, options)
    # Serve forever; on any crash, wait a minute and restart the server.
    while True:
        try:
            listen_port = options.port
            try:
                listen_port = config.getint('main', 'port')
            except ConfigParser.Error:
                logging.debug("Unable to load main listening port from config file")
                pass
            #This ugly wrapper is required so wsgi server doesn't die
            socket.setdefaulttimeout(None)
            wsgi.server(eventlet.listen((options.ip, listen_port), backlog=500), bithopper_instance.website.handle_start, log=log, max_size=8000)
            socket.setdefaulttimeout(lastDefaultTimeout)
            break
        except Exception, e:
            logging.info("Exception in wsgi server loop, restarting wsgi in 60 seconds\n%s" % (str(e)))
            eventlet.sleep(60)
#License# #bitHopper by Colin Rice is licensed under a Creative Commons # Attribution-NonCommercial-ShareAlike 3.0 Unported License. #Based on a work at github.com. import eventlet from eventlet.green import threading, time, socket # Global timeout for sockets in case something leaks socket.setdefaulttimeout(900) class Speed(): """ This class keeps track of the number of shares and tracks a running rate in self.rate Add shares with a = Speed() a.add_shares(1) Get the rate with a.get_rate() Note rates are tallied once per minute. """ def __init__(self): self.shares = 0 eventlet.spawn_n(self.update_rate) self.rate = 0
import eventlet eventlet.monkey_patch(os=False) from eventlet.green import httplib, socket import traceback import urlparse import sys from mongoengine import * connect('pi', host='10.8.22.3', port=27027) from models.mongo_model import * import socket socket.setdefaulttimeout(5.) import urllib2 def check_domain(d): check_domain_name = None try: headers = {'User-Agent': 'Mozilla/5.0'} req = urllib2.Request('http://%s' % d.domain_name, None, headers) r = urllib2.urlopen(req) #r = urllib2.urlopen('http://%s' % d.domain_name) check_domain_name = urlparse.urlparse(r.geturl()).netloc except: try: headers = {'User-Agent': 'Mozilla/5.0'} req = urllib2.Request('http://www.%s' % d.domain_name, None, headers) r = urllib2.urlopen(req) #'http://www.%s' % d.domain_name)
def get_info(corp, proxyinfo=''):
    """Scrape company registration (business-licence) information from the
    Jiangsu AIC site (www.jsgsj.gov.cn).

    Parameters:
        corp: company name to query.
        proxyinfo: proxy as "ip:port"; empty string means use the local IP.

    Returns:
        [corp, base_info, status, proxyinfo] where status is
        0 = OK, 1 = proxy dead / site unreachable, 2 = IP banned,
        3 = company not found.
    """
    socket.setdefaulttimeout(10)
    """采集函数
    参数说明:
        corp 公司名称
        proxyinfo 代理ip (格式为 ip:port) 为空时使用本机ip
    返回值说明:
        status 状态码
        base_info 采集到的工商信息
    """
    # Status codes: 0 OK, 1 proxy invalid or site unreachable,
    # 2 IP banned, 3 company does not exist.
    status = 0
    # Basic registration info collected for the company.
    base_info = {}
    # Shareholder info ([name, value] pairs from the investment query).
    boss_info = []
    # Maps the site's JSON field codes to our base_info keys.
    title_base = {'C2':'name','C1':'reg_no','C3':'type','C4':'reg_date','C5':'faren','C6':'reg_capital','C7':'addr','C8':'biz_scope','C9':'open_date','C10':'close_date','C11':'reg_authority','C12':'audit_date','C13':'reg_status'}
    #pdb.set_trace()
    # HTTP helper (project-local wrapper handling cookies/headers).
    http = HttpWrap()
    # Proxy format: {"http|https": "ip:port"}.
    if proxyinfo:
        http.set_proxy({'http':proxyinfo})
    res = http.request(url_home, method='GET')
    # Hit the home page first to obtain session cookies; bail out as
    # "unreachable" if that fails, recording the bad proxy.
    if res.code != 200:
        #print(res.code)
        if res.code > 200:
            ille_proxy_ip.add(proxyinfo)
        return [corp,base_info,1,proxyinfo]
    """验证过程,循环验证直到成功"""
    # CAPTCHA loop: retry until the verify code is accepted.
    flag = 0
    html = ""
    cu_time = int(time.time())
    # Number of failed attempts so far.
    err_type = 0
    while flag == 0:
        #if datamodel.g_exit:
        #    return [corp,base_info,1,proxyinfo]
        try:
            rand_time = time.strftime('%a %b %d %Y %H:%M:%S GMT 0800')
            url = url_code #% rand_time
            res = http.request(url, method='GET')
            data = {}
            #print('step...1')
            if res.code == 200:
                # Fetch the CAPTCHA image bytes.
                try:
                    im = res.read()
                except:
                    im = ''
                    continue
                # Send the image to the external decoding service.
                code = http_upload_image(img_decode_url, im)
                # Manual CAPTCHA entry (debugging alternative):
                #code = raw_input('input the code:').decode('gbk').encode('utf-8')
                #print(code)
                #print('step...2')
                data = {'name':corp,'verifyCode':code}
                # Reset headers so the POST looks like the site's AJAX call.
                http.reset_headers()
                http.set_header('Accetp','application/json, text/javascript, */*; q=0.01')
                http.set_header('Content-Type','application/x-www-form-urlencoded; charset=UTF-8')
                http.set_header('Referer',url_home)
                http.set_header('X-Requested-With','XMLHttpRequest')
                res = http.request(url_check,"POST",data)
                #print('step...3')
                if res.code == 200:
                    html = http.read(res)
                    jdata = json.loads(html)
                    #print(jdata)
                    # A TIPS message mentioning 'IP' means this IP is banned.
                    if jdata[0]['TIPS'] and 'IP' in jdata[0]['TIPS']:
                        #print(jdata)
                        ille_proxy_ip.add(proxyinfo)
                        return [corp,base_info,2,proxyinfo]
                    # "No results matching the query" -> company not found.
                    if "没有符合查询条件的结果" in jdata[0]['COUNT']:
                        return [corp,base_info,3,proxyinfo]
                    # logger.info("iperror:%" % jdata[0]['TIPS'])
                    #print ("res:",html)
                    # Empty TIPS means the CAPTCHA was accepted; INFO holds
                    # the result-list HTML.
                    if not jdata[0]['TIPS']:
                        html = jdata[0]['INFO']
                        break
                    else:
                        err_type += 1
                        #return [corp,base_info,1,proxyinfo]
            else:
                err_type += 1
            # Give up after more than 10 failed attempts.
            if err_type > 10:
                return [corp,base_info,1,proxyinfo]
        except Exception as e:
            traceback.print_exc()
            time.sleep(1)
    #pdb.set_trace()
    # Result-list page: pull the detail-page URL out of the first link.
    if not html:
        return [corp,base_info,1,proxyinfo]
    #print ("html:",html)
    try:
        context = etree.HTML(html)
        nodes = context.xpath("//a")
        # The onclick attribute carries the detail-page parameters; strip
        # the JS wrapper and split into fields.
        link_info = nodes[0].attrib['onclick'].strip()[12:-2].replace("'",'').split(',')
        url = 'http://www.jsgsj.gov.cn:58888%s' % (link_info[0].strip())
        data = {'containContextPath':link_info[5].strip(),'id':link_info[2].strip(), 'name':'','org':link_info[1].strip(),'reg_no':link_info[4].strip(),'seq_id':link_info[3].strip()}
        # Detail page: basic registration data.
        #self.reset_headers()
        #self.set_headers('Content-Type','application/x-www-form-urlencoded; charset=UTF-8')
        #self.set_headers('Accept','text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')
        #self.set_headers('Referer','http://www.jsgsj.gov.cn:58888/province/queryResultList.jsp')
        #res = self.request(url)
        ###############
        #http.reset_headers()
        '''
        self.set_headers('Accept','application/json, text/javascript, */*; q=0.01')
        self.set_headers('Content-Type','application/x-www-form-urlencoded; charset=UTF-8')
        self.set_headers('X-Requested-With','XMLHttpRequest')
        self.set_headers('Referer',http://www.jsgsj.gov.cn:58888/ecipplatform/inner_pspc/pspc_queryCorpInfor_gsRelease.jsp')
        '''
        http.headers = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)','Accept-Encoding': 'gzip, deflate','Accept-Language': 'zh-CN'}
        http.headers['Accept'] = 'application/json, text/javascript, */*; q=0.01'
        http.headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
        http.headers['Referer'] = 'http://www.jsgsj.gov.cn:58888/ecipplatform/inner_pspc/pspc_queryCorpInfor_gsRelease.jsp'
        # Basic-info request.
        url = "http://www.jsgsj.gov.cn:58888/ecipplatform/ciServlet.json?ciEnter=true"
        data = {'id':link_info[2].strip(),'org':link_info[1].strip(),'seq_id':link_info[3].strip(),'specificQuery':'basicInfo'}
        base_info['gov_url'] = json.dumps(data)
        res = http.request(url,'POST',data)
        # Connection error -> treat as failure.
        if res.code == -1:
            print(res.code)
            return [corp,base_info,1,proxyinfo]
        info = res.read().decode()
        #print(res.code,info)
        data = json.loads(info)[0]
        base_info['corp_id'] = link_info[2].strip()
        base_info['corp_org'] = link_info[1].strip()
        base_info['corp_seq_id'] = link_info[3].strip()
        for k,v in data.items():
            if k in title_base:
                base_info[title_base[k]] = v
        # Shareholder-info request.
        url = 'http://www.jsgsj.gov.cn:58888/ecipplatform/ciServlet.json?ciEnter=true'
        data = {'CORP_ID':link_info[2].strip(),'CORP_ORG':link_info[1].strip(),'CORP_SEQ_ID':link_info[3].strip(),'pageNo':1,'pageSize':5,'showRecordLine':1,'specificQuery':'investmentInfor'}
        res = http.request(url,'POST',data)
        # If fetching shareholders fails, give up on them and return what
        # we have so far.
        if res.code != 200:
            return [corp,base_info,status,proxyinfo]
        info = res.read().decode()
        #print(res.code,info)
        try:
            data = json.loads(info)
            for row in data['items']:
                boss_info.append([row['C1'],row['C2']])
            if boss_info:
                base_info['shareholders'] = json.dumps(boss_info)
        except Exception as e:
            traceback.print_exc()
    except Exception as e:
        #traceback.print_exc()
        # Parsing the normal detail page failed: assume the company is
        # deregistered and scrape what we can from the HTML with regexes.
        try:
            base_info['name'] = corp
            base_info['reg_status'] = '已注销'
            pe = {'reg_no':'注册号:\<span\>(.*?)\<',
                  'faren':'法定代表人:\<span\>(.*?)\<|投资人:\<span\>(.*?)\<|经营者:\<span\>(.*?)\<',
                  'reg_authority':'登记机关:\<span\>(.*?)\<',
                  'cancell_date':'注销日期:\<span\>(.*?)\<|吊销日期:\<span\>(.*?)\<'}
            for k,v in pe.items():
                rs = re.findall(v,html)
                if rs:
                    base_info[k] = rs[0]
                    # Alternation patterns yield tuples; keep the first
                    # non-empty group.
                    if type(rs[0]) in [list,tuple]:
                        if rs[0][0]:
                            base_info[k] = rs[0][0]
                        elif rs[0][1]:
                            base_info[k] = rs[0][1]
                        elif(len(rs[0])>2):
                            base_info[k] = rs[0][2]
        except:
            pass
    # Without at least a registration number the scrape is useless.
    if 'reg_no' not in base_info:
        return [corp,base_info,1,proxyinfo]
    return [corp,base_info,status,proxyinfo]
def getPage(**kwargs): """ This method connects to the target URL or proxy and returns the target URL page content """ if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0: time.sleep(conf.delay) elif conf.cpuThrottle: cpuThrottle(conf.cpuThrottle) if conf.dummy: return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt()) threadData = getCurrentThreadData() with kb.locks.request: kb.requestCounter += 1 threadData.lastRequestUID = kb.requestCounter url = kwargs.get("url", None) or conf.url get = kwargs.get("get", None) post = kwargs.get("post", None) method = kwargs.get("method", None) cookie = kwargs.get("cookie", None) ua = kwargs.get("ua", None) or conf.agent referer = kwargs.get("referer", None) or conf.referer host = kwargs.get("host", None) or conf.host direct_ = kwargs.get("direct", False) multipart = kwargs.get("multipart", False) silent = kwargs.get("silent", False) raise404 = kwargs.get("raise404", True) timeout = kwargs.get("timeout", None) or conf.timeout auxHeaders = kwargs.get("auxHeaders", None) response = kwargs.get("response", False) ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout refreshing = kwargs.get("refreshing", False) retrying = kwargs.get("retrying", False) crawling = kwargs.get("crawling", False) skipRead = kwargs.get("skipRead", False) if not urlparse.urlsplit(url).netloc: url = urlparse.urljoin(conf.url, url) # flag to know if we are dealing with the same target host target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""])) if not retrying: # Reset the number of connection retries threadData.retriesCount = 0 # fix for known issue when urllib2 just skips the other part of provided # url splitted with space char while urlencoding it in the later phase url = url.replace(" ", "%20") conn = None code = None page = None _ = urlparse.urlsplit(url) requestMsg = u"HTTP request [#%d]:\n%s " % 
(threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET)) requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url responseMsg = u"HTTP response " requestHeaders = u"" responseHeaders = None logHeaders = u"" skipLogTraffic = False raise404 = raise404 and not kb.ignoreNotFound # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't # support those by default url = asciifyUrl(url) # fix for known issues when using url in unicode format # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case) url = unicodeencode(url) try: socket.setdefaulttimeout(timeout) if direct_: if '?' in url: url, params = url.split('?', 1) params = urlencode(params) url = "%s?%s" % (url, params) requestMsg += "?%s" % params elif multipart: # Needed in this form because of potential circle dependency # problem (option -> update -> connect -> option) from lib.core.option import proxyHandler multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler) conn = multipartOpener.open(unicodeencode(url), multipart) page = Connect._connReadProxy(conn) if not skipRead else None responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) return page elif any((refreshing, crawling)): pass elif target: if conf.forceSSL and urlparse.urlparse(url).scheme != "https": url = re.sub("\Ahttp:", "https:", url, re.I) url = re.sub(":80/", ":443/", url, re.I) if PLACE.GET in conf.parameters and not get: get = conf.parameters[PLACE.GET] if not conf.skipUrlEncode: get = urlencode(get, limit=True) if get: url = "%s?%s" % (url, get) requestMsg += "?%s" % get if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST): post = conf.parameters[PLACE.POST] elif get: url = "%s?%s" % (url, get) requestMsg 
+= "?%s" % get requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str # Prepare HTTP headers headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer}) if kb.authHeader: headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader if kb.proxyAuthHeader: headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity" headers[HTTP_HEADER.HOST] = host or getHostHeader(url) if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers: headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE) if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]: warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE warnMsg += "Will try to reconstruct" singleTimeWarnMessage(warnMsg) boundary = findMultipartPostBoundary(conf.data) if boundary: headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary) if auxHeaders: for key, item in auxHeaders.items(): for _ in headers.keys(): if _.upper() == key.upper(): del headers[_] headers[key] = item for key, item in headers.items(): del headers[key] headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding) post = unicodeencode(post, kb.pageEncoding) if method: req = MethodRequest(url, post, headers) req.set_method(method) else: req = urllib2.Request(url, post, headers) requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items()) if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj: conf.cj._policy._now = conf.cj._now = int(time.time()) cookies = conf.cj._cookies_for_request(req) requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), 
getUnicode(cookie.value)) for cookie in cookies)) if post is not None: if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH): requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post)) if not getRequestHeader(req, HTTP_HEADER.CONNECTION): requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION requestMsg += "\n%s" % requestHeaders if post is not None: requestMsg += "\n\n%s" % getUnicode(post) requestMsg += "\n" threadData.lastRequestMsg = requestMsg logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) conn = urllib2.urlopen(req) if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.authType == AUTH_TYPE.BASIC: kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION): kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION) # Return response object if response: return conn, None, None # Get HTTP response if hasattr(conn, 'redurl'): page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\ else Connect._connReadProxy(conn)) if not skipRead else None skipLogTraffic = kb.redirectChoice == REDIRECTION.NO code = conn.redcode else: page = Connect._connReadProxy(conn) if not skipRead else None code = code or conn.code responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) status = getUnicode(conn.msg) if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing: url = extractRegexResult(META_REFRESH_REGEX, page) debugMsg = "got HTML meta refresh header" logger.debug(debugMsg) if kb.alwaysRefresh is None: msg = "sqlmap got a refresh request " msg += "(redirect like response common to login pages). " msg += "Do you want to apply the refresh " msg += "from now on (or stay on the original page)? 
[Y/n]" choice = readInput(msg, default="Y") kb.alwaysRefresh = choice not in ("n", "N") if kb.alwaysRefresh: if url.lower().startswith('http://'): kwargs['url'] = url else: kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url threadData.lastRedirectMsg = (threadData.lastRequestUID, page) kwargs['refreshing'] = True kwargs['get'] = None kwargs['post'] = None try: return Connect._getPageProxy(**kwargs) except SqlmapSyntaxException: pass # Explicit closing of connection object if not conf.keepAlive: try: if hasattr(conn.fp, '_sock'): conn.fp._sock.close() conn.close() except Exception, msg: warnMsg = "problem occurred during connection closing ('%s')" % msg logger.warn(warnMsg) except urllib2.HTTPError, e: page = None responseHeaders = None try: page = e.read() if not skipRead else None responseHeaders = e.info() responseHeaders[URI_HTTP_HEADER] = e.geturl() page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) except socket.timeout: warnMsg = "connection timed out while trying " warnMsg += "to get error page information (%d)" % e.code logger.warn(warnMsg) return None, None, None except KeyboardInterrupt: raise except: pass finally: page = page if isinstance(page, unicode) else getUnicode(page) code = e.code threadData.lastHTTPError = (threadData.lastRequestUID, code) kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1 status = getUnicode(e.msg) responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) if responseHeaders: logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])) skipLogTraffic = True if conf.verbose <= 5: responseMsg += getUnicode(logHeaders) elif conf.verbose > 5: responseMsg += "%s\n\n%s" % (logHeaders, (page or 
"")[:MAX_CONNECTION_CHUNK_SIZE]) logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) if e.code == httplib.UNAUTHORIZED: errMsg = "not authorized, try to provide right HTTP " errMsg += "authentication type and valid credentials (%d)" % code raise SqlmapConnectionException(errMsg) elif e.code == httplib.NOT_FOUND: if raise404: errMsg = "page not found (%d)" % code raise SqlmapConnectionException(errMsg) else: debugMsg = "page not found (%d)" % code singleTimeLogMessage(debugMsg, logging.DEBUG) processResponse(page, responseHeaders) elif e.code == httplib.GATEWAY_TIMEOUT: if ignoreTimeout: return None, None, None else: warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code]) if threadData.retriesCount < conf.retries and not kb.threadException: warnMsg += ". sqlmap is going to retry the request" logger.critical(warnMsg) return Connect._retryProxy(**kwargs) elif kb.testMode: logger.critical(warnMsg) return None, None, None else: raise SqlmapConnectionException(warnMsg) else: debugMsg = "got HTTP error code: %d (%s)" % (code, status) logger.debug(debugMsg)
return None, None, None elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")): return None, None, None elif threadData.retriesCount < conf.retries and not kb.threadException: warnMsg += ". sqlmap is going to retry the request" logger.critical(warnMsg) return Connect._retryProxy(**kwargs) elif kb.testMode: logger.critical(warnMsg) return None, None, None else: raise SqlmapConnectionException(warnMsg) finally: page = page if isinstance(page, unicode) else getUnicode(page) socket.setdefaulttimeout(conf.timeout) processResponse(page, responseHeaders) if conn and getattr(conn, "redurl", None): _ = urlparse.urlsplit(conn.redurl) _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_), requestMsg, 1) responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status) else: responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) if responseHeaders: logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) if not skipLogTraffic:
#License# #bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License. #Based on a work at github.com. import random import re import eventlet from eventlet.green import time, threading, socket import eventlet.patcher irclib = eventlet.patcher.import_patched('irclib') SimpleIRCClient = irclib.SimpleIRCClient from peak.util import plugins # Global timeout for sockets in case something leaks socket.setdefaulttimeout(900) class LpBot(SimpleIRCClient): def __init__(self, bitHopper): SimpleIRCClient.__init__(self) self.bitHopper = bitHopper self.nick = 'lp' + str(random.randint(1,9999999999)) self.chan_list=[] self.newblock_re = re.compile('\*\*\* New Block \{(?P<server>.+)\} - (?P<hash>.*)') self.hashes = [''] self.hashinfo = {'':''} self.server='' self.current_block='' hook_startup = plugins.Hook('plugins.lpbot.init') hook_startup.notify(self) hook_ann = plugins.Hook('plugins.lp.announce')
# Attribution-NonCommercial-ShareAlike 3.0 Unported License. #Based on a work at github.com. try: import eventlet except Exception, e: print "You need to install greenlet. See the readme." raise e from eventlet import wsgi, greenpool, backdoor from eventlet.green import os, time, socket eventlet.monkey_patch() #from eventlet import debug #debug.hub_blocking_detection(True) # Global timeout for sockets in case something leaks socket.setdefaulttimeout(900) import optparse import work import diff import pool import speed import database import scheduler import website import getwork_store import data import lp import lp_callback import plugin