class ConnServer(Thread): def __init__(self): Thread.__init__(self) self.PORT = 1236 self.init = False self.startrecv = False global g_p g_p = ConnProc() global g_connlist g_connlist = [] global g_connid g_connid = [] def run(self): self.initSock() def executeCommand(self, cmd): # not implemented yet #print r = g_p.ProcCommand(cmd) print r def initSock(self): try: self.client = ThreadingTCPServer( ('', self.PORT), RecvServer) print 'listening on PORT', self.PORT self.client.serve_forever() except Exception, e: print e self.init = True;
def RunTCPServer(host='127.0.0.1', port=34567):
    """Serve MyRequestHandler on (host, port) until interrupted, then shut down."""
    server = ThreadingTCPServer((host, port), MyRequestHandler)
    try:
        server.serve_forever()
    except (KeyboardInterrupt, EOFError):
        print('Closing TCPServer ...')
        server.shutdown()
def main():
    """Start a SOCKS5 server on port 1080 with terminal echo disabled."""
    import atexit
    import termios

    def enable_echo(enable):
        # Toggle the ECHO flag on stdin's termios settings.
        fd = sys.stdin.fileno()
        new = termios.tcgetattr(fd)
        if enable:
            new[3] |= termios.ECHO
        else:
            new[3] &= ~termios.ECHO
        termios.tcsetattr(fd, termios.TCSANOW, new)

    # Make sure echo is restored no matter how the process exits.
    atexit.register(enable_echo, True)
    enable_echo(False)

    ThreadingTCPServer.allow_reuse_address = True
    server = ThreadingTCPServer(("", 1080), Socks5RequestHandler)
    server.session_manager = ClipsSessionManager(server)
    try:
        sys.stderr.write("SOCKS server listening on port 1080 ...\n")
        sys.stderr.flush()
        server.serve_forever()
    except KeyboardInterrupt:
        # BUGFIX: stop the serve_forever() loop *before* closing the listening
        # socket; the original closed first, which can make shutdown() operate
        # on a dead socket.
        server.shutdown()
        server.server_close()
        server.session_manager.stop()
def run(self): """ """ s = None HOST = ''; # Symbolic name meaning the local host. if sys.platform.startswith("win"): HOST = socket.gethostbyaddr(socket.gethostname())[2][0] else: import commands temp = commands.getoutput('/sbin/ifconfig') HOST = re.search('inet addr:(\d+\.\d+\.\d+\.\d+)', temp).group(1) if USER_PORT: print "listening on port: %s" % xoom_server_port PORT = xoom_server_port print "=================" else: print "XXXX not port specified, using default port: %s" % default_xoom_server_port PORT = default_xoom_server_port print "PORT: %s" % PORT print "SERVER->HOST: %s, SERVER->PORT: %s" % (HOST, PORT) srv = ThreadingTCPServer((HOST,int(PORT)), XMLRequestHandler) self.socket = srv.socket print "srv socket: %s" % self.socket srv.serve_forever()
class ConcurrentHTTPServer(ConcurrentServer):
    """Threaded HTTP file server rooted at an arbitrary directory."""

    def __init__(self, host='', port=8000, directory='.'):
        super(ConcurrentHTTPServer, self).__init__(host=host, port=port)

        class RequestHandler(SimpleHTTPRequestHandler):
            def translate_path(self, path):
                # Drop query string and fragment before resolving.
                path = path.split('?', 1)[0]
                path = path.split('#', 1)[0]
                trailing_slash = path.rstrip().endswith('/')
                path = posixpath.normpath(urllib.unquote(path))
                parts = filter(None, path.split('/'))
                # patch SimpleHTTPRequestHandler to use different directory than working dir
                path = directory
                for part in parts:
                    # Reject path components that try to escape the root.
                    if os.path.dirname(part) or part in (os.curdir, os.pardir):
                        continue
                    path = os.path.join(path, part)
                if trailing_slash:
                    path += '/'
                return path

        self._request_handler = RequestHandler
        self._server = None

    def _run_function(self):
        self._server = ThreadingTCPServer((self.host, self.port), self._request_handler)
        self._server.daemon_threads = True
        self._server.serve_forever()

    def _close_function(self):
        self._server.shutdown()
        self._server.server_close()
        self._server = None
class ConnServer(Thread): def __init__(self): Thread.__init__(self) self.PORT = 1236 self.init = False self.startrecv = False global g_p g_p = ConnProc() global g_connlist g_connlist = [] global g_connid g_connid = [] def run(self): self.initSock() def executeCommand(self, cmd): # not implemented yet #print r = g_p.ProcCommand(cmd) print r def initSock(self): try: self.client = ThreadingTCPServer(('', self.PORT), RecvServer) print 'listening on PORT', self.PORT self.client.serve_forever() except Exception, e: print e self.init = True
def run(self): ''' UDP服务端进程 :return: ''' bThdRunFlag = True while bThdRunFlag: self.m_evtWaitStop.wait(0.3) if self.m_evtWaitStop.isSet(): bThdRunFlag = False continue try: g_logger.debug("TCP服务端接收消息") #购置TCPServer对象, server = ThreadingTCPServer(self.addr, MyBaseRequestHandlerr) #启动服务监听 server.serve_forever() except Exception, e: print e.args[0], e.args[1] g_logger.error("接收出现异常,错误码为:%s,信息为:%s" % (e.args[0], e.args[1]))
def main():
    """Configure basic logging and serve the Encoder handler on 0.0.0.0:8118."""
    logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
    ThreadingTCPServer.allow_reuse_address = True
    server = ThreadingTCPServer(('0.0.0.0', 8118), Encoder)
    server.serve_forever()
def start(program, options): ThreadingTCPServer.allow_reuse_address = True server = ThreadingTCPServer((host, port), QuasselGrepHandler) server.program = program server.options = options server.serve_forever() print "Finishing."
def run_as_backup_offline():
    """Run the backup node's TCP server; log a traceback if serving ever returns."""
    logging.info("begin run as backup offline")
    global backup_ip
    global backup_port
    server = ThreadingTCPServer((backup_ip, backup_port), MyStreamRequestHandler)
    server.serve_forever()
    logging.error(traceback.format_exc())
def start(port=21567): """ this function to start the server, and maintain listening the port. """ host = '' address = (host, port) tcpServ = ThreadingTCPServer(address, MyRequestHandler) print 'waiting for connection...' tcpServ.serve_forever()
def start_server():
    """Kill any previous instance on the port, then serve HTTP requests locally."""
    myport = 8886
    kill_server(myport)
    Log.logger.debug('Start Server...')
    server = ThreadingTCPServer(("127.0.0.1", myport), HttpServerHandler)
    server.serve_forever()
def Start_Server(): try: print 'server is running....' httpd_address = ('192.168.10.93', 5555) myhttpd = ThreadingTCPServer(httpd_address, Custom_HTTPRequestHandler) myhttpd.serve_forever() except KeyboardInterrupt: myhttpd.socket.close()
def _server(self): """ 进程服务入口函数 """ host= '' ADDR = (host, self.port) TCP.allow_reuse_address = True tcpServ = TCP(ADDR, MyRequestHandler) print 'waiting for connection...' tcpServ.serve_forever()
def serve_forever(self): try: ThreadingTCPServer.serve_forever(self) except Exception, e: if not self.run: return raise
def _server(self): """ 进程服务入口函数 """ host = '' ADDR = (host, self.port) TCP.allow_reuse_address = True tcpServ = TCP(ADDR, MyRequestHandler) print 'waiting for connection...' tcpServ.serve_forever()
class Server(Thread):
    """Thread wrapper around a ThreadingTCPServer."""

    def __init__(self, host="127.0.0.1", port=3794, handler=Handler):
        Thread.__init__(self)
        self.server = ThreadingTCPServer((host, port), handler)

    def run(self):
        # Blocks until stopRunning() is called.
        self.server.serve_forever()

    def stopRunning(self):
        # Stop the serve loop first, then release the socket.
        self.server.shutdown()
        self.server.server_close()
def start_server(host, port):
    """Serve client_handler on (host, port); log any server-level failure."""
    try:
        server = ThreadingTCPServer((host, port), client_handler)
        print_log("waitting connect...")
        print_log("Listen {}:{}".format("localhost", port))
        server.serve_forever()
    except Exception as e:
        print_log("threading tcp server error {}".format(e))
def main(): global world world = World() z = ThreadingTCPServer(('', 4000), MudHandler) try: z.serve_forever() except KeyboardInterrupt: world.global_message('World is shutting down') for plr in world.players_at_location(None): try: plr.parse('quit') except: print 'ERROR: %s could not quit gracefully' % plr.name z.server_close() world.save()
def init(method, cacheObj=None):
    """Initialise the module-level handler and cache, then serve on 127.0.0.1:8000.

    method   -- request-handling callable stored in the global handleMethod
    cacheObj -- optional cache instance; a default Cache("simple") is used when None
    """
    global handleMethod, cache
    handleMethod = method
    # BUGFIX: compare with None by identity, not equality (PEP 8).
    if cacheObj is None:
        cache = Cache("simple")
    else:
        cache = cacheObj
    host = "127.0.0.1"
    port = 8000  # port
    addr = (host, port)  # listen address
    server = ThreadingTCPServer(addr, MyStreamRequestHandlerr)
    server.serve_forever()
def createAndStartServer():
    """Start the global socket server with SO_LINGER set for immediate close."""
    global serv
    ThreadingTCPServer.allow_reuse_address = True
    serv = ThreadingTCPServer(('', 20000), SocketHandler, bind_and_activate=False)
    # Hard-close connections on shutdown: linger enabled with zero timeout.
    serv.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
                           struct.pack('ii', 1, 0))
    serv.server_bind()
    serv.server_activate()
    HP.logger.info('Starting server')
    serv.serve_forever()  # blocking method
class LogServer():
    '''Log server; serve_forever() blocks, so start() should run in its own
    process or thread.'''

    def __init__(self, addr, requestHandler):
        self.bindAddress = addr
        self.requestHandler = requestHandler
        logging.config.fileConfig(LOGCONFIG)

    def start(self):
        self.svr = ThreadingTCPServer(self.bindAddress, self.requestHandler)
        self.svr.serve_forever()

    def stop(self):
        self.svr.shutdown()
def run(self): sock = None ThreadingTCPServer.daemon_threads = True ThreadingTCPServer.request_queue_size = 1024 ThreadingTCPServer.allow_reuse_address = True try: sock = ThreadingTCPServer(self._addrs, SOCK2HttpHandler) logging.warning('-'*50) logging.warning('sock proxy start on:%s' % repr(self._addrs)) sock.serve_forever() except Exception, e: logging.error('fatal error:%s' % e)
def start_CC_simulator(configname, listenport): global global_config_file global global_config_name global global_payload global_config_name = configname print global_config_name config = __import__(configname) if (not configcheck(config)): ErrorPrint("Syntax error in config file") else: addr = ("", listenport) server = ThreadingTCPServer(addr, MyStreamRequestHandler) server.serve_forever()
# NOTE(review): request dispatcher for the agent's TCP control channel: parses an
# ndb-encoded task, routes on (tasktype, type) and replies with an ndb blob.
# Kept byte-identical: in this collapsed single-line form the exact branch nesting
# is ambiguous (it is unclear which elif branches rebuild send_data before the
# final self.request.send), so reformatting could silently change behavior —
# TODO confirm nesting against the original file before restructuring.
def start_tcp_server(): class Handler(BaseRequestHandler): def handle (self): addr = self.request.getpeername() log.info('Got a connection from {0}'.format(str(addr))) while True: data = self.request.recv(socketclient.SOCKET_BUFFER_SIZE).strip() if not data:break log.info('receive from ({0}):\n{1}'.format(self.client_address, data)) data_dict = ndb.load_string(data) taskinfo = data_dict.get('root') if taskinfo == None: self.request.send('task command error'.encode('utf8')) return task_type = taskinfo.get('tasktype') _type = taskinfo.get('type') task_id = taskinfo.get('taskid') send_data = {'error':'error'} if task_type =='collect' and _type == 'rules': data_dict['datas'] = config_cache.load_rules() send_data = ndb.build_node('root', data_dict) elif task_type =='collect' and _type == 'agent': data_dict['datas'] = controller.load_agent_config() send_data = ndb.build_node('root', data_dict) elif task_type =='collect' and _type == 'status': data_dict['datas'] = collect.collect().load_status() elif task_type =='collect' and _type == 'sysinfo': data_dict['datas'] = config_cache.load_sysinfo() elif task_type =='control' and _type == 'rule': datas = taskinfo.get('datas') data_dict['datas'] = task_manage.execute_job(task_id, datas) elif task_type =='control' and _type == 'preview': datas = taskinfo.get('datas') data_dict['datas'] = task_manage.preview_job(datas) send_data = ndb.build_node('root', data_dict) self.request.send(send_data.encode('utf8')) #获取本地IP local_ip = controller.load_agent_config().get('host', '127.0.0.1') #获取配置文件中的服务端口 service_port = int(controller.load_agent_config().get('tcp_server_port', 8888)) print 'start server' + str(local_ip) + ':' + str(service_port) server = ThreadingTCPServer((local_ip, service_port), Handler) server.serve_forever()
def Start_Server(host, port): try: print 'server is running....' httpd_address = (host, int(port)) myhttpd = ThreadingTCPServer(httpd_address, Custom_HTTPRequestHandler) #myhttpd = HTTPServer(httpd_address, Custom_HTTPRequestHandler) print 'myhttpd:', myhttpd myhttpd.serve_forever() except KeyboardInterrupt: print 11111111111 print 'myhttpd:', myhttpd #myhttpd.socket.close() myhttpd.shutdown print 22222222222
class Server(threading.Thread):
    """TCP server thread bound to the given host and port."""

    def __init__(self, host=None, port=None):
        threading.Thread.__init__(self)
        self.__host = host
        self.__port = port
        self.server = ThreadingTCPServer((self.__host, self.__port), Handler)

    def run(self):
        # Refuse to serve when the address was never supplied.
        if self.__host is None or self.__port is None:
            log.err('Please specify the host ip and port.')
            return
        log.i("Start TCP server at %s:%d" % (self.__host, self.__port))
        self.server.serve_forever()
def run(self):
    """ """
    s = None
    HOST = ''  # Symbolic name meaning the local host.
    if sys.platform.startswith("win"):
        HOST = socket.gethostbyaddr(socket.gethostname())[2][0]
    else:
        # Extract the first IPv4 address from ifconfig output.
        import commands
        ifconfig_out = commands.getoutput('/sbin/ifconfig')
        HOST = re.search('inet addr:(\d+\.\d+\.\d+\.\d+)', ifconfig_out).group(1)
    PORT = 50001
    srv = ThreadingTCPServer((HOST, PORT), XMLRequestHandler)
    self.socket = srv.socket
    srv.serve_forever()
class ConnWebServer(Thread): def __init__(self): Thread.__init__(self) self.WSPORT = 1240 self.init = False self.startrecv = False global g_p g_p = ConnProc() global g_connlist g_connlist = [] global g_connid g_connid = [] global g_conntype g_conntype = [] def run(self): self.initSock() def executeCommand(self, cmd): # not implemented yet #print r = g_p.ProcCommand(cmd) print r def initSock(self): try: self.clientWS = ThreadingTCPServer(('', self.WSPORT), WebSocketServer, False) print 'listening on PORT(WS)', self.WSPORT self.clientWS.allow_reuse_address = True self.clientWS.server_bind() self.clientWS.server_activate() self.clientWS.serve_forever() except Exception, e: print e self.init = True
class ConnWebServer(Thread): def __init__(self): Thread.__init__(self) self.WSPORT = 1240 self.init = False self.startrecv = False global g_p g_p = ConnProc() global g_connlist g_connlist = [] global g_connid g_connid = [] global g_conntype g_conntype = [] def run(self): self.initSock() def executeCommand(self, cmd): # not implemented yet #print r = g_p.ProcCommand(cmd) print r def initSock(self): try: self.clientWS = ThreadingTCPServer( ('', self.WSPORT), WebSocketServer, False) print 'listening on PORT(WS)', self.WSPORT self.clientWS.allow_reuse_address = True self.clientWS.server_bind() self.clientWS.server_activate() self.clientWS.serve_forever() except Exception, e: print e self.init = True;
def serve_forever(self):
    """serve_forever variant that swallows socket errors from a closed server."""
    try:
        ThreadingTCPServer.serve_forever(self)
    except socket.error:
        # no error output if closed
        pass
#!/usr/bin/env python from SocketServer import (TCPServer as TCP, StreamRequestHandler as SRH, ThreadingTCPServer as TTCP) from time import ctime HOST = '' PORT = 21567 ADDR = (HOST, PORT) class MyRequestHandler(SRH): def handle(self): print '...connected from:', self.client_address while (True): self.wfile.write('[%s] %s' % (ctime(), self.rfile.readline())) tcpServ = TTCP(ADDR, MyRequestHandler) print 'waiting for connection...' tcpServ.serve_forever()
row[3], row[4], )) # STA ACTION for row in sta_action_list: cur.execute( "INSERT INTO sta_action values(NULL, %s, %s, %s" ", %s, %s, %s)", ( device, row[1], row[2], row[3], row[4], row[5], )) print 'Succ:', str(self.client_address[0]), time.strftime( '%Y-%m-%d %X', time.localtime()) except: traceback.print_exc() print 'MySQL Err:', str(self.client_address[0]) pass if __name__ == "__main__": initDB() server = ThreadingTCPServer(listen_addr, TCPHandler) server.serve_forever()
def run(self):
    """Serve the plugin TCP handler on the port read from settings line 4."""
    global List
    List = self.List  # pass the Tkinter UI object in via the module global
    server = ThreadingTCPServer(("", int(ReadSettingsLineName(4))),
                                MyBaseRequestHandlerrTCP_ChaJian)
    server.serve_forever()
HOST='192.168.153.124' PORT=1111 ADDR=(HOST, PORT) class MyRequestHandler(StreamRequestHandler): def handle(self): print '...connected from:' , self.client_address self.data=self.rfile.readline().strip() if self.data: print self.data msg_split=self.data.split('+') if not cmp('pub',msg_split[0]): if cmp(HOST,msg_split[1]): NextIp=method.chooseNextNode(IPTABLE,msg_split[1]) method.send(NextIp,1111,self.data) else: index.storeGIndex((msg_split[2],msg_split[3])) if not cmp('test',self.data): self.wfile.write(str(HOST)+':Received') if not cmp('build',self.data): print 'good here' index.buildIndex() if __name__ == "__main__": tcpServ = ThreadingTCPServer(ADDR, MyRequestHandler) print 'waiting for connection...' tcpServ.serve_forever()
# NOTE(review): Custom_SynServer builds two BaseHTTPRequestHandler subclasses that
# serve canned JSON read from vdatafile after a fixed 30-second sleep, selecting
# the handler class by vfunctioncode ('1005000001' -> the IMTongxlu variant).
# Kept byte-identical: the collapsed single-line layout makes the exact statement
# nesting (notably the 'favicon.ico' guard and the return paths in _getdata)
# too ambiguous to reformat safely — TODO confirm against the original file.
def Custom_SynServer(vhost, vport, vfunctioncode, vdatafile): class Custom_HTTPRequestHandler(BaseHTTPRequestHandler): def _getdata(self, path, params): '''Get response data''' print 'request params: ', params if '/niiwoo-open-api/openApiController/' != path: return None if 'favicon.ico' != params: params_dic = json.loads(params) functioncode = params_dic['FunctionCode'] if (vfunctioncode != functioncode): return None #读取json文件数据 datafile = vdatafile datafp = open(datafile, 'r') data = datafp.read() response_data = "".join(data.split()) return response_data def _writeheader(self, data): '''Write header''' if data is None: self.send_response(404) else: self.send_response(200) self.send_header('Content-Type', 'text/plain;charset=utf-8') self.end_headers() def do_GET(self): '''Handle get request''' print "Handling with thread: ", threading.currentThread().getName() print 'got connection from ', self.client_address #解析请求参数 path_list = self.path.split('/') request_params_url = path_list[-1] request_path = self.path.replace(request_params_url, '') request_params = urllib.unquote(request_params_url) data = self._getdata(request_path, request_params) import time print 'sleep...' 
time.sleep(30) self._writeheader(data) if data is None: self.wfile.write('None') else: self.wfile.write(data) class Custom_HTTPRequestHandler_IMTongxlu(BaseHTTPRequestHandler): def _getdata(self, path, params): '''Get response data''' print 'request params: ', params if '/txlService/api/sync-mobile-contacts' != path: return None if 'favicon.ico' != params: params = '{"' + params + '"}' params_jsonstr = params.replace('=', '":"').replace('&', '","') params_dic = json.loads(params_jsonstr) functioncode = params_dic['FunctionCode'] if (vfunctioncode != functioncode): return None #读取json文件数据 datafile = vdatafile datafp = open(datafile, 'r') data = datafp.read() response_data = "".join(data.split()) return response_data def _writeheader(self, data): '''Write header''' if data is None: self.send_response(404) else: self.send_response(200) self.send_header('Content-Type', 'text/plain;charset=utf-8') self.end_headers() def do_GET(self): '''Handle get request''' print "Handling with thread: ", threading.currentThread().getName() print 'got connection from ', self.client_address #解析请求参数 path_list = self.path.split('?') request_params = path_list[-1] request_path = path_list[0] data = self._getdata(request_path, request_params) import time time.sleep(30) self._writeheader(data) if data is None: self.wfile.write('None') else: self.wfile.write(data) try: httpd_address = (vhost, int(vport)) if '1005000001' == vfunctioncode: myhttpd = ThreadingTCPServer(httpd_address, Custom_HTTPRequestHandler_IMTongxlu) print "server:IMTongxlu" else: myhttpd = ThreadingTCPServer(httpd_address, Custom_HTTPRequestHandler) print 'server is running....' myhttpd.serve_forever() except KeyboardInterrupt: myhttpd.socket.close()
# NOTE(review): ServerThread is a daemon thread that serves a JSON crawl API on
# 127.0.0.1:50005: Handler.handle() parses {'url': ...}, runs a search, crawls
# each result page's links via get_page_link_list (retrying on HTTP 503), and
# sends back a JSON blob of (link, source) pairs via a shared ThreadPool.
# Kept byte-identical: the block is very large, interleaves live code with long
# commented-out regions, and its collapsed layout makes loop/try nesting too
# ambiguous to reformat without risking a behavior change.
class ServerThread(threading.Thread): def __init__(self): super(type(self), self).__init__() self.setDaemon(True) self.start() class Handler(StreamRequestHandler): req_num = 0 lock = threading.Lock() def get_page_link_list(self, url): #mc = MyCurl(proxy_ip='192.168.200.253:3128', accept_encoding='gzip, deflate') #mc = MyCurl(proxy_ip='127.0.0.1:8888', accept_encoding='gzip, deflate') mc = MyCurl(accept_encoding='gzip, deflate') pic_regex = re.compile( r'.+\.(jpg|jpeg|gif|png|bmp|xml|json|swf|zip)$') _a_list = [] scheme, netloc = urlparse.urlparse(url)[:2] if scheme not in ('http', 'https'): return [] try: count = 0 while True: h, page = mc.get_page(url) #对于q=xxx site:domain.xxx之类的搜索 搜索结果如果有100条 频繁的访问domain.xxx 会导致503 尤其是这个网站用了cf之类的 if h['http-code'] == 503: if count > 2: return [] else: time.sleep(3) count += 1 continue else: break page = gzdecode(page) content_type = h['content-type'] if 'text/html' not in content_type: print 'content_type: %s' % content_type return [] r = re.search(r'charset=(.+)', content_type) if not r: charset = 'utf-8' else: charset = r.group(1) page = page.decode(charset, 'ignore') d = pq(page) a_list = d('a, iframe') for l in a_list.items(): if l[0].tag == 'iframe': href = l.attr('src') if not href: continue else: href = l.attr('href') if not href: continue if 'javascript' in href: continue if pic_regex.search(href.lower()): continue href = 'http:' + href if href[:2] == '//' else href _scheme, _netloc = urlparse.urlparse(href)[:2] if _scheme and (_scheme not in ('http', 'https')): continue if not _scheme and not _netloc: href = scheme + '://' + netloc + ('' if href[0] == '/' else '/') + href _a_list.append(href) except Exception as e: logger.error('get_page_link_list %s %s' % (url, str(e))) logger.exception(e) else: pass # try: # if not _a_list: # _uuid = uuid.uuid1().get_hex() # localtime = time.localtime() # tmp_name = str(localtime[0]) + str(localtime[1]) + str(localtime[2]) + _uuid[0:8] + _uuid[16:20] # with open(tmp_name + '.html', 
'w+') as f: # f.write(url + '\n') # f.write(page) # with open(tmp_name + '2.html', 'w+') as f2: # f2.write(url + '\n') # f2.write(str(d)) # except Exception as e: # logger.error('if not _a_list %s %s' % (url, str(e))) # logger.exception(e) return [(l, url) for l in list(set(_a_list))] # def func(self, link_url): # pool = self.server.pool # link_url = link_url.strip() # if not link_url: # logger.debug('google search url null') # return '' # list_url = self.get_page_link_list(link_url) # logger.debug('req_num: %d, link_url: %s, has %d url[begin]' % (self.req_num, link_url, len(list_url))) # r = reduce(lambda _list, elem: _list.extend(elem) or _list, pool.map(self.get_page_link_list, [l[0] for l in list_url]), list()) # for l in list_url: # r.append((l[0], link_url)) # logger.debug('req_num: %d, link_url: %s, has %d url[end]' % (self.req_num, link_url, len(list_url))) # return r def get_num(self): self.lock.acquire() self.__class__.req_num += 1 req_num = self.__class__.req_num self.lock.release() return req_num def handle(self): pool = self.server.pool data = self.request.recv(1024) if not data: logger.debug('self.request.recv null') return try: data = json.loads(data) search_url = data['url'] except: logger.debug('data error') return try: self.req_num = self.get_num() logger.debug('[begin]req_num: %d, search url: %s' % (self.req_num, search_url)) except Exception as e: logger.debug('search url: %s, error %s' % (search_url, str(e))) logger.exception(e) try: #extraInfo = data['extraInfo'] if not search_url: logger.debug('search_url null') logger.debug('[end]req_num: %d, search url: %s' % (self.req_num, search_url)) self.request.send('') else: link_url_list = Search().search(search_url) logger.debug('req_num: %d, google搜索返回的记录数 %d' % (self.req_num, len(link_url_list))) link_url_list = [ 'http:' + l if l[:2] == '//' else l for l in link_url_list ] #link_url_list = [(l, req_num) for l in link_url_list] _results = [] for l in link_url_list: list_url = 
self.get_page_link_list(l) if config.crawl_level == 2: result = reduce( lambda _list, elem: _list.extend(elem) or _list, pool.map(self.get_page_link_list, [l[0] for l in list_url]), list()) for j in list_url: result.append((j[0], l)) _results.extend(result) else: for j in list_url: _results.append((j[0], l)) d = {} d['request'] = data d['response'] = _results #with open('out.json', 'w+') as f: for l in link_url_list: d['response'].append((l, search_url)) logger.debug( '[end]req_num: %d, search url: %s, 搜索到的记录数 %d' % (self.req_num, search_url, len(d['response']))) out_str = json.dumps(d) self.request.send(out_str) except Exception as e: logger.debug('[end]req_num: %d, search url: %s, error %s' % (self.req_num, search_url, str(e))) logger.exception(e) # def send_response(self, res): # logger.debug('send_response: %d' % len(res)) # try: # connection = pika.BlockingConnection(pika.ConnectionParameters('localhost')) # channel = connection.channel() # channel.queue_declare(queue='google_search') # channel.basic_publish(exchange='', routing_key='google_search', body=res) # connection.close() # except Exception as e: # logger.error(str(e)) # logger.exception(e) class DebugThread(threading.Thread): def __init__(self, pool): super(type(self), self).__init__() self.pool = pool self.setDaemon(True) self.start() def run(self): logger.info('debug thread start!!!') while True: logger.info('pool state %d' % self.pool._state) for l in self.pool._pool: print l.is_alive(), l.name print len(self.pool._pool) time.sleep(5) def run(self): logger.info('server thread start!!!') try: self.server = ThreadingTCPServer(('127.0.0.1', 50005), self.Handler) self.server.pool = ThreadPool(config.thread_num) #self.DebugThread(self.server.pool) self.server.serve_forever() except Exception as e: logger.error(str(e) + ' 具体栈回溯信息查看crit.log ') logger.exception(e) def shut_down(self): self.server.shutdown()
# NOTE(review): LoggerServer is a thread that sets up a file-backed logging
# pipeline (FileHandler wrapped in MemoryHandler, with hand-rolled log-file
# rotation because RotatingFileHandler's os.rename() was unreliable) and then
# serves LogRequestHandler on the configured address until stop_server() flushes
# the memory handlers and shuts the server down.
# Kept byte-identical: the collapsed layout makes the rotation loop's nesting
# (which os.rename/os.remove calls sit inside which if) too ambiguous to
# reformat safely — TODO confirm against the original file.
class LoggerServer(threading.Thread): def __init__(self): super(LoggerServer, self).__init__() self.server = None self.mh = None self.omh = None def run(self): # check logger path logger_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), constants.LOGGER_DIR) logger_online_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), constants.LOGGER_ONLINE_DIR) if os.path.exists(logger_path): if not os.path.isdir(logger_path): os.remove(logger_path) os.mkdir(logger_path) else: os.mkdir(logger_path) logger_file = os.path.join(logger_path, constants.LOGGER_FILE) logger_online_file = os.path.join(logger_online_path, constants.LOGGER_ONLINE_FILE) # rotating file handler #rh = logging.handlers.RotatingFileHandler(logger_file, # maxBytes = constants.LOGGER_FILE_MAX_BYTE, # backupCount = constants.LOGGER_FILE_BACKUP_COUNT) #rh.setLevel(logging.DEBUG) # RotatingFileHandler failed sometimes because of os.rename(), so change to FileHandler, and maintain the log files manually if os.path.exists(logger_file): logger_file_stat = os.stat(logger_file) if logger_file_stat.st_size > constants.LOGGER_FILE_MAX_BYTE: file_list = os.listdir(logger_path) for count in range(constants.LOGGER_FILE_BACKUP_COUNT, 0, -1): if os.path.exists('%s.%s' % (logger_file, count)): if count == constants.LOGGER_FILE_BACKUP_COUNT: os.remove('%s.%s' % (logger_file, count)) else: os.rename('%s.%s' % (logger_file, count), '%s.%s' % (logger_file, count + 1)) os.rename(logger_file, '%s.%s' % (logger_file, str(1))) fh = logging.FileHandler(logger_file) fh.setLevel(logging.DEBUG) ofh = logging.FileHandler(logger_online_file) ofh.setLevel(logging.DEBUG) # memory handler for rotate file handler self.mh = logging.handlers.MemoryHandler(constants.LOGGER_FILE_MEMORY_CACHE, target = fh) self.mh.setLevel(logging.DEBUG) self.omh = logging.handlers.MemoryHandler(constants.LOGGER_FILE_MEMORY_CACHE, target = ofh) self.omh.setLevel(logging.DEBUG) # set logger format formatter = 
logging.Formatter("%(asctime)s - %(name)s - %(filename)s[line:%(lineno)d] - %(levelname)s - %(message)s") #rh.setFormatter(formatter) fh.setFormatter(formatter) ofh.setFormatter(formatter) self.mh.setFormatter(formatter) self.omh.setFormatter(formatter) # main logger logger = logging.getLogger(constants.LOGGER_SERVER_NAME) logger.propagate = 0 logger.setLevel(logging.DEBUG) # add handle to logger logger.addHandler(self.mh) logger.addHandler(self.omh) self.server = ThreadingTCPServer((constants.LOGGER_SERVER_IP, constants.LOGGER_SERVER_PORT), LogRequestHandler) self.server.serve_forever() self.server.server_close() def stop_server(self): if self.mh: self.mh.flush() if self.omh: self.omh.flush() if self.server: self.server.shutdown() def get_server(self): return self.server
active = True while active: transmission = self.request.recv(1024) # wait for something to happen if transmission: command = transmission.split()[0] data = transmission[1+len(command): ] # the rest if command == 'ADD': username = data.strip() _socketLookup[username] = self.request _broadcast('NEW %s\n' % username) elif command == 'MESSAGE': _broadcast('MESSAGE %s\n%s\n' % (username,data) ) elif command == 'PRIVATE': rcpt = data.split('\n')[0] if rcpt in _socketLookup: content = data.split('\n')[1] _socketLookup[rcpt].send('PRIVATE %s\n%s\n'%(username,content) ) elif command == 'QUIT': active = False self.request.send('GOODBYE\n') # acknowledge else: active = False # socket failed self.request.close() _socketLookup.pop(username) _broadcast('LEFT %s\n' % username) # inform others myServer = ThreadingTCPServer( ('localhost', 9000), ChatHandler) myServer.serve_forever()
def serve_forever(self, poll_interval=0.5):
    """Log the listening port, then delegate to ThreadingTCPServer.serve_forever."""
    logging.info("Create SOCKS5 server at port %d" % self.__port)
    ThreadingTCPServer.serve_forever(self, poll_interval)
def run(self):
    """Serve RequestHandler on (self.host, self.port); log failures with traceback."""
    srv = ThreadingTCPServer((self.host, self.port), RequestHandler)
    try:
        srv.serve_forever()
    except Exception as err:
        logger.debug(traceback.format_exc())
def start_server(): listen = (wanip or natip, 843) tcpServ = TCP(listen, RequestHandler) print 'listen on %s:%d' % listen tcpServ.serve_forever()
def start():
    """Run the hardware-control server; clean up hardware state on Ctrl-C."""
    try:
        server = ThreadingTCPServer(('0.0.0.0', 8088), ServerHandle)
        server.serve_forever()
    except KeyboardInterrupt:
        hardware.clean()
#!/usr/bin/python from SocketServer import ThreadingTCPServer, StreamRequestHandler import traceback import commands class MyStreamRequestHandlerr(StreamRequestHandler): def handle(self): while True: try: data = self.request.recv(1024).strip() print "receive from (%r):%r" % (self.client_address, data) cmd_status,cmd_result=commands.getstatusoutput(data) if len(cmd_result.strip()) ==0: self.request.sendall('Done.') else: self.request.sendall(cmd_result) except: traceback.print_exc() break if __name__ == "__main__": host = "" port = 50007 addr = (host, port) server = ThreadingTCPServer(addr, MyStreamRequestHandlerr) print "HI~ listern on ",port server.serve_forever()
def create_server(addr, port, handler):
    """Bind, activate, and serve a ThreadingTCPServer with address reuse enabled."""
    # Deferred bind so allow_reuse_address takes effect before binding.
    srv = ThreadingTCPServer((addr, port), handler, bind_and_activate=False)
    srv.allow_reuse_address = True
    srv.server_bind()
    srv.server_activate()
    srv.serve_forever()
def serve():
    """Serve SyncRequestHandler on all interfaces, port 9092."""
    server = ThreadingTCPServer(('', 9092), SyncRequestHandler)
    server.serve_forever()
def run(self):
    """Serve the operations TCP handler on the port read from settings line 3."""
    srv = ThreadingTCPServer(("", int(ReadSettingsLineName(3))),
                             MyBaseRequestHandlerrTCP_YunYing)
    srv.serve_forever()
})) elif not authed: self.json_headers() if not globals()['run_already']: open_new_tab(liw.authentication.authorization_url) globals()['run_already'] = True self.wfile.write( dumps({ 'path': self.path, 'authed': type(liw.authentication.token) is NoneType })) elif authed and len(parsedurl.path) and parsedurl.path[1:] in dir( liw.application): self.json_headers() self.wfile.write( dumps(getattr(liw.application, parsedurl.path[1:])())) else: self.json_headers(501) self.wfile.write(dumps({'error': 'NotImplemented'})) if __name__ == '__main__': ThreadingTCPServer.allow_reuse_address = True httpd = ThreadingTCPServer(('localhost', PORT), CustomHandler) print 'Server started on port:', PORT httpd.serve_forever()
from SocketServer import BaseRequestHandler, ThreadingTCPServer from time import sleep import sys, socket from webloglib import log_fields, hit_tag class WebLogHandler(BaseRequestHandler): def handler(self): print "Connected from", self.client_address self.request.sendall('<hits>') try: while True: for hit in LOG.readlines(): self.request.sendall(hit_tag % log_fields(hit)) sleep(5) except socket.error: self.request.close() print "Disconncted from", self.client_address if __name__ == '__main__': global LOG LOG = open('access-log') LOG.seek(0, 2) srv = ThreadingTCPServer(('', 8888), WebLogHandler) srv.serve_forever()
if parsedurl.path == '/code': self.json_headers() liw.authentication.authorization_code = params_to_d(self.path).get('code') self.wfile.write(dumps({'access_token': liw.authentication.get_access_token(), 'routes': filter(lambda d: not d.startswith('_'), dir(liw.application))})) elif parsedurl.path == '/routes': self.json_headers() self.wfile.write(dumps({'routes': filter(lambda d: not d.startswith('_'), dir(liw.application))})) elif not authed: self.json_headers() if not globals()['run_already']: open_new_tab(liw.authentication.authorization_url) globals()['run_already'] = True self.wfile.write(dumps({'path': self.path, 'authed': type(liw.authentication.token) is NoneType})) elif authed and len(parsedurl.path) and parsedurl.path[1:] in dir(liw.application): self.json_headers() self.wfile.write(dumps(getattr(liw.application, parsedurl.path[1:])())) else: self.json_headers(501) self.wfile.write(dumps({'error': 'NotImplemented'})) if __name__ == '__main__': httpd = ThreadingTCPServer(('localhost', PORT), CustomHandler) print 'Server started on port:', PORT httpd.serve_forever()
while True: self.data = self.request.recv(1024) if not self.data: break gLock.acquire() print "Server received {0} bytes on thread {1} from {2}:{3}".format(len(self.data), threading.current_thread().name, *self.client_address) print " {0}".format(self.data) gLock.release() self.request.send(self.data) try: s = ThreadingTCPServer((server_addr, server_port), EchoHandler) s.allow_reuse_address = True print "Server started" s.serve_forever() except (KeyboardInterrupt, SystemExit): pass finally: s.shutdown() print "Server stopped"
fil = self.path.strip("/") if isfile(fil): z = ctime(getmtime(fil)) y = self.headers.get('If-Modified-Since', None) b = strptime(self.headers.get(y, "%a %b %d %H:%M:%S")) a = strptime(z, "%a %b %d %H:%M:%S") if b > a: self.send_response(304) self.end_headers() return None return SimpleHTTPRequestHandler.send_head(self) def end_headers(self): self.send_header('Cache-control', 'must-revalidate') SimpleHTTPRequestHandler.end_headers(self) def do_POST(self): self.send_response(200) self.send_header('Cache-control', 'no-cache') SimpleHTTPRequestHandler.end_headers(self) host = "" # try: Server_url = (host, s_port) s = ThreadingTCPServer(Server_url, HTTPCacheRequestHandler) s.allow_reuse_address = 1 s.serve_forever() # except: # pass
class ServerThread(threading.Thread):
    """Daemon thread running a ThreadingTCPServer on 127.0.0.1:50005.

    Each request carries a JSON payload {'url': <search url>}; the nested
    Handler performs the search, crawls the result links, and sends the
    collected (link, referrer) pairs back to the client as JSON.
    """

    def __init__(self):
        super(type(self), self).__init__()
        self.setDaemon(True)  # do not block interpreter exit
        self.start()          # thread starts itself on construction

    class Handler(StreamRequestHandler):
        """Per-connection handler: search -> crawl -> JSON response."""

        req_num = 0              # class-wide request counter (for log correlation)
        lock = threading.Lock()  # guards req_num

        def get_page_link_list(self, url):
            """Fetch *url* and return [(link, url), ...] for every usable
            <a href> / <iframe src> found in the page.

            Returns [] for non-http(s) schemes, non-HTML responses, or
            persistent 503s (retried up to 3 times, 3 s apart).
            """
            #mc = MyCurl(proxy_ip='192.168.200.253:3128', accept_encoding='gzip, deflate')
            #mc = MyCurl(proxy_ip='127.0.0.1:8888', accept_encoding='gzip, deflate')
            mc = MyCurl(accept_encoding='gzip, deflate')
            # asset extensions we never want to crawl
            pic_regex = re.compile(r'.+\.(jpg|jpeg|gif|png|bmp|xml|json|swf|zip)$')
            _a_list = []
            scheme, netloc = urlparse.urlparse(url)[:2]
            if scheme not in ('http', 'https'):
                return []
            try:
                count = 0
                while True:
                    h, page = mc.get_page(url)
                    # For searches like "q=xxx site:domain.xxx": with ~100
                    # results, hitting domain.xxx this frequently can trigger
                    # 503s, especially when the site sits behind Cloudflare or
                    # similar protection -- retry up to 3 times, 3 s apart.
                    if h['http-code'] == 503:
                        if count > 2:
                            return []
                        else:
                            time.sleep(3)
                            count += 1
                            continue
                    else:
                        break
                page = gzdecode(page)
                content_type = h['content-type']
                if 'text/html' not in content_type:
                    print 'content_type: %s' % content_type
                    return []
                r = re.search(r'charset=(.+)', content_type)
                if not r:
                    charset = 'utf-8'  # fall back when the header omits charset
                else:
                    charset = r.group(1)
                page = page.decode(charset, 'ignore')
                d = pq(page)
                a_list = d('a, iframe')
                for l in a_list.items():
                    if l[0].tag == 'iframe':
                        href = l.attr('src')
                        if not href:
                            continue
                    else:
                        href = l.attr('href')
                        if not href:
                            continue
                    if 'javascript' in href:
                        continue
                    if pic_regex.search(href.lower()):
                        continue
                    # protocol-relative //host/path -> http://host/path
                    href = 'http:' + href if href[:2] == '//' else href
                    _scheme, _netloc = urlparse.urlparse(href)[:2]
                    if _scheme and (_scheme not in ('http', 'https')):
                        continue
                    if not _scheme and not _netloc:
                        # relative link: resolve against the page's origin
                        href = scheme + '://' + netloc + ('' if href[0] == '/' else '/') + href
                    _a_list.append(href)
            except Exception as e:
                logger.error('get_page_link_list %s %s' % (url, str(e)))
                logger.exception(e)
            else:
                pass
            # try:
            #     if not _a_list:
            #         _uuid = uuid.uuid1().get_hex()
            #         localtime = time.localtime()
            #         tmp_name = str(localtime[0]) + str(localtime[1]) + str(localtime[2]) + _uuid[0:8] + _uuid[16:20]
            #         with open(tmp_name + '.html', 'w+') as f:
            #             f.write(url + '\n')
            #             f.write(page)
            #         with open(tmp_name + '2.html', 'w+') as f2:
            #             f2.write(url + '\n')
            #             f2.write(str(d))
            # except Exception as e:
            #     logger.error('if not _a_list %s %s' % (url, str(e)))
            #     logger.exception(e)
            # de-duplicate, tagging every link with the page it came from
            return [(l, url) for l in list(set(_a_list))]

        # def func(self, link_url):
        #     pool = self.server.pool
        #     link_url = link_url.strip()
        #     if not link_url:
        #         logger.debug('google search url null')
        #         return ''
        #     list_url = self.get_page_link_list(link_url)
        #     logger.debug('req_num: %d, link_url: %s, has %d url[begin]' % (self.req_num, link_url, len(list_url)))
        #     r = reduce(lambda _list, elem: _list.extend(elem) or _list, pool.map(self.get_page_link_list, [l[0] for l in list_url]), list())
        #     for l in list_url:
        #         r.append((l[0], link_url))
        #     logger.debug('req_num: %d, link_url: %s, has %d url[end]' % (self.req_num, link_url, len(list_url)))
        #     return r

        def get_num(self):
            """Atomically bump and return the class-wide request counter."""
            self.lock.acquire()
            self.__class__.req_num += 1
            req_num = self.__class__.req_num
            self.lock.release()
            return req_num

        def handle(self):
            """Read one JSON request, search + crawl, reply with JSON.

            Request:  {'url': <search url>}.
            Response: {'request': <echo>, 'response': [(link, referrer), ...]}.
            """
            pool = self.server.pool
            data = self.request.recv(1024)
            if not data:
                logger.debug('self.request.recv null')
                return
            try:
                data = json.loads(data)
                search_url = data['url']
            except:  # NOTE(review): bare except also swallows KeyError etc.
                logger.debug('data error')
                return
            try:
                self.req_num = self.get_num()
                logger.debug('[begin]req_num: %d, search url: %s' % (self.req_num, search_url))
            except Exception as e:
                logger.debug('search url: %s, error %s' % (search_url, str(e)))
                logger.exception(e)
            try:
                #extraInfo = data['extraInfo']
                if not search_url:
                    logger.debug('search_url null')
                    logger.debug('[end]req_num: %d, search url: %s' % (self.req_num, search_url))
                    self.request.send('')
                else:
                    link_url_list = Search().search(search_url)
                    logger.debug('req_num: %d, google搜索返回的记录数 %d' % (self.req_num, len(link_url_list)))
                    # normalize protocol-relative results to http://
                    link_url_list = ['http:' + l if l[:2] == '//' else l for l in link_url_list]
                    #link_url_list = [(l, req_num) for l in link_url_list]
                    _results = []
                    for l in link_url_list:
                        list_url = self.get_page_link_list(l)
                        if config.crawl_level == 2:
                            # level 2: follow each discovered link one hop
                            # further, fanning out across the shared pool
                            result = reduce(lambda _list, elem: _list.extend(elem) or _list, pool.map(self.get_page_link_list, [l[0] for l in list_url]), list())
                            for j in list_url:
                                result.append((j[0], l))
                            _results.extend(result)
                        else:
                            for j in list_url:
                                _results.append((j[0], l))
                    d = {}
                    d['request'] = data
                    d['response'] = _results
                    #with open('out.json', 'w+') as f:
                    # the raw search results themselves also go in the reply
                    for l in link_url_list:
                        d['response'].append((l, search_url))
                    logger.debug('[end]req_num: %d, search url: %s, 搜索到的记录数 %d' % (self.req_num, search_url, len(d['response'])))
                    out_str = json.dumps(d)
                    self.request.send(out_str)
            except Exception as e:
                logger.debug('[end]req_num: %d, search url: %s, error %s' % (self.req_num, search_url, str(e)))
                logger.exception(e)

        # def send_response(self, res):
        #     logger.debug('send_response: %d' % len(res))
        #     try:
        #         connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
        #         channel = connection.channel()
        #         channel.queue_declare(queue='google_search')
        #         channel.basic_publish(exchange='', routing_key='google_search', body=res)
        #         connection.close()
        #     except Exception as e:
        #         logger.error(str(e))
        #         logger.exception(e)

    class DebugThread(threading.Thread):
        """Optional diagnostics thread: dumps worker-pool state every 5 s."""

        def __init__(self, pool):
            super(type(self), self).__init__()
            self.pool = pool
            self.setDaemon(True)
            self.start()

        def run(self):
            logger.info('debug thread start!!!')
            while True:
                # NOTE(review): _state/_pool are multiprocessing.pool
                # internals -- debug-only, may break across Python versions.
                logger.info('pool state %d' % self.pool._state)
                for l in self.pool._pool:
                    print l.is_alive(), l.name
                print len(self.pool._pool)
                time.sleep(5)

    def run(self):
        """Bind 127.0.0.1:50005 and serve until shut_down() is called."""
        logger.info('server thread start!!!')
        try:
            self.server = ThreadingTCPServer(('127.0.0.1', 50005), self.Handler)
            # shared worker pool used by Handler.handle for level-2 crawling
            self.server.pool = ThreadPool(config.thread_num)
            #self.DebugThread(self.server.pool)
            self.server.serve_forever()
        except Exception as e:
            logger.error(str(e) + ' 具体栈回溯信息查看crit.log ')
            logger.exception(e)

    def shut_down(self):
        """Stop serve_forever(); safe to call from another thread."""
        self.server.shutdown()